├── .bandit ├── .coveragerc ├── .flake8 ├── .github ├── ISSUE_TEMPLATE │ ├── bug-report.md │ └── feature-request.md ├── PULL_REQUEST_TEMPLATE.md ├── release-drafter.yml └── workflows │ ├── ci.yml │ ├── cron.yml │ ├── release-drafter.yml │ ├── release.yml │ └── reusable-qa.yml ├── .gitignore ├── .pre-commit-config.yaml ├── .pre-commit-hooks.yaml ├── .prettierrc.yaml ├── .readthedocs.yaml ├── CHANGELOG.md ├── CODE_OF_CONDUCT.md ├── CONTRIBUTING.md ├── LICENSE ├── README.md ├── codecov.yml ├── docs ├── .gitignore ├── changelog.md ├── cli │ ├── index.md │ ├── pip-compile.md │ └── pip-sync.md ├── conf.py ├── contributing.md ├── index.md ├── pkg │ └── .gitignore ├── requirements.in └── requirements.txt ├── examples ├── django.in ├── flask.in ├── hypothesis.in ├── protection.in ├── readme │ ├── constraints.txt │ └── pyproject.toml └── sentry.in ├── img └── pip-tools-overview.svg ├── piptools ├── __init__.py ├── __main__.py ├── _compat │ ├── __init__.py │ └── pip_compat.py ├── build.py ├── cache.py ├── exceptions.py ├── locations.py ├── logging.py ├── py.typed ├── repositories │ ├── __init__.py │ ├── base.py │ ├── local.py │ └── pypi.py ├── resolver.py ├── scripts │ ├── __init__.py │ ├── compile.py │ ├── options.py │ └── sync.py ├── subprocess_utils.py ├── sync.py ├── utils.py └── writer.py ├── pyproject.toml ├── tests ├── __init__.py ├── conftest.py ├── constants.py ├── test_build.py ├── test_cache.py ├── test_cli_compile.py ├── test_cli_sync.py ├── test_data │ ├── fake-editables.json │ ├── fake-index.json │ ├── minimal_wheels │ │ ├── small-fake-multi-arch-0.1.tar.gz │ │ ├── small_fake_a-0.1-py2.py3-none-any.whl │ │ ├── small_fake_a-0.2-py2.py3-none-any.whl │ │ ├── small_fake_a-0.3b1-py2.py3-none-any.whl │ │ ├── small_fake_b-0.1-py2.py3-none-any.whl │ │ ├── small_fake_b-0.2-py2.py3-none-any.whl │ │ ├── small_fake_b-0.3-py2.py3-none-any.whl │ │ ├── small_fake_multi_arch-0.1-py2.py3-none-manylinux1_i686.whl │ │ ├── 
small_fake_multi_arch-0.1-py2.py3-none-manylinux1_x86_64.whl │ │ ├── small_fake_multi_arch-0.1-py2.py3-none-win32.whl │ │ ├── small_fake_with_deps-0.1-py2.py3-none-any.whl │ │ ├── small_fake_with_deps_and_sub_deps-0.1-py2.py3-none-any.whl │ │ └── small_fake_with_unpinned_deps-0.1-py2.py3-none-any.whl │ └── packages │ │ ├── fake_with_deps │ │ └── setup.py │ │ ├── small_fake_a │ │ └── setup.py │ │ ├── small_fake_with_build_deps │ │ ├── backend │ │ │ └── backend.py │ │ ├── pyproject.toml │ │ └── setup.py │ │ ├── small_fake_with_deps │ │ └── setup.py │ │ ├── small_fake_with_deps_and_sub_deps │ │ └── setup.py │ │ ├── small_fake_with_pyproject │ │ └── pyproject.toml │ │ ├── small_fake_with_subdir │ │ └── subdir │ │ │ └── setup.py │ │ └── small_fake_with_unpinned_deps │ │ └── setup.py ├── test_fake_index.py ├── test_logging.py ├── test_minimal_upgrade.py ├── test_pip_compat.py ├── test_repository_local.py ├── test_repository_pypi.py ├── test_resolver.py ├── test_subprocess_utils.py ├── test_sync.py ├── test_top_level_editable.py ├── test_utils.py ├── test_writer.py └── utils.py └── tox.ini /.bandit: -------------------------------------------------------------------------------- 1 | [bandit] 2 | exclude: tests,.tox,.eggs,.venv,.git 3 | skips: B101 4 | -------------------------------------------------------------------------------- /.coveragerc: -------------------------------------------------------------------------------- 1 | [run] 2 | plugins = covdefaults 3 | omit = 4 | piptools/_compat/* 5 | 6 | [report] 7 | include = piptools/*, tests/* 8 | fail_under = 99 9 | -------------------------------------------------------------------------------- /.flake8: -------------------------------------------------------------------------------- 1 | [flake8] 2 | max-line-length = 100 3 | # E203 conflicts with PEP8; see https://github.com/psf/black#slices 4 | extend-ignore = E203 5 | 6 | # flake8-pytest-style 7 | # PT001: 8 | pytest-fixture-no-parentheses = true 9 | # PT006: 10 | 
pytest-parametrize-names-type = tuple 11 | # PT007: 12 | pytest-parametrize-values-type = tuple 13 | pytest-parametrize-values-row-type = tuple 14 | # PT023: 15 | pytest-mark-no-parentheses = true 16 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/bug-report.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Bug report 3 | about: Create a report to help us improve 4 | --- 5 | 6 | 7 | 8 | #### Environment Versions 9 | 10 | 1. OS Type 11 | 1. Python version: `$ python -V` 12 | 1. pip version: `$ pip --version` 13 | 1. pip-tools version: `$ pip-compile --version` 14 | 15 | #### Steps to replicate 16 | 17 | 1. ... 18 | 2. ... 19 | 3. ... 20 | 21 | #### Expected result 22 | 23 | ... 24 | 25 | #### Actual result 26 | 27 | ... 28 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/feature-request.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Feature request 3 | about: Suggest an idea for this project 4 | --- 5 | 6 | #### What's the problem this feature will solve? 7 | 8 | 9 | 10 | #### Describe the solution you'd like 11 | 12 | 13 | 14 | 15 | 16 | #### Alternative Solutions 17 | 18 | 19 | 20 | #### Additional context 21 | 22 | 23 | -------------------------------------------------------------------------------- /.github/PULL_REQUEST_TEMPLATE.md: -------------------------------------------------------------------------------- 1 | 2 | 3 | ##### Contributor checklist 4 | 5 | - [ ] Included tests for the changes. 6 | - [ ] PR title is short, clear, and ready to be included in the user-facing changelog. 7 | 8 | ##### Maintainer checklist 9 | 10 | - [ ] Verified one of these labels is present: `backwards incompatible`, `feature`, `enhancement`, `deprecation`, `bug`, `dependency`, `docs` or `skip-changelog` as they determine changelog listing. 
11 | - [ ] Assign the PR to an existing or new milestone for the target version (following [Semantic Versioning](https://blog.versioneye.com/2014/01/16/semantic-versioning/)). 12 | -------------------------------------------------------------------------------- /.github/release-drafter.yml: -------------------------------------------------------------------------------- 1 | name-template: "$RESOLVED_VERSION" 2 | tag-template: "$RESOLVED_VERSION" 3 | 4 | categories: 5 | - title: "Backwards Incompatible Changes" 6 | labels: 7 | - "backwards incompatible" 8 | 9 | - title: "Features" 10 | labels: 11 | - "feature" 12 | - "enhancement" 13 | # usually combined with enhancement (avoid double listing) 14 | - "deprecation" 15 | 16 | - title: "Bug Fixes" 17 | labels: 18 | - "bug" 19 | - "bug fix" 20 | 21 | - title: "Other Changes" 22 | labels: 23 | - "dependency" 24 | - "docs" 25 | 26 | category-template: "$TITLE:" 27 | change-template: "- $TITLE ([#$NUMBER](https://github.com/jazzband/pip-tools/pull/$NUMBER)). Thanks @$AUTHOR" 28 | exclude-contributors: 29 | - "pre-commit-ci" 30 | exclude-labels: 31 | - "skip-changelog" 32 | - "maintenance" 33 | - "trivial" 34 | template: | 35 | $CHANGES 36 | 37 | version-resolver: 38 | major: 39 | labels: 40 | - "backwards incompatible" 41 | minor: 42 | labels: 43 | - "feature" 44 | - "enhancement" 45 | - "deprecation" 46 | patch: 47 | labels: 48 | - "bug fix" 49 | - "bug" 50 | - "dependency" 51 | - "docs" 52 | default: minor 53 | -------------------------------------------------------------------------------- /.github/workflows/ci.yml: -------------------------------------------------------------------------------- 1 | name: CI 2 | 3 | on: 4 | merge_group: 5 | pull_request: 6 | push: 7 | branches: 8 | - main 9 | tags: 10 | workflow_call: 11 | inputs: 12 | cpython-pip-version: 13 | description: >- 14 | A JSON string with pip versions 15 | to test against under CPython. 
16 | required: true 17 | type: string 18 | 19 | concurrency: 20 | group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.sha }} 21 | cancel-in-progress: true 22 | 23 | env: 24 | FORCE_COLOR: 1 # Request colored output from CLI tools supporting it 25 | MYPY_FORCE_COLOR: 1 # MyPy's color enforcement 26 | PIP_DISABLE_PIP_VERSION_CHECK: 1 27 | PIP_NO_PYTHON_VERSION_WARNING: 1 28 | PIP_NO_WARN_SCRIPT_LOCATION: 1 29 | PRE_COMMIT_COLOR: 1 30 | PY_COLORS: 1 # Recognized by the `py` package, dependency of `pytest` 31 | TOX_PARALLEL_NO_SPINNER: 1 32 | TOX_TESTENV_PASSENV: >- 33 | FORCE_COLOR 34 | MYPY_FORCE_COLOR 35 | NO_COLOR 36 | PY_COLORS 37 | PYTEST_THEME 38 | PYTEST_THEME_MODE 39 | PRE_COMMIT_COLOR 40 | 41 | jobs: 42 | linters: 43 | name: Linters 44 | uses: ./.github/workflows/reusable-qa.yml 45 | 46 | test: 47 | name: ${{ matrix.os }} / ${{ matrix.python-version }} / ${{ matrix.pip-version }} 48 | runs-on: ${{ matrix.os }}-latest 49 | strategy: 50 | fail-fast: false 51 | matrix: 52 | os: 53 | - Ubuntu 54 | - Windows 55 | - macOS 56 | python-version: 57 | - "3.11" 58 | - "3.10" 59 | - "3.9" 60 | - "3.8" 61 | pip-version: >- 62 | ${{ 63 | fromJSON( 64 | inputs.cpython-pip-version 65 | && inputs.cpython-pip-version 66 | || '["supported", "lowest"]' 67 | ) 68 | }} 69 | env: 70 | TOXENV: >- 71 | pip${{ matrix.pip-version }}${{ 72 | !inputs.cpython-pip-version 73 | && '-coverage' 74 | || '' 75 | }} 76 | steps: 77 | - uses: actions/checkout@v3 78 | - name: Set up Python ${{ matrix.python-version }} from GitHub 79 | id: python-install 80 | if: "!endsWith(matrix.python-version, '-dev')" 81 | uses: actions/setup-python@v4 82 | with: 83 | python-version: ${{ matrix.python-version }} 84 | - name: Set up Python ${{ matrix.python-version }} from deadsnakes 85 | if: endsWith(matrix.python-version, '-dev') 86 | uses: deadsnakes/action@v2.1.1 87 | with: 88 | python-version: ${{ matrix.python-version }} 89 | - name: Log python version info (${{ 
matrix.python-version }}) 90 | run: python --version --version 91 | - name: Get pip cache dir 92 | id: pip-cache 93 | shell: bash 94 | run: | 95 | echo "dir=$(pip cache dir)" >> $GITHUB_OUTPUT 96 | - name: Pip cache 97 | uses: actions/cache@v3 98 | with: 99 | path: ${{ steps.pip-cache.outputs.dir }} 100 | key: >- 101 | ${{ runner.os }}-pip-${{ hashFiles('setup.cfg') }}-${{ 102 | hashFiles('pyproject.toml') }}-${{ hashFiles('tox.ini') }}-${{ 103 | hashFiles('.pre-commit-config.yaml') }} 104 | restore-keys: | 105 | ${{ runner.os }}-pip- 106 | ${{ runner.os }}- 107 | - name: Install test dependencies 108 | run: python -m pip install -U tox virtualenv 109 | - name: Prepare test environment 110 | run: tox -vv --notest -p auto --parallel-live 111 | - name: Test pip ${{ matrix.pip-version }} 112 | run: tox --skip-pkg-install 113 | - name: Upload coverage to Codecov 114 | if: >- 115 | !inputs.cpython-pip-version 116 | uses: codecov/codecov-action@v3 117 | with: 118 | files: ./coverage.xml 119 | flags: >- 120 | CI-GHA, 121 | OS-${{ runner.os }}, 122 | VM-${{ matrix.os }}, 123 | Py-${{ steps.python-install.outputs.python-version }}, 124 | Pip-${{ matrix.pip-version }} 125 | name: >- 126 | OS-${{ runner.os }}, 127 | VM-${{ matrix.os }}, 128 | Py-${{ steps.python-install.outputs.python-version }}, 129 | Pip-${{ matrix.pip-version }} 130 | 131 | pypy: 132 | name: ${{ matrix.os }} / ${{ matrix.python-version }} / ${{ matrix.pip-version }} 133 | runs-on: ${{ matrix.os }}-latest 134 | strategy: 135 | fail-fast: false 136 | matrix: 137 | os: 138 | - Ubuntu 139 | - MacOS 140 | - Windows 141 | python-version: 142 | - pypy-3.10 143 | pip-version: 144 | - supported 145 | env: 146 | TOXENV: pip${{ matrix.pip-version }} 147 | steps: 148 | - uses: actions/checkout@v3 149 | - name: Set up Python ${{ matrix.python-version }} 150 | uses: actions/setup-python@v4 151 | with: 152 | python-version: ${{ matrix.python-version }} 153 | - name: Get pip cache dir 154 | id: pip-cache 155 | shell: bash 
156 | run: | 157 | echo "dir=$(pip cache dir)" >> $GITHUB_OUTPUT 158 | - name: Pip cache 159 | uses: actions/cache@v3 160 | with: 161 | path: ${{ steps.pip-cache.outputs.dir }} 162 | key: >- 163 | ${{ runner.os }}-pip-${{ hashFiles('setup.cfg') }}-${{ 164 | hashFiles('pyproject.toml') }}-${{ hashFiles('tox.ini') }}-${{ 165 | hashFiles('.pre-commit-config.yaml') }} 166 | restore-keys: | 167 | ${{ runner.os }}-pip- 168 | ${{ runner.os }}- 169 | - name: Install tox 170 | run: pip install tox 171 | - name: Prepare test environment 172 | run: tox --notest -p auto --parallel-live 173 | - name: Test pip ${{ matrix.pip-version }} 174 | run: tox 175 | 176 | check: # This job does nothing and is only used for the branch protection 177 | if: always() 178 | 179 | needs: 180 | - linters 181 | - pypy 182 | - test 183 | 184 | runs-on: ubuntu-latest 185 | 186 | steps: 187 | - name: Decide whether the needed jobs succeeded or failed 188 | uses: re-actors/alls-green@afee1c1eac2a506084c274e9c02c8e0687b48d9e 189 | with: 190 | jobs: ${{ toJSON(needs) }} 191 | -------------------------------------------------------------------------------- /.github/workflows/cron.yml: -------------------------------------------------------------------------------- 1 | name: Cron 2 | 3 | on: 4 | schedule: 5 | # Run everyday at 03:53 UTC 6 | - cron: 53 3 * * * 7 | 8 | jobs: 9 | main: 10 | name: CI 11 | uses: ./.github/workflows/ci.yml 12 | with: 13 | cpython-pip-version: >- 14 | ["main", "latest", "supported", "lowest"] 15 | -------------------------------------------------------------------------------- /.github/workflows/release-drafter.yml: -------------------------------------------------------------------------------- 1 | name: release-drafter 2 | 3 | on: 4 | push: 5 | # branches to consider in the event; optional, defaults to all 6 | branches: 7 | - main 8 | - "releases/**" 9 | - "stable/**" 10 | 11 | jobs: 12 | update_release_draft: 13 | permissions: 14 | contents: write 15 | pull-requests: read 16 
| if: github.repository == 'jazzband/pip-tools' 17 | runs-on: ubuntu-latest 18 | steps: 19 | # Drafts your next release notes as Pull Requests are merged into "main" 20 | - uses: release-drafter/release-drafter@v5 21 | env: 22 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 23 | -------------------------------------------------------------------------------- /.github/workflows/release.yml: -------------------------------------------------------------------------------- 1 | name: Release 2 | 3 | on: 4 | push: 5 | branches: 6 | - main 7 | release: 8 | types: 9 | - published 10 | 11 | jobs: 12 | build: 13 | if: github.repository == 'jazzband/pip-tools' 14 | runs-on: ubuntu-latest 15 | 16 | steps: 17 | - uses: actions/checkout@v3 18 | with: 19 | fetch-depth: 0 20 | 21 | - name: Set up Python 22 | uses: actions/setup-python@v4 23 | with: 24 | python-version: 3.9 25 | 26 | - name: Install dependencies 27 | run: | 28 | python -m pip install -U pip 29 | python -m pip install -U twine build setuptools-scm 30 | 31 | - name: Build package 32 | run: | 33 | python -m setuptools_scm 34 | python -m build 35 | twine check --strict dist/* 36 | 37 | - name: Upload packages to Jazzband 38 | if: github.event.action == 'published' 39 | uses: pypa/gh-action-pypi-publish@release/v1 40 | with: 41 | user: jazzband 42 | password: ${{ secrets.JAZZBAND_RELEASE_KEY }} 43 | repository_url: https://jazzband.co/projects/pip-tools/upload 44 | -------------------------------------------------------------------------------- /.github/workflows/reusable-qa.yml: -------------------------------------------------------------------------------- 1 | name: QA 2 | 3 | on: 4 | workflow_call: 5 | 6 | jobs: 7 | qa: 8 | name: ${{ matrix.toxenv }} 9 | runs-on: ubuntu-latest 10 | strategy: 11 | fail-fast: false 12 | matrix: 13 | toxenv: 14 | - readme 15 | - build-docs 16 | - linkcheck-docs 17 | python-version: 18 | - "3.x" 19 | env: 20 | PY_COLORS: 1 21 | TOXENV: ${{ matrix.toxenv }} 22 | TOX_PARALLEL_NO_SPINNER: 1 23 
| steps: 24 | - uses: actions/checkout@main 25 | - name: Set up Python ${{ matrix.python-version }} 26 | uses: actions/setup-python@v4 27 | with: 28 | python-version: ${{ matrix.python-version }} 29 | - name: Get pip cache dir 30 | id: pip-cache 31 | run: | 32 | echo "dir=$(pip cache dir)" >> $GITHUB_OUTPUT 33 | - name: Pip cache 34 | uses: actions/cache@v3 35 | with: 36 | path: ${{ steps.pip-cache.outputs.dir }} 37 | key: >- 38 | ${{ runner.os }}-pip-${{ hashFiles('setup.cfg') }}-${{ 39 | hashFiles('pyproject.toml') }}-${{ hashFiles('tox.ini') }}-${{ 40 | hashFiles('.pre-commit-config.yaml') }} 41 | restore-keys: | 42 | ${{ runner.os }}-pip- 43 | ${{ runner.os }}- 44 | - name: Prepare cache key 45 | id: cache-key 46 | run: echo "sha-256=$(python -VV | sha256sum | cut -d' ' -f1)" >> $GITHUB_OUTPUT 47 | - uses: actions/cache@v3 48 | with: 49 | path: ~/.cache/pre-commit 50 | key: pre-commit|${{ steps.cache-key.outputs.sha-256 }}|${{ hashFiles('.pre-commit-config.yaml') }} 51 | - name: Install tox 52 | run: pip install tox 53 | - name: Prepare test environment 54 | run: tox -vv --notest -p auto --parallel-live 55 | - name: Test ${{ matrix.toxenv }} 56 | run: tox --skip-pkg-install 57 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Ignore cram test output 2 | *.t.err 3 | 4 | # Python cruft 5 | *.pyc 6 | 7 | # Virtualenvs 8 | .envrc 9 | .direnv 10 | .venv 11 | venv/ 12 | 13 | # Testing 14 | .pytest_cache/ 15 | .tox 16 | htmlcov 17 | 18 | # Build output 19 | build 20 | dist 21 | *.egg-info 22 | .coverage 23 | .coverage.* 24 | coverage.xml 25 | .cache 26 | 27 | # IDE 28 | .idea 29 | 30 | # Test files 31 | requirements.in 32 | requirements.txt 33 | .eggs/ 34 | -------------------------------------------------------------------------------- /.pre-commit-config.yaml: 
-------------------------------------------------------------------------------- 1 | repos: 2 | - repo: https://github.com/psf/black 3 | rev: 25.1.0 4 | hooks: 5 | - id: black 6 | args: [--target-version=py38] 7 | - repo: https://github.com/PyCQA/isort 8 | rev: 6.0.0 9 | hooks: 10 | - id: isort 11 | - repo: https://github.com/asottile/pyupgrade 12 | rev: v3.19.1 13 | hooks: 14 | - id: pyupgrade 15 | args: [--py38-plus] 16 | - repo: https://github.com/PyCQA/flake8 17 | rev: 7.1.2 18 | hooks: 19 | - id: flake8 20 | additional_dependencies: 21 | - flake8-pytest-style 22 | - repo: https://github.com/pre-commit/mirrors-mypy 23 | rev: v1.15.0 24 | hooks: 25 | - id: mypy 26 | # Avoid error: Duplicate module named 'setup' 27 | # https://github.com/python/mypy/issues/4008 28 | # Keep exclude in sync with mypy own excludes 29 | exclude: ^tests/test_data/ 30 | additional_dependencies: 31 | - click==8.0.1 32 | - pep517==0.10.0 33 | - toml==0.10.2 34 | - pip==20.3.4 35 | - build==1.0.0 36 | - pyproject_hooks==1.0.0 37 | - pytest==7.4.2 38 | language_version: python3.9 39 | - repo: https://github.com/PyCQA/bandit 40 | rev: 1.8.3 41 | hooks: 42 | - id: bandit 43 | args: [--ini, .bandit] 44 | exclude: ^tests/ 45 | - repo: https://github.com/pre-commit/mirrors-prettier 46 | rev: v4.0.0-alpha.8 47 | hooks: 48 | - id: prettier 49 | additional_dependencies: 50 | - "prettier" 51 | - "prettier-plugin-toml" 52 | -------------------------------------------------------------------------------- /.pre-commit-hooks.yaml: -------------------------------------------------------------------------------- 1 | - id: pip-compile 2 | name: pip-compile 3 | description: Automatically compile requirements. 
4 | entry: pip-compile 5 | language: python 6 | files: ^requirements\.(in|txt)$ 7 | pass_filenames: false 8 | -------------------------------------------------------------------------------- /.prettierrc.yaml: -------------------------------------------------------------------------------- 1 | printWidth: 88 2 | overrides: 3 | - files: "CHANGELOG.md" 4 | options: 5 | proseWrap: always 6 | -------------------------------------------------------------------------------- /.readthedocs.yaml: -------------------------------------------------------------------------------- 1 | # https://docs.readthedocs.io/en/stable/config-file/v2.html 2 | 3 | version: 2 4 | 5 | # Build documentation in the docs/ directory with Sphinx 6 | sphinx: 7 | builder: dirhtml 8 | configuration: docs/conf.py 9 | fail_on_warning: true 10 | 11 | formats: 12 | - pdf 13 | - htmlzip 14 | 15 | build: 16 | os: ubuntu-22.04 17 | tools: 18 | python: >- 19 | 3.11 20 | jobs: 21 | post_checkout: 22 | - git fetch --unshallow || true 23 | 24 | python: 25 | install: 26 | - requirements: docs/requirements.txt 27 | - method: pip 28 | path: . 29 | -------------------------------------------------------------------------------- /CODE_OF_CONDUCT.md: -------------------------------------------------------------------------------- 1 | # Code of Conduct 2 | 3 | As contributors and maintainers of the Jazzband projects, and in the interest of 4 | fostering an open and welcoming community, we pledge to respect all people who 5 | contribute through reporting issues, posting feature requests, updating documentation, 6 | submitting pull requests or patches, and other activities. 7 | 8 | We are committed to making participation in the Jazzband a harassment-free experience 9 | for everyone, regardless of the level of experience, gender, gender identity and 10 | expression, sexual orientation, disability, personal appearance, body size, race, 11 | ethnicity, age, religion, or nationality. 
12 | 13 | Examples of unacceptable behavior by participants include: 14 | 15 | - The use of sexualized language or imagery 16 | - Personal attacks 17 | - Trolling or insulting/derogatory comments 18 | - Public or private harassment 19 | - Publishing other's private information, such as physical or electronic addresses, 20 | without explicit permission 21 | - Other unethical or unprofessional conduct 22 | 23 | The Jazzband roadies have the right and responsibility to remove, edit, or reject 24 | comments, commits, code, wiki edits, issues, and other contributions that are not 25 | aligned to this Code of Conduct, or to ban temporarily or permanently any contributor 26 | for other behaviors that they deem inappropriate, threatening, offensive, or harmful. 27 | 28 | By adopting this Code of Conduct, the roadies commit themselves to fairly and 29 | consistently applying these principles to every aspect of managing the jazzband 30 | projects. Roadies who do not follow or enforce the Code of Conduct may be permanently 31 | removed from the Jazzband roadies. 32 | 33 | This code of conduct applies both within project spaces and in public spaces when an 34 | individual is representing the project or its community. 35 | 36 | Instances of abusive, harassing, or otherwise unacceptable behavior may be reported by 37 | contacting the roadies at `roadies@jazzband.co`. All complaints will be reviewed and 38 | investigated and will result in a response that is deemed necessary and appropriate to 39 | the circumstances. Roadies are obligated to maintain confidentiality with regard to the 40 | reporter of an incident. 
41 | 42 | This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 43 | 1.3.0, available at [https://contributor-covenant.org/version/1/3/0/][version] 44 | 45 | [homepage]: https://contributor-covenant.org 46 | [version]: https://contributor-covenant.org/version/1/3/0/ 47 | -------------------------------------------------------------------------------- /CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | [![Jazzband](https://jazzband.co/static/img/jazzband.svg)](https://jazzband.co/) 2 | 3 | This is a [Jazzband](https://jazzband.co/) project. By contributing you agree 4 | to abide by the [Contributor Code of Conduct](https://jazzband.co/about/conduct) 5 | and follow the [guidelines](https://jazzband.co/about/guidelines). 6 | 7 | ## Project Contribution Guidelines 8 | 9 | Here are a few additional or emphasized guidelines to follow when contributing to `pip-tools`: 10 | 11 | - If you need to have a virtualenv outside of `tox`, it is possible to reuse its configuration to provision it with [tox devenv](). 12 | - Always provide tests for your changes and run `tox -p all` to make sure they are passing the checks locally. 13 | - Give a clear one-line description in the PR (that the maintainers can add to [CHANGELOG] afterwards). 14 | - Wait for the review of at least one other contributor before merging (even if you're a Jazzband member). 15 | - Before merging, assign the PR to a milestone for a version to help with the release process. 16 | 17 | The only exception to those guidelines is for trivial changes, such as 18 | documentation corrections or contributions that do not change pip-tools itself. 19 | 20 | Contributions following these guidelines are always welcomed, encouraged and appreciated. 
21 | 22 | ## Project Release Process 23 | 24 | Jazzband aims to give full access to all members, including performing releases, as described in the 25 | [Jazzband Releases documentation](https://jazzband.co/about/releases). 26 | 27 | To help keeping track of the releases and their changes, here's the current release process: 28 | 29 | - Check to see if any recently merged PRs are missing from the milestone of the version about to be released. 30 | - Create a branch for the release. _Ex: release-3.4.0_. 31 | - Update the [CHANGELOG] with the version, date and add the text from [drafter release](https://github.com/jazzband/pip-tools/releases). 32 | - Push the branch to your fork and create a pull request. 33 | - Merge the pull request after the changes being approved. 34 | - Make sure that the tests/CI still pass. 35 | - Once ready, go to [releases](https://github.com/jazzband/pip-tools/releases) page and publish the latest draft release. This will push a tag on the HEAD of the main branch, trigger the CI pipeline and 36 | deploy a pip-tools release in the **Jazzband private package index** upon success. 37 | - The pip-tools "lead" project members will receive an email notification to review the release and 38 | deploy it to the public PyPI if all is correct. 39 | - Once the release to the public PyPI is confirmed, close the milestone. 40 | 41 | Please be mindful of other before and when performing a release, and use this access responsibly. 42 | 43 | Do not hesitate to ask questions if you have any before performing a release. 44 | 45 | [changelog]: https://github.com/jazzband/pip-tools/blob/main/CHANGELOG.md 46 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | BSD 3-Clause License 2 | 3 | Copyright (c). All rights reserved. 
4 | 5 | Redistribution and use in source and binary forms, with or without modification, 6 | are permitted provided that the following conditions are met: 7 | 8 | 1. Redistributions of source code must retain the above copyright notice, 9 | this list of conditions and the following disclaimer. 10 | 11 | 2. Redistributions in binary form must reproduce the above copyright 12 | notice, this list of conditions and the following disclaimer in the 13 | documentation and/or other materials provided with the distribution. 14 | 15 | 3. Neither the name of pip-tools nor the names of its contributors may be 16 | used to endorse or promote products derived from this software without 17 | specific prior written permission. 18 | 19 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND 20 | ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED 21 | WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE 22 | DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR 23 | ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES 24 | (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; 25 | LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON 26 | ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT 27 | (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS 28 | SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
29 | -------------------------------------------------------------------------------- /codecov.yml: -------------------------------------------------------------------------------- 1 | comment: false # avoid spamming reviews 2 | -------------------------------------------------------------------------------- /docs/.gitignore: -------------------------------------------------------------------------------- 1 | !requirements.in 2 | !requirements.txt 3 | -------------------------------------------------------------------------------- /docs/changelog.md: -------------------------------------------------------------------------------- 1 | # Changelog 2 | 3 | ```{include} ../CHANGELOG.md 4 | 5 | ``` 6 | -------------------------------------------------------------------------------- /docs/cli/index.md: -------------------------------------------------------------------------------- 1 | # Command Line Reference 2 | 3 | This page provides a reference for the `pip-tools` command-line interface (CLI): 4 | 5 | ```{toctree} 6 | :maxdepth: 1 7 | 8 | pip-compile 9 | pip-sync 10 | ``` 11 | -------------------------------------------------------------------------------- /docs/cli/pip-compile.md: -------------------------------------------------------------------------------- 1 | # pip-compile 2 | 3 | ```{program-output} pip-compile --help 4 | 5 | ``` 6 | -------------------------------------------------------------------------------- /docs/cli/pip-sync.md: -------------------------------------------------------------------------------- 1 | # pip-sync 2 | 3 | ```{program-output} pip-sync --help 4 | 5 | ``` 6 | -------------------------------------------------------------------------------- /docs/conf.py: -------------------------------------------------------------------------------- 1 | # https://www.sphinx-doc.org/en/master/usage/configuration.html 2 | """Configuration file for the Sphinx documentation builder.""" 3 | 4 | from __future__ import annotations 5 | 6 | from 
importlib.metadata import version as get_version 7 | from pathlib import Path 8 | 9 | from sphinx.util import logging 10 | from sphinx.util.console import bold 11 | 12 | logger = logging.getLogger(__name__) 13 | 14 | # -- Path setup -------------------------------------------------------------- 15 | 16 | PROJECT_ROOT_DIR = Path(__file__).parents[1].resolve() 17 | 18 | 19 | # -- Project information ----------------------------------------------------- 20 | 21 | project = "pip-tools" 22 | author = f"{project} Contributors" 23 | copyright = f"The {author}" 24 | 25 | # The full version, including alpha/beta/rc tags 26 | release = get_version(project) 27 | 28 | # The short X.Y version 29 | version = ".".join(release.split(".")[:3]) 30 | 31 | logger.info(bold("%s version: %s"), project, version) 32 | logger.info(bold("%s release: %s"), project, release) 33 | 34 | # -- General configuration --------------------------------------------------- 35 | 36 | # Add any Sphinx extension module names here, as strings. They can be 37 | # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom 38 | # ones. 39 | extensions = [ 40 | # Stdlib extensions: 41 | "sphinx.ext.intersphinx", 42 | # Third-party extensions: 43 | "myst_parser", 44 | "sphinxcontrib.apidoc", 45 | "sphinxcontrib.programoutput", 46 | ] 47 | 48 | 49 | # -- Options for HTML output ------------------------------------------------- 50 | 51 | # The theme to use for HTML and HTML Help pages. See the documentation for 52 | # a list of builtin themes. 
53 | # 54 | html_theme = "furo" 55 | html_title = f"{project} documentation v{release}" 56 | 57 | 58 | # -- Options for intersphinx ---------------------------------------------------------- 59 | # https://www.sphinx-doc.org/en/master/usage/extensions/intersphinx.html#configuration 60 | 61 | intersphinx_mapping = { 62 | "python": ("https://docs.python.org/3", None), 63 | } 64 | 65 | 66 | # ------------------------------------------------------------------------- 67 | default_role = "any" 68 | nitpicky = True 69 | 70 | linkcheck_ignore = [ 71 | r"^https://matrix\.to/#", 72 | r"^https://img.shields.io/matrix", 73 | ] 74 | 75 | nitpick_ignore_regex = [ 76 | ("py:class", "pip.*"), 77 | ("py:class", "pathlib.*"), 78 | ("py:class", "click.*"), 79 | ("py:class", "build.*"), 80 | ("py:class", "optparse.*"), 81 | ("py:class", "_ImportLibDist"), 82 | ("py:class", "PackageMetadata"), 83 | ("py:class", "importlib.*"), 84 | ("py:class", "IndexContent"), 85 | ("py:exc", "click.*"), 86 | ] 87 | 88 | suppress_warnings = ["myst.xref_missing"] 89 | 90 | # -- Apidoc options ------------------------------------------------------- 91 | 92 | apidoc_excluded_paths: list[str] = [] 93 | apidoc_extra_args = [ 94 | "--implicit-namespaces", 95 | "--private", # include “_private” modules 96 | ] 97 | apidoc_module_first = False 98 | apidoc_module_dir = "../piptools" 99 | apidoc_output_dir = "pkg" 100 | apidoc_separate_modules = True 101 | apidoc_toc_file = None 102 | -------------------------------------------------------------------------------- /docs/contributing.md: -------------------------------------------------------------------------------- 1 | # Contributing 2 | 3 | ```{include} ../CONTRIBUTING.md 4 | 5 | ``` 6 | -------------------------------------------------------------------------------- /docs/index.md: -------------------------------------------------------------------------------- 1 | # Welcome to pip-tools' documentation! 
2 | 3 | ```{include} ../README.md 4 | 5 | ``` 6 | 7 | ```{toctree} 8 | :hidden: 9 | :maxdepth: 2 10 | :caption: Contents 11 | 12 | cli/index 13 | contributing 14 | changelog 15 | ``` 16 | 17 | ```{toctree} 18 | :hidden: 19 | :caption: Private API reference 20 | 21 | pkg/modules 22 | ``` 23 | -------------------------------------------------------------------------------- /docs/pkg/.gitignore: -------------------------------------------------------------------------------- 1 | * 2 | !.gitignore 3 | -------------------------------------------------------------------------------- /docs/requirements.in: -------------------------------------------------------------------------------- 1 | furo 2 | myst-parser 3 | setuptools-scm 4 | sphinx 5 | sphinxcontrib-apidoc 6 | sphinxcontrib-programoutput 7 | -------------------------------------------------------------------------------- /docs/requirements.txt: -------------------------------------------------------------------------------- 1 | # 2 | # This file is autogenerated by pip-compile with Python 3.11 3 | # by the following command: 4 | # 5 | # pip-compile --allow-unsafe --strip-extras requirements.in 6 | # 7 | alabaster==0.7.13 8 | # via sphinx 9 | babel==2.12.1 10 | # via sphinx 11 | beautifulsoup4==4.12.2 12 | # via furo 13 | certifi==2024.7.4 14 | # via requests 15 | charset-normalizer==3.2.0 16 | # via requests 17 | docutils==0.20.1 18 | # via 19 | # myst-parser 20 | # sphinx 21 | furo==2023.8.17 22 | # via -r requirements.in 23 | idna==3.7 24 | # via requests 25 | imagesize==1.4.1 26 | # via sphinx 27 | jinja2==3.1.4 28 | # via 29 | # myst-parser 30 | # sphinx 31 | markdown-it-py==3.0.0 32 | # via 33 | # mdit-py-plugins 34 | # myst-parser 35 | markupsafe==2.1.3 36 | # via jinja2 37 | mdit-py-plugins==0.4.0 38 | # via myst-parser 39 | mdurl==0.1.2 40 | # via markdown-it-py 41 | myst-parser==2.0.0 42 | # via -r requirements.in 43 | packaging==23.1 44 | # via 45 | # setuptools-scm 46 | # sphinx 47 | pbr==6.0.0 48 | # 
via sphinxcontrib-apidoc 49 | pygments==2.16.1 50 | # via 51 | # furo 52 | # sphinx 53 | pyyaml==6.0.1 54 | # via myst-parser 55 | requests==2.32.0 56 | # via sphinx 57 | setuptools-scm==7.1.0 58 | # via -r requirements.in 59 | snowballstemmer==2.2.0 60 | # via sphinx 61 | soupsieve==2.4.1 62 | # via beautifulsoup4 63 | sphinx==7.2.2 64 | # via 65 | # -r requirements.in 66 | # furo 67 | # myst-parser 68 | # sphinx-basic-ng 69 | # sphinxcontrib-apidoc 70 | # sphinxcontrib-applehelp 71 | # sphinxcontrib-devhelp 72 | # sphinxcontrib-htmlhelp 73 | # sphinxcontrib-programoutput 74 | # sphinxcontrib-qthelp 75 | # sphinxcontrib-serializinghtml 76 | sphinx-basic-ng==1.0.0b2 77 | # via furo 78 | sphinxcontrib-apidoc==0.5.0 79 | # via -r requirements.in 80 | sphinxcontrib-applehelp==1.0.7 81 | # via sphinx 82 | sphinxcontrib-devhelp==1.0.5 83 | # via sphinx 84 | sphinxcontrib-htmlhelp==2.0.4 85 | # via sphinx 86 | sphinxcontrib-jsmath==1.0.1 87 | # via sphinx 88 | sphinxcontrib-programoutput==0.17 89 | # via -r requirements.in 90 | sphinxcontrib-qthelp==1.0.6 91 | # via sphinx 92 | sphinxcontrib-serializinghtml==1.1.8 93 | # via sphinx 94 | typing-extensions==4.7.1 95 | # via setuptools-scm 96 | urllib3==2.2.2 97 | # via requests 98 | 99 | # The following packages are considered to be unsafe in a requirements file: 100 | setuptools==70.0.0 101 | # via setuptools-scm 102 | -------------------------------------------------------------------------------- /examples/django.in: -------------------------------------------------------------------------------- 1 | # This file includes the Django project, and the debug toolbar 2 | Django<2.2.1 # suppose some version requirement 3 | django-debug-toolbar 4 | -------------------------------------------------------------------------------- /examples/flask.in: -------------------------------------------------------------------------------- 1 | # Flask has 2nd and 3rd level dependencies 2 | Flask 3 | 
-------------------------------------------------------------------------------- /examples/hypothesis.in: -------------------------------------------------------------------------------- 1 | hypothesis[django] 2 | -------------------------------------------------------------------------------- /examples/protection.in: -------------------------------------------------------------------------------- 1 | # This package depends on setuptools, which should not end up in the compiled 2 | # requirements, because it may cause conflicts with pip itself 3 | python-levenshtein>=0.12.0 4 | -------------------------------------------------------------------------------- /examples/readme/constraints.txt: -------------------------------------------------------------------------------- 1 | # 2 | # This file is autogenerated by pip-compile with Python 3.11 3 | # by the following command: 4 | # 5 | # pip-compile --all-build-deps --all-extras --output-file=constraints.txt --strip-extras pyproject.toml 6 | # 7 | asgiref==3.5.2 8 | # via django 9 | attrs==22.1.0 10 | # via pytest 11 | django==4.1 12 | # via my-cool-django-app (pyproject.toml) 13 | editables==0.3 14 | # via hatchling 15 | hatchling==1.11.1 16 | # via my-cool-django-app (pyproject.toml::build-system.requires) 17 | iniconfig==1.1.1 18 | # via pytest 19 | packaging==21.3 20 | # via 21 | # hatchling 22 | # pytest 23 | pathspec==0.10.2 24 | # via hatchling 25 | pluggy==1.0.0 26 | # via 27 | # hatchling 28 | # pytest 29 | py==1.11.0 30 | # via pytest 31 | pyparsing==3.0.9 32 | # via packaging 33 | pytest==7.1.2 34 | # via my-cool-django-app (pyproject.toml) 35 | sqlparse==0.4.2 36 | # via django 37 | tomli==2.0.1 38 | # via 39 | # hatchling 40 | # pytest 41 | -------------------------------------------------------------------------------- /examples/readme/pyproject.toml: -------------------------------------------------------------------------------- 1 | [build-system] 2 | requires = ["hatchling"] 3 | build-backend = 
"hatchling.build" 4 | 5 | [project] 6 | name = "my-cool-django-app" 7 | version = "42" 8 | dependencies = ["django"] 9 | 10 | [project.optional-dependencies] 11 | dev = ["pytest"] 12 | -------------------------------------------------------------------------------- /examples/sentry.in: -------------------------------------------------------------------------------- 1 | # Sentry has a very large dependency tree 2 | sentry 3 | -------------------------------------------------------------------------------- /img/pip-tools-overview.svg: -------------------------------------------------------------------------------- 1 |
Source Spec files
Source Spec fil...
Compiled Spec files
Compiled Spec f...
Your (virtual)
environment
Your (virtual)...
pip-compile
pip-compile
pip-sync
pip-sync
PyPI
PyPI
requirements.indev-requirements.in...requirements.txtdev-requirements.txt...Viewer does not support full SVG 1.1
-------------------------------------------------------------------------------- /piptools/__init__.py: --------------------------------------------------------------------------------
from __future__ import annotations

import locale

from click import secho

# Needed for locale.getpreferredencoding(False) to work
# in pip._internal.utils.encoding.auto_decode
try:
    locale.setlocale(locale.LC_ALL, "")
except locale.Error as e:  # pragma: no cover
    # setlocale can apparently crash if locale are uninitialized
    secho(f"Ignoring error when setting locale: {e}", fg="red")
-------------------------------------------------------------------------------- /piptools/__main__.py: --------------------------------------------------------------------------------
from __future__ import annotations

import click

from piptools.scripts import compile, sync


# NOTE: deliberately no docstring on this group — click would surface it as
# CLI help text, so documentation lives in this comment instead.
@click.group()
def cli() -> None:
    pass


# Register the two subcommands: ``piptools compile`` and ``piptools sync``.
cli.add_command(compile.cli, "compile")
cli.add_command(sync.cli, "sync")


# Enable ``python -m piptools ...``.
if __name__ == "__main__":
    cli()
-------------------------------------------------------------------------------- /piptools/_compat/__init__.py: --------------------------------------------------------------------------------
from __future__ import annotations

# Re-export the pip-version compatibility shims so callers can import them
# from ``piptools._compat`` without caring which pip release is installed.
from .pip_compat import (
    Distribution,
    create_wheel_cache,
    get_dev_pkgs,
    parse_requirements,
)

__all__ = [
    "Distribution",
    "parse_requirements",
    "create_wheel_cache",
    "get_dev_pkgs",
]
-------------------------------------------------------------------------------- /piptools/_compat/pip_compat.py: --------------------------------------------------------------------------------
from __future__ import annotations

import optparse
from dataclasses import dataclass
from typing import TYPE_CHECKING, Iterable, Iterator, Set, cast

from pip._internal.cache import WheelCache
from pip._internal.index.package_finder import PackageFinder
from pip._internal.metadata import BaseDistribution
from pip._internal.metadata.pkg_resources import Distribution as _PkgResourcesDist
from pip._internal.models.direct_url import DirectUrl
from pip._internal.models.link import Link
from pip._internal.network.session import PipSession
from pip._internal.req import InstallRequirement
from pip._internal.req import parse_requirements as _parse_requirements
from pip._internal.req.constructors import install_req_from_parsed_requirement
from pip._vendor.pkg_resources import Requirement

# The Distribution interface has changed between pkg_resources and
# importlib.metadata, so this compat layer allows for a consistent access
# pattern. In pip 22.1, importlib.metadata became the default on Python 3.11
# (and later), but is overridable. `select_backend` returns what's being used.
if TYPE_CHECKING:
    from pip._internal.metadata.importlib import Distribution as _ImportLibDist

from ..utils import PIP_VERSION, copy_install_requirement


@dataclass(frozen=True)
class Distribution:
    """Backend-agnostic snapshot of an installed distribution.

    Normalizes the two pip metadata backends (pkg_resources and
    importlib.metadata) into one immutable shape.
    """

    key: str  # normalized project name (pkg_resources ``key`` / importlib ``name``)
    version: str
    requires: Iterable[Requirement]  # runtime dependencies, extras excluded
    direct_url: DirectUrl | None  # PEP 610 direct-URL origin, if any

    @classmethod
    def from_pip_distribution(cls, dist: BaseDistribution) -> Distribution:
        # TODO: Use only the BaseDistribution protocol properties and methods
        # instead of specializing by type.
        if isinstance(dist, _PkgResourcesDist):
            return cls._from_pkg_resources(dist)
        else:
            return cls._from_importlib(dist)

    @classmethod
    def _from_pkg_resources(cls, dist: _PkgResourcesDist) -> Distribution:
        # The pkg_resources backend exposes key/version/requires() directly on
        # the wrapped ``_dist`` object, so this is a straight field copy.
        return cls(
            dist._dist.key, dist._dist.version, dist._dist.requires(), dist.direct_url
        )

    @classmethod
    def _from_importlib(cls, dist: _ImportLibDist) -> Distribution:
        """Mimic pkg_resources.Distribution.requires for the case of no
        extras.

        This doesn't fulfill that API's ``extras`` parameter but
        satisfies the needs of pip-tools.
        """
        # Keep a requirement when it has no marker, or when its marker holds
        # with no extra active (``extra == None``) — i.e. base dependencies.
        reqs = (Requirement.parse(req) for req in (dist._dist.requires or ()))
        requires = [
            req
            for req in reqs
            if not req.marker or req.marker.evaluate({"extra": None})
        ]
        return cls(dist._dist.name, dist._dist.version, requires, dist.direct_url)


class FileLink(Link):  # type: ignore[misc]
    """Wrapper for ``pip``'s ``Link`` class."""

    _url: str

    @property
    def file_path(self) -> str:
        # overriding the actual property to bypass some validation
        return self._url


def parse_requirements(
    filename: str,
    session: PipSession,
    finder: PackageFinder | None = None,
    options: optparse.Values | None = None,
    constraint: bool = False,
    isolated: bool = False,
) -> Iterator[InstallRequirement]:
    """Yield :class:`InstallRequirement` objects parsed from ``filename``.

    Thin wrapper over pip's ``parse_requirements`` that converts each parsed
    requirement into an ``InstallRequirement``.  For editable requirements
    whose original spec did not start with ``file://``, the link is swapped
    for a :class:`FileLink` carrying the original requirement string, so the
    untransformed text is what ends up in the output file.
    """
    for parsed_req in _parse_requirements(
        filename, session, finder=finder, options=options, constraint=constraint
    ):
        install_req = install_req_from_parsed_requirement(parsed_req, isolated=isolated)
        if install_req.editable and not parsed_req.requirement.startswith("file://"):
            # ``Link.url`` is what is saved to the output file
            # we set the url directly to undo the transformation in pip's Link class
            file_link = FileLink(install_req.link.url)
            file_link._url = parsed_req.requirement
            install_req.link = file_link
        yield copy_install_requirement(install_req)


def create_wheel_cache(cache_dir: str, format_control: str | None = None) -> WheelCache:
    """Build a ``WheelCache``, passing ``format_control`` only to pip <= 23.0.

    Later pip versions dropped the ``format_control`` constructor argument,
    so it is forwarded conditionally on ``PIP_VERSION``.
    """
    kwargs: dict[str, str | None] = {"cache_dir": cache_dir}
    if PIP_VERSION[:2] <= (23, 0):
        kwargs["format_control"] = format_control
    return WheelCache(**kwargs)


def get_dev_pkgs() -> set[str]:
    """Return the package names pip's ``freeze`` command special-cases.

    pip <= 23.1 exposed these as the ``DEV_PKGS`` constant; newer versions
    expose a ``_dev_pkgs()`` callable instead.
    """
    if PIP_VERSION[:2] <= (23, 1):
        from pip._internal.commands.freeze import DEV_PKGS

        return cast(Set[str], DEV_PKGS)

    from pip._internal.commands.freeze import _dev_pkgs

    return cast(Set[str], _dev_pkgs())
-------------------------------------------------------------------------------- /piptools/build.py: --------------------------------------------------------------------------------
from __future__ import annotations

import collections
import contextlib
import os
import pathlib
import sys
import tempfile
from dataclasses import dataclass
from importlib import metadata as importlib_metadata
from typing import Any, Iterator, Protocol, TypeVar, overload

import build
import build.env
import pyproject_hooks
from pip._internal.req import InstallRequirement
from pip._internal.req.constructors import parse_req_from_line
from pip._vendor.packaging.markers import Marker
from pip._vendor.packaging.requirements import Requirement

from .utils import copy_install_requirement, install_req_from_line

if sys.version_info >= (3, 11):
    import tomllib
else:
    import tomli as tomllib

PYPROJECT_TOML = "pyproject.toml"

_T = TypeVar("_T")


if sys.version_info >= (3, 10):
    from importlib.metadata import PackageMetadata
else:

    # Backport of the ``PackageMetadata`` protocol for Python < 3.10, where
    # ``importlib.metadata`` does not export it.  Only ``get_all`` is needed.
    class PackageMetadata(Protocol):
        @overload
        def get_all(self, name: str, failobj: None = None) -> list[Any] | None: ...

        @overload
        def get_all(self, name: str, failobj: _T) -> list[Any] | _T: ...


@dataclass
class StaticProjectMetadata:
    """Project metadata obtained by statically parsing ``pyproject.toml``.

    Carries no build requirements, since those cannot be determined without
    invoking the build backend.
    """

    extras: tuple[str, ...]
    requirements: tuple[InstallRequirement, ...]


@dataclass
class ProjectMetadata:
    """Project metadata obtained by invoking the PEP 517 build backend."""

    extras: tuple[str, ...]
    requirements: tuple[InstallRequirement, ...]
    build_requirements: tuple[InstallRequirement, ...]


def maybe_statically_parse_project_metadata(
    src_file: pathlib.Path,
) -> StaticProjectMetadata | None:
    """
    Return the metadata for a project, if it can be statically parsed from ``pyproject.toml``.

    This function is typically significantly faster than invoking a build backend.
    Returns None if the project metadata cannot be statically parsed.
    """
    # Static parsing only applies to pyproject.toml sources (not setup.py etc.).
    if src_file.name != PYPROJECT_TOML:
        return None

    try:
        with open(src_file, "rb") as f:
            pyproject_contents = tomllib.load(f)
    except tomllib.TOMLDecodeError:
        # Malformed TOML: fall back to the build-backend path.
        return None

    # Not valid PEP 621 metadata
    if (
        "project" not in pyproject_contents
        or "name" not in pyproject_contents["project"]
    ):
        return None

    project_table = pyproject_contents["project"]

    # Dynamic dependencies require build backend invocation
    dynamic = project_table.get("dynamic", [])
    if "dependencies" in dynamic or "optional-dependencies" in dynamic:
        return None

    package_name = project_table["name"]
    comes_from = f"{package_name} ({src_file})"

    extras = project_table.get("optional-dependencies", {}).keys()
    install_requirements = [
        InstallRequirement(Requirement(req), comes_from)
        for req in project_table.get("dependencies", [])
    ]
    # Optional dependencies become requirements guarded by an
    # ``extra == '<name>'`` marker.
    for extra, reqs in (
        pyproject_contents.get("project", {}).get("optional-dependencies", {}).items()
    ):
        for req in reqs:
            requirement = Requirement(req)
            if requirement.name == package_name:
                # Similar to logic for handling self-referential requirements
                # from _prepare_requirements
                requirement.url = src_file.parent.as_uri()
            # Note we don't need to modify `requirement` to include this extra
            marker = Marker(f"extra == '{extra}'")
            install_requirements.append(
                InstallRequirement(requirement, comes_from, markers=marker)
            )

    return StaticProjectMetadata(
        extras=tuple(extras),
        requirements=tuple(install_requirements),
    )


def build_project_metadata(
    src_file: pathlib.Path,
    build_targets: tuple[str, ...],
    *,
    upgrade_packages: tuple[str, ...] | None = None,
    attempt_static_parse: bool,
    isolated: bool,
    quiet: bool,
) -> ProjectMetadata | StaticProjectMetadata:
    """
    Return the metadata for a project.

    First, optionally attempt to determine the metadata statically from the
    ``pyproject.toml`` file. This will not work if build_targets are specified,
    since we cannot determine build requirements statically.

    Uses the ``prepare_metadata_for_build_wheel`` hook for the wheel metadata
    if available, otherwise ``build_wheel``.

    Uses the ``prepare_metadata_for_build_{target}`` hook for each ``build_targets``
    if available.

    :param src_file: Project source file
    :param build_targets: A tuple of build targets to get the dependencies
                          of (``sdist`` or ``wheel`` or ``editable``).
    :param attempt_static_parse: Whether to attempt to statically parse the
                                 project metadata from ``pyproject.toml``.
                                 Cannot be used with ``build_targets``.
    :param isolated: Whether to invoke the backend in the current
                     environment or to create an isolated one and invoke it
                     there.
    :param quiet: Whether to suppress the output of subprocesses.
151 | """ 152 | 153 | if attempt_static_parse: 154 | if build_targets: 155 | raise ValueError( 156 | "Cannot execute the PEP 517 optional get_requires_for_build* " 157 | "hooks statically, as build requirements are requested" 158 | ) 159 | project_metadata = maybe_statically_parse_project_metadata(src_file) 160 | if project_metadata is not None: 161 | return project_metadata 162 | 163 | src_dir = src_file.parent 164 | with _create_project_builder( 165 | src_dir, 166 | upgrade_packages=upgrade_packages, 167 | isolated=isolated, 168 | quiet=quiet, 169 | ) as builder: 170 | metadata = _build_project_wheel_metadata(builder) 171 | extras = tuple(metadata.get_all("Provides-Extra") or ()) 172 | requirements = tuple( 173 | _prepare_requirements(metadata=metadata, src_file=src_file) 174 | ) 175 | build_requirements = tuple( 176 | _prepare_build_requirements( 177 | builder=builder, 178 | src_file=src_file, 179 | build_targets=build_targets, 180 | package_name=_get_name(metadata), 181 | ) 182 | ) 183 | return ProjectMetadata( 184 | extras=extras, 185 | requirements=requirements, 186 | build_requirements=build_requirements, 187 | ) 188 | 189 | 190 | @contextlib.contextmanager 191 | def _env_var( 192 | env_var_name: str, 193 | env_var_value: str, 194 | /, 195 | ) -> Iterator[None]: 196 | sentinel = object() 197 | original_pip_constraint = os.getenv(env_var_name, sentinel) 198 | pip_constraint_was_unset = original_pip_constraint is sentinel 199 | 200 | os.environ[env_var_name] = env_var_value 201 | try: 202 | yield 203 | finally: 204 | if pip_constraint_was_unset: 205 | del os.environ[env_var_name] 206 | return 207 | 208 | # Assert here is necessary because MyPy can't infer type 209 | # narrowing in the complex case. 
210 | assert isinstance(original_pip_constraint, str) 211 | os.environ[env_var_name] = original_pip_constraint 212 | 213 | 214 | @contextlib.contextmanager 215 | def _temporary_constraints_file_set_for_pip( 216 | upgrade_packages: tuple[str, ...], 217 | ) -> Iterator[None]: 218 | with tempfile.NamedTemporaryFile( 219 | mode="w+t", 220 | delete=False, # FIXME: switch to `delete_on_close` in Python 3.12+ 221 | ) as tmpfile: 222 | # NOTE: `delete_on_close=False` here (or rather `delete=False`, 223 | # NOTE: temporarily) is important for cross-platform execution. It is 224 | # NOTE: required on Windows so that the underlying `pip install` 225 | # NOTE: invocation by pypa/build will be able to access the constraint 226 | # NOTE: file via a subprocess and not fail installing it due to a 227 | # NOTE: permission error related to this file handle still open in our 228 | # NOTE: parent process. To achieve this, we `.close()` the file 229 | # NOTE: descriptor before we hand off the control to the build frontend 230 | # NOTE: and with `delete_on_close=False`, the 231 | # NOTE: `tempfile.NamedTemporaryFile()` context manager does not remove 232 | # NOTE: it from disk right away. 233 | # NOTE: Due to support of versions below Python 3.12, we are forced to 234 | # NOTE: temporarily resort to using `delete=False`, meaning that the CM 235 | # NOTE: never attempts removing the file from disk, not even on exit. 236 | # NOTE: So we do this manually until we can migrate to using the more 237 | # NOTE: ergonomic argument `delete_on_close=False`. 
238 | 239 | # Write packages to upgrade to a temporary file to set as 240 | # constraints for the installation to the builder environment, 241 | # in case build requirements are among them 242 | tmpfile.write("\n".join(upgrade_packages)) 243 | 244 | # FIXME: replace `delete` with `delete_on_close` in Python 3.12+ 245 | # FIXME: and replace `.close()` with `.flush()` 246 | tmpfile.close() 247 | 248 | try: 249 | with _env_var("PIP_CONSTRAINT", tmpfile.name): 250 | yield 251 | finally: 252 | # FIXME: replace `delete` with `delete_on_close` in Python 3.12+ 253 | # FIXME: and drop this manual deletion 254 | os.unlink(tmpfile.name) 255 | 256 | 257 | @contextlib.contextmanager 258 | def _create_project_builder( 259 | src_dir: pathlib.Path, 260 | *, 261 | upgrade_packages: tuple[str, ...] | None = None, 262 | isolated: bool, 263 | quiet: bool, 264 | ) -> Iterator[build.ProjectBuilder]: 265 | if quiet: 266 | runner = pyproject_hooks.quiet_subprocess_runner 267 | else: 268 | runner = pyproject_hooks.default_subprocess_runner 269 | 270 | if not isolated: 271 | yield build.ProjectBuilder(src_dir, runner=runner) 272 | return 273 | 274 | maybe_pip_constrained_context = ( 275 | contextlib.nullcontext() 276 | if upgrade_packages is None 277 | else _temporary_constraints_file_set_for_pip(upgrade_packages) 278 | ) 279 | 280 | with maybe_pip_constrained_context, build.env.DefaultIsolatedEnv() as env: 281 | builder = build.ProjectBuilder.from_isolated_env(env, src_dir, runner) 282 | env.install(builder.build_system_requires) 283 | env.install(builder.get_requires_for_build("wheel")) 284 | yield builder 285 | 286 | 287 | def _build_project_wheel_metadata( 288 | builder: build.ProjectBuilder, 289 | ) -> PackageMetadata: 290 | with tempfile.TemporaryDirectory() as tmpdir: 291 | path = pathlib.Path(builder.metadata_path(tmpdir)) 292 | return importlib_metadata.PathDistribution(path).metadata 293 | 294 | 295 | def _get_name(metadata: PackageMetadata) -> str: 296 | retval = 
metadata.get_all("Name")[0] # type: ignore[index] 297 | assert isinstance(retval, str) 298 | return retval 299 | 300 | 301 | def _prepare_requirements( 302 | metadata: PackageMetadata, src_file: pathlib.Path 303 | ) -> Iterator[InstallRequirement]: 304 | package_name = _get_name(metadata) 305 | comes_from = f"{package_name} ({src_file})" 306 | package_dir = src_file.parent 307 | 308 | for req in metadata.get_all("Requires-Dist") or []: 309 | parts = parse_req_from_line(req, comes_from) 310 | if parts.requirement.name == package_name: 311 | # Replace package name with package directory in the requirement 312 | # string so that pip can find the package as self-referential. 313 | # Note the string can contain extras, so we need to replace only 314 | # the package name, not the whole string. 315 | replaced_package_name = req.replace(package_name, str(package_dir), 1) 316 | parts = parse_req_from_line(replaced_package_name, comes_from) 317 | 318 | yield copy_install_requirement( 319 | InstallRequirement( 320 | parts.requirement, 321 | comes_from, 322 | link=parts.link, 323 | markers=parts.markers, 324 | extras=parts.extras, 325 | ) 326 | ) 327 | 328 | 329 | def _prepare_build_requirements( 330 | builder: build.ProjectBuilder, 331 | src_file: pathlib.Path, 332 | build_targets: tuple[str, ...], 333 | package_name: str, 334 | ) -> Iterator[InstallRequirement]: 335 | result = collections.defaultdict(set) 336 | 337 | # Build requirements will only be present if a pyproject.toml file exists, 338 | # but if there is also a setup.py file then only that will be explicitly 339 | # processed due to the order of `DEFAULT_REQUIREMENTS_FILES`. 
340 | src_file = src_file.parent / PYPROJECT_TOML 341 | 342 | for req in builder.build_system_requires: 343 | result[req].add(f"{package_name} ({src_file}::build-system.requires)") 344 | for build_target in build_targets: 345 | for req in builder.get_requires_for_build(build_target): 346 | result[req].add( 347 | f"{package_name} ({src_file}::build-system.backend::{build_target})" 348 | ) 349 | 350 | for req, comes_from_sources in result.items(): 351 | for comes_from in comes_from_sources: 352 | yield install_req_from_line(req, comes_from=comes_from) 353 | -------------------------------------------------------------------------------- /piptools/cache.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import json 4 | import os 5 | import platform 6 | import sys 7 | from typing import Dict, Iterable, List, Tuple, cast 8 | 9 | from pip._internal.req import InstallRequirement 10 | from pip._vendor.packaging.requirements import Requirement 11 | 12 | from .exceptions import PipToolsError 13 | from .utils import as_tuple, key_from_req, lookup_table_from_tuples 14 | 15 | CacheKey = Tuple[str, str] 16 | CacheLookup = Dict[str, List[str]] 17 | CacheDict = Dict[str, CacheLookup] 18 | 19 | _PEP425_PY_TAGS = {"cpython": "cp", "pypy": "pp", "ironpython": "ip", "jython": "jy"} 20 | 21 | 22 | def _implementation_name() -> str: 23 | """ 24 | Get Python implementation and version. 25 | 26 | Similar to PEP 425, however the minor version is separated from the major to 27 | differentiate "3.10" and "31.0". 
28 | """ 29 | implementation_name = platform.python_implementation().lower() 30 | implementation = _PEP425_PY_TAGS.get(implementation_name, "??") 31 | return "{}{}.{}".format(implementation, *sys.version_info) 32 | 33 | 34 | class CorruptCacheError(PipToolsError): 35 | def __init__(self, path: str): 36 | self.path = path 37 | 38 | def __str__(self) -> str: 39 | lines = [ 40 | "The dependency cache seems to have been corrupted.", 41 | "Inspect, or delete, the following file:", 42 | f" {self.path}", 43 | ] 44 | return os.linesep.join(lines) 45 | 46 | 47 | def read_cache_file(cache_file_path: str) -> CacheDict: 48 | with open(cache_file_path, encoding="utf-8") as cache_file: 49 | try: 50 | doc = json.load(cache_file) 51 | except (json.JSONDecodeError, UnicodeDecodeError): 52 | raise CorruptCacheError(cache_file_path) 53 | 54 | # Check version and load the contents 55 | if doc["__format__"] != 1: 56 | raise ValueError("Unknown cache file format") 57 | return cast(CacheDict, doc["dependencies"]) 58 | 59 | 60 | class DependencyCache: 61 | """ 62 | Create new persistent dependency cache for the current Python version. 63 | 64 | The cache file is written to the appropriate user cache dir for the 65 | current platform, i.e. 66 | 67 | ~/.cache/pip-tools/depcache-pyX.Y.json 68 | 69 | Where py indicates the Python implementation. 70 | Where X.Y indicates the Python version. 71 | """ 72 | 73 | def __init__(self, cache_dir: str): 74 | os.makedirs(cache_dir, exist_ok=True) 75 | cache_filename = f"depcache-{_implementation_name()}.json" 76 | 77 | self._cache_file = os.path.join(cache_dir, cache_filename) 78 | self._cache: CacheDict | None = None 79 | 80 | @property 81 | def cache(self) -> CacheDict: 82 | """ 83 | The dictionary that is the actual in-memory cache. This property 84 | lazily loads the cache from disk. 
85 | """ 86 | if self._cache is None: 87 | try: 88 | self._cache = read_cache_file(self._cache_file) 89 | except FileNotFoundError: 90 | self._cache = {} 91 | return self._cache 92 | 93 | def as_cache_key(self, ireq: InstallRequirement) -> CacheKey: 94 | """ 95 | Given a requirement, return its cache key. 96 | 97 | This behavior is a little weird 98 | in order to allow backwards compatibility with cache files. For a requirement 99 | without extras, this will return, for example: 100 | 101 | ("ipython", "2.1.0") 102 | 103 | For a requirement with extras, the extras will be comma-separated and appended 104 | to the version, inside brackets, like so: 105 | 106 | ("ipython", "2.1.0[nbconvert,notebook]") 107 | """ 108 | name, version, extras = as_tuple(ireq) 109 | if not extras: 110 | extras_string = "" 111 | else: 112 | extras_string = f"[{','.join(extras)}]" 113 | return name, f"{version}{extras_string}" 114 | 115 | def write_cache(self) -> None: 116 | """Write the cache to disk as JSON.""" 117 | doc = {"__format__": 1, "dependencies": self._cache} 118 | with open(self._cache_file, "w", encoding="utf-8") as f: 119 | json.dump(doc, f, sort_keys=True) 120 | 121 | def clear(self) -> None: 122 | self._cache = {} 123 | self.write_cache() 124 | 125 | def __contains__(self, ireq: InstallRequirement) -> bool: 126 | pkgname, pkgversion_and_extras = self.as_cache_key(ireq) 127 | return pkgversion_and_extras in self.cache.get(pkgname, {}) 128 | 129 | def __getitem__(self, ireq: InstallRequirement) -> list[str]: 130 | pkgname, pkgversion_and_extras = self.as_cache_key(ireq) 131 | return self.cache[pkgname][pkgversion_and_extras] 132 | 133 | def __setitem__(self, ireq: InstallRequirement, values: list[str]) -> None: 134 | pkgname, pkgversion_and_extras = self.as_cache_key(ireq) 135 | self.cache.setdefault(pkgname, {}) 136 | self.cache[pkgname][pkgversion_and_extras] = values 137 | self.write_cache() 138 | 139 | def reverse_dependencies( 140 | self, ireqs: 
Iterable[InstallRequirement] 141 | ) -> dict[str, set[str]]: 142 | """ 143 | Return a lookup table of reverse dependencies for all the given ireqs. 144 | 145 | Since this is all static, it only works if the dependency cache 146 | contains the complete data, otherwise you end up with a partial view. 147 | This is typically no problem if you use this function after the entire 148 | dependency tree is resolved. 149 | """ 150 | ireqs_as_cache_values = [self.as_cache_key(ireq) for ireq in ireqs] 151 | return self._reverse_dependencies(ireqs_as_cache_values) 152 | 153 | def _reverse_dependencies( 154 | self, cache_keys: Iterable[tuple[str, str]] 155 | ) -> dict[str, set[str]]: 156 | """ 157 | Return a lookup table of reverse dependencies for all the given cache keys. 158 | 159 | Example input: 160 | 161 | [('pep8', '1.5.7'), 162 | ('flake8', '2.4.0'), 163 | ('mccabe', '0.3'), 164 | ('pyflakes', '0.8.1')] 165 | 166 | Example output: 167 | 168 | {'pep8': ['flake8'], 169 | 'flake8': [], 170 | 'mccabe': ['flake8'], 171 | 'pyflakes': ['flake8']} 172 | 173 | """ 174 | # First, collect all the dependencies into a sequence of (parent, child) 175 | # tuples, like [('flake8', 'pep8'), ('flake8', 'mccabe'), ...] 
        # Feed (dependency, dependent) pairs into the lookup-table helper to
        # produce {dependency: {dependents, ...}}.
        return lookup_table_from_tuples(
            (key_from_req(Requirement(dep_name)), name)
            for name, version_and_extras in cache_keys
            for dep_name in self.cache[name][version_and_extras]
        )
-------------------------------------------------------------------------------- /piptools/exceptions.py: --------------------------------------------------------------------------------
from __future__ import annotations

import operator
from typing import Iterable

from pip._internal.index.package_finder import PackageFinder
from pip._internal.models.candidate import InstallationCandidate
from pip._internal.req import InstallRequirement
from pip._internal.utils.misc import redact_auth_from_url


class PipToolsError(Exception):
    """Base class for all pip-tools errors."""

    pass


class NoCandidateFound(PipToolsError):
    """Raised when no candidate version matches a requirement.

    Carries the requirement, the candidates that were considered, and the
    finder (for index URLs / pre-release settings) to build a helpful message.
    """

    def __init__(
        self,
        ireq: InstallRequirement,
        candidates_tried: Iterable[InstallationCandidate],
        finder: PackageFinder,
    ) -> None:
        self.ireq = ireq
        self.candidates_tried = candidates_tried
        self.finder = finder

    def __str__(self) -> str:
        versions = []
        pre_versions = []

        # Partition the tried candidates into final vs. pre-release versions,
        # in ascending version order.
        for candidate in sorted(
            self.candidates_tried, key=operator.attrgetter("version")
        ):
            version = str(candidate.version)
            if candidate.version.is_prerelease:
                pre_versions.append(version)
            else:
                versions.append(version)

        lines = [f"Could not find a version that matches {self.ireq}"]

        if versions:
            lines.append(f"Tried: {', '.join(versions)}")

        if pre_versions:
            # Pre-releases were only "Tried" if the finder allows them;
            # otherwise they were "Skipped".
            if self.finder.allow_all_prereleases:
                line = "Tried"
            else:
                line = "Skipped"

            line += f" pre-versions: {', '.join(pre_versions)}"
            lines.append(line)

        if versions or pre_versions:
            # Some versions existed but none matched: list the conflicting
            # source requirements (if the ireq tracks them).
            lines.append(
                "There are incompatible versions in the resolved dependencies:"
            )
            source_ireqs = getattr(self.ireq, "_source_ireqs", [])
            lines.extend(f" {ireq}" for ireq in source_ireqs)
        else:
            # Nothing was found at all: suggest the indexes may be unreachable,
            # with credentials redacted from their URLs.
            redacted_urls = tuple(
                redact_auth_from_url(url) for url in self.finder.index_urls
            )
            lines.append("No versions found")
            lines.append(
                "{} {} reachable?".format(
                    "Were" if len(redacted_urls) > 1 else "Was",
                    " or ".join(redacted_urls),
                )
            )
        return "\n".join(lines)


class IncompatibleRequirements(PipToolsError):
    """Raised when two requirements for the same package conflict."""

    def __init__(self, ireq_a: InstallRequirement, ireq_b: InstallRequirement) -> None:
        self.ireq_a = ireq_a
        self.ireq_b = ireq_b

    def __str__(self) -> str:
        message = "Incompatible requirements found: {} and {}"
        return message.format(self.ireq_a, self.ireq_b)
-------------------------------------------------------------------------------- /piptools/locations.py: --------------------------------------------------------------------------------
from __future__ import annotations

from pip._internal.utils.appdirs import user_cache_dir

# The user_cache_dir helper comes straight from pip itself
CACHE_DIR = user_cache_dir("pip-tools")

# The project defaults specific to pip-tools should be written to these filenames
DEFAULT_CONFIG_FILE_NAMES = (".pip-tools.toml", "pyproject.toml")
-------------------------------------------------------------------------------- /piptools/logging.py: --------------------------------------------------------------------------------
from __future__ import annotations

import contextlib
import logging
import sys
from typing import Any, Iterator

import click

# Initialise the builtin logging module for other components using it.
# Ex: pip
logging.basicConfig()


class LogContext:
    """Verbosity-aware logger writing (optionally colorized) messages to stderr.

    ``verbosity`` gates which messages are shown: ``debug`` requires >= 1,
    ``info`` requires >= 0, while ``warning`` and ``error`` always print.
    """

    stream = sys.stderr

    def __init__(self, verbosity: int = 0, indent_width: int = 2):
        # Remember the initial settings so that reset() can restore them.
        self.verbosity = self._initial_verbosity = verbosity
        self.current_indent = self._initial_indent = 0
        self._indent_width = self._initial_indent_width = indent_width

    def log(self, message: str, *args: Any, **kwargs: Any) -> None:
        # Messages go to stderr by default so stdout stays machine-readable;
        # extra args/kwargs are forwarded to click.secho (e.g. fg, bold).
        kwargs.setdefault("err", True)
        prefix = " " * self.current_indent
        click.secho(prefix + message, *args, **kwargs)

    def debug(self, message: str, *args: Any, **kwargs: Any) -> None:
        # Shown only with -v (verbosity >= 1).
        if self.verbosity >= 1:
            self.log(message, *args, **kwargs)

    def info(self, message: str, *args: Any, **kwargs: Any) -> None:
        # Suppressed by -q (verbosity < 0).
        if self.verbosity >= 0:
            self.log(message, *args, **kwargs)

    def warning(self, message: str, *args: Any, **kwargs: Any) -> None:
        kwargs.setdefault("fg", "yellow")
        self.log(message, *args, **kwargs)

    def error(self, message: str, *args: Any, **kwargs: Any) -> None:
        kwargs.setdefault("fg", "red")
        self.log(message, *args, **kwargs)

    def _indent(self) -> None:
        self.current_indent += self._indent_width

    def _dedent(self) -> None:
        self.current_indent -= self._indent_width

    @contextlib.contextmanager
    def indentation(self) -> Iterator[None]:
        """Increase indentation for the duration of the ``with`` block."""
        self._indent()
        try:
            yield
        finally:
            self._dedent()

    def reset(self) -> None:
        """Reset logger to initial state."""
        self.verbosity = self._initial_verbosity
        self.current_indent = self._initial_indent
        self._indent_width = self._initial_indent_width


# Module-level singleton used throughout pip-tools.
log = LogContext()
--------------------------------------------------------------------------------
/piptools/py.typed:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/jazzband/pip-tools/e4ed0c1e028d1ca73673a51722ba153f0c02b0c6/piptools/py.typed
--------------------------------------------------------------------------------
/piptools/repositories/__init__.py:
--------------------------------------------------------------------------------
from __future__ import annotations

from .local import LocalRequirementsRepository
from .pypi import PyPIRepository

__all__ = ["LocalRequirementsRepository", "PyPIRepository"]
--------------------------------------------------------------------------------
/piptools/repositories/base.py:
--------------------------------------------------------------------------------
from __future__ import annotations

import optparse
from abc import ABCMeta, abstractmethod
from contextlib import contextmanager
from typing import Iterator

from pip._internal.commands.install import InstallCommand
from pip._internal.index.package_finder import PackageFinder
from pip._internal.models.index import PyPI
from pip._internal.network.session import PipSession
from pip._internal.req import InstallRequirement


class BaseRepository(metaclass=ABCMeta):
    # Abstract interface implemented by PyPIRepository and
    # LocalRequirementsRepository.
    DEFAULT_INDEX_URL = PyPI.simple_url

    def clear_caches(self) -> None:
        """Should clear any caches used by the implementation."""

    @abstractmethod
    def find_best_match(
        self, ireq: InstallRequirement,
prereleases: bool | None
    ) -> InstallRequirement:
        """
        Returns a pinned InstallRequirement object that indicates the best match
        for the given InstallRequirement according to the external repository.
        """

    @abstractmethod
    def get_dependencies(self, ireq: InstallRequirement) -> set[InstallRequirement]:
        """
        Given a pinned, URL, or editable InstallRequirement, returns a set of
        dependencies (also InstallRequirements, but not necessarily pinned).
        They indicate the secondary dependencies for the given requirement.
        """

    @abstractmethod
    def get_hashes(self, ireq: InstallRequirement) -> set[str]:
        """
        Given a pinned InstallRequirement, returns a set of hashes that represent
        all of the files for a given requirement. It is not acceptable for an
        editable or unpinned requirement to be passed to this function.
        """

    @abstractmethod
    @contextmanager
    def allow_all_wheels(self) -> Iterator[None]:
        """
        Monkey patches pip.Wheel to allow wheels from all platforms and Python versions.
        """

    # Read-only accessors that concrete repositories must expose.

    @property
    @abstractmethod
    def options(self) -> optparse.Values:
        """Returns parsed pip options"""

    @property
    @abstractmethod
    def session(self) -> PipSession:
        """Returns a session to make requests"""

    @property
    @abstractmethod
    def finder(self) -> PackageFinder:
        """Returns a package finder to interact with simple repository API (PEP 503)"""

    @property
    @abstractmethod
    def command(self) -> InstallCommand:
        """Return an install command."""
--------------------------------------------------------------------------------
/piptools/repositories/local.py:
--------------------------------------------------------------------------------
from __future__ import annotations

import optparse
from contextlib import contextmanager
from typing import Iterator, Mapping, cast

from pip._internal.commands.install import InstallCommand
from pip._internal.index.package_finder import PackageFinder
from pip._internal.models.candidate import InstallationCandidate
from pip._internal.network.session import PipSession
from pip._internal.req import InstallRequirement
from pip._internal.utils.hashes import FAVORITE_HASH

from piptools.utils import as_tuple, key_from_ireq, make_install_requirement

from .base import BaseRepository
from .pypi import PyPIRepository


def ireq_satisfied_by_existing_pin(
    ireq: InstallRequirement, existing_pin: InstallationCandidate
) -> bool:
    """
    Return :py:data:`True` if the given ``InstallRequirement`` is satisfied by the
    previously encountered version pin.
26 | """ 27 | version = next(iter(existing_pin.req.specifier)).version 28 | result = ireq.req.specifier.contains( 29 | version, prereleases=existing_pin.req.specifier.prereleases 30 | ) 31 | return cast(bool, result) 32 | 33 | 34 | class LocalRequirementsRepository(BaseRepository): 35 | """ 36 | The LocalRequirementsRepository proxied the _real_ repository by first 37 | checking if a requirement can be satisfied by existing pins (i.e. the 38 | result of a previous compile step). 39 | 40 | In effect, if a requirement can be satisfied with a version pinned in the 41 | requirements file, we prefer that version over the best match found in 42 | PyPI. This keeps updates to the requirements.txt down to a minimum. 43 | """ 44 | 45 | def __init__( 46 | self, 47 | existing_pins: Mapping[str, InstallationCandidate], 48 | proxied_repository: PyPIRepository, 49 | reuse_hashes: bool = True, 50 | ): 51 | self._reuse_hashes = reuse_hashes 52 | self.repository = proxied_repository 53 | self.existing_pins = existing_pins 54 | 55 | @property 56 | def options(self) -> optparse.Values: 57 | return self.repository.options 58 | 59 | @property 60 | def finder(self) -> PackageFinder: 61 | return self.repository.finder 62 | 63 | @property 64 | def session(self) -> PipSession: 65 | return self.repository.session 66 | 67 | @property 68 | def command(self) -> InstallCommand: 69 | """Return an install command instance.""" 70 | return self.repository.command 71 | 72 | def clear_caches(self) -> None: 73 | self.repository.clear_caches() 74 | 75 | def find_best_match( 76 | self, ireq: InstallRequirement, prereleases: bool | None = None 77 | ) -> InstallationCandidate: 78 | key = key_from_ireq(ireq) 79 | existing_pin = self.existing_pins.get(key) 80 | if existing_pin and ireq_satisfied_by_existing_pin(ireq, existing_pin): 81 | project, version, _ = as_tuple(existing_pin) 82 | return make_install_requirement(project, version, ireq) 83 | else: 84 | return self.repository.find_best_match(ireq, 
prereleases) 85 | 86 | def get_dependencies(self, ireq: InstallRequirement) -> set[InstallRequirement]: 87 | return self.repository.get_dependencies(ireq) 88 | 89 | def get_hashes(self, ireq: InstallRequirement) -> set[str]: 90 | existing_pin = self._reuse_hashes and self.existing_pins.get( 91 | key_from_ireq(ireq) 92 | ) 93 | if existing_pin and ireq_satisfied_by_existing_pin(ireq, existing_pin): 94 | hashes = existing_pin.hash_options 95 | hexdigests = hashes.get(FAVORITE_HASH) 96 | if hexdigests: 97 | return { 98 | ":".join([FAVORITE_HASH, hexdigest]) for hexdigest in hexdigests 99 | } 100 | return self.repository.get_hashes(ireq) 101 | 102 | @contextmanager 103 | def allow_all_wheels(self) -> Iterator[None]: 104 | with self.repository.allow_all_wheels(): 105 | yield 106 | -------------------------------------------------------------------------------- /piptools/scripts/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jazzband/pip-tools/e4ed0c1e028d1ca73673a51722ba153f0c02b0c6/piptools/scripts/__init__.py -------------------------------------------------------------------------------- /piptools/scripts/options.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | from typing import Any, Literal 4 | 5 | import click 6 | from pip._internal.commands import create_command 7 | from pip._internal.utils.misc import redact_auth_from_url 8 | 9 | from piptools.locations import CACHE_DIR, DEFAULT_CONFIG_FILE_NAMES 10 | from piptools.utils import UNSAFE_PACKAGES, override_defaults_from_config_file 11 | 12 | BuildTargetT = Literal["sdist", "wheel", "editable"] 13 | ALL_BUILD_TARGETS: tuple[BuildTargetT, ...] 
= ( 14 | "editable", 15 | "sdist", 16 | "wheel", 17 | ) 18 | 19 | 20 | def _get_default_option(option_name: str) -> Any: 21 | """ 22 | Get default value of the pip's option (including option from pip.conf) 23 | by a given option name. 24 | """ 25 | install_command = create_command("install") 26 | default_values = install_command.parser.get_default_values() 27 | return getattr(default_values, option_name) 28 | 29 | 30 | help_option_names = ("-h", "--help") 31 | 32 | # The options used by pip-compile and pip-sync are presented in no specific order. 33 | 34 | version = click.version_option(package_name="pip-tools") 35 | 36 | color = click.option( 37 | "--color/--no-color", 38 | default=None, 39 | help="Force output to be colorized or not, instead of auto-detecting color support", 40 | ) 41 | 42 | verbose = click.option( 43 | "-v", 44 | "--verbose", 45 | count=True, 46 | help="Show more output", 47 | ) 48 | quiet = click.option( 49 | "-q", 50 | "--quiet", 51 | count=True, 52 | help="Give less output", 53 | ) 54 | 55 | dry_run = click.option( 56 | "-n", 57 | "--dry-run", 58 | is_flag=True, 59 | help="Only show what would happen, don't change anything", 60 | ) 61 | 62 | pre = click.option( 63 | "-p", 64 | "--pre", 65 | is_flag=True, 66 | default=None, 67 | help="Allow resolving to prereleases (default is not)", 68 | ) 69 | 70 | rebuild = click.option( 71 | "-r", 72 | "--rebuild", 73 | is_flag=True, 74 | help="Clear any caches upfront, rebuild from scratch", 75 | ) 76 | 77 | extra = click.option( 78 | "--extra", 79 | "extras", 80 | multiple=True, 81 | help="Name of an extras_require group to install; may be used more than once", 82 | ) 83 | 84 | all_extras = click.option( 85 | "--all-extras", 86 | is_flag=True, 87 | default=False, 88 | help="Install all extras_require groups", 89 | ) 90 | 91 | find_links = click.option( 92 | "-f", 93 | "--find-links", 94 | multiple=True, 95 | help="Look for archives in this directory or on this HTML page; may be used more than once", 96 | 
) 97 | 98 | index_url = click.option( 99 | "-i", 100 | "--index-url", 101 | help="Change index URL (defaults to {index_url})".format( 102 | index_url=redact_auth_from_url(_get_default_option("index_url")) 103 | ), 104 | ) 105 | 106 | no_index = click.option( 107 | "--no-index", 108 | is_flag=True, 109 | help="Ignore package index (only looking at --find-links URLs instead).", 110 | ) 111 | 112 | extra_index_url = click.option( 113 | "--extra-index-url", 114 | multiple=True, 115 | help="Add another index URL to search; may be used more than once", 116 | ) 117 | 118 | cert = click.option("--cert", help="Path to alternate CA bundle.") 119 | 120 | client_cert = click.option( 121 | "--client-cert", 122 | help=( 123 | "Path to SSL client certificate, a single file containing " 124 | "the private key and the certificate in PEM format." 125 | ), 126 | ) 127 | 128 | trusted_host = click.option( 129 | "--trusted-host", 130 | multiple=True, 131 | help=( 132 | "Mark this host as trusted, even though it does not have " 133 | "valid or any HTTPS; may be used more than once" 134 | ), 135 | ) 136 | 137 | header = click.option( 138 | "--header/--no-header", 139 | is_flag=True, 140 | default=True, 141 | help="Add header to generated file", 142 | ) 143 | 144 | emit_trusted_host = click.option( 145 | "--emit-trusted-host/--no-emit-trusted-host", 146 | is_flag=True, 147 | default=True, 148 | help="Add trusted host option to generated file", 149 | ) 150 | 151 | annotate = click.option( 152 | "--annotate/--no-annotate", 153 | is_flag=True, 154 | default=True, 155 | help="Annotate results, indicating where dependencies come from", 156 | ) 157 | 158 | annotation_style = click.option( 159 | "--annotation-style", 160 | type=click.Choice(("line", "split")), 161 | default="split", 162 | help="Choose the format of annotation comments", 163 | ) 164 | 165 | upgrade = click.option( 166 | "-U", 167 | "--upgrade/--no-upgrade", 168 | is_flag=True, 169 | default=False, 170 | help="Try to upgrade all 
dependencies to their latest versions", 171 | ) 172 | 173 | upgrade_package = click.option( 174 | "-P", 175 | "--upgrade-package", 176 | "upgrade_packages", 177 | nargs=1, 178 | multiple=True, 179 | help="Specify a particular package to upgrade; may be used more than once", 180 | ) 181 | 182 | output_file = click.option( 183 | "-o", 184 | "--output-file", 185 | nargs=1, 186 | default=None, 187 | type=click.File("w+b", atomic=True, lazy=True), 188 | help=( 189 | "Output file name. Required if more than one input file is given. " 190 | "Will be derived from input file otherwise." 191 | ), 192 | ) 193 | 194 | newline = click.option( 195 | "--newline", 196 | type=click.Choice(("LF", "CRLF", "native", "preserve"), case_sensitive=False), 197 | default="preserve", 198 | help="Override the newline control characters used", 199 | ) 200 | 201 | allow_unsafe = click.option( 202 | "--allow-unsafe/--no-allow-unsafe", 203 | is_flag=True, 204 | default=False, 205 | help=( 206 | "Pin packages considered unsafe: {}.\n\n" 207 | "WARNING: Future versions of pip-tools will enable this behavior by default. " 208 | "Use --no-allow-unsafe to keep the old behavior. It is recommended to pass the " 209 | "--allow-unsafe now to adapt to the upcoming change.".format( 210 | ", ".join(sorted(UNSAFE_PACKAGES)) 211 | ) 212 | ), 213 | ) 214 | 215 | strip_extras = click.option( 216 | "--strip-extras/--no-strip-extras", 217 | is_flag=True, 218 | default=None, 219 | help="Assure output file is constraints compatible, avoiding use of extras.", 220 | ) 221 | 222 | generate_hashes = click.option( 223 | "--generate-hashes", 224 | is_flag=True, 225 | default=False, 226 | help="Generate pip 8 style hashes in the resulting requirements file.", 227 | ) 228 | 229 | reuse_hashes = click.option( 230 | "--reuse-hashes/--no-reuse-hashes", 231 | is_flag=True, 232 | default=True, 233 | help=( 234 | "Improve the speed of --generate-hashes by reusing the hashes from an " 235 | "existing output file." 
236 | ), 237 | ) 238 | 239 | max_rounds = click.option( 240 | "--max-rounds", 241 | default=10, 242 | help="Maximum number of rounds before resolving the requirements aborts.", 243 | ) 244 | 245 | src_files = click.argument( 246 | "src_files", 247 | nargs=-1, 248 | type=click.Path(exists=True, allow_dash=True), 249 | ) 250 | 251 | build_isolation = click.option( 252 | "--build-isolation/--no-build-isolation", 253 | is_flag=True, 254 | default=True, 255 | help=( 256 | "Enable isolation when building a modern source distribution. " 257 | "Build dependencies specified by PEP 518 must be already installed " 258 | "if build isolation is disabled." 259 | ), 260 | ) 261 | 262 | emit_find_links = click.option( 263 | "--emit-find-links/--no-emit-find-links", 264 | is_flag=True, 265 | default=True, 266 | help="Add the find-links option to generated file", 267 | ) 268 | 269 | cache_dir = click.option( 270 | "--cache-dir", 271 | help="Store the cache data in DIRECTORY.", 272 | default=CACHE_DIR, 273 | envvar="PIP_TOOLS_CACHE_DIR", 274 | show_default=True, 275 | show_envvar=True, 276 | type=click.Path(file_okay=False, writable=True), 277 | ) 278 | 279 | pip_args = click.option( 280 | "--pip-args", 281 | "pip_args_str", 282 | help="Arguments to pass directly to the pip command.", 283 | ) 284 | 285 | resolver = click.option( 286 | "--resolver", 287 | "resolver_name", 288 | type=click.Choice(("legacy", "backtracking")), 289 | default="backtracking", 290 | envvar="PIP_TOOLS_RESOLVER", 291 | help="Choose the dependency resolver.", 292 | ) 293 | 294 | emit_index_url = click.option( 295 | "--emit-index-url/--no-emit-index-url", 296 | is_flag=True, 297 | default=True, 298 | help="Add index URL to generated file", 299 | ) 300 | 301 | emit_options = click.option( 302 | "--emit-options/--no-emit-options", 303 | is_flag=True, 304 | default=True, 305 | help="Add options to generated file", 306 | ) 307 | 308 | unsafe_package = click.option( 309 | "--unsafe-package", 310 | multiple=True, 311 
| help=( 312 | "Specify a package to consider unsafe; may be used more than once. " 313 | f"Replaces default unsafe packages: {', '.join(sorted(UNSAFE_PACKAGES))}" 314 | ), 315 | ) 316 | 317 | config = click.option( 318 | "--config", 319 | type=click.Path( 320 | exists=True, 321 | file_okay=True, 322 | dir_okay=False, 323 | readable=True, 324 | allow_dash=False, 325 | path_type=str, 326 | ), 327 | help=( 328 | f"Read configuration from TOML file. By default, looks for the following " 329 | f"files in the given order: {', '.join(DEFAULT_CONFIG_FILE_NAMES)}." 330 | ), 331 | is_eager=True, 332 | callback=override_defaults_from_config_file, 333 | ) 334 | 335 | no_config = click.option( 336 | "--no-config", 337 | is_flag=True, 338 | default=False, 339 | help="Do not read any config file.", 340 | is_eager=True, 341 | ) 342 | 343 | constraint = click.option( 344 | "-c", 345 | "--constraint", 346 | multiple=True, 347 | help="Constrain versions using the given constraints file; may be used more than once.", 348 | ) 349 | 350 | ask = click.option( 351 | "-a", 352 | "--ask", 353 | is_flag=True, 354 | help="Show what would happen, then ask whether to continue", 355 | ) 356 | 357 | force = click.option( 358 | "--force", is_flag=True, help="Proceed even if conflicts are found" 359 | ) 360 | 361 | python_executable = click.option( 362 | "--python-executable", 363 | help="Custom python executable path if targeting an environment other than current.", 364 | ) 365 | 366 | user = click.option( 367 | "--user", 368 | "user_only", 369 | is_flag=True, 370 | help="Restrict attention to user directory", 371 | ) 372 | 373 | build_deps_for = click.option( 374 | "--build-deps-for", 375 | "build_deps_targets", 376 | multiple=True, 377 | type=click.Choice(ALL_BUILD_TARGETS), 378 | help="Name of a build target to extract dependencies for. 
" 379 | "Static dependencies declared in 'pyproject.toml::build-system.requires' will be included as " 380 | "well; may be used more than once.", 381 | ) 382 | 383 | all_build_deps = click.option( 384 | "--all-build-deps", 385 | is_flag=True, 386 | default=False, 387 | help="Extract dependencies for all build targets. " 388 | "Static dependencies declared in 'pyproject.toml::build-system.requires' will be included as " 389 | "well.", 390 | ) 391 | 392 | only_build_deps = click.option( 393 | "--only-build-deps", 394 | is_flag=True, 395 | default=False, 396 | help="Extract a package only if it is a build dependency.", 397 | ) 398 | -------------------------------------------------------------------------------- /piptools/scripts/sync.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import itertools 4 | import os 5 | import shlex 6 | import shutil 7 | import sys 8 | from pathlib import Path 9 | from typing import cast 10 | 11 | import click 12 | from pip._internal.commands import create_command 13 | from pip._internal.commands.install import InstallCommand 14 | from pip._internal.index.package_finder import PackageFinder 15 | from pip._internal.metadata import get_environment 16 | 17 | from .. import sync 18 | from .._compat import Distribution, parse_requirements 19 | from ..exceptions import PipToolsError 20 | from ..logging import log 21 | from ..repositories import PyPIRepository 22 | from ..utils import ( 23 | flat_map, 24 | get_pip_version_for_python_executable, 25 | get_required_pip_specification, 26 | get_sys_path_for_python_executable, 27 | ) 28 | from . 
import options 29 | 30 | DEFAULT_REQUIREMENTS_FILE = "requirements.txt" 31 | 32 | 33 | @click.command( 34 | name="pip-sync", context_settings={"help_option_names": options.help_option_names} 35 | ) 36 | @options.version 37 | @options.ask 38 | @options.dry_run 39 | @options.force 40 | @options.find_links 41 | @options.index_url 42 | @options.extra_index_url 43 | @options.trusted_host 44 | @options.no_index 45 | @options.python_executable 46 | @options.verbose 47 | @options.quiet 48 | @options.user 49 | @options.cert 50 | @options.client_cert 51 | @options.src_files 52 | @options.pip_args 53 | @options.config 54 | @options.no_config 55 | def cli( 56 | ask: bool, 57 | dry_run: bool, 58 | force: bool, 59 | find_links: tuple[str, ...], 60 | index_url: str | None, 61 | extra_index_url: tuple[str, ...], 62 | trusted_host: tuple[str, ...], 63 | no_index: bool, 64 | python_executable: str | None, 65 | verbose: int, 66 | quiet: int, 67 | user_only: bool, 68 | cert: str | None, 69 | client_cert: str | None, 70 | src_files: tuple[str, ...], 71 | pip_args_str: str | None, 72 | config: Path | None, 73 | no_config: bool, 74 | ) -> None: 75 | """Synchronize virtual environment with requirements.txt.""" 76 | log.verbosity = verbose - quiet 77 | 78 | if not src_files: 79 | if os.path.exists(DEFAULT_REQUIREMENTS_FILE): 80 | src_files = (DEFAULT_REQUIREMENTS_FILE,) 81 | else: 82 | msg = "No requirement files given and no {} found in the current directory" 83 | log.error(msg.format(DEFAULT_REQUIREMENTS_FILE)) 84 | sys.exit(2) 85 | 86 | if any(src_file.endswith(".in") for src_file in src_files): 87 | msg = ( 88 | "Some input files have the .in extension, which is most likely an error " 89 | "and can cause weird behaviour. You probably meant to use " 90 | "the corresponding *.txt file?" 
91 | ) 92 | if force: 93 | log.warning("WARNING: " + msg) 94 | else: 95 | log.error("ERROR: " + msg) 96 | sys.exit(2) 97 | 98 | if config: 99 | log.debug(f"Using pip-tools configuration defaults found in '{config !s}'.") 100 | 101 | if python_executable: 102 | _validate_python_executable(python_executable) 103 | 104 | install_command = cast(InstallCommand, create_command("install")) 105 | options, _ = install_command.parse_args([]) 106 | session = install_command._build_session(options) 107 | finder = install_command._build_package_finder(options=options, session=session) 108 | 109 | # Parse requirements file. Note, all options inside requirements file 110 | # will be collected by the finder. 111 | requirements = flat_map( 112 | lambda src: parse_requirements(src, finder=finder, session=session), src_files 113 | ) 114 | 115 | try: 116 | merged_requirements = sync.merge(requirements, ignore_conflicts=force) 117 | except PipToolsError as e: 118 | log.error(str(e)) 119 | sys.exit(2) 120 | 121 | paths = ( 122 | None 123 | if python_executable is None 124 | else get_sys_path_for_python_executable(python_executable) 125 | ) 126 | installed_dists = _get_installed_distributions( 127 | user_only=user_only, 128 | local_only=python_executable is None, 129 | paths=paths, 130 | ) 131 | to_install, to_uninstall = sync.diff(merged_requirements, installed_dists) 132 | 133 | install_flags = _compose_install_flags( 134 | finder, 135 | no_index=no_index, 136 | index_url=index_url, 137 | extra_index_url=extra_index_url, 138 | trusted_host=trusted_host, 139 | find_links=find_links, 140 | user_only=user_only, 141 | cert=cert, 142 | client_cert=client_cert, 143 | ) + shlex.split(pip_args_str or "") 144 | sys.exit( 145 | sync.sync( 146 | to_install, 147 | to_uninstall, 148 | dry_run=dry_run, 149 | install_flags=install_flags, 150 | ask=ask, 151 | python_executable=python_executable, 152 | ) 153 | ) 154 | 155 | 156 | def _validate_python_executable(python_executable: str) -> None: 157 | 
""" 158 | Validates incoming python_executable argument passed to CLI. 159 | """ 160 | resolved_python_executable = shutil.which(python_executable) 161 | if resolved_python_executable is None: 162 | msg = "Could not resolve '{}' as valid executable path or alias." 163 | log.error(msg.format(python_executable)) 164 | sys.exit(2) 165 | 166 | # Ensure that target python executable has the right version of pip installed 167 | pip_version = get_pip_version_for_python_executable(python_executable) 168 | required_pip_specification = get_required_pip_specification() 169 | if not required_pip_specification.contains(pip_version, prereleases=True): 170 | msg = ( 171 | "Target python executable '{}' has pip version {} installed. " 172 | "Version {} is expected." 173 | ) 174 | log.error( 175 | msg.format(python_executable, pip_version, required_pip_specification) 176 | ) 177 | sys.exit(2) 178 | 179 | 180 | def _compose_install_flags( 181 | finder: PackageFinder, 182 | no_index: bool, 183 | index_url: str | None, 184 | extra_index_url: tuple[str, ...], 185 | trusted_host: tuple[str, ...], 186 | find_links: tuple[str, ...], 187 | user_only: bool, 188 | cert: str | None, 189 | client_cert: str | None, 190 | ) -> list[str]: 191 | """ 192 | Compose install flags with the given finder and CLI options. 
    """
    result = []

    # Build --index-url/--extra-index-url/--no-index
    if no_index:
        result.append("--no-index")
    elif index_url is not None:
        result.extend(["--index-url", index_url])
    elif finder.index_urls:
        # Only emit --index-url when it differs from the default PyPI URL.
        finder_index_url = finder.index_urls[0]
        if finder_index_url != PyPIRepository.DEFAULT_INDEX_URL:
            result.extend(["--index-url", finder_index_url])
        for extra_index in finder.index_urls[1:]:
            result.extend(["--extra-index-url", extra_index])
    else:
        result.append("--no-index")

    for extra_index in extra_index_url:
        result.extend(["--extra-index-url", extra_index])

    # Build --trusted-hosts
    for host in itertools.chain(trusted_host, finder.trusted_hosts):
        result.extend(["--trusted-host", host])

    # Build --find-links
    for link in itertools.chain(find_links, finder.find_links):
        result.extend(["--find-links", link])

    # Build format controls --no-binary/--only-binary
    for format_control in ("no_binary", "only_binary"):
        formats = getattr(finder.format_control, format_control)
        if not formats:
            continue
        result.extend(
            ["--" + format_control.replace("_", "-"), ",".join(sorted(formats))]
        )

    if user_only:
        result.append("--user")

    if cert is not None:
        result.extend(["--cert", cert])

    if client_cert is not None:
        result.extend(["--client-cert", client_cert])

    return result


def _get_installed_distributions(
    local_only: bool = True,
    user_only: bool = False,
    paths: list[str] | None = None,
) -> list[Distribution]:
    """Return a list of installed Distribution objects."""

    env = get_environment(paths)
    dists = env.iter_installed_distributions(
        local_only=local_only,
        user_only=user_only,
        skip=[],
    )
    return [Distribution.from_pip_distribution(dist) for dist in dists]
--------------------------------------------------------------------------------
/piptools/subprocess_utils.py:
--------------------------------------------------------------------------------
# WARNING! BE CAREFUL UPDATING THIS FILE
# Consider possible security implications associated with subprocess module.
from __future__ import annotations

import subprocess  # nosec


def run_python_snippet(python_executable: str, code_to_run: str) -> str:
    """
    Execute Python code by calling ``python_executable`` with '-c' option.
    """
    py_exec_cmd = python_executable, "-c", code_to_run

    # subprocess module should never be used with untrusted input
    return subprocess.check_output(  # nosec
        py_exec_cmd,
        shell=False,
        text=True,
    )
--------------------------------------------------------------------------------
/piptools/sync.py:
--------------------------------------------------------------------------------
from __future__ import annotations

import collections
import os
import sys
import tempfile
from subprocess import run  # nosec
from typing import Iterable, Mapping, ValuesView

import click
from pip._internal.models.direct_url import ArchiveInfo
from pip._internal.req import InstallRequirement
from pip._internal.utils.compat import stdlib_pkgs
from pip._internal.utils.direct_url_helpers import (
    direct_url_as_pep440_direct_reference,
    direct_url_from_link,
)
from pip._vendor.packaging.utils import canonicalize_name

from ._compat import Distribution, get_dev_pkgs
from .exceptions import IncompatibleRequirements
from .logging import log
from .utils import (
    flat_map,
    format_requirement,
    get_hashes_from_ireq,
    is_url_requirement,
    key_from_ireq,
    key_from_req,
)

# Packages pip-sync must never install or uninstall: pip-tools itself,
# the standard library, and development helpers.
PACKAGES_TO_IGNORE = [
    "-markerlib",
    "pip",
    "pip-tools",
    "pip-review",
    "pkg-resources",
    *stdlib_pkgs,
    *get_dev_pkgs(),
]


def dependency_tree(
    installed_keys: Mapping[str, Distribution], root_key: str
) -> set[str]:
    """Calculate the dependency tree for a package.

    Return a collection of all of the package's dependencies.
    Uses a BFS traversal algorithm (deque + popleft).

    ``installed_keys`` should be a {key: requirement} mapping, e.g.
    {'django': from_line('django==1.8')}
    :param root_key: the key to return the dependency tree for
    :type root_key: str
    """
    dependencies = set()
    queue: collections.deque[Distribution] = collections.deque()

    if root_key in installed_keys:
        dep = installed_keys[root_key]
        queue.append(dep)

    # Breadth-first walk over the installed distribution graph.
    while queue:
        v = queue.popleft()
        key = str(canonicalize_name(v.key))
        if key in dependencies:
            continue

        dependencies.add(key)

        for dep_specifier in v.requires:
            dep_name = key_from_req(dep_specifier)
            if dep_name in installed_keys:
                dep = installed_keys[dep_name]

                # Only follow the edge when the installed version actually
                # satisfies the declared specifier.
                if dep_specifier.specifier.contains(dep.version):
                    queue.append(dep)

    return dependencies


def get_dists_to_ignore(installed: Iterable[Distribution]) -> list[str]:
    """Return a collection of package names to ignore by ``pip-sync``.

    Based on the currently installed environment. For example, when pip-tools
    is installed in the local environment, it should be ignored, including all
    of its dependencies (e.g. click). When pip-tools is not installed
    locally, click should also be installed/uninstalled depending on the given
    requirements.
def merge(
    requirements: Iterable[InstallRequirement], ignore_conflicts: bool
) -> ValuesView[InstallRequirement]:
    """Deduplicate ``requirements`` by key, keeping the last one seen.

    Requirements whose environment markers do not match are dropped.  When
    ``ignore_conflicts`` is false, two requirements that share a key but
    disagree on their specifier raise :class:`IncompatibleRequirements`.
    """
    merged: dict[str, InstallRequirement] = {}

    for candidate in requirements:
        # Limitation: URL requirements are merged by precise string match, so
        # "file:///example.zip#egg=example", "file:///example.zip", and
        # "example==1.0" will not merge with each other
        if not candidate.match_markers():
            continue

        key = key_from_ireq(candidate)

        if not ignore_conflicts:
            previous = merged.get(key)
            if previous:
                # NOTE: We check equality here since we can assume that the
                # requirements are all pinned
                conflicting = (
                    candidate.req
                    and previous.req
                    and candidate.specifier != previous.specifier
                )
                if conflicting:
                    raise IncompatibleRequirements(candidate, previous)

        # TODO: Always pick the largest specifier in case of a conflict
        merged[key] = candidate
    return merged.values()
def diff_key_from_req(req: Distribution) -> str:
    """Get a unique key for an installed distribution.

    Plain installs are keyed by canonical project name; archive-based
    direct-URL installs carrying a hash are keyed by their PEP 440 direct
    reference instead, mirroring ``diff_key_from_ireq``.
    """
    name = str(canonicalize_name(req.key))
    direct_url = req.direct_url
    is_hashed_archive = (
        direct_url
        and isinstance(direct_url.info, ArchiveInfo)
        and direct_url.info.hash
    )
    if is_hashed_archive:
        return direct_url_as_pep440_direct_reference(direct_url, name)
    # TODO: Also support VCS and editable installs.
    return name
168 | """ 169 | requirements_lut = {diff_key_from_ireq(r): r for r in compiled_requirements} 170 | 171 | satisfied = set() # holds keys 172 | to_install = set() # holds InstallRequirement objects 173 | to_uninstall = set() # holds keys 174 | 175 | pkgs_to_ignore = get_dists_to_ignore(installed_dists) 176 | for dist in installed_dists: 177 | key = diff_key_from_req(dist) 178 | if key not in requirements_lut or not requirements_lut[key].match_markers(): 179 | to_uninstall.add(key) 180 | elif requirements_lut[key].specifier.contains(dist.version): 181 | satisfied.add(key) 182 | 183 | for key, requirement in requirements_lut.items(): 184 | if key not in satisfied and requirement.match_markers(): 185 | to_install.add(requirement) 186 | 187 | # Make sure to not uninstall any packages that should be ignored 188 | to_uninstall -= set(pkgs_to_ignore) 189 | 190 | return (to_install, to_uninstall) 191 | 192 | 193 | def sync( 194 | to_install: Iterable[InstallRequirement], 195 | to_uninstall: Iterable[InstallRequirement], 196 | dry_run: bool = False, 197 | install_flags: list[str] | None = None, 198 | ask: bool = False, 199 | python_executable: str | None = None, 200 | ) -> int: 201 | """Install and uninstall the given sets of modules.""" 202 | exit_code = 0 203 | 204 | python_executable = python_executable or sys.executable 205 | 206 | if not to_uninstall and not to_install: 207 | log.info("Everything up-to-date", err=False) 208 | return exit_code 209 | 210 | pip_flags = [] 211 | if log.verbosity < 0: 212 | pip_flags += ["-q"] 213 | 214 | if ask: 215 | dry_run = True 216 | 217 | if dry_run: 218 | if to_uninstall: 219 | click.echo("Would uninstall:") 220 | for pkg in sorted(to_uninstall): 221 | click.echo(f" {pkg}") 222 | 223 | if to_install: 224 | click.echo("Would install:") 225 | for ireq in sorted(to_install, key=key_from_ireq): 226 | click.echo(f" {format_requirement(ireq)}") 227 | 228 | exit_code = 1 229 | 230 | if ask and click.confirm("Would you like to proceed with 
these changes?"): 231 | dry_run = False 232 | exit_code = 0 233 | 234 | if not dry_run: 235 | if to_uninstall: 236 | run( # nosec 237 | [ 238 | python_executable, 239 | "-m", 240 | "pip", 241 | "uninstall", 242 | "-y", 243 | *pip_flags, 244 | *sorted(to_uninstall), 245 | ], 246 | check=True, 247 | ) 248 | 249 | if to_install: 250 | if install_flags is None: 251 | install_flags = [] 252 | # prepare requirement lines 253 | req_lines = [] 254 | for ireq in sorted(to_install, key=key_from_ireq): 255 | ireq_hashes = get_hashes_from_ireq(ireq) 256 | req_lines.append(format_requirement(ireq, hashes=ireq_hashes)) 257 | 258 | # save requirement lines to a temporary file 259 | tmp_req_file = tempfile.NamedTemporaryFile(mode="wt", delete=False) 260 | tmp_req_file.write("\n".join(req_lines)) 261 | tmp_req_file.close() 262 | 263 | try: 264 | run( # nosec 265 | [ 266 | python_executable, 267 | "-m", 268 | "pip", 269 | "install", 270 | "-r", 271 | tmp_req_file.name, 272 | *pip_flags, 273 | *install_flags, 274 | ], 275 | check=True, 276 | ) 277 | finally: 278 | os.unlink(tmp_req_file.name) 279 | 280 | return exit_code 281 | -------------------------------------------------------------------------------- /piptools/writer.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import io 4 | import os 5 | import re 6 | import sys 7 | from itertools import chain 8 | from typing import BinaryIO, Iterable, Iterator, cast 9 | 10 | from click import unstyle 11 | from click.core import Context 12 | from pip._internal.models.format_control import FormatControl 13 | from pip._internal.req.req_install import InstallRequirement 14 | from pip._vendor.packaging.markers import Marker 15 | from pip._vendor.packaging.utils import canonicalize_name 16 | 17 | from .logging import log 18 | from .utils import ( 19 | comment, 20 | dedup, 21 | format_requirement, 22 | get_compile_command, 23 | key_from_ireq, 24 | strip_extras, 25 
| ) 26 | 27 | MESSAGE_UNHASHED_PACKAGE = comment( 28 | "# WARNING: pip install will require the following package to be hashed." 29 | "\n# Consider using a hashable URL like " 30 | "https://github.com/jazzband/pip-tools/archive/SOMECOMMIT.zip" 31 | ) 32 | 33 | MESSAGE_UNSAFE_PACKAGES_UNPINNED = comment( 34 | "# WARNING: The following packages were not pinned, but pip requires them to be" 35 | "\n# pinned when the requirements file includes hashes and the requirement is not" 36 | "\n# satisfied by a package already installed. " 37 | "Consider using the --allow-unsafe flag." 38 | ) 39 | 40 | MESSAGE_UNSAFE_PACKAGES = comment( 41 | "# The following packages are considered to be unsafe in a requirements file:" 42 | ) 43 | 44 | MESSAGE_UNINSTALLABLE = ( 45 | "The generated requirements file may be rejected by pip install. " 46 | "See # WARNING lines for details." 47 | ) 48 | 49 | 50 | strip_comes_from_line_re = re.compile(r" \(line \d+\)$") 51 | 52 | 53 | def _comes_from_as_string(comes_from: str | InstallRequirement) -> str: 54 | if isinstance(comes_from, str): 55 | return strip_comes_from_line_re.sub("", comes_from) 56 | return cast(str, canonicalize_name(key_from_ireq(comes_from))) 57 | 58 | 59 | def annotation_style_split(required_by: set[str]) -> str: 60 | sorted_required_by = sorted(required_by) 61 | if len(sorted_required_by) == 1: 62 | source = sorted_required_by[0] 63 | annotation = "# via " + source 64 | else: 65 | annotation_lines = ["# via"] 66 | for source in sorted_required_by: 67 | annotation_lines.append(" # " + source) 68 | annotation = "\n".join(annotation_lines) 69 | return annotation 70 | 71 | 72 | def annotation_style_line(required_by: set[str]) -> str: 73 | return f"# via {', '.join(sorted(required_by))}" 74 | 75 | 76 | class OutputWriter: 77 | def __init__( 78 | self, 79 | dst_file: BinaryIO, 80 | click_ctx: Context, 81 | dry_run: bool, 82 | emit_header: bool, 83 | emit_index_url: bool, 84 | emit_trusted_host: bool, 85 | annotate: bool, 86 | 
    def _sort_key(self, ireq: InstallRequirement) -> tuple[bool, str]:
        # Sort editables first, then alphabetically by canonical key.
        return (not ireq.editable, key_from_ireq(ireq))

    def write_header(self) -> Iterator[str]:
        """Yield the autogenerated-file banner comment lines, if enabled."""
        if self.emit_header:
            yield comment("#")
            yield comment(
                "# This file is autogenerated by pip-compile with Python "
                f"{sys.version_info.major}.{sys.version_info.minor}"
            )
            yield comment("# by the following command:")
            yield comment("#")
            # CUSTOM_COMPILE_COMMAND lets wrapper scripts (e.g. Makefiles)
            # override the reproduced command line in the header.
            compile_command = os.environ.get(
                "CUSTOM_COMPILE_COMMAND"
            ) or get_compile_command(self.click_ctx)
            yield comment(f"# {compile_command}")
            yield comment("#")
"--index-url" if index == 0 else "--extra-index-url" 143 | yield f"{flag} {index_url}" 144 | 145 | def write_trusted_hosts(self) -> Iterator[str]: 146 | if self.emit_trusted_host: 147 | for trusted_host in dedup(self.trusted_hosts): 148 | yield f"--trusted-host {trusted_host}" 149 | 150 | def write_format_controls(self) -> Iterator[str]: 151 | # The ordering of output needs to preserve the behavior of pip's 152 | # FormatControl.get_allowed_formats(). The behavior is the following: 153 | # 154 | # * Parsing of CLI options happens first to last. 155 | # * --only-binary takes precedence over --no-binary 156 | # * Package names take precedence over :all: 157 | # * We'll never see :all: in both due to mutual exclusion. 158 | # 159 | # So in summary, we want to emit :all: first and then package names later. 160 | no_binary = self.format_control.no_binary.copy() 161 | only_binary = self.format_control.only_binary.copy() 162 | 163 | if ":all:" in no_binary: 164 | yield "--no-binary :all:" 165 | no_binary.remove(":all:") 166 | if ":all:" in only_binary: 167 | yield "--only-binary :all:" 168 | only_binary.remove(":all:") 169 | for nb in dedup(sorted(no_binary)): 170 | yield f"--no-binary {nb}" 171 | for ob in dedup(sorted(only_binary)): 172 | yield f"--only-binary {ob}" 173 | 174 | def write_find_links(self) -> Iterator[str]: 175 | if self.emit_find_links: 176 | for find_link in dedup(self.find_links): 177 | yield f"--find-links {find_link}" 178 | 179 | def write_flags(self) -> Iterator[str]: 180 | if not self.emit_options: 181 | return 182 | emitted = False 183 | for line in chain( 184 | self.write_index_options(), 185 | self.write_find_links(), 186 | self.write_trusted_hosts(), 187 | self.write_format_controls(), 188 | ): 189 | emitted = True 190 | yield line 191 | if emitted: 192 | yield "" 193 | 194 | def _iter_lines( 195 | self, 196 | results: set[InstallRequirement], 197 | unsafe_requirements: set[InstallRequirement], 198 | unsafe_packages: set[str], 199 | markers: 
dict[str, Marker], 200 | hashes: dict[InstallRequirement, set[str]] | None = None, 201 | ) -> Iterator[str]: 202 | # default values 203 | unsafe_packages = unsafe_packages if self.allow_unsafe else set() 204 | hashes = hashes or {} 205 | 206 | # Check for unhashed or unpinned packages if at least one package does have 207 | # hashes, which will trigger pip install's --require-hashes mode. 208 | warn_uninstallable = False 209 | has_hashes = hashes and any(hash for hash in hashes.values()) 210 | 211 | yielded = False 212 | 213 | for line in self.write_header(): 214 | yield line 215 | yielded = True 216 | for line in self.write_flags(): 217 | yield line 218 | yielded = True 219 | 220 | unsafe_requirements = unsafe_requirements or { 221 | r for r in results if r.name in unsafe_packages 222 | } 223 | packages = {r for r in results if r.name not in unsafe_packages} 224 | 225 | if packages: 226 | for ireq in sorted(packages, key=self._sort_key): 227 | if has_hashes and not hashes.get(ireq): 228 | yield MESSAGE_UNHASHED_PACKAGE 229 | warn_uninstallable = True 230 | line = self._format_requirement( 231 | ireq, markers.get(key_from_ireq(ireq)), hashes=hashes 232 | ) 233 | yield line 234 | yielded = True 235 | 236 | if unsafe_requirements: 237 | yield "" 238 | yielded = True 239 | if has_hashes and not self.allow_unsafe: 240 | yield MESSAGE_UNSAFE_PACKAGES_UNPINNED 241 | warn_uninstallable = True 242 | else: 243 | yield MESSAGE_UNSAFE_PACKAGES 244 | 245 | for ireq in sorted(unsafe_requirements, key=self._sort_key): 246 | ireq_key = key_from_ireq(ireq) 247 | if not self.allow_unsafe: 248 | yield comment(f"# {ireq_key}") 249 | else: 250 | line = self._format_requirement( 251 | ireq, marker=markers.get(ireq_key), hashes=hashes 252 | ) 253 | yield line 254 | 255 | # Yield even when there's no real content, so that blank files are written 256 | if not yielded: 257 | yield "" 258 | 259 | if warn_uninstallable: 260 | log.warning(MESSAGE_UNINSTALLABLE) 261 | 262 | def write( 263 | 
self, 264 | results: set[InstallRequirement], 265 | unsafe_requirements: set[InstallRequirement], 266 | unsafe_packages: set[str], 267 | markers: dict[str, Marker], 268 | hashes: dict[InstallRequirement, set[str]] | None, 269 | ) -> None: 270 | if not self.dry_run: 271 | dst_file = io.TextIOWrapper( 272 | self.dst_file, 273 | encoding="utf8", 274 | newline=self.linesep, 275 | line_buffering=True, 276 | ) 277 | try: 278 | for line in self._iter_lines( 279 | results, unsafe_requirements, unsafe_packages, markers, hashes 280 | ): 281 | if self.dry_run: 282 | # Bypass the log level to always print this during a dry run 283 | log.log(line) 284 | else: 285 | log.info(line) 286 | dst_file.write(unstyle(line)) 287 | dst_file.write("\n") 288 | finally: 289 | if not self.dry_run: 290 | dst_file.detach() 291 | 292 | def _format_requirement( 293 | self, 294 | ireq: InstallRequirement, 295 | marker: Marker | None = None, 296 | hashes: dict[InstallRequirement, set[str]] | None = None, 297 | ) -> str: 298 | ireq_hashes = (hashes if hashes is not None else {}).get(ireq) 299 | 300 | line = format_requirement(ireq, marker=marker, hashes=ireq_hashes) 301 | if self.strip_extras: 302 | line = strip_extras(line) 303 | 304 | if not self.annotate: 305 | return line 306 | 307 | # Annotate what packages or reqs-ins this package is required by 308 | required_by = set() 309 | if hasattr(ireq, "_source_ireqs"): 310 | required_by |= { 311 | _comes_from_as_string(src_ireq.comes_from) 312 | for src_ireq in ireq._source_ireqs 313 | if src_ireq.comes_from 314 | } 315 | 316 | # Filter out the origin install requirements for extras. 
317 | # See https://github.com/jazzband/pip-tools/issues/2003 318 | if ireq.comes_from and ( 319 | isinstance(ireq.comes_from, str) or ireq.comes_from.name != ireq.name 320 | ): 321 | required_by.add(_comes_from_as_string(ireq.comes_from)) 322 | 323 | required_by |= set(getattr(ireq, "_required_by", set())) 324 | 325 | if required_by: 326 | if self.annotation_style == "split": 327 | annotation = annotation_style_split(required_by) 328 | sep = "\n " 329 | elif self.annotation_style == "line": 330 | annotation = annotation_style_line(required_by) 331 | sep = "\n " if ireq_hashes else " " 332 | else: # pragma: no cover 333 | raise ValueError("Invalid value for annotation style") 334 | if self.strip_extras: 335 | annotation = strip_extras(annotation) 336 | # 24 is one reasonable column size to use here, that we've used in the past 337 | lines = f"{line:24}{sep}{comment(annotation)}".splitlines() 338 | line = "\n".join(ln.rstrip() for ln in lines) 339 | 340 | return line 341 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [build-system] 2 | requires = ["setuptools>=63", "setuptools_scm[toml]>=7"] 3 | build-backend = "setuptools.build_meta" 4 | 5 | [project] 6 | # https://peps.python.org/pep-0621/#readme 7 | requires-python = ">=3.8" 8 | dynamic = ["version"] 9 | name = "pip-tools" 10 | description = "pip-tools keeps your pinned dependencies fresh." 
11 | readme = "README.md" 12 | authors = [{ "name" = "Vincent Driessen", "email" = "me@nvie.com" }] 13 | license = { text = "BSD" } 14 | classifiers = [ 15 | "Development Status :: 5 - Production/Stable", 16 | "Environment :: Console", 17 | "Intended Audience :: Developers", 18 | "Intended Audience :: System Administrators", 19 | "License :: OSI Approved :: BSD License", 20 | "Operating System :: OS Independent", 21 | "Programming Language :: Python :: 3 :: Only", 22 | "Programming Language :: Python :: 3", 23 | "Programming Language :: Python :: 3.10", 24 | "Programming Language :: Python :: 3.11", 25 | "Programming Language :: Python :: 3.8", 26 | "Programming Language :: Python :: 3.9", 27 | "Programming Language :: Python :: Implementation :: CPython", 28 | "Programming Language :: Python :: Implementation :: PyPy", 29 | "Programming Language :: Python", 30 | "Topic :: Software Development :: Quality Assurance", 31 | "Topic :: Software Development :: Testing", 32 | "Topic :: System :: Systems Administration", 33 | "Topic :: Utilities", 34 | "Typing :: Typed", 35 | ] 36 | keywords = ["pip", "requirements", "packaging"] 37 | dependencies = [ 38 | # direct dependencies 39 | "build >= 1.0.0", 40 | "click >= 8", 41 | "pip >= 22.2", 42 | "pyproject_hooks", 43 | "tomli; python_version < '3.11'", 44 | # indirect dependencies 45 | "setuptools", # typically needed when pip-tools invokes setup.py 46 | "wheel", # pip plugin needed by pip-tools 47 | 48 | ] 49 | 50 | [project.urls] 51 | homepage = "https://github.com/jazzband/pip-tools/" 52 | documentation = "https://pip-tools.readthedocs.io/en/latest/" 53 | repository = "https://github.com/jazzband/pip-tools" 54 | changelog = "https://github.com/jazzband/pip-tools/releases" 55 | 56 | [project.optional-dependencies] 57 | testing = [ 58 | "pytest >= 7.2.0", 59 | "pytest-rerunfailures", 60 | "pytest-xdist", 61 | "tomli-w", 62 | # build deps for tests 63 | "flit_core >=2,<4", 64 | "poetry_core>=1.0.0", 65 | ] 66 | coverage = 
["covdefaults", "pytest-cov"] 67 | 68 | [project.scripts] 69 | pip-compile = "piptools.scripts.compile:cli" 70 | pip-sync = "piptools.scripts.sync:cli" 71 | 72 | [tool.isort] 73 | profile = "black" 74 | add_imports = "from __future__ import annotations" 75 | 76 | [tool.mypy] 77 | disallow_untyped_defs = true 78 | disallow_any_generics = true 79 | disallow_incomplete_defs = true 80 | disallow_subclassing_any = true 81 | disallow_untyped_calls = true 82 | disallow_untyped_decorators = true 83 | ignore_missing_imports = true 84 | no_implicit_optional = true 85 | no_implicit_reexport = true 86 | strict_equality = true 87 | warn_redundant_casts = true 88 | warn_return_any = true 89 | warn_unused_configs = true 90 | warn_unused_ignores = true 91 | # Avoid error: Duplicate module named 'setup' 92 | # https://github.com/python/mypy/issues/4008 93 | exclude = "^tests/test_data/" 94 | 95 | [[tool.mypy.overrides]] 96 | module = ["tests.*"] 97 | disallow_untyped_defs = false 98 | disallow_incomplete_defs = false 99 | 100 | [tool.pytest.ini_options] 101 | addopts = [ 102 | # `pytest-xdist`: 103 | "--numprocesses=auto", 104 | 105 | # The `worksteal` distribution method is useful if the run times of different tests vary greatly, 106 | # as it ensures more efficient resource usage, improving the performance of testing. 
107 | "--dist=worksteal", 108 | 109 | # Show 20 slowest invocations: 110 | "--durations=20", 111 | ] 112 | norecursedirs = ".* build dist venv test_data piptools/_compat/*" 113 | testpaths = "tests piptools" 114 | filterwarnings = ["always"] 115 | markers = ["network: mark tests that require internet access"] 116 | 117 | [tool.setuptools.packages.find] 118 | # needed only because we did not adopt src layout yet 119 | include = ["piptools*"] 120 | 121 | [tool.setuptools_scm] 122 | local_scheme = "dirty-tag" 123 | -------------------------------------------------------------------------------- /tests/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jazzband/pip-tools/e4ed0c1e028d1ca73673a51722ba153f0c02b0c6/tests/__init__.py -------------------------------------------------------------------------------- /tests/conftest.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import json 4 | import os 5 | import platform 6 | import subprocess 7 | import sys 8 | from contextlib import contextmanager 9 | from dataclasses import dataclass, field 10 | from functools import partial 11 | from pathlib import Path 12 | from textwrap import dedent 13 | from typing import Any, cast 14 | 15 | import pytest 16 | import tomli_w 17 | from click.testing import CliRunner 18 | from pip._internal.commands.install import InstallCommand 19 | from pip._internal.index.package_finder import PackageFinder 20 | from pip._internal.models.candidate import InstallationCandidate 21 | from pip._internal.models.link import Link 22 | from pip._internal.network.session import PipSession 23 | from pip._internal.req.constructors import ( 24 | install_req_from_editable, 25 | install_req_from_line, 26 | ) 27 | from pip._internal.utils.direct_url_helpers import direct_url_from_link 28 | from pip._vendor.packaging.version import Version 29 | from 
pip._vendor.pkg_resources import Requirement 30 | 31 | from piptools._compat import Distribution 32 | from piptools.cache import DependencyCache 33 | from piptools.exceptions import NoCandidateFound 34 | from piptools.locations import DEFAULT_CONFIG_FILE_NAMES 35 | from piptools.logging import log 36 | from piptools.repositories import PyPIRepository 37 | from piptools.repositories.base import BaseRepository 38 | from piptools.resolver import BacktrackingResolver, LegacyResolver 39 | from piptools.utils import ( 40 | PIP_VERSION, 41 | as_tuple, 42 | is_url_requirement, 43 | key_from_ireq, 44 | make_install_requirement, 45 | ) 46 | 47 | from .constants import MINIMAL_WHEELS_PATH, TEST_DATA_PATH 48 | from .utils import looks_like_ci 49 | 50 | 51 | @dataclass 52 | class FakeOptions: 53 | features_enabled: list[str] = field(default_factory=list) 54 | deprecated_features_enabled: list[str] = field(default_factory=list) 55 | target_dir: str | None = None 56 | 57 | 58 | class FakeRepository(BaseRepository): 59 | def __init__(self, options: FakeOptions): 60 | self._options = options 61 | 62 | with open(os.path.join(TEST_DATA_PATH, "fake-index.json")) as f: 63 | self.index = json.load(f) 64 | 65 | with open(os.path.join(TEST_DATA_PATH, "fake-editables.json")) as f: 66 | self.editables = json.load(f) 67 | 68 | def get_hashes(self, ireq): 69 | # Some fake hashes 70 | return { 71 | "test:123", 72 | "sha256:0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef", 73 | } 74 | 75 | def find_best_match(self, ireq, prereleases=False): 76 | if ireq.editable: 77 | return ireq 78 | 79 | versions = list( 80 | ireq.specifier.filter( 81 | self.index[key_from_ireq(ireq)], prereleases=prereleases 82 | ) 83 | ) 84 | if not versions: 85 | tried_versions = [ 86 | InstallationCandidate(ireq.name, version, "https://fake.url.foo") 87 | for version in self.index[key_from_ireq(ireq)] 88 | ] 89 | raise NoCandidateFound(ireq, tried_versions, ["https://fake.url.foo"]) 90 | best_version = 
max(versions, key=Version) 91 | return make_install_requirement(key_from_ireq(ireq), best_version, ireq) 92 | 93 | def get_dependencies(self, ireq): 94 | if ireq.editable or is_url_requirement(ireq): 95 | return self.editables[str(ireq.link)] 96 | 97 | name, version, extras = as_tuple(ireq) 98 | # Store non-extra dependencies under the empty string 99 | extras += ("",) 100 | dependencies = [ 101 | dep for extra in extras for dep in self.index[name][version][extra] 102 | ] 103 | return [ 104 | install_req_from_line(dep, constraint=ireq.constraint) 105 | for dep in dependencies 106 | ] 107 | 108 | @contextmanager 109 | def allow_all_wheels(self): 110 | # No need to do an actual pip.Wheel mock here. 111 | yield 112 | 113 | @property 114 | def options(self): 115 | return self._options 116 | 117 | @property 118 | def session(self) -> PipSession: 119 | """Not used""" 120 | 121 | @property 122 | def finder(self) -> PackageFinder: 123 | """Not used""" 124 | 125 | @property 126 | def command(self) -> InstallCommand: 127 | """Not used""" 128 | 129 | 130 | def pytest_collection_modifyitems(config, items): 131 | for item in items: 132 | # Mark network tests as flaky 133 | if item.get_closest_marker("network") and looks_like_ci(): 134 | item.add_marker(pytest.mark.flaky(reruns=3, reruns_delay=2)) 135 | 136 | 137 | @pytest.fixture 138 | def fake_dist(): 139 | def _fake_dist(line, deps=None): 140 | if deps is None: 141 | deps = [] 142 | req = Requirement.parse(line) 143 | key = req.name 144 | if "==" in line: 145 | version = line.split("==")[1] 146 | else: 147 | version = "0+unknown" 148 | requires = [Requirement.parse(d) for d in deps] 149 | direct_url = None 150 | if req.url: 151 | direct_url = direct_url_from_link(Link(req.url)) 152 | return Distribution(key, version, requires, direct_url) 153 | 154 | return _fake_dist 155 | 156 | 157 | @pytest.fixture 158 | def repository(): 159 | return FakeRepository( 160 | 
@pytest.fixture
def pypi_repository(tmpdir):
    # Real PyPI-backed repository using pip's legacy resolver, with an
    # isolated per-test cache directory under tmpdir.
    return PyPIRepository(
        [
            "--index-url",
            PyPIRepository.DEFAULT_INDEX_URL,
            "--use-deprecated",
            "legacy-resolver",
        ],
        cache_dir=(tmpdir / "pypi-repo"),
    )


@pytest.fixture
def depcache(tmpdir):
    # Dependency cache rooted in the test's tmpdir so runs stay isolated.
    return DependencyCache(tmpdir / "dep-cache")


@pytest.fixture
def resolver(depcache, repository):
    # TODO: It'd be nicer if Resolver instance could be set up and then
    # use .resolve(...) on the specset, instead of passing it to
    # the constructor like this (it's not reusable)
    return partial(
        LegacyResolver, repository=repository, cache=depcache, existing_constraints={}
    )
on the specset, instead of passing it to 196 | # the constructor like this (it's not reusable) 197 | return partial( 198 | BacktrackingResolver, 199 | repository=FakeRepository(options=FakeOptions()), 200 | cache=depcache, 201 | existing_constraints={}, 202 | ) 203 | 204 | 205 | @pytest.fixture 206 | def base_resolver(depcache): 207 | return partial(LegacyResolver, cache=depcache, existing_constraints={}) 208 | 209 | 210 | @pytest.fixture 211 | def from_line(): 212 | def _from_line(*args, **kwargs): 213 | if PIP_VERSION[:2] <= (23, 0): 214 | hash_options = kwargs.pop("hash_options", {}) 215 | options = kwargs.pop("options", {}) 216 | options["hashes"] = hash_options 217 | kwargs["options"] = options 218 | return install_req_from_line(*args, **kwargs) 219 | 220 | return _from_line 221 | 222 | 223 | @pytest.fixture 224 | def from_editable(): 225 | return install_req_from_editable 226 | 227 | 228 | @pytest.fixture 229 | def runner(): 230 | cli_runner = CliRunner(mix_stderr=False) 231 | with cli_runner.isolated_filesystem(): 232 | yield cli_runner 233 | 234 | 235 | @pytest.fixture 236 | def tmpdir_cwd(tmpdir): 237 | with tmpdir.as_cwd(): 238 | yield Path(tmpdir) 239 | 240 | 241 | @pytest.fixture 242 | def make_pip_conf(tmpdir, monkeypatch): 243 | created_paths = [] 244 | 245 | def _make_pip_conf(content): 246 | pip_conf_file = "pip.conf" if os.name != "nt" else "pip.ini" 247 | path = (tmpdir / pip_conf_file).strpath 248 | 249 | with open(path, "w") as f: 250 | f.write(content) 251 | 252 | monkeypatch.setenv("PIP_CONFIG_FILE", path) 253 | 254 | created_paths.append(path) 255 | return path 256 | 257 | try: 258 | yield _make_pip_conf 259 | finally: 260 | for path in created_paths: 261 | os.remove(path) 262 | 263 | 264 | @pytest.fixture 265 | def pip_conf(make_pip_conf): 266 | return make_pip_conf( 267 | dedent( 268 | f"""\ 269 | [global] 270 | no-index = true 271 | find-links = {MINIMAL_WHEELS_PATH} 272 | """ 273 | ) 274 | ) 275 | 276 | 277 | @pytest.fixture 278 | def 
pip_with_index_conf(make_pip_conf): 279 | return make_pip_conf( 280 | dedent( 281 | f"""\ 282 | [global] 283 | index-url = http://example.com 284 | find-links = {MINIMAL_WHEELS_PATH} 285 | """ 286 | ) 287 | ) 288 | 289 | 290 | @pytest.fixture(scope="session") 291 | def make_package(tmp_path_factory): 292 | """ 293 | Make a package from a given name, version and list of required packages. 294 | """ 295 | 296 | def _make_package( 297 | name, 298 | version="0.1", 299 | install_requires=None, 300 | extras_require=None, 301 | build_system_requires=None, 302 | ): 303 | if install_requires is None: 304 | install_requires = [] 305 | 306 | if extras_require is None: 307 | extras_require = dict() 308 | 309 | install_requires_str = "[{}]".format( 310 | ",".join(f"{package!r}" for package in install_requires) 311 | ) 312 | 313 | package_dir = tmp_path_factory.mktemp("packages") / name / version 314 | package_dir.mkdir(parents=True) 315 | 316 | with (package_dir / "setup.py").open("w") as fp: 317 | fp.write( 318 | dedent( 319 | f"""\ 320 | from setuptools import setup 321 | setup( 322 | name={name!r}, 323 | version={version!r}, 324 | author="pip-tools", 325 | author_email="pip-tools@localhost", 326 | url="https://github.com/jazzband/pip-tools", 327 | install_requires={install_requires_str}, 328 | extras_require={extras_require}, 329 | py_modules=[{name!r}], 330 | ) 331 | """ 332 | ) 333 | ) 334 | 335 | # Create a README to avoid setuptools warnings. 336 | (package_dir / "README").touch() 337 | 338 | # Create a module to make the package importable. 
339 | (package_dir / name).with_suffix(".py").touch() 340 | 341 | if build_system_requires: 342 | with (package_dir / "pyproject.toml").open("w") as fp: 343 | fp.write( 344 | dedent( 345 | f"""\ 346 | [build-system] 347 | requires = {json.dumps(build_system_requires)} 348 | """ 349 | ) 350 | ) 351 | 352 | return package_dir 353 | 354 | return _make_package 355 | 356 | 357 | @pytest.fixture(scope="session") 358 | def run_setup_file(): 359 | """ 360 | Run a setup.py file from a given package dir. 361 | """ 362 | 363 | def _run_setup_file(package_dir_path, *args): 364 | setup_file = package_dir_path / "setup.py" 365 | return subprocess.run( 366 | [sys.executable, str(setup_file), *args], 367 | cwd=str(package_dir_path), 368 | stdout=subprocess.DEVNULL, 369 | check=True, 370 | ) # nosec 371 | 372 | return _run_setup_file 373 | 374 | 375 | @pytest.fixture(scope="session") 376 | def make_wheel(run_setup_file): 377 | """ 378 | Make a wheel distribution from a given package dir. 379 | """ 380 | 381 | def _make_wheel(package_dir, dist_dir, *args): 382 | return run_setup_file( 383 | package_dir, "bdist_wheel", "--dist-dir", str(dist_dir), *args 384 | ) 385 | 386 | return _make_wheel 387 | 388 | 389 | @pytest.fixture 390 | def make_sdist(run_setup_file): 391 | """ 392 | Make a source distribution from a given package dir. 393 | """ 394 | 395 | def _make_sdist(package_dir, dist_dir, *args): 396 | return run_setup_file(package_dir, "sdist", "--dist-dir", str(dist_dir), *args) 397 | 398 | return _make_sdist 399 | 400 | 401 | @pytest.fixture 402 | def make_module(tmpdir): 403 | """ 404 | Make a metadata file with the given name and content and a fake module. 
405 | """ 406 | 407 | def _make_module(fname, content): 408 | path = os.path.join(tmpdir, "sample_lib") 409 | os.mkdir(path) 410 | path = os.path.join(tmpdir, "sample_lib", "__init__.py") 411 | with open(path, "w") as stream: 412 | stream.write("'example module'\n__version__ = '1.2.3'") 413 | if fname == "setup.cfg": 414 | path = os.path.join(tmpdir, "pyproject.toml") 415 | with open(path, "w") as stream: 416 | stream.write( 417 | "\n".join( 418 | ( 419 | "[build-system]", 420 | 'requires = ["setuptools"]', 421 | 'build-backend = "setuptools.build_meta"', 422 | ) 423 | ) 424 | ) 425 | path = os.path.join(tmpdir, fname) 426 | with open(path, "w") as stream: 427 | stream.write(dedent(content)) 428 | return path 429 | 430 | return _make_module 431 | 432 | 433 | @pytest.fixture(scope="session") 434 | def fake_dists(tmp_path_factory, make_package, make_wheel): 435 | """ 436 | Generate distribution packages `small-fake-*` 437 | """ 438 | dists_path = tmp_path_factory.mktemp("dists") 439 | pkgs = [ 440 | make_package("small-fake-a", version="0.1"), 441 | make_package("small-fake-b", version="0.2"), 442 | make_package("small-fake-c", version="0.3"), 443 | ] 444 | for pkg in pkgs: 445 | make_wheel(pkg, dists_path) 446 | return dists_path 447 | 448 | 449 | @pytest.fixture(scope="session") 450 | def fake_dists_with_build_deps(tmp_path_factory, make_package, make_wheel): 451 | """Generate distribution packages with names that make sense for testing build deps.""" 452 | dists_path = tmp_path_factory.mktemp("dists") 453 | pkgs = [ 454 | make_package( 455 | "fake_static_build_dep", 456 | version="0.1", 457 | install_requires=["fake_transient_run_dep"], 458 | build_system_requires=["fake_transient_build_dep"], 459 | ), 460 | make_package("fake_dynamic_build_dep_for_all", version="0.2"), 461 | make_package("fake_dynamic_build_dep_for_sdist", version="0.3"), 462 | make_package("fake_dynamic_build_dep_for_wheel", version="0.4"), 463 | 
make_package("fake_dynamic_build_dep_for_editable", version="0.5"), 464 | make_package("fake_direct_runtime_dep", version="0.1"), 465 | make_package("fake_direct_extra_runtime_dep", version="0.2"), 466 | make_package("fake_transient_build_dep", version="0.3"), 467 | make_package("fake_transient_run_dep", version="0.3"), 468 | ] 469 | for pkg in pkgs: 470 | make_wheel(pkg, dists_path) 471 | return dists_path 472 | 473 | 474 | @pytest.fixture 475 | def venv(tmp_path): 476 | """Create a temporary venv and get the path of its directory of executables.""" 477 | subprocess.run( 478 | [sys.executable, "-m", "venv", os.fspath(tmp_path)], 479 | check=True, 480 | ) 481 | return tmp_path / ("Scripts" if platform.system() == "Windows" else "bin") 482 | 483 | 484 | @pytest.fixture(autouse=True) 485 | def _reset_log(): 486 | """ 487 | Since piptools.logging.log is a global variable we have to restore its initial 488 | state. Some tests can change logger verbosity which might cause a conflict 489 | with other tests that depend on it. 490 | """ 491 | log.reset() 492 | 493 | 494 | @pytest.fixture 495 | def make_config_file(tmpdir_cwd): 496 | """ 497 | Make a config file for pip-tools with a given parameter set to a specific 498 | value, returning a ``pathlib.Path`` to the config file. 
499 | """ 500 | 501 | def _maker( 502 | pyproject_param: str, 503 | new_default: Any, 504 | config_file_name: str = DEFAULT_CONFIG_FILE_NAMES[0], 505 | section: str = "pip-tools", 506 | subsection: str | None = None, 507 | ) -> Path: 508 | # Create a nested directory structure if config_file_name includes directories 509 | config_dir = (tmpdir_cwd / config_file_name).parent 510 | config_dir.mkdir(exist_ok=True, parents=True) 511 | 512 | # Make a config file with this one config default override 513 | config_file = tmpdir_cwd / config_file_name 514 | 515 | nested_config = {pyproject_param: new_default} 516 | if subsection: 517 | nested_config = {subsection: nested_config} 518 | config_to_dump = {"tool": {section: nested_config}} 519 | config_file.write_text(tomli_w.dumps(config_to_dump)) 520 | return cast(Path, config_file.relative_to(tmpdir_cwd)) 521 | 522 | return _maker 523 | -------------------------------------------------------------------------------- /tests/constants.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import os 4 | 5 | TEST_DATA_PATH = os.path.join(os.path.dirname(__file__), "test_data") 6 | MINIMAL_WHEELS_PATH = os.path.join(TEST_DATA_PATH, "minimal_wheels") 7 | PACKAGES_PATH = os.path.join(TEST_DATA_PATH, "packages") 8 | PACKAGES_RELATIVE_PATH = os.path.relpath( 9 | PACKAGES_PATH, os.path.commonpath([os.getcwd(), PACKAGES_PATH]) 10 | ) 11 | -------------------------------------------------------------------------------- /tests/test_build.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import pathlib 4 | import shutil 5 | 6 | import pytest 7 | 8 | from piptools.build import ( 9 | ProjectMetadata, 10 | StaticProjectMetadata, 11 | build_project_metadata, 12 | maybe_statically_parse_project_metadata, 13 | ) 14 | from tests.constants import PACKAGES_PATH 15 | 16 | 17 | 
@pytest.mark.network 18 | def test_build_project_metadata_resolved_correct_build_dependencies( 19 | fake_dists_with_build_deps, tmp_path, monkeypatch 20 | ): 21 | """Test that the resolved build dependencies are correct. 22 | 23 | Because this is a slow process we test it only for one build target and rely 24 | on ``test_all_extras_and_all_build_deps`` to test that it works with multiple build 25 | targets. 26 | """ 27 | # When used as argument to the runner it is not passed to pip 28 | monkeypatch.setenv("PIP_FIND_LINKS", fake_dists_with_build_deps) 29 | src_pkg_path = pathlib.Path(PACKAGES_PATH) / "small_fake_with_build_deps" 30 | shutil.copytree(src_pkg_path, tmp_path, dirs_exist_ok=True) 31 | src_file = tmp_path / "setup.py" 32 | metadata = build_project_metadata( 33 | src_file, ("editable",), attempt_static_parse=False, isolated=True, quiet=False 34 | ) 35 | assert isinstance(metadata, ProjectMetadata) 36 | build_requirements = sorted(r.name for r in metadata.build_requirements) 37 | assert build_requirements == [ 38 | "fake_dynamic_build_dep_for_all", 39 | "fake_dynamic_build_dep_for_editable", 40 | "fake_static_build_dep", 41 | "setuptools", 42 | "wheel", 43 | ] 44 | 45 | 46 | def test_build_project_metadata_static(tmp_path): 47 | """Test static parsing branch of build_project_metadata""" 48 | src_pkg_path = pathlib.Path(PACKAGES_PATH) / "small_fake_with_pyproject" 49 | shutil.copytree(src_pkg_path, tmp_path, dirs_exist_ok=True) 50 | src_file = tmp_path / "pyproject.toml" 51 | metadata = build_project_metadata( 52 | src_file, (), attempt_static_parse=True, isolated=True, quiet=False 53 | ) 54 | assert isinstance(metadata, StaticProjectMetadata) 55 | requirements = [(r.name, r.extras, str(r.markers)) for r in metadata.requirements] 56 | requirements.sort(key=lambda x: x[0]) 57 | assert requirements == [ 58 | ("fake_direct_extra_runtime_dep", {"with_its_own_extra"}, 'extra == "x"'), 59 | ("fake_direct_runtime_dep", set(), "None"), 60 | ] 61 | assert 
metadata.extras == ("x",) 62 | 63 | 64 | def test_build_project_metadata_raises_error(tmp_path): 65 | src_pkg_path = pathlib.Path(PACKAGES_PATH) / "small_fake_with_build_deps" 66 | shutil.copytree(src_pkg_path, tmp_path, dirs_exist_ok=True) 67 | src_file = tmp_path / "setup.py" 68 | with pytest.raises( 69 | ValueError, match="Cannot execute the PEP 517 optional.* hooks statically" 70 | ): 71 | build_project_metadata( 72 | src_file, 73 | ("editable",), 74 | attempt_static_parse=True, 75 | isolated=True, 76 | quiet=False, 77 | ) 78 | 79 | 80 | def test_static_parse_valid(tmp_path): 81 | src_file = tmp_path / "pyproject.toml" 82 | 83 | valid = """ 84 | [project] 85 | name = "foo" 86 | version = "0.1.0" 87 | dependencies = ["bar>=1"] 88 | [project.optional-dependencies] 89 | baz = ["qux[extra]"] 90 | """ 91 | src_file.write_text(valid) 92 | metadata = maybe_statically_parse_project_metadata(src_file) 93 | assert isinstance(metadata, StaticProjectMetadata) 94 | assert [str(r.req) for r in metadata.requirements] == ["bar>=1", "qux[extra]"] 95 | assert metadata.extras == ("baz",) 96 | 97 | 98 | def test_static_parse_invalid(tmp_path): 99 | src_file = tmp_path / "pyproject.toml" 100 | 101 | invalid_toml = """this is not valid toml""" 102 | src_file.write_text(invalid_toml) 103 | assert maybe_statically_parse_project_metadata(src_file) is None 104 | 105 | no_pep621 = """ 106 | [build-system] 107 | requires = ["setuptools"] 108 | """ 109 | src_file.write_text(no_pep621) 110 | assert maybe_statically_parse_project_metadata(src_file) is None 111 | 112 | invalid_pep621 = """ 113 | [project] 114 | # no name 115 | version = "0.1.0" 116 | """ 117 | src_file.write_text(invalid_pep621) 118 | assert maybe_statically_parse_project_metadata(src_file) is None 119 | 120 | dynamic_deps = """ 121 | [project] 122 | name = "foo" 123 | dynamic = ["dependencies"] 124 | """ 125 | src_file.write_text(dynamic_deps) 126 | assert maybe_statically_parse_project_metadata(src_file) is None 127 | 128 | 
dynamic_optional_deps = """ 129 | [project] 130 | name = "foo" 131 | dynamic = ["optional-dependencies"] 132 | """ 133 | src_file.write_text(dynamic_optional_deps) 134 | assert maybe_statically_parse_project_metadata(src_file) is None 135 | 136 | src_file = tmp_path / "setup.py" 137 | src_file.write_text("print('hello')") 138 | assert maybe_statically_parse_project_metadata(src_file) is None 139 | -------------------------------------------------------------------------------- /tests/test_cache.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import os 4 | import sys 5 | from contextlib import contextmanager 6 | from shutil import rmtree 7 | from tempfile import NamedTemporaryFile 8 | 9 | import pytest 10 | 11 | from piptools.cache import CorruptCacheError, DependencyCache, read_cache_file 12 | 13 | 14 | @contextmanager 15 | def _read_cache_file_helper(to_write): 16 | """ 17 | On enter, create the file with the given string, and then yield its path. 18 | On exit, delete that file. 19 | 20 | :param str to_write: the content to write to the file 21 | :yield: the path to the temporary file 22 | """ 23 | # Create the file and write to it 24 | cache_file = NamedTemporaryFile(mode="w", delete=False) 25 | try: 26 | cache_file.write(to_write) 27 | cache_file.close() 28 | 29 | # Yield the path to the file 30 | yield cache_file.name 31 | 32 | finally: 33 | # Delete the file on exit 34 | os.remove(cache_file.name) 35 | 36 | 37 | def test_read_cache_file_not_json(): 38 | """ 39 | A cache file that's not JSON should throw a corrupt cache error. 
40 | """ 41 | with _read_cache_file_helper("not json") as cache_file_name: 42 | with pytest.raises( 43 | CorruptCacheError, 44 | match="The dependency cache seems to have been corrupted.", 45 | ): 46 | read_cache_file(cache_file_name) 47 | 48 | 49 | def test_read_cache_file_wrong_format(): 50 | """ 51 | A cache file with a wrong "__format__" value should throw an assertion error. 52 | """ 53 | with _read_cache_file_helper('{"__format__": 2}') as cache_file_name: 54 | with pytest.raises(ValueError, match=r"^Unknown cache file format$"): 55 | read_cache_file(cache_file_name) 56 | 57 | 58 | def test_read_cache_file_successful(): 59 | """ 60 | A good cache file. 61 | """ 62 | with _read_cache_file_helper( 63 | '{"__format__": 1, "dependencies": "success"}' 64 | ) as cache_file_name: 65 | assert "success" == read_cache_file(cache_file_name) 66 | 67 | 68 | def test_read_cache_does_not_exist(tmpdir): 69 | cache = DependencyCache(cache_dir=tmpdir) 70 | assert cache.cache == {} 71 | 72 | 73 | @pytest.mark.skipif( 74 | sys.platform == "win32", reason="os.fchmod() not available on Windows" 75 | ) 76 | def test_read_cache_permission_error(tmpdir): 77 | cache = DependencyCache(cache_dir=tmpdir) 78 | with open(cache._cache_file, "w") as fp: 79 | os.fchmod(fp.fileno(), 0o000) 80 | with pytest.raises(IOError, match="Permission denied"): 81 | cache.cache 82 | 83 | 84 | def test_reverse_dependencies(from_line, tmpdir): 85 | # Create a cache object. The keys are packages, and the values are lists 86 | # of packages on which the keys depend. 
87 | cache = DependencyCache(cache_dir=tmpdir) 88 | cache[from_line("top==1.2")] = ["middle>=0.3", "bottom>=5.1.2"] 89 | cache[from_line("top[xtra]==1.2")] = ["middle>=0.3", "bottom>=5.1.2", "bonus==0.4"] 90 | cache[from_line("middle==0.4")] = ["bottom<6"] 91 | cache[from_line("bottom==5.3.5")] = [] 92 | cache[from_line("bonus==0.4")] = [] 93 | 94 | # In this case, we're using top 1.2 without an extra, so the "bonus" package 95 | # is not depended upon. 96 | reversed_no_extra = cache.reverse_dependencies( 97 | [ 98 | from_line("top==1.2"), 99 | from_line("middle==0.4"), 100 | from_line("bottom==5.3.5"), 101 | from_line("bonus==0.4"), 102 | ] 103 | ) 104 | assert reversed_no_extra == {"middle": {"top"}, "bottom": {"middle", "top"}} 105 | 106 | # Now we're using top 1.2 with the "xtra" extra, so it depends 107 | # on the "bonus" package. 108 | reversed_extra = cache.reverse_dependencies( 109 | [ 110 | from_line("top[xtra]==1.2"), 111 | from_line("middle==0.4"), 112 | from_line("bottom==5.3.5"), 113 | from_line("bonus==0.4"), 114 | ] 115 | ) 116 | assert reversed_extra == { 117 | "middle": {"top"}, 118 | "bottom": {"middle", "top"}, 119 | "bonus": {"top"}, 120 | } 121 | 122 | # Clean up our temp directory 123 | rmtree(tmpdir) 124 | -------------------------------------------------------------------------------- /tests/test_cli_sync.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import os 4 | import subprocess 5 | import sys 6 | from unittest import mock 7 | 8 | import pytest 9 | from pip._vendor.packaging.version import Version 10 | 11 | from piptools.scripts import sync 12 | from piptools.scripts.sync import cli 13 | 14 | 15 | @pytest.fixture(autouse=True) 16 | def _temp_default_reqs(tmp_path, monkeypatch): 17 | monkeypatch.setattr( 18 | sync, "DEFAULT_REQUIREMENTS_FILE", str(tmp_path / "requirements.txt") 19 | ) 20 | 21 | 22 | def test_run_as_module_sync(): 23 | """piptools can be 
def _write_default_reqs(content):
    """Write ``content`` to the (test-redirected) default requirements file."""
    with open(sync.DEFAULT_REQUIREMENTS_FILE, "w") as fh:
        fh.write(content)


def test_run_as_module_sync():
    """piptools can be run as ``python -m piptools ...``."""

    result = subprocess.run(
        [sys.executable, "-m", "piptools", "sync", "--help"],
        stdout=subprocess.PIPE,
        check=True,
    )

    # Should have run pip-sync successfully.
    assert result.stdout.startswith(b"Usage:")
    assert b"Synchronize virtual environment with" in result.stdout


@mock.patch("piptools.sync.run")
def test_quiet_option(run, runner):
    """sync command can be run with `--quiet` or `-q` flag."""

    _write_default_reqs("six==1.10.0")

    out = runner.invoke(cli, ["-q"])
    assert not out.stderr_bytes
    assert out.exit_code == 0

    # for every call to pip ensure the `-q` flag is set
    assert run.call_count == 2
    for call in run.call_args_list:
        assert "-q" in call[0][0]


@mock.patch("piptools.sync.run")
def test_quiet_option_when_up_to_date(run, runner):
    """
    Sync should output nothing when everything is up to date and quiet option is set.
    """
    _write_default_reqs("")

    with mock.patch("piptools.sync.diff", return_value=(set(), set())):
        out = runner.invoke(cli, ["-q"])

    assert not out.stderr_bytes
    assert out.exit_code == 0
    run.assert_not_called()


def test_no_requirements_file(runner):
    """
    It should raise an error if there are no input files
    and a requirements.txt file does not exist.
    """
    out = runner.invoke(cli)

    assert "No requirement files given" in out.stderr
    assert out.exit_code == 2


def test_input_files_with_dot_in_extension(runner, tmp_path):
    """
    It should raise an error if some of the input files have .in extension.
    """
    req_in = tmp_path / "requirements.in"
    req_in.write_text("six==1.10.0")

    out = runner.invoke(cli, [str(req_in)])

    assert "ERROR: Some input files have the .in extension" in out.stderr
    assert out.exit_code == 2


def test_force_files_with_dot_in_extension(runner, tmp_path):
    """
    It should print a warning and sync anyway if some of the input files
    have .in extension.
    """
    req_in = tmp_path / "requirements.in"
    req_in.write_text("six==1.10.0")

    with mock.patch("piptools.sync.run"):
        out = runner.invoke(cli, [str(req_in), "--force"])

    assert "WARNING: Some input files have the .in extension" in out.stderr
    assert out.exit_code == 0


@pytest.mark.parametrize(
    ("req_lines", "should_raise"),
    (
        (["six>1.10.0", "six<1.10.0"], True),
        (
            ["six>1.10.0 ; python_version>='3.0'", "six<1.10.0 ; python_version<'3.0'"],
            False,
        ),
    ),
)
def test_merge_error(req_lines, should_raise, runner):
    """
    Sync command should raise an error if there are merge errors.
    It should not raise an error if otherwise incompatible requirements
    are isolated by exclusive environment markers.
    """
    _write_default_reqs("".join(line + "\n" for line in req_lines))

    with mock.patch("piptools.sync.run"):
        out = runner.invoke(cli, ["-n"])

    if should_raise:
        assert out.exit_code == 2
        assert "Incompatible requirements found" in out.stderr
    else:
        # Dry run with pending changes exits 1.
        assert out.exit_code == 1


@pytest.mark.parametrize(
    "req_line",
    (
        "file:.",
        "-e file:.",
    ),
)
@mock.patch("piptools.sync.run")
def test_merge_no_name_urls(run, req_line, runner, tmp_path):
    """
    Test sync succeeds when merging requirements that lack names.
    """
    reqs_paths = [
        (tmp_path / name) for name in ("requirements.txt", "dev_requirements.txt")
    ]

    for reqs_path in reqs_paths:
        reqs_path.write_text(f"{req_line} \n")

    out = runner.invoke(cli, [str(path) for path in reqs_paths])
    assert out.exit_code == 0
    assert run.call_count == 2


@pytest.mark.parametrize(
    ("cli_flags", "expected_install_flags"),
    (
        (
            ["--find-links", "./libs1", "--find-links", "./libs2"],
            ["--find-links", "./libs1", "--find-links", "./libs2"],
        ),
        (["--no-index"], ["--no-index"]),
        (
            ["--index-url", "https://example.com"],
            ["--index-url", "https://example.com"],
        ),
        (
            ["--extra-index-url", "https://foo", "--extra-index-url", "https://bar"],
            ["--extra-index-url", "https://foo", "--extra-index-url", "https://bar"],
        ),
        (
            ["--trusted-host", "foo", "--trusted-host", "bar"],
            ["--trusted-host", "foo", "--trusted-host", "bar"],
        ),
        (["--user"], ["--user"]),
        (["--cert", "foo.crt"], ["--cert", "foo.crt"]),
        (["--client-cert", "foo.pem"], ["--client-cert", "foo.pem"]),
        (
            ["--pip-args", "--no-cache-dir --no-deps --no-warn-script-location"],
            ["--no-cache-dir", "--no-deps", "--no-warn-script-location"],
        ),
        (["--pip-args='--cache-dir=/tmp'"], ["--cache-dir=/tmp"]),
        (
            ["--pip-args=\"--cache-dir='/tmp/cache dir with spaces/'\""],
            ["--cache-dir='/tmp/cache dir with spaces/'"],
        ),
    ),
)
@mock.patch("piptools.sync.run")
def test_pip_install_flags(run, cli_flags, expected_install_flags, runner):
    """
    Test the cli flags have to be passed to the pip install command.
    """
    _write_default_reqs("six==1.10.0")

    runner.invoke(cli, cli_flags)

    call_args = [call[0][0] for call in run.call_args_list]
    # Everything after "<exe> -m pip install -r <file>" is the flag tail.
    called_install_options = [args[6:] for args in call_args if args[3] == "install"]
    assert called_install_options == [expected_install_flags], "Called args: {}".format(
        call_args
    )


@pytest.mark.parametrize(
    "install_flags",
    (
        ["--no-index"],
        ["--index-url", "https://example.com"],
        ["--extra-index-url", "https://example.com"],
        ["--find-links", "./libs1"],
        ["--trusted-host", "example.com"],
        ["--no-binary", ":all:"],
        ["--only-binary", ":all:"],
    ),
)
@mock.patch("piptools.sync.run")
def test_pip_install_flags_in_requirements_file(run, runner, install_flags):
    """
    Test the options from requirements.txt file pass to the pip install command.
    """
    _write_default_reqs(" ".join(install_flags) + "\n" + "six==1.10.0")

    out = runner.invoke(cli)
    assert out.exit_code == 0, out

    # Make sure pip install command has expected options
    call_args = [call[0][0] for call in run.call_args_list]
    called_install_options = [args[6:] for args in call_args if args[3] == "install"]
    assert called_install_options == [install_flags], f"Called args: {call_args}"


@mock.patch("piptools.sync.run")
def test_sync_ask_declined(run, runner):
    """
    Make sure nothing is installed if the confirmation is declined
    """
    _write_default_reqs("small-fake-a==1.10.0")

    runner.invoke(cli, ["--ask"], input="n\n")

    run.assert_not_called()


@mock.patch("piptools.sync.run")
def test_sync_ask_accepted(run, runner):
    """
    Make sure pip is called when the confirmation is accepted (even if
    --dry-run is given)
    """
    _write_default_reqs("small-fake-a==1.10.0")

    runner.invoke(cli, ["--ask", "--dry-run"], input="y\n")

    assert run.call_count == 2


def test_sync_dry_run_returns_non_zero_exit_code(runner):
    """
    Make sure non-zero exit code is returned when --dry-run is given.
    """
    _write_default_reqs("small-fake-a==1.10.0")

    out = runner.invoke(cli, ["--dry-run"])

    assert out.exit_code == 1


@mock.patch("piptools.sync.run")
def test_python_executable_option(
    run,
    runner,
    fake_dist,
):
    """
    Make sure sync command can run with `--python-executable` option.
    """
    _write_default_reqs("small-fake-a==1.10.0")

    custom_executable = os.path.abspath(sys.executable)

    runner.invoke(cli, ["--python-executable", custom_executable])

    assert run.call_count == 2

    call_args = [call[0][0] for call in run.call_args_list]
    called_uninstall_options = [args[:5] for args in call_args if args[3] == "uninstall"]
    called_install_options = [args[:-1] for args in call_args if args[3] == "install"]

    assert called_uninstall_options == [
        [custom_executable, "-m", "pip", "uninstall", "-y"]
    ]
    assert called_install_options == [[custom_executable, "-m", "pip", "install", "-r"]]


@pytest.mark.parametrize(
    "python_executable",
    (
        "/tmp/invalid_executable",
        "invalid_python",
    ),
)
def test_invalid_python_executable(runner, python_executable):
    """Sync must reject a --python-executable that cannot be resolved."""
    _write_default_reqs("small-fake-a==1.10.0")

    out = runner.invoke(cli, ["--python-executable", python_executable])
    assert out.exit_code == 2, out
    message = "Could not resolve '{}' as valid executable path or alias.\n"
    assert out.stderr == message.format(python_executable)


@mock.patch("piptools.scripts.sync.get_pip_version_for_python_executable")
def test_invalid_pip_version_in_python_executable(
    get_pip_version_for_python_executable, runner, tmp_path
):
    """Sync must reject a target interpreter whose pip is too old."""
    _write_default_reqs("small-fake-a==1.10.0")

    custom_executable = tmp_path / "custom_executable"
    custom_executable.write_text("")

    custom_executable.chmod(0o700)

    get_pip_version_for_python_executable.return_value = Version("19.1")

    out = runner.invoke(cli, ["--python-executable", str(custom_executable)])
    assert out.exit_code == 2, out
    message = (
        "Target python executable '{}' has pip version 19.1 installed. "
        "Version"  # ">=20.3 is expected.\n" part is omitted
    )
    assert out.stderr.startswith(message.format(custom_executable))


@mock.patch("piptools.sync.run")
def test_default_python_executable_option(run, runner):
    """
    Make sure sys.executable is used when --python-executable is not provided.
    """
    _write_default_reqs("small-fake-a==1.10.0")

    runner.invoke(cli)

    assert run.call_count == 2

    call_args = [call[0][0] for call in run.call_args_list]
    called_install_options = [args[:-1] for args in call_args if args[3] == "install"]
    assert called_install_options == [
        [
            sys.executable,
            "-m",
            "pip",
            "install",
            "-r",
        ]
    ]


@mock.patch("piptools.sync.run")
def test_default_config_option(run, runner, make_config_file, tmpdir_cwd):
    """A default-named config file is picked up without --config."""
    make_config_file("dry-run", True)

    _write_default_reqs("six==1.10.0")

    out = runner.invoke(cli)

    assert out.exit_code == 1
    assert "Would install:" in out.stdout


@mock.patch("piptools.sync.run")
def test_config_option(run, runner, make_config_file):
    """An explicit --config path is honored."""
    config_file = make_config_file("dry-run", True)

    _write_default_reqs("six==1.10.0")

    out = runner.invoke(cli, ["--config", config_file.as_posix()])

    assert out.exit_code == 1
    assert "Would install:" in out.stdout


@mock.patch("piptools.sync.run")
def test_no_config_option_overrides_config_with_defaults(run, runner, make_config_file):
    """--no-config wins even when --config is also given."""
    config_file = make_config_file("dry-run", True)

    _write_default_reqs("six==1.10.0")

    out = runner.invoke(cli, ["--no-config", "--config", config_file.as_posix()])

    assert out.exit_code == 0
    assert "Would install:" not in out.stdout


@mock.patch("piptools.sync.run")
def test_raise_error_on_unknown_config_option(run, runner, tmp_path, make_config_file):
    """An unrecognized key in the config file is a usage error."""
    config_file = make_config_file("unknown-option", True)

    _write_default_reqs("six==1.10.0")

    out = runner.invoke(cli, ["--config", config_file.as_posix()])

    assert out.exit_code == 2
    assert "No such config key 'unknown_option'" in out.stderr


@mock.patch("piptools.sync.run")
def test_raise_error_on_invalid_config_option(run, runner, tmp_path, make_config_file):
    """A config value of the wrong type is a usage error."""
    config_file = make_config_file("dry-run", ["invalid", "value"])

    _write_default_reqs("six==1.10.0")

    out = runner.invoke(cli, ["--config", config_file.as_posix()])

    assert out.exit_code == 2
    assert "Invalid value for config key 'dry_run': ['invalid', 'value']" in out.stderr


@mock.patch("piptools.sync.run")
def test_allow_in_config_pip_compile_option(run, runner, tmp_path, make_config_file):
    """pip-compile-only keys in the shared config section are tolerated by sync."""
    config_file = make_config_file("generate-hashes", True)  # pip-compile's option

    _write_default_reqs("six==1.10.0")

    out = runner.invoke(cli, ["--verbose", "--config", config_file.as_posix()])

    assert out.exit_code == 0
    assert "Using pip-tools configuration defaults found" in out.stderr


@mock.patch("piptools.sync.run")
def test_tool_specific_config_option(run, runner, make_config_file):
    """A [tool.pip-tools.sync] subsection applies to the sync command."""
    config_file = make_config_file(
        "dry-run", True, section="pip-tools", subsection="sync"
    )

    _write_default_reqs("six==1.10.0")

    out = runner.invoke(cli, ["--config", config_file.as_posix()])

    assert out.exit_code == 1
    assert "Would install:" in out.stdout
-------------------------------------------------------------------------------- 1 | { 2 | "git+git://example.org/django.git#egg=django": [], 3 | "git+https://github.com/celery/billiard#egg=billiard==3.5.9999": [] 4 | } 5 | -------------------------------------------------------------------------------- /tests/test_data/fake-index.json: -------------------------------------------------------------------------------- 1 | { 2 | "aiohttp": { 3 | "3.6.2": { "": ["yarl"] } 4 | }, 5 | "anyjson": { 6 | "0.3.3": { "": [] } 7 | }, 8 | "amqp": { 9 | "1.4.9": { "": [] }, 10 | "2.0.2": { "": ["vine>=1.1.1"] }, 11 | "2.1.4": { "": ["vine>=1.1.3"] } 12 | }, 13 | "appdirs": { 14 | "1.4.9": { "": [] } 15 | }, 16 | "arrow": { 17 | "0.5.0": { "": ["python-dateutil"] }, 18 | "0.5.4": { "": ["python-dateutil"] } 19 | }, 20 | "billiard": { 21 | "3.3.0.23": { "": [] }, 22 | "3.5.0.2": { "": [] } 23 | }, 24 | "celery": { 25 | "3.1.18": { 26 | "": ["kombu<3.1,>=3.0.25", "pytz>0.dev.0", "billiard<3.4,>=3.3.0.20"] 27 | }, 28 | "3.1.23": { "": ["kombu>=3.0.34,<4", "pytz>0.dev.0", "billiard>=3.3.0.23"] }, 29 | "4.0.2": { 30 | "": ["kombu<5.0,>=4.0.2", "pytz>0.dev.0", "billiard<3.6.0,>=3.5.0.2"] 31 | } 32 | }, 33 | "click": { 34 | "3.3": { "": [] }, 35 | "4.0": { "": [] } 36 | }, 37 | "django": { 38 | "1.6.11": { "": [] }, 39 | "1.7.7": { "": [] }, 40 | "1.8": { "": [] } 41 | }, 42 | "fake-piptools-test-with-pinned-deps": { 43 | "0.1": { "": ["celery==3.1.18"] } 44 | }, 45 | "fake-piptools-test-with-unsafe-deps": { 46 | "0.1": { "": ["setuptools==34.0.0"] } 47 | }, 48 | "flask": { 49 | "0.10.1": { "": ["Jinja2>=2.4", "Werkzeug>=0.7", "itsdangerous>=0.21"] } 50 | }, 51 | "flask-cors": { 52 | "1.10.2": { "": ["Flask>=0.9", "Six"] }, 53 | "2.0.0": { "": ["Flask>=0.9", "Six"] } 54 | }, 55 | "gnureadline": { 56 | "6.3.3": { "": [] } 57 | }, 58 | "html5lib": { 59 | "0.999999999": { "": ["setuptools>=18.5"] } 60 | }, 61 | "idna": { 62 | "2.8": { "": [] } 63 | }, 64 | "ipython": { 65 | "2.1.0": { 66 | 
"": ["gnureadline"], 67 | "nbconvert": ["pygments", "jinja2", "Sphinx>=0.3"], 68 | "notebook": ["tornado>=3.1", "pyzmq>=2.1.11", "jinja2"] 69 | } 70 | }, 71 | "itsdangerous": { 72 | "0.24": { "": [] } 73 | }, 74 | "jinja2": { 75 | "2.7.3": { "": ["markupsafe"] } 76 | }, 77 | "kombu": { 78 | "3.0.35": { "": ["anyjson>=0.3.3", "amqp>=1.4.9,<2.0"] }, 79 | "4.0.2": { "": ["amqp<3.0,>=2.1.4"] } 80 | }, 81 | "librabbitmq": { 82 | "1.6.1": { "": ["amqp>=1.4.6"] } 83 | }, 84 | "markupsafe": { 85 | "0.23": { "": [] } 86 | }, 87 | "packaging": { 88 | "16.8": { "": [] } 89 | }, 90 | "psycopg2": { 91 | "2.5.4": { "": [] }, 92 | "2.6": { "": [] } 93 | }, 94 | "pygments": { 95 | "1.5": { "": [] } 96 | }, 97 | "pyzmq": { 98 | "2.1.12": { "": [] } 99 | }, 100 | "pytz": { 101 | "2016.4": { "": [] } 102 | }, 103 | "setuptools": { 104 | "34.0.0": { "": ["packaging>=16.8", "appdirs>=1.4.0"] }, 105 | "35.0.0": { "": [] } 106 | }, 107 | "six": { 108 | "1.6.1": { "": [] }, 109 | "1.9.0": { "": [] } 110 | }, 111 | "sphinx": { 112 | "0.3": { "": [] } 113 | }, 114 | "sqlalchemy": { 115 | "0.9.8": { "": [] }, 116 | "0.9.9": { "": [] }, 117 | "1.0.0b5": { "": [] } 118 | }, 119 | "tornado": { 120 | "3.2.2": { "": [] } 121 | }, 122 | "vine": { 123 | "1.1.1": { "": [] }, 124 | "1.1.3": { "": [] } 125 | }, 126 | "werkzeug": { 127 | "0.6": { "": [] }, 128 | "0.10": { "": [] }, 129 | "0.10.4": { "": [] } 130 | }, 131 | "yarl": { 132 | "1.4.2": { "": ["idna"] } 133 | } 134 | } 135 | -------------------------------------------------------------------------------- /tests/test_data/minimal_wheels/small-fake-multi-arch-0.1.tar.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jazzband/pip-tools/e4ed0c1e028d1ca73673a51722ba153f0c02b0c6/tests/test_data/minimal_wheels/small-fake-multi-arch-0.1.tar.gz -------------------------------------------------------------------------------- 
/tests/test_data/minimal_wheels/small_fake_a-0.1-py2.py3-none-any.whl: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jazzband/pip-tools/e4ed0c1e028d1ca73673a51722ba153f0c02b0c6/tests/test_data/minimal_wheels/small_fake_a-0.1-py2.py3-none-any.whl -------------------------------------------------------------------------------- /tests/test_data/minimal_wheels/small_fake_a-0.2-py2.py3-none-any.whl: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jazzband/pip-tools/e4ed0c1e028d1ca73673a51722ba153f0c02b0c6/tests/test_data/minimal_wheels/small_fake_a-0.2-py2.py3-none-any.whl -------------------------------------------------------------------------------- /tests/test_data/minimal_wheels/small_fake_a-0.3b1-py2.py3-none-any.whl: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jazzband/pip-tools/e4ed0c1e028d1ca73673a51722ba153f0c02b0c6/tests/test_data/minimal_wheels/small_fake_a-0.3b1-py2.py3-none-any.whl -------------------------------------------------------------------------------- /tests/test_data/minimal_wheels/small_fake_b-0.1-py2.py3-none-any.whl: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jazzband/pip-tools/e4ed0c1e028d1ca73673a51722ba153f0c02b0c6/tests/test_data/minimal_wheels/small_fake_b-0.1-py2.py3-none-any.whl -------------------------------------------------------------------------------- /tests/test_data/minimal_wheels/small_fake_b-0.2-py2.py3-none-any.whl: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jazzband/pip-tools/e4ed0c1e028d1ca73673a51722ba153f0c02b0c6/tests/test_data/minimal_wheels/small_fake_b-0.2-py2.py3-none-any.whl -------------------------------------------------------------------------------- 
/tests/test_data/minimal_wheels/small_fake_b-0.3-py2.py3-none-any.whl: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jazzband/pip-tools/e4ed0c1e028d1ca73673a51722ba153f0c02b0c6/tests/test_data/minimal_wheels/small_fake_b-0.3-py2.py3-none-any.whl -------------------------------------------------------------------------------- /tests/test_data/minimal_wheels/small_fake_multi_arch-0.1-py2.py3-none-manylinux1_i686.whl: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jazzband/pip-tools/e4ed0c1e028d1ca73673a51722ba153f0c02b0c6/tests/test_data/minimal_wheels/small_fake_multi_arch-0.1-py2.py3-none-manylinux1_i686.whl -------------------------------------------------------------------------------- /tests/test_data/minimal_wheels/small_fake_multi_arch-0.1-py2.py3-none-manylinux1_x86_64.whl: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jazzband/pip-tools/e4ed0c1e028d1ca73673a51722ba153f0c02b0c6/tests/test_data/minimal_wheels/small_fake_multi_arch-0.1-py2.py3-none-manylinux1_x86_64.whl -------------------------------------------------------------------------------- /tests/test_data/minimal_wheels/small_fake_multi_arch-0.1-py2.py3-none-win32.whl: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jazzband/pip-tools/e4ed0c1e028d1ca73673a51722ba153f0c02b0c6/tests/test_data/minimal_wheels/small_fake_multi_arch-0.1-py2.py3-none-win32.whl -------------------------------------------------------------------------------- /tests/test_data/minimal_wheels/small_fake_with_deps-0.1-py2.py3-none-any.whl: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/jazzband/pip-tools/e4ed0c1e028d1ca73673a51722ba153f0c02b0c6/tests/test_data/minimal_wheels/small_fake_with_deps-0.1-py2.py3-none-any.whl -------------------------------------------------------------------------------- /tests/test_data/minimal_wheels/small_fake_with_deps_and_sub_deps-0.1-py2.py3-none-any.whl: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jazzband/pip-tools/e4ed0c1e028d1ca73673a51722ba153f0c02b0c6/tests/test_data/minimal_wheels/small_fake_with_deps_and_sub_deps-0.1-py2.py3-none-any.whl -------------------------------------------------------------------------------- /tests/test_data/minimal_wheels/small_fake_with_unpinned_deps-0.1-py2.py3-none-any.whl: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jazzband/pip-tools/e4ed0c1e028d1ca73673a51722ba153f0c02b0c6/tests/test_data/minimal_wheels/small_fake_with_unpinned_deps-0.1-py2.py3-none-any.whl -------------------------------------------------------------------------------- /tests/test_data/packages/fake_with_deps/setup.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | from setuptools import setup 4 | 5 | setup( 6 | name="fake_with_deps", 7 | version=0.1, 8 | install_requires=[ 9 | "python-dateutil>=2.4.2,<2.5", 10 | "colorama<0.4.0,>=0.3.7", 11 | "cornice<1.1,>=1.0.0", 12 | "enum34<1.1.7,>=1.0.4", 13 | "six>1.5,<=1.8", 14 | "ipaddress<1.1,>=1.0.16", 15 | "jsonschema<3.0,>=2.4.0", 16 | "pyramid<1.6,>=1.5.7", 17 | "pyzmq<14.8,>=14.7.0", 18 | "simplejson>=3.5,!=3.8,>3.9", 19 | "SQLAlchemy!=0.9.5,<2.0.0,>=0.7.8,>=1.0.0", 20 | "python-memcached>=1.57,<2.0", 21 | "xmltodict<=0.11,>=0.4.6", 22 | ], 23 | ) 24 | -------------------------------------------------------------------------------- /tests/test_data/packages/small_fake_a/setup.py: 
-------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | from setuptools import setup 4 | 5 | setup(name="small_fake_a", version=0.1) 6 | -------------------------------------------------------------------------------- /tests/test_data/packages/small_fake_with_build_deps/backend/backend.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | # A dependency of the build backend that is not installed is equivalent to a build 4 | # backend that is not installed so we don't have to test both cases. 5 | import fake_static_build_dep # noqa: F401 6 | import setuptools.build_meta 7 | 8 | # Re-export all names in case more hooks are added in the future 9 | from setuptools.build_meta import * # noqa: F401, F403 10 | 11 | build_wheel = setuptools.build_meta.build_wheel 12 | build_sdist = setuptools.build_meta.build_sdist 13 | 14 | 15 | def get_requires_for_build_sdist(config_settings=None): 16 | result = setuptools.build_meta.get_requires_for_build_sdist(config_settings) 17 | assert result == [] 18 | result.append("fake_dynamic_build_dep_for_all") 19 | result.append("fake_dynamic_build_dep_for_sdist") 20 | return result 21 | 22 | 23 | def get_requires_for_build_wheel(config_settings=None): 24 | result = setuptools.build_meta.get_requires_for_build_wheel(config_settings) 25 | assert result == ["wheel"] 26 | result.append("fake_dynamic_build_dep_for_all") 27 | result.append("fake_dynamic_build_dep_for_wheel") 28 | return result 29 | 30 | 31 | def get_requires_for_build_editable(config_settings=None): 32 | return ["fake_dynamic_build_dep_for_all", "fake_dynamic_build_dep_for_editable"] 33 | -------------------------------------------------------------------------------- /tests/test_data/packages/small_fake_with_build_deps/pyproject.toml: -------------------------------------------------------------------------------- 1 | 
[build-system] 2 | requires = [ 3 | "setuptools==68.1.2", 4 | "wheel==0.41.1", 5 | "fake_static_build_dep" 6 | ] 7 | build-backend = "backend" 8 | backend-path = ["backend"] 9 | -------------------------------------------------------------------------------- /tests/test_data/packages/small_fake_with_build_deps/setup.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | from setuptools import setup 4 | 5 | setup( 6 | name="small_fake_with_build_deps", 7 | version=0.1, 8 | install_requires=[ 9 | "fake_direct_runtime_dep", 10 | ], 11 | extras_require={ 12 | "x": ["fake_direct_extra_runtime_dep"], 13 | }, 14 | ) 15 | -------------------------------------------------------------------------------- /tests/test_data/packages/small_fake_with_deps/setup.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | from setuptools import setup 4 | 5 | setup( 6 | name="small_fake_with_deps", 7 | version=0.1, 8 | install_requires=["small-fake-a==0.1", "small-fake-b==0.1"], 9 | ) 10 | -------------------------------------------------------------------------------- /tests/test_data/packages/small_fake_with_deps_and_sub_deps/setup.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | from setuptools import setup 4 | 5 | setup( 6 | name="small_fake_with_deps_and_sub_deps", 7 | version=0.1, 8 | install_requires=["small-fake-with-unpinned-deps"], 9 | ) 10 | -------------------------------------------------------------------------------- /tests/test_data/packages/small_fake_with_pyproject/pyproject.toml: -------------------------------------------------------------------------------- 1 | [project] 2 | name="small_fake_with_pyproject" 3 | version=0.1 4 | dependencies=[ 5 | "fake_direct_runtime_dep", 6 | ] 7 | [project.optional-dependencies] 8 | x = 
["fake_direct_extra_runtime_dep[with_its_own_extra]"] 9 | -------------------------------------------------------------------------------- /tests/test_data/packages/small_fake_with_subdir/subdir/setup.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | from setuptools import setup 4 | 5 | setup(name="small_fake_a", version=0.1) 6 | -------------------------------------------------------------------------------- /tests/test_data/packages/small_fake_with_unpinned_deps/setup.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | from setuptools import setup 4 | 5 | setup( 6 | name="small_fake_with_unpinned_deps", 7 | version=0.1, 8 | install_requires=["small-fake-a", "small-fake-b"], 9 | ) 10 | -------------------------------------------------------------------------------- /tests/test_fake_index.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import pytest 4 | 5 | 6 | def test_find_best_match(from_line, repository): 7 | ireq = from_line("django>1.5") 8 | assert str(repository.find_best_match(ireq)) == "django==1.8" 9 | 10 | ireq = from_line("django<1.8,~=1.6") 11 | assert str(repository.find_best_match(ireq)) == "django==1.7.7" 12 | 13 | # Extras available, but no extras specified 14 | ireq = from_line("ipython") 15 | assert str(repository.find_best_match(ireq)) == "ipython==2.1.0" 16 | 17 | # Make sure we include extras. They should be sorted in the output. 
18 | ireq = from_line("ipython[notebook,nbconvert]") 19 | assert str(repository.find_best_match(ireq)) == "ipython[nbconvert,notebook]==2.1.0" 20 | 21 | 22 | def test_find_best_match_incl_prereleases(from_line, repository): 23 | ireq = from_line("SQLAlchemy") 24 | assert ( 25 | str(repository.find_best_match(ireq, prereleases=False)) == "sqlalchemy==0.9.9" 26 | ) 27 | assert ( 28 | str(repository.find_best_match(ireq, prereleases=True)) == "sqlalchemy==1.0.0b5" 29 | ) 30 | 31 | 32 | def test_find_best_match_for_editable(from_editable, repository): 33 | ireq = from_editable("git+git://whatev.org/blah.git#egg=flask") 34 | assert repository.find_best_match(ireq) == ireq 35 | 36 | 37 | def test_get_dependencies(from_line, repository): 38 | ireq = from_line("django==1.6.11") 39 | assert repository.get_dependencies(ireq) == [] 40 | 41 | ireq = from_line("Flask==0.10.1") 42 | dependencies = repository.get_dependencies(ireq) 43 | assert {str(req) for req in dependencies} == { 44 | "Werkzeug>=0.7", 45 | "Jinja2>=2.4", 46 | "itsdangerous>=0.21", 47 | } 48 | 49 | ireq = from_line("ipython==2.1.0") 50 | dependencies = repository.get_dependencies(ireq) 51 | assert {str(req) for req in dependencies} == {"gnureadline"} 52 | 53 | ireq = from_line("ipython[notebook]==2.1.0") 54 | dependencies = repository.get_dependencies(ireq) 55 | assert {str(req) for req in dependencies} == { 56 | "gnureadline", 57 | "pyzmq>=2.1.11", 58 | "tornado>=3.1", 59 | "jinja2", 60 | } 61 | 62 | ireq = from_line("ipython[notebook,nbconvert]==2.1.0") 63 | dependencies = repository.get_dependencies(ireq) 64 | assert {str(req) for req in dependencies} == { 65 | "gnureadline", 66 | "pyzmq>=2.1.11", 67 | "tornado>=3.1", 68 | "jinja2", 69 | "pygments", 70 | "Sphinx>=0.3", 71 | } 72 | 73 | 74 | def test_get_dependencies_for_editable(from_editable, repository): 75 | ireq = from_editable("git+git://example.org/django.git#egg=django") 76 | assert repository.get_dependencies(ireq) == [] 77 | 78 | 79 | def 
test_get_dependencies_rejects_non_pinned_requirements(from_line, repository): 80 | not_a_pinned_req = from_line("django>1.6") 81 | with pytest.raises(TypeError): 82 | repository.get_dependencies(not_a_pinned_req) 83 | 84 | 85 | def test_get_hashes(from_line, repository): 86 | ireq = from_line("django==1.8") 87 | expected = { 88 | "test:123", 89 | "sha256:0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef", 90 | } 91 | assert repository.get_hashes(ireq) == expected 92 | -------------------------------------------------------------------------------- /tests/test_logging.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | from piptools.logging import LogContext 4 | 5 | 6 | def test_indentation(runner): 7 | """ 8 | Test LogContext.indentation() context manager increases indentation. 9 | """ 10 | log = LogContext(indent_width=2) 11 | 12 | with runner.isolation() as (_, stderr): 13 | log.log("Test message 1") 14 | with log.indentation(): 15 | log.log("Test message 2") 16 | with log.indentation(): 17 | log.log("Test message 3") 18 | log.log("Test message 4") 19 | log.log("Test message 5") 20 | 21 | assert stderr.getvalue().decode().splitlines() == [ 22 | "Test message 1", 23 | " Test message 2", 24 | " Test message 3", 25 | " Test message 4", 26 | "Test message 5", 27 | ] 28 | -------------------------------------------------------------------------------- /tests/test_minimal_upgrade.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import pytest 4 | 5 | from piptools.repositories import LocalRequirementsRepository 6 | from piptools.utils import key_from_ireq 7 | 8 | 9 | @pytest.mark.parametrize( 10 | ("input", "pins", "expected"), 11 | ( 12 | (tup) 13 | for tup in [ 14 | # Add Flask to an existing requirements.in, using --no-upgrade 15 | ( 16 | ["flask", "jinja2", "werkzeug"], 17 | [ 18 | # The 
requirements.txt from a previous round 19 | "jinja2==2.7.3", 20 | "markupsafe==0.23", 21 | "werkzeug==0.6", 22 | ], 23 | [ 24 | # Add flask and upgrade werkzeug from incompatible 0.6 25 | "flask==0.10.1", 26 | "itsdangerous==0.24 (from flask==0.10.1)", 27 | "werkzeug==0.10.4", 28 | # Other requirements are unchanged from 29 | # the original requirements.txt 30 | "jinja2==2.7.3", 31 | "markupsafe==0.23 (from jinja2==2.7.3)", 32 | ], 33 | ) 34 | ] 35 | ), 36 | ) 37 | def test_no_upgrades(base_resolver, repository, from_line, input, pins, expected): 38 | input = [from_line(line) for line in input] 39 | existing_pins = {} 40 | for line in pins: 41 | ireq = from_line(line) 42 | existing_pins[key_from_ireq(ireq)] = ireq 43 | local_repository = LocalRequirementsRepository(existing_pins, repository) 44 | output = base_resolver( 45 | input, prereleases=False, repository=local_repository 46 | ).resolve() 47 | output = {str(line) for line in output} 48 | assert output == {str(line) for line in expected} 49 | -------------------------------------------------------------------------------- /tests/test_pip_compat.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | from pathlib import Path, PurePosixPath 4 | 5 | from piptools._compat.pip_compat import parse_requirements 6 | 7 | from .constants import PACKAGES_RELATIVE_PATH 8 | 9 | 10 | def test_parse_requirements_preserve_editable_relative_path(tmp_path, repository): 11 | test_package_path = str( 12 | PurePosixPath(Path(PACKAGES_RELATIVE_PATH)) / "small_fake_a" 13 | ) 14 | requirements_in_path = str(tmp_path / "requirements.in") 15 | 16 | with open(requirements_in_path, "w") as requirements_in_file: 17 | requirements_in_file.write(f"-e {test_package_path}") 18 | 19 | [install_requirement] = parse_requirements( 20 | requirements_in_path, session=repository.session 21 | ) 22 | 23 | assert install_requirement.link.url == test_package_path 24 | assert 
install_requirement.link.file_path == test_package_path 25 | -------------------------------------------------------------------------------- /tests/test_repository_local.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import pytest 4 | 5 | from piptools.repositories.local import LocalRequirementsRepository 6 | from piptools.utils import key_from_ireq 7 | 8 | EXPECTED = {"sha256:5e6071ee6e4c59e0d0408d366fe9b66781d2cf01be9a6e19a2433bb3c5336330"} 9 | 10 | 11 | def test_get_hashes_local_repository_cache_miss( 12 | capsys, pip_conf, from_line, pypi_repository 13 | ): 14 | existing_pins = {} 15 | local_repository = LocalRequirementsRepository(existing_pins, pypi_repository) 16 | with local_repository.allow_all_wheels(): 17 | hashes = local_repository.get_hashes(from_line("small-fake-a==0.1")) 18 | assert hashes == EXPECTED 19 | captured = capsys.readouterr() 20 | assert captured.out == "" 21 | assert captured.err == "" 22 | 23 | 24 | def test_get_hashes_local_repository_cache_hit(from_line, repository): 25 | # Create an install requirement with the hashes included in its options 26 | hash_options = {"sha256": [entry.split(":")[1] for entry in EXPECTED]} 27 | req = from_line("small-fake-a==0.1", hash_options=hash_options) 28 | existing_pins = {key_from_ireq(req): req} 29 | 30 | # Use fake repository so that we know the hashes are coming from cache 31 | local_repository = LocalRequirementsRepository(existing_pins, repository) 32 | with local_repository.allow_all_wheels(): 33 | hashes = local_repository.get_hashes(from_line("small-fake-a==0.1")) 34 | assert hashes == EXPECTED 35 | 36 | 37 | NONSENSE = {"sha256:NONSENSE"} 38 | 39 | 40 | @pytest.mark.parametrize( 41 | ("reuse_hashes", "expected"), ((True, NONSENSE), (False, EXPECTED)) 42 | ) 43 | def test_toggle_reuse_hashes_local_repository( 44 | capsys, pip_conf, from_line, pypi_repository, reuse_hashes, expected 45 | ): 46 | # Create an 
install requirement with the hashes included in its options 47 | hash_options = {"sha256": [entry.split(":")[1] for entry in NONSENSE]} 48 | req = from_line("small-fake-a==0.1", hash_options=hash_options) 49 | existing_pins = {key_from_ireq(req): req} 50 | 51 | local_repository = LocalRequirementsRepository( 52 | existing_pins, pypi_repository, reuse_hashes=reuse_hashes 53 | ) 54 | with local_repository.allow_all_wheels(): 55 | assert local_repository.get_hashes(from_line("small-fake-a==0.1")) == expected 56 | captured = capsys.readouterr() 57 | assert captured.out == "" 58 | assert captured.err == "" 59 | -------------------------------------------------------------------------------- /tests/test_subprocess_utils.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import sys 4 | 5 | from piptools.subprocess_utils import run_python_snippet 6 | 7 | 8 | def test_run_python_snippet_returns_multilne(): 9 | result = run_python_snippet(sys.executable, r'print("MULTILINE\nOUTPUT", end="")') 10 | assert result == "MULTILINE\nOUTPUT" 11 | -------------------------------------------------------------------------------- /tests/test_top_level_editable.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jazzband/pip-tools/e4ed0c1e028d1ca73673a51722ba153f0c02b0c6/tests/test_top_level_editable.py -------------------------------------------------------------------------------- /tests/test_writer.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import sys 4 | 5 | import pytest 6 | from pip._internal.models.format_control import FormatControl 7 | 8 | from piptools.scripts.compile import cli 9 | from piptools.utils import comment 10 | from piptools.writer import ( 11 | MESSAGE_UNHASHED_PACKAGE, 12 | MESSAGE_UNINSTALLABLE, 13 | MESSAGE_UNSAFE_PACKAGES, 14 | 
MESSAGE_UNSAFE_PACKAGES_UNPINNED, 15 | OutputWriter, 16 | ) 17 | 18 | 19 | @pytest.fixture 20 | def writer(tmpdir_cwd): 21 | with open("src_file", "w"), open("src_file2", "w"): 22 | pass 23 | 24 | cli_args = [ 25 | "--dry-run", 26 | "--output-file", 27 | "requirements.txt", 28 | "src_file", 29 | "src_file2", 30 | ] 31 | 32 | with cli.make_context("pip-compile", cli_args) as ctx: 33 | writer = OutputWriter( 34 | dst_file=ctx.params["output_file"], 35 | click_ctx=ctx, 36 | dry_run=True, 37 | emit_header=True, 38 | emit_index_url=True, 39 | emit_trusted_host=True, 40 | annotate=True, 41 | annotation_style="split", 42 | generate_hashes=False, 43 | default_index_url=None, 44 | index_urls=[], 45 | trusted_hosts=[], 46 | format_control=FormatControl(set(), set()), 47 | linesep="\n", 48 | allow_unsafe=False, 49 | find_links=[], 50 | emit_find_links=True, 51 | strip_extras=False, 52 | emit_options=True, 53 | ) 54 | yield writer 55 | 56 | 57 | def test_format_requirement_annotation_editable(from_editable, writer): 58 | # Annotations are printed as comments at a fixed column 59 | ireq = from_editable("git+git://fake.org/x/y.git#egg=y") 60 | ireq.comes_from = "xyz" 61 | 62 | assert writer._format_requirement( 63 | ireq 64 | ) == "-e git+git://fake.org/x/y.git#egg=y\n " + comment("# via xyz") 65 | 66 | 67 | def test_format_requirement_annotation(from_line, writer): 68 | ireq = from_line("test==1.2") 69 | ireq.comes_from = "xyz" 70 | 71 | assert writer._format_requirement(ireq) == "test==1.2\n " + comment("# via xyz") 72 | 73 | 74 | def test_format_requirement_annotation_lower_case(from_line, writer): 75 | ireq = from_line("Test==1.2") 76 | ireq.comes_from = "xyz" 77 | 78 | assert writer._format_requirement(ireq) == "test==1.2\n " + comment("# via xyz") 79 | 80 | 81 | def test_format_requirement_for_primary(from_line, writer): 82 | "Primary packages should get annotated." 
83 | ireq = from_line("test==1.2") 84 | ireq.comes_from = "xyz" 85 | 86 | assert writer._format_requirement(ireq) == "test==1.2\n " + comment("# via xyz") 87 | 88 | 89 | def test_format_requirement_for_primary_lower_case(from_line, writer): 90 | "Primary packages should get annotated." 91 | ireq = from_line("Test==1.2") 92 | ireq.comes_from = "xyz" 93 | 94 | assert writer._format_requirement(ireq) == "test==1.2\n " + comment("# via xyz") 95 | 96 | 97 | def test_format_requirement_environment_marker(from_line, writer): 98 | "Environment markers should get passed through to output." 99 | ireq = from_line( 100 | 'test ; python_version == "2.7" and platform_python_implementation == "CPython"' 101 | ) 102 | 103 | result = writer._format_requirement(ireq, marker=ireq.markers) 104 | assert ( 105 | result == 'test ; python_version == "2.7" and ' 106 | 'platform_python_implementation == "CPython"' 107 | ) 108 | 109 | 110 | @pytest.mark.parametrize("allow_unsafe", ((True,), (False,))) 111 | def test_iter_lines__unsafe_dependencies(writer, from_line, allow_unsafe): 112 | writer.allow_unsafe = allow_unsafe 113 | writer.emit_header = False 114 | 115 | lines = writer._iter_lines( 116 | {from_line("test==1.2")}, 117 | {from_line("setuptools==1.10.0")}, 118 | unsafe_packages=set(), 119 | markers={}, 120 | ) 121 | 122 | expected_lines = ( 123 | "test==1.2", 124 | "", 125 | MESSAGE_UNSAFE_PACKAGES, 126 | "setuptools==1.10.0" if allow_unsafe else comment("# setuptools"), 127 | ) 128 | assert tuple(lines) == expected_lines 129 | 130 | 131 | def test_iter_lines__unsafe_with_hashes(capsys, writer, from_line): 132 | writer.allow_unsafe = False 133 | writer.emit_header = False 134 | ireqs = [from_line("test==1.2")] 135 | unsafe_ireqs = [from_line("setuptools==1.10.0")] 136 | hashes = {ireqs[0]: {"FAKEHASH"}, unsafe_ireqs[0]: set()} 137 | 138 | lines = writer._iter_lines( 139 | ireqs, unsafe_ireqs, unsafe_packages=set(), markers={}, hashes=hashes 140 | ) 141 | 142 | expected_lines = ( 143 
| "test==1.2 \\\n --hash=FAKEHASH", 144 | "", 145 | MESSAGE_UNSAFE_PACKAGES_UNPINNED, 146 | comment("# setuptools"), 147 | ) 148 | assert tuple(lines) == expected_lines 149 | captured = capsys.readouterr() 150 | assert captured.out == "" 151 | assert captured.err.strip() == MESSAGE_UNINSTALLABLE 152 | 153 | 154 | def test_iter_lines__hash_missing(capsys, writer, from_line): 155 | writer.allow_unsafe = False 156 | writer.emit_header = False 157 | ireqs = [from_line("test==1.2"), from_line("file:///example/#egg=example")] 158 | hashes = {ireqs[0]: {"FAKEHASH"}, ireqs[1]: set()} 159 | 160 | lines = writer._iter_lines( 161 | ireqs, 162 | hashes=hashes, 163 | unsafe_requirements=set(), 164 | unsafe_packages=set(), 165 | markers={}, 166 | ) 167 | 168 | expected_lines = ( 169 | MESSAGE_UNHASHED_PACKAGE, 170 | "example @ file:///example/", 171 | "test==1.2 \\\n --hash=FAKEHASH", 172 | ) 173 | assert tuple(lines) == expected_lines 174 | captured = capsys.readouterr() 175 | assert captured.out == "" 176 | assert captured.err.strip() == MESSAGE_UNINSTALLABLE 177 | 178 | 179 | def test_iter_lines__no_warn_if_only_unhashable_packages(writer, from_line): 180 | """ 181 | There shouldn't be MESSAGE_UNHASHED_PACKAGE warning if there are only unhashable 182 | packages. See GH-1101. 
183 | """ 184 | writer.allow_unsafe = False 185 | writer.emit_header = False 186 | ireqs = [ 187 | from_line("file:///unhashable-pkg1/#egg=unhashable-pkg1"), 188 | from_line("file:///unhashable-pkg2/#egg=unhashable-pkg2"), 189 | ] 190 | hashes = {ireq: set() for ireq in ireqs} 191 | 192 | lines = writer._iter_lines( 193 | ireqs, 194 | hashes=hashes, 195 | unsafe_requirements=set(), 196 | unsafe_packages=set(), 197 | markers={}, 198 | ) 199 | 200 | expected_lines = ( 201 | "unhashable-pkg1 @ file:///unhashable-pkg1/", 202 | "unhashable-pkg2 @ file:///unhashable-pkg2/", 203 | ) 204 | assert tuple(lines) == expected_lines 205 | 206 | 207 | def test_write_header(writer): 208 | expected = map( 209 | comment, 210 | [ 211 | "#", 212 | "# This file is autogenerated by pip-compile with Python " 213 | f"{sys.version_info.major}.{sys.version_info.minor}", 214 | "# by the following command:", 215 | "#", 216 | "# pip-compile --output-file={} src_file src_file2".format( 217 | writer.click_ctx.params["output_file"].name 218 | ), 219 | "#", 220 | ], 221 | ) 222 | assert list(writer.write_header()) == list(expected) 223 | 224 | 225 | def test_write_header_custom_compile_command(writer, monkeypatch): 226 | monkeypatch.setenv("CUSTOM_COMPILE_COMMAND", "./pipcompilewrapper") 227 | expected = map( 228 | comment, 229 | [ 230 | "#", 231 | "# This file is autogenerated by pip-compile with Python " 232 | f"{sys.version_info.major}.{sys.version_info.minor}", 233 | "# by the following command:", 234 | "#", 235 | "# ./pipcompilewrapper", 236 | "#", 237 | ], 238 | ) 239 | assert list(writer.write_header()) == list(expected) 240 | 241 | 242 | def test_write_header_no_emit_header(writer): 243 | """ 244 | There should not be headers if emit_header is False 245 | """ 246 | writer.emit_header = False 247 | 248 | with pytest.raises(StopIteration): 249 | next(writer.write_header()) 250 | 251 | 252 | @pytest.mark.parametrize( 253 | ("emit_options", "expected_flags"), 254 | ( 255 | pytest.param( 256 | 
True, 257 | ( 258 | "--index-url https://index-server", 259 | "--find-links links", 260 | "--trusted-host index-server", 261 | "--no-binary flask", 262 | "--only-binary django", 263 | "", 264 | ), 265 | id="on", 266 | ), 267 | pytest.param(False, (), id="off"), 268 | ), 269 | ) 270 | def test_write_flags_emit_options(writer, emit_options, expected_flags): 271 | """ 272 | There should be options if emit_options is True 273 | """ 274 | writer.emit_options = emit_options 275 | writer.index_urls = ["https://index-server"] 276 | writer.find_links = ["links"] 277 | writer.trusted_hosts = ["index-server"] 278 | writer.format_control = FormatControl(no_binary=["flask"], only_binary=["django"]) 279 | 280 | assert tuple(writer.write_flags()) == expected_flags 281 | 282 | 283 | def test_write_format_controls(writer): 284 | """ 285 | Tests --no-binary/--only-binary options. 286 | """ 287 | 288 | # FormatControl actually expects sets, but we give it lists here to 289 | # ensure that we are sorting them when writing. 290 | writer.format_control = FormatControl( 291 | no_binary=["psycopg2", "click"], only_binary=["pytz", "django"] 292 | ) 293 | lines = list(writer.write_format_controls()) 294 | 295 | expected_lines = [ 296 | "--no-binary click", 297 | "--no-binary psycopg2", 298 | "--only-binary django", 299 | "--only-binary pytz", 300 | ] 301 | assert lines == expected_lines 302 | 303 | 304 | @pytest.mark.parametrize( 305 | ("no_binary", "only_binary", "expected_lines"), 306 | ( 307 | ( 308 | [":all:"], 309 | ["django"], 310 | [ 311 | "--no-binary :all:", 312 | "--only-binary django", 313 | ], 314 | ), 315 | ( 316 | ["django"], 317 | [":all:"], 318 | [ 319 | "--only-binary :all:", 320 | "--no-binary django", 321 | ], 322 | ), 323 | ), 324 | ) 325 | def test_write_format_controls_all(writer, no_binary, only_binary, expected_lines): 326 | """ 327 | Tests --no-binary/--only-binary options 328 | with the value of :all:. 
We want to preserve 329 | the FormatControl behavior so we emit :all: 330 | first before packages. 331 | """ 332 | 333 | writer.format_control = FormatControl(no_binary=no_binary, only_binary=only_binary) 334 | lines = list(writer.write_format_controls()) 335 | 336 | assert lines == expected_lines 337 | 338 | 339 | @pytest.mark.parametrize( 340 | ("index_urls", "expected_lines"), 341 | ( 342 | # No urls - no options 343 | ([], []), 344 | # Single URL should be index-url 345 | (["https://index-url.com"], ["--index-url https://index-url.com"]), 346 | # First URL should be index-url, the others should be extra-index-url 347 | ( 348 | [ 349 | "https://index-url1.com", 350 | "https://index-url2.com", 351 | "https://index-url3.com", 352 | ], 353 | [ 354 | "--index-url https://index-url1.com", 355 | "--extra-index-url https://index-url2.com", 356 | "--extra-index-url https://index-url3.com", 357 | ], 358 | ), 359 | # If a first URL equals to the default URL, the the index url must not be set 360 | # and the others should be extra-index-url 361 | ( 362 | [ 363 | "https://default-index-url.com", 364 | "https://index-url1.com", 365 | "https://index-url2.com", 366 | ], 367 | [ 368 | "--extra-index-url https://index-url1.com", 369 | "--extra-index-url https://index-url2.com", 370 | ], 371 | ), 372 | # Not ignore URLs equal to the default index-url 373 | # (note: the previous case is exception) 374 | ( 375 | [ 376 | "https://index-url1.com", 377 | "https://default-index-url.com", 378 | "https://index-url2.com", 379 | ], 380 | [ 381 | "--index-url https://index-url1.com", 382 | "--extra-index-url https://default-index-url.com", 383 | "--extra-index-url https://index-url2.com", 384 | ], 385 | ), 386 | # Ignore URLs equal to the default index-url 387 | (["https://default-index-url.com", "https://default-index-url.com"], []), 388 | # All URLs must be deduplicated 389 | ( 390 | [ 391 | "https://index-url1.com", 392 | "https://index-url1.com", 393 | "https://index-url2.com", 394 | ], 
395 | [ 396 | "--index-url https://index-url1.com", 397 | "--extra-index-url https://index-url2.com", 398 | ], 399 | ), 400 | ), 401 | ) 402 | def test_write_index_options(writer, index_urls, expected_lines): 403 | """ 404 | Test write_index_options method. 405 | """ 406 | writer.index_urls = index_urls 407 | writer.default_index_url = "https://default-index-url.com" 408 | assert list(writer.write_index_options()) == expected_lines 409 | 410 | 411 | def test_write_index_options_no_emit_index(writer): 412 | """ 413 | There should not be --index-url/--extra-index-url options 414 | if emit_index_url is False. 415 | """ 416 | writer.emit_index_url = False 417 | with pytest.raises(StopIteration): 418 | next(writer.write_index_options()) 419 | 420 | 421 | @pytest.mark.parametrize( 422 | ("find_links", "expected_lines"), 423 | ( 424 | ([], []), 425 | (["./foo"], ["--find-links ./foo"]), 426 | (["./foo", "./bar"], ["--find-links ./foo", "--find-links ./bar"]), 427 | ), 428 | ) 429 | def test_write_find_links(writer, find_links, expected_lines): 430 | """ 431 | Test write_find_links method. 432 | """ 433 | writer.find_links = find_links 434 | assert list(writer.write_find_links()) == expected_lines 435 | 436 | 437 | def test_write_order(writer, from_line): 438 | """ 439 | Order of packages should match that of `pip freeze`, with the exception 440 | that requirement names should be canonicalized. 
441 | """ 442 | writer.emit_header = False 443 | 444 | packages = { 445 | from_line("package_a==0.1"), 446 | from_line("Package-b==2.3.4"), 447 | from_line("Package==5.6"), 448 | from_line("package2==7.8.9"), 449 | } 450 | expected_lines = [ 451 | "package==5.6", 452 | "package-a==0.1", 453 | "package-b==2.3.4", 454 | "package2==7.8.9", 455 | ] 456 | result = writer._iter_lines( 457 | packages, unsafe_requirements=set(), unsafe_packages=set(), markers={} 458 | ) 459 | assert list(result) == expected_lines 460 | -------------------------------------------------------------------------------- /tests/utils.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import os 4 | 5 | # NOTE: keep in sync with "passenv" in tox.ini 6 | CI_VARIABLES = {"CI", "GITHUB_ACTIONS"} 7 | 8 | 9 | def looks_like_ci(): 10 | return bool(set(os.environ.keys()) & CI_VARIABLES) 11 | -------------------------------------------------------------------------------- /tox.ini: -------------------------------------------------------------------------------- 1 | [tox] 2 | envlist = 3 | # NOTE: keep this in sync with the env list in .github/workflows/ci.yml. 
4 | py{38,39,310,311,312,py3}-pip{supported,lowest,latest,main}-coverage 5 | pip{supported,lowest,latest,main}-coverage 6 | pip{supported,lowest,latest,main} 7 | checkqa 8 | readme 9 | skip_missing_interpreters = True 10 | 11 | [testenv] 12 | description = run the tests with pytest 13 | extras = 14 | testing 15 | coverage: coverage 16 | deps = 17 | pipsupported: pip==24.2 18 | piplowest: pip==22.2.* 19 | piplatest: pip 20 | pipmain: https://github.com/pypa/pip/archive/main.zip 21 | setenv = 22 | coverage: PYTEST_ADDOPTS=--strict-markers --doctest-modules --cov --cov-report=term-missing --cov-report=xml {env:PYTEST_ADDOPTS:} 23 | commands_pre = 24 | piplatest: python -m pip install -U pip 25 | pip --version 26 | commands = pytest {posargs} 27 | passenv = 28 | CI 29 | FORCE_COLOR 30 | GITHUB_ACTIONS 31 | MYPY_FORCE_COLOR 32 | PRE_COMMIT_COLOR 33 | PY_COLORS 34 | pip_pre=True 35 | 36 | [testenv:checkqa] 37 | description = format the code base and check its quality 38 | skip_install = True 39 | deps = pre-commit 40 | commands_pre = 41 | commands = pre-commit run --all-files --show-diff-on-failure 42 | 43 | [testenv:readme] 44 | description = check whether the long description will render correctly on PyPI 45 | deps = 46 | build 47 | twine 48 | commands_pre = 49 | commands = 50 | python -m build --outdir {envtmpdir} --sdist {toxinidir} 51 | twine check --strict {envtmpdir}{/}* 52 | skip_install = true 53 | 54 | [testenv:pip-compile-docs] 55 | description = compile requirements for the documentation 56 | changedir = {[testenv:build-docs]changedir} 57 | commands_pre = 58 | commands = python -m piptools compile --strip-extras --allow-unsafe --quiet requirements.in {posargs} 59 | 60 | [testenv:build-docs] 61 | description = build the documentation 62 | deps = 63 | -r{toxinidir}/docs/requirements.txt 64 | # FIXME: re-enable the "-r" + "-c" paradigm once the pip bug is fixed. 
65 | # Ref: https://github.com/pypa/pip/issues/9243 66 | # -r{toxinidir}/docs/requirements.in 67 | # -c{toxinidir}/docs/requirements.txt 68 | commands_pre = 69 | commands = 70 | # Retrieve possibly missing commits: 71 | -git fetch --unshallow 72 | -git fetch --tags 73 | 74 | # Build the html docs with Sphinx: 75 | {envpython} -m sphinx \ 76 | -j auto \ 77 | -b html \ 78 | {tty:--color} \ 79 | -a \ 80 | -n -W --keep-going \ 81 | -d "{temp_dir}/.doctrees" \ 82 | . \ 83 | "{envdir}/docs_out" 84 | 85 | # Print out the output docs dir and a way to serve html: 86 | -{envpython} -c\ 87 | 'import pathlib;\ 88 | docs_dir = pathlib.Path(r"{envdir}") / "docs_out";\ 89 | index_file = docs_dir / "index.html";\ 90 | print("\n" + "=" * 120 +\ 91 | f"\n\nDocumentation available under:\n\n\ 92 | \tfile://\{index_file\}\n\nTo serve docs, use\n\n\ 93 | \t$ python3 -m http.server --directory \ 94 | \N\{QUOTATION MARK\}\{docs_dir\}\N\{QUOTATION MARK\} 0\n\n" +\ 95 | "=" * 120)' 96 | changedir = {toxinidir}/docs 97 | isolated_build = true 98 | passenv = 99 | SSH_AUTH_SOCK 100 | skip_install = false 101 | allowlist_externals = 102 | git 103 | 104 | 105 | [testenv:preview-docs] 106 | description = preview the docs 107 | deps = 108 | sphinx-autobuild 109 | {[testenv:build-docs]deps} 110 | commands_pre = 111 | commands = 112 | # Retrieve possibly missing commits: 113 | -git fetch --unshallow 114 | -git fetch --tags 115 | 116 | # Build the html docs with sphinx-autobuild: 117 | {envpython} -m sphinx_autobuild \ 118 | -j auto \ 119 | -b html \ 120 | -n \ 121 | -W \ 122 | -d "{temp_dir}/.doctrees" \ 123 | . 
\ 124 | --watch ../README.md \ 125 | --watch ../CHANGELOG.md \ 126 | "{envdir}/docs_out" 127 | 128 | changedir = {[testenv:build-docs]changedir} 129 | isolated_build = {[testenv:build-docs]isolated_build} 130 | passenv = {[testenv:build-docs]passenv} 131 | skip_install = {[testenv:build-docs]skip_install} 132 | allowlist_externals = {[testenv:build-docs]allowlist_externals} 133 | 134 | 135 | [testenv:linkcheck-docs] 136 | description = check links in the documentation 137 | deps = 138 | -r{toxinidir}/docs/requirements.txt 139 | # FIXME: re-enable the "-r" + "-c" paradigm once the pip bug is fixed. 140 | # Ref: https://github.com/pypa/pip/issues/9243 141 | # -r{toxinidir}/docs/requirements.in 142 | # -c{toxinidir}/docs/requirements.txt 143 | commands_pre = 144 | commands = 145 | # Retrieve possibly missing commits: 146 | -git fetch --unshallow 147 | -git fetch --tags 148 | 149 | # Build the html docs with Sphinx: 150 | {envpython} -m sphinx \ 151 | -j auto \ 152 | -b linkcheck \ 153 | {tty:--color} \ 154 | -a \ 155 | -n -W --keep-going \ 156 | -d "{temp_dir}/.doctrees" \ 157 | . \ 158 | "{envdir}/docs_out" 159 | changedir = {toxinidir}/docs 160 | isolated_build = true 161 | passenv = 162 | SSH_AUTH_SOCK 163 | skip_install = false 164 | allowlist_externals = 165 | git 166 | --------------------------------------------------------------------------------