├── tests ├── __init__.py ├── conftest.py ├── utils.py ├── test_upgrade_feature.py ├── test_conflicts.py ├── test_utils.py ├── test_add_hashes.py ├── test_cli_v2.py ├── test_discover.py ├── test_cli_v1.py └── test_pipcompilemulti.py ├── nested ├── up.in ├── base.in ├── subproject │ ├── base.in │ ├── sub.in │ ├── base.txt │ └── sub.txt ├── diamond.in ├── base.txt ├── up.txt └── diamond.txt ├── conflicting-in-merge ├── base2.in ├── base1.in └── together.in ├── conflicting-in-ref ├── base1.in └── base2.in ├── docs ├── history.rst ├── pallets.png ├── _static │ └── custom.css ├── afterword.rst ├── precommit.rst ├── twitter_logo.svg ├── Makefile ├── features.rst ├── Uber_Logo_Black_RGB.svg ├── boilerplate.rst ├── index.rst ├── moz-logo-bw-rgb.svg ├── migration.rst ├── installation.rst ├── skydio-logo-black.svg ├── conf.py └── why.rst ├── requirements ├── base.in ├── testwin.in ├── test.in ├── local.in ├── testwin.txt ├── base.txt ├── testwin.hash ├── test.txt ├── base.hash ├── local.txt └── test.hash ├── MANIFEST.in ├── pipcompilemulti ├── features │ ├── __init__.py │ ├── forward.py │ ├── annotate_index.py │ ├── use_cache.py │ ├── base_dir.py │ ├── header.py │ ├── unsafe.py │ ├── file_extensions.py │ ├── forbid_post.py │ ├── limit_envs.py │ ├── compatible.py │ ├── base.py │ ├── limit_in_paths.py │ ├── skip_constraint_comments.py │ ├── add_hashes.py │ ├── autoresolve.py │ ├── upgrade.py │ └── controller.py ├── __init__.py ├── options.py ├── deduplicate.py ├── actions.py ├── cli_v1.py ├── config.py ├── discover.py ├── cli_v2.py ├── utils.py ├── verify.py ├── dependency.py └── environment.py ├── AUTHORS.rst ├── .pre-commit-hooks.yaml ├── .pre-commit-config.yaml ├── .editorconfig ├── .github └── workflows │ ├── python36.yml │ ├── python37.yml │ ├── python38.yml │ ├── python39.yml │ └── pipcompilemulti.yml ├── .coveragerc ├── .readthedocs.yml ├── appveyor ├── setup_build_env.cmd └── install.ps1 ├── setup.cfg ├── .gitignore ├── Makefile ├── LICENSE.txt ├── appveyor.yml ├── 
.travis.yml ├── tox.ini ├── README.rst ├── setup.py ├── CONTRIBUTING.rst ├── HISTORY.rst ├── how-to.md └── .pylintrc /tests/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /nested/up.in: -------------------------------------------------------------------------------- 1 | six 2 | -------------------------------------------------------------------------------- /conflicting-in-merge/base2.in: -------------------------------------------------------------------------------- 1 | pytz 2 | -------------------------------------------------------------------------------- /conflicting-in-ref/base1.in: -------------------------------------------------------------------------------- 1 | Django 2 | -------------------------------------------------------------------------------- /conflicting-in-merge/base1.in: -------------------------------------------------------------------------------- 1 | pytz<2018 2 | -------------------------------------------------------------------------------- /nested/base.in: -------------------------------------------------------------------------------- 1 | -r subproject/sub.in 2 | -------------------------------------------------------------------------------- /nested/subproject/base.in: -------------------------------------------------------------------------------- 1 | -r ../up.in 2 | -------------------------------------------------------------------------------- /nested/subproject/sub.in: -------------------------------------------------------------------------------- 1 | -r base.in 2 | -------------------------------------------------------------------------------- /docs/history.rst: -------------------------------------------------------------------------------- 1 | .. 
include:: ../HISTORY.rst 2 | -------------------------------------------------------------------------------- /nested/diamond.in: -------------------------------------------------------------------------------- 1 | -r base.in 2 | -r subproject/base.in 3 | -------------------------------------------------------------------------------- /conflicting-in-ref/base2.in: -------------------------------------------------------------------------------- 1 | -r base1.in 2 | 3 | opal<0.13 4 | -------------------------------------------------------------------------------- /requirements/base.in: -------------------------------------------------------------------------------- 1 | click 2 | pip-tools>=5.2 3 | toposort 4 | -------------------------------------------------------------------------------- /conflicting-in-merge/together.in: -------------------------------------------------------------------------------- 1 | -r base1.in 2 | -r base2.in 3 | -------------------------------------------------------------------------------- /requirements/testwin.in: -------------------------------------------------------------------------------- 1 | -r test.in 2 | 3 | colorama 4 | atomicwrites 5 | -------------------------------------------------------------------------------- /requirements/test.in: -------------------------------------------------------------------------------- 1 | -r base.in 2 | 3 | pytest 4 | pytest-cov 5 | mock 6 | more-itertools 7 | -------------------------------------------------------------------------------- /docs/pallets.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/john-sandall/pip-compile-multi/master/docs/pallets.png -------------------------------------------------------------------------------- /MANIFEST.in: -------------------------------------------------------------------------------- 1 | include AUTHORS.rst 2 | include CONTRIBUTING.rst 3 | include HISTORY.rst 4 | include LICENSE 5 | 
include requirements/base.in 6 | -------------------------------------------------------------------------------- /pipcompilemulti/features/__init__.py: -------------------------------------------------------------------------------- 1 | """Features as modules.""" 2 | 3 | from .controller import FeaturesController 4 | 5 | 6 | FEATURES = FeaturesController() 7 | -------------------------------------------------------------------------------- /pipcompilemulti/__init__.py: -------------------------------------------------------------------------------- 1 | """Pip compile multi aka requirements""" 2 | 3 | __author__ = 'Peter Demin' 4 | __email__ = 'peterdemin@gmail.com' 5 | __version__ = '2.4.0' 6 | -------------------------------------------------------------------------------- /docs/_static/custom.css: -------------------------------------------------------------------------------- 1 | body { 2 | font-family: 'minion pro', 'bell mt', Georgia, 'Hiragino Mincho Pro', serif; 3 | } 4 | 5 | div.document { 6 | width: 71em; 7 | } 8 | -------------------------------------------------------------------------------- /nested/base.txt: -------------------------------------------------------------------------------- 1 | # SHA1:6a7fb5b91341e12583307fb55069246d43096b44 2 | # 3 | # This file is autogenerated by pip-compile-multi 4 | # To update, run: 5 | # 6 | # pip-compile-multi 7 | # 8 | -r subproject/sub.txt 9 | -------------------------------------------------------------------------------- /nested/subproject/base.txt: -------------------------------------------------------------------------------- 1 | # SHA1:e975ad2caba20cf1afaea2cd3626c6c566edfacf 2 | # 3 | # This file is autogenerated by pip-compile-multi 4 | # To update, run: 5 | # 6 | # pip-compile-multi 7 | # 8 | -r ../up.txt 9 | -------------------------------------------------------------------------------- /nested/subproject/sub.txt: -------------------------------------------------------------------------------- 1 | # 
SHA1:a87fd594461015e819a1f468943967d12880b85d 2 | # 3 | # This file is autogenerated by pip-compile-multi 4 | # To update, run: 5 | # 6 | # pip-compile-multi 7 | # 8 | -r base.txt 9 | -------------------------------------------------------------------------------- /AUTHORS.rst: -------------------------------------------------------------------------------- 1 | ======= 2 | Credits 3 | ======= 4 | 5 | Development Lead 6 | ---------------- 7 | 8 | * Peter Demin 9 | 10 | Contributors 11 | ------------ 12 | 13 | None yet. Why not be the first? -------------------------------------------------------------------------------- /nested/up.txt: -------------------------------------------------------------------------------- 1 | # SHA1:bec9703f7a456cd2b4ab5fb3220ae016e3e394e3 2 | # 3 | # This file is autogenerated by pip-compile-multi 4 | # To update, run: 5 | # 6 | # pip-compile-multi 7 | # 8 | six==1.15.0 9 | # via -r nested/up.in 10 | -------------------------------------------------------------------------------- /nested/diamond.txt: -------------------------------------------------------------------------------- 1 | # SHA1:1301840447a3dce149692692b41974b7668ba22e 2 | # 3 | # This file is autogenerated by pip-compile-multi 4 | # To update, run: 5 | # 6 | # pip-compile-multi 7 | # 8 | -r base.txt 9 | -r subproject/base.txt 10 | -------------------------------------------------------------------------------- /requirements/local.in: -------------------------------------------------------------------------------- 1 | -r test.in 2 | 3 | tox 4 | twine 5 | wheel 6 | bump2version 7 | flake8 8 | collective.checkdocs 9 | pygments 10 | pre-commit 11 | pipdeptree 12 | pylint 13 | pep8-naming 14 | pycodestyle 15 | pydocstyle 16 | pylint 17 | sphinx 18 | -------------------------------------------------------------------------------- /.pre-commit-hooks.yaml: -------------------------------------------------------------------------------- 1 | - id: pip-compile-multi-verify 2 | name: 
pip-compile-multi verify 3 | language: python 4 | entry: pip-compile-multi verify 5 | files: ^requirements/ 6 | pass_filenames: false 7 | require_serial: true 8 | types: [file, non-executable, text] 9 | -------------------------------------------------------------------------------- /pipcompilemulti/options.py: -------------------------------------------------------------------------------- 1 | """Global dictionary holding configuration options.""" 2 | 3 | OPTIONS = {} 4 | 5 | LIST_OPTIONS = { 6 | 'add_hashes', 7 | 'compatible_patterns', 8 | 'forbid_post', 9 | 'include_names', 10 | 'upgrade_packages', 11 | } 12 | -------------------------------------------------------------------------------- /tests/conftest.py: -------------------------------------------------------------------------------- 1 | """Pytest configuration.""" 2 | 3 | import pytest 4 | from pipcompilemulti.options import OPTIONS 5 | 6 | 7 | @pytest.fixture(autouse=True) 8 | def wipe_options(): 9 | """Reset global OPTIONS dictionary before every test.""" 10 | OPTIONS.clear() 11 | -------------------------------------------------------------------------------- /.pre-commit-config.yaml: -------------------------------------------------------------------------------- 1 | repos: 2 | - repo: https://github.com/peterdemin/pip-compile-multi 3 | rev: v1.3.2 4 | hooks: 5 | - id: pip-compile-multi-verify 6 | 7 | - repo: https://github.com/pre-commit/pre-commit-hooks 8 | rev: v2.1.0 9 | hooks: 10 | - id: flake8 11 | - id: trailing-whitespace 12 | -------------------------------------------------------------------------------- /docs/afterword.rst: -------------------------------------------------------------------------------- 1 | Have fun! 2 | --------- 3 | 4 | Now that occasional backward incompatible dependency release can't ruin your day, 5 | you can **spread the word** about ``pip-compile-multi``, ask for a new feature in a `GitHub issue`_, 6 | or even open a PR ;-). 7 | 8 | .. 
_GitHub issue: https://github.com/peterdemin/pip-compile-multi/issues 9 | -------------------------------------------------------------------------------- /.editorconfig: -------------------------------------------------------------------------------- 1 | # http://editorconfig.org 2 | 3 | root = true 4 | 5 | [*] 6 | indent_style = space 7 | indent_size = 4 8 | trim_trailing_whitespace = true 9 | insert_final_newline = true 10 | charset = utf-8 11 | end_of_line = lf 12 | 13 | [*.bat] 14 | indent_style = tab 15 | end_of_line = crlf 16 | 17 | [LICENSE] 18 | insert_final_newline = false 19 | 20 | [Makefile] 21 | indent_style = tab -------------------------------------------------------------------------------- /tests/utils.py: -------------------------------------------------------------------------------- 1 | """Test utilities.""" 2 | 3 | import os 4 | import tempfile 5 | import shutil 6 | import contextlib 7 | 8 | 9 | @contextlib.contextmanager 10 | def temp_dir(): 11 | """Create temporary directory with copy of requirements.""" 12 | tmp_dir = tempfile.mkdtemp() 13 | os.rmdir(tmp_dir) 14 | shutil.copytree('requirements', tmp_dir) 15 | yield tmp_dir 16 | shutil.rmtree(tmp_dir) 17 | -------------------------------------------------------------------------------- /requirements/testwin.txt: -------------------------------------------------------------------------------- 1 | # SHA1:ab0b9abf8863d5dced78a6f5664a664d8d2488cd 2 | # 3 | # This file is autogenerated by pip-compile-multi 4 | # To update, run: 5 | # 6 | # pip-compile-multi 7 | # 8 | -r test.txt 9 | atomicwrites==1.4.0 10 | # via -r requirements/testwin.in 11 | colorama==0.4.4 12 | # via -r requirements/testwin.in 13 | 14 | # The following packages are considered to be unsafe in a requirements file: 15 | # pip 16 | -------------------------------------------------------------------------------- /tests/test_upgrade_feature.py: -------------------------------------------------------------------------------- 1 | 
"""Test upgrade feature.""" 2 | 3 | from pipcompilemulti.features import FEATURES 4 | from pipcompilemulti.options import OPTIONS 5 | 6 | 7 | def test_upgrade_package_disables_upgrade(): 8 | """Even if --update is passed, --upgrade-package disables it.""" 9 | OPTIONS.update({ 10 | 'upgrade': True, 11 | 'upgrade_packages': ['a'], 12 | }) 13 | assert not FEATURES.upgrade_all.enabled 14 | -------------------------------------------------------------------------------- /docs/precommit.rst: -------------------------------------------------------------------------------- 1 | Verify as pre-commit hook 2 | ========================= 3 | 4 | To verify that ``pip-compile-multi`` has been run after changing ``.in`` files as a `PreCommit`_ hook, just add the following to your local repo's ``.pre-commit-config.yaml`` file: 5 | 6 | .. code-block:: yaml 7 | 8 | - repo: https://github.com/peterdemin/pip-compile-multi 9 | rev: v1.3.2 10 | hooks: 11 | - id: pip-compile-multi-verify 12 | 13 | .. _PreCommit: https://pre-commit.com/ 14 | -------------------------------------------------------------------------------- /.github/workflows/python36.yml: -------------------------------------------------------------------------------- 1 | name: Python 3.6 2 | 3 | on: 4 | push: 5 | branches: [ master ] 6 | pull_request: 7 | branches: [ master ] 8 | 9 | jobs: 10 | build: 11 | runs-on: ubuntu-latest 12 | steps: 13 | - uses: actions/checkout@v2 14 | - name: Set up Python 3.6 15 | uses: actions/setup-python@v2 16 | with: 17 | python-version: '3.6' 18 | - name: Install tox 19 | run: | 20 | python -m pip install --upgrade pip 21 | python -m pip install tox 22 | - name: Run tox 23 | run: | 24 | tox 25 | -------------------------------------------------------------------------------- /.github/workflows/python37.yml: -------------------------------------------------------------------------------- 1 | name: Python 3.7 2 | 3 | on: 4 | push: 5 | branches: [ master ] 6 | pull_request: 7 | branches: [ 
master ] 8 | 9 | jobs: 10 | build: 11 | runs-on: ubuntu-latest 12 | steps: 13 | - uses: actions/checkout@v2 14 | - name: Set up Python 3.7 15 | uses: actions/setup-python@v2 16 | with: 17 | python-version: '3.7' 18 | - name: Install tox 19 | run: | 20 | python -m pip install --upgrade pip 21 | python -m pip install tox 22 | - name: Run tox 23 | run: | 24 | tox 25 | -------------------------------------------------------------------------------- /.github/workflows/python38.yml: -------------------------------------------------------------------------------- 1 | name: Python 3.8 2 | 3 | on: 4 | push: 5 | branches: [ master ] 6 | pull_request: 7 | branches: [ master ] 8 | 9 | jobs: 10 | build: 11 | runs-on: ubuntu-latest 12 | steps: 13 | - uses: actions/checkout@v2 14 | - name: Set up Python 3.8 15 | uses: actions/setup-python@v2 16 | with: 17 | python-version: '3.8' 18 | - name: Install tox 19 | run: | 20 | python -m pip install --upgrade pip 21 | python -m pip install tox 22 | - name: Run tox 23 | run: | 24 | tox 25 | -------------------------------------------------------------------------------- /.github/workflows/python39.yml: -------------------------------------------------------------------------------- 1 | name: Python 3.9 2 | 3 | on: 4 | push: 5 | branches: [ master ] 6 | pull_request: 7 | branches: [ master ] 8 | 9 | jobs: 10 | build: 11 | runs-on: ubuntu-latest 12 | steps: 13 | - uses: actions/checkout@v2 14 | - name: Set up Python 3.9 15 | uses: actions/setup-python@v2 16 | with: 17 | python-version: '3.9' 18 | - name: Install tox 19 | run: | 20 | python -m pip install --upgrade pip 21 | python -m pip install tox 22 | - name: Run tox 23 | run: | 24 | tox 25 | -------------------------------------------------------------------------------- /docs/twitter_logo.svg: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /docs/Makefile: 
-------------------------------------------------------------------------------- 1 | # Minimal makefile for Sphinx documentation 2 | # 3 | 4 | # You can set these variables from the command line. 5 | SPHINXOPTS = 6 | SPHINXBUILD = sphinx-build 7 | SOURCEDIR = . 8 | BUILDDIR = _build 9 | 10 | # Put it first so that "make" without argument is like "make help". 11 | help: 12 | @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) 13 | 14 | .PHONY: help Makefile 15 | 16 | # Catch-all target: route all unknown targets to Sphinx using the new 17 | # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). 18 | %: Makefile 19 | @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) -------------------------------------------------------------------------------- /.coveragerc: -------------------------------------------------------------------------------- 1 | # .coveragerc to control coverage.py 2 | [run] 3 | branch = True 4 | 5 | [report] 6 | # Regexes for lines to exclude from consideration 7 | exclude_lines = 8 | # Have to re-enable the standard pragma 9 | pragma: no cover 10 | 11 | # Don't complain about missing debug-only code: 12 | def __repr__ 13 | if self\.debug 14 | 15 | # Don't complain if tests don't hit defensive assertion code: 16 | raise AssertionError 17 | raise NotImplementedError 18 | 19 | # Don't complain if non-runnable code isn't run: 20 | if 0: 21 | if __name__ == .__main__.: 22 | 23 | ignore_errors = True 24 | 25 | [html] 26 | directory = htmlcov 27 | -------------------------------------------------------------------------------- /.readthedocs.yml: -------------------------------------------------------------------------------- 1 | # .readthedocs.yml 2 | # Read the Docs configuration file 3 | # See https://docs.readthedocs.io/en/stable/config-file/v2.html for details 4 | 5 | # Required 6 | version: 2 7 | 8 | # Build documentation in the docs/ directory with Sphinx 9 | sphinx: 10 | configuration: docs/conf.py 
11 | 12 | # Build documentation with MkDocs 13 | #mkdocs: 14 | # configuration: mkdocs.yml 15 | 16 | # Optionally build your docs in additional formats such as PDF and ePub 17 | formats: all 18 | 19 | # Optionally set the version of Python and requirements required to build your docs 20 | python: 21 | version: 3.6 22 | install: 23 | - requirements: requirements/local.hash 24 | - method: pip 25 | path: . 26 | -------------------------------------------------------------------------------- /tests/test_conflicts.py: -------------------------------------------------------------------------------- 1 | """End to end tests checking conflicts detection""" 2 | 3 | from click.testing import CliRunner 4 | import pytest 5 | from pipcompilemulti.cli_v1 import cli 6 | 7 | 8 | @pytest.mark.parametrize('conflict', ['merge', 'ref']) 9 | def test_conflict_detected(conflict): 10 | """Following types of version conflicts are detected: 11 | 12 | 1. Two files have different version and referenced from the third file. 13 | 2. File adds new constraint on package from referenced file. 14 | """ 15 | runner = CliRunner() 16 | result = runner.invoke(cli, ['--directory', 'conflicting-in-' + conflict]) 17 | assert result.exit_code == 1 18 | assert 'Please add constraints' in str(result.exception) 19 | -------------------------------------------------------------------------------- /pipcompilemulti/features/forward.py: -------------------------------------------------------------------------------- 1 | """Base feature for forwarding pip-tools options.""" 2 | 3 | from .base import BaseFeature 4 | 5 | 6 | class ForwardOption(BaseFeature): 7 | """Forward command line option to pip-tools.""" 8 | 9 | #: Pin command options when feature is enabled. 10 | enabled_pin_options = [] 11 | #: Pin command options when feature is disabled. 
12 | disabled_pin_options = [] 13 | 14 | @property 15 | def enabled(self): 16 | """Is feature enabled.""" 17 | return self.value 18 | 19 | def pin_options(self): 20 | """Pin command options.""" 21 | if self.enabled: 22 | return self.enabled_pin_options 23 | return self.disabled_pin_options 24 | -------------------------------------------------------------------------------- /appveyor/setup_build_env.cmd: -------------------------------------------------------------------------------- 1 | :: To build extensions for 64 bit Python 3, we need to configure environment 2 | :: variables to use the MSVC 2010 C++ compilers from GRMSDKX_EN_DVD.iso of: 3 | :: MS Windows SDK for Windows 7 and .NET Framework 4 4 | :: 5 | :: More details at: 6 | :: https://github.com/cython/cython/wiki/64BitCythonExtensionsOnWindows 7 | 8 | IF "%PYTHON_ARCH%"=="64" ( 9 | ECHO Configuring environment to build with MSVC on a 64bit architecture 10 | ECHO Using Windows SDK %WINDOWS_SDK_VERSION% 11 | "C:\Program Files\Microsoft SDKs\Windows\%WINDOWS_SDK_VERSION%\Bin\SetEnv.cmd" /x64 /release 12 | SET DISTUTILS_USE_SDK=1 13 | SET MSSdk=1 14 | ) ELSE ( 15 | ECHO Using default MSVC build environment for 32bit architecture 16 | ) -------------------------------------------------------------------------------- /requirements/base.txt: -------------------------------------------------------------------------------- 1 | # SHA1:bf4337d28508b6c39305ca82c70a1c5d34330f8f 2 | # 3 | # This file is autogenerated by pip-compile-multi 4 | # To update, run: 5 | # 6 | # pip-compile-multi 7 | # 8 | click==7.1.2 9 | # via 10 | # -r requirements/base.in 11 | # pip-tools 12 | importlib-metadata==3.7.3 13 | # via pep517 14 | pep517==0.10.0 15 | # via pip-tools 16 | pip-tools==6.0.1 17 | # via -r requirements/base.in 18 | toml==0.10.2 19 | # via pep517 20 | toposort==1.6 21 | # via -r requirements/base.in 22 | typing-extensions==3.7.4.3 23 | # via importlib-metadata 24 | zipp==3.4.1 25 | # via 26 | # importlib-metadata 27 | 
# pep517 28 | 29 | # The following packages are considered to be unsafe in a requirements file: 30 | # pip 31 | -------------------------------------------------------------------------------- /setup.cfg: -------------------------------------------------------------------------------- 1 | [bumpversion] 2 | current_version = 2.4.0 3 | commit = True 4 | tag = True 5 | 6 | [wheel] 7 | universal = 1 8 | 9 | [bdist_wheel] 10 | universal = 1 11 | 12 | [bumpversion:file:setup.py] 13 | search = VERSION = "{current_version}" 14 | replace = VERSION = "{new_version}" 15 | 16 | [bumpversion:file:pipcompilemulti/__init__.py] 17 | search = __version__ = '{current_version}' 18 | replace = __version__ = '{new_version}' 19 | 20 | [requirements:Python 3] 21 | python = 3.6 22 | include_names = local, testwin 23 | 24 | [requirements:Python 3 hash] 25 | python = 3.6 26 | include_names = local, testwin 27 | add_hashes = local, testwin 28 | in_ext = txt 29 | out_ext = hash 30 | 31 | [pycodestyle] 32 | max-line-length = 120 33 | 34 | [flake8] 35 | max-line-length = 120 36 | -------------------------------------------------------------------------------- /requirements/testwin.hash: -------------------------------------------------------------------------------- 1 | # SHA1:a053c9dcc1ff686538ecf3dda5723ad59a09a7cd 2 | # 3 | # This file is autogenerated by pip-compile-multi 4 | # To update, run: 5 | # 6 | # pip-compile-multi 7 | # 8 | -r test.hash 9 | atomicwrites==1.4.0 \ 10 | --hash=sha256:6d1784dea7c0c8d4a5172b6c620f40b6e4cbfdf96d783691f2e1302a7b88e197 \ 11 | --hash=sha256:ae70396ad1a434f9c7046fd2dd196fc04b12f9e91ffb859164193be8b6168a7a 12 | # via -r requirements/testwin.txt 13 | colorama==0.4.4 \ 14 | --hash=sha256:5941b2b48a20143d2267e95b1c2a7603ce057ee39fd88e7329b0c292aa16869b \ 15 | --hash=sha256:9f47eda37229f68eee03b24b9748937c7dc3868f906e8ba69fbcbdd3bc5dc3e2 16 | # via -r requirements/testwin.txt 17 | 18 | # The following packages are considered to be unsafe in a requirements file: 
19 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | *.py[cod] 2 | /.env/ 3 | 4 | # C extensions 5 | *.so 6 | 7 | # Packages 8 | *.egg 9 | *.egg-info 10 | dist 11 | build 12 | eggs 13 | parts 14 | bin 15 | var 16 | sdist 17 | develop-eggs 18 | .installed.cfg 19 | lib 20 | lib64 21 | 22 | # Installer logs 23 | pip-log.txt 24 | 25 | # Unit test / coverage reports 26 | .coverage 27 | .tox 28 | nosetests.xml 29 | /.pytest_cache/ 30 | 31 | # Conflict tests side-effects 32 | /conflicting-in-*/*.txt 33 | 34 | # Translations 35 | *.mo 36 | 37 | # Mr Developer 38 | .mr.developer.cfg 39 | .project 40 | .pydevproject 41 | 42 | # Complexity 43 | output/*.html 44 | output/*/index.html 45 | 46 | # Sphinx 47 | docs/_build 48 | 49 | # Cookiecutter 50 | output/ 51 | boilerplate/ 52 | /.cache/ 53 | /.eggs/ 54 | 55 | # Vim 56 | Session.vim 57 | /htmlcov/ 58 | /.idea/ 59 | -------------------------------------------------------------------------------- /requirements/test.txt: -------------------------------------------------------------------------------- 1 | # SHA1:bdf4145293d4f6c3c8bb24ffb2443a854b777682 2 | # 3 | # This file is autogenerated by pip-compile-multi 4 | # To update, run: 5 | # 6 | # pip-compile-multi 7 | # 8 | -r base.txt 9 | attrs==20.3.0 10 | # via pytest 11 | coverage==5.5 12 | # via pytest-cov 13 | iniconfig==1.1.1 14 | # via pytest 15 | mock==4.0.3 16 | # via -r requirements/test.in 17 | more-itertools==8.7.0 18 | # via -r requirements/test.in 19 | packaging==20.9 20 | # via pytest 21 | pluggy==0.13.1 22 | # via pytest 23 | py==1.10.0 24 | # via pytest 25 | pyparsing==2.4.7 26 | # via packaging 27 | pytest-cov==2.11.1 28 | # via -r requirements/test.in 29 | pytest==6.2.2 30 | # via 31 | # -r requirements/test.in 32 | # pytest-cov 33 | 34 | # The following packages are considered to be unsafe in a requirements file: 35 | # pip 36 | 
-------------------------------------------------------------------------------- /tests/test_utils.py: -------------------------------------------------------------------------------- 1 | """Utils tests.""" 2 | 3 | import sys 4 | 5 | import pytest 6 | 7 | from pipcompilemulti.utils import recursive_refs 8 | 9 | 10 | @pytest.mark.skipif(sys.platform == "win32", reason="Pass normalization is wonky under Windows") 11 | def test_recursive_refs(): 12 | """Test sample inputs.""" 13 | result = sorted(recursive_refs([ 14 | {'in_path': 'base.in', 'refs': []}, 15 | {'in_path': 'sub/test.in', 'refs': ['../base.in']}, 16 | {'in_path': 'local.in', 'refs': ['sub/test.in']}, 17 | ], 'local.in')) 18 | assert result == ['base.in', 'sub/test.in'] 19 | 20 | result = sorted(recursive_refs([ 21 | {'in_path': 'base.in', 'refs': []}, 22 | {'in_path': 'sub/test.in', 'refs': ['../base.in']}, 23 | {'in_path': 'local.in', 'refs': ['sub/test.in']}, 24 | ], 'sub/test.in')) 25 | assert result == ['base.in'] 26 | -------------------------------------------------------------------------------- /docs/features.rst: -------------------------------------------------------------------------------- 1 | Features 2 | -------- 3 | 4 | ``pip-compile-multi`` supports many options to customize compilation. 5 | 6 | .. automodule:: pipcompilemulti.features.base_dir 7 | 8 | .. automodule:: pipcompilemulti.features.file_extensions 9 | 10 | .. automodule:: pipcompilemulti.features.upgrade 11 | 12 | .. automodule:: pipcompilemulti.features.use_cache 13 | 14 | .. automodule:: pipcompilemulti.features.compatible 15 | 16 | .. automodule:: pipcompilemulti.features.add_hashes 17 | 18 | .. automodule:: pipcompilemulti.features.unsafe 19 | 20 | .. automodule:: pipcompilemulti.features.header 21 | 22 | .. automodule:: pipcompilemulti.features.limit_in_paths 23 | 24 | .. automodule:: pipcompilemulti.features.limit_envs 25 | 26 | .. automodule:: pipcompilemulti.features.annotate_index 27 | 28 | .. 
automodule:: pipcompilemulti.features.autoresolve 29 | 30 | .. automodule:: pipcompilemulti.features.skip_constraint_comments 31 | 32 | .. automodule:: pipcompilemulti.verify 33 | -------------------------------------------------------------------------------- /Makefile: -------------------------------------------------------------------------------- 1 | .PHONY: virtual_env_set 2 | virtual_env_set: 3 | ifndef VIRTUAL_ENV 4 | $(error VIRTUAL_ENV not set) 5 | endif 6 | 7 | ### DEPENDENCIES ### 8 | .PHONY: install 9 | install: requirements/local.hash virtual_env_set 10 | pip install -Ue . -r requirements/local.hash 11 | 12 | .PHONY: sync 13 | sync: requirements/local.hash virtual_env_set 14 | pip-sync requirements/local.hash 15 | pip install -e . --no-deps 16 | 17 | .PHONY: upgrade 18 | upgrade: virtual_env_set 19 | tox -e upgrade 20 | 21 | ### CI ### 22 | .PHONY: test 23 | test: 24 | tox 25 | 26 | .PHONY: clean 27 | clean: 28 | rm -rf build dist pip-compile-multi.egg-info docs/_build 29 | find . 
-name "*.pyc" -delete 30 | find * -type d -name '__pycache__' | xargs rm -rf 31 | 32 | .PHONY: build 33 | build: clean 34 | python setup.py sdist bdist_wheel 35 | 36 | .PHONY: release 37 | release: build 38 | twine upload dist/* 39 | 40 | ### MISC ### 41 | .PHONY: docs 42 | docs: virtual_env_set 43 | make -C docs html 44 | -------------------------------------------------------------------------------- /tests/test_add_hashes.py: -------------------------------------------------------------------------------- 1 | # pylint: disable=too-few-public-methods,no-self-use,missing-module-docstring 2 | # pylint: disable=missing-function-docstring,missing-class-docstring 3 | import unittest 4 | 5 | from pipcompilemulti.options import OPTIONS 6 | from pipcompilemulti.features.add_hashes import AddHashes 7 | 8 | 9 | class AddHashesTestCase(unittest.TestCase): 10 | def setUp(self): 11 | self._add_hashes = AddHashes(FakeController()) 12 | OPTIONS[self._add_hashes.OPTION_NAME] = ['test'] 13 | self._add_hashes.on_discover([ 14 | {'in_path': 'base', 'refs': []}, 15 | {'in_path': 'test', 'refs': ['base']}, 16 | {'in_path': 'docs', 'refs': []}, 17 | ]) 18 | 19 | def test_pin_options(self): 20 | assert self._add_hashes.pin_options('base') == ['--generate-hashes'] 21 | assert self._add_hashes.pin_options('docs') == [] 22 | 23 | 24 | class FakeController(): 25 | def compose_input_file_path(self, name): 26 | return name 27 | -------------------------------------------------------------------------------- /docs/Uber_Logo_Black_RGB.svg: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /LICENSE.txt: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) [year] [fullname] 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the 
"Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /appveyor.yml: -------------------------------------------------------------------------------- 1 | environment: 2 | matrix: 3 | - PYTHON: "C:\\Python36" 4 | PYTHON_VERSION: "3.6.13" 5 | PYTHON_ARCH: "32" 6 | TOX_ENV: "py36" 7 | 8 | - PYTHON: "C:\\Python37" 9 | PYTHON_VERSION: "3.7.10" 10 | PYTHON_ARCH: "32" 11 | TOX_ENV: "py37" 12 | 13 | - PYTHON: "C:\\Python38" 14 | PYTHON_VERSION: "3.8.7" 15 | PYTHON_ARCH: "32" 16 | TOX_ENV: "py38" 17 | 18 | # NOT WORKING AS OF FEB 2021 19 | # - PYTHON: "C:\\Python39" 20 | # PYTHON_VERSION: "3.9.1" 21 | # PYTHON_ARCH: "32" 22 | # TOX_ENV: "py39" 23 | 24 | init: 25 | - "ECHO %PYTHON% %PYTHON_VERSION% %PYTHON_ARCH%" 26 | 27 | install: 28 | - "appveyor/setup_build_env.cmd" 29 | - "powershell appveyor/install.ps1" 30 | 31 | build: false # Not a C# project, build stuff at the test step instead. 
32 | 33 | test_script: 34 | - "%PYTHON%/Scripts/tox -vvvvve %TOX_ENV%-windows" 35 | 36 | after_test: 37 | - "%PYTHON%/python setup.py bdist_wheel" 38 | - ps: "ls dist" 39 | 40 | artifacts: 41 | - path: dist\* 42 | 43 | #on_success: 44 | # - TODO: upload the content of dist/*.whl to a public wheelhouse 45 | -------------------------------------------------------------------------------- /pipcompilemulti/features/annotate_index.py: -------------------------------------------------------------------------------- 1 | """ 2 | Add index URL annotation 3 | ======================== 4 | 5 | This flag provides the ability to annotate the index URL mimicking the logic of 6 | the ``pip-compile`` ``--emit-index-url`` and ``--no-emit-index-url`` flag 7 | by opting to add or not add the ``pip`` index to the generated files. 8 | 9 | .. code-block:: text 10 | 11 | --annotate-index / --no-annotate-index Add index URL to generated files (default false) 12 | 13 | Note the default behavior is not to add the index, i.e., ``--no-annotate-index``. 
class AnnotateIndex(ForwardOption):
    """Toggle emission of the pip index URL in compiled output files.

    Maps the ``--annotate-index`` flag onto pip-compile's
    ``--emit-index-url`` / ``--no-emit-index-url`` options.
    """

    OPTION_NAME = "annotate_index"
    # Forwarded verbatim to pip-compile depending on the flag state.
    enabled_pin_options = ["--emit-index-url"]
    disabled_pin_options = ["--no-emit-index-url"]
    CLICK_OPTION = ClickOption(
        is_flag=True,
        default=False,
        long_option="--annotate-index/--no-annotate-index",
        help_text="Add the index URL to generated files (default false).",
    )
"3.6" 6 | - "3.7" 7 | - "3.8" 8 | 9 | 10 | matrix: 11 | include: 12 | - os: linux 13 | python: 3.6 14 | env: TOXENV=lint 15 | 16 | before_install: 17 | - pip install codecov coveralls 18 | install: pip install tox-travis 19 | script: tox 20 | after_success: 21 | - codecov 22 | - coveralls 23 | 24 | deploy: 25 | provider: pypi 26 | user: peterdemin 27 | password: 28 | secure: "ZZTWaBiHQicobKw/xiD6IGQoAdrGmZ/DAXXitbnTE4jGpDKq1X+0JDNbYo1EVXQ/r+wmL3nBTz1DPrEymo//hli/4GzffargF7lnWg7GfrUSAxWUSWj36+AAyRLWGqwhSPQCxDymAi6TJF5VSKw1qDrs7vicjSOwtXnUxBANMaBxeFxhJJbhQpViBCTs6w5ZsPRFaWIVkxdOSFQY09ZgDBU17VYaVyzB5okZ5Ogk+3Xj5cqZKPDrptmuYSjdHm/LEj5q6gesuOOYTXfloqLirSTxPedo+gLUHoUIDgvPhNm1VJiiBx+d5DjIKhsOeV385rGYWUq92EznfrdPmorAmh62KkP4pTL1+Jd9gcS/FIXiMk3ga9tQLH51dqkRRgwgM5HMVYZIyl+D2I2Nw6RdMcViJZkv8VV5wF/58JiHdIdiutgo0Y14dtbUjryDoz5Ivsgp2NodyA/AJ97NCf8CVrYxOJivjEJ5tkP/ANGzg3/gDTy3t1qVRBaNyDRQK77TwbvpR9LWODZXKuSh7WXeZpflSYT1PL8mLtBgho0onzXPjmgyYgGMEDEAx+kaXc5xLj2uH1Z+RC7bXbYkeAxjnpkxwWKNcCnnpELE56732JYqdDh8Pjc6eJtPCS4mAFxNPAafR4wl6NNWOgT4vjnh6ZJAGeFBIKw21AWsYI99mc4=" 29 | on: 30 | tags: true 31 | distributions: "sdist bdist_wheel" 32 | skip_existing: true 33 | -------------------------------------------------------------------------------- /tests/test_cli_v2.py: -------------------------------------------------------------------------------- 1 | """End to end tests for CLI v2""" 2 | 3 | from functools import partial 4 | 5 | try: 6 | from unittest import mock 7 | except ImportError: 8 | import mock 9 | from click.testing import CliRunner 10 | 11 | import pytest 12 | 13 | from pipcompilemulti.cli_v2 import cli, read_config 14 | from .utils import temp_dir 15 | 16 | 17 | @pytest.fixture(autouse=True) 18 | def requirements_dir(): 19 | """Create temporary requirements directory for test time.""" 20 | with temp_dir() as tmp_dir: 21 | patch = partial(patched_config, tmp_dir) 22 | with mock.patch('pipcompilemulti.cli_v2.read_config', patch): 23 | yield 24 | 25 | 26 | 
class PackageDeduplicator:
    """Remove packages included in referenced environments.

    Tracks, per environment input file, which packages it locked, so that
    an environment referencing another can omit the packages already pinned
    by its references.
    """

    def __init__(self):
        # Mapping of environment in_path -> {package name: version}.
        self.env_packages = {}
        # Discovered environment configurations; None until on_discover().
        self.env_confs = None

    def on_discover(self, env_confs):
        """Save environment references."""
        self.env_confs = env_confs

    def register_packages_for_env(self, in_path, packages):
        """Save environment packages."""
        self.env_packages[in_path] = packages

    def ignored_packages(self, in_path):
        """Get package mapping from name to version for referenced environments."""
        if self.env_confs is None:
            return {}
        rrefs = recursive_refs(self.env_confs, in_path)
        return merged_packages(self.env_packages, rrefs)

    def recursive_refs(self, in_path):
        """Return recursive list of environment names referenced by in_path."""
        if self.env_confs is None:
            # Fix: previously returned {} here, contradicting the documented
            # list return type.  An empty list keeps iteration, membership
            # tests and truthiness identical for callers.
            return []
        return recursive_refs(self.env_confs, in_path)
-------------------------------------------------------------------------------- /tox.ini: -------------------------------------------------------------------------------- 1 | [tox] 2 | envlist = py{36,37,38,39,py3}-{linux,windows,darwin}, lint, checkdocs, verify 3 | skip_missing_interpreters = true 4 | 5 | [testenv] 6 | platform = linux: linux 7 | windows: win32 8 | commands = python -m pytest --cov=pipcompilemulti --cov-branch --cov-report=html 9 | deps = 10 | linux: -r{toxinidir}/requirements/test.hash 11 | darwin: -r{toxinidir}/requirements/test.hash 12 | windows: -r{toxinidir}/requirements/testwin.hash 13 | 14 | [testenv:lint] 15 | skip_install = true 16 | basepython=python3.6 17 | commands = 18 | pylint -r y pipcompilemulti/ tests/ 19 | deps = 20 | -r{toxinidir}/requirements/local.hash 21 | 22 | [testenv:checkdocs] 23 | skip_install = true 24 | commands = python setup.py checkdocs 25 | deps = 26 | collective.checkdocs 27 | pygments 28 | 29 | [testenv:verify] 30 | skipsdist = true 31 | skip_install = true 32 | basepython = python3.6 33 | deps = pip-compile-multi 34 | commands = pip-compile-multi verify 35 | whitelist_externals = pip-compile-multi 36 | 37 | [testenv:upgrade] 38 | basepython = python3.6 39 | usedevelop = True 40 | deps = -rrequirements/base.txt 41 | commands = 42 | pip-compile-multi 43 | pip-compile-multi -g local -i txt -o hash --allow-unsafe 44 | 45 | [pytest] 46 | addopts = -vvvs --doctest-modules 47 | log_cli=true 48 | log_level=NOTSET 49 | -------------------------------------------------------------------------------- /docs/boilerplate.rst: -------------------------------------------------------------------------------- 1 | Bonus: boilerplate to put in project's README 2 | --------------------------------------------- 3 | 4 | Nice way of introducing dependency management process to new team members for copy-pasting to `README.md`: 5 | 6 | .. 
code-block:: text 7 | 8 | ## Dependency management 9 | 10 | This project uses [pip-compile-multi](https://pypi.org/project/pip-compile-multi/) for hard-pinning dependencies versions. 11 | Please see its documentation for usage instructions. 12 | In short, `requirements/base.in` contains the list of direct requirements with occasional version constraints (like `Django<2`) 13 | and `requirements/base.txt` is automatically generated from it by adding recursive tree of dependencies with fixed versions. 14 | The same goes for `test` and `dev`. 15 | 16 | To upgrade dependency versions, run `pip-compile-multi`. 17 | 18 | To add a new dependency without upgrade, add it to `requirements/base.in` and run `pip-compile-multi --no-upgrade`. 19 | 20 | For installation always use `.txt` files. For example, command `pip install -Ue . -r requirements/dev.txt` will install 21 | this project in development mode, testing requirements and development tools. 22 | Another useful command is `pip-sync requirements/dev.txt`, it uninstalls packages from your virtualenv that aren't listed in the file. 23 | -------------------------------------------------------------------------------- /docs/index.rst: -------------------------------------------------------------------------------- 1 | .. pip-compile-multi documentation master file, created by 2 | sphinx-quickstart on Thu Aug 8 15:36:37 2019. 3 | You can adapt this file completely to your liking, but it should at least 4 | contain the root `toctree` directive. 5 | 6 | pip-compile-multi 7 | ~~~~~~~~~~~~~~~~~ 8 | 9 | Pip-compile-multi is a command line utility, that compiles multiple 10 | requirements files to lock dependency versions. 11 | Underneath it uses `pip-tools`_ for actual compilation. 12 | Pip-compile-multi targets complex projects and provides highest 13 | level of automation and flexibility in class. 14 | 15 | .. _pip-tools: https://github.com/jazzband/pip-tools 16 | 17 | To install: 18 | 19 | .. 
@pytest.mark.skipif(sys.platform == "win32", reason="Pass normalization is wonky under Windows")
def test_discover_nested():
    """Test references to other dirs are discovered."""
    join = os.path.join
    expected = [
        {"in_path": join("nested", "up.in"),
         "name": "up",
         "refs": set()},
        {"in_path": join("nested", "subproject", "base.in"),
         "name": "base",
         "refs": {join("..", "up.in")}},
        {"in_path": join("nested", "subproject", "sub.in"),
         "name": "sub",
         "refs": {"base.in"}},
        {"in_path": join("nested", "base.in"),
         "name": "base",
         "refs": {join("subproject", "sub.in")}},
        {"in_path": join("nested", "diamond.in"),
         "name": "diamond",
         "refs": {"base.in", join("subproject", "base.in")}},
    ]
    assert discover(join("nested", "*.in")) == expected
class UseCache(ForwardOption):
    """Speed up compilation by reusing the pip-tools cache.

    When the flag is off (the default), ``--rebuild`` is forwarded to
    pip-compile so every run starts from a clean cache.
    """

    OPTION_NAME = 'use_cache'
    # Forwarded to pip-compile only when the flag is disabled.
    disabled_pin_options = ['--rebuild']
    CLICK_OPTION = ClickOption(
        is_flag=True,
        default=False,
        short_option='-u',
        long_option='--use-cache',
        help_text='Use pip-tools cache to speed up compilation.',
    )
class BaseDir(BaseFeature):
    """Override the directory that holds requirements files.

    Defaults to ``requirements``; configurable with ``-d/--directory``.
    """

    OPTION_NAME = 'base_dir'
    CLICK_OPTION = ClickOption(
        long_option='--directory',
        short_option='-d',
        default="requirements",
        is_flag=False,
        help_text='Directory path with requirements files.',
    )

    @property
    def path(self):
        """Get the base directory path.

        >>> BaseDir().path == 'requirements'
        True
        """
        return self.value

    def file_path(self, file_name):
        """Compose file path for a given file name.

        >>> import os.path
        >>> expected = os.path.join('requirements', 'base.txt')
        >>> expected == BaseDir().file_path('base.txt')
        True
        """
        return os.path.join(self.value, file_name)
class CustomHeader(BaseFeature):
    """Put custom header at the beginning of locked files.

    When no ``--header`` file is given, DEFAULT_HEADER is used.
    """

    OPTION_NAME = 'header_file'
    CLICK_OPTION = ClickOption(
        long_option='--header',
        short_option='-h',
        default='',
        help_text='File path with custom header text for generated files.',
    )

    def __init__(self):
        # Lazily populated cache for the header text.
        self._header_text = None

    @property
    def text(self):
        """Header text for generated files, read once and cached."""
        if self._header_text is None:
            self._header_text = (
                self._read_header_text() if self.value else DEFAULT_HEADER
            )
        return self._header_text

    def _read_header_text(self):
        # self.value holds the path of the user-supplied header file.
        with open(self.value) as handle:
            return handle.read()
class AllowUnsafe(ForwardOption):
    """Include packages that pip-tools considers 'unsafe' (e.g. setuptools).

    Forwards ``--allow-unsafe`` to pip-compile when enabled.  Commonly
    combined with hash generation so the locked files stay installable.
    """

    OPTION_NAME = 'allow_unsafe'
    CLICK_OPTION = ClickOption(
        long_option='--allow-unsafe',
        short_option='-s',
        is_flag=True,
        default=False,
        help_text="Whether or not to include 'unsafe' packages "
                  'in generated requirements files. '
                  'Consult pip-compile --help for more information'
    )
    enabled_pin_options = ['--allow-unsafe']
def recompile():
    """Compile requirements files for all environments.

    Discovers all input files matching the configured pattern, lets the
    registered features observe the discovered set, and then compiles each
    environment in topological order.  If a feature provides a "sink" input
    file (presumably the autoresolve feature -- TODO confirm), that sink is
    locked first so it can constrain the individual environments.
    """
    env_confs = discover(FEATURES.compose_input_file_path('*'))
    FEATURES.on_discover(env_confs)
    deduplicator = PackageDeduplicator()
    deduplicator.on_discover(env_confs)
    sink_in_path = FEATURES.sink_in_path()
    if sink_in_path:
        sink_env = Environment(in_path=sink_in_path)
        logger.info(
            "Creating a temporary file with all dependencies at %s",
            sink_env.outfile,
        )
        sink_env.create_lockfile()
    compile_topologically(env_confs, deduplicator)


def compile_topologically(env_confs, deduplicator):
    """Compile environments in topological order of reference.

    ``env_confs`` is assumed to already be topologically sorted by
    ``discover()`` -- TODO confirm.  Environments excluded by features
    (e.g. --only-name) are skipped entirely.
    """
    for conf in env_confs:
        if not FEATURES.included(conf['in_path']):
            continue
        env = Environment(in_path=conf['in_path'], deduplicator=deduplicator)
        if env.maybe_create_lockfile():
            # Only munge lockfile if it was written.
            header_text = generate_robust_hash_comment(env.infile) + FEATURES.get_header_text()
            env.replace_header(header_text)
            env.add_references(conf['refs'])
class OutputExtension(BaseFeature):
    """Override output file extension.

    Defaults to ``txt``; configurable with ``-o/--out-ext``.
    """

    OPTION_NAME = 'out_ext'
    CLICK_OPTION = ClickOption(
        long_option='--out-ext',
        short_option='-o',
        default="txt",
        is_flag=False,
        help_text='File extension of output files.',
    )

    def compose_output_file_path(self, in_path):
        """Replace the input file's extension with the configured one.

        >>> OutputExtension().compose_output_file_path('sub/base.in')
        'sub/base.txt'
        """
        stem, _ = os.path.splitext(in_path)
        return '{0}.{1}'.format(stem, self.value)
def exception_hook(exctype, value, traceback):
    """Print the exception with frames above this module stripped.

    Installed as ``sys.excepthook`` so users see their own stack,
    not the CLI plumbing of this module.
    """
    print_exception(exctype, value, trim_traceback(traceback))


def trim_traceback(traceback):
    """Trim traceback top so it starts with this module.

    Walks the linked list of frames and returns the first one whose
    source file belongs to this module.  Returns the original traceback
    if this module is not found (including when traceback is None).
    """
    # Fix: removed the unused 'level' counter that was incremented but
    # never read.
    new_traceback = traceback
    while new_traceback is not None:
        file_path = new_traceback.tb_frame.f_code.co_filename
        # THIS_FILE is absolute; startswith tolerates co_filename values
        # that are prefixes of it -- TODO confirm the intended matching.
        if THIS_FILE.startswith(file_path):
            return new_traceback
        new_traceback = new_traceback.tb_next
    return traceback
' 33 | 'Can be supplied multiple times.' 34 | ) 35 | 36 | @property 37 | def enabled_envs(self): 38 | """Convert to set.""" 39 | return set(self.value or []) 40 | 41 | @staticmethod 42 | def drop_post(version): 43 | """Remove .postXXXX postfix from version. 44 | 45 | >>> ForbidPost.drop_post('1.2.3.post123') 46 | '1.2.3' 47 | >>> ForbidPost.drop_post('1.2.3') 48 | '1.2.3' 49 | """ 50 | post_index = version.find('.post') 51 | if post_index >= 0: 52 | return version[:post_index] 53 | return version 54 | 55 | def post_forbidden(self, env_name): 56 | """Whether post versions are forbidden for passed environment name.""" 57 | return env_name in self.enabled_envs 58 | -------------------------------------------------------------------------------- /pipcompilemulti/features/limit_envs.py: -------------------------------------------------------------------------------- 1 | """ 2 | Limit environments 3 | ================== 4 | 5 | .. warning:: 6 | 7 | This flag is deprecated and will be removed in future releases. 8 | Use :ref:`limit-in-files` instead. 9 | 10 | 11 | By default ``pip-compile-multi`` compiles all ``.in`` files in ``requirements`` directory. 12 | To limit compilation to only a subset, use 13 | 14 | .. code-block:: text 15 | 16 | -n, --only-name TEXT Compile only for passed environment names and 17 | their references. Can be supplied multiple 18 | times. 19 | 20 | For example, to compile one file under Python2.7 and another under Python3.6, run: 21 | 22 | .. code-block:: text 23 | 24 | $ virtual-env27/bin/pip-compile-multi -n deps27 25 | Locking requirements/deps27.in to requirements/deps27.txt. References: [] 26 | $ virtual-env36/bin/pip-compile-multi -n deps36 27 | Locking requirements/deps36.in to requirements/deps36.txt. 
class LimitEnvs(LimitInPaths):
    """Limit discovered environments to specified subset.

    Deprecated in favor of LimitInPaths: translates environment *names*
    passed via ``--only-name`` into input file paths through the controller.
    """

    OPTION_NAME = 'include_names'
    CLICK_OPTION = ClickOption(
        long_option='--only-name',
        short_option='-n',
        multiple=True,
        help_text='Compile only for passed environment names and their '
                  'references. Can be supplied multiple times.',
    )

    def __init__(self, controller):
        # The controller maps environment names to input file paths.
        self._controller = controller
        # Fix: zero-argument super() is correct on all supported Pythons
        # (3.6+) and removes the need for the pylint suppression that
        # accompanied the legacy two-argument form.
        super().__init__()

    @property
    def direct_envs(self):
        """Set of input file paths included by command line options."""
        return {
            self._controller.compose_input_file_path(env_name)
            for env_name in self.value or []
        }
class Compatible(BaseFeature):
    """Pin selected packages with the compatible-release operator (``~=``)."""

    OPTION_NAME = 'compatible_patterns'
    CLICK_OPTION = ClickOption(
        long_option='--compatible',
        short_option='-c',
        multiple=True,
        help_text='Glob expression for packages with compatible (~=) '
                  'version constraint. Can be supplied multiple times.'
    )

    @property
    def patterns(self):
        """Configured glob patterns (empty list when the option is unset)."""
        return self.value or []

    def constraint(self, package_name):
        """Version operator for a package: ``~=`` when matched, else ``==``.

        >>> from pipcompilemulti.options import OPTIONS
        >>> feature = Compatible()
        >>> OPTIONS[feature.OPTION_NAME] = ['xxx']
        >>> feature.constraint('package')
        '=='
        >>> feature.constraint('xxx')
        '~='
        """
        if self.is_matched(package_name):
            return '~='
        return '=='

    def is_matched(self, package_name):
        """Whether the (case-insensitive) name matches a configured glob."""
        lowered = package_name.lower()
        return any(fnmatch(lowered, pattern) for pattern in self.patterns)
12 | """ 13 | return filter_sections(read_sections()) 14 | 15 | 16 | def filter_sections(sections): 17 | """Filter through pairs (name, options) 18 | leaving only those that match runtime. 19 | 20 | If no requirements sections found, return None. 21 | If some sections found, but none matches current runtime, return empty list. 22 | """ 23 | if not sections: 24 | return None 25 | jobs = [] 26 | matchers = python_version_matchers() 27 | for name, options in sections: 28 | target_version = options.pop('python', None) 29 | if target_version in matchers: 30 | jobs.append((name, options)) 31 | return jobs 32 | 33 | 34 | def read_sections(): 35 | """Read ini files and return list of pairs (name, options)""" 36 | config = configparser.ConfigParser() 37 | config.read(('requirements.ini', 'setup.cfg', 'tox.ini')) 38 | return [ 39 | ( 40 | name, 41 | { 42 | key: parse_value(key, config[name][key]) 43 | for key in config[name] 44 | } 45 | ) 46 | for name in config.sections() 47 | if 'requirements' in name 48 | ] 49 | 50 | 51 | def parse_value(key, value): 52 | """Parse value as comma-delimited list if key is in LIST_OPTIONS""" 53 | if key in LIST_OPTIONS: 54 | return [item.strip() 55 | for item in value.split(',')] 56 | return value 57 | 58 | 59 | def python_version_matchers(): 60 | """Return set of string representations of current python version""" 61 | version = sys.version_info 62 | patterns = [ 63 | "{0}", 64 | "{0}{1}", 65 | "{0}.{1}", 66 | ] 67 | matchers = [ 68 | pattern.format(*version) 69 | for pattern in patterns 70 | ] + [None] 71 | return set(matchers) 72 | -------------------------------------------------------------------------------- /README.rst: -------------------------------------------------------------------------------- 1 | ================= 2 | pip-compile-multi 3 | ================= 4 | 5 | .. image:: https://badge.fury.io/py/pip-compile-multi.png 6 | :target: https://badge.fury.io/py/pip-compile-multi 7 | 8 | .. 
image:: https://github.com/peterdemin/pip-compile-multi/actions/workflows/python36.yml/badge.svg 9 | :target: https://github.com/peterdemin/pip-compile-multi/actions/workflows/python36.yml 10 | 11 | .. image:: https://ci.appveyor.com/api/projects/status/1spvqh9hlqtv2a81?svg=true 12 | :target: https://ci.appveyor.com/project/peterdemin/pip-compile-multi 13 | 14 | .. image:: https://img.shields.io/pypi/pyversions/pip-compile-multi.svg 15 | :target: https://pypi.python.org/pypi/pip-compile-multi 16 | 17 | `Docs `_ 18 | 19 | Compile multiple requirements files to lock dependency versions. 20 | 21 | Install 22 | ------- 23 | 24 | .. code-block:: shell 25 | 26 | pip install pip-compile-multi 27 | 28 | Run 29 | ---- 30 | 31 | .. code-block:: shell 32 | 33 | pip-compile-multi 34 | 35 | 36 | Trusted by 37 | ---------- 38 | 39 | |uber| |mozilla| |twitter| 40 | 41 | |nih| |skydio| |pallets| 42 | 43 | 44 | Read the Docs 45 | ------------- 46 | 47 | * `Why use pip-compile-multi `_ 48 | * `How to start using pip-compile-multi `_ 49 | * `List of features `_ 50 | 51 | .. |nih| image:: docs/NIH_logo.svg 52 | :width: 200 px 53 | :height: 200 px 54 | :target: https://www.nih.gov/ 55 | 56 | .. |uber| image:: docs/Uber_Logo_Black_RGB.svg 57 | :width: 200 px 58 | :height: 200 px 59 | :target: https://www.uber.com/ 60 | 61 | .. |mozilla| image:: docs/moz-logo-bw-rgb.svg 62 | :width: 200 px 63 | :height: 200 px 64 | :target: https://www.mozilla.org/ 65 | 66 | .. |skydio| image:: docs/skydio-logo-black.svg 67 | :width: 200 px 68 | :height: 200 px 69 | :target: https://www.skydio.com/ 70 | 71 | .. |pallets| image:: docs/pallets.png 72 | :width: 200 px 73 | :height: 200 px 74 | :target: https://palletsprojects.com/ 75 | 76 | .. 
def discover(glob_pattern):
    """
    Find all files matching given glob_pattern,
    parse them, and return list of environments:

    Recursively follow referenced files not matched by glob_pattern.

    >>> import os
    >>> envs = discover(os.path.join('requirements', '*.in'))
    >>> # import pprint; pprint.pprint(envs)
    >>> envs == [
    ...     {'in_path': os.path.join('requirements', 'base.in'), 'name': 'base',
    ...      'refs': set()},
    ...     {'in_path': os.path.join('requirements', 'test.in'), 'name': 'test',
    ...      'refs': {'base.in'}},
    ...     {'in_path': os.path.join('requirements', 'local.in'), 'name': 'local',
    ...      'refs': {'test.in'}},
    ...     {'in_path': os.path.join('requirements', 'testwin.in'), 'name': 'testwin',
    ...      'refs': {'test.in'}}
    ... ]
    True
    """
    # Worklist is seeded by the glob; files referenced from inside
    # (e.g. "-r ../up.in") are appended below so they get parsed too.
    to_visit = deque(glob.glob(glob_pattern))
    envs, all_in_paths = {}, set()
    while to_visit:
        in_path = to_visit.pop()
        if in_path in all_in_paths:
            # Already parsed: the same file can be referenced many times.
            continue
        all_in_paths.add(in_path)
        envs[in_path] = {
            'in_path': in_path,
            'name': extract_env_name(in_path),
            'refs': Environment.parse_references(in_path),
        }
        # Queue referenced files, with paths normalized relative to the
        # referencing file, so indirect references are discovered as well.
        for ref in envs[in_path]['refs']:
            to_visit.append(fix_reference_path(
                orig_path=in_path,
                ref_path=ref
            ))
    return order_by_refs(envs.values())


def order_by_refs(envs):
    """Return topologically sorted list of environments.

    I.e. all referenced environments are placed before their references.
    """
    # Map each input file path to the set of paths it references,
    # normalized the same way as in discover().
    topology = {
        env['in_path']: set(fix_reference_path(env['in_path'], ref)
                            for ref in env['refs'])
        for env in envs
    }
    by_in_path = {
        env['in_path']: env
        for env in envs
    }
    return [
        by_in_path[in_path]
        for in_path in toposort_flatten(topology)
    ]
--hash=sha256:ac59f3f6b9726a49e15a649474539442cf76e0697e39df4869d25e68e880931b \ 22 | --hash=sha256:eba39d201ef937584ad3343df3581069085bacc95454c80188291d5b3ac7a249 23 | # via 24 | # -r requirements/base.txt 25 | # pip-tools 26 | pip-tools==6.0.1 \ 27 | --hash=sha256:3b0c7b95e8d3dfb011bb42cb38f356fcf5d0630480462b59c4d0a112b8d90281 \ 28 | --hash=sha256:50ec26df7710557ab574f19f7511830294999e6121b42b87473b48cb9984d788 29 | # via -r requirements/base.txt 30 | toml==0.10.2 \ 31 | --hash=sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b \ 32 | --hash=sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f 33 | # via 34 | # -r requirements/base.txt 35 | # pep517 36 | toposort==1.6 \ 37 | --hash=sha256:2ade83028dd067a1d43c142469cbaf4136b92fdc1c4303f16c40f126442fdaf3 \ 38 | --hash=sha256:a7428f56ef844f5055bb9e9e44b343983773ae6dce0fe5b101e08e27ffbd50ac 39 | # via -r requirements/base.txt 40 | typing-extensions==3.7.4.3 \ 41 | --hash=sha256:7cb407020f00f7bfc3cb3e7881628838e69d8f3fcab2f64742a5e76b2f841918 \ 42 | --hash=sha256:99d4073b617d30288f569d3f13d2bd7548c3a7e4c8de87db09a9d29bb3a4a60c \ 43 | --hash=sha256:dafc7639cde7f1b6e1acc0f457842a83e722ccca8eef5270af2d74792619a89f 44 | # via 45 | # -r requirements/base.txt 46 | # importlib-metadata 47 | zipp==3.4.1 \ 48 | --hash=sha256:3607921face881ba3e026887d8150cca609d517579abe052ac81fc5aeffdbd76 \ 49 | --hash=sha256:51cb66cc54621609dd593d1787f286ee42a5c0adbb4b29abea5a63edc3e03098 50 | # via 51 | # -r requirements/base.txt 52 | # importlib-metadata 53 | # pep517 54 | 55 | # The following packages are considered to be unsafe in a requirements file: 56 | pip==21.0.1 \ 57 | --hash=sha256:37fd50e056e2aed635dec96594606f0286640489b0db0ce7607f7e51890372d5 \ 58 | --hash=sha256:99bbde183ec5ec037318e774b0d8ae0a64352fe53b2c7fd630be1d07e94f41e5 59 | # via pip-tools 60 | -------------------------------------------------------------------------------- /setup.py: 
"""Package configuration"""

import os
from setuptools import setup, find_packages


VERSION = "2.4.0"


README = """
pip-compile-multi
=================

Compile multiple requirements files to lock dependency versions.

Install
-------

.. code-block:: shell

    pip install pip-compile-multi

Run
----

.. code-block:: shell

    pip-compile-multi


Links
-----

* Documentation: https://pip-compile-multi.readthedocs.io/en/latest/
* Releases: https://pypi.python.org/pypi/pip-compile-multi
* Code: https://github.com/peterdemin/pip-compile-multi
* Issue tracker: https://github.com/peterdemin/pip-compile-multi/issues

"""


# Changelog is appended to the long description shown on PyPI.
with open('HISTORY.rst') as fp:
    HISTORY = fp.read().replace('.. :changelog:', '')


# Runtime dependencies are maintained in requirements/base.in.
with open(os.path.join('requirements', 'base.in')) as fp:
    REQUIREMENTS = list(fp)


CONSOLE_SCRIPTS = [
    'pip-compile-multi = pipcompilemulti.cli_v1:cli',
]
if os.environ.get('PCM_ALPHA') == 'ON':
    # The experimental v2 CLI is opt-in via an environment variable.
    CONSOLE_SCRIPTS.append(
        'requirements = pipcompilemulti.cli_v2:cli'
    )


setup(
    name='pip-compile-multi',
    version=VERSION,
    description="Compile multiple requirements files "
                "to lock dependency versions",
    long_description=README + '\n\n' + HISTORY,
    author='Peter Demin',
    author_email='peterdemin@gmail.com',
    url='https://github.com/peterdemin/pip-compile-multi',
    include_package_data=True,
    packages=find_packages(exclude=['tests']),
    install_requires=REQUIREMENTS,
    python_requires='~=3.6',
    license="MIT",
    zip_safe=False,
    keywords='pip-compile-multi',
    classifiers=[
        'Development Status :: 5 - Production/Stable',
        'Intended Audience :: Developers',
        # Fixed: previously declared "BSD License", contradicting the
        # MIT license declared above and in LICENSE.txt.
        'License :: OSI Approved :: MIT License',
        'Natural Language :: English',
        'Environment :: Console',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.6',
        'Programming Language :: Python :: 3.7',
        'Programming Language :: Python :: 3.8',
        'Programming Language :: Python :: 3.9',
        'Programming Language :: Python :: Implementation :: CPython',
        'Programming Language :: Python :: Implementation :: PyPy',
        'Topic :: Utilities',
    ],
    entry_points={
        'console_scripts': CONSOLE_SCRIPTS,
    },
    setup_requires=['setuptools', 'wheel'],
)
def skipper(func):
    """Decorator that memorizes base_dir, in_ext and out_ext from OPTIONS
    and skips execution for duplicates.

    The wrapped function's result is cached per
    (base_dir, in_ext, out_ext) triple, so identical environments are
    verified only once even when several config sections share them.
    """
    seen = {}

    @functools.wraps(func)
    def wrapped():
        """Call func at most once per distinct option triple."""
        key = (OPTIONS['base_dir'], OPTIONS['in_ext'], OPTIONS['out_ext'])
        if key not in seen:
            seen[key] = func()
        return seen[key]
    return wrapped


def run_configurations(callback, sections_reader):
    """Parse configurations and execute callback for matching.

    Returns list of callback results, one per executed configuration
    (empty list when no section matches the current runtime).
    """
    base = {
        'base_dir': 'requirements',
        'in_ext': 'in',
        'out_ext': 'txt',
    }
    sections = sections_reader()
    if sections is None:
        logger.info("Configuration not found in .ini files. "
                    "Running with default settings")
        # BUG FIX: previously this branch fell through to the loop below
        # and crashed with TypeError while iterating over None.
        # Run the callback once with default settings instead.
        # (callback is recompile for all readers that may return None.)
        return [callback()]
    if sections == []:
        logger.info("Configuration does not match current runtime. "
                    "Exiting")
        return []
    results = []
    for section, options in sections:
        # Each section starts from a clean slate of default options.
        OPTIONS.clear()
        OPTIONS.update(base)
        OPTIONS.update(options)
        logger.debug("Running configuration from section \"%s\". OPTIONS: %r",
                     section, OPTIONS)
        results.append(callback())
    return results
class BaseFeature:
    """Base class for features activated by command line options.

    Subclasses define OPTION_NAME (the key in the global OPTIONS dict)
    and CLICK_OPTION (a ClickOption describing the CLI flag).
    """

    OPTION_NAME = None
    CLICK_OPTION = None

    def bind(self, command):
        """Bind feature's click option to passed command."""
        return self.CLICK_OPTION.decorate(command)

    def extract_option(self, kwargs):
        """Pop option value from kwargs and save it in OPTIONS.

        If option was saved before and new value is the same as default,
        then keep previous value.
        This allows passing options both before and after ``verify``.
        """
        new_value = kwargs.pop(self.CLICK_OPTION.argument_name)
        keep_previous = (
            self.OPTION_NAME in OPTIONS
            and new_value == self.CLICK_OPTION.default
        )
        if not keep_previous:
            OPTIONS[self.OPTION_NAME] = new_value

    @property
    def value(self):
        """Current option value, falling back to the click default."""
        return OPTIONS.get(self.OPTION_NAME, self.CLICK_OPTION.default)
class LimitInPaths(BaseFeature):
    """Limit discovered input files to specified subset.

    >>> from pipcompilemulti.options import OPTIONS
    >>> feature = LimitInPaths()
    >>> OPTIONS[feature.OPTION_NAME] = ['test.in']
    >>> feature.on_discover([
    ...     {'in_path': 'base.in', 'refs': []},
    ...     {'in_path': 'test.in', 'refs': ['base.in']},
    ...     {'in_path': 'docs.in', 'refs': []},
    ... ])
    >>> feature.included('base.in')
    True
    >>> feature.included('test.in')
    True
    >>> feature.included('docs.in')
    False
    """

    OPTION_NAME = 'include_in_paths'
    CLICK_OPTION = ClickOption(
        long_option='--only-path',
        short_option='-t',
        multiple=True,
        help_text='Compile only for passed input paths and their '
                  'references. Can be supplied multiple times.',
    )

    def __init__(self):
        # Populated by on_discover(); None until discovery has run.
        self._all_envs = None

    @property
    def direct_envs(self):
        """Input paths explicitly requested on the command line."""
        return set(self.value or [])

    def on_discover(self, env_confs):
        """Save set of all (recursively) included environments."""
        requested = self.direct_envs
        if not requested:
            # No limit requested means every environment is included.
            self._all_envs = [env['in_path'] for env in env_confs]
            return
        referenced = {
            ref
            for in_path in requested
            for ref in recursive_refs(env_confs, in_path)
        }
        self._all_envs = requested | referenced

    def included(self, in_path):
        """Whether environment is included directly or by reference."""
        return in_path in self._all_envs
Run ``pip-compile-multi``. It will produce two more files: 26 | 27 | * ``requirements/base.txt`` 28 | * ``requirements/test.txt`` 29 | 30 | 5. :ref:`Unpin ` packages in ``.in`` files. 31 | 6. Run ``pip-compile-multi`` again to upgrade the compiled files. 32 | 33 | .. _unpin: 34 | 35 | How to unpin packages 36 | ===================== 37 | 38 | No constraints 39 | ~~~~~~~~~~~~~~ 40 | 41 | Some projects don't constraint it's dependencies. In this case, there's nothing to unpin, no more work needed. 42 | 43 | Hard-pinned versions (==) 44 | ~~~~~~~~~~~~~~~~~~~~~~~~~ 45 | 46 | Some projects hard-pin all dependencies, just to be safe. 47 | Most likely, the code will run fine with the next patch release, but it's hard to tell for sure. 48 | For such cases, comprehensive test suite is vital. 49 | 50 | Your milage may vary, but generally the fastest workflow is as follows: 51 | 52 | 1. Remove all the constraints, by deleting everything after package names (e.g. ``==1.2.3``). 53 | 2. Recompile ``.txt`` requirements. 54 | 3. Install new versions. 55 | 4. Run tests. 56 | 5. If tests passed, job's done. 57 | 6. If some of the tests fails, it's likely that some of the original constraints 58 | was indeed required. Try to find what's the incompatible package version, maybe read package's CHANGELOG. 59 | Maybe the simpliest way is to return whatever constraint was originally set to move the needle. 60 | 7. After updating one of the ``.in`` files, go to step 2. 61 | 62 | Two-way constraints (>1,<2) 63 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~ 64 | 65 | The popular requirements policy is to add packages constrained to minor releases, like this:: 66 | 67 | uwsgi>2.0.0,<2.1.0 68 | 69 | The idea is that dependency is following SemVer and patch release won't break any functionality. 70 | Of course, the reality is that some times patch release introduces a bug, 71 | and some times next major release is backwards compatible. 
# Download the Python MSI installer for the given version and platform
# suffix ("" for 32-bit, ".amd64" for 64-bit) into the current directory.
# Returns the local file path; a previously downloaded file is reused.
function DownloadPython ($python_version, $platform_suffix) {
    $webclient = New-Object System.Net.WebClient
    $filename = "python-" + $python_version + $platform_suffix + ".msi"
    $url = $BASE_URL + $python_version + "/" + $filename

    $basedir = $pwd.Path + "\"
    $filepath = $basedir + $filename
    if (Test-Path $filename) {
        Write-Host "Reusing" $filepath
        return $filepath
    }

    # Download and retry up to 3 times in case of network transient errors.
    # (Comment previously said 5, contradicting $retry_attempts below.)
    Write-Host "Downloading" $filename "from" $url
    $retry_attempts = 3
    for($i=0; $i -lt $retry_attempts; $i++){
        try {
            $webclient.DownloadFile($url, $filepath)
            break
        }
        Catch [Exception]{
            # Brief pause before retrying; failures after the last
            # attempt are surfaced when the missing file is used later.
            Start-Sleep 1
        }
    }
    Write-Host "File saved at" $filepath
    return $filepath
}
# Bootstrap pip via get-pip.py unless pip.exe already exists.
function InstallPip ($python_home) {
    $pip_path = $python_home + "/Scripts/pip.exe"
    $python_path = $python_home + "/python.exe"
    if (Test-Path $pip_path) {
        Write-Host "pip already installed."
        return
    }
    Write-Host "Installing pip..."
    $client = New-Object System.Net.WebClient
    $client.DownloadFile($GET_PIP_URL, $GET_PIP_PATH)
    Write-Host "Executing:" $python_path $GET_PIP_PATH
    Start-Process -FilePath "$python_path" -ArgumentList "$GET_PIP_PATH" -Wait -Passthru
}

# Install a single package with the interpreter's pip.
function InstallPackage ($python_home, $pkg) {
    $pip_path = $python_home + "/Scripts/pip.exe"
    & $pip_path install $pkg
}

# Entry point: install Python, pip, and the CI build tools.
function main () {
    InstallPython $env:PYTHON_VERSION $env:PYTHON_ARCH $env:PYTHON
    InstallPip $env:PYTHON
    InstallPackage $env:PYTHON tox
    InstallPackage $env:PYTHON wheel
}

main
class SkipConstraintComments(BaseFeature):
    """Remove lines like ``-c file.txt`` from comments in output files."""

    OPTION_NAME = 'skip_constraints'
    CLICK_OPTION = ClickOption(
        long_option='--skip-constraints',
        is_flag=True,
        default=False,
        help_text='Remove constraints from "via" comments.',
    )
    # "# via" header of a multi-line annotation.
    _RE_VIA_COMMENT = re.compile(
        r'^\s*# via$'
    )
    # A "-c <file>" entry inside a "via" annotation.
    _RE_CONSTRAINT_COMMENT = re.compile(
        r'^\s*#\s+-c \S+$'
    )
    # A single package (or "-r <file>") entry inside a "via" annotation.
    _RE_PACKAGE_COMMENT = re.compile(
        r'^\s*#\s+((?:-r )?\S+)$'
    )

    @property
    def enabled(self):
        """Whether feature was explicitly enabled or not."""
        return self.value

    def process_dependency_comments(self, comment):
        """Remove constraint comments if feature is enabled."""
        if not self.enabled:
            return comment
        return self._drop_sink_comment(comment)

    def _drop_sink_comment(self, comment):
        r"""Erase sink constraint entries from a "via" comment.

        >>> feature = SkipConstraintComments()
        >>> feature._drop_sink_comment("\n# via\n# -c smth\n# pkg\n")
        '\n# via pkg'
        >>> feature._drop_sink_comment("  # via pkg")
        '  # via pkg'
        """
        lines = comment.splitlines()
        if len(lines) <= 2 or not self._RE_VIA_COMMENT.match(lines[1]):
            # Single-line "via" comments have no constraint entries.
            return comment
        kept = lines[:2] + [
            line for line in lines[2:]
            if not self._RE_CONSTRAINT_COMMENT.match(line)
        ]
        return "\n".join(self._collapse_single_via(kept))

    def _collapse_single_via(self, lines):
        r"""Combine via into a single line when it has only two lines.

        >>> feature = SkipConstraintComments()
        >>> feature._collapse_single_via(["", "# via", "# pkg"])
        ['', '# via pkg']
        >>> feature._collapse_single_via(["  # via pkg"])
        ['  # via pkg']
        >>> feature._collapse_single_via(["", "# via", "# -r file"])
        ['', '# via -r file']
        """
        if len(lines) != 3:
            return lines
        matchobj = self._RE_PACKAGE_COMMENT.match(lines[2])
        if not matchobj:
            return lines
        return [lines[0], lines[1] + ' ' + matchobj.group(1)]
class AddHashes(BaseFeature):
    """Write hashes for pinned packages in selected environments."""

    OPTION_NAME = 'add_hashes'
    CLICK_OPTION = ClickOption(
        long_option='--generate-hashes',
        short_option='-g',
        multiple=True,
        help_text='Input file name (base.in, requirements/test.in, etc) '
                  'that needs packages hashes. '
                  'Can be supplied multiple times.',

    )

    def __init__(self, controller):
        self._controller = controller
        # Populated by on_discover(); None until discovery has run.
        self._hashed_by_reference = None

    @property
    def enabled_in_paths(self):
        """Set of .in paths for which hashes were requested.

        For backwards compatibility, a passed value may also be a bare
        environment name, which is resolved to a path if it exists.
        """
        return {
            self._resolve(name_or_path)
            for name_or_path in (self.value or [])
        }

    def _resolve(self, name_or_path):
        """Map an env name to its .in path; pass paths through as-is."""
        candidate = self._controller.compose_input_file_path(name_or_path)
        if os.path.exists(candidate):
            return candidate
        return name_or_path

    def on_discover(self, env_confs):
        """Save environment names that need hashing.

        pip requires hashes for all packages if any package has them,
        so hashing propagates through the whole reference cluster.
        """
        hashed = set()
        for in_path in self.enabled_in_paths:
            hashed.update(reference_cluster(env_confs, in_path))
        self._hashed_by_reference = hashed

    def _needs_hashes(self, in_path):
        assert self._hashed_by_reference is not None
        return in_path in self._hashed_by_reference

    def pin_options(self, in_path):
        """Return --generate-hashes if env requires it."""
        if self._needs_hashes(in_path):
            return ['--generate-hashes']
        return []
class Autoresolve(BaseFeature):
    """Detect sink file and use it unless the feature is explicitly disabled."""

    OPTION_NAME = 'autoresolve'
    CLICK_OPTION = ClickOption(
        long_option='--autoresolve/--no-autoresolve',
        is_flag=True,
        # NOTE(review): the class docstring implies opt-out behavior,
        # but default=False makes autoresolve opt-in -- confirm intent.
        default=False,
        help_text='Automatically resolve cross-file conflicts.',
    )

    def __init__(self):
        # Set by on_discover(); stays None when no sink file exists.
        self._sink_path = None

    @property
    def enabled(self):
        """Whether the autoresolve option is turned on."""
        return self.value

    def on_discover(self, env_confs):
        """Detect and save the sink path for the discovered environments."""
        self._sink_path = self._find_sink(env_confs)

    def sink_path(self):
        """Return sink path if it's enabled. Otherwise None"""
        return self._sink_path if self.enabled else None

    @staticmethod
    def _find_sink(envs):
        """Try to find requirements sink.

        Sink is a requirements file that references all other
        requirement files.

        If no sink exists, return None.

        >>> Autoresolve._find_sink([
        ...     {'in_path': 'base', 'refs': set()},
        ...     {'in_path': 'test', 'refs': {'base'}},
        ...     {'in_path': 'local', 'refs': {'test', 'base'}},
        ... ])
        'local'
        >>> Autoresolve._find_sink([
        ...     {'in_path': 'base', 'refs': set()},
        ...     {'in_path': 'test', 'refs': {'base'}},
        ...     {'in_path': 'doc', 'refs': set()},
        ... ])
        >>> Autoresolve._find_sink([
        ...     {'in_path': 'base', 'refs': set()},
        ...     {'in_path': 'foo', 'refs': {'base'}},
        ...     {'in_path': 'bar', 'refs': {'base'}},
        ...     {'in_path': 'all', 'refs': {'foo', 'bar'}},
        ... ])
        'all'
        """
        all_envs = {env['in_path'] for env in envs}
        for env in envs:
            # An env is the sink when it (transitively) references every
            # discovered environment, counting itself.
            included_envs = set(recursive_refs(envs, env['in_path'])) | {env['in_path']}
            if all_envs == included_envs:
                return env['in_path']
        return None
42 | 43 | Submit Feedback 44 | ~~~~~~~~~~~~~~~ 45 | 46 | The best way to send feedback is to file an issue at https://github.com/peterdemin/pip-compile-multi/issues. 47 | 48 | If you are proposing a feature: 49 | 50 | * Explain in detail how it would work. 51 | * Keep the scope as narrow as possible, to make it easier to implement. 52 | * Remember that this is a volunteer-driven project, and that contributions 53 | are welcome :) 54 | 55 | Get Started! 56 | ------------ 57 | 58 | Ready to contribute? Here's how to set up `pip-compile-multi` for local development. 59 | 60 | 1. Fork the `pip-compile-multi` repo on GitHub. 61 | 2. Clone your fork locally:: 62 | 63 | $ git clone git@github.com:your_name_here/pip-compile-multi.git 64 | 65 | 3. Install your local copy into a virtualenv. Assuming you have virtualenvwrapper installed, this is how you set up your fork for local development:: 66 | 67 | $ mkvirtualenv pip-compile-multi 68 | $ cd pip-compile-multi/ 69 | $ python setup.py develop 70 | 71 | 4. Create a branch for local development:: 72 | 73 | $ git checkout -b name-of-your-bugfix-or-feature 74 | 75 | Now you can make your changes locally. 76 | 77 | 5. When you're done making changes, check that your changes pass flake8 and the tests, including testing other Python versions with tox:: 78 | 79 | $ flake8 pip-compile-multi.py test_pip-compile-multi.py 80 | $ py.test 81 | $ tox 82 | 83 | To get flake8 and tox, just pip install them into your virtualenv. 84 | 85 | 6. Commit your changes and push your branch to GitHub:: 86 | 87 | $ git add . 88 | $ git commit -m "Your detailed description of your changes." 89 | $ git push origin name-of-your-bugfix-or-feature 90 | 91 | 7. Submit a pull request through the GitHub website. 92 | 93 | Pull Request Guidelines 94 | ----------------------- 95 | 96 | Before you submit a pull request, check that it meets these guidelines: 97 | 98 | 1. The pull request should include tests. 99 | 2. 
def extract_env_name(file_path):
    """Return environment name for given requirements file path.

    The name is the file's base name with its extension removed.

    >>> extract_env_name("base.in")
    'base'
    >>> extract_env_name("sub/req.in")
    'req'
    """
    base_name = os.path.basename(file_path)
    name, _extension = os.path.splitext(base_name)
    return name
def merged_packages(env_packages, names):
    """Return union set of environment packages with given names.

    Raise RuntimeError when the same package resolves to different
    versions across the selected environments.

    >>> sorted(merged_packages(
    ...     {
    ...         'a': {'x': 1, 'y': 2},
    ...         'b': {'y': 2, 'z': 3},
    ...         'c': {'z': 3, 'w': 4}
    ...     },
    ...     ['a', 'b']
    ... ).items())
    [('x', 1), ('y', 2), ('z', 3)]
    """
    # Flatten the selected environments into sorted (package, version) pairs.
    pairs = sorted(itertools.chain.from_iterable(
        env_packages[name].items()
        for name in names
    ))
    result = {}
    conflicts = set()
    for package, version in pairs:
        if package not in result:
            result[package] = version
        elif result[package] != version:
            conflicts.add((package, version, result[package]))
    if conflicts:
        for package, version, other_version in sorted(conflicts):
            logger.error(
                "Package %s was resolved to different "
                "versions in different environments: %s and %s",
                package, version, other_version,
            )
        raise RuntimeError(
            "Please add constraints for the package version listed above"
        )
    return result
{'in_path': 'test', 'refs': ['base']}, 98 | ... {'in_path': 'local', 'refs': ['test']}, 99 | ... ], 'test')) 100 | >>> cluster == ['base', 'local', 'test'] 101 | True 102 | """ 103 | edges = [ 104 | set([env['in_path'], fix_reference_path(env['in_path'], ref)]) 105 | for env in envs 106 | for ref in env['refs'] 107 | ] 108 | prev, cluster = set(), set([in_path]) 109 | while prev != cluster: 110 | # While cluster grows 111 | prev = set(cluster) 112 | to_visit = [] 113 | for edge in edges: 114 | if cluster & edge: 115 | # Add adjacent nodes: 116 | cluster |= edge 117 | else: 118 | # Leave only edges that are out 119 | # of cluster for the next round: 120 | to_visit.append(edge) 121 | edges = to_visit 122 | return cluster 123 | -------------------------------------------------------------------------------- /requirements/local.txt: -------------------------------------------------------------------------------- 1 | # SHA1:cd048cffcfe17c5392659be8e6e64c90e1cc2451 2 | # 3 | # This file is autogenerated by pip-compile-multi 4 | # To update, run: 5 | # 6 | # pip-compile-multi 7 | # 8 | -r test.txt 9 | alabaster==0.7.12 10 | # via sphinx 11 | appdirs==1.4.4 12 | # via virtualenv 13 | astroid==2.5.1 14 | # via pylint 15 | babel==2.9.0 16 | # via sphinx 17 | bleach==3.3.0 18 | # via readme-renderer 19 | bump2version==1.0.1 20 | # via -r requirements/local.in 21 | certifi==2020.12.5 22 | # via requests 23 | cffi==1.14.5 24 | # via cryptography 25 | cfgv==3.2.0 26 | # via pre-commit 27 | chardet==4.0.0 28 | # via requests 29 | collective.checkdocs==0.2 30 | # via -r requirements/local.in 31 | colorama==0.4.4 32 | # via twine 33 | cryptography==3.4.6 34 | # via secretstorage 35 | distlib==0.3.1 36 | # via virtualenv 37 | docutils==0.16 38 | # via 39 | # collective.checkdocs 40 | # readme-renderer 41 | # sphinx 42 | filelock==3.0.12 43 | # via 44 | # tox 45 | # virtualenv 46 | flake8-polyfill==1.0.2 47 | # via pep8-naming 48 | flake8==3.9.0 49 | # via 50 | # -r 
requirements/local.in 51 | # flake8-polyfill 52 | identify==2.1.3 53 | # via pre-commit 54 | idna==2.10 55 | # via requests 56 | imagesize==1.2.0 57 | # via sphinx 58 | importlib-resources==5.1.2 59 | # via 60 | # pre-commit 61 | # virtualenv 62 | isort==5.7.0 63 | # via pylint 64 | jeepney==0.6.0 65 | # via 66 | # keyring 67 | # secretstorage 68 | jinja2==2.11.3 69 | # via sphinx 70 | keyring==23.0.0 71 | # via twine 72 | lazy-object-proxy==1.5.2 73 | # via astroid 74 | markupsafe==1.1.1 75 | # via jinja2 76 | mccabe==0.6.1 77 | # via 78 | # flake8 79 | # pylint 80 | nodeenv==1.5.0 81 | # via pre-commit 82 | pep8-naming==0.11.1 83 | # via -r requirements/local.in 84 | pipdeptree==2.0.0 85 | # via -r requirements/local.in 86 | pkginfo==1.7.0 87 | # via twine 88 | pre-commit==2.11.1 89 | # via -r requirements/local.in 90 | pycodestyle==2.7.0 91 | # via 92 | # -r requirements/local.in 93 | # flake8 94 | pycparser==2.20 95 | # via cffi 96 | pydocstyle==5.1.1 97 | # via -r requirements/local.in 98 | pyflakes==2.3.0 99 | # via flake8 100 | pygments==2.8.1 101 | # via 102 | # -r requirements/local.in 103 | # readme-renderer 104 | # sphinx 105 | pylint==2.7.2 106 | # via -r requirements/local.in 107 | pytz==2021.1 108 | # via babel 109 | pyyaml==5.4.1 110 | # via pre-commit 111 | readme-renderer==29.0 112 | # via twine 113 | requests-toolbelt==0.9.1 114 | # via twine 115 | requests==2.25.1 116 | # via 117 | # requests-toolbelt 118 | # sphinx 119 | # twine 120 | rfc3986==1.4.0 121 | # via twine 122 | secretstorage==3.3.1 123 | # via keyring 124 | six==1.15.0 125 | # via 126 | # bleach 127 | # readme-renderer 128 | # tox 129 | # virtualenv 130 | snowballstemmer==2.1.0 131 | # via 132 | # pydocstyle 133 | # sphinx 134 | sphinx==3.5.2 135 | # via -r requirements/local.in 136 | sphinxcontrib-applehelp==1.0.2 137 | # via sphinx 138 | sphinxcontrib-devhelp==1.0.2 139 | # via sphinx 140 | sphinxcontrib-htmlhelp==1.0.3 141 | # via sphinx 142 | sphinxcontrib-jsmath==1.0.1 143 | # 
via sphinx 144 | sphinxcontrib-qthelp==1.0.3 145 | # via sphinx 146 | sphinxcontrib-serializinghtml==1.1.4 147 | # via sphinx 148 | tox==3.23.0 149 | # via -r requirements/local.in 150 | tqdm==4.59.0 151 | # via twine 152 | twine==3.4.1 153 | # via -r requirements/local.in 154 | typed-ast==1.4.2 155 | # via astroid 156 | urllib3==1.26.4 157 | # via requests 158 | virtualenv==20.4.3 159 | # via 160 | # pre-commit 161 | # tox 162 | webencodings==0.5.1 163 | # via bleach 164 | wheel==0.36.2 165 | # via -r requirements/local.in 166 | wrapt==1.12.1 167 | # via astroid 168 | 169 | # The following packages are considered to be unsafe in a requirements file: 170 | # pip 171 | # setuptools 172 | -------------------------------------------------------------------------------- /docs/installation.rst: -------------------------------------------------------------------------------- 1 | Installation 2 | ------------ 3 | 4 | Python Version 5 | ============== 6 | 7 | We recommend using the latest version of Python 3. 8 | Pip-compile-multi supports Python 3.5 and newer, Python 2.7, and PyPy. 9 | 10 | Dependencies 11 | ============ 12 | 13 | These distributions will be installed automatically when installing pip-compile-multi. 14 | 15 | * `Click`_ is a framework for writing command line applications. 16 | * `pip-tools`_ is a set of command line tools to help you keep your pip-based 17 | packages fresh, even when you've pinned them. 18 | * `toposort`_ implements topological sort algorithm. Pip-compile-multi uses it 19 | to compose compilation order of requirements files. 20 | 21 | .. _Click: https://palletsprojects.com/p/click/ 22 | .. _pip-tools: https://github.com/jazzband/pip-tools 23 | .. _toposort: https://pypi.org/project/toposort/ 24 | 25 | Virtual environments 26 | ==================== 27 | 28 | Use a virtual environment to manage the dependencies for your project, both in 29 | development and in production. 30 | 31 | What problem does a virtual environment solve? 
The more Python projects you 32 | have, the more likely it is that you need to work with different versions of 33 | Python libraries, or even Python itself. Newer versions of libraries for one 34 | project can break compatibility in another project. 35 | 36 | Virtual environments are independent groups of Python libraries, one for each 37 | project. Packages installed for one project will not affect other projects or 38 | the operating system's packages. 39 | 40 | Python 3 comes bundled with the :mod:`venv` module to create virtual 41 | environments. If you're using a modern version of Python, you can continue on 42 | to the next section. 43 | 44 | If you're using Python 2, see :ref:`install-install-virtualenv` first. 45 | 46 | .. _install-create-env: 47 | 48 | Create an environment 49 | ~~~~~~~~~~~~~~~~~~~~~ 50 | 51 | Create a project folder and a :file:`venv` folder within: 52 | 53 | .. code-block:: sh 54 | 55 | $ mkdir myproject 56 | $ cd myproject 57 | $ python3 -m venv venv 58 | 59 | On Windows: 60 | 61 | .. code-block:: bat 62 | 63 | $ py -3 -m venv venv 64 | 65 | If you needed to install virtualenv because you are using Python 2, use 66 | the following command instead: 67 | 68 | .. code-block:: sh 69 | 70 | $ python2 -m virtualenv venv 71 | 72 | On Windows: 73 | 74 | .. code-block:: bat 75 | 76 | > \Python27\Scripts\virtualenv.exe venv 77 | 78 | .. _install-activate-env: 79 | 80 | Activate the environment 81 | ~~~~~~~~~~~~~~~~~~~~~~~~ 82 | 83 | Before you work on your project, activate the corresponding environment: 84 | 85 | .. code-block:: sh 86 | 87 | $ . venv/bin/activate 88 | 89 | On Windows: 90 | 91 | .. code-block:: bat 92 | 93 | > venv\Scripts\activate 94 | 95 | Your shell prompt will change to show the name of the activated environment. 96 | 97 | Install pip-compile-multi 98 | ========================= 99 | 100 | Within the activated environment, use the following command to install pip-compile-multi: 101 | 102 | .. 
code-block:: shell 103 | 104 | pip install pip-compile-multi 105 | 106 | pip-compile-multi is now installed. Check out the :doc:`/features` or go to the 107 | :doc:`Documentation Overview `. 108 | 109 | .. _install-install-virtualenv: 110 | 111 | Install virtualenv 112 | ================== 113 | 114 | If you are using Python 2, the venv module is not available. Instead, 115 | install `virtualenv`_. 116 | 117 | On Linux, virtualenv is provided by your package manager: 118 | 119 | .. code-block:: sh 120 | 121 | # Debian, Ubuntu 122 | $ sudo apt-get install python-virtualenv 123 | 124 | # CentOS, Fedora 125 | $ sudo yum install python-virtualenv 126 | 127 | # Arch 128 | $ sudo pacman -S python-virtualenv 129 | 130 | If you are on Mac OS X or Windows, download `get-pip.py`_, then: 131 | 132 | .. code-block:: sh 133 | 134 | $ sudo python2 Downloads/get-pip.py 135 | $ sudo python2 -m pip install virtualenv 136 | 137 | On Windows, as an administrator: 138 | 139 | .. code-block:: bat 140 | 141 | > \Python27\python.exe Downloads\get-pip.py 142 | > \Python27\python.exe -m pip install virtualenv 143 | 144 | Now you can return above and :ref:`install-create-env`. 145 | 146 | .. _virtualenv: https://virtualenv.pypa.io/ 147 | .. _get-pip.py: https://bootstrap.pypa.io/get-pip.py 148 | -------------------------------------------------------------------------------- /pipcompilemulti/features/upgrade.py: -------------------------------------------------------------------------------- 1 | """ 2 | Disable upgrades 3 | ================ 4 | 5 | When new dependencies are added it's tempting to keep everything else the same. 6 | To recompile ``.txt`` keeping satisfying version use ``--no-upgrade``: 7 | 8 | .. code-block:: text 9 | 10 | --upgrade / --no-upgrade Upgrade package version (default true) 11 | 12 | The option has no effect if there are no existing ``.txt`` files. 
class UpgradeSelected(BaseFeature):
    """Upgrade only specific packages in all environments."""

    OPTION_NAME = 'upgrade_packages'
    CLICK_OPTION = ClickOption(
        long_option='--upgrade-package',
        short_option='-P',
        multiple=True,
        help_text='Only upgrade named package. '
                  'Can be supplied multiple times.',
    )

    def __init__(self, controller):
        self._controller = controller
        self.reset()

    def reset(self):
        """Clear cached packages."""
        # Maps in_path -> set of lowercased package names pinned in the
        # corresponding compiled output file.
        self._env_packages_cache = {}

    @property
    def package_names(self):
        """List of package names to upgrade."""
        return self.value or []

    @property
    def active(self):
        """Whether selective upgrade is active."""
        return bool(self.package_names)

    def pin_options(self):
        """Pin command options for upgrading specific packages."""
        return [
            '--upgrade-package=' + package
            for package in self.package_names
        ]

    def has_package(self, in_path, package_name):
        """Whether specified package name is already in the outfile.

        Comparison is case-insensitive.
        NOTE(review): names with extras (e.g. pkg[extra]) are cached
        verbatim and will not match the bare package name -- confirm
        whether that is intended.
        """
        return package_name.lower() in self._get_packages(in_path)

    def _get_packages(self, in_path):
        """Return (and lazily cache) pinned package names for in_path."""
        if in_path not in self._env_packages_cache:
            self._env_packages_cache[in_path] = self._read_packages(
                self._compose_output_file_path(in_path)
            )
        return self._env_packages_cache[in_path]

    @staticmethod
    def _read_packages(outfile):
        """Parse lowercased pinned package names out of a compiled file.

        Returns an empty set if the file does not exist (yet).
        """
        try:
            # Compiled requirements files are UTF-8; do not depend on the
            # locale-preferred encoding (breaks on e.g. Windows cp1252).
            with open(outfile, encoding='utf-8') as fp:
                return {
                    line.split('==', 1)[0].lower()
                    for line in fp
                    if '==' in line
                }
        except OSError:
            # Act as if file is empty
            return set()

    def _compose_output_file_path(self, in_path):
        """Map a source .in path to its compiled output file path."""
        return self._controller.compose_output_file_path(in_path)

    def affected(self, in_path):
        """Whether environment was affected by upgraded packages.

        When selective upgrade is inactive, every environment counts
        as affected.
        """
        if not self.active:
            return True
        return any(
            self.has_package(in_path, package_name)
            for package_name in self.package_names
        )
-------------------------------------------------------------------------------- 1 | """ 2 | Check that ``pip-compile-multi`` was run after changes in ``.in`` file 3 | ====================================================================== 4 | 5 | ``pip-compile-multi`` adds a special line (before header) at the beginning of each generated file. 6 | This line contains a SHA1 hash of the ``.in`` file's contents. 7 | 8 | Command 9 | 10 | .. code-block:: shell 11 | 12 | $ pip-compile-multi verify 13 | Verifying that requirements/base.txt was generated from requirements/base.in. 14 | Success - comments match. 15 | Verifying that requirements/test.txt was generated from requirements/test.in. 16 | Success - comments match. 17 | Verifying that requirements/local.txt was generated from requirements/local.in. 18 | Success - comments match. 19 | 20 | recalculates hashes for ``.in`` files and compares them with the stored values. 21 | 22 | If verification fails, an error message is logged and exit code 1 is returned: 23 | 24 | .. code-block:: shell 25 | 26 | $ pip-compile-multi verify 27 | Verifying that requirements/base.txt was generated from requirements/base.in. 28 | Success - comments match. 29 | Verifying that requirements/test.txt was generated from requirements/test.in. 30 | FAILURE! 31 | Expecting: # SHA1:c93d71964e14b04f3c8327d16dbc4d6b1bbc3b1d 32 | Found: # SHA1:6c2562322ca1bdc8309b08581a2aa4efbb5a4534 33 | Verifying that requirements/local.txt was generated from requirements/local.in. 34 | Success - comments match. 35 | 36 | 37 | In big teams it might be a good idea to have this check in ``tox.ini``: 38 | 39 | .. 
code-block:: ini 40 | 41 | [testenv:verify] 42 | skipsdist = true 43 | skip_install = true 44 | deps = pip-compile-multi 45 | commands = pip-compile-multi verify 46 | whitelist_externals = pip-compile-multi 47 | """ 48 | 49 | import hashlib 50 | import logging 51 | 52 | from .discover import discover 53 | from .environment import Environment 54 | from .features import FEATURES 55 | 56 | 57 | logger = logging.getLogger("pip-compile-multi") 58 | 59 | 60 | def verify_environments(): 61 | """ 62 | For each environment verify hash comments and report failures. 63 | If any failure occured, exit with code 1. 64 | """ 65 | env_confs = discover(FEATURES.compose_input_file_path('*')) 66 | success = True 67 | for conf in env_confs: 68 | env = Environment(in_path=conf['in_path']) 69 | current_comment = generate_hash_comment(env.infile) 70 | robust_comment = generate_robust_hash_comment(env.infile) 71 | existing_comment = parse_hash_comment(env.outfile) 72 | if existing_comment in (robust_comment, current_comment): 73 | logger.info("OK - %s was generated from %s.", 74 | env.outfile, env.infile) 75 | else: 76 | logger.error("ERROR! 
def generate_robust_hash_comment(file_path):
    """
    Read file with given file_path and return string of format

        # SHA1:da39a3ee5e6b4b0d3255bfef95601890afd80709

    which is hex representation of SHA1 file content hash.
    File content is pre-processed by stripping comments, whitespace and
    newlines, and sorting lines, so cosmetic edits keep the hash stable.
    """
    with open(file_path, 'rt') as fp:
        # Keep only the text before any '#' comment, trimmed of whitespace.
        significant_lines = [line.split('#')[0].strip() for line in fp]
    digest = hashlib.sha1(
        ''.join(sorted(significant_lines)).encode("utf-8")
    ).hexdigest()
    return "# SHA1:{0}\n".format(digest)
10 | 11 | Comment may span multiple lines. 12 | 13 | >>> print(Dependency( 14 | ... "six==1.0\n " 15 | ... " --hash=abcdef\n" 16 | ... " # via\n" 17 | ... " # app\n" 18 | ... " # pkg" 19 | ... ).serialize()) 20 | six==1.0 \ 21 | --hash=abcdef 22 | # via 23 | # app 24 | # pkg 25 | >>> print(Dependency( 26 | ... "six==1.0\n" 27 | ... " # via\n" 28 | ... " # app\n" 29 | ... " # pkg" 30 | ... ).serialize()) 31 | six==1.0 32 | # via 33 | # app 34 | # pkg 35 | >>> # Old-style one-line 36 | >>> print(Dependency("six==1.0 # via pkg").serialize()) 37 | six==1.0 # via pkg 38 | >>> print(Dependency("-e https://site#egg=pkg==1\n # via lib").serialize()) 39 | https://site#egg=pkg==1 40 | # via lib 41 | """ 42 | 43 | COMMENT_JUSTIFICATION = 26 44 | 45 | # Example: 46 | # unidecode==0.4.21 # via myapp 47 | # [package] [version] [comment] 48 | RE_DEPENDENCY = re.compile( 49 | r'(?iu)(?P\S+)' 50 | r'==' 51 | r'(?P\S+)' 52 | r'(?P(?:\s*--hash=\S+)+)?' 53 | r'(?P(?:\s*#.*)+)?$' 54 | ) 55 | RE_EDITABLE_FLAG = re.compile( 56 | r'^-e ' 57 | ) 58 | # -e git+https://github.com/ansible/docutils.git@master#egg=docutils 59 | # -e "git+https://github.com/zulip/python-zulip-api.git@ 60 | # 0.4.1#egg=zulip==0.4.1_git&subdirectory=zulip" 61 | RE_VCS_DEPENDENCY = re.compile( 62 | r'(?iu)(?P-e)?' 63 | r'\s*' 64 | r'(?P\S+#egg=)' 65 | r'(?P[a-z0-9-_.]+)' 66 | r'(?P\S+)' 67 | r'(?P(?:\s*#.*)+)?$' 68 | ) 69 | 70 | def __init__(self, line): 71 | regular = self.RE_DEPENDENCY.match(line) 72 | if regular: 73 | self.valid = True 74 | self.is_vcs = False 75 | self.package = regular.group('package') 76 | self.version = regular.group('version').strip() 77 | self.hashes = (regular.group('hashes') or '').strip() 78 | self.comment = (regular.group('comment') or '').rstrip() 79 | return 80 | vcs = self.RE_VCS_DEPENDENCY.match(line) 81 | if vcs: 82 | self.valid = True 83 | self.is_vcs = True 84 | self.package = vcs.group('package') 85 | self.version = '' 86 | self.hashes = '' # No way! 
    def serialize(self):
        """
        Render dependency back in string using:
        ~= if package is internal
        == otherwise
        """
        if self.is_vcs:
            # VCS dependencies are reproduced verbatim, minus the editable flag.
            return self.without_editable(self.line).strip()
        equal = FEATURES.constraint(self.package)
        package_version = '{package}{equal}{version} '.format(
            package=self.without_editable(self.package),
            version=self.version,
            equal=equal,
        )
        if self.hashes:
            # Each --hash goes on its own backslash-continued line.
            hashes = self.hashes.split()
            lines = [package_version.strip()]
            lines.extend(hashes)
            result = ' \\\n    '.join(lines)
            if self.comment:
                result += FEATURES.process_dependency_comments(self.comment)
            return result
        else:
            if self.comment.startswith('\n'):
                # New-style multi-line "via" comment: keep it on its own lines.
                return (
                    package_version.rstrip() +
                    FEATURES.process_dependency_comments(self.comment).rstrip()
                )
            # Old-style one-line comment: pad the pin so comments line up
            # at COMMENT_JUSTIFICATION columns.
            return '{0}{1}'.format(
                package_version.ljust(self.COMMENT_JUSTIFICATION),
                self.comment.lstrip(),
            ).rstrip()  # rstrip for empty comment
132 | """ 133 | if 'git+git@' in line: 134 | # git+git can't be installed without -e: 135 | return line 136 | return cls.RE_EDITABLE_FLAG.sub('', line) 137 | 138 | def drop_post(self, in_path): 139 | """Remove .postXXXX postfix from version if needed.""" 140 | self.version = FEATURES.drop_post(in_path, self.package, self.version) 141 | -------------------------------------------------------------------------------- /HISTORY.rst: -------------------------------------------------------------------------------- 1 | History 2 | ======= 3 | 4 | 2.4.0 (2021-03-17) 5 | ------------------ 6 | 7 | * Update --index/--no-index to --emit-index-url/--no-emit-index-url 8 | for compatibility with pip-tools 6.0. 9 | (Issue `#243`_). 10 | 11 | .. _#243: https://github.com/peterdemin/pip-compile-multi/issues/243 12 | 13 | 2.3.2 (2021-02-18) 14 | ------------------ 15 | 16 | * Fix cross-feature logic for --autoresolve and --upgrade-package. 17 | (PR `#236`_). 18 | 19 | .. _#236: https://github.com/peterdemin/pip-compile-multi/pull/236 20 | 21 | 2.3.1 (2021-02-16) 22 | ------------------ 23 | 24 | * Fix for a bug introduced in 2.2.2 when running pip-compile-multi 25 | installed for Python 3, and having ``python`` symlinked to Python 2. 26 | (Issue `#233`_, PR `#234`_). 27 | 28 | .. _#233: https://github.com/peterdemin/pip-compile-multi/issues/233 29 | .. _#234: https://github.com/peterdemin/pip-compile-multi/pull/234 30 | 31 | 2.3.0 (2021-02-04) 32 | ------------------ 33 | 34 | * Make SHA1 hashes of input files in a more robust way (Issue `#215`_). 35 | Now it ignores changes to comments, whitespace and order of packages. 36 | 37 | .. _#215: https://github.com/peterdemin/pip-compile-multi/issues/215 38 | 39 | 2.2.2 (2021-01-29) 40 | ------------------ 41 | 42 | * Add support for calling using `python -m pipcompilemulti.cli_v1` notation. 43 | 44 | 45 | 2.2.1 (2021-01-29) 46 | ------------------ 47 | 48 | * Add ``--skip-constraints`` option. 
49 | * Fix bootstrapping for autoresolve case with missing output files. 50 | 51 | 52 | 2.2.0 (2020-01-22) 53 | ------------------ 54 | 55 | * Add ``--autoresolve`` option for conflict-free compilations (PR #224). 56 | * Auto-discover requirements in other directories by following references (PR #221). 57 | * Add support for new-style multiline "via" comments from pip-tools (PR #222). 58 | 59 | 60 | 2.1.0 (2020-08-19) 61 | ------------------ 62 | 63 | * Update dependencies. 64 | * Revert relative path normalization, introduced in #167 (thanks to @john-bodley #200). 65 | 66 | 67 | 2.0.0 (2020-05-18) 68 | ------------------ 69 | 70 | * Drop Python 2.7 support. pip-tools 4 no longer works with the latest pip, 71 | there's no way to continue Python 2.7 support. 72 | 73 | 74 | 1.5.9 (2020-03-23) 75 | ------------------ 76 | 77 | * Remove directory path from "via" annotations (thanks to @HALtheWise #166 #167). 78 | 79 | 80 | 1.5.8 (2019-09-27) 81 | ------------------ 82 | 83 | * Add option ``--annotate-index`` (thanks to @john-bodley #160). 84 | 85 | 1.5.7 (2019-09-27) 86 | ------------------ 87 | 88 | * Enable accidentially disabled ``--upgrade`` option. 89 | 90 | .. _1.5.6: 91 | 92 | 1.5.6 (2019-09-18) 93 | ------------------ 94 | 95 | * Minor fixes to packaging and documentation. 96 | 97 | Warning: this version is broken and won't pass ``--upgrade`` option to ``pip-compile``. 98 | If you have this version installed, you need to manually upgrade it. 99 | For example, using command:: 100 | 101 | pip-compile-multi --upgrade-package pip-compile-multi 102 | 103 | Like in this `PR `_. 104 | 105 | 1.5.4 (2019-09-16) 106 | ------------------ 107 | 108 | * Fixed MANIFEST to include features directory 109 | 110 | Warning: this version is broken and won't pass ``--upgrade`` option to ``pip-compile``. 111 | See notes for 1.5.6_ for details. 112 | 113 | 1.5.3 (2019-09-14) 114 | ------------------ 115 | 116 | * Refactored features to separate modules. 
117 | * Allow passing verify options after verify command. 118 | * Trim irrelevant entries from the traceback. 119 | 120 | Warning: this version is broken and won't install ``features`` directory. 121 | See notes for 1.5.6_ for details. 122 | 123 | 1.5.2 (2019-09-12) 124 | ------------------ 125 | 126 | * Added option ``--allow-unsafe``. (thanks to @mozbhearsum #157). 127 | 128 | 1.5.1 (2019-08-08) 129 | ------------------ 130 | 131 | * Added option ``--use-cache``. (thanks to @kolotev #149). 132 | 133 | 134 | 1.5.0 (2019-08-06) 135 | ------------------ 136 | 137 | * Changed short option for ``--forbid-post`` from ``-P`` to ``-p`` 138 | (as it conflicted with ``-P`` for ``--upgrade-package`` #147). 139 | 140 | 141 | 1.3.1 (2019-02-19) 142 | ------------------ 143 | 144 | * Re-removed workaround for future[s] packages in Python3 145 | 146 | 1.3.0 (2018-12-27) 147 | ------------------ 148 | 149 | * Introduced CLI v2 (disabled by default) 150 | 151 | 152 | 1.2.2 (2018-11-20) 153 | ------------------ 154 | 155 | * Removed workaround for future[s] packages in Python3 (no longer needed) 156 | 157 | 1.2.1 (2018-04-16) 158 | ------------------- 159 | 160 | * Fixed Restructured text formatting (thanks to @yigor) 161 | * Updated test dependencies (and hashes) 162 | 163 | 1.2.0 (2018-04-03) 164 | ------------------- 165 | 166 | * Added --forbid-post option 167 | 168 | 1.1.12 (2018-02-23) 169 | ------------------- 170 | 171 | * Added checks for conflicting package versions 172 | * Added support for VCS dependencies 173 | * Added --no-upgrade option 174 | 175 | 1.1.11 (2018-02-09) 176 | ------------------- 177 | 178 | * Propagate --only-name option to references 179 | * Fixed extension override options 180 | 181 | 1.1.10 (2018-02-09) 182 | ------------------- 183 | 184 | * Added ``--generate-hashes`` option 185 | 186 | 1.1.9 (2018-02-08) 187 | ------------------ 188 | 189 | * Fixed directory override option 190 | * Added --only-name option 191 | 192 | 1.1.8 (2018-01-25) 193 | 
"""Aggregate all features in a single controller."""

import os
from functools import wraps

from .add_hashes import AddHashes
from .annotate_index import AnnotateIndex
from .base_dir import BaseDir
from .compatible import Compatible
from .file_extensions import InputExtension, OutputExtension
from .forbid_post import ForbidPost
from .header import CustomHeader
from .limit_envs import LimitEnvs
from .limit_in_paths import LimitInPaths
from .unsafe import AllowUnsafe
from .upgrade import UpgradeAll, UpgradeSelected
from .use_cache import UseCache
from .autoresolve import Autoresolve
from .skip_constraint_comments import SkipConstraintComments


class FeaturesController:
    """Facade that fans each operation out to the individual feature objects."""
    # pylint: disable=too-many-instance-attributes

    def __init__(self):
        self.annotate_index = AnnotateIndex()
        self.use_cache = UseCache()
        self.input_extension = InputExtension()
        self.output_extension = OutputExtension()
        self.base_dir = BaseDir()
        self.compatible = Compatible()
        self.forbid_post = ForbidPost()
        self.add_hashes = AddHashes(self)
        self.upgrade_all = UpgradeAll(self)
        self.upgrade_selected = UpgradeSelected(self)
        self.limit_envs = LimitEnvs(self)
        self.limit_in_paths = LimitInPaths()
        self.header = CustomHeader()
        self.allow_unsafe = AllowUnsafe()
        self.autoresolve = Autoresolve()
        self.skip_constraint_comments = SkipConstraintComments()
        # NOTE: list order matters — it fixes both the click-option
        # registration order in bind() and every feature traversal below.
        self._features = [
            self.annotate_index,
            self.use_cache,
            self.input_extension,
            self.output_extension,
            self.base_dir,
            self.compatible,
            self.forbid_post,
            self.add_hashes,
            self.upgrade_all,
            self.upgrade_selected,
            self.limit_in_paths,
            self.limit_envs,
            self.header,
            self.allow_unsafe,
            self.autoresolve,
            self.skip_constraint_comments,
        ]

    def bind(self, command):
        """Attach every feature's click option to *command*."""
        @wraps(command)
        def trimmed_command(*args, **kwargs):
            """Pop feature-owned options from kwargs, then run the command."""
            for feature in self._features:
                feature.extract_option(kwargs)
            return command(*args, **kwargs)

        bound = trimmed_command
        for feature in self._features:
            bound = feature.bind(bound)
        return bound

    def pin_options(self, in_path):
        """Assemble command-line options for the pin (pip-compile) command."""
        contributions = (
            self.use_cache.pin_options(),
            self.add_hashes.pin_options(in_path),
            self.allow_unsafe.pin_options(),
            self.upgrade_all.pin_options(),
            self.upgrade_selected.pin_options(),
            self.annotate_index.pin_options(),
        )
        options = []
        for chunk in contributions:
            options.extend(chunk)
        return options

    def compose_input_file_path(self, basename):
        """Return input file path by environment name."""
        file_name = self.input_extension.compose_input_file_name(basename)
        return self.base_dir.file_path(file_name)

    def compose_output_file_path(self, in_path):
        """Return output file path by environment name."""
        return self.output_extension.compose_output_file_path(in_path)

    def drop_post(self, in_path, package_name, version):
        """Strip the .postN suffix when the environment or package forbids it."""
        must_drop = (
            self.forbid_post.post_forbidden(in_path)
            or self.compatible.is_matched(package_name)
        )
        if must_drop:
            return self.forbid_post.drop_post(version)
        return version

    def constraint(self, package_name):
        """Return ``~=`` if package_name matches patterns, ``==`` otherwise."""
        return self.compatible.constraint(package_name)

    def on_discover(self, env_confs):
        """Let each interested feature react to the discovered environments."""
        self.add_hashes.on_discover(env_confs)
        self.limit_envs.on_discover(env_confs)
        self.limit_in_paths.on_discover(env_confs)
        self.upgrade_selected.reset()
        self.autoresolve.on_discover(env_confs)

    def affected(self, in_path):
        """Whether environment is affected by upgrade command."""
        return bool(
            self.upgrade_all.enabled
            or self.upgrade_selected.affected(in_path)
            or in_path == self.autoresolve.sink_path()
        )

    def included(self, in_path):
        """Whether in_path is included directly or by reference."""
        if not self.limit_envs.included(in_path):
            return False
        return self.limit_in_paths.included(in_path)

    def get_header_text(self):
        """Text to put in the beginning of each generated file."""
        return self.header.text

    def sink_in_path(self):
        """Return input sink path if it's enabled. Otherwise None."""
        return self.autoresolve.sink_path()

    def sink_out_path(self):
        """Return sink output path if it's enabled and exists. Otherwise None."""
        sink_source = self.autoresolve.sink_path()
        if not sink_source:
            return None
        candidate = self.compose_output_file_path(sink_source)
        return candidate if os.path.exists(candidate) else None

    def process_dependency_comments(self, comment):
        """Process comments of locked dependency (e.g. # via xxx)."""
        return self.skip_constraint_comments.process_dependency_comments(comment)
# They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
    'sphinx.ext.autodoc',
    'sphinx.ext.doctest',
    'sphinx.ext.intersphinx',
    'sphinx.ext.viewcode',
]

# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']

# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
#
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'

# The master toctree document.
master_doc = 'index'

# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
# NOTE(review): newer Sphinx releases warn on None and expect e.g. 'en' —
# confirm the supported Sphinx version before changing.
language = None

# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This pattern also affects html_static_path and html_extra_path.
exclude_patterns = [u'_build', 'Thumbs.db', '.DS_Store']

# The name of the Pygments (syntax highlighting) style to use.
# None falls back to the theme's default style.
pygments_style = None


# -- Options for HTML output -------------------------------------------------

# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = 'alabaster'

# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
82 | 83 | html_theme_options = { 84 | 'github_user': 'peterdemin', 85 | 'github_repo': 'pip-compile-multi', 86 | 'github_type': 'star', 87 | 'github_banner': True, 88 | 'github_button': True, 89 | 'extra_nav_links': { 90 | 'Code': 'https://github.com/peterdemin/pip-compile-multi', 91 | 'Releases': 'https://pypi.org/project/pip-compile-multi/', 92 | 'Issue tracker': 'https://github.com/peterdemin/pip-compile-multi/issues', 93 | }, 94 | } 95 | 96 | # Add any paths that contain custom static files (such as style sheets) here, 97 | # relative to this directory. They are copied after the builtin static files, 98 | # so a file named "default.css" will overwrite the builtin "default.css". 99 | html_static_path = ['_static'] 100 | 101 | # Custom sidebar templates, must be a dictionary that maps document names 102 | # to template names. 103 | # 104 | # The default sidebars (for documents that don't match any pattern) are 105 | # defined by theme itself. Builtin themes are using these templates by 106 | # default: ``['localtoc.html', 'relations.html', 'sourcelink.html', 107 | # 'searchbox.html']``. 108 | 109 | html_sidebars = { 110 | '**': [ 111 | 'about.html', 112 | 'navigation.html', 113 | 'searchbox.html', 114 | 'sourcelink.html', 115 | ] 116 | } 117 | 118 | 119 | # -- Options for HTMLHelp output --------------------------------------------- 120 | 121 | # Output file base name for HTML help builder. 122 | htmlhelp_basename = 'pip-compile-multidoc' 123 | 124 | 125 | # -- Options for LaTeX output ------------------------------------------------ 126 | 127 | latex_elements = { 128 | # The paper size ('letterpaper' or 'a4paper'). 129 | # 130 | # 'papersize': 'letterpaper', 131 | 132 | # The font size ('10pt', '11pt' or '12pt'). 133 | # 134 | # 'pointsize': '10pt', 135 | 136 | # Additional stuff for the LaTeX preamble. 
137 | # 138 | # 'preamble': '', 139 | 140 | # Latex figure (float) alignment 141 | # 142 | # 'figure_align': 'htbp', 143 | } 144 | 145 | # Grouping the document tree into LaTeX files. List of tuples 146 | # (source start file, target name, title, 147 | # author, documentclass [howto, manual, or own class]). 148 | latex_documents = [ 149 | (master_doc, 'pip-compile-multi.tex', u'pip-compile-multi Documentation', 150 | u'Peter Demin', 'manual'), 151 | ] 152 | 153 | 154 | # -- Options for manual page output ------------------------------------------ 155 | 156 | # One entry per manual page. List of tuples 157 | # (source start file, name, description, authors, manual section). 158 | man_pages = [ 159 | (master_doc, 'pip-compile-multi', u'pip-compile-multi Documentation', 160 | [author], 1) 161 | ] 162 | 163 | 164 | # -- Options for Texinfo output ---------------------------------------------- 165 | 166 | # Grouping the document tree into Texinfo files. List of tuples 167 | # (source start file, target name, title, author, 168 | # dir menu entry, description, category) 169 | texinfo_documents = [ 170 | (master_doc, 'pip-compile-multi', u'pip-compile-multi Documentation', 171 | author, 'pip-compile-multi', 'One line description of project.', 172 | 'Miscellaneous'), 173 | ] 174 | 175 | 176 | # -- Options for Epub output ------------------------------------------------- 177 | 178 | # Bibliographic Dublin Core info. 179 | epub_title = project 180 | 181 | # The unique identifier of the text. This can be a ISBN number 182 | # or the project homepage. 183 | # 184 | # epub_identifier = '' 185 | 186 | # A unique identification for the text. 187 | # 188 | # epub_uid = '' 189 | 190 | # A list of files that should not be packed into the epub file. 
191 | epub_exclude_files = ['search.html'] 192 | 193 | 194 | # -- Extension configuration ------------------------------------------------- 195 | 196 | # -- Options for intersphinx extension --------------------------------------- 197 | 198 | # Example configuration for intersphinx: refer to the Python standard library. 199 | intersphinx_mapping = {'https://docs.python.org/': None} 200 | -------------------------------------------------------------------------------- /how-to.md: -------------------------------------------------------------------------------- 1 | # Managing dependencies in multi-platform Python project 2 | 3 | I'm going to describe the setup I made for my Python 4 | [project](https://github.com/peterdemin/pip-compile-multi). 5 | 6 | The project supports Python version 2.7 and 3.4+, PyPy, Linux, and Windows. 7 | It runs tests on every commit in Travis CI and AppVeyor. 8 | The project relies on a few runtime packages (5), 9 | a bunch for testing (12) and a lot for development (36). 10 | Some dependencies are shared, some specific to Python 2.7 and some to Windows. 11 | 12 | The project uses hard-pinned packages with hashes to verify the integrity of installed versions. 13 | 14 | ## Problems 15 | 16 | Let's summarize what we are solving: 17 | 18 | 1. The project has 3 set of dependencies: base, test and local which are needed respectfully during running, testing and developing. 19 | 2. Some packages are needed only under Python 2.7. 20 | 3. Other packages are needed only under Windows. 21 | 4. All packages must be hard-pinned and hashed. 22 | 23 | Alternatively, if turned into tasks: 24 | 25 | 1. Organize dependencies for each environment. 26 | 2. Orchestrate installation in different environments. 27 | 28 | ## Solutions 29 | 30 | The first tool we are using is [pip-compile-multi](https://github.com/peterdemin/pip-compile-multi), 31 | which is ~~ironically~~ the project we are using as an example. 
32 | 33 | It has verbose documentation, so I'll briefly outline how it is applied here. 34 | There are 6 `.in` files in the `requirements` directory: base.in, test.in, local.in, py27.in, local27.in, testwin.in. 35 | `base`, `test` and `local` are meant to be used under Python 3. 36 | `py27` and `local27` are holding Python 2.7 backports of Python 3 packages and version constraints for projects, which dropped Python 2 support in newer versions. 37 | `testwin` has a single entry: `colorama`, which is `pytest` dependency that is installed only under windows. 38 | 39 | First, we are pinning packages to the current versions with the following command: 40 | 41 | ``` 42 | $ pip-compile-multi -n local -n testwin 43 | ``` 44 | 45 | It produces files `base.txt`, `test.txt`, `local.txt` and `testwin.txt` with recursively retrieved hard-pinned package versions. 46 | 47 | The second command takes these `.txt` files and produce `.hash` files with attached package hashes: 48 | 49 | ``` 50 | $ pip-compile-multi -n local -n testwin -g local -g testwin -i txt -o hash 51 | ``` 52 | 53 | The same operation must be repeated separately for Python 2.7 packages: 54 | 55 | ``` 56 | $ pip-compile-multi -n py27 -n local27 57 | $ pip-compile-multi -n py27 -n local27 -g py27 -g local27 -i txt -o hash 58 | ``` 59 | 60 | Separation is required because [pip-tools](https://github.com/jazzband/pip-tools) 61 | can't traverse packages that are not required in the current runtime. 62 | 63 | To automate this tasks, I'm using the second tool - [tox](https://tox.readthedocs.io/en/latest/). 
64 | Here is my configuration: 65 | 66 | ``` 67 | [testenv:upgrade2] 68 | basepython = python2.7 69 | deps = pip-compile-multi 70 | commands = 71 | pip-compile-multi -n py27 -n local27 72 | pip-compile-multi -n py27 -n local27 -g py27 -g local27 -i txt -o hash 73 | 74 | [testenv:upgrade3] 75 | basepython = python3.6 76 | deps = pip-compile-multi 77 | commands = 78 | pip-compile-multi -n local -n testwin 79 | pip-compile-multi -n local -n testwin -g local -g testwin -i txt -o hash 80 | ``` 81 | 82 | To run it, I execute: 83 | 84 | ``` 85 | $ tox -e upgrade3 -e upgrade2 86 | ``` 87 | 88 | To run unit and doc tests locally I have somewhat complex testenv setup: 89 | 90 | ``` 91 | [tox] 92 | envlist = py{27,34,35,36,37}-{linux,windows} 93 | 94 | [testenv] 95 | platform = linux: linux 96 | windows: win32 97 | deps = 98 | linux: -r{toxinidir}/requirements/test.hash 99 | windows: -r{toxinidir}/requirements/testwin.hash 100 | py27: -r{toxinidir}/requirements/py27.hash 101 | commands = python -m pytest --doctest-modules pipcompilemulti.py test_pipcompilemulti.py 102 | ``` 103 | 104 | The setup says to use `test.hash` file under Linux, 105 | `testwin.hash` under Windows and 106 | add `py27.hash` if it is also running under Python 2.7. 
107 | 108 | AppVeyor runs these tests under Windows; its configuration file defines which Python version to use and what parameters to pass to `tox.ini`: 109 | 110 | ``` 111 | environment: 112 | matrix: 113 | - PYTHON: "C:\\Python27" 114 | PYTHON_VERSION: "2.7.8" 115 | PYTHON_ARCH: "32" 116 | TOX_ENV: "py27" 117 | 118 | - PYTHON: "C:\\Python34" 119 | PYTHON_VERSION: "3.4.1" 120 | PYTHON_ARCH: "32" 121 | TOX_ENV: "py34" 122 | 123 | - PYTHON: "C:\\Python35" 124 | PYTHON_VERSION: "3.5.4" 125 | PYTHON_ARCH: "32" 126 | TOX_ENV: "py35" 127 | 128 | - PYTHON: "C:\\Python36" 129 | PYTHON_VERSION: "3.6.4" 130 | PYTHON_ARCH: "32" 131 | TOX_ENV: "py36" 132 | 133 | init: 134 | - "ECHO %PYTHON% %PYTHON_VERSION% %PYTHON_ARCH%" 135 | 136 | install: 137 | - "appveyor/setup_build_env.cmd" 138 | - "powershell appveyor/install.ps1" 139 | 140 | build: false 141 | 142 | test_script: 143 | - "%PYTHON%/Scripts/tox -e %TOX_ENV%-windows" 144 | ``` 145 | 146 | `environment.matrix` defines `TOX_ENV` variable, which is passed to `tox` in `test_script` step. 147 | 148 | Travis CI has the following configuration to run tests under Linux: 149 | 150 | ``` 151 | # Config file for automatic testing at travis-ci.org 152 | 153 | language: python 154 | 155 | python: 156 | - "3.7-dev" 157 | - "3.6" 158 | - "3.5" 159 | - "3.4" 160 | - "2.7" 161 | - "pypy" 162 | 163 | # command to install dependencies 164 | install: 165 | - ./install_test_deps.sh 166 | - pip install -e . 167 | 168 | # command to run tests using coverage, e.g., python setup.py test 169 | script: python -m pytest --doctest-modules pipcompilemulti.py test_pipcompilemulti.py 170 | ``` 171 | 172 | I stashed dependency installation step into bash script `install_test_deps.sh`: 173 | 174 | ``` 175 | #!/bin/sh 176 | 177 | python --version 2>&1 | grep -q 'Python 3' 178 | 179 | if [ $? 
-eq 0 ] 180 | then 181 | # Python 3 182 | exec pip install -r requirements/test.hash 183 | else 184 | # Python 2 or PyPy 185 | exec pip install -r requirements/test.hash -r requirements/py27.hash 186 | fi 187 | ``` 188 | 189 | I could have reused tox here too with [tox-travis](https://github.com/tox-dev/tox-travis). 190 | Someday, maybe, I will. 191 | 192 | ## Conclusion 193 | 194 | Python is multi-platform language, but it lacks built-in tools for secure management of dependencies versions. 195 | Tools like `pip-compile-multi` and `tox` accompanied by CI services like `Travis-ci` and `AppVeyor` significantly reduce the effort. However, correct configuration takes time and requires skills and persistence. 196 | 197 | Described solution can be used as a boilerplate for project setup, or as guidance for building another tool that puts a framework for complex dependency management. 198 | -------------------------------------------------------------------------------- /requirements/test.hash: -------------------------------------------------------------------------------- 1 | # SHA1:8dc6aafa9751146dc66c292bac6b8004f4ce985f 2 | # 3 | # This file is autogenerated by pip-compile-multi 4 | # To update, run: 5 | # 6 | # pip-compile-multi 7 | # 8 | -r base.hash 9 | attrs==20.3.0 \ 10 | --hash=sha256:31b2eced602aa8423c2aea9c76a724617ed67cf9513173fd3a4f03e3a929c7e6 \ 11 | --hash=sha256:832aa3cde19744e49938b91fea06d69ecb9e649c93ba974535d08ad92164f700 12 | # via 13 | # -r requirements/test.txt 14 | # pytest 15 | coverage==5.5 \ 16 | --hash=sha256:004d1880bed2d97151facef49f08e255a20ceb6f9432df75f4eef018fdd5a78c \ 17 | --hash=sha256:01d84219b5cdbfc8122223b39a954820929497a1cb1422824bb86b07b74594b6 \ 18 | --hash=sha256:040af6c32813fa3eae5305d53f18875bedd079960822ef8ec067a66dd8afcd45 \ 19 | --hash=sha256:06191eb60f8d8a5bc046f3799f8a07a2d7aefb9504b0209aff0b47298333302a \ 20 | --hash=sha256:13034c4409db851670bc9acd836243aeee299949bd5673e11844befcb0149f03 \ 21 | 
--hash=sha256:13c4ee887eca0f4c5a247b75398d4114c37882658300e153113dafb1d76de529 \ 22 | --hash=sha256:184a47bbe0aa6400ed2d41d8e9ed868b8205046518c52464fde713ea06e3a74a \ 23 | --hash=sha256:18ba8bbede96a2c3dde7b868de9dcbd55670690af0988713f0603f037848418a \ 24 | --hash=sha256:1aa846f56c3d49205c952d8318e76ccc2ae23303351d9270ab220004c580cfe2 \ 25 | --hash=sha256:217658ec7187497e3f3ebd901afdca1af062b42cfe3e0dafea4cced3983739f6 \ 26 | --hash=sha256:24d4a7de75446be83244eabbff746d66b9240ae020ced65d060815fac3423759 \ 27 | --hash=sha256:2910f4d36a6a9b4214bb7038d537f015346f413a975d57ca6b43bf23d6563b53 \ 28 | --hash=sha256:2949cad1c5208b8298d5686d5a85b66aae46d73eec2c3e08c817dd3513e5848a \ 29 | --hash=sha256:2a3859cb82dcbda1cfd3e6f71c27081d18aa251d20a17d87d26d4cd216fb0af4 \ 30 | --hash=sha256:2cafbbb3af0733db200c9b5f798d18953b1a304d3f86a938367de1567f4b5bff \ 31 | --hash=sha256:2e0d881ad471768bf6e6c2bf905d183543f10098e3b3640fc029509530091502 \ 32 | --hash=sha256:30c77c1dc9f253283e34c27935fded5015f7d1abe83bc7821680ac444eaf7793 \ 33 | --hash=sha256:3487286bc29a5aa4b93a072e9592f22254291ce96a9fbc5251f566b6b7343cdb \ 34 | --hash=sha256:372da284cfd642d8e08ef606917846fa2ee350f64994bebfbd3afb0040436905 \ 35 | --hash=sha256:41179b8a845742d1eb60449bdb2992196e211341818565abded11cfa90efb821 \ 36 | --hash=sha256:44d654437b8ddd9eee7d1eaee28b7219bec228520ff809af170488fd2fed3e2b \ 37 | --hash=sha256:4a7697d8cb0f27399b0e393c0b90f0f1e40c82023ea4d45d22bce7032a5d7b81 \ 38 | --hash=sha256:51cb9476a3987c8967ebab3f0fe144819781fca264f57f89760037a2ea191cb0 \ 39 | --hash=sha256:52596d3d0e8bdf3af43db3e9ba8dcdaac724ba7b5ca3f6358529d56f7a166f8b \ 40 | --hash=sha256:53194af30d5bad77fcba80e23a1441c71abfb3e01192034f8246e0d8f99528f3 \ 41 | --hash=sha256:5fec2d43a2cc6965edc0bb9e83e1e4b557f76f843a77a2496cbe719583ce8184 \ 42 | --hash=sha256:6c90e11318f0d3c436a42409f2749ee1a115cd8b067d7f14c148f1ce5574d701 \ 43 | --hash=sha256:74d881fc777ebb11c63736622b60cb9e4aee5cace591ce274fb69e582a12a61a \ 44 | 
--hash=sha256:7501140f755b725495941b43347ba8a2777407fc7f250d4f5a7d2a1050ba8e82 \ 45 | --hash=sha256:796c9c3c79747146ebd278dbe1e5c5c05dd6b10cc3bcb8389dfdf844f3ead638 \ 46 | --hash=sha256:869a64f53488f40fa5b5b9dcb9e9b2962a66a87dab37790f3fcfb5144b996ef5 \ 47 | --hash=sha256:8963a499849a1fc54b35b1c9f162f4108017b2e6db2c46c1bed93a72262ed083 \ 48 | --hash=sha256:8d0a0725ad7c1a0bcd8d1b437e191107d457e2ec1084b9f190630a4fb1af78e6 \ 49 | --hash=sha256:900fbf7759501bc7807fd6638c947d7a831fc9fdf742dc10f02956ff7220fa90 \ 50 | --hash=sha256:92b017ce34b68a7d67bd6d117e6d443a9bf63a2ecf8567bb3d8c6c7bc5014465 \ 51 | --hash=sha256:970284a88b99673ccb2e4e334cfb38a10aab7cd44f7457564d11898a74b62d0a \ 52 | --hash=sha256:972c85d205b51e30e59525694670de6a8a89691186012535f9d7dbaa230e42c3 \ 53 | --hash=sha256:9a1ef3b66e38ef8618ce5fdc7bea3d9f45f3624e2a66295eea5e57966c85909e \ 54 | --hash=sha256:af0e781009aaf59e25c5a678122391cb0f345ac0ec272c7961dc5455e1c40066 \ 55 | --hash=sha256:b6d534e4b2ab35c9f93f46229363e17f63c53ad01330df9f2d6bd1187e5eaacf \ 56 | --hash=sha256:b7895207b4c843c76a25ab8c1e866261bcfe27bfaa20c192de5190121770672b \ 57 | --hash=sha256:c0891a6a97b09c1f3e073a890514d5012eb256845c451bd48f7968ef939bf4ae \ 58 | --hash=sha256:c2723d347ab06e7ddad1a58b2a821218239249a9e4365eaff6649d31180c1669 \ 59 | --hash=sha256:d1f8bf7b90ba55699b3a5e44930e93ff0189aa27186e96071fac7dd0d06a1873 \ 60 | --hash=sha256:d1f9ce122f83b2305592c11d64f181b87153fc2c2bbd3bb4a3dde8303cfb1a6b \ 61 | --hash=sha256:d314ed732c25d29775e84a960c3c60808b682c08d86602ec2c3008e1202e3bb6 \ 62 | --hash=sha256:d636598c8305e1f90b439dbf4f66437de4a5e3c31fdf47ad29542478c8508bbb \ 63 | --hash=sha256:deee1077aae10d8fa88cb02c845cfba9b62c55e1183f52f6ae6a2df6a2187160 \ 64 | --hash=sha256:ebe78fe9a0e874362175b02371bdfbee64d8edc42a044253ddf4ee7d3c15212c \ 65 | --hash=sha256:f030f8873312a16414c0d8e1a1ddff2d3235655a2174e3648b4fa66b3f2f1079 \ 66 | --hash=sha256:f0b278ce10936db1a37e6954e15a3730bea96a0997c26d7fee88e6c396c2086d \ 67 | 
--hash=sha256:f11642dddbb0253cc8853254301b51390ba0081750a8ac03f20ea8103f0c56b6 68 | # via 69 | # -r requirements/test.txt 70 | # pytest-cov 71 | iniconfig==1.1.1 \ 72 | --hash=sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3 \ 73 | --hash=sha256:bc3af051d7d14b2ee5ef9969666def0cd1a000e121eaea580d4a313df4b37f32 74 | # via 75 | # -r requirements/test.txt 76 | # pytest 77 | mock==4.0.3 \ 78 | --hash=sha256:122fcb64ee37cfad5b3f48d7a7d51875d7031aaf3d8be7c42e2bee25044eee62 \ 79 | --hash=sha256:7d3fbbde18228f4ff2f1f119a45cdffa458b4c0dee32eb4d2bb2f82554bac7bc 80 | # via -r requirements/test.txt 81 | more-itertools==8.7.0 \ 82 | --hash=sha256:5652a9ac72209ed7df8d9c15daf4e1aa0e3d2ccd3c87f8265a0673cd9cbc9ced \ 83 | --hash=sha256:c5d6da9ca3ff65220c3bfd2a8db06d698f05d4d2b9be57e1deb2be5a45019713 84 | # via -r requirements/test.txt 85 | packaging==20.9 \ 86 | --hash=sha256:5b327ac1320dc863dca72f4514ecc086f31186744b84a230374cc1fd776feae5 \ 87 | --hash=sha256:67714da7f7bc052e064859c05c595155bd1ee9f69f76557e21f051443c20947a 88 | # via 89 | # -r requirements/test.txt 90 | # pytest 91 | pluggy==0.13.1 \ 92 | --hash=sha256:15b2acde666561e1298d71b523007ed7364de07029219b604cf808bfa1c765b0 \ 93 | --hash=sha256:966c145cd83c96502c3c3868f50408687b38434af77734af1e9ca461a4081d2d 94 | # via 95 | # -r requirements/test.txt 96 | # pytest 97 | py==1.10.0 \ 98 | --hash=sha256:21b81bda15b66ef5e1a777a21c4dcd9c20ad3efd0b3f817e7a809035269e1bd3 \ 99 | --hash=sha256:3b80836aa6d1feeaa108e046da6423ab8f6ceda6468545ae8d02d9d58d18818a 100 | # via 101 | # -r requirements/test.txt 102 | # pytest 103 | pyparsing==2.4.7 \ 104 | --hash=sha256:c203ec8783bf771a155b207279b9bccb8dea02d8f0c9e5f8ead507bc3246ecc1 \ 105 | --hash=sha256:ef9d7589ef3c200abe66653d3f1ab1033c3c419ae9b9bdb1240a85b024efc88b 106 | # via 107 | # -r requirements/test.txt 108 | # packaging 109 | pytest-cov==2.11.1 \ 110 | --hash=sha256:359952d9d39b9f822d9d29324483e7ba04a3a17dd7d05aa6beb7ea01e359e5f7 \ 111 | 
--hash=sha256:bdb9fdb0b85a7cc825269a4c56b48ccaa5c7e365054b6038772c32ddcdc969da 112 | # via -r requirements/test.txt 113 | pytest==6.2.2 \ 114 | --hash=sha256:9d1edf9e7d0b84d72ea3dbcdfd22b35fb543a5e8f2a60092dd578936bf63d7f9 \ 115 | --hash=sha256:b574b57423e818210672e07ca1fa90aaf194a4f63f3ab909a2c67ebb22913839 116 | # via 117 | # -r requirements/test.txt 118 | # pytest-cov 119 | 120 | # The following packages are considered to be unsafe in a requirements file: 121 | -------------------------------------------------------------------------------- /docs/why.rst: -------------------------------------------------------------------------------- 1 | Motivation 2 | ---------- 3 | 4 | I will start from the very basics of dependency management and will go very slow, 5 | so if you feel bored, just scroll to the next section. 6 | 7 | Suppose you have a python project with following direct dependencies: 8 | 9 | .. code-block:: text 10 | 11 | click 12 | pip-tools 13 | 14 | (Yes I took pip-compile-multi as an example). 15 | Let's save them as-is in ``requirements/base.in``. 16 | Those are unpinned libraries. It means that whenever developer runs 17 | 18 | .. code-block:: shell 19 | 20 | pip install -r requirements/base.in 21 | 22 | they will get *some* version of these libraries. 23 | And the chances are that if several developers do the same over some period, 24 | some will have different dependency versions than others. 25 | Also, if the project is online service, one day it may stop working after 26 | redeployment because some of the dependencies had backward incompatible release. 27 | These backward incompatible changes are relatively common. 28 | 29 | To avoid this problem, Python developers are hard-pinning (aka locking) their dependencies. 30 | So instead of a list of libraries, they have something like: 31 | 32 | .. 
code-block:: text 33 | 34 | click==6.7 35 | pip-tools==1.11.0 36 | 37 | (To keep things neat let's put this into ``requirements/base.txt``) 38 | That's good for a starter. But there are two significant drawbacks: 39 | 40 | 1. Developers have to do non-trivial operations if they want to keep up with 41 | newer versions (that have bug fixes and performance improvements). 42 | 2. Indirect dependencies (that is dependencies of dependencies) may still have 43 | backward-incompatible releases, that break everything. 44 | 45 | Let's put aside point 1 and fight point 2. Let's do 46 | 47 | .. code-block:: shell 48 | 49 | pip freeze > requirements/base.txt 50 | 51 | Now we have full hierarchy of dependencies hard-pinned: 52 | 53 | .. code-block:: text 54 | 55 | click==6.7 56 | first==2.0.1 57 | pip-tools==1.11.0 58 | six==1.11.0 59 | 60 | That's great, and solves the main problem - service will be deployed exactly [1]_ 61 | the same every single time and all developers will have same environments. 62 | 63 | .. [1] That's not true. 64 | Someone could re-upload broken package under existing version on PyPI. 65 | For 100% reproducible builds use hashes. 66 | 67 | This case is so common that there already are some tools to solve it. 68 | Two worth mentioning are: 69 | 70 | 1. `Pip Tools`_ - a mature package that is enhanced by ``pip-compile-multi``. 71 | 2. `PipEnv`_ - a fresh approach that is going to become the "official" Python way of locking dependencies some day. 72 | 73 | .. _Pip Tools: https://github.com/jazzband/pip-tools 74 | .. _PipEnv: https://github.com/pypa/pipenv 75 | 76 | 77 | But what if the project uses some packages that are not required by the service itself? 78 | For example ``pytest``, that is needed to run unit tests, but should never 79 | be deployed to a production site. Or ``flake8`` - syntax checking tool. 80 | If they are installed in the current virtual environment, they will get into 81 | ``pip freeze`` output. 82 | That's no good. 
83 | And removing them manually from ``requirements/base.txt`` is not an option. 84 | But still, these packages must be pinned to ensure, that tests are running 85 | the same way on all development machines (and build server). 86 | 87 | So let's get hands dirty and put all the testing stuff into ``requirements/test.in``: 88 | 89 | .. code-block:: text 90 | 91 | -r base.in 92 | 93 | prospector 94 | pylint 95 | flake8 96 | mock 97 | six 98 | 99 | Note, how I put ``-r base.in`` in the beginning, so that *test* dependencies are installed 100 | along with the *base*. 101 | 102 | Now installation command is 103 | 104 | .. code-block:: shell 105 | 106 | pip install -e requirements/test.in 107 | 108 | For one single time (exceptionally to show how unacceptable is this task) 109 | let's manually compose ``requirements/test.txt``. 110 | After installation, run freeze to bring the whole list of all locked packages: 111 | 112 | .. code-block:: shell 113 | 114 | $ pip freeze 115 | astroid==1.6.0 116 | click==6.7 117 | dodgy==0.1.9 118 | first==2.0.1 119 | flake8==3.5.0 120 | flake8-polyfill==1.0.2 121 | isort==4.2.15 122 | lazy-object-proxy==1.3.1 123 | mccabe==0.6.1 124 | mock==2.0.0 125 | pbr==3.1.1 126 | pep8-naming==0.5.0 127 | pip-tools==1.11.0 128 | prospector==0.12.7 129 | pycodestyle==2.0.0 130 | pydocstyle==2.1.1 131 | pyflakes==1.6.0 132 | pylint==1.8.1 133 | pylint-celery==0.3 134 | pylint-common==0.2.5 135 | pylint-django==0.7.2 136 | pylint-flask==0.5 137 | pylint-plugin-utils==0.2.6 138 | PyYAML==3.12 139 | requirements-detector==0.5.2 140 | setoptconf==0.2.0 141 | six==1.11.0 142 | snowballstemmer==1.2.1 143 | wrapt==1.10.11 144 | 145 | Wow! That's quite a list! But we remember what goes into base.txt: 146 | 147 | 1. click 148 | 2. first 149 | 3. pip-tools 150 | 4. six 151 | 152 | Good, everything else can be put into ``requirements/test.txt``. 153 | But wait, ``six`` is included in ``test.in`` and is missing in ``test.txt``. 154 | That feels wrong. 
Ah, it's because we've moved ``six`` to the ``base.txt``. 155 | It's good that we didn't forget, that it should be in *base*. 156 | We might forget next time though. 157 | 158 | Why don't we automate it? That's what ``pip-compile-multi`` is for. 159 | 160 | Managing dependency versions in multiple environments 161 | ----------------------------------------------------- 162 | 163 | Let's rehearse. Example service has two groups of dependencies 164 | (or, as I call them, environments): 165 | 166 | .. code-block:: shell 167 | 168 | $ cat requirements/base.in 169 | click 170 | pip-tools 171 | 172 | $ cat requirements/test.in 173 | -r base.in 174 | prospector 175 | pylint 176 | flake8 177 | mock 178 | six 179 | 180 | To make automation even more appealing, let's add one more environment. 181 | I'll call it *local* - things that are needed during development, but are not 182 | required by tests, or service itself. 183 | 184 | .. code-block:: shell 185 | 186 | $ cat requirements/local.in 187 | -r test.in 188 | tox 189 | 190 | Now we want to put all *base* dependencies along with all their recursive dependencies 191 | in ``base.txt``, 192 | all recursive *test* dependencies except for *base* into ``test.txt``, 193 | and all recursive *local* dependencies except for *base* and *test* into ``local.txt``. 194 | 195 | .. code-block:: shell 196 | 197 | $ pip-compile-multi 198 | Locking requirements/base.in to requirements/base.txt. References: [] 199 | Locking requirements/test.in to requirements/test.txt. References: ['base'] 200 | Locking requirements/local.in to requirements/local.txt. References: ['base', 'test'] 201 | 202 | Yes, that's right. All the tedious dependency versions management job done with 203 | a single command that doesn't even have options. 204 | 205 | Now you can run ``git diff`` to review the changes and ``git commit`` to save them. 206 | To install the new set of versions run: 207 | 208 | .. 
code-block:: shell 209 | 210 | pip install -Ur requirements/local.txt 211 | 212 | It's a perfect time to run all the tests and make sure, that updates were 213 | backward compatible enough for your needs. 214 | More often than I'd like in big projects, it's not so. 215 | Let's say the new version of ``pylint`` dropped support of old Python version, 216 | that you still need to support. 217 | Then you open ``test.in`` and soft-pin it with a descriptive comment: 218 | 219 | .. code-block:: shell 220 | 221 | $ cat requirements/test.in 222 | -r base.in 223 | prospector 224 | pylint<1.8 # Newer versions dropped support for Python 2.4 225 | flake8 226 | mock 227 | six 228 | 229 | I know, this example is made up. But you get the idea. 230 | Then re-run ``pip-compile-multi`` to compile new ``test.txt`` and check new set. 231 | 232 | 233 | Benefits 234 | -------- 235 | 236 | I want to summarise, why ``pip-compile-multi`` might be a good addition to your project. 237 | Some of the benefits are achievable with other methods, but I want to be general: 238 | 239 | 1. Production will not suddenly break after redeployment because of 240 | backward incompatible dependency release. 241 | 2. Every development machine will have the same package versions. 242 | 3. Service still uses most recent versions of packages. 243 | And fresh means best here. 244 | 4. Dependencies are upgraded when the time is suitable for the service, 245 | not whenever they are released. 246 | 5. Different environments are separated into different files. 247 | 6. ``*.in`` files are small and manageable because they store only direct dependencies. 248 | 7. ``*.txt`` files are exhaustive and precise (but you don't need to edit them).
"""Environment class"""

import os
import re
import sys
import logging
import subprocess

from .dependency import Dependency
from .features import FEATURES
from .deduplicate import PackageDeduplicator
from .utils import extract_env_name


logger = logging.getLogger("pip-compile-multi")


class Environment(object):
    """A single requirements file (``*.in`` input plus its compiled output)."""

    # Matches "-r file.in" / "--requirement file.in" lines; the named group
    # must be called 'path' because parse_references() reads group('path').
    RE_REF = re.compile(r'^(?:-r|--requirement)\s*(?P<path>\S+).*$')
    # Matches whole-line comments (possibly indented), e.g. "  # via pytest".
    RE_COMMENT = re.compile(r'^\s*#.*$')

    def __init__(self, in_path, deduplicator=None):
        """
        Args:
            in_path: relative path to input file, e.g. requirements/base.in
            deduplicator: shared PackageDeduplicator; a fresh one is created
                when omitted (useful for tests).
        """
        self.in_path = in_path
        self._dedup = deduplicator or PackageDeduplicator()
        # Packages already pinned by referenced environments; pins for these
        # are dropped from this environment's output file.
        self.ignore = self._dedup.ignored_packages(in_path)
        # package name -> version, filled while fixing the lockfile.
        self.packages = {}
        self._outfile_pkg_names = None

    def maybe_create_lockfile(self):
        """
        Write recursive dependencies list to outfile unless the goal is
        to upgrade specific package(s) which don't already appear.
        Populate package ignore set in either case and return
        boolean indicating whether outfile was written.
        """
        logger.info(
            "Locking %s to %s. References: %r",
            self.infile,
            self.outfile,
            sorted(self._dedup.recursive_refs(self.in_path)),
        )
        if not FEATURES.affected(self.in_path):
            self.fix_lockfile()  # populate ignore set
            return False
        self.create_lockfile()
        return True

    def create_lockfile(self):
        """
        Write recursive dependencies list to outfile
        with hard-pinned versions.
        Then fix it.

        Raises:
            RuntimeError: if pip-compile exits with a non-zero status.
        """
        original_in_file = ""
        sink_out_path = FEATURES.sink_out_path()
        try:
            if sink_out_path and sink_out_path != self.outfile:
                # Temporarily append a constraint on the sink file to the
                # input; the original content is restored in ``finally``.
                original_in_file = self._read_infile()
                self._inject_sink()
            process = subprocess.Popen(
                self.pin_command,
                stdout=subprocess.PIPE,
                stderr=subprocess.PIPE,
            )
            stdout, stderr = process.communicate()
        finally:
            if original_in_file:
                self._restore_in_file(original_in_file)
        if process.returncode == 0:
            self.fix_lockfile()
        else:
            logger.critical("ERROR executing %s", ' '.join(self.pin_command))
            logger.critical("Exit code: %s", process.returncode)
            logger.critical(stdout.decode('utf-8'))
            logger.critical(stderr.decode('utf-8'))
            raise RuntimeError("Failed to pip-compile {0}".format(self.infile))

    @classmethod
    def parse_references(cls, filename):
        """
        Read filename line by line searching for pattern:

        -r file.in
        or
        --requirement file.in

        return set of matched file names.
        E.g. {'file1.in', 'file2.in'}
        """
        references = set()
        # "with" ensures the file handle is closed (the previous bare
        # ``open()`` leaked it until garbage collection).
        with open(filename) as fp:
            for line in fp:
                matched = cls.RE_REF.match(line)
                if matched:
                    references.add(matched.group('path'))
        return references

    @property
    def name(self):
        """Generate environment name from in_path."""
        return extract_env_name(self.in_path)

    @property
    def infile(self):
        """Path of the input file"""
        return self.in_path

    @property
    def outfile(self):
        """Path of the output file"""
        return FEATURES.compose_output_file_path(self.in_path)

    @property
    def pin_command(self):
        """Compose pip-compile shell command"""
        # Use the same interpreter binary
        python = sys.executable or 'python'
        parts = [
            python, '-m', 'piptools', 'compile',
            '--no-header',
            '--verbose',
        ]
        parts.extend(FEATURES.pin_options(self.in_path))
        parts.extend(['--output-file', self.outfile, self.infile])
        return parts

    def fix_lockfile(self):
        """Run each section of outfile through fix_pin"""
        with open(self.outfile, 'rt') as fp:
            sections = [
                self.fix_pin(section)
                for section in self.parse_sections(self.concatenated(fp))
            ]
        with open(self.outfile, 'wt') as fp:
            fp.writelines([
                section + '\n'
                for section in sections
                if section is not None  # fix_pin returns None for ignored pins
            ])
        self._dedup.register_packages_for_env(self.in_path, self.packages)

    @staticmethod
    def concatenated(fp):
        r"""Read lines from fp concatenating on backslash (\\)

        >>> env = Environment('')
        >>> list(env.concatenated([
        ...     'pkg', 'pkg # comment', 'pkg', '# comment', '# one more',
        ...     'foo', '    # via', '', '    # pkg',
        ... ]))
        ['pkg', 'pkg # comment', 'pkg', '# comment', '# one more', 'foo', '    # via', '', '    # pkg']
        """
        line_parts = []
        for line in fp:
            line = line.rstrip()
            if line.endswith('\\'):
                line_parts.append(line[:-1].rstrip())
            else:
                line_parts.append(line)
                yield ' '.join(line_parts)
                line_parts[:] = []
        if line_parts:
            # Impossible:
            raise RuntimeError("Compiled file ends with backslash \\")

    def parse_sections(self, lines):
        r"""Combine lines with following comments into sections.

        >>> env = Environment('')
        >>> list(env.parse_sections([
        ...     'pkg', 'pkg # comment', 'pkg', '# comment', '# one more',
        ...     'foo', '    # via', '', '    # pkg',
        ... ]))
        ['pkg', 'pkg # comment', 'pkg\n# comment\n# one more', 'foo\n    # via', '\n    # pkg']
        """
        section = []
        for line in lines:
            if self.RE_COMMENT.match(line):
                # Comment lines attach to the preceding pin.
                section.append(line)
            else:
                if section:
                    yield '\n'.join(section)
                section = [line]
        if section:
            yield '\n'.join(section)

    def fix_pin(self, section):
        """
        Fix dependency by removing post-releases from versions
        and loosening constraints on internal packages.
        Drop packages from ignore set.

        Also populate packages set.

        Returns the serialized pin, the section unchanged (if it is not a
        valid dependency), or None when the pin belongs to a referenced
        environment and must be dropped.

        Raises:
            RuntimeError: when the same package resolved to conflicting
                versions in different environments.
        """
        dep = Dependency(section)
        if dep.valid:
            if dep.package in self.ignore:
                ignored_version = self.ignore[dep.package]
                if ignored_version is not None:
                    # ignored_version can be None to disable conflict detection
                    if dep.version and dep.version != ignored_version:
                        logger.error(
                            "Package %s was resolved to different "
                            "versions in different environments: %s and %s",
                            dep.package, dep.version, ignored_version,
                        )
                        raise RuntimeError(
                            "Please add constraints for the package "
                            "version listed above"
                        )
                return None
            self.packages[dep.package] = dep.version
            dep.drop_post(self.in_path)
            return dep.serialize()
        return section.rstrip()

    def add_references(self, other_in_paths):
        """Add references to other_in_paths in outfile"""
        if not other_in_paths:
            # Skip on empty list
            return
        with open(self.outfile, 'rt') as fp:
            header, body = self.split_header(fp)
        with open(self.outfile, 'wt') as fp:
            fp.writelines(header)
            fp.writelines(
                '-r {0}\n'.format(
                    FEATURES.compose_output_file_path(other_in_path)
                )
                for other_in_path in sorted(other_in_paths)
            )
            fp.writelines(body)

    @staticmethod
    def split_header(fp):
        """
        Read file pointer and return pair of lines lists:
        first - header, second - the rest.
        """
        body_start, header_ended = 0, False
        lines = []
        for line in fp:
            if line.startswith('#') and not header_ended:
                # Header text
                body_start += 1
            else:
                header_ended = True
            lines.append(line)
        return lines[:body_start], lines[body_start:]

    def replace_header(self, header_text):
        """Replace pip-compile header with custom text"""
        with open(self.outfile, 'rt') as fp:
            _, body = self.split_header(fp)
        with open(self.outfile, 'wt') as fp:
            fp.write(header_text)
            fp.writelines(body)

    def _read_infile(self):
        # Return the current content of the input file (for later restore).
        with open(self.infile, "rt") as fp:
            return fp.read()

    def _restore_in_file(self, content):
        # Overwrite the input file with previously saved content.
        with open(self.infile, "wt") as fp:
            return fp.write(content)

    def _inject_sink(self):
        # Append "-c <sink>" to the input file so pip-compile respects the
        # sink environment's pins; path is made relative to the input file.
        rel_sink_out_path = os.path.normpath(os.path.relpath(
            FEATURES.sink_out_path(),
            os.path.dirname(self.infile),
        ))
        with open(self.infile, "at") as fp:
            return fp.write("\n\n-c {}\n".format(rel_sink_out_path))
"""Tests for pip-compile-multi"""

import os
try:
    from unittest import mock
except ImportError:
    import mock

import pytest

from pipcompilemulti.environment import Environment
from pipcompilemulti.dependency import Dependency
from pipcompilemulti.options import OPTIONS
from pipcompilemulti.deduplicate import PackageDeduplicator
from pipcompilemulti.utils import merged_packages, reference_cluster
from pipcompilemulti.features.header import DEFAULT_HEADER


PIN = 'pycodestyle==2.3.1 # via flake8'
CMPT = 'pycodestyle~=2.3.1 # via flake8'


def test_fix_compatible_pin():
    """Test == is replaced with ~= for compatible dependencies"""
    env = Environment('xxx')
    with mock.patch.dict(OPTIONS, {'compatible_patterns': ['pycode*']}):
        result = env.fix_pin(PIN)
    assert result == CMPT


def test_no_fix_incompatible_pin():
    """Test dependency is left unchanged by default"""
    env = Environment('')
    result = env.fix_pin(PIN)
    assert result == PIN


def test_pin_is_ommitted_if_set_to_ignore():
    """Test a pin already provided by a referenced environment is dropped"""
    dedup = PackageDeduplicator()
    dedup.on_discover([
        {'in_path': 'a', 'refs': ['b']},
        {'in_path': 'b', 'refs': []}
    ])
    dedup.register_packages_for_env('b', {'pycodestyle': '2.3.1'})
    env = Environment('a', deduplicator=dedup)
    result = env.fix_pin(PIN)
    assert result is None


def test_post_releases_are_kept_by_default():
    """Test postXXX versions are kept unchanged by default"""
    pin = 'pycodestyle==2.3.1.post2231 # via flake8'
    env = Environment('')
    result = env.fix_pin(pin)
    assert result == pin


def test_forbid_post_releases():
    """Test postXXX versions are truncated to release when forbid_post is set"""
    pin = 'pycodestyle==2.3.1.post2231 # via flake8'
    with mock.patch.dict(OPTIONS, {'forbid_post': ['env']}):
        env = Environment('env')
        result = env.fix_pin(pin)
    assert result == PIN


@pytest.mark.parametrize('in_path, refs', [
    ('base.in', set()),
    ('test.in', {'base.in'}),
    ('local.in', {'test.in'}),
])
def test_parse_references(in_path, refs):
    """Check references are parsed for sample files"""
    env = Environment('')
    result = env.parse_references(
        os.path.join('requirements', in_path)
    )
    assert result == refs


def test_split_header():
    """Check that default header is parsed from autogenerated base.txt"""
    with open(os.path.join('requirements', 'base.txt')) as fp:
        header, _ = Environment.split_header(fp)
    expected = [
        line + '\n'
        for line in DEFAULT_HEADER.splitlines()
    ]
    assert header[1:] == expected


def test_concatenation():
    """Check lines are joined and extra spaces removed"""
    lines = Environment.concatenated([
        'abc \\\n',
        '   123 \\\n',
        '?\n',
        'MMM\n',
    ])
    assert list(lines) == ['abc 123 ?', 'MMM']


def test_parse_hashes_with_comment():
    """Check that sample is parsed"""
    dep = Dependency(
        'lib==ver --hash=123 --hash=abc # comment'
    )
    assert dep.hashes == '--hash=123 --hash=abc'


def test_parse_hashes_without_comment():
    """Check that sample is parsed"""
    dep = Dependency(
        'lib==ver --hash=123 --hash=abc'
    )
    assert dep.valid
    assert dep.hashes == '--hash=123 --hash=abc'


def test_serialize_hashes():
    """Check serialization in pip-tools style"""
    dep = Dependency(
        'lib==ver --hash=123 --hash=abc # comment'
    )
    text = dep.serialize()
    assert text == (
        "lib==ver \\\n"
        "    --hash=123 \\\n"
        "    --hash=abc # comment"
    )


def test_reference_cluster():
    """Check cluster propagates both ways"""
    for entry in ['base', 'test', 'local', 'doc']:
        cluster = reference_cluster([
            {'in_path': 'base', 'refs': []},
            {'in_path': 'test', 'refs': ['base']},
            {'in_path': 'local', 'refs': ['test']},
            {'in_path': 'doc', 'refs': ['base']},
            {'in_path': 'side', 'refs': []},
        ], entry)
        assert cluster == set(['base', 'doc', 'local', 'test'])


def test_parse_vcs_dependencies():
    """
    Check VCS support
    https://pip.pypa.io/en/stable/reference/pip_install/#vcs-support
    """
    cases = (
        "git://git.myproject.org/MyProject#egg=MyProject",
        "-e git://git.myproject.org/MyProject#egg=MyProject",
        "git+http://git.myproject.org/MyProject#egg=MyProject",
        "-e git+http://git.myproject.org/MyProject#egg=MyProject",
        "git+https://git.myproject.org/MyProject#egg=MyProject",
        "-e git+https://git.myproject.org/MyProject#egg=MyProject",
        "git+ssh://git.myproject.org/MyProject#egg=MyProject",
        "-e git+ssh://git.myproject.org/MyProject#egg=MyProject",
        "git+git://git.myproject.org/MyProject#egg=MyProject",
        "-e git+git://git.myproject.org/MyProject#egg=MyProject",
        "git+file://git.myproject.org/MyProject#egg=MyProject",
        "-e git+file://git.myproject.org/MyProject#egg=MyProject",
        "-e git+git@git.myproject.org:MyProject#egg=MyProject",
        # Passing branch names, a commit hash or a tag name is possible like so:
        "git://git.myproject.org/MyProject.git@master#egg=MyProject",
        "-e git://git.myproject.org/MyProject.git@master#egg=MyProject",
        "git://git.myproject.org/MyProject.git@v1.0#egg=MyProject",
        "-e git://git.myproject.org/MyProject.git@v1.0#egg=MyProject",
        "git://git.myproject.org/MyProject.git@"
        "da39a3ee5e6b4b0d3255bfef95601890afd80709#egg=MyProject",
        "-e git://git.myproject.org/MyProject.git@"
        "da39a3ee5e6b4b0d3255bfef95601890afd80709#egg=MyProject",
        # Mercurial
        "hg+http://hg.myproject.org/MyProject#egg=MyProject",
        "-e hg+http://hg.myproject.org/MyProject#egg=MyProject",
        "hg+https://hg.myproject.org/MyProject#egg=MyProject",
        "-e hg+https://hg.myproject.org/MyProject#egg=MyProject",
        "hg+ssh://hg.myproject.org/MyProject#egg=MyProject",
        "-e hg+ssh://hg.myproject.org/MyProject#egg=MyProject",
        # You can also specify a revision number, a revision hash,
        # a tag name or a local branch name like so:
        "hg+http://hg.myproject.org/MyProject@da39a3ee5e6b#egg=MyProject",
        "-e hg+http://hg.myproject.org/MyProject@da39a3ee5e6b#egg=MyProject",
        "hg+http://hg.myproject.org/MyProject@2019#egg=MyProject",
        "-e hg+http://hg.myproject.org/MyProject@2019#egg=MyProject",
        "hg+http://hg.myproject.org/MyProject@v1.0#egg=MyProject",
        "-e hg+http://hg.myproject.org/MyProject@v1.0#egg=MyProject",
        "hg+http://hg.myproject.org/MyProject@special_feature#egg=MyProject",
        "-e hg+http://hg.myproject.org/MyProject@special_feature#egg=MyProject",
        # Subversion
        "svn+svn://svn.myproject.org/svn/MyProject#egg=MyProject",
        "-e svn+svn://svn.myproject.org/svn/MyProject#egg=MyProject",
        "svn+http://svn.myproject.org/svn/MyProject/trunk@2019#egg=MyProject",
        "-e svn+http://svn.myproject.org/svn/MyProject/trunk@2019#egg=MyProject",
        # Bazaar
        "bzr+http://bzr.myproject.org/MyProject/trunk#egg=MyProject",
        "-e bzr+http://bzr.myproject.org/MyProject/trunk#egg=MyProject",
        "bzr+sftp://user@myproject.org/MyProject/trunk#egg=MyProject",
        "-e bzr+sftp://user@myproject.org/MyProject/trunk#egg=MyProject",
        "bzr+ssh://user@myproject.org/MyProject/trunk#egg=MyProject",
        "-e bzr+ssh://user@myproject.org/MyProject/trunk#egg=MyProject",
        "bzr+ftp://user@myproject.org/MyProject/trunk#egg=MyProject",
        "-e bzr+ftp://user@myproject.org/MyProject/trunk#egg=MyProject",
        "bzr+lp:MyProject#egg=MyProject",
        "-e bzr+lp:MyProject#egg=MyProject",
        # Tags or revisions can be installed like so:
        "bzr+https://bzr.myproject.org/MyProject/trunk@2019#egg=MyProject",
        "-e bzr+https://bzr.myproject.org/MyProject/trunk@2019#egg=MyProject",
        "bzr+http://bzr.myproject.org/MyProject/trunk@v1.0#egg=MyProject",
        "-e bzr+http://bzr.myproject.org/MyProject/trunk@v1.0#egg=MyProject",
        # Zulip
        "-e git+https://github.com/zulip/talon.git@"
        "7d8bdc4dbcfcc5a73298747293b99fe53da55315#egg=talon==1.2.10.zulip1",
        "-e git+https://github.com/zulip/ultrajson@70ac02bec#egg=ujson==1.35+git",
        "-e git+https://github.com/zulip/virtualenv-clone.git@"
        "44e831da39ffb6b9bb5c7d103d98babccdca0456#egg=virtualenv-clone==0.2.6.zulip1",
        '-e "git+https://github.com/zulip/python-zulip-api.git@'
        '0.4.1#egg=zulip==0.4.1_git&subdirectory=zulip"',
        '-e "git+https://github.com/zulip/python-zulip-api.git@'
        '0.4.1#egg=zulip_bots==0.4.1+git&subdirectory=zulip_bots"',
        # AWX:
        "-e git+https://github.com/ansible/ansiconv.git@tower_1.0.0#egg=ansiconv",
        "-e git+https://github.com/ansible/django-qsstats-magic.git@"
        "tower_0.7.2#egg=django-qsstats-magic",
        "-e git+https://github.com/ansible/dm.xmlsec.binding.git@master#egg=dm.xmlsec.binding",
        "-e git+https://github.com/ansible/django-jsonbfield@"
        "fix-sqlite_serialization#egg=jsonbfield",
        "-e git+https://github.com/ansible/docutils.git@master#egg=docutils",

    )
    for line in cases:
        dependency = Dependency(line)
        assert dependency.valid, line
        serialized = dependency.serialize()
        if line.startswith('-e') and 'git+git@' not in line:
            # Editable flag is stripped, except for SSH shorthand URLs.
            expect = line.split(' ', 1)[1]
        else:
            expect = line
        assert serialized == expect


def test_merged_packages_raise_for_conflict():
    """Check that package x can't be locked to versions 1 and 2"""
    with pytest.raises(RuntimeError):
        merged_packages(
            {
                'a': {'x': 1},
                'b': {'x': 2},
            },
            ['a', 'b']
        )


def test_fix_pin_detects_version_conflict():
    """Check that package x can't be locked to versions 1 and 2"""
    dedup = PackageDeduplicator()
    dedup.on_discover([
        {'in_path': 'a', 'refs': ['b']},
        {'in_path': 'b', 'refs': []}
    ])
    dedup.register_packages_for_env('b', {'x': '1'})
    env = Environment('a', deduplicator=dedup)
    ignored_pin = env.fix_pin('x==1')
    assert ignored_pin is None
    with pytest.raises(RuntimeError):
        env.fix_pin('x==2')
-------------------------------------------------------------------------------- /.pylintrc: -------------------------------------------------------------------------------- 1 | [MASTER] 2 | 3 | # A comma-separated list of package or module names from where C extensions may 4 | # be loaded. Extensions are loading into the active Python interpreter and may 5 | # run arbitrary code 6 | extension-pkg-whitelist= 7 | 8 | # Add files or directories to the blacklist. They should be base names, not 9 | # paths. 10 | ignore=CVS 11 | 12 | # Add files or directories matching the regex patterns to the blacklist. The 13 | # regex matches against base names, not paths. 14 | ignore-patterns= 15 | 16 | # Python code to execute, usually for sys.path manipulation such as 17 | # pygtk.require(). 18 | #init-hook= 19 | 20 | # Use multiple processes to speed up Pylint. 21 | jobs=1 22 | 23 | # List of plugins (as comma separated values of python modules names) to load, 24 | # usually to register additional checkers. 25 | load-plugins= 26 | 27 | # Pickle collected data for later comparisons. 28 | persistent=yes 29 | 30 | # Specify a configuration file. 31 | #rcfile= 32 | 33 | # When enabled, pylint would attempt to guess common misconfiguration and emit 34 | # user-friendly hints instead of false-positive error messages 35 | suggestion-mode=yes 36 | 37 | # Allow loading of arbitrary C extensions. Extensions are imported into the 38 | # active Python interpreter and may run arbitrary code. 39 | unsafe-load-any-extension=no 40 | 41 | 42 | [MESSAGES CONTROL] 43 | 44 | # Only show warnings with the listed confidence levels. Leave empty to show 45 | # all. Valid levels: HIGH, INFERENCE, INFERENCE_FAILURE, UNDEFINED 46 | confidence= 47 | 48 | # Disable the message, report, category or checker with the given id(s). 
You 49 | # can either give multiple identifiers separated by comma (,) or put this 50 | # option multiple times (only on the command line, not in the configuration 51 | # file where it should appear only once).You can also use "--disable=all" to 52 | # disable everything first and then reenable specific checks. For example, if 53 | # you want to run only the similarities checker, you can use "--disable=all 54 | # --enable=similarities". If you want to run only the classes checker, but have 55 | # no Warning level messages displayed, use"--disable=all --enable=classes 56 | # --disable=W" 57 | disable=print-statement, 58 | parameter-unpacking, 59 | unpacking-in-except, 60 | old-raise-syntax, 61 | backtick, 62 | long-suffix, 63 | old-ne-operator, 64 | old-octal-literal, 65 | import-star-module-level, 66 | non-ascii-bytes-literal, 67 | raw-checker-failed, 68 | bad-inline-option, 69 | locally-disabled, 70 | locally-enabled, 71 | file-ignored, 72 | suppressed-message, 73 | useless-suppression, 74 | deprecated-pragma, 75 | apply-builtin, 76 | basestring-builtin, 77 | buffer-builtin, 78 | cmp-builtin, 79 | coerce-builtin, 80 | execfile-builtin, 81 | file-builtin, 82 | long-builtin, 83 | raw_input-builtin, 84 | reduce-builtin, 85 | standarderror-builtin, 86 | unicode-builtin, 87 | xrange-builtin, 88 | coerce-method, 89 | delslice-method, 90 | getslice-method, 91 | setslice-method, 92 | no-absolute-import, 93 | old-division, 94 | dict-iter-method, 95 | dict-view-method, 96 | next-method-called, 97 | metaclass-assignment, 98 | indexing-exception, 99 | raising-string, 100 | reload-builtin, 101 | oct-method, 102 | hex-method, 103 | nonzero-method, 104 | cmp-method, 105 | input-builtin, 106 | round-builtin, 107 | intern-builtin, 108 | unichr-builtin, 109 | map-builtin-not-iterating, 110 | zip-builtin-not-iterating, 111 | range-builtin-not-iterating, 112 | filter-builtin-not-iterating, 113 | using-cmp-argument, 114 | eq-without-hash, 115 | div-method, 116 | idiv-method, 117 | 
rdiv-method, 118 | exception-message-attribute, 119 | invalid-str-codec, 120 | sys-max-int, 121 | bad-python3-import, 122 | deprecated-string-function, 123 | deprecated-str-translate-call, 124 | deprecated-itertools-function, 125 | deprecated-types-field, 126 | next-method-defined, 127 | dict-items-not-iterating, 128 | dict-keys-not-iterating, 129 | dict-values-not-iterating, 130 | useless-object-inheritance, 131 | no-else-return, 132 | no-init, 133 | old-style-class 134 | 135 | # Enable the message, report, category or checker with the given id(s). You can 136 | # either give multiple identifier separated by comma (,) or put this option 137 | # multiple time (only on the command line, not in the configuration file where 138 | # it should appear only once). See also the "--disable" option for examples. 139 | enable=c-extension-no-member 140 | 141 | 142 | [REPORTS] 143 | 144 | # Python expression which should return a note less than 10 (10 is the highest 145 | # note). You have access to the variables errors warning, statement which 146 | # respectively contain the number of errors / warnings messages and the total 147 | # number of statements analyzed. This is used by the global evaluation report 148 | # (RP0004). 149 | evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10) 150 | 151 | # Template used to display messages. This is a python new-style format string 152 | # used to format the message information. See doc for all details 153 | #msg-template= 154 | 155 | # Set the output format. Available formats are text, parseable, colorized, json 156 | # and msvs (visual studio).You can also give a reporter class, eg 157 | # mypackage.mymodule.MyReporterClass. 158 | output-format=text 159 | 160 | # Tells whether to display a full report or only the messages 161 | reports=no 162 | 163 | # Activate the evaluation score. 
164 | score=yes 165 | 166 | 167 | [REFACTORING] 168 | 169 | # Maximum number of nested blocks for function / method body 170 | max-nested-blocks=5 171 | 172 | # Complete name of functions that never returns. When checking for 173 | # inconsistent-return-statements if a never returning function is called then 174 | # it will be considered as an explicit return statement and no message will be 175 | # printed. 176 | never-returning-functions=optparse.Values,sys.exit 177 | 178 | 179 | [SIMILARITIES] 180 | 181 | # Ignore comments when computing similarities. 182 | ignore-comments=yes 183 | 184 | # Ignore docstrings when computing similarities. 185 | ignore-docstrings=yes 186 | 187 | # Ignore imports when computing similarities. 188 | ignore-imports=no 189 | 190 | # Minimum lines number of a similarity. 191 | min-similarity-lines=4 192 | 193 | 194 | [FORMAT] 195 | 196 | # Expected format of line ending, e.g. empty (any line ending), LF or CRLF. 197 | expected-line-ending-format= 198 | 199 | # Regexp for a line that is allowed to be longer than the limit. 200 | ignore-long-lines=^\s*(# )?<?https?://\S+>?$ 201 | 202 | # Number of spaces of indent required inside a hanging or continued line. 203 | indent-after-paren=4 204 | 205 | # String used as indentation unit. This is usually " " (4 spaces) or "\t" (1 206 | # tab). 207 | indent-string=' ' 208 | 209 | # Maximum number of characters on a single line. 210 | max-line-length=100 211 | 212 | # Maximum number of lines in a module 213 | max-module-lines=1000 214 | 215 | # List of optional constructs for which whitespace checking is disabled. `dict- 216 | # separator` is used to allow tabulation in dicts, etc.: {1 : 1,\n222: 2}. 217 | # `trailing-comma` allows a space between comma and closing bracket: (a, ). 218 | # `empty-line` allows space-only lines. 219 | no-space-check=trailing-comma, 220 | dict-separator 221 | 222 | # Allow the body of a class to be on the same line as the declaration if body 223 | # contains single statement. 
224 | single-line-class-stmt=no 225 | 226 | # Allow the body of an if to be on the same line as the test if there is no 227 | # else. 228 | single-line-if-stmt=no 229 | 230 | 231 | [MISCELLANEOUS] 232 | 233 | # List of note tags to take in consideration, separated by a comma. 234 | notes=FIXME, 235 | XXX, 236 | TODO 237 | 238 | 239 | [SPELLING] 240 | 241 | # Limits count of emitted suggestions for spelling mistakes 242 | max-spelling-suggestions=4 243 | 244 | # Spelling dictionary name. Available dictionaries: none. To make it working 245 | # install python-enchant package. 246 | spelling-dict= 247 | 248 | # List of comma separated words that should not be checked. 249 | spelling-ignore-words= 250 | 251 | # A path to a file that contains private dictionary; one word per line. 252 | spelling-private-dict-file= 253 | 254 | # Tells whether to store unknown words to indicated private dictionary in 255 | # --spelling-private-dict-file option instead of raising a message. 256 | spelling-store-unknown-words=no 257 | 258 | 259 | [LOGGING] 260 | 261 | # Logging modules to check that the string format arguments are in logging 262 | # function parameter format 263 | logging-modules=logging 264 | 265 | 266 | [BASIC] 267 | 268 | # Naming style matching correct argument names 269 | argument-naming-style=snake_case 270 | 271 | # Regular expression matching correct argument names. Overrides argument- 272 | # naming-style 273 | #argument-rgx= 274 | 275 | # Naming style matching correct attribute names 276 | attr-naming-style=snake_case 277 | 278 | # Regular expression matching correct attribute names. 
Overrides attr-naming- 279 | # style 280 | #attr-rgx= 281 | 282 | # Bad variable names which should always be refused, separated by a comma 283 | bad-names=foo, 284 | bar, 285 | baz, 286 | toto, 287 | tutu, 288 | tata 289 | 290 | # Naming style matching correct class attribute names 291 | class-attribute-naming-style=any 292 | 293 | # Regular expression matching correct class attribute names. Overrides class- 294 | # attribute-naming-style 295 | #class-attribute-rgx= 296 | 297 | # Naming style matching correct class names 298 | class-naming-style=PascalCase 299 | 300 | # Regular expression matching correct class names. Overrides class-naming-style 301 | #class-rgx= 302 | 303 | # Naming style matching correct constant names 304 | const-naming-style=UPPER_CASE 305 | 306 | # Regular expression matching correct constant names. Overrides const-naming- 307 | # style 308 | #const-rgx= 309 | 310 | # Minimum line length for functions/classes that require docstrings, shorter 311 | # ones are exempt. 312 | docstring-min-length=-1 313 | 314 | # Naming style matching correct function names 315 | function-naming-style=snake_case 316 | 317 | # Regular expression matching correct function names. Overrides function- 318 | # naming-style 319 | # Allow long test names 320 | function-rgx=[a-z_][a-z0-9_]{2,70}$ 321 | 322 | # Good variable names which should always be accepted, separated by a comma 323 | good-names=i, 324 | j, 325 | k, 326 | ex, 327 | Run, 328 | _, 329 | fp, 330 | logger, 331 | options 332 | 333 | # Include a hint for the correct naming format with invalid-name 334 | include-naming-hint=no 335 | 336 | # Naming style matching correct inline iteration names 337 | inlinevar-naming-style=any 338 | 339 | # Regular expression matching correct inline iteration names. 
Overrides 340 | # inlinevar-naming-style 341 | #inlinevar-rgx= 342 | 343 | # Naming style matching correct method names 344 | method-naming-style=snake_case 345 | 346 | # Regular expression matching correct method names. Overrides method-naming- 347 | # style 348 | #method-rgx= 349 | 350 | # Naming style matching correct module names 351 | module-naming-style=snake_case 352 | 353 | # Regular expression matching correct module names. Overrides module-naming- 354 | # style 355 | #module-rgx= 356 | 357 | # Colon-delimited sets of names that determine each other's naming style when 358 | # the name regexes allow several styles. 359 | name-group= 360 | 361 | # Regular expression which should only match function or class names that do 362 | # not require a docstring. 363 | no-docstring-rgx=^_ 364 | 365 | # List of decorators that produce properties, such as abc.abstractproperty. Add 366 | # to this list to register other decorators that produce valid properties. 367 | property-classes=abc.abstractproperty 368 | 369 | # Naming style matching correct variable names 370 | variable-naming-style=snake_case 371 | 372 | # Regular expression matching correct variable names. Overrides variable- 373 | # naming-style 374 | #variable-rgx= 375 | 376 | 377 | [TYPECHECK] 378 | 379 | # List of decorators that produce context managers, such as 380 | # contextlib.contextmanager. Add to this list to register other decorators that 381 | # produce valid context managers. 382 | contextmanager-decorators=contextlib.contextmanager 383 | 384 | # List of members which are set dynamically and missed by pylint inference 385 | # system, and so shouldn't trigger E1101 when accessed. Python regular 386 | # expressions are accepted. 387 | generated-members= 388 | 389 | # Tells whether missing members accessed in mixin class should be ignored. A 390 | # mixin class is detected if its name ends with "mixin" (case insensitive). 
391 | ignore-mixin-members=yes 392 | 393 | # This flag controls whether pylint should warn about no-member and similar 394 | # checks whenever an opaque object is returned when inferring. The inference 395 | # can return multiple potential results while evaluating a Python object, but 396 | # some branches might not be evaluated, which results in partial inference. In 397 | # that case, it might be useful to still emit no-member and other checks for 398 | # the rest of the inferred objects. 399 | ignore-on-opaque-inference=yes 400 | 401 | # List of class names for which member attributes should not be checked (useful 402 | # for classes with dynamically set attributes). This supports the use of 403 | # qualified names. 404 | ignored-classes=optparse.Values,thread._local,_thread._local 405 | 406 | # List of module names for which member attributes should not be checked 407 | # (useful for modules/projects where namespaces are manipulated during runtime 408 | # and thus existing member attributes cannot be deduced by static analysis. It 409 | # supports qualified module names, as well as Unix pattern matching. 410 | ignored-modules= 411 | 412 | # Show a hint with possible names when a member name was not found. The aspect 413 | # of finding the hint is based on edit distance. 414 | missing-member-hint=yes 415 | 416 | # The minimum edit distance a name should have in order to be considered a 417 | # similar match for a missing member name. 418 | missing-member-hint-distance=1 419 | 420 | # The total number of similar names that should be taken in consideration when 421 | # showing a hint for a missing member. 422 | missing-member-max-choices=1 423 | 424 | 425 | [VARIABLES] 426 | 427 | # List of additional names supposed to be defined in builtins. Remember that 428 | # you should avoid to define new builtins when possible. 429 | additional-builtins= 430 | 431 | # Tells whether unused global variables should be treated as a violation. 
432 | allow-global-unused-variables=yes 433 | 434 | # List of strings which can identify a callback function by name. A callback 435 | # name must start or end with one of those strings. 436 | callbacks=cb_, 437 | _cb 438 | 439 | # A regular expression matching the name of dummy variables (i.e. expectedly 440 | # not used). 441 | dummy-variables-rgx=_+$|(_[a-zA-Z0-9_]*[a-zA-Z0-9]+?$)|dummy|^ignored_|^unused_ 442 | 443 | # Argument names that match this expression will be ignored. Default to name 444 | # with leading underscore 445 | ignored-argument-names=_.*|^ignored_|^unused_ 446 | 447 | # Tells whether we should check for unused import in __init__ files. 448 | init-import=no 449 | 450 | # List of qualified module names which can have objects that can redefine 451 | # builtins. 452 | redefining-builtins-modules=six.moves,past.builtins,future.builtins 453 | 454 | 455 | [CLASSES] 456 | 457 | # List of method names used to declare (i.e. assign) instance attributes. 458 | defining-attr-methods=__init__, 459 | __new__, 460 | setUp 461 | 462 | # List of member names, which should be excluded from the protected access 463 | # warning. 464 | exclude-protected=_asdict, 465 | _fields, 466 | _replace, 467 | _source, 468 | _make 469 | 470 | # List of valid names for the first argument in a class method. 471 | valid-classmethod-first-arg=cls 472 | 473 | # List of valid names for the first argument in a metaclass class method. 474 | valid-metaclass-classmethod-first-arg=mcs 475 | 476 | 477 | [IMPORTS] 478 | 479 | # Allow wildcard imports from modules that define __all__. 480 | allow-wildcard-with-all=no 481 | 482 | # Analyse import fallback blocks. This can be used to support both Python 2 and 483 | # 3 compatible code, which means that the block might have code that exists 484 | # only in one or another interpreter, leading to false positives when analysed. 
485 | analyse-fallback-blocks=no 486 | 487 | # Deprecated modules which should not be used, separated by a comma 488 | deprecated-modules=optparse,tkinter.tix 489 | 490 | # Create a graph of external dependencies in the given file (report RP0402 must 491 | # not be disabled) 492 | ext-import-graph= 493 | 494 | # Create a graph of every (i.e. internal and external) dependencies in the 495 | # given file (report RP0402 must not be disabled) 496 | import-graph= 497 | 498 | # Create a graph of internal dependencies in the given file (report RP0402 must 499 | # not be disabled) 500 | int-import-graph= 501 | 502 | # Force import order to recognize a module as part of the standard 503 | # compatibility libraries. 504 | known-standard-library= 505 | 506 | # Force import order to recognize a module as part of a third party library. 507 | known-third-party=enchant 508 | 509 | 510 | [DESIGN] 511 | 512 | # Maximum number of arguments for function / method 513 | max-args=500 514 | 515 | # Maximum number of attributes for a class (see R0902). 516 | max-attributes=7 517 | 518 | # Maximum number of boolean expressions in a if statement 519 | max-bool-expr=5 520 | 521 | # Maximum number of branch for function / method body 522 | max-branches=12 523 | 524 | # Maximum number of locals for function / method body 525 | max-locals=15 526 | 527 | # Maximum number of parents for a class (see R0901). 528 | max-parents=7 529 | 530 | # Maximum number of public methods for a class (see R0904). 531 | max-public-methods=20 532 | 533 | # Maximum number of return / yield for function / method body 534 | max-returns=6 535 | 536 | # Maximum number of statements in function / method body 537 | max-statements=50 538 | 539 | # Minimum number of public methods for a class (see R0903). 540 | min-public-methods=2 541 | 542 | 543 | [EXCEPTIONS] 544 | 545 | # Exceptions that will emit a warning when being caught. 
Defaults to 546 | # "Exception" 547 | overgeneral-exceptions=Exception 548 | --------------------------------------------------------------------------------