├── tests ├── __init__.py ├── fixtures │ ├── fake-editables.json │ └── fake-index.json ├── test_minimal_upgrade.py ├── test_utils.py ├── test_writer.py ├── test_fake_index.py ├── test_resolver.py ├── conftest.py ├── test_cache.py ├── test_cli.py └── test_sync.py ├── piptools ├── __init__.py ├── scripts │ ├── __init__.py │ ├── sync.py │ └── compile.py ├── repositories │ ├── __init__.py │ ├── base.py │ ├── local.py │ └── pypi.py ├── click.py ├── _compat │ ├── __init__.py │ ├── tempfile.py │ └── contextlib.py ├── locations.py ├── logging.py ├── exceptions.py ├── sync.py ├── writer.py ├── cache.py ├── utils.py ├── resolver.py └── io.py ├── setup.cfg ├── examples ├── hypothesis.in ├── flask.in ├── sentry.in ├── django.in └── protection.in ├── img └── pip-tools-overview.png ├── .github └── ISSUE_TEMPLATE.md ├── .gitignore ├── Makefile ├── tox.ini ├── .travis.yml ├── setup.py ├── README.md └── CHANGELOG.md /tests/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /piptools/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /piptools/scripts/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /setup.cfg: -------------------------------------------------------------------------------- 1 | [wheel] 2 | universal = 1 3 | -------------------------------------------------------------------------------- /examples/hypothesis.in: -------------------------------------------------------------------------------- 1 | hypothesis[django] 2 | -------------------------------------------------------------------------------- /examples/flask.in: 
-------------------------------------------------------------------------------- 1 | # Flask has 2nd and 3rd level dependencies 2 | Flask 3 | -------------------------------------------------------------------------------- /examples/sentry.in: -------------------------------------------------------------------------------- 1 | # Sentry has a very large dependency tree 2 | sentry 3 | -------------------------------------------------------------------------------- /tests/fixtures/fake-editables.json: -------------------------------------------------------------------------------- 1 | { 2 | "git+git://example.org/django.git#egg=django": [] 3 | } 4 | -------------------------------------------------------------------------------- /img/pip-tools-overview.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ramiro/pip-tools/master/img/pip-tools-overview.png -------------------------------------------------------------------------------- /piptools/repositories/__init__.py: -------------------------------------------------------------------------------- 1 | # flake8: noqa 2 | from .local import LocalRequirementsRepository 3 | from .pypi import PyPIRepository 4 | -------------------------------------------------------------------------------- /examples/django.in: -------------------------------------------------------------------------------- 1 | # This file includes the Django project, and the debug toolbar 2 | Django<1.8 # suppose some version requirement 3 | django-debug-toolbar 4 | -------------------------------------------------------------------------------- /piptools/click.py: -------------------------------------------------------------------------------- 1 | from __future__ import absolute_import 2 | 3 | import click 4 | click.disable_unicode_literals_warning = True 5 | 6 | from click import * # noqa 7 | -------------------------------------------------------------------------------- 
/examples/protection.in: -------------------------------------------------------------------------------- 1 | # This package depends on setuptools, which should not end up in the compiled 2 | # requirements, because it may cause conflicts with pip itself 3 | python-levenshtein==0.12.0 4 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE.md: -------------------------------------------------------------------------------- 1 | Describe the issue briefly here. 2 | 3 | ##### Steps to replicate 4 | 5 | 1. ... 6 | 2. ... 7 | 3. ... 8 | 9 | ##### Expected result 10 | 11 | ... 12 | 13 | ##### Actual result 14 | 15 | ... 16 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Ignore cram test output 2 | *.t.err 3 | 4 | # Python cruft 5 | *.pyc 6 | 7 | # Virtualenvs 8 | .envrc 9 | .direnv 10 | 11 | # Testing 12 | .tox 13 | htmlcov 14 | 15 | # Build output 16 | build 17 | dist 18 | *.egg-info 19 | .coverage 20 | .cache 21 | 22 | # IDE 23 | .idea 24 | venv/ 25 | 26 | # Test files 27 | requirements.in 28 | requirements.txt 29 | -------------------------------------------------------------------------------- /piptools/_compat/__init__.py: -------------------------------------------------------------------------------- 1 | # coding: utf-8 2 | # flake8: noqa 3 | from __future__ import (absolute_import, division, print_function, 4 | unicode_literals) 5 | 6 | import six 7 | 8 | if six.PY2: 9 | from .tempfile import TemporaryDirectory 10 | from .contextlib import ExitStack 11 | else: 12 | from tempfile import TemporaryDirectory 13 | from contextlib import ExitStack 14 | -------------------------------------------------------------------------------- /Makefile: -------------------------------------------------------------------------------- 1 | all: 2 | 3 | clean: 4 | rm -rf build/ dist/ 5 | 6 | 
release: clean 7 | # Check if latest tag is the current head we're releasing 8 | echo "Latest tag = $$(git tag | sort -nr | head -n1)" 9 | echo "HEAD SHA = $$(git sha head)" 10 | echo "Latest tag SHA = $$(git tag | sort -nr | head -n1 | xargs git sha)" 11 | @test "$$(git sha head)" = "$$(git tag | sort -nr | head -n1 | xargs git sha)" 12 | make force_release 13 | 14 | force_release: clean 15 | git push --tags 16 | python setup.py sdist bdist_wheel 17 | twine upload dist/* 18 | -------------------------------------------------------------------------------- /tox.ini: -------------------------------------------------------------------------------- 1 | [tox] 2 | envlist = py{27,34,35,py}-pip{8,9,latest},flake8 3 | 4 | [testenv] 5 | deps = 6 | piplatest: pip 7 | pip8: pip~=8.0 8 | pip9: pip~=9.0 9 | coverage 10 | pytest 11 | commands = 12 | python -c 'import pip; print("Using pip %s" % pip.__version__)' 13 | python -m coverage run --source piptools -m pytest --strict {posargs:tests/} 14 | python -m coverage report -m 15 | python -m coverage html 16 | 17 | [testenv:flake8] 18 | basepython = python2.7 19 | deps = flake8 20 | commands = flake8 piptools tests --max-line-length=120 21 | -------------------------------------------------------------------------------- /.travis.yml: -------------------------------------------------------------------------------- 1 | sudo: false 2 | language: python 3 | 4 | # Run tox with python 3.5, but let _it_ build the envs for the various Python 5 | # versions (instead of Travis) 6 | python: 3.5 7 | 8 | env: 9 | matrix: 10 | - TOXENV=py27-piplatest 11 | - TOXENV=py34-piplatest 12 | - TOXENV=py35-piplatest 13 | - TOXENV=py27-pip8 14 | - TOXENV=py34-pip8 15 | - TOXENV=py35-pip8 16 | - TOXENV=py27-pip9 17 | - TOXENV=py34-pip9 18 | - TOXENV=py35-pip9 19 | - TOXENV=flake8 20 | 21 | install: 22 | - travis_retry pip install tox 23 | 24 | cache: pip 25 | 26 | script: 27 | - tox 28 | 
-------------------------------------------------------------------------------- /piptools/locations.py: -------------------------------------------------------------------------------- 1 | import os 2 | from shutil import rmtree 3 | 4 | from .click import secho 5 | from pip.utils.appdirs import user_cache_dir 6 | 7 | # The user_cache_dir helper comes straight from pip itself 8 | CACHE_DIR = user_cache_dir('pip-tools') 9 | 10 | # NOTE 11 | # We used to store the cache dir under ~/.pip-tools, which is not the 12 | # preferred place to store caches for any platform. This has been addressed 13 | # in pip-tools==1.0.5, but to be good citizens, we point this out explicitly 14 | # to the user when this directory is still found. 15 | LEGACY_CACHE_DIR = os.path.expanduser('~/.pip-tools') 16 | 17 | if os.path.exists(LEGACY_CACHE_DIR): 18 | secho('Removing old cache dir {} (new cache dir is {})'.format(LEGACY_CACHE_DIR, CACHE_DIR), fg='yellow') 19 | rmtree(LEGACY_CACHE_DIR) 20 | -------------------------------------------------------------------------------- /piptools/logging.py: -------------------------------------------------------------------------------- 1 | # coding: utf-8 2 | from __future__ import (absolute_import, division, print_function, 3 | unicode_literals) 4 | 5 | import sys 6 | 7 | from . 
import click 8 | 9 | 10 | class LogContext(object): 11 | def __init__(self, verbose=False): 12 | self.verbose = verbose 13 | 14 | def log(self, *args, **kwargs): 15 | click.secho(*args, **kwargs) 16 | 17 | def debug(self, *args, **kwargs): 18 | if self.verbose: 19 | self.log(*args, **kwargs) 20 | 21 | def info(self, *args, **kwargs): 22 | self.log(*args, **kwargs) 23 | 24 | def warning(self, *args, **kwargs): 25 | kwargs.setdefault('fg', 'yellow') 26 | kwargs.setdefault('file', sys.stderr) 27 | self.log(*args, **kwargs) 28 | 29 | def error(self, *args, **kwargs): 30 | kwargs.setdefault('fg', 'red') 31 | kwargs.setdefault('file', sys.stderr) 32 | self.log(*args, **kwargs) 33 | 34 | 35 | log = LogContext() 36 | -------------------------------------------------------------------------------- /piptools/exceptions.py: -------------------------------------------------------------------------------- 1 | class PipToolsError(Exception): 2 | pass 3 | 4 | 5 | class NoCandidateFound(PipToolsError): 6 | def __init__(self, ireq, candidates_tried): 7 | self.ireq = ireq 8 | self.candidates_tried = candidates_tried 9 | 10 | def __str__(self): 11 | sorted_versions = sorted(c.version for c in self.candidates_tried) 12 | lines = [ 13 | 'Could not find a version that matches {}'.format(self.ireq), 14 | 'Tried: {}'.format(', '.join(str(version) for version in sorted_versions) or '(no version found at all)') 15 | ] 16 | return '\n'.join(lines) 17 | 18 | 19 | class UnsupportedConstraint(PipToolsError): 20 | def __init__(self, message, constraint): 21 | super(UnsupportedConstraint, self).__init__(message) 22 | self.constraint = constraint 23 | 24 | def __str__(self): 25 | message = super(UnsupportedConstraint, self).__str__() 26 | return '{} (constraint was: {})'.format(message, str(self.constraint)) 27 | 28 | 29 | class IncompatibleRequirements(PipToolsError): 30 | def __init__(self, ireq_a, ireq_b): 31 | self.ireq_a = ireq_a 32 | self.ireq_b = ireq_b 33 | 34 | def __str__(self): 35 | 
message = "Incompatible requirements found: {} and {}" 36 | return message.format(self.ireq_a, self.ireq_b) 37 | -------------------------------------------------------------------------------- /piptools/repositories/base.py: -------------------------------------------------------------------------------- 1 | # coding: utf-8 2 | from __future__ import (absolute_import, division, print_function, 3 | unicode_literals) 4 | 5 | from abc import ABCMeta, abstractmethod 6 | 7 | from six import add_metaclass 8 | 9 | 10 | @add_metaclass(ABCMeta) 11 | class BaseRepository(object): 12 | 13 | def clear_caches(self): 14 | """Should clear any caches used by the implementation.""" 15 | 16 | def freshen_build_caches(self): 17 | """Should start with fresh build/source caches.""" 18 | 19 | @abstractmethod 20 | def find_best_match(self, ireq): 21 | """ 22 | Return a Version object that indicates the best match for the given 23 | InstallRequirement according to the repository. 24 | """ 25 | 26 | @abstractmethod 27 | def get_dependencies(self, ireq): 28 | """ 29 | Given a pinned or an editable InstallRequirement, returns a set of 30 | dependencies (also InstallRequirements, but not necessarily pinned). 31 | They indicate the secondary dependencies for the given requirement. 32 | """ 33 | 34 | @abstractmethod 35 | def get_hashes(self, ireq): 36 | """ 37 | Given a pinned InstallRequire, returns a set of hashes that represent 38 | all of the files for a given requirement. It is not acceptable for an 39 | editable or unpinned requirement to be passed to this function. 
40 | """ 41 | -------------------------------------------------------------------------------- /tests/test_minimal_upgrade.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | from piptools.repositories import LocalRequirementsRepository 3 | from piptools.utils import name_from_req 4 | 5 | 6 | @pytest.mark.parametrize( 7 | ('input', 'pins', 'expected'), 8 | 9 | ((tup) for tup in [ 10 | 11 | # Add Flask to an existing requirements.in, using --no-upgrade 12 | (['flask', 'jinja2', 'werkzeug'], 13 | [ 14 | # The requirements.txt from a previous round 15 | 'jinja2==2.7.3', 16 | 'markupsafe==0.23', 17 | 'werkzeug==0.6'], 18 | [ 19 | # Add flask and upgrade werkzeug from incompatible 0.6 20 | 'flask==0.10.1', 21 | 'itsdangerous==0.24', 22 | 'werkzeug==0.10.4', 23 | # Other requirements are unchanged from the original requirements.txt 24 | 'jinja2==2.7.3', 25 | 'markupsafe==0.23'] 26 | ), 27 | ]) 28 | ) 29 | def test_no_upgrades(base_resolver, repository, from_line, input, pins, expected): 30 | input = [from_line(line) for line in input] 31 | existing_pins = dict() 32 | for line in pins: 33 | ireq = from_line(line) 34 | existing_pins[name_from_req(ireq.req)] = ireq 35 | local_repository = LocalRequirementsRepository(existing_pins, repository) 36 | output = base_resolver(input, prereleases=False, repository=local_repository).resolve() 37 | output = {str(line) for line in output} 38 | assert output == {str(line) for line in expected} 39 | -------------------------------------------------------------------------------- /tests/test_utils.py: -------------------------------------------------------------------------------- 1 | from pytest import raises 2 | 3 | from piptools.utils import as_tuple, format_requirement, format_specifier, flat_map 4 | 5 | 6 | def test_format_requirement(from_line): 7 | ireq = from_line('test==1.2') 8 | assert format_requirement(ireq) == 'test==1.2' 9 | assert format_requirement(ireq, 
include_specifier=False) == 'test' 10 | 11 | 12 | def test_format_requirement_editable(from_editable): 13 | ireq = from_editable('git+git://fake.org/x/y.git#egg=y') 14 | assert format_requirement(ireq) == '-e git+git://fake.org/x/y.git#egg=y' 15 | 16 | 17 | def test_format_specifier(from_line): 18 | ireq = from_line('foo') 19 | assert format_specifier(ireq) == '' 20 | 21 | ireq = from_line('foo==1.2') 22 | assert format_specifier(ireq) == '==1.2' 23 | 24 | ireq = from_line('foo>1.2,~=1.1,<1.5') 25 | assert format_specifier(ireq) == '~=1.1,>1.2,<1.5' 26 | ireq = from_line('foo~=1.1,<1.5,>1.2') 27 | assert format_specifier(ireq) == '~=1.1,>1.2,<1.5' 28 | 29 | 30 | def test_as_tuple(from_line): 31 | ireq = from_line('foo==1.1') 32 | name, version, extras = as_tuple(ireq) 33 | assert name == 'foo' 34 | assert version == '1.1' 35 | assert extras == () 36 | 37 | ireq = from_line('foo[extra1,extra2]==1.1') 38 | name, version, extras = as_tuple(ireq) 39 | assert name == 'foo' 40 | assert version == '1.1' 41 | assert extras == ("extra1", "extra2") 42 | 43 | # Non-pinned versions aren't accepted 44 | should_be_rejected = [ 45 | 'foo==1.*', 46 | 'foo~=1.1,<1.5,>1.2', 47 | 'foo', 48 | ] 49 | for spec in should_be_rejected: 50 | ireq = from_line(spec) 51 | with raises(TypeError): 52 | as_tuple(ireq) 53 | 54 | 55 | def test_flat_map(): 56 | assert [1, 2, 4, 1, 3, 9] == list(flat_map(lambda x: [1, x, x * x], [2, 3])) 57 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | """ 2 | pip-tools keeps your pinned dependencies fresh. 
3 | """ 4 | from setuptools import find_packages, setup 5 | 6 | setup( 7 | name='pip-tools', 8 | version='1.8.1dev0', 9 | url='https://github.com/nvie/pip-tools/', 10 | license='BSD', 11 | author='Vincent Driessen', 12 | author_email='me@nvie.com', 13 | description=__doc__, 14 | packages=find_packages(exclude=['tests']), 15 | install_requires=[ 16 | 'click>=6', 17 | 'first', 18 | 'six', 19 | ], 20 | zip_safe=False, 21 | entry_points={ 22 | 'console_scripts': [ 23 | 'pip-compile = piptools.scripts.compile:cli', 24 | 'pip-sync = piptools.scripts.sync:cli', 25 | ], 26 | }, 27 | platforms='any', 28 | classifiers=[ 29 | # As from https://pypi.python.org/pypi?%3Aaction=list_classifiers 30 | # 'Development Status :: 1 - Planning', 31 | # 'Development Status :: 2 - Pre-Alpha', 32 | # 'Development Status :: 3 - Alpha', 33 | # 'Development Status :: 4 - Beta', 34 | 'Development Status :: 5 - Production/Stable', 35 | # 'Development Status :: 6 - Mature', 36 | # 'Development Status :: 7 - Inactive', 37 | 'Programming Language :: Python', 38 | 'Programming Language :: Python :: 2', 39 | # 'Programming Language :: Python :: 2.3', 40 | # 'Programming Language :: Python :: 2.4', 41 | # 'Programming Language :: Python :: 2.5', 42 | # 'Programming Language :: Python :: 2.6', 43 | 'Programming Language :: Python :: 2.7', 44 | 'Programming Language :: Python :: 3', 45 | # 'Programming Language :: Python :: 3.0', 46 | # 'Programming Language :: Python :: 3.1', 47 | # 'Programming Language :: Python :: 3.2', 48 | # 'Programming Language :: Python :: 3.3', 49 | 'Programming Language :: Python :: 3.4', 50 | 'Programming Language :: Python :: 3.5', 51 | 'Intended Audience :: Developers', 52 | 'Intended Audience :: System Administrators', 53 | 'License :: OSI Approved :: BSD License', 54 | 'Operating System :: OS Independent', 55 | 'Topic :: System :: Systems Administration', 56 | ] 57 | ) 58 | -------------------------------------------------------------------------------- 
/tests/test_writer.py: -------------------------------------------------------------------------------- 1 | from pytest import fixture 2 | 3 | from pip.index import FormatControl 4 | from piptools.utils import comment 5 | from piptools.writer import OutputWriter 6 | 7 | 8 | @fixture 9 | def writer(): 10 | return OutputWriter(src_files=["src_file", "src_file2"], dst_file="dst_file", dry_run=True, 11 | emit_header=True, emit_index=True, annotate=True, 12 | generate_hashes=False, 13 | default_index_url=None, index_urls=[], 14 | trusted_hosts=[], 15 | format_control=FormatControl(set(), set())) 16 | 17 | 18 | def test_format_requirement_annotation_editable(from_editable, writer): 19 | # Annotations are printed as comments at a fixed column 20 | ireq = from_editable('git+git://fake.org/x/y.git#egg=y') 21 | reverse_dependencies = {'y': ['xyz']} 22 | 23 | assert (writer._format_requirement(ireq, 24 | reverse_dependencies, 25 | primary_packages=[]) == 26 | '-e git+git://fake.org/x/y.git#egg=y ' + comment('# via xyz')) 27 | 28 | 29 | def test_format_requirement_annotation(from_line, writer): 30 | ireq = from_line('test==1.2') 31 | reverse_dependencies = {'test': ['xyz']} 32 | 33 | assert (writer._format_requirement(ireq, 34 | reverse_dependencies, 35 | primary_packages=[]) == 36 | 'test==1.2 ' + comment('# via xyz')) 37 | 38 | 39 | def test_format_requirement_annotation_case_sensitive(from_line, writer): 40 | ireq = from_line('Test==1.2') 41 | reverse_dependencies = {'test': ['xyz']} 42 | 43 | assert (writer._format_requirement(ireq, 44 | reverse_dependencies, 45 | primary_packages=[]) == 46 | 'Test==1.2 ' + comment('# via xyz')) 47 | 48 | 49 | def test_format_requirement_not_for_primary(from_line, writer): 50 | "Primary packages should not get annotated." 
51 | ireq = from_line('test==1.2') 52 | reverse_dependencies = {'test': ['xyz']} 53 | 54 | assert (writer._format_requirement(ireq, 55 | reverse_dependencies, 56 | primary_packages=['test']) == 57 | 'test==1.2') 58 | -------------------------------------------------------------------------------- /tests/fixtures/fake-index.json: -------------------------------------------------------------------------------- 1 | { 2 | "anyjson": { 3 | "0.3.3": {"": []} 4 | }, 5 | "amqp": { 6 | "1.4.9": {"": []}, 7 | "2.0.2": {"": ["vine>=1.1.1"]} 8 | }, 9 | "arrow": { 10 | "0.5.0": {"": ["python-dateutil"]}, 11 | "0.5.4": {"": ["python-dateutil"]} 12 | }, 13 | "billiard": { 14 | "3.3.0.23": {"": []} 15 | }, 16 | "celery": { 17 | "3.1.23": {"": [ 18 | "kombu>=3.0.34", 19 | "pytz>dev", 20 | "billiard>=3.3.0.23" 21 | ]} 22 | }, 23 | "click": { 24 | "3.3": {"": []}, 25 | "4.0": {"": []} 26 | }, 27 | "django": { 28 | "1.6.11": {"": []}, 29 | "1.7.7": {"": []}, 30 | "1.8": {"": []} 31 | }, 32 | "flask": { 33 | "0.10.1": {"": [ 34 | "Jinja2>=2.4", 35 | "Werkzeug>=0.7", 36 | "itsdangerous>=0.21" 37 | ]} 38 | }, 39 | "flask-cors": { 40 | "1.10.2": {"": [ 41 | "Flask>=0.9", 42 | "Six" 43 | ]}, 44 | "2.0.0": {"": [ 45 | "Flask>=0.9", 46 | "Six" 47 | ]} 48 | }, 49 | "gnureadline": { 50 | "6.3.3": {"": []} 51 | }, 52 | "ipython": { 53 | "2.1.0": { 54 | "": ["gnureadline"], 55 | "nbconvert": [ 56 | "pygments", 57 | "jinja2", 58 | "Sphinx>=0.3" 59 | ], 60 | "notebook": [ 61 | "tornado>=3.1", 62 | "pyzmq>=2.1.11", 63 | "jinja2" 64 | ] 65 | } 66 | }, 67 | "itsdangerous": { 68 | "0.24": {"": []} 69 | }, 70 | "jinja2": { 71 | "2.7.3": {"": [ 72 | "markupsafe" 73 | ]} 74 | }, 75 | "kombu": { 76 | "3.0.35": {"": [ 77 | "anyjson>=0.3.3", 78 | "amqp>=1.4.9,<2.0" 79 | ]} 80 | }, 81 | "librabbitmq": { 82 | "1.6.1": {"": ["amqp>=1.4.6"]} 83 | }, 84 | "markupsafe": { 85 | "0.23": {"": []} 86 | }, 87 | "psycopg2": { 88 | "2.5.4": {"": []}, 89 | "2.6": {"": []} 90 | }, 91 | "pygments": { 92 | "1.5": {"": []} 
93 | }, 94 | "pyzmq": { 95 | "2.1.12": {"": []} 96 | }, 97 | "pytz": { 98 | "2016.4": {"": []} 99 | }, 100 | "six": { 101 | "1.6.1": {"": []}, 102 | "1.9.0": {"": []} 103 | }, 104 | "sphinx": { 105 | "0.3": {"": []} 106 | }, 107 | "sqlalchemy": { 108 | "0.9.8": {"": []}, 109 | "0.9.9": {"": []}, 110 | "1.0.0b5": {"": []} 111 | }, 112 | "tornado": { 113 | "3.2.2": {"": []} 114 | }, 115 | "vine": { 116 | "1.1.1": {"": []} 117 | }, 118 | "werkzeug": { 119 | "0.6": {"": []}, 120 | "0.10": {"": []}, 121 | "0.10.4": {"": []} 122 | } 123 | } 124 | -------------------------------------------------------------------------------- /piptools/repositories/local.py: -------------------------------------------------------------------------------- 1 | # coding: utf-8 2 | from __future__ import (absolute_import, division, print_function, 3 | unicode_literals) 4 | 5 | from piptools.utils import as_tuple, key_from_req, make_install_requirement 6 | from .base import BaseRepository 7 | 8 | 9 | def ireq_satisfied_by_existing_pin(ireq, existing_pin): 10 | """ 11 | Return True if the given InstallationRequirement is satisfied by the 12 | previously encountered version pin. 13 | """ 14 | if hasattr(existing_pin.req, 'specs'): 15 | # pip < 8.1.2 16 | version = existing_pin.req.specs[0][1] 17 | return version in ireq.req 18 | else: 19 | # pip >= 8.1.2 20 | version = next(iter(existing_pin.req.specifier)).version 21 | return version in ireq.req.specifier 22 | 23 | 24 | class LocalRequirementsRepository(BaseRepository): 25 | """ 26 | The LocalRequirementsRepository proxied the _real_ repository by first 27 | checking if a requirement can be satisfied by existing pins (i.e. the 28 | result of a previous compile step). 29 | 30 | In effect, if a requirement can be satisfied with a version pinned in the 31 | requirements file, we prefer that version over the best match found in 32 | PyPI. This keeps updates to the requirements.txt down to a minimum. 
33 | """ 34 | def __init__(self, existing_pins, proxied_repository): 35 | self.repository = proxied_repository 36 | self.existing_pins = existing_pins 37 | 38 | @property 39 | def finder(self): 40 | return self.repository.finder 41 | 42 | @property 43 | def session(self): 44 | return self.repository.session 45 | 46 | @property 47 | def DEFAULT_INDEX_URL(self): 48 | return self.repository.DEFAULT_INDEX_URL 49 | 50 | def clear_caches(self): 51 | self.repository.clear_caches() 52 | 53 | def freshen_build_caches(self): 54 | self.repository.freshen_build_caches() 55 | 56 | def find_best_match(self, ireq, prereleases=None): 57 | key = key_from_req(ireq.req) 58 | existing_pin = self.existing_pins.get(key) 59 | if existing_pin and ireq_satisfied_by_existing_pin(ireq, existing_pin): 60 | project, version, _ = as_tuple(existing_pin) 61 | return make_install_requirement( 62 | project, version, ireq.extras 63 | ) 64 | else: 65 | return self.repository.find_best_match(ireq, prereleases) 66 | 67 | def get_dependencies(self, ireq): 68 | return self.repository.get_dependencies(ireq) 69 | 70 | def get_hashes(self, ireq): 71 | return self.repository.get_hashes(ireq) 72 | -------------------------------------------------------------------------------- /tests/test_fake_index.py: -------------------------------------------------------------------------------- 1 | from pytest import raises 2 | 3 | 4 | def test_find_best_match(from_line, repository): 5 | ireq = from_line('django>1.5') 6 | assert str(repository.find_best_match(ireq)) == 'django==1.8' 7 | 8 | ireq = from_line('django<1.8,~=1.6') 9 | assert str(repository.find_best_match(ireq)) == 'django==1.7.7' 10 | 11 | # Extras available, but no extras specified 12 | ireq = from_line('ipython') 13 | assert str(repository.find_best_match(ireq)) == 'ipython==2.1.0' 14 | 15 | # Make sure we include extras. They should be sorted in the output. 
16 | ireq = from_line('ipython[notebook,nbconvert]') 17 | assert str(repository.find_best_match(ireq)) == 'ipython[nbconvert,notebook]==2.1.0' 18 | 19 | 20 | def test_find_best_match_incl_prereleases(from_line, repository): 21 | ireq = from_line('SQLAlchemy') 22 | assert str(repository.find_best_match(ireq, prereleases=False)) == 'sqlalchemy==0.9.9' 23 | assert str(repository.find_best_match(ireq, prereleases=True)) == 'sqlalchemy==1.0.0b5' 24 | 25 | 26 | def test_find_best_match_for_editable(from_editable, repository): 27 | ireq = from_editable('git+git://whatev.org/blah.git#egg=flask') 28 | assert repository.find_best_match(ireq) == ireq 29 | 30 | 31 | def test_get_dependencies(from_line, repository): 32 | ireq = from_line('django==1.6.11') 33 | assert repository.get_dependencies(ireq) == [] 34 | 35 | ireq = from_line('Flask==0.10.1') 36 | dependencies = repository.get_dependencies(ireq) 37 | assert (set(str(req) for req in dependencies) == 38 | {'Werkzeug>=0.7', 'Jinja2>=2.4', 'itsdangerous>=0.21'}) 39 | 40 | ireq = from_line('ipython==2.1.0') 41 | dependencies = repository.get_dependencies(ireq) 42 | assert set(str(req) for req in dependencies) == {'gnureadline'} 43 | 44 | ireq = from_line('ipython[notebook]==2.1.0') 45 | dependencies = repository.get_dependencies(ireq) 46 | assert (set(str(req) for req in dependencies) == 47 | {'gnureadline', 'pyzmq>=2.1.11', 'tornado>=3.1', 'jinja2'}) 48 | 49 | ireq = from_line('ipython[notebook,nbconvert]==2.1.0') 50 | dependencies = repository.get_dependencies(ireq) 51 | assert (set(str(req) for req in dependencies) == 52 | {'gnureadline', 'pyzmq>=2.1.11', 'tornado>=3.1', 'jinja2', 'pygments', 'Sphinx>=0.3'}) 53 | 54 | 55 | def test_get_dependencies_for_editable(from_editable, repository): 56 | ireq = from_editable('git+git://example.org/django.git#egg=django') 57 | assert repository.get_dependencies(ireq) == [] 58 | 59 | 60 | def test_get_dependencies_rejects_non_pinned_requirements(from_line, repository): 61 | 
not_a_pinned_req = from_line('django>1.6') 62 | with raises(TypeError): 63 | repository.get_dependencies(not_a_pinned_req) 64 | -------------------------------------------------------------------------------- /tests/test_resolver.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | 4 | @pytest.mark.parametrize( 5 | ('input', 'expected', 'prereleases'), 6 | 7 | ((tup + (False,))[:3] for tup in [ 8 | 9 | (['Django'], ['django==1.8']), 10 | 11 | (['Flask'], 12 | ['flask==0.10.1', 'itsdangerous==0.24', 'markupsafe==0.23', 13 | 'jinja2==2.7.3', 'werkzeug==0.10.4']), 14 | 15 | (['Jinja2', 'markupsafe'], 16 | ['jinja2==2.7.3', 'markupsafe==0.23']), 17 | 18 | # We should return a normal release version if prereleases is False 19 | (['SQLAlchemy'], 20 | ['sqlalchemy==0.9.9']), 21 | 22 | # We should return the prerelease version if prereleases is True 23 | (['SQLAlchemy'], 24 | ['sqlalchemy==1.0.0b5'], 25 | True), 26 | 27 | # Ipython has extras available, but we don't require them in this test 28 | (['ipython'], 29 | ['ipython==2.1.0', 'gnureadline==6.3.3']), 30 | 31 | # We should get dependencies for extras 32 | (['ipython[notebook]'], 33 | [ 34 | 'ipython[notebook]==2.1.0', 35 | 'pyzmq==2.1.12', 36 | 'jinja2==2.7.3', 37 | 'tornado==3.2.2', 38 | 'markupsafe==0.23', 39 | 'gnureadline==6.3.3'] 40 | ), 41 | 42 | # We should get dependencies for multiple extras 43 | (['ipython[notebook,nbconvert]'], 44 | [ 45 | # Note that the extras should be sorted 46 | 'ipython[nbconvert,notebook]==2.1.0', 47 | 'pyzmq==2.1.12', 48 | 'jinja2==2.7.3', 49 | 'tornado==3.2.2', 50 | 'markupsafe==0.23', 51 | 'gnureadline==6.3.3', 52 | 'pygments==1.5', 53 | 'sphinx==0.3'] 54 | ), 55 | 56 | # We must take the union of all extras 57 | (['ipython[notebook]', 'ipython[nbconvert]'], 58 | [ 59 | # Note that the extras should be sorted 60 | 'ipython[nbconvert,notebook]==2.1.0', 61 | 'pyzmq==2.1.12', 62 | 'jinja2==2.7.3', 63 | 'tornado==3.2.2', 64 | 
'markupsafe==0.23', 65 | 'gnureadline==6.3.3', 66 | 'pygments==1.5', 67 | 'sphinx==0.3'] 68 | ), 69 | 70 | # We must remove child dependencies from result if parent is removed (e.g. vine from amqp>=2.0) 71 | # See: GH-370 72 | (['celery', 'librabbitmq'], 73 | [ 74 | 'amqp==1.4.9', 75 | 'anyjson==0.3.3', 76 | 'billiard==3.3.0.23', 77 | 'celery==3.1.23', 78 | 'kombu==3.0.35', 79 | 'librabbitmq==1.6.1', 80 | 'pytz==2016.4'] 81 | ), 82 | ]) 83 | ) 84 | def test_resolver(resolver, from_line, input, expected, prereleases): 85 | input = [from_line(line) for line in input] 86 | output = resolver(input, prereleases=prereleases).resolve() 87 | output = {str(line) for line in output} 88 | assert output == {str(line) for line in expected} 89 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | [![Build status](https://secure.travis-ci.org/nvie/pip-tools.png?branch=master)](https://secure.travis-ci.org/nvie/pip-tools) 2 | 3 | pip-tools = pip-compile + pip-sync 4 | ================================== 5 | 6 | A set of command line tools to help you keep your `pip`-based packages fresh, 7 | even when you've pinned them. [You do pin them, right?][0] 8 | 9 | ![pip-tools overview for phase II](./img/pip-tools-overview.png) 10 | 11 | [0]: http://nvie.com/posts/pin-your-packages/ 12 | 13 | 14 | Installation 15 | ============ 16 | 17 | ```console 18 | $ pip install --upgrade pip # pip-tools needs pip==6.1 or higher (!) 19 | $ pip install pip-tools 20 | ``` 21 | 22 | 23 | Example usage for `pip-compile` 24 | =============================== 25 | 26 | Suppose you have a Flask project, and want to pin it for production. 
Write the 27 | following line to a file: 28 | 29 | # requirements.in 30 | Flask 31 | 32 | Now, run `pip-compile requirements.in`: 33 | 34 | ```console 35 | $ pip-compile requirements.in 36 | # 37 | # This file is autogenerated by pip-compile 38 | # Make changes in requirements.in, then run this to update: 39 | # 40 | # pip-compile requirements.in 41 | # 42 | flask==0.10.1 43 | itsdangerous==0.24 # via flask 44 | jinja2==2.7.3 # via flask 45 | markupsafe==0.23 # via jinja2 46 | werkzeug==0.10.4 # via flask 47 | ``` 48 | 49 | And it will produce your `requirements.txt`, with all the Flask dependencies 50 | (and all underlying dependencies) pinned. Put this file under version control 51 | as well. 52 | 53 | To update all packages, periodically re-run `pip-compile --upgrade`. 54 | 55 | To update a specific package to the latest or a specific version use the `--upgrade-package` or `-P` flag: 56 | 57 | ```console 58 | $ pip-compile --upgrade-package flask # only update the flask package 59 | $ pip-compile --upgrade-package flask --upgrade-package requests # update both the flask and requests packages 60 | $ pip-compile -P flask -P requests==2.0.0 # update the flask package to the latest, and requests to v2.0.0 61 | ``` 62 | 63 | Example usage for `pip-sync` 64 | ============================ 65 | 66 | Now that you have a `requirements.txt`, you can use `pip-sync` to update your 67 | virtual env to reflect exactly what's in there. Note: this will 68 | install/upgrade/uninstall everything necessary to match the `requirements.txt` 69 | contents. 
70 | 71 | ```console 72 | $ pip-sync 73 | Uninstalling flake8-2.4.1: 74 | Successfully uninstalled flake8-2.4.1 75 | Collecting click==4.1 76 | Downloading click-4.1-py2.py3-none-any.whl (62kB) 77 | 100% |████████████████████████████████| 65kB 1.8MB/s 78 | Found existing installation: click 4.0 79 | Uninstalling click-4.0: 80 | Successfully uninstalled click-4.0 81 | Successfully installed click-4.1 82 | ``` 83 | 84 | To sync multiple `*.txt` dependency lists, just pass them in via command line arguments e.g.: 85 | ```shell 86 | $ pip-sync dev-requirements.txt requirements.txt 87 | ``` 88 | Passing in empty arguments would cause it to default to `requirements.txt`. 89 | -------------------------------------------------------------------------------- /piptools/_compat/tempfile.py: -------------------------------------------------------------------------------- 1 | # coding: utf-8 2 | from __future__ import (absolute_import, division, print_function, 3 | unicode_literals) 4 | 5 | import os as _os 6 | import sys as _sys 7 | import warnings as _warnings 8 | from tempfile import mkdtemp 9 | 10 | 11 | class TemporaryDirectory(object): 12 | """Create and return a temporary directory. This has the same 13 | behavior as mkdtemp but can be used as a context manager. For 14 | example: 15 | 16 | with TemporaryDirectory() as tmpdir: 17 | ... 18 | 19 | Upon exiting the context, the directory and everything contained 20 | in it are removed. 
    """

    def __init__(self, suffix="", prefix="tmp", dir=None):
        # Track whether cleanup already ran so it is only done once.
        self._closed = False
        self.name = None # Handle mkdtemp raising an exception
        self.name = mkdtemp(suffix, prefix, dir)

    def __repr__(self):
        return "<{} {!r}>".format(self.__class__.__name__, self.name)

    def __enter__(self):
        # Yield the directory path, mirroring tempfile.TemporaryDirectory.
        return self.name

    def cleanup(self):
        """Remove the directory tree; safe to call more than once."""
        if self.name and not self._closed:
            try:
                self._rmtree(self.name)
            except (TypeError, AttributeError) as ex:
                # Issue #10188: Emit a warning on stderr
                # if the directory could not be cleaned
                # up due to missing globals
                if "None" not in str(ex):
                    raise
                print("ERROR: {!r} while cleaning up {!r}".format(ex, self,),
                      file=_sys.stderr)
                return
            self._closed = True

    def __exit__(self, exc, value, tb):
        # Never suppresses exceptions: returns None after cleanup.
        self.cleanup()

    def __del__(self):
        # Issue a ResourceWarning if implicit cleanup needed
        self.cleanup()

    # XXX (ncoghlan): The following code attempts to make
    # this class tolerant of the module nulling out process
    # that happens during CPython interpreter shutdown
    # Alas, it doesn't actually manage it. See issue #10188
    _listdir = staticmethod(_os.listdir)
    _path_join = staticmethod(_os.path.join)
    _isdir = staticmethod(_os.path.isdir)
    _islink = staticmethod(_os.path.islink)
    _remove = staticmethod(_os.remove)
    _rmdir = staticmethod(_os.rmdir)
    _warn = _warnings.warn

    def _rmtree(self, path):
        # Essentially a stripped down version of shutil.rmtree. We can't
        # use globals because they may be None'ed out at shutdown.
71 | for name in self._listdir(path): 72 | fullname = self._path_join(path, name) 73 | try: 74 | isdir = self._isdir(fullname) and not self._islink(fullname) 75 | except OSError: 76 | isdir = False 77 | if isdir: 78 | self._rmtree(fullname) 79 | else: 80 | try: 81 | self._remove(fullname) 82 | except OSError: 83 | pass 84 | try: 85 | self._rmdir(path) 86 | except OSError: 87 | pass 88 | -------------------------------------------------------------------------------- /piptools/scripts/sync.py: -------------------------------------------------------------------------------- 1 | # coding: utf-8 2 | from __future__ import (absolute_import, division, print_function, 3 | unicode_literals) 4 | 5 | import os 6 | import sys 7 | 8 | import pip 9 | 10 | from .. import click, sync 11 | from ..exceptions import PipToolsError 12 | from ..logging import log 13 | from ..utils import assert_compatible_pip_version, flat_map 14 | 15 | # Make sure we're using a compatible version of pip 16 | assert_compatible_pip_version() 17 | 18 | DEFAULT_REQUIREMENTS_FILE = 'requirements.txt' 19 | 20 | 21 | @click.command() 22 | @click.version_option() 23 | @click.option('-n', '--dry-run', is_flag=True, help="Only show what would happen, don't change anything") 24 | @click.option('--force', is_flag=True, help="Proceed even if conflicts are found") 25 | @click.option('-f', '--find-links', multiple=True, help="Look for archives in this directory or on this HTML page", envvar='PIP_FIND_LINKS') # noqa 26 | @click.option('-i', '--index-url', help="Change index URL (defaults to PyPI)", envvar='PIP_INDEX_URL') 27 | @click.option('--extra-index-url', multiple=True, help="Add additional index URL to search", envvar='PIP_EXTRA_INDEX_URL') # noqa 28 | @click.option('--no-index', is_flag=True, help="Ignore package index (only looking at --find-links URLs instead)") 29 | @click.argument('src_files', required=False, type=click.Path(exists=True), nargs=-1) 30 | def cli(dry_run, force, find_links, index_url, 
extra_index_url, no_index, src_files): 31 | if not src_files: 32 | if os.path.exists(DEFAULT_REQUIREMENTS_FILE): 33 | src_files = (DEFAULT_REQUIREMENTS_FILE,) 34 | else: 35 | msg = 'No requirement files given and no {} found in the current directory' 36 | log.error(msg.format(DEFAULT_REQUIREMENTS_FILE)) 37 | sys.exit(2) 38 | 39 | if any(src_file.endswith('.in') for src_file in src_files): 40 | msg = ('Some input files have the .in extension, which is most likely an error and can ' 41 | 'cause weird behaviour. You probably meant to use the corresponding *.txt file?') 42 | if force: 43 | log.warning('WARNING: ' + msg) 44 | else: 45 | log.error('ERROR: ' + msg) 46 | sys.exit(2) 47 | 48 | requirements = flat_map(lambda src: pip.req.parse_requirements(src, session=True), 49 | src_files) 50 | 51 | try: 52 | requirements = sync.merge(requirements, ignore_conflicts=force) 53 | except PipToolsError as e: 54 | log.error(str(e)) 55 | sys.exit(2) 56 | 57 | installed_dists = pip.get_installed_distributions(skip=[]) 58 | to_install, to_uninstall = sync.diff(requirements, installed_dists) 59 | 60 | install_flags = [] 61 | for link in find_links or []: 62 | install_flags.extend(['-f', link]) 63 | if no_index: 64 | install_flags.append('--no-index') 65 | if index_url: 66 | install_flags.extend(['-i', index_url]) 67 | if extra_index_url: 68 | for extra_index in extra_index_url: 69 | install_flags.extend(['--extra-index-url', extra_index]) 70 | 71 | sys.exit(sync.sync(to_install, to_uninstall, verbose=True, dry_run=dry_run, 72 | install_flags=install_flags)) 73 | -------------------------------------------------------------------------------- /tests/conftest.py: -------------------------------------------------------------------------------- 1 | import json 2 | from functools import partial 3 | 4 | from pip._vendor.packaging.version import Version 5 | from pip._vendor.pkg_resources import Requirement 6 | from pip.req import InstallRequirement 7 | from pytest import fixture 8 | 9 | 
from piptools.cache import DependencyCache
from piptools.repositories.base import BaseRepository
from piptools.resolver import Resolver
from piptools.utils import as_tuple, key_from_req, make_install_requirement


class FakeRepository(BaseRepository):
    """Offline stand-in for a package repository, driven by the JSON fixtures."""

    def __init__(self):
        # Fake index: package name -> version -> extra -> list of dependencies.
        with open('tests/fixtures/fake-index.json', 'r') as f:
            self.index = json.load(f)

        # Fake editables: editable link -> list of dependencies.
        with open('tests/fixtures/fake-editables.json', 'r') as f:
            self.editables = json.load(f)

    def get_hashes(self, ireq):
        """Return a fixed set of fake hashes, regardless of the requirement."""
        return {
            'test:123',
            'sha256:0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef',
        }

    def find_best_match(self, ireq, prereleases=False):
        """Pick the highest fixture version satisfying the requirement's specifier."""
        if ireq.editable:
            return ireq

        package = key_from_req(ireq.req)
        candidates = ireq.specifier.filter(self.index[package], prereleases=prereleases)
        best = max(candidates, key=Version)
        return make_install_requirement(package, best, ireq.extras)

    def get_dependencies(self, ireq):
        """Look up the fixture dependencies for a pinned or editable requirement."""
        if ireq.editable:
            return self.editables[str(ireq.link)]

        name, version, extras = as_tuple(ireq)
        deps = []
        # Non-extra dependencies are stored under the empty string.
        for extra in extras + ("",):
            deps.extend(self.index[name][version][extra])
        return [InstallRequirement.from_line(dep) for dep in deps]


class FakeInstalledDistribution(object):
    """Minimal look-alike of an installed pkg_resources distribution."""

    def __init__(self, line, deps=None):
        self.deps = [Requirement.parse(d) for d in (deps or [])]
        self.req = Requirement.parse(line)
        self.key = key_from_req(self.req)
        self.specifier = self.req.specifier
        # The fixture lines are always pinned, e.g. "django==1.8".
        self.version = line.split("==")[1]

    def requires(self):
        return self.deps

    def as_requirement(self):
        return self.req


@fixture
def fake_dist():
    """Factory fixture producing FakeInstalledDistribution objects."""
    return FakeInstalledDistribution

@fixture 75 | def repository(): 76 | return FakeRepository() 77 | 78 | 79 | @fixture 80 | def depcache(tmpdir): 81 | return DependencyCache(str(tmpdir)) 82 | 83 | 84 | @fixture 85 | def resolver(depcache, repository): 86 | # TODO: It'd be nicer if Resolver instance could be set up and then 87 | # use .resolve(...) on the specset, instead of passing it to 88 | # the constructor like this (it's not reusable) 89 | return partial(Resolver, repository=repository, cache=depcache) 90 | 91 | 92 | @fixture 93 | def base_resolver(depcache): 94 | return partial(Resolver, cache=depcache) 95 | 96 | 97 | @fixture 98 | def from_line(): 99 | return InstallRequirement.from_line 100 | 101 | 102 | @fixture 103 | def from_editable(): 104 | return InstallRequirement.from_editable 105 | -------------------------------------------------------------------------------- /tests/test_cache.py: -------------------------------------------------------------------------------- 1 | from contextlib import contextmanager 2 | from os import remove 3 | from shutil import rmtree 4 | from tempfile import NamedTemporaryFile 5 | 6 | from pytest import raises 7 | 8 | from piptools.cache import read_cache_file, CorruptCacheError, DependencyCache 9 | 10 | 11 | @contextmanager 12 | def _read_cache_file_helper(to_write): 13 | """ 14 | On enter, create the file with the given string, and then yield its path. 15 | On exit, delete that file. 16 | 17 | :param str to_write: the content to write to the file 18 | :yield: the path to the temporary file 19 | """ 20 | try: 21 | # Create the file and write to it 22 | cache_file = NamedTemporaryFile(mode="w", delete=False) 23 | cache_file.write(to_write) 24 | cache_file.close() 25 | 26 | # Yield the path to the file 27 | yield cache_file.name 28 | 29 | finally: 30 | # Delete the file on exit 31 | remove(cache_file.name) 32 | 33 | 34 | def test_read_cache_file_not_json(): 35 | """ 36 | A cache file that's not JSON should throw a corrupt cache error. 
37 | """ 38 | with _read_cache_file_helper("not json") as cache_file_name: 39 | with raises(CorruptCacheError): 40 | read_cache_file(cache_file_name) 41 | 42 | 43 | def test_read_cache_file_wrong_format(): 44 | """ 45 | A cache file with a wrong "__format__" value should throw an assertion error. 46 | """ 47 | with _read_cache_file_helper('{"__format__": 2}') as cache_file_name: 48 | with raises(AssertionError): 49 | read_cache_file(cache_file_name) 50 | 51 | 52 | def test_read_cache_file_successful(): 53 | """ 54 | A good cache file. 55 | """ 56 | with _read_cache_file_helper('{"__format__": 1, "dependencies": "success"}') as cache_file_name: 57 | assert "success" == read_cache_file(cache_file_name) 58 | 59 | 60 | def test_reverse_dependencies(from_line, tmpdir): 61 | # Since this is a test, make a temporary directory. Converting to str from py.path. 62 | tmp_dir_path = str(tmpdir) 63 | 64 | # Create a cache object. The keys are packages, and the values are lists of packages on which the keys depend. 65 | cache = DependencyCache(cache_dir=tmp_dir_path) 66 | cache[from_line("top==1.2")] = ["middle>=0.3", "bottom>=5.1.2"] 67 | cache[from_line("top[xtra]==1.2")] = ["middle>=0.3", "bottom>=5.1.2", "bonus==0.4"] 68 | cache[from_line("middle==0.4")] = ["bottom<6"] 69 | cache[from_line("bottom==5.3.5")] = [] 70 | cache[from_line("bonus==0.4")] = [] 71 | 72 | # In this case, we're using top 1.2 without an extra, so the "bonus" package is not depended upon. 73 | reversed_no_extra = cache.reverse_dependencies([ 74 | from_line("top==1.2"), 75 | from_line("middle==0.4"), 76 | from_line("bottom==5.3.5"), 77 | from_line("bonus==0.4") 78 | ]) 79 | assert reversed_no_extra == { 80 | 'middle': {'top'}, 81 | 'bottom': {'middle', 'top'} 82 | } 83 | 84 | # Now we're using top 1.2 with the "xtra" extra, so it depends on the "bonus" package. 
85 | reversed_extra = cache.reverse_dependencies([ 86 | from_line("top[xtra]==1.2"), 87 | from_line("middle==0.4"), 88 | from_line("bottom==5.3.5"), 89 | from_line("bonus==0.4") 90 | ]) 91 | assert reversed_extra == { 92 | 'middle': {'top'}, 93 | 'bottom': {'middle', 'top'}, 94 | 'bonus': {'top'} 95 | } 96 | 97 | # Clean up our temp directory 98 | rmtree(tmp_dir_path) 99 | -------------------------------------------------------------------------------- /tests/test_cli.py: -------------------------------------------------------------------------------- 1 | import os 2 | from textwrap import dedent 3 | 4 | from click.testing import CliRunner 5 | 6 | import pytest 7 | from piptools.scripts.compile import cli 8 | 9 | 10 | @pytest.yield_fixture 11 | def pip_conf(tmpdir): 12 | test_conf = dedent("""\ 13 | [global] 14 | index-url = http://example.com 15 | trusted-host = example.com 16 | """) 17 | 18 | pip_conf_file = 'pip.conf' if os.name != 'nt' else 'pip.ini' 19 | path = (tmpdir / pip_conf_file).strpath 20 | 21 | with open(path, 'w') as f: 22 | f.write(test_conf) 23 | 24 | old_value = os.environ.get('PIP_CONFIG_FILE') 25 | try: 26 | os.environ['PIP_CONFIG_FILE'] = path 27 | yield path 28 | finally: 29 | if old_value is not None: 30 | os.environ['PIP_CONFIG_FILE'] = old_value 31 | else: 32 | del os.environ['PIP_CONFIG_FILE'] 33 | os.remove(path) 34 | 35 | 36 | def test_default_pip_conf_read(pip_conf): 37 | 38 | assert os.path.exists(pip_conf) 39 | 40 | runner = CliRunner() 41 | with runner.isolated_filesystem(): 42 | # preconditions 43 | open('requirements.in', 'w').close() 44 | out = runner.invoke(cli, ['-v']) 45 | 46 | # check that we have our index-url as specified in pip.conf 47 | assert 'Using indexes:\n http://example.com' in out.output 48 | assert '--index-url http://example.com' in out.output 49 | 50 | 51 | def test_command_line_overrides_pip_conf(pip_conf): 52 | 53 | assert os.path.exists(pip_conf) 54 | 55 | runner = CliRunner() 56 | with 
runner.isolated_filesystem(): 57 | # preconditions 58 | open('requirements.in', 'w').close() 59 | out = runner.invoke(cli, ['-v', '-i', 'http://override.com']) 60 | 61 | # check that we have our index-url as specified in pip.conf 62 | assert 'Using indexes:\n http://override.com' in out.output 63 | 64 | 65 | def test_find_links_option(pip_conf): 66 | 67 | assert os.path.exists(pip_conf) 68 | 69 | runner = CliRunner() 70 | with runner.isolated_filesystem(): 71 | open('requirements.in', 'w').close() 72 | out = runner.invoke(cli, ['-v', '-f', './libs1', '-f', './libs2']) 73 | 74 | # Check that find-links has been passed to pip 75 | assert 'Configuration:\n -f ./libs1\n -f ./libs2' in out.output 76 | 77 | 78 | def test_extra_index_option(pip_conf): 79 | 80 | assert os.path.exists(pip_conf) 81 | 82 | runner = CliRunner() 83 | with runner.isolated_filesystem(): 84 | open('requirements.in', 'w').close() 85 | out = runner.invoke(cli, ['-v', 86 | '--extra-index-url', 'http://extraindex1.com', 87 | '--extra-index-url', 'http://extraindex2.com']) 88 | assert ('Using indexes:\n' 89 | ' http://example.com\n' 90 | ' http://extraindex1.com\n' 91 | ' http://extraindex2.com' in out.output) 92 | 93 | 94 | def test_trusted_host(pip_conf): 95 | 96 | assert os.path.exists(pip_conf) 97 | 98 | runner = CliRunner() 99 | with runner.isolated_filesystem(): 100 | open('requirements.in', 'w').close() 101 | out = runner.invoke(cli, ['-v', 102 | '--trusted-host', 'example2.com']) 103 | print(out.output) 104 | assert ('--trusted-host example.com\n' 105 | '--trusted-host example2.com\n' in out.output) 106 | -------------------------------------------------------------------------------- /CHANGELOG.md: -------------------------------------------------------------------------------- 1 | # 1.8.1 2 | 3 | - Recalculate secondary dependencies between rounds (#378) 4 | 5 | 6 | # 1.8.0 7 | 8 | - Adds support for upgrading individual packages with a new option 9 | `--upgrade-package`. 
To upgrade a _specific_ package to the latest or
a specific version, use `--upgrade-package <package>`. To upgrade all packages,
you can still use `pip-compile --upgrade`. (#409)
- Adds support for pinning dependencies even further by including the hashes
  found on PyPI at compilation time, which will be re-checked when dependencies
  are installed at installation time. This adds protection against packages
  that are tampered with. (#383)
- Improve support for extras, like `hypothesis[django]`
- Drop support for pip < 8


# 1.7.1

- Add `--allow-unsafe` option (#377)


# 1.7.0

- Add compatibility with pip >= 8.1.2 (#374)
  Thanks so much, @jmbowman!


# 1.6.5

- Add warning that pip >= 8.1.2 is not supported until 1.7.x is out


# 1.6.4

- Incorporate fix for atomic file saving behaviour on the Windows platform
  (see #351)


# 1.6.3

- PyPI won't let me upload 1.6.2


# 1.6.2

- Respect pip configuration from pip.{ini,conf}
- Fixes for atomic-saving of output files on Windows (see #351)


# 1.6.1

Minor changes:
- pip-sync now supports being invoked from within and outside an activated
  virtualenv (see #317)
- pip-compile: support -U as a shorthand for --upgrade
- pip-compile: support pip's --no-binary and --binary-only flags

Fixes:
- Change header format of output files to mention all input files


# 1.6

Major change:
- pip-compile will by default try to fulfill package specs by looking at
  a previously compiled output file first, before checking PyPI. This means
  pip-compile will only update the requirements.txt when it absolutely has to.
  To get the old behaviour (picking the latest version of all packages from
  PyPI), use the new `--upgrade` option.
73 | 74 | Minor changes: 75 | - Bugfix where pip-compile would lose "via" info when on pip 8 (see #313) 76 | - Ensure cache dir exists (see #315) 77 | 78 | 79 | # 1.5 80 | 81 | - Add support for pip>=8 82 | - Drop support for pip<7 83 | - Fix bug where `pip-sync` fails to uninstall packages if you're using the 84 | `--no-index` (or other) flags 85 | 86 | 87 | # 1.4.5 88 | 89 | - Add `--no-index` flag to `pip-compile` to avoid emitting `--index-url` into 90 | the output (useful if you have configured a different index in your global 91 | ~/.pip/pip.conf, for example) 92 | - Fix: ignore stdlib backport packages, like `argparse`, when listing which 93 | packages will be installed/uninstalled (#286) 94 | - Fix pip-sync failed uninstalling packages when using `--find-links` (#298) 95 | - Explicitly error when pip-tools is used with pip 8.0+ (for now) 96 | 97 | 98 | # 1.4.4 99 | 100 | - Fix: unintended change in behaviour where packages installed by `pip-sync` 101 | could accidentally get upgraded under certain conditions, even though the 102 | requirements.txt would dictate otherwise (see #290) 103 | 104 | 105 | # 1.4.3 106 | 107 | - Fix: add `--index-url` and `--extra-index-url` options to `pip-sync` 108 | - Fix: always install using `--upgrade` flag when running `pip-sync` 109 | 110 | 111 | # 1.4.2 112 | 113 | - Fix bug where umask was ignored when writing requirement files (#268) 114 | 115 | 116 | # 1.4.1 117 | 118 | - Fix bug where successive invocations of pip-sync with editables kept 119 | uninstalling/installing them (fixes #270) 120 | 121 | 122 | # 1.4.0 123 | 124 | - Add command line option -f / --find-links 125 | - Add command line option --no-index 126 | - Add command line alias -n (for --dry-run) 127 | - Fix a unicode issue 128 | 129 | 130 | # 1.3.0 131 | 132 | - Support multiple requirement files to pip-compile 133 | - Support requirements from stdin for pip-compile 134 | - Support --output-file option on pip-compile, to redirect output to a file (or 
stdout) 135 | 136 | 137 | # 1.2.0 138 | 139 | - Add CHANGELOG :) 140 | - Support pip-sync'ing editable requirements 141 | - Support extras properly (i.e. package[foo] syntax) 142 | 143 | (Anything before 1.2.0 was not recorded.) 144 | -------------------------------------------------------------------------------- /tests/test_sync.py: -------------------------------------------------------------------------------- 1 | from collections import Counter 2 | 3 | import pytest 4 | from piptools.exceptions import IncompatibleRequirements 5 | from piptools.sync import dependency_tree, diff, merge 6 | 7 | 8 | @pytest.mark.parametrize( 9 | ('installed', 'root', 'expected'), 10 | 11 | [ 12 | ([], 13 | 'pip-tools', []), 14 | 15 | ([('pip-tools==1', [])], 16 | 'pip-tools', ['pip-tools']), 17 | 18 | ([('pip-tools==1', []), 19 | ('django==1.7', [])], 20 | 'pip-tools', ['pip-tools']), 21 | 22 | ([('pip-tools==1', ['click>=2']), 23 | ('django==1.7', []), 24 | ('click==3', [])], 25 | 'pip-tools', ['pip-tools', 'click']), 26 | 27 | ([('pip-tools==1', ['click>=2']), 28 | ('django==1.7', []), 29 | ('click==1', [])], 30 | 'pip-tools', ['pip-tools']), 31 | 32 | ([('root==1', ['child==2']), 33 | ('child==2', ['grandchild==3']), 34 | ('grandchild==3', [])], 35 | 'root', ['root', 'child', 'grandchild']), 36 | 37 | ([('root==1', ['child==2']), 38 | ('child==2', ['root==1'])], 39 | 'root', ['root', 'child']), 40 | ] 41 | ) 42 | def test_dependency_tree(fake_dist, installed, root, expected): 43 | installed = {distribution.key: distribution 44 | for distribution in 45 | (fake_dist(name, deps) for name, deps in installed)} 46 | 47 | actual = dependency_tree(installed, root) 48 | assert actual == set(expected) 49 | 50 | 51 | def test_merge_detect_conflicts(from_line): 52 | requirements = [from_line('flask==1'), from_line('flask==2')] 53 | 54 | with pytest.raises(IncompatibleRequirements): 55 | merge(requirements, ignore_conflicts=False) 56 | 57 | 58 | def test_merge_ignore_conflicts(from_line): 
59 | requirements = [from_line('flask==1'), from_line('flask==2')] 60 | 61 | assert Counter(requirements[1:2]) == Counter(merge(requirements, ignore_conflicts=True)) 62 | 63 | 64 | def test_merge(from_line): 65 | requirements = [from_line('flask==1'), 66 | from_line('flask==1'), 67 | from_line('django==2')] 68 | 69 | assert Counter(requirements[1:3]) == Counter(merge(requirements, ignore_conflicts=True)) 70 | 71 | 72 | def test_diff_should_do_nothing(): 73 | installed = [] # empty env 74 | reqs = [] # no requirements 75 | 76 | to_install, to_uninstall = diff(reqs, installed) 77 | assert to_install == set() 78 | assert to_uninstall == set() 79 | 80 | 81 | def test_diff_should_install(from_line): 82 | installed = [] # empty env 83 | reqs = [from_line('django==1.8')] 84 | 85 | to_install, to_uninstall = diff(reqs, installed) 86 | assert to_install == {'django==1.8'} 87 | assert to_uninstall == set() 88 | 89 | 90 | def test_diff_should_uninstall(fake_dist): 91 | installed = [fake_dist('django==1.8')] 92 | reqs = [] 93 | 94 | to_install, to_uninstall = diff(reqs, installed) 95 | assert to_install == set() 96 | assert to_uninstall == {'django'} # no version spec when uninstalling 97 | 98 | 99 | def test_diff_should_update(fake_dist, from_line): 100 | installed = [fake_dist('django==1.7')] 101 | reqs = [from_line('django==1.8')] 102 | 103 | to_install, to_uninstall = diff(reqs, installed) 104 | assert to_install == {'django==1.8'} 105 | assert to_uninstall == set() 106 | 107 | 108 | def test_diff_leave_packaging_packages_alone(fake_dist, from_line): 109 | # Suppose an env contains Django, and pip itself 110 | installed = [ 111 | fake_dist('django==1.7'), 112 | fake_dist('first==2.0.1'), 113 | fake_dist('pip==7.1.0'), 114 | ] 115 | 116 | # Then this Django-only requirement should keep pip around (i.e. 
NOT 117 | # uninstall it), but uninstall first 118 | reqs = [ 119 | from_line('django==1.7'), 120 | ] 121 | 122 | to_install, to_uninstall = diff(reqs, installed) 123 | assert to_install == set() 124 | assert to_uninstall == {'first'} 125 | 126 | 127 | def test_diff_leave_piptools_alone(fake_dist, from_line): 128 | # Suppose an env contains Django, and pip-tools itself (including all of 129 | # its dependencies) 130 | installed = [ 131 | fake_dist('django==1.7'), 132 | fake_dist('first==2.0.1'), 133 | fake_dist('pip-tools==1.1.1', [ 134 | 'click>=4', 135 | 'first', 136 | 'six', 137 | ]), 138 | fake_dist('six==1.9.0'), 139 | fake_dist('click==4.1'), 140 | fake_dist('foobar==0.3.6'), 141 | ] 142 | 143 | # Then this Django-only requirement should keep pip around (i.e. NOT 144 | # uninstall it), but uninstall first 145 | reqs = [ 146 | from_line('django==1.7'), 147 | ] 148 | 149 | to_install, to_uninstall = diff(reqs, installed) 150 | assert to_install == set() 151 | assert to_uninstall == {'foobar'} 152 | -------------------------------------------------------------------------------- /piptools/_compat/contextlib.py: -------------------------------------------------------------------------------- 1 | # coding: utf-8 2 | from __future__ import (absolute_import, division, print_function, 3 | unicode_literals) 4 | 5 | import sys 6 | from collections import deque 7 | 8 | 9 | # Inspired by discussions on http://bugs.python.org/issue13585 10 | class ExitStack(object): 11 | """Context manager for dynamic management of a stack of exit callbacks 12 | 13 | For example: 14 | 15 | with ExitStack() as stack: 16 | files = [stack.enter_context(open(fname)) for fname in filenames] 17 | # All opened files will automatically be closed at the end of 18 | # the with statement, even if attempts to open files later 19 | # in the list throw an exception 20 | 21 | """ 22 | def __init__(self): 23 | self._exit_callbacks = deque() 24 | 25 | def pop_all(self): 26 | """Preserve the context stack 
by transferring it to a new instance""" 27 | new_stack = type(self)() 28 | new_stack._exit_callbacks = self._exit_callbacks 29 | self._exit_callbacks = deque() 30 | return new_stack 31 | 32 | def _push_cm_exit(self, cm, cm_exit): 33 | """Helper to correctly register callbacks to __exit__ methods""" 34 | def _exit_wrapper(*exc_details): 35 | return cm_exit(cm, *exc_details) 36 | _exit_wrapper.__self__ = cm 37 | self.push(_exit_wrapper) 38 | 39 | def push(self, exit): 40 | """Registers a callback with the standard __exit__ method signature 41 | 42 | Can suppress exceptions the same way __exit__ methods can. 43 | 44 | Also accepts any object with an __exit__ method (registering the 45 | method instead of the object itself) 46 | """ 47 | # We use an unbound method rather than a bound method to follow 48 | # the standard lookup behaviour for special methods 49 | _cb_type = type(exit) 50 | try: 51 | exit_method = _cb_type.__exit__ 52 | except AttributeError: 53 | # Not a context manager, so assume its a callable 54 | self._exit_callbacks.append(exit) 55 | else: 56 | self._push_cm_exit(exit, exit_method) 57 | return exit # Allow use as a decorator 58 | 59 | def callback(self, callback, *args, **kwds): 60 | """Registers an arbitrary callback and arguments. 61 | 62 | Cannot suppress exceptions. 63 | """ 64 | def _exit_wrapper(exc_type, exc, tb): 65 | callback(*args, **kwds) 66 | # We changed the signature, so using @wraps is not appropriate, but 67 | # setting __wrapped__ may still help with introspection 68 | _exit_wrapper.__wrapped__ = callback 69 | self.push(_exit_wrapper) 70 | return callback # Allow use as a decorator 71 | 72 | def enter_context(self, cm): 73 | """Enters the supplied context manager 74 | 75 | If successful, also pushes its __exit__ method as a callback and 76 | returns the result of the __enter__ method. 
77 | """ 78 | # We look up the special methods on the type to match the with 79 | # statement 80 | _cm_type = type(cm) 81 | _exit = _cm_type.__exit__ 82 | result = _cm_type.__enter__(cm) 83 | self._push_cm_exit(cm, _exit) 84 | return result 85 | 86 | def close(self): 87 | """Immediately unwind the context stack""" 88 | self.__exit__(None, None, None) 89 | 90 | def __enter__(self): 91 | return self 92 | 93 | def __exit__(self, *exc_details): 94 | if not self._exit_callbacks: 95 | return 96 | 97 | # This looks complicated, but it is really just 98 | # setting up a chain of try-expect statements to ensure 99 | # that outer callbacks still get invoked even if an 100 | # inner one throws an exception 101 | def _invoke_next_callback(exc_details): 102 | # Callbacks are removed from the list in FIFO order 103 | # but the recursion means they're invoked in LIFO order 104 | cb = self._exit_callbacks.popleft() 105 | if not self._exit_callbacks: 106 | # Innermost callback is invoked directly 107 | return cb(*exc_details) 108 | # More callbacks left, so descend another level in the stack 109 | try: 110 | suppress_exc = _invoke_next_callback(exc_details) 111 | except: 112 | suppress_exc = cb(*sys.exc_info()) 113 | # Check if this cb suppressed the inner exception 114 | if not suppress_exc: 115 | raise 116 | else: 117 | # Check if inner cb suppressed the original exception 118 | if suppress_exc: 119 | exc_details = (None, None, None) 120 | suppress_exc = cb(*exc_details) or suppress_exc 121 | return suppress_exc 122 | # Kick off the recursive chain 123 | return _invoke_next_callback(exc_details) 124 | -------------------------------------------------------------------------------- /piptools/sync.py: -------------------------------------------------------------------------------- 1 | import collections 2 | import os 3 | import sys 4 | from subprocess import check_call 5 | 6 | from . 
import click
from .exceptions import IncompatibleRequirements, UnsupportedConstraint
from .utils import flat_map, key_from_req

# Distributions pip-sync must never uninstall: removing these (or their
# dependencies) would break pip-tools itself, or pip.
PACKAGES_TO_IGNORE = [
    'pip',
    'pip-tools',
    'pip-review',
    'setuptools',
    'wheel',
]


def dependency_tree(installed_keys, root_key):
    """
    Calculate the dependency tree for the package `root_key` and return
    a collection of all its dependencies.  Uses a BFS traversal (the deque
    is consumed from the left; the docstring previously said DFS, which
    did not match the implementation).

    `installed_keys` should be a {key: requirement} mapping, e.g.
        {'django': from_line('django==1.8')}
    `root_key` should be the key to return the dependency tree for.
    """
    dependencies = set()
    queue = collections.deque()

    if root_key in installed_keys:
        dep = installed_keys[root_key]
        queue.append(dep)

    while queue:
        v = queue.popleft()
        key = key_from_req(v)
        if key in dependencies:
            # Already visited: guards against cycles and duplicate work.
            continue

        dependencies.add(key)

        for dep_specifier in v.requires():
            dep_name = key_from_req(dep_specifier)
            if dep_name in installed_keys:
                dep = installed_keys[dep_name]

                # Only follow the edge when the installed version actually
                # satisfies the declared specifier.
                if dep_specifier.specifier.contains(dep.version):
                    queue.append(dep)

    return dependencies


def get_dists_to_ignore(installed):
    """
    Returns a collection of package names to ignore when performing pip-sync,
    based on the currently installed environment.  For example, when pip-tools
    is installed in the local environment, it should be ignored, including all
    of its dependencies (e.g. click).  When pip-tools is not installed
    locally, click should also be installed/uninstalled depending on the given
    requirements.
    """
    installed_keys = {key_from_req(r): r for r in installed}
    return list(flat_map(lambda req: dependency_tree(installed_keys, req), PACKAGES_TO_IGNORE))


def merge(requirements, ignore_conflicts):
    """
    Merge InstallRequirements by key, raising IncompatibleRequirements on
    conflicting pins unless `ignore_conflicts` is set.
    """
    by_key = {}

    for ireq in requirements:
        if ireq.link is not None and not ireq.editable:
            msg = ('pip-compile does not support URLs as packages, unless they are editable. '
                   'Perhaps add -e option?')
            raise UnsupportedConstraint(msg, ireq)

        key = ireq.link or key_from_req(ireq.req)

        if not ignore_conflicts:
            existing_ireq = by_key.get(key)
            if existing_ireq:
                # NOTE: We check equality here since we can assume that the
                # requirements are all pinned
                if ireq.specifier != existing_ireq.specifier:
                    raise IncompatibleRequirements(ireq, existing_ireq)

        # TODO: Always pick the largest specifier in case of a conflict
        by_key[key] = ireq

    return by_key.values()


def diff(compiled_requirements, installed_dists):
    """
    Calculate which packages should be installed or uninstalled, given a set
    of compiled requirements and a list of currently installed modules.

    Returns a (to_install, to_uninstall) pair: `to_install` holds
    "key==version" (or URL) strings, `to_uninstall` holds bare keys.
    """
    requirements_lut = {r.link or key_from_req(r.req): r for r in compiled_requirements}

    satisfied = set()  # holds keys
    to_install = set()  # holds keys-and-versions
    to_uninstall = set()  # holds keys

    pkgs_to_ignore = get_dists_to_ignore(installed_dists)
    for dist in installed_dists:
        key = key_from_req(dist)
        if key not in requirements_lut:
            to_uninstall.add(key)
        elif requirements_lut[key].specifier.contains(dist.version):
            satisfied.add(key)

    for key, requirement in requirements_lut.items():
        if key not in satisfied:
            to_install.add(str(requirement.link or requirement.req))

    # Make sure to not uninstall any packages that should be ignored
    to_uninstall -= set(pkgs_to_ignore)

    return (to_install, to_uninstall)


def sync(to_install, to_uninstall, verbose=False, dry_run=False, pip_flags=None, install_flags=None):
    """
    Install and uninstall the given sets of modules.  Returns 0.

    When `dry_run` is set, only prints what would happen.  `pip_flags` and
    `install_flags` are extra command-line arguments passed through to pip.
    """
    if not to_uninstall and not to_install:
        click.echo("Everything up-to-date")
        # BUGFIX: return early -- there is nothing to do, so don't bother
        # locating pip or building flag lists.
        return 0

    # BUGFIX: copy the caller's list; the `+=` below used to append '-q'
    # to the caller's `pip_flags` argument in place.
    pip_flags = [] if pip_flags is None else list(pip_flags)

    if not verbose:
        pip_flags += ['-q']

    if os.environ.get('VIRTUAL_ENV'):
        # find pip via PATH
        pip = 'pip'
    else:
        # find pip in same directory as pip-sync entry-point script
        pip = os.path.join(os.path.dirname(os.path.abspath(sys.argv[0])), 'pip')

    if to_uninstall:
        if dry_run:
            click.echo("Would uninstall:")
            for pkg in to_uninstall:
                click.echo("  {}".format(pkg))
        else:
            check_call([pip, 'uninstall', '-y'] + pip_flags + sorted(to_uninstall))

    if to_install:
        if install_flags is None:
            install_flags = []
        if dry_run:
            click.echo("Would install:")
            for pkg in to_install:
                click.echo("  {}".format(pkg))
        else:
            check_call([pip, 'install'] + pip_flags + install_flags + sorted(to_install))
    return 0


# ---- piptools/writer.py (module head) ----
import os
from itertools import chain

from ._compat import ExitStack
from .click import unstyle
from .io import AtomicSaver
from .logging import log
from .utils import comment, format_requirement


class OutputWriter(object):
    """Formats and writes the compiled requirements.txt output."""
    def __init__(self, src_files, dst_file, dry_run, emit_header, emit_index,
                 annotate, generate_hashes, default_index_url, index_urls,
                 trusted_hosts, format_control, allow_unsafe=False):
        self.src_files = src_files
        self.dst_file = dst_file
        self.dry_run = dry_run
        self.emit_header = emit_header
        self.emit_index = emit_index
        self.annotate = annotate
        self.generate_hashes = generate_hashes
        self.default_index_url = default_index_url
        self.index_urls = index_urls
self.trusted_hosts = trusted_hosts 25 | self.format_control = format_control 26 | self.allow_unsafe = allow_unsafe 27 | 28 | def _sort_key(self, ireq): 29 | return (not ireq.editable, str(ireq.req).lower()) 30 | 31 | def write_header(self): 32 | if self.emit_header: 33 | yield comment('#') 34 | yield comment('# This file is autogenerated by pip-compile') 35 | yield comment('# To update, run:') 36 | yield comment('#') 37 | params = [] 38 | if not self.emit_index: 39 | params += ['--no-index'] 40 | if not self.annotate: 41 | params += ['--no-annotate'] 42 | if self.generate_hashes: 43 | params += ["--generate-hashes"] 44 | params += ['--output-file', self.dst_file] 45 | params += self.src_files 46 | yield comment('# pip-compile {}'.format(' '.join(params))) 47 | yield comment('#') 48 | 49 | def write_index_options(self): 50 | if self.emit_index: 51 | for index, index_url in enumerate(self.index_urls): 52 | if index_url.rstrip('/') == self.default_index_url: 53 | continue 54 | flag = '--index-url' if index == 0 else '--extra-index-url' 55 | yield '{} {}'.format(flag, index_url) 56 | 57 | def write_trusted_hosts(self): 58 | for trusted_host in self.trusted_hosts: 59 | yield '--trusted-host {}'.format(trusted_host) 60 | 61 | def write_format_controls(self): 62 | for nb in self.format_control.no_binary: 63 | yield '--no-binary {}'.format(nb) 64 | for ob in self.format_control.only_binary: 65 | yield '--only-binary {}'.format(ob) 66 | 67 | def write_flags(self): 68 | emitted = False 69 | for line in chain(self.write_index_options(), 70 | self.write_trusted_hosts(), 71 | self.write_format_controls()): 72 | emitted = True 73 | yield line 74 | if emitted: 75 | yield '' 76 | 77 | def _iter_lines(self, results, reverse_dependencies, primary_packages, hashes): 78 | for line in self.write_header(): 79 | yield line 80 | for line in self.write_flags(): 81 | yield line 82 | 83 | UNSAFE_PACKAGES = {'setuptools', 'distribute', 'pip'} 84 | unsafe_packages = {r for r in results if 
r.name in UNSAFE_PACKAGES} 85 | packages = {r for r in results if r.name not in UNSAFE_PACKAGES} 86 | 87 | packages = sorted(packages, key=self._sort_key) 88 | unsafe_packages = sorted(unsafe_packages, key=self._sort_key) 89 | 90 | for ireq in packages: 91 | line = self._format_requirement(ireq, reverse_dependencies, primary_packages, hashes=hashes) 92 | yield line 93 | 94 | if unsafe_packages: 95 | yield '' 96 | yield comment('# The following packages are considered to be unsafe in a requirements file:') 97 | 98 | for ireq in unsafe_packages: 99 | line = self._format_requirement( 100 | ireq, reverse_dependencies, primary_packages, 101 | hashes=hashes if self.allow_unsafe else None, 102 | include_specifier=self.allow_unsafe) 103 | if self.allow_unsafe: 104 | yield line 105 | else: 106 | yield comment('# ' + line) 107 | 108 | def write(self, results, reverse_dependencies, primary_packages, hashes): 109 | with ExitStack() as stack: 110 | f = None 111 | if not self.dry_run: 112 | f = stack.enter_context(AtomicSaver(self.dst_file)) 113 | 114 | for line in self._iter_lines(results, reverse_dependencies, primary_packages, hashes): 115 | log.info(line) 116 | if f: 117 | f.write(unstyle(line).encode('utf-8')) 118 | f.write(os.linesep.encode('utf-8')) 119 | 120 | def _format_requirement(self, ireq, reverse_dependencies, primary_packages, include_specifier=True, hashes=None): 121 | line = format_requirement(ireq, include_specifier=include_specifier) 122 | 123 | ireq_hashes = (hashes if hashes is not None else {}).get(ireq) 124 | if ireq_hashes: 125 | for hash_ in sorted(ireq_hashes): 126 | line += " \\\n --hash={}".format(hash_) 127 | 128 | if not self.annotate or ireq.name in primary_packages: 129 | return line 130 | 131 | # Annotate what packages this package is required by 132 | required_by = reverse_dependencies.get(ireq.name.lower(), []) 133 | if required_by: 134 | annotation = ", ".join(sorted(required_by)) 135 | line = "{:24}{}{}".format( 136 | line, 137 | " \\\n " if 
ireq_hashes else " ", 138 | comment("# via " + annotation)) 139 | return line 140 | -------------------------------------------------------------------------------- /piptools/cache.py: -------------------------------------------------------------------------------- 1 | # coding: utf-8 2 | from __future__ import (absolute_import, division, print_function, 3 | unicode_literals) 4 | 5 | import json 6 | import os 7 | import sys 8 | 9 | from pkg_resources import Requirement 10 | 11 | from .exceptions import PipToolsError 12 | from .locations import CACHE_DIR 13 | from .utils import as_tuple, key_from_req, lookup_table 14 | 15 | 16 | class CorruptCacheError(PipToolsError): 17 | def __init__(self, path): 18 | self.path = path 19 | 20 | def __str__(self): 21 | lines = [ 22 | 'The dependency cache seems to have been corrupted.', 23 | 'Inspect, or delete, the following file:', 24 | ' {}'.format(self.path), 25 | ] 26 | return os.linesep.join(lines) 27 | 28 | 29 | def read_cache_file(cache_file_path): 30 | with open(cache_file_path, 'r') as cache_file: 31 | try: 32 | doc = json.load(cache_file) 33 | except ValueError: 34 | raise CorruptCacheError(cache_file_path) 35 | 36 | # Check version and load the contents 37 | assert doc['__format__'] == 1, 'Unknown cache file format' 38 | return doc['dependencies'] 39 | 40 | 41 | class DependencyCache(object): 42 | """ 43 | Creates a new persistent dependency cache for the current Python version. 44 | The cache file is written to the appropriate user cache dir for the 45 | current platform, i.e. 46 | 47 | ~/.cache/pip-tools/depcache-pyX.Y.json 48 | 49 | Where X.Y indicates the Python version. 
50 | """ 51 | def __init__(self, cache_dir=None): 52 | if cache_dir is None: 53 | cache_dir = CACHE_DIR 54 | if not os.path.isdir(cache_dir): 55 | os.makedirs(cache_dir) 56 | py_version = '.'.join(str(digit) for digit in sys.version_info[:2]) 57 | cache_filename = 'depcache-py{}.json'.format(py_version) 58 | 59 | self._cache_file = os.path.join(cache_dir, cache_filename) 60 | self._cache = None 61 | 62 | @property 63 | def cache(self): 64 | """ 65 | The dictionary that is the actual in-memory cache. This property 66 | lazily loads the cache from disk. 67 | """ 68 | if self._cache is None: 69 | self.read_cache() 70 | return self._cache 71 | 72 | def as_cache_key(self, ireq): 73 | """ 74 | Given a requirement, return its cache key. This behavior is a little weird in order to allow backwards 75 | compatibility with cache files. For a requirement without extras, this will return, for example: 76 | 77 | ("ipython", "2.1.0") 78 | 79 | For a requirement with extras, the extras will be comma-separated and appended to the version, inside brackets, 80 | like so: 81 | 82 | ("ipython", "2.1.0[nbconvert,notebook]") 83 | """ 84 | name, version, extras = as_tuple(ireq) 85 | if not extras: 86 | extras_string = "" 87 | else: 88 | extras_string = "[{}]".format(",".join(extras)) 89 | return name, "{}{}".format(version, extras_string) 90 | 91 | def read_cache(self): 92 | """Reads the cached contents into memory.""" 93 | if os.path.exists(self._cache_file): 94 | self._cache = read_cache_file(self._cache_file) 95 | else: 96 | self._cache = {} 97 | 98 | def write_cache(self): 99 | """Writes the cache to disk as JSON.""" 100 | doc = { 101 | '__format__': 1, 102 | 'dependencies': self._cache, 103 | } 104 | with open(self._cache_file, 'w') as f: 105 | json.dump(doc, f, sort_keys=True) 106 | 107 | def clear(self): 108 | self._cache = {} 109 | self.write_cache() 110 | 111 | def __contains__(self, ireq): 112 | pkgname, pkgversion_and_extras = self.as_cache_key(ireq) 113 | return 
pkgversion_and_extras in self.cache.get(pkgname, {}) 114 | 115 | def __getitem__(self, ireq): 116 | pkgname, pkgversion_and_extras = self.as_cache_key(ireq) 117 | return self.cache[pkgname][pkgversion_and_extras] 118 | 119 | def __setitem__(self, ireq, values): 120 | pkgname, pkgversion_and_extras = self.as_cache_key(ireq) 121 | self.cache.setdefault(pkgname, {}) 122 | self.cache[pkgname][pkgversion_and_extras] = values 123 | self.write_cache() 124 | 125 | def get(self, ireq, default=None): 126 | pkgname, pkgversion_and_extras = self.as_cache_key(ireq) 127 | return self.cache.get(pkgname, {}).get(pkgversion_and_extras, default) 128 | 129 | def reverse_dependencies(self, ireqs): 130 | """ 131 | Returns a lookup table of reverse dependencies for all the given ireqs. 132 | 133 | Since this is all static, it only works if the dependency cache 134 | contains the complete data, otherwise you end up with a partial view. 135 | This is typically no problem if you use this function after the entire 136 | dependency tree is resolved. 137 | """ 138 | ireqs_as_cache_values = [self.as_cache_key(ireq) for ireq in ireqs] 139 | return self._reverse_dependencies(ireqs_as_cache_values) 140 | 141 | def _reverse_dependencies(self, cache_keys): 142 | """ 143 | Returns a lookup table of reverse dependencies for all the given cache keys. 144 | 145 | Example input: 146 | 147 | [('pep8', '1.5.7'), 148 | ('flake8', '2.4.0'), 149 | ('mccabe', '0.3'), 150 | ('pyflakes', '0.8.1')] 151 | 152 | Example output: 153 | 154 | {'pep8': ['flake8'], 155 | 'flake8': [], 156 | 'mccabe': ['flake8'], 157 | 'pyflakes': ['flake8']} 158 | 159 | """ 160 | # First, collect all the dependencies into a sequence of (parent, child) tuples, like [('flake8', 'pep8'), 161 | # ('flake8', 'mccabe'), ...] 
        return lookup_table((key_from_req(Requirement.parse(dep_name)), name)
                            for name, version_and_extras in cache_keys
                            for dep_name in self.cache[name][version_and_extras])


# ---- piptools/utils.py (module head) ----
# coding: utf-8
from __future__ import (absolute_import, division, print_function,
                        unicode_literals)

import sys
from itertools import chain, groupby

import pip
from pip.req import InstallRequirement

from first import first

from .click import style


def safeint(s):
    """Parse *s* as an int, returning 0 for anything unparsable."""
    try:
        return int(s)
    except ValueError:
        return 0


# e.g. '8.1.2' -> (8, 1, 2); non-numeric parts become 0 via safeint().
pip_version_info = tuple(safeint(digit) for digit in pip.__version__.split('.'))


def assert_compatible_pip_version():
    """Exit with status 4 unless pip >= 8.0 is installed."""
    # Make sure we're using a reasonably modern version of pip
    if not pip_version_info >= (8, 0):
        print('pip-compile requires at least version 8.0 of pip ({} found), '
              'perhaps run `pip install --upgrade pip`?'.format(pip.__version__))
        sys.exit(4)


def key_from_req(req):
    """Get an all-lowercase version of the requirement's name."""
    if hasattr(req, 'key'):
        # pip 8.1.1 or below, using pkg_resources
        return req.key
    else:
        # pip 8.1.2 or above, using packaging
        return req.name.lower()


def name_from_req(req):
    """Get the name of the requirement"""
    if hasattr(req, 'project_name'):
        # pip 8.1.1 or below, using pkg_resources
        return req.project_name
    else:
        # pip 8.1.2 or above, using packaging
        return req.name


def comment(text):
    """Render *text* in the style used for output comments (green)."""
    return style(text, fg='green')


def make_install_requirement(name, version, extras):
    # If no extras are specified, the extras string is blank
    extras_string = ""
    if extras:
        # Sort
extras for stability 63 | extras_string = "[{}]".format(",".join(sorted(extras))) 64 | 65 | return InstallRequirement.from_line('{}{}=={}'.format(name, extras_string, str(version))) 66 | 67 | 68 | def format_requirement(ireq, include_specifier=True): 69 | """ 70 | Generic formatter for pretty printing InstallRequirements to the terminal 71 | in a less verbose way than using its `__str__` method. 72 | """ 73 | if ireq.editable: 74 | line = '-e {}'.format(ireq.link) 75 | elif include_specifier: 76 | line = str(ireq.req) 77 | else: 78 | line = name_from_req(ireq.req) 79 | return line 80 | 81 | 82 | def format_specifier(ireq): 83 | """ 84 | Generic formatter for pretty printing the specifier part of 85 | InstallRequirements to the terminal. 86 | """ 87 | # TODO: Ideally, this is carried over to the pip library itself 88 | specs = ireq.specifier._specs if ireq.req is not None else [] 89 | specs = sorted(specs, key=lambda x: x._spec[1]) 90 | return ','.join(str(s) for s in specs) or '' 91 | 92 | 93 | def is_pinned_requirement(ireq): 94 | """ 95 | Returns whether an InstallRequirement is a "pinned" requirement. 96 | 97 | An InstallRequirement is considered pinned if: 98 | 99 | - Is not editable 100 | - It has exactly one specifier 101 | - That specifier is "==" 102 | - The version does not contain a wildcard 103 | 104 | Examples: 105 | django==1.8 # pinned 106 | django>1.8 # NOT pinned 107 | django~=1.8 # NOT pinned 108 | django==1.* # NOT pinned 109 | """ 110 | if ireq.editable: 111 | return False 112 | 113 | if len(ireq.specifier._specs) != 1: 114 | return False 115 | 116 | op, version = first(ireq.specifier._specs)._spec 117 | return (op == '==' or op == '===') and not version.endswith('.*') 118 | 119 | 120 | def as_tuple(ireq): 121 | """ 122 | Pulls out the (name: str, version:str, extras:(str)) tuple from the pinned InstallRequirement. 
123 | """ 124 | if not is_pinned_requirement(ireq): 125 | raise TypeError('Expected a pinned InstallRequirement, got {}'.format(ireq)) 126 | 127 | name = key_from_req(ireq.req) 128 | version = first(ireq.specifier._specs)._spec[1] 129 | extras = tuple(sorted(ireq.extras)) 130 | return name, version, extras 131 | 132 | 133 | def full_groupby(iterable, key=None): 134 | """Like groupby(), but sorts the input on the group key first.""" 135 | return groupby(sorted(iterable, key=key), key=key) 136 | 137 | 138 | def flat_map(fn, collection): 139 | """Map a function over a collection and flatten the result by one-level""" 140 | return chain.from_iterable(map(fn, collection)) 141 | 142 | 143 | def lookup_table(values, key=None, keyval=None, unique=False, use_lists=False): 144 | """ 145 | Builds a dict-based lookup table (index) elegantly. 146 | 147 | Supports building normal and unique lookup tables. For example: 148 | 149 | >>> lookup_table(['foo', 'bar', 'baz', 'qux', 'quux'], 150 | ... lambda s: s[0]) 151 | { 152 | 'b': {'bar', 'baz'}, 153 | 'f': {'foo'}, 154 | 'q': {'quux', 'qux'} 155 | } 156 | 157 | For key functions that uniquely identify values, set unique=True: 158 | 159 | >>> lookup_table(['foo', 'bar', 'baz', 'qux', 'quux'], 160 | ... lambda s: s[0], 161 | ... unique=True) 162 | { 163 | 'b': 'baz', 164 | 'f': 'foo', 165 | 'q': 'quux' 166 | } 167 | 168 | The values of the resulting lookup table will be values, not sets. 169 | 170 | For extra power, you can even change the values while building up the LUT. 171 | To do so, use the `keyval` function instead of the `key` arg: 172 | 173 | >>> lookup_table(['foo', 'bar', 'baz', 'qux', 'quux'], 174 | ... 
keyval=lambda s: (s[0], s[1:])) 175 | { 176 | 'b': {'ar', 'az'}, 177 | 'f': {'oo'}, 178 | 'q': {'uux', 'ux'} 179 | } 180 | 181 | """ 182 | if keyval is None: 183 | if key is None: 184 | keyval = (lambda v: v) 185 | else: 186 | keyval = (lambda v: (key(v), v)) 187 | 188 | if unique: 189 | return dict(keyval(v) for v in values) 190 | 191 | lut = {} 192 | for value in values: 193 | k, v = keyval(value) 194 | try: 195 | s = lut[k] 196 | except KeyError: 197 | if use_lists: 198 | s = lut[k] = list() 199 | else: 200 | s = lut[k] = set() 201 | if use_lists: 202 | s.append(v) 203 | else: 204 | s.add(v) 205 | return dict(lut) 206 | -------------------------------------------------------------------------------- /piptools/repositories/pypi.py: -------------------------------------------------------------------------------- 1 | # coding: utf-8 2 | from __future__ import (absolute_import, division, print_function, 3 | unicode_literals) 4 | 5 | import hashlib 6 | import os 7 | from shutil import rmtree 8 | 9 | from pip.download import unpack_url 10 | from pip.index import PackageFinder 11 | from pip.req.req_set import RequirementSet 12 | from pip.utils.hashes import FAVORITE_HASH 13 | 14 | from ..cache import CACHE_DIR 15 | from ..exceptions import NoCandidateFound 16 | from ..utils import (is_pinned_requirement, lookup_table, 17 | make_install_requirement, pip_version_info) 18 | from .base import BaseRepository 19 | 20 | try: 21 | from tempfile import TemporaryDirectory # added in 3.2 22 | except ImportError: 23 | from .._compat import TemporaryDirectory 24 | 25 | 26 | class PyPIRepository(BaseRepository): 27 | DEFAULT_INDEX_URL = 'https://pypi.python.org/simple' 28 | 29 | """ 30 | The PyPIRepository will use the provided Finder instance to lookup 31 | packages. Typically, it looks up packages on PyPI (the default implicit 32 | config), but any other PyPI mirror can be used if index_urls is 33 | changed/configured on the Finder. 
34 | """ 35 | def __init__(self, pip_options, session): 36 | self.session = session 37 | 38 | index_urls = [pip_options.index_url] + pip_options.extra_index_urls 39 | if pip_options.no_index: 40 | index_urls = [] 41 | 42 | self.finder = PackageFinder( 43 | find_links=pip_options.find_links, 44 | index_urls=index_urls, 45 | trusted_hosts=pip_options.trusted_hosts, 46 | allow_all_prereleases=pip_options.pre, 47 | process_dependency_links=pip_options.process_dependency_links, 48 | session=self.session, 49 | ) 50 | 51 | # Caches 52 | # stores project_name => InstallationCandidate mappings for all 53 | # versions reported by PyPI, so we only have to ask once for each 54 | # project 55 | self._available_candidates_cache = {} 56 | 57 | # Setup file paths 58 | self.freshen_build_caches() 59 | self._download_dir = os.path.join(CACHE_DIR, 'pkgs') 60 | self._wheel_download_dir = os.path.join(CACHE_DIR, 'wheels') 61 | 62 | def freshen_build_caches(self): 63 | """ 64 | Start with fresh build/source caches. Will remove any old build 65 | caches from disk automatically. 
66 | """ 67 | self._build_dir = TemporaryDirectory('build') 68 | self._source_dir = TemporaryDirectory('source') 69 | 70 | @property 71 | def build_dir(self): 72 | return self._build_dir.name 73 | 74 | @property 75 | def source_dir(self): 76 | return self._source_dir.name 77 | 78 | def clear_caches(self): 79 | rmtree(self._download_dir, ignore_errors=True) 80 | rmtree(self._wheel_download_dir, ignore_errors=True) 81 | 82 | def find_all_candidates(self, req_name): 83 | if req_name not in self._available_candidates_cache: 84 | # pip 8 changed the internal API, making this a public method 85 | if pip_version_info >= (8, 0): 86 | candidates = self.finder.find_all_candidates(req_name) 87 | else: 88 | candidates = self.finder._find_all_versions(req_name) 89 | self._available_candidates_cache[req_name] = candidates 90 | return self._available_candidates_cache[req_name] 91 | 92 | def find_best_match(self, ireq, prereleases=None): 93 | """ 94 | Returns a Version object that indicates the best match for the given 95 | InstallRequirement according to the external repository. 
96 | """ 97 | if ireq.editable: 98 | return ireq # return itself as the best match 99 | 100 | all_candidates = self.find_all_candidates(ireq.name) 101 | candidates_by_version = lookup_table(all_candidates, key=lambda c: c.version, unique=True) 102 | matching_versions = ireq.specifier.filter((candidate.version for candidate in all_candidates), 103 | prereleases=prereleases) 104 | 105 | # Reuses pip's internal candidate sort key to sort 106 | matching_candidates = [candidates_by_version[ver] for ver in matching_versions] 107 | if not matching_candidates: 108 | raise NoCandidateFound(ireq, all_candidates) 109 | best_candidate = max(matching_candidates, key=self.finder._candidate_sort_key) 110 | 111 | # Turn the candidate into a pinned InstallRequirement 112 | return make_install_requirement( 113 | best_candidate.project, best_candidate.version, ireq.extras 114 | ) 115 | 116 | def get_dependencies(self, ireq): 117 | """ 118 | Given a pinned or an editable InstallRequirement, returns a set of 119 | dependencies (also InstallRequirements, but not necessarily pinned). 120 | They indicate the secondary dependencies for the given requirement. 121 | """ 122 | if not (ireq.editable or is_pinned_requirement(ireq)): 123 | raise TypeError('Expected pinned or editable InstallRequirement, got {}'.format(ireq)) 124 | 125 | if not os.path.isdir(self._download_dir): 126 | os.makedirs(self._download_dir) 127 | if not os.path.isdir(self._wheel_download_dir): 128 | os.makedirs(self._wheel_download_dir) 129 | 130 | reqset = RequirementSet(self.build_dir, 131 | self.source_dir, 132 | download_dir=self._download_dir, 133 | wheel_download_dir=self._wheel_download_dir, 134 | session=self.session) 135 | dependencies = reqset._prepare_file(self.finder, ireq) 136 | return set(dependencies) 137 | 138 | def get_hashes(self, ireq): 139 | """ 140 | Given a pinned InstallRequire, returns a set of hashes that represent 141 | all of the files for a given requirement. 
It is not acceptable for an 142 | editable or unpinned requirement to be passed to this function. 143 | """ 144 | if ireq.editable or not is_pinned_requirement(ireq): 145 | raise TypeError( 146 | "Expected pinned requirement, not unpinned or editable, got {}".format(ireq)) 147 | 148 | # We need to get all of the candidates that match our current version 149 | # pin, these will represent all of the files that could possibly 150 | # satisify this constraint. 151 | all_candidates = self.find_all_candidates(ireq.name) 152 | candidates_by_version = lookup_table(all_candidates, key=lambda c: c.version) 153 | matching_versions = list( 154 | ireq.specifier.filter((candidate.version for candidate in all_candidates))) 155 | matching_candidates = candidates_by_version[matching_versions[0]] 156 | 157 | return { 158 | self._get_file_hash(candidate.location) 159 | for candidate in matching_candidates 160 | } 161 | 162 | def _get_file_hash(self, location): 163 | with TemporaryDirectory() as tmpdir: 164 | unpack_url( 165 | location, self.build_dir, 166 | download_dir=tmpdir, only_download=True, session=self.session 167 | ) 168 | files = os.listdir(tmpdir) 169 | assert len(files) == 1 170 | filename = os.path.abspath(os.path.join(tmpdir, files[0])) 171 | 172 | h = hashlib.new(FAVORITE_HASH) 173 | with open(filename, "rb") as fp: 174 | for chunk in iter(lambda: fp.read(8096), b""): 175 | h.update(chunk) 176 | 177 | return ":".join([FAVORITE_HASH, h.hexdigest()]) 178 | -------------------------------------------------------------------------------- /piptools/scripts/compile.py: -------------------------------------------------------------------------------- 1 | # coding: utf-8 2 | from __future__ import (absolute_import, division, print_function, 3 | unicode_literals) 4 | 5 | import optparse 6 | import os 7 | import sys 8 | import tempfile 9 | 10 | import pip 11 | from pip.req import InstallRequirement, parse_requirements 12 | 13 | from .. 
# Make sure we're using a compatible version of pip
assert_compatible_pip_version()

DEFAULT_REQUIREMENTS_FILE = 'requirements.in'


class PipCommand(pip.basecommand.Command):
    # Minimal concrete pip Command: gives us pip's option parser (and thus
    # pip.conf / environment-variable defaults) without any command behavior.
    name = 'PipCommand'


@click.command()
@click.version_option()
@click.option('-v', '--verbose', is_flag=True, help="Show more output")
@click.option('-n', '--dry-run', is_flag=True, help="Only show what would happen, don't change anything")
@click.option('-p', '--pre', is_flag=True, default=None, help="Allow resolving to prereleases (default is not)")
@click.option('-r', '--rebuild', is_flag=True, help="Clear any caches upfront, rebuild from scratch")
@click.option('-f', '--find-links', multiple=True, help="Look for archives in this directory or on this HTML page", envvar='PIP_FIND_LINKS')  # noqa
@click.option('-i', '--index-url', help="Change index URL (defaults to PyPI)", envvar='PIP_INDEX_URL')
@click.option('--extra-index-url', multiple=True, help="Add additional index URL to search", envvar='PIP_EXTRA_INDEX_URL')  # noqa
@click.option('--client-cert', help="Path to SSL client certificate, a single file containing the private key and the certificate in PEM format.")  # noqa
@click.option('--trusted-host', multiple=True, envvar='PIP_TRUSTED_HOST',
              help="Mark this host as trusted, even though it does not have "
                   "valid or any HTTPS.")
@click.option('--header/--no-header', is_flag=True, default=True,
              help="Add header to generated file")
@click.option('--index/--no-index', is_flag=True, default=True,
              help="Add index URL to generated file")
@click.option('--annotate/--no-annotate', is_flag=True, default=True,
              help="Annotate results, indicating where dependencies come from")
@click.option('-U', '--upgrade', is_flag=True, default=False,
              help='Try to upgrade all dependencies to their latest versions')
@click.option('-P', '--upgrade-package', 'upgrade_packages', nargs=1, multiple=True,
              help="Specify particular packages to upgrade.")
@click.option('-o', '--output-file', nargs=1, type=str, default=None,
              help=('Output file name. Required if more than one input file is given. '
                    'Will be derived from input file otherwise.'))
@click.option('--allow-unsafe', is_flag=True, default=False,
              help="Pin packages considered unsafe: pip, setuptools & distribute")
@click.option('--generate-hashes', is_flag=True, default=False,
              help="Generate pip 8 style hashes in the resulting requirements file.")
@click.argument('src_files', nargs=-1, type=click.Path(exists=True, allow_dash=True))
def cli(verbose, dry_run, pre, rebuild, find_links, index_url, extra_index_url,
        client_cert, trusted_host, header, index, annotate, upgrade, upgrade_packages,
        output_file, allow_unsafe, generate_hashes, src_files):
    """Compiles requirements.txt from requirements.in specs."""
    log.verbose = verbose

    # Fall back to requirements.in when no input files are given.
    if len(src_files) == 0:
        if not os.path.exists(DEFAULT_REQUIREMENTS_FILE):
            raise click.BadParameter(("If you do not specify an input file, "
                                      "the default is {}").format(DEFAULT_REQUIREMENTS_FILE))
        src_files = (DEFAULT_REQUIREMENTS_FILE,)

    # The output name cannot be derived when reading from stdin or when
    # multiple inputs are given, so --output-file is mandatory then.
    if len(src_files) == 1 and src_files[0] == '-':
        if not output_file:
            raise click.BadParameter('--output-file is required if input is from stdin')

    if len(src_files) > 1 and not output_file:
        raise click.BadParameter('--output-file is required if two or more input files are given.')

    if output_file:
        dst_file = output_file
    else:
        # BUG FIX: use os.path.splitext instead of str.rpartition('.').
        # rpartition returned '' as the base name for an extensionless input
        # file, silently producing a bogus '.txt' output file; splitext keeps
        # the full name in that case and is otherwise equivalent
        # ('requirements.in' -> 'requirements.txt').
        base_name = os.path.splitext(src_files[0])[0]
        dst_file = base_name + '.txt'

    if upgrade and upgrade_packages:
        raise click.BadParameter('Only one of --upgrade or --upgrade-package can be provided as an argument.')

    ###
    # Setup
    ###

    # Use pip's parser for pip.conf management and defaults.
    # General options (find_links, index_url, extra_index_url, trusted_host,
    # and pre) are deferred to pip.
    pip_command = PipCommand()
    index_opts = pip.cmdoptions.make_option_group(
        pip.cmdoptions.index_group,
        pip_command.parser,
    )
    pip_command.parser.insert_option_group(0, index_opts)
    pip_command.parser.add_option(optparse.Option('--pre', action='store_true', default=False))

    # Re-encode our CLI options as pip command-line arguments so that pip's
    # own parser merges them with pip.conf / environment defaults.
    pip_args = []
    if find_links:
        for link in find_links:
            pip_args.extend(['-f', link])
    if index_url:
        pip_args.extend(['-i', index_url])
    if extra_index_url:
        for extra_index in extra_index_url:
            pip_args.extend(['--extra-index-url', extra_index])
    if client_cert:
        pip_args.extend(['--client-cert', client_cert])
    if pre:
        pip_args.extend(['--pre'])
    if trusted_host:
        for host in trusted_host:
            pip_args.extend(['--trusted-host', host])

    pip_options, _ = pip_command.parse_args(pip_args)

    session = pip_command._build_session(pip_options)
    repository = PyPIRepository(pip_options, session)

    # Pre-parse the inline package upgrade specs: they should take precedence
    # over the stuff in the requirements files
    upgrade_packages = [InstallRequirement.from_line(pkg)
                        for pkg in upgrade_packages]
    upgrade_pkgs_by_key = {key_from_req(ireq.req): ireq
                           for ireq in upgrade_packages}

    # Proxy with a LocalRequirementsRepository if --upgrade is not specified
    # (= default invocation): existing pins in dst_file are kept as-is.
    if not (upgrade or upgrade_packages) and os.path.exists(dst_file):
        existing_pins = {}
        ireqs = parse_requirements(dst_file, finder=repository.finder, session=repository.session, options=pip_options)
        for ireq in ireqs:
            key = key_from_req(ireq.req)

            # Packages explicitly listed on the command line should not remain
            # pinned by whatever is in the dst_file (the command line argument
            # overwrites the current pins)
            if key in upgrade_pkgs_by_key:
                ireq = upgrade_pkgs_by_key[key]

            if is_pinned_requirement(ireq):
                existing_pins[key] = ireq
        repository = LocalRequirementsRepository(existing_pins, repository)

    log.debug('Using indexes:')
    # Local name chosen to avoid shadowing the index_url parameter above.
    for url in repository.finder.index_urls:
        log.debug('  {}'.format(url))

    if repository.finder.find_links:
        log.debug('')
        log.debug('Configuration:')
        for find_link in repository.finder.find_links:
            log.debug('  -f {}'.format(find_link))

    ###
    # Parsing/collecting initial requirements
    ###

    constraints = []
    for src_file in src_files:
        if src_file == '-':
            # pip requires filenames and not files. Since we want to support
            # piping from stdin, we need to briefly save the input from stdin
            # to a temporary file and have pip read that.
            # NOTE(review): NamedTemporaryFile cannot be reopened by name
            # while open on Windows -- presumably only POSIX is supported
            # here; verify before relying on stdin input on Windows.
            with tempfile.NamedTemporaryFile(mode='wt') as tmpfile:
                tmpfile.write(sys.stdin.read())
                tmpfile.flush()
                constraints.extend(parse_requirements(
                    tmpfile.name, finder=repository.finder, session=repository.session, options=pip_options))
        else:
            constraints.extend(parse_requirements(
                src_file, finder=repository.finder, session=repository.session, options=pip_options))

    try:
        resolver = Resolver(constraints, repository, prereleases=pre,
                            clear_caches=rebuild)
        results = resolver.resolve()
        if generate_hashes:
            hashes = resolver.resolve_hashes(results)
        else:
            hashes = None
    except PipToolsError as e:
        log.error(str(e))
        sys.exit(2)

    log.debug('')

    ##
    # Output
    ##

    # Compute reverse dependency annotations statically, from the
    # dependency cache that the resolver has populated by now.
    #
    # TODO (1a): reverse deps for any editable package are lost
    #            what SHOULD happen is that they are cached in memory, just
    #            not persisted to disk!
    #
    # TODO (1b): perhaps it's easiest if the dependency cache has an API
    #            that could take InstallRequirements directly, like:
    #
    #                cache.set(ireq, ...)
    #
    #            then, when ireq is editable, it would store in
    #
    #              editables[egg_name][link_without_fragment] = deps
    #              editables['pip-tools']['git+...ols.git@future'] = {'click>=3.0', 'six'}
    #
    #            otherwise:
    #
    #              self[as_name_version_tuple(ireq)] = {'click>=3.0', 'six'}
    #
    reverse_dependencies = None
    if annotate:
        reverse_dependencies = resolver.reverse_dependencies(results)

    writer = OutputWriter(src_files, dst_file, dry_run=dry_run,
                          emit_header=header, emit_index=index,
                          annotate=annotate,
                          generate_hashes=generate_hashes,
                          default_index_url=repository.DEFAULT_INDEX_URL,
                          index_urls=repository.finder.index_urls,
                          trusted_hosts=pip_options.trusted_hosts,
                          format_control=repository.finder.format_control,
                          allow_unsafe=allow_unsafe)
    writer.write(results=results,
                 reverse_dependencies=reverse_dependencies,
                 primary_packages={key_from_req(ireq.req) for ireq in constraints},
                 hashes=hashes)

    if dry_run:
        log.warning('Dry-run, so nothing updated.')
green = partial(click.style, fg='green')
magenta = partial(click.style, fg='magenta')


def _dep_key(ireq):
    """Grouping key for constraints: URL-only requirements (no project name
    parsed yet) key on their link, everything else on the project key."""
    if ireq.req is None and ireq.link is not None:
        return str(ireq.link)
    else:
        return key_from_req(ireq.req)


class RequirementSummary(object):
    """
    Summary of a requirement's properties for comparison purposes.
    """
    def __init__(self, req):
        self.req = req
        self.key = key_from_req(req)
        self.extras = str(sorted(req.extras))
        if hasattr(req, 'specs'):
            # pip < 8.1.2
            self.specifier = str(req.specs)
        else:
            # pip >= 8.1.2
            self.specifier = str(req.specifier)

    def __eq__(self, other):
        return str(self) == str(other)

    def __hash__(self):
        return hash(str(self))

    def __str__(self):
        return repr([self.key, self.specifier, self.extras])


class Resolver(object):
    def __init__(self, constraints, repository, cache=None, prereleases=False, clear_caches=False):
        """
        This class resolves a given set of constraints (a collection of
        InstallRequirement objects) by consulting the given Repository and the
        DependencyCache.
        """
        self.our_constraints = set(constraints)
        self.their_constraints = set()
        self.repository = repository
        if cache is None:
            cache = DependencyCache()  # pragma: no cover
        self.dependency_cache = cache
        self.prereleases = prereleases
        self.clear_caches = clear_caches

    @property
    def constraints(self):
        """All current constraints (primary + derived), grouped per package."""
        return set(self._group_constraints(chain(self.our_constraints,
                                                 self.their_constraints)))

    def resolve_hashes(self, ireqs):
        """
        Finds acceptable hashes for all of the given InstallRequirements.
        """
        return {ireq: self.repository.get_hashes(ireq) for ireq in ireqs}

    def resolve(self, max_rounds=10):
        """
        Finds concrete package versions for all the given InstallRequirements
        and their recursive dependencies.  The end result is a flat list of
        (name, version) tuples.  (Or an editable package.)

        Resolves constraints one round at a time, until they don't change
        anymore.  Protects against infinite loops by breaking out after a max
        number rounds.
        """
        if self.clear_caches:
            self.dependency_cache.clear()
            self.repository.clear_caches()

        self._check_constraints()

        # Ignore existing packages.
        # NOTE: str() wrapping is necessary for Python 2/3 compat
        # BUG FIX: save/restore the variable in a try/finally.  The previous
        # code deleted it unconditionally after the loop, which leaked the
        # override when resolution raised and permanently clobbered any value
        # the user had set.
        previous_exists_action = os.environ.get(str('PIP_EXISTS_ACTION'))
        os.environ[str('PIP_EXISTS_ACTION')] = str('i')
        try:
            for current_round in count(start=1):
                if current_round > max_rounds:
                    raise RuntimeError('No stable configuration of concrete packages '
                                       'could be found for the given constraints after '
                                       '%d rounds of resolving.\n'
                                       'This is likely a bug.' % max_rounds)

                log.debug('')
                log.debug(magenta('{:^60}'.format('ROUND {}'.format(current_round))))
                has_changed, best_matches = self._resolve_one_round()
                log.debug('-' * 60)
                log.debug('Result of round {}: {}'.format(current_round,
                                                          'not stable' if has_changed else 'stable, done'))
                if not has_changed:
                    break

                # If a package version (foo==2.0) was built in a previous round,
                # and in this round a different version of foo needs to be built
                # (i.e. foo==1.0), the directory will exist already, which will
                # cause a pip build failure.  The trick is to start with a new
                # build cache dir for every round, so this can never happen.
                self.repository.freshen_build_caches()
        finally:
            if previous_exists_action is None:
                del os.environ[str('PIP_EXISTS_ACTION')]
            else:
                os.environ[str('PIP_EXISTS_ACTION')] = previous_exists_action

        return best_matches

    def _check_constraints(self):
        """Reject constraint forms we cannot resolve (non-editable URLs)."""
        for constraint in chain(self.our_constraints, self.their_constraints):
            if constraint.link is not None and not constraint.editable:
                msg = ('pip-compile does not support URLs as packages, unless they are editable. '
                       'Perhaps add -e option?')
                raise UnsupportedConstraint(msg, constraint)

    def _group_constraints(self, constraints):
        """
        Groups constraints (remember, InstallRequirements!) by their key name,
        and combining their SpecifierSets into a single InstallRequirement per
        package.  For example, given the following constraints:

            Django<1.9,>=1.4.2
            django~=1.5
            Flask~=0.7

        This will be combined into a single entry per package:

            django~=1.5,<1.9,>=1.4.2
            flask~=0.7

        """
        for _, ireqs in full_groupby(constraints, key=_dep_key):
            ireqs = list(ireqs)
            editable_ireq = first(ireqs, key=lambda ireq: ireq.editable)
            if editable_ireq:
                yield editable_ireq  # ignore all the other specs: the editable one is the one that counts
                continue

            ireqs = iter(ireqs)
            combined_ireq = next(ireqs)
            combined_ireq.comes_from = None
            for ireq in ireqs:
                # NOTE we may be losing some info on dropped reqs here
                combined_ireq.req.specifier &= ireq.req.specifier
                # Return a sorted, de-duped tuple of extras
                combined_ireq.extras = tuple(sorted(set(tuple(combined_ireq.extras) + tuple(ireq.extras))))
            yield combined_ireq

    def _resolve_one_round(self):
        """
        Resolves one level of the current constraints, by finding the best
        match for each package in the repository and adding all requirements
        for those best package versions.  Some of these constraints may be new
        or updated.

        Returns whether new constraints appeared in this round.  If no
        constraints were added or changed, this indicates a stable
        configuration.
        """
        # Sort this list for readability of terminal output
        constraints = sorted(self.constraints, key=_dep_key)
        log.debug('Current constraints:')
        for constraint in constraints:
            log.debug('  {}'.format(constraint))

        log.debug('')
        log.debug('Finding the best candidates:')
        best_matches = set(self.get_best_match(ireq) for ireq in constraints)

        # Find the new set of secondary dependencies
        log.debug('')
        log.debug('Finding secondary dependencies:')
        # Grouping constraints to make clean diff between rounds
        theirs = set(self._group_constraints(dep
                                             for best_match in best_matches
                                             for dep in self._iter_dependencies(best_match)))

        # NOTE: We need to compare RequirementSummary objects, since
        # InstallRequirement does not define equality
        diff = {RequirementSummary(t.req) for t in theirs} - {RequirementSummary(t.req) for t in self.their_constraints}
        removed = ({RequirementSummary(t.req) for t in self.their_constraints} -
                   {RequirementSummary(t.req) for t in theirs})

        has_changed = len(diff) > 0 or len(removed) > 0
        if has_changed:
            log.debug('')
            log.debug('New dependencies found in this round:')
            for new_dependency in sorted(diff, key=lambda req: key_from_req(req.req)):
                log.debug('  adding {}'.format(new_dependency))
            log.debug('Removed dependencies in this round:')
            for removed_dependency in sorted(removed, key=lambda req: key_from_req(req.req)):
                log.debug('  removing {}'.format(removed_dependency))

        # Store the last round's results in the their_constraints
        self.their_constraints = theirs
        return has_changed, best_matches

    def get_best_match(self, ireq):
        """
        Returns a (pinned or editable) InstallRequirement, indicating the best
        match to use for the given InstallRequirement (in the form of an
        InstallRequirement).

        Example:
        Given the constraint Flask>=0.10, may return Flask==0.10.1 at
        a certain moment in time.

        Pinned requirements will always return themselves, i.e.

            Flask==0.10.1 => Flask==0.10.1

        """
        if ireq.editable:
            # NOTE: it's much quicker to immediately return instead of
            # hitting the index server
            best_match = ireq
        elif is_pinned_requirement(ireq):
            # NOTE: it's much quicker to immediately return instead of
            # hitting the index server
            best_match = ireq
        else:
            best_match = self.repository.find_best_match(ireq, prereleases=self.prereleases)

        # Format the best match
        log.debug('  found candidate {} (constraint was {})'.format(format_requirement(best_match),
                                                                    format_specifier(ireq)))
        return best_match

    def _iter_dependencies(self, ireq):
        """
        Given a pinned or editable InstallRequirement, collects all the
        secondary dependencies for them, either by looking them up in a local
        cache, or by reaching out to the repository.

        Editable requirements will never be looked up, as they may have
        changed at any time.
        """
        if ireq.editable:
            for dependency in self.repository.get_dependencies(ireq):
                yield dependency
            return
        elif not is_pinned_requirement(ireq):
            raise TypeError('Expected pinned or editable requirement, got {}'.format(ireq))

        # Now, either get the dependencies from the dependency cache (for
        # speed), or reach out to the external repository to
        # download and inspect the package version and get dependencies
        # from there
        if ireq not in self.dependency_cache:
            log.debug('  {} not in cache, need to check index'.format(format_requirement(ireq)), fg='yellow')
            dependencies = self.repository.get_dependencies(ireq)
            self.dependency_cache[ireq] = sorted(str(ireq.req) for ireq in dependencies)

        # Example: ['Werkzeug>=0.9', 'Jinja2>=2.4']
        dependency_strings = self.dependency_cache[ireq]
        log.debug('  {:25} requires {}'.format(format_requirement(ireq),
                                               ', '.join(sorted(dependency_strings, key=lambda s: s.lower())) or '-'))
        for dependency_string in dependency_strings:
            yield InstallRequirement.from_line(dependency_string)

    def reverse_dependencies(self, ireqs):
        """Reverse-dependency map for the given (non-editable) requirements,
        computed from the dependency cache populated during resolution."""
        non_editable = [ireq for ireq in ireqs if not ireq.editable]
        return self.dependency_cache.reverse_dependencies(non_editable)
10 | """ 11 | 12 | from __future__ import print_function 13 | 14 | import os 15 | import re 16 | import sys 17 | import stat 18 | import errno 19 | import fnmatch 20 | from shutil import copy2, copystat, Error 21 | 22 | 23 | __all__ = ['mkdir_p', 'atomic_save', 'AtomicSaver', 'FilePerms', 24 | 'iter_find_files', 'copytree'] 25 | 26 | 27 | FULL_PERMS = 511 # 0777 that both Python 2 and 3 can digest 28 | RW_PERMS = 438 29 | _SINGLE_FULL_PERM = 7 # or 07 in Python 2 30 | try: 31 | basestring 32 | except NameError: 33 | unicode = str # Python 3 compat 34 | basestring = (str, bytes) 35 | 36 | 37 | def mkdir_p(path): 38 | """Creates a directory and any parent directories that may need to 39 | be created along the way, without raising errors for any existing 40 | directories. This function mimics the behavior of the ``mkdir -p`` 41 | command available in Linux/BSD environments, but also works on 42 | Windows. 43 | """ 44 | try: 45 | os.makedirs(path) 46 | except OSError as exc: 47 | if exc.errno == errno.EEXIST and os.path.isdir(path): 48 | return 49 | raise 50 | return 51 | 52 | 53 | class FilePerms(object): 54 | """The :class:`FilePerms` type is used to represent standard POSIX 55 | filesystem permissions: 56 | 57 | * Read 58 | * Write 59 | * Execute 60 | 61 | Across three classes of user: 62 | 63 | * Owning (u)ser 64 | * Owner's (g)roup 65 | * Any (o)ther user 66 | 67 | This class assists with computing new permissions, as well as 68 | working with numeric octal ``777``-style and ``rwx``-style 69 | permissions. Currently it only considers the bottom 9 permission 70 | bits; it does not support sticky bits or more advanced permission 71 | systems. 72 | 73 | Args: 74 | user (str): A string in the 'rwx' format, omitting characters 75 | for which owning user's permissions are not provided. 76 | group (str): A string in the 'rwx' format, omitting characters 77 | for which owning group permissions are not provided. 
78 | other (str): A string in the 'rwx' format, omitting characters 79 | for which owning other/world permissions are not provided. 80 | 81 | There are many ways to use :class:`FilePerms`: 82 | 83 | >>> FilePerms(user='rwx', group='xrw', other='wxr') # note character order 84 | FilePerms(user='rwx', group='rwx', other='rwx') 85 | >>> int(FilePerms('r', 'r', '')) 86 | 288 87 | >>> oct(288)[-3:] # XXX Py3k 88 | '440' 89 | 90 | See also the :meth:`FilePerms.from_int` and 91 | :meth:`FilePerms.from_path` classmethods for useful alternative 92 | ways to construct :class:`FilePerms` objects. 93 | """ 94 | # TODO: consider more than the lower 9 bits 95 | class _FilePermProperty(object): 96 | _perm_chars = 'rwx' 97 | _perm_set = frozenset('rwx') 98 | _perm_val = {'r': 4, 'w': 2, 'x': 1} # for sorting 99 | 100 | def __init__(self, attribute, offset): 101 | self.attribute = attribute 102 | self.offset = offset 103 | 104 | def __get__(self, fp_obj, type_=None): 105 | if fp_obj is None: 106 | return self 107 | return getattr(fp_obj, self.attribute) 108 | 109 | def __set__(self, fp_obj, value): 110 | cur = getattr(fp_obj, self.attribute) 111 | if cur == value: 112 | return 113 | try: 114 | invalid_chars = set(str(value)) - self._perm_set 115 | except TypeError: 116 | raise TypeError('expected string, not %r' % value) 117 | if invalid_chars: 118 | raise ValueError('got invalid chars %r in permission' 119 | ' specification %r, expected empty string' 120 | ' or one or more of %r' 121 | % (invalid_chars, value, self._perm_chars)) 122 | 123 | sort_key = (lambda c: self._perm_val[c]) 124 | new_value = ''.join(sorted(set(value), 125 | key=sort_key, reverse=True)) 126 | setattr(fp_obj, self.attribute, new_value) 127 | self._update_integer(fp_obj, new_value) 128 | 129 | def _update_integer(self, fp_obj, value): 130 | mode = 0 131 | key = 'xwr' 132 | for symbol in value: 133 | bit = 2 ** key.index(symbol) 134 | mode |= (bit << (self.offset * 3)) 135 | fp_obj._integer |= mode 136 | 137 | 
def __init__(self, user='', group='', other=''): 138 | self._user, self._group, self._other = '', '', '' 139 | self._integer = 0 140 | self.user = user 141 | self.group = group 142 | self.other = other 143 | 144 | @classmethod 145 | def from_int(cls, i): 146 | """Create a :class:`FilePerms` object from an integer. 147 | 148 | >>> FilePerms.from_int(0o644) # note the leading zero-oh for octal 149 | FilePerms(user='rw', group='r', other='r') 150 | """ 151 | i &= FULL_PERMS 152 | key = ('', 'x', 'w', 'xw', 'r', 'rx', 'rw', 'rwx') 153 | parts = [] 154 | while i: 155 | parts.append(key[i & _SINGLE_FULL_PERM]) 156 | i >>= 3 157 | parts.reverse() 158 | return cls(*parts) 159 | 160 | @classmethod 161 | def from_path(cls, path): 162 | """Make a new :class:`FilePerms` object based on the permissions 163 | assigned to the file or directory at *path*. 164 | 165 | Args: 166 | path (str): Filesystem path of the target file. 167 | 168 | >>> from os.path import expanduser 169 | >>> 'r' in FilePerms.from_path(expanduser('~')).user # probably 170 | True 171 | """ 172 | stat_res = os.stat(path) 173 | return cls.from_int(stat.S_IMODE(stat_res.st_mode)) 174 | 175 | def __int__(self): 176 | return self._integer 177 | 178 | # Sphinx tip: attribute docstrings come after the attribute 179 | user = _FilePermProperty('_user', 2) 180 | "Stores the ``rwx``-formatted *user* permission." 181 | group = _FilePermProperty('_group', 1) 182 | "Stores the ``rwx``-formatted *group* permission." 183 | other = _FilePermProperty('_other', 0) 184 | "Stores the ``rwx``-formatted *other* permission." 
185 | 186 | def __repr__(self): 187 | cn = self.__class__.__name__ 188 | return ('%s(user=%r, group=%r, other=%r)' 189 | % (cn, self.user, self.group, self.other)) 190 | 191 | #### 192 | 193 | 194 | _TEXT_OPENFLAGS = os.O_RDWR | os.O_CREAT | os.O_EXCL 195 | if hasattr(os, 'O_NOINHERIT'): 196 | _TEXT_OPENFLAGS |= os.O_NOINHERIT 197 | if hasattr(os, 'O_NOFOLLOW'): 198 | _TEXT_OPENFLAGS |= os.O_NOFOLLOW 199 | _BIN_OPENFLAGS = _TEXT_OPENFLAGS 200 | if hasattr(os, 'O_BINARY'): 201 | _BIN_OPENFLAGS |= os.O_BINARY 202 | 203 | 204 | try: 205 | import fcntl as fcntl 206 | except ImportError: 207 | def set_cloexec(fd): 208 | "Dummy set_cloexec for platforms without fcntl support" 209 | pass 210 | else: 211 | def set_cloexec(fd): 212 | """Does a best-effort :func:`fcntl.fcntl` call to set a fd to be 213 | automatically closed by any future child processes. 214 | 215 | Implementation from the :mod:`tempfile` module. 216 | """ 217 | try: 218 | flags = fcntl.fcntl(fd, fcntl.F_GETFD, 0) 219 | except IOError: 220 | pass 221 | else: 222 | # flags read successfully, modify 223 | flags |= fcntl.FD_CLOEXEC 224 | fcntl.fcntl(fd, fcntl.F_SETFD, flags) 225 | return 226 | 227 | 228 | def atomic_save(dest_path, **kwargs): 229 | """A convenient interface to the :class:`AtomicSaver` type. See the 230 | :class:`AtomicSaver` documentation for details. 
231 | """ 232 | return AtomicSaver(dest_path, **kwargs) 233 | 234 | 235 | def path_to_unicode(path): 236 | if isinstance(path, unicode): 237 | return path 238 | encoding = sys.getfilesystemencoding() or sys.getdefaultencoding() 239 | return path.decode(encoding) 240 | 241 | 242 | if os.name == 'nt': 243 | import ctypes 244 | from ctypes import c_wchar_p 245 | from ctypes.wintypes import DWORD, LPVOID 246 | 247 | _ReplaceFile = ctypes.windll.kernel32.ReplaceFile 248 | _ReplaceFile.argtypes = [c_wchar_p, c_wchar_p, c_wchar_p, 249 | DWORD, LPVOID, LPVOID] 250 | 251 | def replace(src, dst): 252 | # argument names match stdlib docs, docstring below 253 | try: 254 | # ReplaceFile fails if the dest file does not exist, so 255 | # first try to rename it into position 256 | os.rename(src, dst) 257 | return 258 | except WindowsError as we: 259 | if we.errno == errno.EEXIST: 260 | pass # continue with the ReplaceFile logic below 261 | else: 262 | raise 263 | 264 | src = path_to_unicode(src) 265 | dst = path_to_unicode(dst) 266 | res = _ReplaceFile(c_wchar_p(dst), c_wchar_p(src), 267 | None, 0, None, None) 268 | if not res: 269 | raise OSError('failed to replace %r with %r' % (dst, src)) 270 | return 271 | 272 | def atomic_rename(src, dst, overwrite=False): 273 | "Rename *src* to *dst*, replacing *dst* if *overwrite is True" 274 | if overwrite: 275 | replace(src, dst) 276 | else: 277 | os.rename(src, dst) 278 | return 279 | else: 280 | # wrapper func for cross compat + docs 281 | def replace(src, dst): 282 | # os.replace does the same thing on unix 283 | return os.rename(src, dst) 284 | 285 | def atomic_rename(src, dst, overwrite=False): 286 | "Rename *src* to *dst*, replacing *dst* if *overwrite is True" 287 | if overwrite: 288 | os.rename(src, dst) 289 | else: 290 | os.link(src, dst) 291 | os.unlink(dst) 292 | return 293 | 294 | 295 | _atomic_rename = atomic_rename # backwards compat 296 | 297 | replace.__doc__ = """Similar to :func:`os.replace` in Python 3.3+, 298 | this 
function will atomically create or replace the file at path 299 | *dst* with the file at path *src*. 300 | 301 | On Windows, this function uses the ReplaceFile API for maximum 302 | possible atomicity on a range of filesystems. 303 | """ 304 | 305 | 306 | class AtomicSaver(object): 307 | """``AtomicSaver`` is a configurable `context manager`_ that provides 308 | a writable :class:`file` which will be moved into place as long as 309 | no exceptions are raised within the context manager's block. These 310 | "part files" are created in the same directory as the destination 311 | path to ensure atomic move operations (i.e., no cross-filesystem 312 | moves occur). 313 | 314 | Args: 315 | dest_path (str): The path where the completed file will be 316 | written. 317 | overwrite (bool): Whether to overwrite the destination file if 318 | it exists at completion time. Defaults to ``True``. 319 | file_perms (int): Integer representation of file permissions 320 | for the newly-created file. Defaults are, when the 321 | destination path already exists, to copy the permissions 322 | from the previous file, or if the file did not exist, to 323 | respect the user's configured `umask`_, usually resulting 324 | in octal 0644 or 0664. 325 | part_file (str): Name of the temporary *part_file*. Defaults 326 | to *dest_path* + ``.part``. Note that this argument is 327 | just the filename, and not the full path of the part 328 | file. To guarantee atomic saves, part files are always 329 | created in the same directory as the destination path. 330 | overwrite_part (bool): Whether to overwrite the *part_file*, 331 | should it exist at setup time. Defaults to ``False``, 332 | which results in an :exc:`OSError` being raised on 333 | pre-existing part files. Be careful of setting this to 334 | ``True`` in situations when multiple threads or processes 335 | could be writing to the same part file. 336 | rm_part_on_exc (bool): Remove *part_file* on exception cases. 
337 | Defaults to ``True``, but ``False`` can be useful for 338 | recovery in some cases. Note that resumption is not 339 | automatic and by default an :exc:`OSError` is raised if 340 | the *part_file* exists. 341 | 342 | Practically, the AtomicSaver serves a few purposes: 343 | 344 | * Avoiding overwriting an existing, valid file with a partially 345 | written one. 346 | * Providing a reasonable guarantee that a part file only has one 347 | writer at a time. 348 | * Optional recovery of partial data in failure cases. 349 | 350 | .. _context manager: https://docs.python.org/2/reference/compound_stmts.html#with 351 | .. _umask: https://en.wikipedia.org/wiki/Umask 352 | 353 | """ 354 | _default_file_perms = RW_PERMS 355 | 356 | # TODO: option to abort if target file modify date has changed since start? 357 | def __init__(self, dest_path, **kwargs): 358 | self.dest_path = dest_path 359 | self.overwrite = kwargs.pop('overwrite', True) 360 | self.file_perms = kwargs.pop('file_perms', None) 361 | self.overwrite_part = kwargs.pop('overwrite_part', False) 362 | self.part_filename = kwargs.pop('part_file', None) 363 | self.rm_part_on_exc = kwargs.pop('rm_part_on_exc', True) 364 | self.text_mode = kwargs.pop('text_mode', False) # for windows 365 | self.buffering = kwargs.pop('buffering', -1) 366 | if kwargs: 367 | raise TypeError('unexpected kwargs: %r' % (kwargs.keys(),)) 368 | 369 | self.dest_path = os.path.abspath(self.dest_path) 370 | self.dest_dir = os.path.dirname(self.dest_path) 371 | if not self.part_filename: 372 | self.part_path = dest_path + '.part' 373 | else: 374 | self.part_path = os.path.join(self.dest_dir, self.part_filename) 375 | self.mode = 'w+' if self.text_mode else 'w+b' 376 | self.open_flags = _TEXT_OPENFLAGS if self.text_mode else _BIN_OPENFLAGS 377 | 378 | self.part_file = None 379 | 380 | def _open_part_file(self): 381 | do_chmod = True 382 | file_perms = self.file_perms 383 | if file_perms is None: 384 | try: 385 | # try to copy from file being 
replaced 386 | stat_res = os.stat(self.dest_path) 387 | file_perms = stat.S_IMODE(stat_res.st_mode) 388 | except (OSError, IOError): 389 | # default if no destination file exists 390 | file_perms = self._default_file_perms 391 | do_chmod = False # respect the umask 392 | 393 | fd = os.open(self.part_path, self.open_flags, file_perms) 394 | set_cloexec(fd) 395 | self.part_file = os.fdopen(fd, self.mode, self.buffering) 396 | 397 | # if default perms are overridden by the user or previous dest_path 398 | # chmod away the effects of the umask 399 | if do_chmod: 400 | try: 401 | os.chmod(self.part_path, file_perms) 402 | except (OSError, IOError): 403 | self.part_file.close() 404 | raise 405 | return 406 | 407 | def setup(self): 408 | """Called on context manager entry (the :keyword:`with` statement), 409 | the ``setup()`` method creates the temporary file in the same 410 | directory as the destination file. 411 | 412 | ``setup()`` tests for a writable directory with rename permissions 413 | early, as the part file may not be written to immediately (not 414 | using :func:`os.access` because of the potential issues of 415 | effective vs. real privileges). 416 | 417 | If the caller is not using the :class:`AtomicSaver` as a 418 | context manager, this method should be called explicitly 419 | before writing. 
420 | """ 421 | if os.path.lexists(self.dest_path): 422 | if not self.overwrite: 423 | raise OSError(errno.EEXIST, 424 | 'Overwrite disabled and file already exists', 425 | self.dest_path) 426 | if self.overwrite_part and os.path.lexists(self.part_path): 427 | os.unlink(self.part_path) 428 | self._open_part_file() 429 | return 430 | 431 | def __enter__(self): 432 | self.setup() 433 | return self.part_file 434 | 435 | def __exit__(self, exc_type, exc_val, exc_tb): 436 | self.part_file.close() 437 | if exc_type: 438 | if self.rm_part_on_exc: 439 | try: 440 | os.unlink(self.part_path) 441 | except Exception: 442 | pass # avoid masking original error 443 | return 444 | try: 445 | atomic_rename(self.part_path, self.dest_path, 446 | overwrite=self.overwrite) 447 | except OSError: 448 | if self.rm_part_on_exc: 449 | try: 450 | os.unlink(self.part_path) 451 | except Exception: 452 | pass # avoid masking original error 453 | raise # could not save destination file 454 | return 455 | 456 | 457 | _CUR_DIR = os.path.dirname(os.path.abspath(__file__)) 458 | 459 | 460 | def iter_find_files(directory, patterns, ignored=None): 461 | """Returns a generator that yields file paths under a *directory*, 462 | matching *patterns* using `glob`_ syntax (e.g., ``*.txt``). Also 463 | supports *ignored* patterns. 464 | 465 | Args: 466 | directory (str): Path that serves as the root of the 467 | search. Yielded paths will include this as a prefix. 468 | patterns (str or list): A single pattern or list of 469 | glob-formatted patterns to find under *directory*. 470 | ignored (str or list): A single pattern or list of 471 | glob-formatted patterns to ignore. 
472 | 473 | For example, finding Python files in the current directory: 474 | 475 | >>> filenames = sorted(iter_find_files(_CUR_DIR, '*.py')) 476 | >>> filenames[-1].split('/')[-1] 477 | 'typeutils.py' 478 | 479 | Or, Python files while ignoring emacs lockfiles: 480 | 481 | >>> filenames = iter_find_files(_CUR_DIR, '*.py', ignored='.#*') 482 | 483 | .. _glob: https://en.wikipedia.org/wiki/Glob_%28programming%29 484 | 485 | """ 486 | if isinstance(patterns, basestring): 487 | patterns = [patterns] 488 | pats_re = re.compile('|'.join([fnmatch.translate(p) for p in patterns])) 489 | 490 | if not ignored: 491 | ignored = [] 492 | elif isinstance(ignored, basestring): 493 | ignored = [ignored] 494 | ign_re = re.compile('|'.join([fnmatch.translate(p) for p in ignored])) 495 | for root, dirs, files in os.walk(directory): 496 | for basename in files: 497 | if pats_re.match(basename): 498 | if ignored and ign_re.match(basename): 499 | continue 500 | filename = os.path.join(root, basename) 501 | yield filename 502 | return 503 | 504 | 505 | def copy_tree(src, dst, symlinks=False, ignore=None): 506 | """The ``copy_tree`` function is an exact copy of the built-in 507 | :func:`shutil.copytree`, with one key difference: it will not 508 | raise an exception if part of the tree already exists. It achieves 509 | this by using :func:`mkdir_p`. 510 | 511 | Args: 512 | src (str): Path of the source directory to copy. 513 | dst (str): Destination path. Existing directories accepted. 514 | symlinks (bool): If ``True``, copy symlinks rather than their 515 | contents. 516 | ignore (callable): A callable that takes a path and directory 517 | listing, returning the files within the listing to be ignored. 518 | 519 | For more details, check out :func:`shutil.copytree` and 520 | :func:`shutil.copy2`. 
521 | 522 | """ 523 | names = os.listdir(src) 524 | if ignore is not None: 525 | ignored_names = ignore(src, names) 526 | else: 527 | ignored_names = set() 528 | 529 | mkdir_p(dst) 530 | errors = [] 531 | for name in names: 532 | if name in ignored_names: 533 | continue 534 | srcname = os.path.join(src, name) 535 | dstname = os.path.join(dst, name) 536 | try: 537 | if symlinks and os.path.islink(srcname): 538 | linkto = os.readlink(srcname) 539 | os.symlink(linkto, dstname) 540 | elif os.path.isdir(srcname): 541 | copytree(srcname, dstname, symlinks, ignore) 542 | else: 543 | # Will raise a SpecialFileError for unsupported file types 544 | copy2(srcname, dstname) 545 | # catch the Error from the recursive copytree so that we can 546 | # continue with other files 547 | except Error as e: 548 | errors.extend(e.args[0]) 549 | except EnvironmentError as why: 550 | errors.append((srcname, dstname, str(why))) 551 | try: 552 | copystat(src, dst) 553 | except OSError as why: 554 | if WindowsError is not None and isinstance(why, WindowsError): 555 | # Copying file access times may fail on Windows 556 | pass 557 | else: 558 | errors.append((src, dst, str(why))) 559 | if errors: 560 | raise Error(errors) 561 | 562 | 563 | copytree = copy_tree # alias for drop-in replacement of shutil 564 | 565 | 566 | try: 567 | file 568 | except NameError: 569 | file = object 570 | 571 | 572 | # like open(os.devnull) but with even fewer side effects 573 | class DummyFile(file): 574 | # TODO: raise ValueErrors on closed for all methods? 
575 | # TODO: enforce read/write 576 | def __init__(self, path, mode='r', buffering=None): 577 | self.name = path 578 | self.mode = mode 579 | self.closed = False 580 | self.errors = None 581 | self.isatty = False 582 | self.encoding = None 583 | self.newlines = None 584 | self.softspace = 0 585 | 586 | def close(self): 587 | self.closed = True 588 | 589 | def fileno(self): 590 | return -1 591 | 592 | def flush(self): 593 | if self.closed: 594 | raise ValueError('I/O operation on a closed file') 595 | return 596 | 597 | def next(self): 598 | raise StopIteration() 599 | 600 | def read(self, size=0): 601 | if self.closed: 602 | raise ValueError('I/O operation on a closed file') 603 | return '' 604 | 605 | def readline(self, size=0): 606 | if self.closed: 607 | raise ValueError('I/O operation on a closed file') 608 | return '' 609 | 610 | def readlines(self, size=0): 611 | if self.closed: 612 | raise ValueError('I/O operation on a closed file') 613 | return [] 614 | 615 | def seek(self): 616 | if self.closed: 617 | raise ValueError('I/O operation on a closed file') 618 | return 619 | 620 | def tell(self): 621 | if self.closed: 622 | raise ValueError('I/O operation on a closed file') 623 | return 0 624 | 625 | def truncate(self): 626 | if self.closed: 627 | raise ValueError('I/O operation on a closed file') 628 | return 629 | 630 | def write(self, string): 631 | if self.closed: 632 | raise ValueError('I/O operation on a closed file') 633 | return 634 | 635 | def writelines(self, list_of_strings): 636 | if self.closed: 637 | raise ValueError('I/O operation on a closed file') 638 | return 639 | 640 | def __next__(self): 641 | raise StopIteration() 642 | 643 | def __enter__(self): 644 | if self.closed: 645 | raise ValueError('I/O operation on a closed file') 646 | return 647 | 648 | def __exit__(self, exc_type, exc_val, exc_tb): 649 | return 650 | --------------------------------------------------------------------------------