├── .coveragerc ├── .editorconfig ├── .gitignore ├── .travis.yml ├── CHANGELOG.rst ├── LICENSE ├── MANIFEST.in ├── Pipfile ├── Pipfile.lock ├── README.rst ├── appveyor.yml ├── docs ├── conf.py └── index.rst ├── news ├── .gitignore ├── 15.feature.rst ├── 22.bugfix.rst ├── 22.feature.rst ├── 24.bugfix.rst ├── 33.bugfix.rst ├── 34.bugfix.rst ├── 4.bugfix.rst ├── 49.feature.rst ├── 50.bugfix.rst ├── 51.bugfix.rst └── 53.feature.rst ├── pyproject.toml ├── setup.cfg ├── setup.py ├── src └── passa │ ├── __init__.py │ ├── __main__.py │ ├── actions │ ├── __init__.py │ ├── add.py │ ├── clean.py │ ├── freeze.py │ ├── init.py │ ├── install.py │ ├── lock.py │ ├── remove.py │ ├── sync.py │ └── upgrade.py │ ├── cli │ ├── __init__.py │ ├── _base.py │ ├── add.py │ ├── clean.py │ ├── freeze.py │ ├── init.py │ ├── install.py │ ├── lock.py │ ├── options.py │ ├── remove.py │ ├── sync.py │ └── upgrade.py │ ├── internals │ ├── __init__.py │ ├── _pip.py │ ├── _pip_shims.py │ ├── candidates.py │ ├── dependencies.py │ ├── hashes.py │ ├── markers.py │ ├── reporters.py │ ├── specifiers.py │ ├── traces.py │ └── utils.py │ ├── models │ ├── __init__.py │ ├── caches.py │ ├── lockers.py │ ├── metadata.py │ ├── projects.py │ ├── providers.py │ └── synchronizers.py │ └── operations │ ├── __init__.py │ ├── lock.py │ └── sync.py ├── tasks ├── CHANGELOG.rst.jinja2 ├── __init__.py ├── admin.py ├── pack │ ├── __main__.py │ └── lib │ │ └── typing.py └── package.py ├── tests ├── test_markers.py └── test_specifiers.py └── tox.ini /.coveragerc: -------------------------------------------------------------------------------- 1 | [run] 2 | branch = True 3 | parallel = True 4 | source = src/passa/ 5 | 6 | [report] 7 | # Regexes for lines to exclude from consideration 8 | exclude_lines = 9 | # Have to re-enable the standard pragma 10 | pragma: no cover 11 | 12 | # Don't complain about missing debug-only code: 13 | def __repr__ 14 | if self\.debug 15 | 16 | # Don't complain if tests don't hit defensive assertion code: 17 | raise AssertionError 18 | raise NotImplementedError 19 | # Don't complain if non-runnable code isn't run: 20 | if 0: 21 | if __name__ == .__main__.: 22 | 23 | [html] 24 | directory = htmlcov 25 | 26 | [xml] 27 | output = coverage.xml 28 | -------------------------------------------------------------------------------- /.editorconfig: -------------------------------------------------------------------------------- 1 | root = true 2 | 3 | [*] 4 | indent_style = space 5 | indent_size = 4 6 | end_of_line = lf 7 | charset = utf-8 8 | trim_trailing_whitespace = true 9 | insert_final_newline = true 10 | 11 | [*.md] 12 | trim_trailing_whitespace = false 13 | 14 | [*.toml] 15 | indent_size = 2 16 | 17 | [*.yaml] 18 | indent_size = 2 19 | 20 | # Makefiles always use tabs for indentation 21 | [Makefile] 22 | indent_style = tab 23 | 24 | # Batch files use tabs for indentation 25 | [*.bat] 26 | indent_style = tab 27 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | .env 2 | .venv 3 | __pycache__ 4 | 5 | /build 6 | /dist 7 | /docs/_build 8 | /pack 9 | 10 | *.egg-info 11 | 12 | *.py[co] 13 | .pytest_cache/ 14 | .vscode/ 15 | -------------------------------------------------------------------------------- /.travis.yml: -------------------------------------------------------------------------------- 1 | language: python 2 | sudo: false 3 | cache: pip 4 | dist: trusty 5 | 6 | matrix: 7 | fast_finish: true 8 
| 9 | install: 10 | - "python -m pip install --upgrade pip pytest-timeout" 11 | - "python -m pip install --upgrade -e .[tests]" 12 | script: 13 | - "python -m pytest -v -n 8 tests/" 14 | 15 | jobs: 16 | include: 17 | - python: "3.7" 18 | dist: xenial 19 | sudo: required 20 | - python: "3.6" 21 | - python: "2.7" 22 | - python: "3.5" 23 | - python: "3.4" 24 | - stage: packaging 25 | python: "3.6" 26 | install: 27 | - "python -m pip install --upgrade pip" 28 | - "python -m pip install --upgrade check-manifest readme-renderer" 29 | script: 30 | - "python setup.py check -m -r -s" 31 | - stage: packing 32 | python: "3.6" 33 | install: 34 | - "python -m pip install --upgrade -e .[pack]" 35 | script: 36 | - "invoke pack" 37 | - "python2.7 pack/passa.zip --help" 38 | - stage: coverage 39 | python: "3.6" 40 | install: 41 | - "python -m pip install --upgrade pip" 42 | - "python -m pip install --upgrade -e .[tests]" 43 | - "python -m pip install --upgrade pytest-timeout pytest-xdist pytest-cov" 44 | script: 45 | - "pytest -n auto --timeout 300 --cov=passa --cov-report=term-missing --cov-report=xml --cov-report=html tests" 46 | -------------------------------------------------------------------------------- /CHANGELOG.rst: -------------------------------------------------------------------------------- 1 | 0.3.0 (2018-08-30) 2 | ================== 3 | 4 | Features 5 | -------- 6 | 7 | - Improves consolidation logic to merge ``Requires-Python`` into ``python_version`` markers. This greatly reduces clutter in Pipfile.lock. `#14 <https://github.com/sarugaku/passa/issues/14>`_ 8 | 9 | - Try to suppress installation errors unless ``PASSA_NO_SUPPRESS_EXCEPTIONS`` is set. This matches the behaviour of locking. `#17 <https://github.com/sarugaku/passa/issues/17>`_ 10 | 11 | - ``sync`` is redesigned to be integrated into ``add``, ``remove``, and ``upgrade``. Various ``clean`` operations are added to purge unneeded packages from the environment. ``install`` is added as a combination of ``lock`` and ``sync``. `#20 <https://github.com/sarugaku/passa/issues/20>`_ 12 | 13 | 14 | Bug Fixes 15 | --------- 16 | 17 | - Fix entry point declaration in package so the ``passa`` command can work. `#18 <https://github.com/sarugaku/passa/issues/18>`_ 18 | 19 | 20 | 0.2.0 (2018-08-29) 21 | ================== 22 | 23 | Features 24 | -------- 25 | 26 | - Add ``sync`` command to synchronize the running environment with Pipfile.lock. 27 | 28 | 29 | Bug Fixes 30 | --------- 31 | 32 | - Fix CLI invocation on Python 2. 33 | 34 | 35 | 0.1.0 (2018-08-28) 36 | ================== 37 | 38 | Features 39 | -------- 40 | 41 | - Initial Release! 42 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Copyright (c) 2018, Dan Ryan and Tzu-ping Chung 2 | 3 | Permission to use, copy, modify, and distribute this software for any 4 | purpose with or without fee is hereby granted, provided that the above 5 | copyright notice and this permission notice appear in all copies. 6 | 7 | THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES 8 | WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF 9 | MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR 10 | ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES 11 | WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN 12 | ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF 13 | OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
14 | -------------------------------------------------------------------------------- /MANIFEST.in: -------------------------------------------------------------------------------- 1 | include README.rst LICENSE CHANGELOG.rst 2 | recursive-include src *LICENSE* *COPYING* 3 | include pyproject.toml 4 | exclude .coveragerc 5 | exclude .editorconfig 6 | exclude .travis.yml 7 | exclude appveyor.yml 8 | exclude tox.ini 9 | exclude Pipfile* 10 | 11 | recursive-include docs Makefile *.rst *.py *.bat 12 | recursive-exclude docs requirements*.txt 13 | 14 | prune .github 15 | prune docs/build 16 | prune news 17 | prune tasks 18 | prune tests 19 | -------------------------------------------------------------------------------- /Pipfile: -------------------------------------------------------------------------------- 1 | [packages] 2 | passa = { editable = true, path = '.' } 3 | 4 | # Override sdist-only dependency via TOMLkit to fix build. (sarugaku/passa#61) 5 | [packages.functools32] 6 | file = """\ 7 | https://github.com/sarugaku/functools32/releases/download/3.2.3-2/\ 8 | functools32-3.2.3.post2-py2.py3-none-any.whl""" 9 | markers = "python_version < '3.0'" 10 | 11 | [dev-packages] 12 | black = '*' 13 | invoke = '*' 14 | parver = '*' 15 | passa = { editable = true, path = '.', extras = ['tests'] } 16 | sphinx = '*' 17 | sphinx-rtd-theme = '*' 18 | towncrier = '*' 19 | twine = '*' 20 | wheel = '*' 21 | 22 | [scripts] 23 | passa-add = 'python -m passa.cli.add' 24 | passa-remove = 'python -m passa.cli.remove' 25 | passa-upgrade = 'python -m passa.cli.upgrade' 26 | passa-lock = 'python -m passa.cli.lock' 27 | passa-freeze = 'python -m passa.cli.freeze' 28 | 29 | black = 'black src/passa/ --exclude "/(\.git|\.hg|\.mypy_cache|\.tox|\.venv|_build|buck-out|build|dist)/"' 30 | build = 'inv build' 31 | changelog = 'towncrier' 32 | docs = 'sphinx-build -b html docs docs/_build' 33 | draft = 'towncrier --draft' 34 | release = 'inv release' 35 | tests = "pytest -v tests" 36 | upload = 'inv upload' 37 | -------------------------------------------------------------------------------- /Pipfile.lock: -------------------------------------------------------------------------------- 1 | { 2 | "_meta": { 3 | "hash": { 4 | "sha256": "24931cad8ca14fb20d62933a3be5a0544d1dc47f4c3bce54f17ce74037fc7c23" 5 | }, 6 | "pipfile-spec": 6, 7 | "requires": {}, 8 | "sources": [ 9 | { 10 | "name": "pypi", 11 | "url": "https://pypi.org/simple", 12 | "verify_ssl": true 13 | } 14 | ] 15 | }, 16 | "default": { 17 | "functools32": { 18 | "file": "https://github.com/sarugaku/functools32/releases/download/3.2.3-2/functools32-3.2.3.post2-py2.py3-none-any.whl", 19 | "hashes": [ 20 | "sha256:89d824aa6c358c421a234d7f9ee0bd75933a67c29588ce50aaa3acdf4d403fa0", 21 | "sha256:f6253dfbe0538ad2e387bd8fdfd9293c925d63553f5813c4e587745416501e6d" 22 | ], 23 | "index": "pypi", 24 | "markers": "python_version < '3.0'", 25 | "version": "==3.2.3.post2" 26 | }, 27 | "passa": { 28 | "editable": true, 29 | "path": "." 
30 | } 31 | }, 32 | "develop": { 33 | "alabaster": { 34 | "hashes": [ 35 | "sha256:446438bdcca0e05bd45ea2de1668c1d9b032e1a9154c2c259092d77031ddd359", 36 | "sha256:a661d72d58e6ea8a57f7a86e37d86716863ee5e92788398526d58b26a4e4dc02" 37 | ], 38 | "version": "==0.7.12" 39 | }, 40 | "appdirs": { 41 | "hashes": [ 42 | "sha256:7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41", 43 | "sha256:a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128" 44 | ], 45 | "version": "==1.4.4" 46 | }, 47 | "arpeggio": { 48 | "hashes": [ 49 | "sha256:920d12cc762edb2eb56daae64a14c93e43dc181b481c88fc79314c0df6ee639e", 50 | "sha256:f01a330148d972714458faf7bc142e9780fb443d33bb2474e5df2ad0a81635bc" 51 | ], 52 | "version": "==1.10.1" 53 | }, 54 | "attrs": { 55 | "hashes": [ 56 | "sha256:31b2eced602aa8423c2aea9c76a724617ed67cf9513173fd3a4f03e3a929c7e6", 57 | "sha256:832aa3cde19744e49938b91fea06d69ecb9e649c93ba974535d08ad92164f700" 58 | ], 59 | "version": "==20.3.0" 60 | }, 61 | "babel": { 62 | "hashes": [ 63 | "sha256:9d35c22fcc79893c3ecc85ac4a56cde1ecf3f19c540bba0922308a6c06ca6fa5", 64 | "sha256:da031ab54472314f210b0adcff1588ee5d1d1d0ba4dbd07b94dba82bde791e05" 65 | ], 66 | "version": "==2.9.0" 67 | }, 68 | "black": { 69 | "hashes": [ 70 | "sha256:817243426042db1d36617910df579a54f1afd659adb96fc5032fcf4b36209739", 71 | "sha256:e030a9a28f542debc08acceb273f228ac422798e5215ba2a791a6ddeaaca22a5" 72 | ], 73 | "index": "pypi", 74 | "version": "==18.9b0" 75 | }, 76 | "bleach": { 77 | "hashes": [ 78 | "sha256:6123ddc1052673e52bab52cdc955bcb57a015264a1c57d37bea2f6b817af0125", 79 | "sha256:98b3170739e5e83dd9dc19633f074727ad848cbedb6026708c8ac2d3b697a433" 80 | ], 81 | "version": "==3.3.0" 82 | }, 83 | "certifi": { 84 | "hashes": [ 85 | "sha256:1a4995114262bffbc2413b159f2a1a480c969de6e6eb13ee966d470af86af59c", 86 | "sha256:719a74fb9e33b9bd44cc7f3a8d94bc35e4049deebe19ba7d8e108280cfd59830" 87 | ], 88 | "version": "==2020.12.5" 89 | }, 90 | "chardet": { 91 | "hashes": [ 92 | "sha256:0d6f53a15db4120f2b08c94f11e7d93d2c911ee118b6b30a04ec3ee8310179fa", 93 | "sha256:f864054d66fd9118f2e67044ac8981a54775ec5b67aed0441892edb553d21da5" 94 | ], 95 | "version": "==4.0.0" 96 | }, 97 | "click": { 98 | "hashes": [ 99 | "sha256:d2b5255c7c6349bc1bd1e59e08cd12acbbd63ce649f2588755783aa94dfb6b1a", 100 | "sha256:dacca89f4bfadd5de3d7489b7c8a566eee0d3676333fbb50030263894c38c0dc" 101 | ], 102 | "version": "==7.1.2" 103 | }, 104 | "docutils": { 105 | "hashes": [ 106 | "sha256:0c5b78adfbf7762415433f5515cd5c9e762339e23369dbe8000d84a4bf4ab3af", 107 | "sha256:c2de3a60e9e7d07be26b7f2b00ca0309c207e06c100f9cc2a94931fc75a478fc" 108 | ], 109 | "version": "==0.16" 110 | }, 111 | "idna": { 112 | "hashes": [ 113 | "sha256:b307872f855b18632ce0c21c5e45be78c0ea7ae4c15c828c20788b26921eb3f6", 114 | "sha256:b97d804b1e9b523befed77c48dacec60e6dcb0b5391d57af6a65a312a90648c0" 115 | ], 116 | "version": "==2.10" 117 | }, 118 | "imagesize": { 119 | "hashes": [ 120 | "sha256:6965f19a6a2039c7d48bca7dba2473069ff854c36ae6f19d2cde309d998228a1", 121 | "sha256:b1f6b5a4eab1f73479a50fb79fcf729514a900c341d8503d62a62dbc4127a2b1" 122 | ], 123 | "version": "==1.2.0" 124 | }, 125 | "incremental": { 126 | "hashes": [ 127 | "sha256:02f5de5aff48f6b9f665d99d48bfc7ec03b6e3943210de7cfc88856d755d6f57", 128 | "sha256:92014aebc6a20b78a8084cdd5645eeaa7f74b8933f70fa3ada2cfbd1e3b54321" 129 | ], 130 | "version": "==21.3.0" 131 | }, 132 | "invoke": { 133 | "hashes": [ 134 | "sha256:4f4de934b15c2276caa4fbc5a3b8a61c0eb0b234f2be1780d2b793321995c2d6", 135 | 
"sha256:dc492f8f17a0746e92081aec3f86ae0b4750bf41607ea2ad87e5a7b5705121b7", 136 | "sha256:eb6f9262d4d25b40330fb21d1e99bf0f85011ccc3526980f8a3eaedd4b43892e" 137 | ], 138 | "index": "pypi", 139 | "version": "==1.2.0" 140 | }, 141 | "jinja2": { 142 | "hashes": [ 143 | "sha256:03e47ad063331dd6a3f04a43eddca8a966a26ba0c5b7207a9a9e4e08f1b29419", 144 | "sha256:a6d58433de0ae800347cab1fa3043cebbabe8baa9d29e668f1c768cb87a333c6" 145 | ], 146 | "index": "pypi", 147 | "version": "==2.11.3" 148 | }, 149 | "markupsafe": { 150 | "hashes": [ 151 | "sha256:00bc623926325b26bb9605ae9eae8a215691f33cae5df11ca5424f06f2d1f473", 152 | "sha256:09027a7803a62ca78792ad89403b1b7a73a01c8cb65909cd876f7fcebd79b161", 153 | "sha256:09c4b7f37d6c648cb13f9230d847adf22f8171b1ccc4d5682398e77f40309235", 154 | "sha256:1027c282dad077d0bae18be6794e6b6b8c91d58ed8a8d89a89d59693b9131db5", 155 | "sha256:13d3144e1e340870b25e7b10b98d779608c02016d5184cfb9927a9f10c689f42", 156 | "sha256:195d7d2c4fbb0ee8139a6cf67194f3973a6b3042d742ebe0a9ed36d8b6f0c07f", 157 | "sha256:22c178a091fc6630d0d045bdb5992d2dfe14e3259760e713c490da5323866c39", 158 | "sha256:24982cc2533820871eba85ba648cd53d8623687ff11cbb805be4ff7b4c971aff", 159 | "sha256:29872e92839765e546828bb7754a68c418d927cd064fd4708fab9fe9c8bb116b", 160 | "sha256:2beec1e0de6924ea551859edb9e7679da6e4870d32cb766240ce17e0a0ba2014", 161 | "sha256:3b8a6499709d29c2e2399569d96719a1b21dcd94410a586a18526b143ec8470f", 162 | "sha256:43a55c2930bbc139570ac2452adf3d70cdbb3cfe5912c71cdce1c2c6bbd9c5d1", 163 | "sha256:46c99d2de99945ec5cb54f23c8cd5689f6d7177305ebff350a58ce5f8de1669e", 164 | "sha256:500d4957e52ddc3351cabf489e79c91c17f6e0899158447047588650b5e69183", 165 | "sha256:535f6fc4d397c1563d08b88e485c3496cf5784e927af890fb3c3aac7f933ec66", 166 | "sha256:596510de112c685489095da617b5bcbbac7dd6384aeebeda4df6025d0256a81b", 167 | "sha256:62fe6c95e3ec8a7fad637b7f3d372c15ec1caa01ab47926cfdf7a75b40e0eac1", 168 | "sha256:6788b695d50a51edb699cb55e35487e430fa21f1ed838122d722e0ff0ac5ba15", 169 | "sha256:6dd73240d2af64df90aa7c4e7481e23825ea70af4b4922f8ede5b9e35f78a3b1", 170 | "sha256:6f1e273a344928347c1290119b493a1f0303c52f5a5eae5f16d74f48c15d4a85", 171 | "sha256:6fffc775d90dcc9aed1b89219549b329a9250d918fd0b8fa8d93d154918422e1", 172 | "sha256:717ba8fe3ae9cc0006d7c451f0bb265ee07739daf76355d06366154ee68d221e", 173 | "sha256:79855e1c5b8da654cf486b830bd42c06e8780cea587384cf6545b7d9ac013a0b", 174 | "sha256:7c1699dfe0cf8ff607dbdcc1e9b9af1755371f92a68f706051cc8c37d447c905", 175 | "sha256:7fed13866cf14bba33e7176717346713881f56d9d2bcebab207f7a036f41b850", 176 | "sha256:84dee80c15f1b560d55bcfe6d47b27d070b4681c699c572af2e3c7cc90a3b8e0", 177 | "sha256:88e5fcfb52ee7b911e8bb6d6aa2fd21fbecc674eadd44118a9cc3863f938e735", 178 | "sha256:8defac2f2ccd6805ebf65f5eeb132adcf2ab57aa11fdf4c0dd5169a004710e7d", 179 | "sha256:98bae9582248d6cf62321dcb52aaf5d9adf0bad3b40582925ef7c7f0ed85fceb", 180 | "sha256:98c7086708b163d425c67c7a91bad6e466bb99d797aa64f965e9d25c12111a5e", 181 | "sha256:9add70b36c5666a2ed02b43b335fe19002ee5235efd4b8a89bfcf9005bebac0d", 182 | "sha256:9bf40443012702a1d2070043cb6291650a0841ece432556f784f004937f0f32c", 183 | "sha256:a6a744282b7718a2a62d2ed9d993cad6f5f585605ad352c11de459f4108df0a1", 184 | "sha256:acf08ac40292838b3cbbb06cfe9b2cb9ec78fce8baca31ddb87aaac2e2dc3bc2", 185 | "sha256:ade5e387d2ad0d7ebf59146cc00c8044acbd863725f887353a10df825fc8ae21", 186 | "sha256:b00c1de48212e4cc9603895652c5c410df699856a2853135b3967591e4beebc2", 187 | "sha256:b1282f8c00509d99fef04d8ba936b156d419be841854fe901d8ae224c59f0be5", 188 | 
"sha256:b1dba4527182c95a0db8b6060cc98ac49b9e2f5e64320e2b56e47cb2831978c7", 189 | "sha256:b2051432115498d3562c084a49bba65d97cf251f5a331c64a12ee7e04dacc51b", 190 | "sha256:b7d644ddb4dbd407d31ffb699f1d140bc35478da613b441c582aeb7c43838dd8", 191 | "sha256:ba59edeaa2fc6114428f1637ffff42da1e311e29382d81b339c1817d37ec93c6", 192 | "sha256:bf5aa3cbcfdf57fa2ee9cd1822c862ef23037f5c832ad09cfea57fa846dec193", 193 | "sha256:c8716a48d94b06bb3b2524c2b77e055fb313aeb4ea620c8dd03a105574ba704f", 194 | "sha256:caabedc8323f1e93231b52fc32bdcde6db817623d33e100708d9a68e1f53b26b", 195 | "sha256:cd5df75523866410809ca100dc9681e301e3c27567cf498077e8551b6d20e42f", 196 | "sha256:cdb132fc825c38e1aeec2c8aa9338310d29d337bebbd7baa06889d09a60a1fa2", 197 | "sha256:d53bc011414228441014aa71dbec320c66468c1030aae3a6e29778a3382d96e5", 198 | "sha256:d73a845f227b0bfe8a7455ee623525ee656a9e2e749e4742706d80a6065d5e2c", 199 | "sha256:d9be0ba6c527163cbed5e0857c451fcd092ce83947944d6c14bc95441203f032", 200 | "sha256:e249096428b3ae81b08327a63a485ad0878de3fb939049038579ac0ef61e17e7", 201 | "sha256:e8313f01ba26fbbe36c7be1966a7b7424942f670f38e666995b88d012765b9be", 202 | "sha256:feb7b34d6325451ef96bc0e36e1a6c0c1c64bc1fbec4b854f4529e51887b1621" 203 | ], 204 | "version": "==1.1.1" 205 | }, 206 | "packaging": { 207 | "hashes": [ 208 | "sha256:5b327ac1320dc863dca72f4514ecc086f31186744b84a230374cc1fd776feae5", 209 | "sha256:67714da7f7bc052e064859c05c595155bd1ee9f69f76557e21f051443c20947a" 210 | ], 211 | "version": "==20.9" 212 | }, 213 | "parver": { 214 | "hashes": [ 215 | "sha256:b8b2976fd8a73a0515465b2a265fd9b20cc25a6dc88bc1154fd5f60f10dad4db", 216 | "sha256:d9ae08a2629105fdb83e4971ae8a04f1de5a3803d1dd928f6e181aeadb398180" 217 | ], 218 | "index": "pypi", 219 | "version": "==0.2.0" 220 | }, 221 | "passa": { 222 | "editable": true, 223 | "path": "." 
224 | }, 225 | "pkginfo": { 226 | "hashes": [ 227 | "sha256:029a70cb45c6171c329dfc890cde0879f8c52d6f3922794796e06f577bb03db4", 228 | "sha256:9fdbea6495622e022cc72c2e5e1b735218e4ffb2a2a69cde2694a6c1f16afb75" 229 | ], 230 | "version": "==1.7.0" 231 | }, 232 | "pygments": { 233 | "hashes": [ 234 | "sha256:2656e1a6edcdabf4275f9a3640db59fd5de107d88e8663c5d4e9a0fa62f77f94", 235 | "sha256:534ef71d539ae97d4c3a4cf7d6f110f214b0e687e92f9cb9d2a3b0d3101289c8" 236 | ], 237 | "version": "==2.8.1" 238 | }, 239 | "pyparsing": { 240 | "hashes": [ 241 | "sha256:c203ec8783bf771a155b207279b9bccb8dea02d8f0c9e5f8ead507bc3246ecc1", 242 | "sha256:ef9d7589ef3c200abe66653d3f1ab1033c3c419ae9b9bdb1240a85b024efc88b" 243 | ], 244 | "version": "==2.4.7" 245 | }, 246 | "pytz": { 247 | "hashes": [ 248 | "sha256:83a4a90894bf38e243cf052c8b58f381bfe9a7a483f6a9cab140bc7f702ac4da", 249 | "sha256:eb10ce3e7736052ed3623d49975ce333bcd712c7bb19a58b9e2089d4057d0798" 250 | ], 251 | "version": "==2021.1" 252 | }, 253 | "readme-renderer": { 254 | "hashes": [ 255 | "sha256:63b4075c6698fcfa78e584930f07f39e05d46f3ec97f65006e430b595ca6348c", 256 | "sha256:92fd5ac2bf8677f310f3303aa4bce5b9d5f9f2094ab98c29f13791d7b805a3db" 257 | ], 258 | "version": "==29.0" 259 | }, 260 | "requests": { 261 | "hashes": [ 262 | "sha256:27973dd4a904a4f13b263a19c866c13b92a39ed1c964655f025f3f8d3d75b804", 263 | "sha256:c210084e36a42ae6b9219e00e48287def368a26d03a048ddad7bfee44f75871e" 264 | ], 265 | "version": "==2.25.1" 266 | }, 267 | "requests-toolbelt": { 268 | "hashes": [ 269 | "sha256:380606e1d10dc85c3bd47bf5a6095f815ec007be7a8b69c878507068df059e6f", 270 | "sha256:968089d4584ad4ad7c171454f0a5c6dac23971e9472521ea3b6d49d610aa6fc0" 271 | ], 272 | "version": "==0.9.1" 273 | }, 274 | "six": { 275 | "hashes": [ 276 | "sha256:30639c035cdb23534cd4aa2dd52c3bf48f06e5f4a941509c8bafd8ce11080259", 277 | "sha256:8b74bedcbbbaca38ff6d7491d76f2b06b3592611af620f8426e82dddb04a5ced" 278 | ], 279 | "version": "==1.15.0" 280 | }, 281 | "snowballstemmer": { 282 | "hashes": [ 283 | "sha256:b51b447bea85f9968c13b650126a888aabd4cb4463fca868ec596826325dedc2", 284 | "sha256:e997baa4f2e9139951b6f4c631bad912dfd3c792467e2f03d7239464af90e914" 285 | ], 286 | "version": "==2.1.0" 287 | }, 288 | "sphinx": { 289 | "hashes": [ 290 | "sha256:120732cbddb1b2364471c3d9f8bfd4b0c5b550862f99a65736c77f970b142aea", 291 | "sha256:b348790776490894e0424101af9c8413f2a86831524bd55c5f379d3e3e12ca64" 292 | ], 293 | "index": "pypi", 294 | "version": "==1.8.2" 295 | }, 296 | "sphinx-rtd-theme": { 297 | "hashes": [ 298 | "sha256:02f02a676d6baabb758a20c7a479d58648e0f64f13e07d1b388e9bb2afe86a09", 299 | "sha256:d0f6bc70f98961145c5b0e26a992829363a197321ba571b31b24ea91879e0c96" 300 | ], 301 | "index": "pypi", 302 | "version": "==0.4.2" 303 | }, 304 | "sphinxcontrib-serializinghtml": { 305 | "hashes": [ 306 | "sha256:eaa0eccc86e982a9b939b2b82d12cc5d013385ba5eadcc7e4fed23f4405f77bc", 307 | "sha256:f242a81d423f59617a8e5cf16f5d4d74e28ee9a66f9e5b637a18082991db5a9a" 308 | ], 309 | "version": "==1.1.4" 310 | }, 311 | "sphinxcontrib-websupport": { 312 | "hashes": [ 313 | "sha256:4edf0223a0685a7c485ae5a156b6f529ba1ee481a1417817935b20bde1956232", 314 | "sha256:6fc9287dfc823fe9aa432463edd6cea47fa9ebbf488d7f289b322ffcfca075c7" 315 | ], 316 | "version": "==1.2.4" 317 | }, 318 | "toml": { 319 | "hashes": [ 320 | "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b", 321 | "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f" 322 | ], 323 | "version": "==0.10.2" 324 | }, 325 | "towncrier": { 326 | 
"hashes": [ 327 | "sha256:3c0da1de042861df9030c0608d346d55cabe14cfa7cf04045f22b0af63eb8aa7", 328 | "sha256:cfde66ada782db9269407eaefb38562d3d2a4bc4d7647c0b2197ed184b869aae" 329 | ], 330 | "index": "pypi", 331 | "version": "==18.6.0" 332 | }, 333 | "tqdm": { 334 | "hashes": [ 335 | "sha256:9fdf349068d047d4cfbe24862c425883af1db29bcddf4b0eeb2524f6fbdb23c7", 336 | "sha256:d666ae29164da3e517fcf125e41d4fe96e5bb375cd87ff9763f6b38b5592fe33" 337 | ], 338 | "version": "==4.59.0" 339 | }, 340 | "twine": { 341 | "hashes": [ 342 | "sha256:7d89bc6acafb31d124e6e5b295ef26ac77030bf098960c2a4c4e058335827c5c", 343 | "sha256:fad6f1251195f7ddd1460cb76d6ea106c93adb4e56c41e0da79658e56e547d2c" 344 | ], 345 | "index": "pypi", 346 | "version": "==1.12.1" 347 | }, 348 | "urllib3": { 349 | "hashes": [ 350 | "sha256:2f4da4594db7e1e110a944bb1b551fdf4e6c136ad42e4234131391e21eb5b0df", 351 | "sha256:e7b021f7241115872f92f43c6508082facffbd1c048e3c6e2bb9c2a157e28937" 352 | ], 353 | "version": "==1.26.4" 354 | }, 355 | "webencodings": { 356 | "hashes": [ 357 | "sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78", 358 | "sha256:b36a1c245f2d304965eb4e0a82848379241dc04b865afcc4aab16748587e1923" 359 | ], 360 | "version": "==0.5.1" 361 | }, 362 | "wheel": { 363 | "hashes": [ 364 | "sha256:029703bf514e16c8271c3821806a1c171220cc5bdd325cbf4e7da1e056a01db6", 365 | "sha256:1e53cdb3f808d5ccd0df57f964263752aa74ea7359526d3da6c02114ec1e1d44" 366 | ], 367 | "index": "pypi", 368 | "version": "==0.32.3" 369 | } 370 | } 371 | } 372 | -------------------------------------------------------------------------------- /README.rst: -------------------------------------------------------------------------------- 1 | =================================== 2 | Passa: Toolset for Pipfile projects 3 | =================================== 4 | 5 | .. image:: https://img.shields.io/pypi/v/passa.svg 6 | :target: https://pypi.org/project/passa 7 | 8 | .. image:: https://img.shields.io/pypi/l/passa.svg 9 | :target: https://pypi.org/project/passa 10 | 11 | .. image:: https://api.travis-ci.com/sarugaku/passa.svg?branch=master 12 | :target: https://travis-ci.com/sarugaku/passa 13 | 14 | .. image:: https://ci.appveyor.com/api/projects/status/y9kpdaqy4di5nhyk/branch/master?svg=true 15 | :target: https://ci.appveyor.com/project/sarugaku/passa 16 | 17 | .. image:: https://img.shields.io/pypi/pyversions/passa.svg 18 | :target: https://pypi.org/project/passa 19 | 20 | .. image:: https://img.shields.io/badge/Say%20Thanks-!-1EAEDB.svg 21 | :target: https://saythanks.io/to/techalchemy 22 | 23 | .. image:: https://readthedocs.org/projects/passa/badge/?version=latest 24 | :target: https://passa.readthedocs.io/en/latest/?badge=latest 25 | :alt: Documentation Status 26 | 27 | 28 | Summary 29 | ======= 30 | 31 | Passa_ is a toolset for performing tasks in a Pipfile project, designed to be 32 | used as a backing component of Pipenv_. It contains several components: 33 | 34 | * A resolver designed for performing dependency resolution using a stateful 35 | look-forward algorithm to resolve dependencies (backed by ResolveLib_). 36 | * Interface to interact with individual requirement specifications inside 37 | Pipfile and Pipfile.lock (backed by RequirementsLib_). 38 | * A command line interface to invoke the above operations. 39 | 40 | .. _Passa: https://github.com/sarugaku/passa 41 | .. _Pipenv: https://github.com/pypa/pipenv 42 | .. _ResolveLib: https://github.com/sarugaku/resolvelib 43 | .. 
_RequirementsLib: https://github.com/sarugaku/requirementslib 44 | 45 | 46 | `Read the documentation <https://passa.readthedocs.io/en/latest/>`__. 47 | -------------------------------------------------------------------------------- /appveyor.yml: -------------------------------------------------------------------------------- 1 | branches: 2 | only: 3 | - master 4 | 5 | install: 6 | - "SET PATH=C:\\Python36-x64;%PATH%" 7 | - "python --version" 8 | - "python -m pip install --upgrade pip" 9 | - "python -m pip install --upgrade -e .[pack,tests]" 10 | 11 | build_script: 12 | - "python -m invoke pack" 13 | 14 | test_script: 15 | # Shorten paths, workaround https://bugs.python.org/issue18199 16 | - "subst T: %TEMP%" 17 | - "set TEMP=T:\\" 18 | - "set TMP=T:\\" 19 | - "python -m pytest -v -n 8 tests" 20 | 21 | artifacts: 22 | - path: ".\\pack\\passa.zip" 23 | -------------------------------------------------------------------------------- /docs/conf.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | # 3 | # Configuration file for the Sphinx documentation builder. 4 | # 5 | # This file does only contain a selection of the most common options. For a 6 | # full list see the documentation: 7 | # http://www.sphinx-doc.org/en/master/config 8 | 9 | # -- Path setup -------------------------------------------------------------- 10 | 11 | # If extensions (or modules to document with autodoc) are in another directory, 12 | # add these directories to sys.path here. If the directory is relative to the 13 | # documentation root, use os.path.abspath to make it absolute, like shown here. 14 | # 15 | # import os 16 | # import sys 17 | # sys.path.insert(0, os.path.abspath('.')) 18 | 19 | 20 | # -- Project information ----------------------------------------------------- 21 | 22 | project = 'Passa' 23 | copyright = '2018, Tzu-ping Chung, Dan Ryan' 24 | author = 'Tzu-ping Chung, Dan Ryan' 25 | 26 | # The short X.Y version 27 | version = '' 28 | # The full version, including alpha/beta/rc tags 29 | release = '' 30 | 31 | 32 | # -- General configuration --------------------------------------------------- 33 | 34 | # If your documentation needs a minimal Sphinx version, state it here. 35 | # 36 | # needs_sphinx = '1.0' 37 | 38 | # Add any Sphinx extension module names here, as strings. They can be 39 | # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom 40 | # ones. 41 | extensions = [ 42 | ] 43 | 44 | # Add any paths that contain templates here, relative to this directory. 45 | templates_path = ['_templates'] 46 | 47 | # The suffix(es) of source filenames. 48 | # You can specify multiple suffix as a list of string: 49 | # 50 | # source_suffix = ['.rst', '.md'] 51 | source_suffix = '.rst' 52 | 53 | # The master toctree document. 54 | master_doc = 'index' 55 | 56 | # The language for content autogenerated by Sphinx. Refer to documentation 57 | # for a list of supported languages. 58 | # 59 | # This is also used if you do content translation via gettext catalogs. 60 | # Usually you set "language" from the command line for these cases. 61 | language = None 62 | 63 | # List of patterns, relative to source directory, that match files and 64 | # directories to ignore when looking for source files. 65 | # This pattern also affects html_static_path and html_extra_path . 66 | exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store'] 67 | 68 | # The name of the Pygments (syntax highlighting) style to use.
69 | pygments_style = 'sphinx' 70 | 71 | 72 | # -- Options for HTML output ------------------------------------------------- 73 | 74 | # The theme to use for HTML and HTML Help pages. See the documentation for 75 | # a list of builtin themes. 76 | # 77 | html_theme = 'sphinx_rtd_theme' 78 | 79 | # Theme options are theme-specific and customize the look and feel of a theme 80 | # further. For a list of options available for each theme, see the 81 | # documentation. 82 | # 83 | # html_theme_options = {} 84 | 85 | # Add any paths that contain custom static files (such as style sheets) here, 86 | # relative to this directory. They are copied after the builtin static files, 87 | # so a file named "default.css" will overwrite the builtin "default.css". 88 | html_static_path = ['_static'] 89 | 90 | # Custom sidebar templates, must be a dictionary that maps document names 91 | # to template names. 92 | # 93 | # The default sidebars (for documents that don't match any pattern) are 94 | # defined by theme itself. Builtin themes are using these templates by 95 | # default: ``['localtoc.html', 'relations.html', 'sourcelink.html', 96 | # 'searchbox.html']``. 97 | # 98 | # html_sidebars = {} 99 | 100 | 101 | # -- Options for HTMLHelp output --------------------------------------------- 102 | 103 | # Output file base name for HTML help builder. 104 | htmlhelp_basename = 'Passadoc' 105 | 106 | 107 | # -- Options for LaTeX output ------------------------------------------------ 108 | 109 | latex_elements = { 110 | # The paper size ('letterpaper' or 'a4paper'). 111 | # 112 | # 'papersize': 'letterpaper', 113 | 114 | # The font size ('10pt', '11pt' or '12pt'). 115 | # 116 | # 'pointsize': '10pt', 117 | 118 | # Additional stuff for the LaTeX preamble. 119 | # 120 | # 'preamble': '', 121 | 122 | # Latex figure (float) alignment 123 | # 124 | # 'figure_align': 'htbp', 125 | } 126 | 127 | # Grouping the document tree into LaTeX files. List of tuples 128 | # (source start file, target name, title, 129 | # author, documentclass [howto, manual, or own class]). 130 | latex_documents = [ 131 | (master_doc, 'Passa.tex', 'Passa Documentation', 132 | 'Tzu-ping Chung, Dan Ryan', 'manual'), 133 | ] 134 | 135 | 136 | # -- Options for manual page output ------------------------------------------ 137 | 138 | # One entry per manual page. List of tuples 139 | # (source start file, name, description, authors, manual section). 140 | man_pages = [ 141 | (master_doc, 'passa', 'Passa Documentation', 142 | [author], 1) 143 | ] 144 | 145 | 146 | # -- Options for Texinfo output ---------------------------------------------- 147 | 148 | # Grouping the document tree into Texinfo files. List of tuples 149 | # (source start file, target name, title, author, 150 | # dir menu entry, description, category) 151 | texinfo_documents = [ 152 | (master_doc, 'Passa', 'Passa Documentation', 153 | author, 'Passa', 'One line description of project.', 154 | 'Miscellaneous'), 155 | ] 156 | -------------------------------------------------------------------------------- /docs/index.rst: -------------------------------------------------------------------------------- 1 | =================================== 2 | Passa: Toolset for Pipfile projects 3 | =================================== 4 | 5 | Passa is a toolset for performing tasks in a Pipfile project. It contains 6 | several components: 7 | 8 | * A resolver designed for performing dependency resolution using a stateful 9 | look-forward algorithm to resolve dependencies (backed by ResolveLib_). 
10 | * Interface to interact with individual requirement specifications inside 11 | Pipfile and Pipfile.lock (backed by RequirementsLib_). 12 | * A command line interface to invoke the above operations. 13 | 14 | .. _ResolveLib: https://github.com/sarugaku/resolvelib 15 | .. _RequirementsLib: https://github.com/sarugaku/requirementslib 16 | 17 | 18 | Quickstart 19 | ========== 20 | 21 | Passa distributions can be downloaded from Appveyor’s `artifacts page`_, as a 22 | ZIP file. 23 | 24 | .. _`artifacts page`: https://ci.appveyor.com/project/sarugaku/passa/build/artifacts 25 | 26 | Once downloaded, you can run ``passa.zip`` with the interpreter of the 27 | environment you want to manage: 28 | 29 | .. code-block:: none 30 | 31 | python passa.zip --help 32 | 33 | Use Passa to generate Pipfile.lock from the Pipfile in the current directory: 34 | 35 | .. code-block:: none 36 | 37 | python passa.zip lock 38 | 39 | Add packages to the project: 40 | 41 | .. code-block:: none 42 | 43 | python passa.zip add pytz requests tqdm 44 | 45 | Remove packages from the project: 46 | 47 | .. code-block:: none 48 | 49 | python passa.zip remove pytz 50 | 51 | Generate requirements.txt for the current project: 52 | 53 | .. code-block:: none 54 | 55 | python passa.zip freeze --target requirements.txt 56 | 57 | 58 | 59 | Distribution Notes 60 | ================== 61 | 62 | Passa is available on PyPI and installable with pip, but doing so is not 63 | recommended. Passa is designed to be run *inside* the Python environment, 64 | and, if installed with pip, would contaminate the very environment it wants to 65 | manage. 66 | 67 | The ZIP distribution is self-sufficient, and uses only the interpreter (and the 68 | standard library) to run itself, avoiding such contamination. 69 | -------------------------------------------------------------------------------- /news/.gitignore: -------------------------------------------------------------------------------- 1 | !.gitignore 2 | -------------------------------------------------------------------------------- /news/15.feature.rst: -------------------------------------------------------------------------------- 1 | pip and setuptools are now excluded from the lock file, unless they are explicitly added to Pipfile. This prevents packages with ill-advised dependencies from stopping them from being freely upgradable. 2 | -------------------------------------------------------------------------------- /news/22.bugfix.rst: -------------------------------------------------------------------------------- 1 | Fix crash during wheel building from a non-editable local directory due to incorrect source population logic. 2 | -------------------------------------------------------------------------------- /news/22.feature.rst: -------------------------------------------------------------------------------- 1 | Source code for non-local source distributions and VCS requirements is no longer kept after building wheels. A temporary directory is used instead, and removed after building. 2 | -------------------------------------------------------------------------------- /news/24.bugfix.rst: -------------------------------------------------------------------------------- 1 | Fix extras being dropped from the lock file if different parents require the same package with different extras.
2 | -------------------------------------------------------------------------------- /news/33.bugfix.rst: -------------------------------------------------------------------------------- 1 | Correctly fall back to detecting prereleases if a specifier yields no available non-prerelease versions. 2 | -------------------------------------------------------------------------------- /news/34.bugfix.rst: -------------------------------------------------------------------------------- 1 | Always resolve to the URL-based candidate if there are both named and non-named requirements pointing to the same package. The URL-based requirement must have been specified by the user, so we respect that. 2 | -------------------------------------------------------------------------------- /news/4.bugfix.rst: -------------------------------------------------------------------------------- 1 | Enforce that candidates with different extras always have the same version in the resolution result. This prevents edge cases where duplicate entries in different sections could produce nondeterministic results. 2 | -------------------------------------------------------------------------------- /news/49.feature.rst: -------------------------------------------------------------------------------- 1 | Fall back to evaluating egg-info in the sdist if a wheel cannot be built from it. 2 | -------------------------------------------------------------------------------- /news/50.bugfix.rst: -------------------------------------------------------------------------------- 1 | Correctly suppress the progress bar for pip’s ``unpack_url`` call. 2 | -------------------------------------------------------------------------------- /news/51.bugfix.rst: -------------------------------------------------------------------------------- 1 | Fix crash when running ``add`` and ``upgrade`` without a lock file. 2 | -------------------------------------------------------------------------------- /news/53.feature.rst: -------------------------------------------------------------------------------- 1 | Refactored and restructured the internals for improved organization and separation of concerns.
2 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [build-system] 2 | requires = ['setuptools>=36.2.2', 'wheel>=0.28.0'] 3 | 4 | [tool.towncrier] 5 | package = 'passa' 6 | package_dir = 'src' 7 | filename = 'CHANGELOG.rst' 8 | directory = 'news/' 9 | title_format = '{version} ({project_date})' 10 | issue_format = '`#{issue} <https://github.com/sarugaku/passa/issues/{issue}>`_' 11 | template = 'tasks/CHANGELOG.rst.jinja2' 12 | 13 | [[tool.towncrier.type]] 14 | directory = 'feature' 15 | name = 'Features' 16 | showcontent = true 17 | 18 | [[tool.towncrier.type]] 19 | directory = 'bugfix' 20 | name = 'Bug Fixes' 21 | showcontent = true 22 | 23 | [[tool.towncrier.type]] 24 | directory = 'trivial' 25 | name = 'Trivial Changes' 26 | showcontent = false 27 | 28 | [[tool.towncrier.type]] 29 | directory = 'removal' 30 | name = 'Removals and Deprecations' 31 | showcontent = true 32 | 33 | [[tool.towncrier.type]] 34 | directory = "docs" 35 | name = "Improved Documentation" 36 | showcontent = true 37 | -------------------------------------------------------------------------------- /setup.cfg: -------------------------------------------------------------------------------- 1 | [metadata] 2 | name = passa 3 | description = A resolver implementation for generating and interacting with Pipenv-compatible Lockfiles. 4 | url = https://github.com/sarugaku/passa 5 | author = Dan Ryan 6 | author_email = dan@danryan.co 7 | long_description = file: README.rst 8 | license = ISC License 9 | license_file = LICENSE 10 | keywords = 11 | dependency resolver 12 | package management 13 | package resolver 14 | resolver 15 | pip 16 | pipenv 17 | requirementslib 18 | pipfile 19 | dependencies 20 | dependency management 21 | classifier = 22 | Development Status :: 4 - Beta 23 | License :: OSI Approved :: ISC License (ISCL) 24 | Operating System :: OS Independent 25 | Programming Language :: Python :: 2 26 | Programming Language :: Python :: 2.7 27 | Programming Language :: Python :: 3 28 | Programming Language :: Python :: 3.4 29 | Programming Language :: Python :: 3.5 30 | Programming Language :: Python :: 3.6 31 | Programming Language :: Python :: 3.7 32 | Topic :: Software Development :: Libraries :: Python Modules 33 | 34 | [options] 35 | zip_safe = true 36 | python_requires = >=2.7,!=3.0,!=3.1,!=3.2,!=3.3 37 | setup_requires = setuptools>=36.2.2 38 | install_requires = 39 | appdirs 40 | distlib 41 | packaging 42 | pip-shims>=0.1.2 43 | plette[validation]>=0.2.2 44 | requests 45 | resolvelib>=0.2.1,!=1.0.0.dev0 46 | requirementslib>=1.1.1 47 | six 48 | vistir[spinner]>=0.1.4 49 | 50 | [options.extras_require] 51 | pack = 52 | invoke 53 | parver 54 | tests = 55 | pytest-xdist 56 | pytest-timeout 57 | pytest-cov 58 | pytest 59 | 60 | [options.entry_points] 61 | console_scripts = 62 | passa=passa.cli:main 63 | 64 | [bdist_wheel] 65 | universal = 1 66 | 67 | [isort] 68 | atomic=true 69 | lines_after_imports=2 70 | lines_between_types=1 71 | multi_line_output=5 72 | not_skip=__init__.py 73 | known_first_party = 74 | passa 75 | tests 76 | ignore_trailing_comma=true 77 | 78 | [flake8] 79 | exclude = .git,__pycache__,docs/,*.egg,build,data 80 | select = E,W,F 81 | ignore = 82 | # The default ignore list: 83 | E121,E123,E126,E226,E24,E704, 84 | # Our additions: 85 | # E127: continuation line over-indented for visual indent 86 | # E128: continuation line under-indented for visual indent 87 | # E129: visually indented line with same indent as
next logical line 88 | # E222: multiple spaces after operator 89 | # E231: missing whitespace after ',' 90 | # E402: module level import not at top of file 91 | # E501: line too long 92 | E127,E128,E129,E222,E231,E402,E501 93 | 94 | [tool:pytest] 95 | strict = true 96 | addopts = -ra 97 | testpaths = tests/ 98 | norecursedirs = .* build dist news tasks docs 99 | 100 | [build-system] 101 | requires = ["setuptools", "wheel"] 102 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | import ast 2 | import os 3 | 4 | from setuptools import find_packages, setup 5 | 6 | 7 | ROOT = os.path.dirname(__file__) 8 | 9 | PACKAGE_NAME = 'passa' 10 | 11 | VERSION = None 12 | 13 | with open(os.path.join(ROOT, 'src', PACKAGE_NAME, '__init__.py')) as f: 14 | for line in f: 15 | if line.startswith('__version__ = '): 16 | VERSION = ast.literal_eval(line[len('__version__ = '):].strip()) 17 | break 18 | if VERSION is None: 19 | raise EnvironmentError('failed to read version') 20 | 21 | 22 | # Put everything in setup.cfg, except those that don't actually work? 23 | setup( 24 | # These really don't work. 25 | package_dir={'': 'src'}, 26 | packages=find_packages('src'), 27 | 28 | # I don't know how to specify an empty key in setup.cfg. 29 | package_data={ 30 | '': ['LICENSE*', 'README*'], 31 | }, 32 | 33 | # I need this to be dynamic. 34 | version=VERSION, 35 | ) 36 | -------------------------------------------------------------------------------- /src/passa/__init__.py: -------------------------------------------------------------------------------- 1 | # -*- coding=utf-8 -*- 2 | 3 | __all__ = [ 4 | '__version__' 5 | ] 6 | 7 | __version__ = '0.3.1.dev0' 8 | -------------------------------------------------------------------------------- /src/passa/__main__.py: -------------------------------------------------------------------------------- 1 | # -*- coding=utf-8 -*- 2 | 3 | from .cli import main 4 | 5 | if __name__ == '__main__': 6 | main() 7 | -------------------------------------------------------------------------------- /src/passa/actions/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/sarugaku/passa/bf5e1dbba15363a5f705381f6fe2f86424d7e1ff/src/passa/actions/__init__.py -------------------------------------------------------------------------------- /src/passa/actions/add.py: -------------------------------------------------------------------------------- 1 | # -*- coding=utf-8 -*- 2 | 3 | from __future__ import absolute_import, print_function, unicode_literals 4 | 5 | import itertools 6 | import sys 7 | 8 | 9 | def add_packages(packages=[], editables=[], project=None, dev=False, sync=False, clean=False): 10 | from passa.models.lockers import PinReuseLocker 11 | from passa.operations.lock import lock 12 | 13 | lines = list(itertools.chain( 14 | packages, 15 | ("-e {}".format(e) for e in editables), 16 | )) 17 | 18 | project = project 19 | for line in lines: 20 | try: 21 | project.add_line_to_pipfile(line, develop=dev) 22 | except (TypeError, ValueError) as e: 23 | print("Cannot add {line!r} to Pipfile: {error}".format( 24 | line=line, error=str(e), 25 | ), file=sys.stderr) 26 | return 2 27 | 28 | prev_lockfile = project.lockfile 29 | 30 | locker = PinReuseLocker(project) 31 | success = lock(locker) 32 | if not success: 33 | return 1 34 | 35 | project._p.write() 36 | project._l.write() 37 | print("Written to 
project at", project.root) 38 | 39 | if not sync: 40 | return 41 | 42 | from passa.models.synchronizers import Synchronizer 43 | from passa.operations.sync import sync 44 | 45 | lockfile_diff = project.difference_lockfile(prev_lockfile) 46 | default = any(lockfile_diff.default) 47 | develop = any(lockfile_diff.develop) 48 | 49 | syncer = Synchronizer( 50 | project, default=default, develop=develop, 51 | clean_unneeded=clean, 52 | ) 53 | success = sync(syncer) 54 | if not success: 55 | return 1 56 | 57 | print("Synchronized project at", project.root) 58 | -------------------------------------------------------------------------------- /src/passa/actions/clean.py: -------------------------------------------------------------------------------- 1 | # -*- coding=utf-8 -*- 2 | 3 | from __future__ import absolute_import, print_function, unicode_literals 4 | 5 | 6 | def clean(project, default=True, dev=False): 7 | from passa.models.synchronizers import Cleaner 8 | from passa.operations.sync import clean 9 | 10 | cleaner = Cleaner(project, default=default, develop=dev) 11 | 12 | success = clean(cleaner) 13 | if not success: 14 | return 1 15 | 16 | print("Cleaned project at", project.root) 17 | -------------------------------------------------------------------------------- /src/passa/actions/freeze.py: -------------------------------------------------------------------------------- 1 | # -*- coding=utf-8 -*- 2 | 3 | from __future__ import absolute_import, print_function, unicode_literals 4 | 5 | import contextlib 6 | import io 7 | import itertools 8 | import sys 9 | 10 | import vistir.misc 11 | 12 | 13 | def _source_as_lines(source, extra): 14 | url = source["url"] 15 | if extra: 16 | lines = ["--extra-index-url {}".format(url)] 17 | else: 18 | lines = ["--index-url {}".format(url)] 19 | if not source.get("verify_ssl", True): 20 | lines.append("--trusted-host {}".format(url))  # Append, so the index URL line above is kept. 21 | return lines 22 | 23 | 24 | def _requirement_as_line(requirement, sources, include_hashes): 25 | if requirement.index: 26 | sources = sources 27 | else: 28 | sources = None 29 | line = vistir.misc.to_text( 30 | requirement.as_line(sources=sources, include_hashes=include_hashes) 31 | ) 32 | return line 33 | 34 | 35 | @contextlib.contextmanager 36 | def open_for_output(filename): 37 | if filename is None: 38 | yield sys.stdout 39 | return 40 | with io.open(filename, "w", encoding="utf-8", newline="\n") as f: 41 | yield f 42 | 43 | 44 | def freeze(project=None, default=True, dev=True, include_hashes=None, target=None): 45 | from requirementslib import Requirement 46 | 47 | lockfile = project.lockfile 48 | if not lockfile: 49 | print("Pipfile.lock is required to export.", file=sys.stderr) 50 | return 1 51 | 52 | section_names = [] 53 | if default: 54 | section_names.append("default") 55 | if dev: 56 | section_names.append("develop") 57 | requirements = [ 58 | Requirement.from_pipfile(key, entry._data) 59 | for key, entry in itertools.chain.from_iterable( 60 | lockfile.get(name, {}).items() 61 | for name in section_names 62 | ) 63 | ] 64 | 65 | if include_hashes is None: 66 | include_hashes = all(r.is_named for r in requirements) 67 | 68 | sources = lockfile.meta.sources._data 69 | 70 | source_lines = list(vistir.misc.dedup(itertools.chain( 71 | itertools.chain.from_iterable( 72 | _source_as_lines(source, False) 73 | for source in sources[:1] 74 | ), 75 | itertools.chain.from_iterable( 76 | _source_as_lines(source, True) 77 | for source in sources[1:] 78 | ), 79 | ))) 80 | 81 | requirement_lines = sorted(vistir.misc.dedup( 82 |
_requirement_as_line(requirement, sources, include_hashes) 83 | for requirement in requirements 84 | )) 85 | 86 | with open_for_output(target) as f: 87 | for line in source_lines: 88 | f.write(line) 89 | f.write("\n") 90 | f.write("\n") 91 | for line in requirement_lines: 92 | f.write(line) 93 | f.write("\n") 94 | -------------------------------------------------------------------------------- /src/passa/actions/init.py: -------------------------------------------------------------------------------- 1 | # -*- coding=utf-8 -*- 2 | 3 | from __future__ import absolute_import, print_function, unicode_literals 4 | 5 | import io 6 | import os 7 | from pip_shims import Command as PipCommand, cmdoptions 8 | import plette 9 | import six 10 | import vistir 11 | 12 | 13 | class PipCmd(PipCommand): 14 | name = "PipCmd" 15 | 16 | 17 | def get_sources(urls, trusted_hosts): 18 | trusted_hosts = [six.moves.urllib.parse.urlparse(url).netloc for url in trusted_hosts] 19 | sources = [] 20 | for url in urls: 21 | parsed_url = six.moves.urllib.parse.urlparse(url) 22 | netloc = parsed_url.netloc 23 | if '@' in netloc: 24 | _, _, netloc = netloc.rpartition('@') 25 | name, _, _ = netloc.partition('.') # Just use the domain name as the source name 26 | verify_ssl = True 27 | if netloc in trusted_hosts: 28 | verify_ssl = False 29 | sources.append({"url": url, "name": name, "verify_ssl": verify_ssl}) 30 | return sources 31 | 32 | 33 | def init_project(root=None, python_version=None): 34 | pipfile_path = os.path.join(root, "Pipfile") 35 | if os.path.isfile(pipfile_path): 36 | raise RuntimeError("{0!r} is already a Pipfile project".format(root)) 37 | if not os.path.exists(root): 38 | vistir.path.mkdir_p(root, mode=0o755) 39 | pip_command = PipCmd() 40 | cmdoptions.make_option_group(cmdoptions.index_group, pip_command.parser) 41 | parsed, _ = pip_command.parser.parse_args([]) 42 | index_urls = [parsed.index_url] + parsed.extra_index_urls 43 | sources = get_sources(index_urls, parsed.trusted_hosts) 44 | data = { 45 | "sources": sources, 46 | "packages": {}, 47 | "dev-packages": {}, 48 | } 49 | if python_version: 50 | data["requires"] = {"python_version": python_version} 51 | return create_project(pipfile_path=pipfile_path, data=data) 52 | 53 | 54 | def create_project(pipfile_path, data={}): 55 | pipfile = plette.pipfiles.Pipfile(data=data) 56 | with io.open(pipfile_path, "w") as fh: 57 | pipfile.dump(fh) 58 | print("Successfully created new pipfile at {0!r}".format(pipfile_path)) 59 | return 0 60 | -------------------------------------------------------------------------------- /src/passa/actions/install.py: -------------------------------------------------------------------------------- 1 | # -*- coding=utf-8 -*- 2 | 3 | from __future__ import absolute_import, print_function, unicode_literals 4 | 5 | 6 | def install(project=None, check=True, dev=False, clean=True): 7 | from passa.models.lockers import BasicLocker 8 | from passa.operations.lock import lock 9 | 10 | project = project 11 | 12 | if not check or not project.is_synced(): 13 | locker = BasicLocker(project) 14 | success = lock(locker) 15 | if not success: 16 | return 1 17 | project._l.write() 18 | print("Written to project at", project.root) 19 | 20 | from passa.models.synchronizers import Synchronizer 21 | from passa.operations.sync import sync 22 | 23 | syncer = Synchronizer( 24 | project, default=True, develop=dev, 25 | clean_unneeded=clean, 26 | ) 27 | 28 | success = sync(syncer) 29 | if not success: 30 | return 1 31 | 32 | print("Synchronized project 
at", project.root) 33 | -------------------------------------------------------------------------------- /src/passa/actions/lock.py: -------------------------------------------------------------------------------- 1 | # -*- coding=utf-8 -*- 2 | 3 | from __future__ import absolute_import, print_function, unicode_literals 4 | 5 | 6 | def lock(project=None): 7 | from passa.models.lockers import BasicLocker 8 | from passa.operations.lock import lock 9 | 10 | project = project 11 | locker = BasicLocker(project) 12 | success = lock(locker) 13 | if not success: 14 | return 15 | 16 | project._l.write() 17 | print("Written to project at", project.root) 18 | -------------------------------------------------------------------------------- /src/passa/actions/remove.py: -------------------------------------------------------------------------------- 1 | # -*- coding=utf-8 -*- 2 | 3 | from __future__ import absolute_import, print_function, unicode_literals 4 | 5 | 6 | def remove(project=None, only="default", packages=[], clean=True): 7 | from passa.models.lockers import PinReuseLocker 8 | from passa.operations.lock import lock 9 | 10 | default = (only != "dev") 11 | develop = (only != "default") 12 | 13 | project = project 14 | project.remove_keys_from_pipfile( 15 | packages, default=default, develop=develop, 16 | ) 17 | 18 | locker = PinReuseLocker(project) 19 | success = lock(locker) 20 | if not success: 21 | return 1 22 | 23 | project._p.write() 24 | project._l.write() 25 | print("Written to project at", project.root) 26 | 27 | if not clean: 28 | return 29 | 30 | from passa.models.synchronizers import Cleaner 31 | from passa.operations.sync import clean 32 | 33 | cleaner = Cleaner(project, default=True, develop=True) 34 | success = clean(cleaner) 35 | if not success: 36 | return 1 37 | 38 | print("Cleaned project at", project.root) 39 | -------------------------------------------------------------------------------- /src/passa/actions/sync.py: -------------------------------------------------------------------------------- 1 | # -*- coding=utf-8 -*- 2 | 3 | from __future__ import absolute_import, print_function, unicode_literals 4 | 5 | 6 | def sync(project=None, dev=False, clean=True): 7 | from passa.models.synchronizers import Synchronizer 8 | from passa.operations.sync import sync 9 | 10 | project = project 11 | syncer = Synchronizer( 12 | project, default=True, develop=dev, 13 | clean_unneeded=clean, 14 | ) 15 | 16 | success = sync(syncer) 17 | if not success: 18 | return 1 19 | 20 | print("Synchronized project at", project.root) 21 | -------------------------------------------------------------------------------- /src/passa/actions/upgrade.py: -------------------------------------------------------------------------------- 1 | # -*- coding=utf-8 -*- 2 | 3 | from __future__ import absolute_import, print_function, unicode_literals 4 | 5 | import sys 6 | 7 | 8 | def upgrade(project=None, strategy="only-if-needed", sync=True, packages=[]): 9 | from passa.models.lockers import EagerUpgradeLocker, PinReuseLocker 10 | from passa.operations.lock import lock 11 | 12 | for package in packages: 13 | if not project.contains_key_in_pipfile(package): 14 | print("{package!r} not found in Pipfile".format( 15 | package=package, 16 | ), file=sys.stderr) 17 | return 2 18 | 19 | project.remove_keys_from_lockfile(packages) 20 | 21 | prev_lockfile = project.lockfile 22 | 23 | if strategy == "eager": 24 | locker = EagerUpgradeLocker(project, packages) 25 | else: 26 | locker = PinReuseLocker(project) 27 | success = 
lock(locker) 28 | if not success: 29 | return 1 30 | 31 | project._l.write() 32 | print("Written to project at", project.root) 33 | 34 | if not sync: 35 | return 36 | 37 | from passa.operations.sync import sync 38 | from passa.models.synchronizers import Synchronizer 39 | 40 | lockfile_diff = project.difference_lockfile(prev_lockfile) 41 | default = bool(any(lockfile_diff.default)) 42 | develop = bool(any(lockfile_diff.develop)) 43 | 44 | syncer = Synchronizer( 45 | project, default=default, develop=develop, 46 | clean_unneeded=False, 47 | ) 48 | success = sync(syncer) 49 | if not success: 50 | return 1 51 | 52 | print("Synchronized project at", project.root) 53 | -------------------------------------------------------------------------------- /src/passa/cli/__init__.py: -------------------------------------------------------------------------------- 1 | # -*- coding=utf-8 -*- 2 | 3 | from __future__ import absolute_import, unicode_literals 4 | 5 | import argparse 6 | import importlib 7 | import pkgutil 8 | import sys 9 | 10 | from passa import __version__ 11 | 12 | 13 | CURRENT_MODULE_PATH = sys.modules[__name__].__path__ 14 | 15 | 16 | def main(argv=None): 17 | root_parser = argparse.ArgumentParser( 18 | prog="passa", 19 | description="Pipfile project management tool.", 20 | ) 21 | root_parser.add_argument( 22 | "--version", 23 | action="version", 24 | version="%(prog)s, version {}".format(__version__), 25 | help="show the version and exit", 26 | ) 27 | 28 | subparsers = root_parser.add_subparsers() 29 | for _, name, _ in pkgutil.iter_modules(CURRENT_MODULE_PATH, "."): 30 | module = importlib.import_module(name, __name__) 31 | try: 32 | klass = module.Command 33 | except AttributeError: 34 | continue 35 | parser = subparsers.add_parser(klass.name, help=klass.description) 36 | command = klass(parser) 37 | parser.set_defaults(func=command.run) 38 | 39 | options = root_parser.parse_args(argv) 40 | 41 | try: 42 | f = options.func 43 | except AttributeError: 44 | root_parser.print_help() 45 | result = -1 46 | else: 47 | result = f(options) 48 | if result is not None: 49 | sys.exit(result) 50 | -------------------------------------------------------------------------------- /src/passa/cli/_base.py: -------------------------------------------------------------------------------- 1 | # -*- coding=utf-8 -*- 2 | 3 | from __future__ import absolute_import, unicode_literals 4 | 5 | import argparse 6 | import os 7 | import sys 8 | 9 | from .options import project 10 | 11 | 12 | class BaseCommand(object): 13 | """A CLI command. 
14 | """ 15 | name = None 16 | description = None 17 | default_arguments = [project] 18 | arguments = [] 19 | 20 | def __init__(self, parser=None): 21 | if not parser: 22 | parser = argparse.ArgumentParser( 23 | prog=os.path.basename(sys.argv[0]), 24 | description="Base argument parser for passa" 25 | ) 26 | self.parser = parser 27 | self.add_arguments() 28 | 29 | @classmethod 30 | def build_parser(cls): 31 | parser = argparse.ArgumentParser( 32 | prog="passa {}".format(cls.name), 33 | description=cls.description, 34 | ) 35 | return cls(parser) 36 | 37 | @classmethod 38 | def run_parser(cls): 39 | parser = cls.build_parser() 40 | parser() 41 | 42 | def __call__(self, argv=None): 43 | options = self.parser.parse_args(argv) 44 | result = self.main(options) 45 | if result is not None: 46 | sys.exit(result) 47 | 48 | def add_default_arguments(self): 49 | for arg in self.default_arguments: 50 | arg.add_to_parser(self.parser) 51 | 52 | def add_arguments(self): 53 | self.add_default_arguments() 54 | for arg in self.arguments: 55 | arg.add_to_parser(self.parser) 56 | 57 | def main(self, options): 58 | return self.run(options) 59 | 60 | def run(self, options): 61 | raise NotImplementedError 62 | -------------------------------------------------------------------------------- /src/passa/cli/add.py: -------------------------------------------------------------------------------- 1 | # -*- coding=utf-8 -*- 2 | 3 | from __future__ import absolute_import, print_function, unicode_literals 4 | 5 | from ..actions.add import add_packages 6 | from ._base import BaseCommand 7 | from .options import package_group 8 | 9 | 10 | class Command(BaseCommand): 11 | 12 | name = "add" 13 | description = "Add packages to project." 14 | arguments = [package_group] 15 | 16 | def run(self, options): 17 | if not options.editables and not options.packages: 18 | self.parser.error("Must supply either a requirement or --editable") 19 | return add_packages( 20 | packages=options.packages, 21 | editables=options.editables, 22 | project=options.project, 23 | dev=options.dev, 24 | sync=options.sync 25 | ) 26 | 27 | 28 | if __name__ == "__main__": 29 | Command.run_parser() 30 | -------------------------------------------------------------------------------- /src/passa/cli/clean.py: -------------------------------------------------------------------------------- 1 | # -*- coding=utf-8 -*- 2 | 3 | from __future__ import absolute_import, print_function, unicode_literals 4 | 5 | from ..actions.clean import clean 6 | from ._base import BaseCommand 7 | from .options import dev, no_default 8 | 9 | 10 | class Command(BaseCommand): 11 | 12 | name = "clean" 13 | description = "Uninstall unlisted packages from the environment." 14 | arguments = [dev, no_default] 15 | 16 | def run(self, options): 17 | return clean(project=options.project, default=options.default, dev=options.dev) 18 | 19 | 20 | if __name__ == "__main__": 21 | Command.run_parser() 22 | -------------------------------------------------------------------------------- /src/passa/cli/freeze.py: -------------------------------------------------------------------------------- 1 | # -*- coding=utf-8 -*- 2 | 3 | from __future__ import absolute_import, print_function, unicode_literals 4 | 5 | from ..actions.freeze import freeze 6 | from ._base import BaseCommand 7 | from .options import dev, include_hashes_group, no_default, target 8 | 9 | 10 | class Command(BaseCommand): 11 | 12 | name = "freeze" 13 | description = "Export project depenencies to requirements.txt." 
14 |     arguments = [dev, no_default, target, include_hashes_group]
15 | 
16 |     def run(self, options):
17 |         return freeze(
18 |             project=options.project, default=options.default, dev=options.dev,
19 |             target=options.target, include_hashes=options.include_hashes,
20 |         )
21 | 
22 | 
23 | if __name__ == "__main__":
24 |     Command.run_parser()
25 | 
--------------------------------------------------------------------------------
/src/passa/cli/init.py:
--------------------------------------------------------------------------------
1 | # -*- coding=utf-8 -*-
2 | 
3 | from __future__ import absolute_import, print_function, unicode_literals
4 | 
5 | import os
6 | 
7 | from ..actions.init import init_project
8 | from ._base import BaseCommand
9 | from .options import new_project_group
10 | 
11 | 
12 | class Command(BaseCommand):
13 | 
14 |     name = "init"
15 |     description = "Create a new project."
16 |     default_arguments = []
17 |     arguments = [new_project_group]
18 | 
19 |     def run(self, options):
20 |         pipfile_path = os.path.join(options.project, "Pipfile")
21 |         if os.path.exists(pipfile_path):
22 |             self.parser.error(
23 |                 "{0!r} is already a Pipfile project".format(options.project),
24 |             )
25 |         return init_project(
26 |             root=options.project, python_version=options.python_version
27 |         )
28 | 
29 | 
30 | if __name__ == "__main__":
31 |     Command.run_parser()
32 | 
--------------------------------------------------------------------------------
/src/passa/cli/install.py:
--------------------------------------------------------------------------------
1 | # -*- coding=utf-8 -*-
2 | 
3 | from __future__ import absolute_import, print_function, unicode_literals
4 | 
5 | from ..actions.install import install
6 | from ._base import BaseCommand
7 | from .options import dev, no_check, no_clean
8 | 
9 | 
10 | class Command(BaseCommand):
11 | 
12 |     name = "install"
13 |     description = "Generate Pipfile.lock to synchronize the environment."
14 |     arguments = [no_check, dev, no_clean]
15 | 
16 |     def run(self, options):
17 |         return install(project=options.project, check=options.check, dev=options.dev,
18 |                        clean=options.clean)
19 | 
20 | 
21 | if __name__ == "__main__":
22 |     Command.run_parser()
23 | 
--------------------------------------------------------------------------------
/src/passa/cli/lock.py:
--------------------------------------------------------------------------------
1 | # -*- coding=utf-8 -*-
2 | 
3 | from __future__ import absolute_import, print_function, unicode_literals
4 | 
5 | from ..actions.lock import lock
6 | from ._base import BaseCommand
7 | 
8 | 
9 | class Command(BaseCommand):
10 |     name = "lock"
11 |     description = "Generate Pipfile.lock."
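    # A usage sketch (assuming passa is installed on PATH); --project is
    # supplied by the shared default arguments declared in options.py:
    #
    #     passa lock --project /path/to/project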
12 | 
13 |     def run(self, options):
14 |         return lock(project=options.project)
15 | 
16 | 
17 | if __name__ == "__main__":
18 |     Command.run_parser()
19 | 
--------------------------------------------------------------------------------
/src/passa/cli/options.py:
--------------------------------------------------------------------------------
1 | # -*- coding=utf-8 -*-
2 | from __future__ import absolute_import
3 | 
4 | import argparse
5 | import os
6 | import sys
7 | 
8 | import tomlkit.exceptions
9 | 
10 | import passa.models.projects
11 | import vistir
12 | 
13 | 
14 | PYTHON_VERSION = ".".join(str(v) for v in sys.version_info[:2])
15 | 
16 | 
17 | class Project(passa.models.projects.Project):
18 |     def __init__(self, root, *args, **kwargs):
19 |         root = vistir.compat.Path(root).absolute()
20 |         pipfile = root.joinpath("Pipfile")
21 |         if not pipfile.is_file():
22 |             raise argparse.ArgumentTypeError(
23 |                 "{0!r} is not a Pipfile project".format(root),
24 |             )
25 |         try:
26 |             super(Project, self).__init__(root.as_posix(), *args, **kwargs)
27 |         except tomlkit.exceptions.ParseError as e:
28 |             raise argparse.ArgumentTypeError(
29 |                 "failed to parse Pipfile: {0!r}".format(str(e)),
30 |             )
31 | 
32 |     def __name__(self):
33 |         return "Project Root"
34 | 
35 | 
36 | class Option(object):
37 |     def __init__(self, *args, **kwargs):
38 |         self.args = args
39 |         self.kwargs = kwargs
40 | 
41 |     def add_to_parser(self, parser):
42 |         parser.add_argument(*self.args, **self.kwargs)
43 | 
44 |     def add_to_group(self, group):
45 |         group.add_argument(*self.args, **self.kwargs)
46 | 
47 | 
48 | class ArgumentGroup(object):
49 |     def __init__(
50 |             self, name, parser=None,
51 |             is_mutually_exclusive=False,
52 |             required=None, options=None):
53 |         self.name = name
54 |         self.options = options or []
55 |         self.parser = parser
56 |         self.required = required
57 |         self.is_mutually_exclusive = is_mutually_exclusive
58 |         self.argument_group = None
59 | 
60 |     def add_to_parser(self, parser):
61 |         group = None
62 |         if self.is_mutually_exclusive:
63 |             group = parser.add_mutually_exclusive_group(required=self.required)
64 |         else:
65 |             group = parser.add_argument_group()
66 |         for option in self.options:
67 |             option.add_to_group(group)
68 |         self.argument_group = group
69 |         self.parser = parser
70 | 
71 | 
72 | project = Option(
73 |     "--project", metavar="project", default=os.getcwd(), type=Project,
74 |     help="path to project root (directory containing Pipfile)",
75 | )
76 | 
77 | new_project = Option(
78 |     "--project", metavar="project", default=os.getcwd(), type=str,
79 |     help="path to project root (directory containing Pipfile)",
80 | )
81 | 
82 | python_version = Option(
83 |     "--py-version", "--python-version", "--requires-python", metavar="python-version",
84 |     dest="python_version", default=PYTHON_VERSION, type=str,
85 |     help="required minor python version for the project"
86 | )
87 | 
88 | packages = Option(
89 |     "packages", metavar="package", nargs="*",
90 |     help="requirement to add (can be used multiple times)",
91 | )
92 | 
93 | editable = Option(
94 |     '-e', '--editable', dest='editables', nargs="*", default=[], metavar='path/vcs',
95 |     help="editable requirement to add (can be used multiple times)",
96 | )
97 | 
98 | dev = Option(
99 |     "--dev", action="store_true", default=False,
100 |     help="use [dev-packages] for install/freeze/uninstall operations",
101 | )
102 | 
103 | no_sync = Option(
104 |     "--no-sync", dest="sync", action="store_false", default=True,
105 |     help="do not synchronize the environment",
106 | )
107 | 
108 | target = Option(
| "-t", "--target", default=None, 110 | help="file to export into (default is to print to stdout)" 111 | ) 112 | 113 | no_default = Option( 114 | "--no-default", dest="default", action="store_false", default=True, 115 | help="do not include default packages when exporting, importing, or cleaning" 116 | ) 117 | 118 | include_hashes = Option( 119 | "--include-hashes", dest="include_hashes", action="store_true", 120 | help="output hashes in requirements.txt (default is to guess)", 121 | ) 122 | 123 | no_include_hashes = Option( 124 | "--no-include-hashes", dest="include_hashes", action="store_false", 125 | help="do not output hashes in requirements.txt (default is to guess)", 126 | ) 127 | 128 | no_check = Option( 129 | "--no-check", dest="check", action="store_false", default=True, 130 | help="do not check if Pipfile.lock is up to date, always resolve", 131 | ) 132 | 133 | no_clean = Option( 134 | "--no-clean", dest="clean", action="store_false", default=True, 135 | help="do not remove packages not specified in Pipfile.lock", 136 | ) 137 | 138 | dev_only = Option( 139 | "--dev", dest="only", action="store_const", const="dev", 140 | help="only try to modify [dev-packages]", 141 | ) 142 | 143 | default_only = Option( 144 | "--default", dest="only", action="store_const", const="default", 145 | help="only try to modify [default]", 146 | ) 147 | 148 | strategy = Option( 149 | "--strategy", choices=["eager", "only-if-needed"], default="only-if-needed", 150 | help="how dependency upgrading is handled", 151 | ) 152 | 153 | include_hashes_group = ArgumentGroup("include_hashes", is_mutually_exclusive=True, options=[include_hashes, no_include_hashes]) 154 | dev_group = ArgumentGroup("dev", is_mutually_exclusive="True", options=[dev_only, default_only]) 155 | package_group = ArgumentGroup("packages", options=[packages, editable, dev, no_sync]) 156 | new_project_group = ArgumentGroup("new-project", options=[new_project, python_version]) 157 | -------------------------------------------------------------------------------- /src/passa/cli/remove.py: -------------------------------------------------------------------------------- 1 | # -*- coding=utf-8 -*- 2 | 3 | from __future__ import absolute_import, print_function, unicode_literals 4 | 5 | from ..actions.remove import remove 6 | from ._base import BaseCommand 7 | from .options import dev_group, no_clean, packages 8 | 9 | 10 | class Command(BaseCommand): 11 | 12 | name = "remove" 13 | description = "Remove packages from project." 14 | arguments = [dev_group, no_clean, packages] 15 | 16 | def run(self, options): 17 | return remove(project=options.project, only=options.only, 18 | packages=options.packages, clean=options.clean) 19 | 20 | 21 | if __name__ == "__main__": 22 | Command.run_parser() 23 | -------------------------------------------------------------------------------- /src/passa/cli/sync.py: -------------------------------------------------------------------------------- 1 | # -*- coding=utf-8 -*- 2 | 3 | from __future__ import absolute_import, print_function, unicode_literals 4 | 5 | from ..actions.sync import sync 6 | from ._base import BaseCommand 7 | from .options import dev, no_clean 8 | 9 | 10 | class Command(BaseCommand): 11 | 12 | name = "sync" 13 | description = "Install Pipfile.lock into the environment." 
14 | arguments = [dev, no_clean] 15 | 16 | def run(self, options): 17 | return sync(project=options.project, dev=options.dev, clean=options.clean) 18 | 19 | 20 | if __name__ == "__main__": 21 | Command.run_parser() 22 | -------------------------------------------------------------------------------- /src/passa/cli/upgrade.py: -------------------------------------------------------------------------------- 1 | # -*- coding=utf-8 -*- 2 | from __future__ import absolute_import, print_function, unicode_literals 3 | 4 | from ..actions.upgrade import upgrade 5 | from ._base import BaseCommand 6 | from .options import no_clean, no_sync, packages, strategy 7 | 8 | 9 | class Command(BaseCommand): 10 | 11 | name = "upgrade" 12 | description = "Upgrade packages in project." 13 | arguments = [packages, strategy, no_clean, no_sync] 14 | 15 | def run(self, options): 16 | return upgrade(project=options.project, strategy=options.strategy, 17 | sync=options.sync, packages=options.packages) 18 | 19 | 20 | if __name__ == "__main__": 21 | Command.run_parser() 22 | -------------------------------------------------------------------------------- /src/passa/internals/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/sarugaku/passa/bf5e1dbba15363a5f705381f6fe2f86424d7e1ff/src/passa/internals/__init__.py -------------------------------------------------------------------------------- /src/passa/internals/_pip.py: -------------------------------------------------------------------------------- 1 | # -*- coding=utf-8 -*- 2 | 3 | from __future__ import absolute_import, unicode_literals 4 | 5 | import contextlib 6 | import io 7 | import itertools 8 | import distutils.log 9 | import os 10 | 11 | import distlib.database 12 | import distlib.scripts 13 | import distlib.wheel 14 | import packaging.utils 15 | import pip_shims 16 | import setuptools.dist 17 | import six 18 | import vistir 19 | 20 | from ..models.caches import CACHE_DIR 21 | from ._pip_shims import VCS_SUPPORT, build_wheel as _build_wheel, unpack_url 22 | from .utils import filter_sources 23 | 24 | 25 | @vistir.path.ensure_mkdir_p(mode=0o775) 26 | def _get_src_dir(): 27 | src = os.environ.get("PIP_SRC") 28 | if src: 29 | return src 30 | virtual_env = os.environ.get("VIRTUAL_ENV") 31 | if virtual_env: 32 | return os.path.join(virtual_env, "src") 33 | return os.path.join(os.getcwd(), "src") # Match pip's behavior. 34 | 35 | 36 | def _prepare_wheel_building_kwargs(ireq): 37 | download_dir = os.path.join(CACHE_DIR, "pkgs") 38 | vistir.mkdir_p(download_dir) 39 | 40 | wheel_download_dir = os.path.join(CACHE_DIR, "wheels") 41 | vistir.mkdir_p(wheel_download_dir) 42 | 43 | if ireq.source_dir is not None: 44 | src_dir = ireq.source_dir 45 | elif ireq.editable: 46 | src_dir = _get_src_dir() 47 | else: 48 | src_dir = vistir.path.create_tracked_tempdir(prefix='passa-src') 49 | 50 | # This logic matches pip's behavior, although I don't fully understand the 51 | # intention. I guess the idea is to build editables in-place, otherwise out 52 | # of the source tree? 
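    # Roughly, the mapping returned below looks like this for a
    # non-editable requirement (directory names are illustrative only):
    #
    #     {
    #         "build_dir": "/tmp/passa-build-xxxx",
    #         "src_dir": "/tmp/passa-src-xxxx",
    #         "download_dir": "<CACHE_DIR>/pkgs",
    #         "wheel_download_dir": "<CACHE_DIR>/wheels",
    #     }
    #
    # For an editable requirement, build_dir and src_dir are the same
    # directory, so the build happens in-place.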
53 | if ireq.editable: 54 | build_dir = src_dir 55 | else: 56 | build_dir = vistir.path.create_tracked_tempdir(prefix="passa-build") 57 | 58 | return { 59 | "build_dir": build_dir, 60 | "src_dir": src_dir, 61 | "download_dir": download_dir, 62 | "wheel_download_dir": wheel_download_dir, 63 | } 64 | 65 | 66 | def _get_pip_index_urls(sources): 67 | index_urls = [] 68 | trusted_hosts = [] 69 | for source in sources: 70 | url = source.get("url") 71 | if not url: 72 | continue 73 | index_urls.append(url) 74 | if source.get("verify_ssl", True): 75 | continue 76 | host = six.moves.urllib.parse.urlparse(source["url"]).hostname 77 | trusted_hosts.append(host) 78 | return index_urls, trusted_hosts 79 | 80 | 81 | class _PipCommand(pip_shims.Command): 82 | name = "PipCommand" 83 | 84 | 85 | def _get_pip_session(trusted_hosts): 86 | cmd = _PipCommand() 87 | options, _ = cmd.parser.parse_args([]) 88 | options.cache_dir = CACHE_DIR 89 | options.trusted_hosts = trusted_hosts 90 | session = cmd._build_session(options) 91 | return session 92 | 93 | 94 | def _get_finder(sources): 95 | index_urls, trusted_hosts = _get_pip_index_urls(sources) 96 | session = _get_pip_session(trusted_hosts) 97 | finder = pip_shims.PackageFinder( 98 | find_links=[], 99 | index_urls=index_urls, 100 | trusted_hosts=trusted_hosts, 101 | allow_all_prereleases=True, 102 | session=session, 103 | ) 104 | return finder 105 | 106 | 107 | def _get_wheel_cache(): 108 | format_control = pip_shims.FormatControl(set(), set()) 109 | wheel_cache = pip_shims.WheelCache(CACHE_DIR, format_control) 110 | return wheel_cache 111 | 112 | 113 | def _convert_hashes(values): 114 | """Convert Pipfile.lock hash lines into InstallRequirement option format. 115 | 116 | The option format uses a str-list mapping. Keys are hash algorithms, and 117 | the list contains all values of that algorithm. 118 | """ 119 | hashes = {} 120 | if not values: 121 | return hashes 122 | for value in values: 123 | try: 124 | name, value = value.split(":", 1) 125 | except ValueError: 126 | name = "sha256" 127 | if name not in hashes: 128 | hashes[name] = [] 129 | hashes[name].append(value) 130 | return hashes 131 | 132 | 133 | class WheelBuildError(RuntimeError): 134 | pass 135 | 136 | 137 | def build_wheel(ireq, sources, hashes=None): 138 | """Build a wheel file for the InstallRequirement object. 139 | 140 | An artifact is downloaded (or read from cache). If the artifact is not a 141 | wheel, build one out of it. The dynamically built wheel is ephemeral; do 142 | not depend on its existence after the returned wheel goes out of scope. 143 | 144 | If `hashes` is truthy, it is assumed to be a list of hashes (as formatted 145 | in Pipfile.lock) to be checked against the download. 146 | 147 | Returns a `distlib.wheel.Wheel` instance. Raises a `WheelBuildError` (a 148 | `RuntimeError` subclass) if the wheel cannot be built. 149 | """ 150 | kwargs = _prepare_wheel_building_kwargs(ireq) 151 | finder = _get_finder(sources) 152 | 153 | # Not for upgrade, hash not required. Hashes are not required here even 154 | # when we provide them, because pip skips local wheel cache if we set it 155 | # to True. Hashes are checked later if we need to download the file. 156 | ireq.populate_link(finder, False, False) 157 | 158 | # Ensure ireq.source_dir is set. 159 | # This is intentionally set to build_dir, not src_dir. Comments from pip: 160 | # [...] 
if filesystem packages are not marked editable in a req, a non
161 |     # deterministic error occurs when the script attempts to unpack the
162 |     # build directory.
163 |     # Also see comments in `_prepare_wheel_building_kwargs()` -- If the ireq
164 |     # is editable, build_dir is actually src_dir, making the build in-place.
165 |     ireq.ensure_has_source_dir(kwargs["build_dir"])
166 | 
167 |     # Ensure the source is fetched. For wheels, it is enough to just download
168 |     # because we'll use them directly. For an sdist, we need to unpack so we
169 |     # can build it.
170 |     if not ireq.editable or not pip_shims.is_file_url(ireq.link):
171 |         if ireq.is_wheel:
172 |             only_download = True
173 |             download_dir = kwargs["wheel_download_dir"]
174 |         else:
175 |             only_download = False
176 |             download_dir = kwargs["download_dir"]
177 |         ireq.options["hashes"] = _convert_hashes(hashes)
178 |         unpack_url(
179 |             ireq.link, ireq.source_dir, download_dir,
180 |             only_download=only_download, session=finder.session,
181 |             hashes=ireq.hashes(False), progress_bar="off",
182 |         )
183 | 
184 |     if ireq.is_wheel:
185 |         # If this is a wheel, use the downloaded thing.
186 |         output_dir = kwargs["wheel_download_dir"]
187 |         wheel_path = os.path.join(output_dir, ireq.link.filename)
188 |     else:
189 |         # Otherwise we need to build an ephemeral wheel.
190 |         wheel_path = _build_wheel(
191 |             ireq, vistir.path.create_tracked_tempdir(prefix="ephem"),
192 |             finder, _get_wheel_cache(), kwargs,
193 |         )
194 |     if wheel_path is None or not os.path.exists(wheel_path):
195 |         raise WheelBuildError
196 |     return distlib.wheel.Wheel(wheel_path)
197 | 
198 | 
199 | def _obtain_ref(vcs_obj, src_dir, name, rev=None):
200 |     target_dir = os.path.join(src_dir, name)
201 |     target_rev = vcs_obj.make_rev_options(rev)
202 |     if not os.path.exists(target_dir):
203 |         vcs_obj.obtain(target_dir)
204 |     if (not vcs_obj.is_commit_id_equal(target_dir, rev) and
205 |             not vcs_obj.is_commit_id_equal(target_dir, target_rev)):
206 |         vcs_obj.update(target_dir, target_rev)
207 |     return vcs_obj.get_revision(target_dir)
208 | 
209 | 
210 | def get_vcs_ref(requirement):
211 |     backend = VCS_SUPPORT.get_backend(requirement.vcs)
212 |     vcs = backend(url=requirement.req.vcs_uri)
213 |     src = _get_src_dir()
214 |     name = requirement.normalized_name
215 |     ref = _obtain_ref(vcs, src, name, rev=requirement.req.ref)
216 |     return ref
217 | 
218 | 
219 | def find_installation_candidates(ireq, sources):
220 |     finder = _get_finder(sources)
221 |     return finder.find_all_candidates(ireq.name)
222 | 
223 | 
224 | class RequirementUninstaller(object):
225 |     """A context manager to remove a package for the inner block.
226 | 
227 |     This uses `UninstallPathSet` to control the workflow. If the inner block
228 |     exits correctly, the uninstallation is committed, otherwise rolled back.
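
    A sketch of intended usage, via the `uninstall()` helper defined
    below::

        with uninstall("requests", auto_confirm=True, verbose=False):
            ...  # if this block raises, the removal is rolled back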
229 | """ 230 | def __init__(self, ireq, auto_confirm, verbose): 231 | self.ireq = ireq 232 | self.pathset = None 233 | self.auto_confirm = auto_confirm 234 | self.verbose = verbose 235 | 236 | def __enter__(self): 237 | self.pathset = self.ireq.uninstall( 238 | auto_confirm=self.auto_confirm, 239 | verbose=self.verbose, 240 | ) 241 | return self.pathset 242 | 243 | def __exit__(self, exc_type, exc_value, traceback): 244 | if self.pathset is None: 245 | return 246 | if exc_type is None: 247 | self.pathset.commit() 248 | else: 249 | self.pathset.rollback() 250 | 251 | 252 | def uninstall(name, **kwargs): 253 | ireq = pip_shims.InstallRequirement.from_line(name) 254 | return RequirementUninstaller(ireq, **kwargs) 255 | 256 | 257 | @contextlib.contextmanager 258 | def _suppress_distutils_logs(): 259 | """Hack to hide noise generated by `setup.py develop`. 260 | 261 | There isn't a good way to suppress them now, so let's monky-patch. 262 | See https://bugs.python.org/issue25392. 263 | """ 264 | f = distutils.log.Log._log 265 | 266 | def _log(log, level, msg, args): 267 | if level >= distutils.log.ERROR: 268 | f(log, level, msg, args) 269 | 270 | distutils.log.Log._log = _log 271 | yield 272 | distutils.log.Log._log = f 273 | 274 | 275 | class NoopInstaller(object): 276 | """An installer. 277 | 278 | This class is not designed to be instantiated by itself, but used as a 279 | common interface for subclassing. 280 | 281 | An installer has two methods, `prepare()` and `install()`. Neither takes 282 | arguments, and should be called in that order to prepare an installation 283 | operation, and to actually install things. 284 | """ 285 | def prepare(self): 286 | pass 287 | 288 | def install(self): 289 | pass 290 | 291 | 292 | class EditableInstaller(NoopInstaller): 293 | """Installer to handle editable. 294 | """ 295 | def __init__(self, requirement): 296 | ireq = requirement.as_ireq() 297 | self.working_directory = ireq.setup_py_dir 298 | self.setup_py = ireq.setup_py 299 | 300 | def install(self): 301 | with vistir.cd(self.working_directory), _suppress_distutils_logs(): 302 | # Access from Setuptools to ensure things are patched correctly. 303 | setuptools.dist.distutils.core.run_setup( 304 | self.setup_py, ["develop", "--no-deps"], 305 | ) 306 | 307 | 308 | class WheelInstaller(NoopInstaller): 309 | """Installer by building a wheel. 310 | 311 | The wheel is built during `prepare()`, and installed in `install()`. 312 | """ 313 | def __init__(self, requirement, sources, paths): 314 | self.ireq = requirement.as_ireq() 315 | self.sources = filter_sources(requirement, sources) 316 | self.hashes = requirement.hashes or None 317 | self.paths = paths 318 | self.wheel = None 319 | 320 | def prepare(self): 321 | self.wheel = build_wheel(self.ireq, self.sources, self.hashes) 322 | 323 | def install(self): 324 | self.wheel.install(self.paths, distlib.scripts.ScriptMaker(None, None)) 325 | 326 | 327 | def _iter_egg_info_directories(root, name): 328 | name = packaging.utils.canonicalize_name(name) 329 | for parent, dirnames, filenames in os.walk(root): 330 | matched_indexes = [] 331 | for i, dirname in enumerate(dirnames): 332 | if not dirname.lower().endswith("egg-info"): 333 | continue 334 | egg_info_name = packaging.utils.canonicalize_name(dirname[:-9]) 335 | if egg_info_name != name: 336 | continue 337 | matched_indexes.append(i) 338 | yield os.path.join(parent, dirname) 339 | 340 | # Modify dirnames in-place to NOT look into egg-info directories. 341 | # This is a documented behavior in stdlib. 
345 |         for i in reversed(matched_indexes):
346 |             del dirnames[i]
347 | 
348 | 
349 | def _read_pkg_info(directory):
350 |     path = os.path.join(directory, "PKG-INFO")
351 |     try:
352 |         with io.open(path, encoding="utf-8", errors="replace") as f:
353 |             return f.read()
354 |     except (IOError, OSError):
355 |         return None
356 | 
357 | 
358 | def _find_egg_info(ireq):
359 |     """Find this package's .egg-info directory.
360 | 
361 |     Due to how sdists are designed, the .egg-info directory cannot be reliably
362 |     found without running setup.py to aggregate all configurations. This
363 |     function instead uses some heuristics to locate the egg-info directory
364 |     that most likely represents this package.
365 | 
366 |     The best .egg-info directory's path is returned as a string. None is
367 |     returned if no matches can be found.
368 |     """
369 |     root = ireq.setup_py_dir
370 | 
371 |     directory_iterator = _iter_egg_info_directories(root, ireq.name)
372 |     try:
373 |         top_egg_info = next(directory_iterator)
374 |     except StopIteration:   # No egg-info found. Wat.
375 |         return None
376 |     directory_iterator = itertools.chain([top_egg_info], directory_iterator)
377 | 
378 |     # Read the sdist's PKG-INFO to determine which egg-info is best.
379 |     pkg_info = _read_pkg_info(root)
380 | 
381 |     # PKG-INFO not readable. Just return whatever comes first, I guess.
382 |     if pkg_info is None:
383 |         return top_egg_info
384 | 
385 |     # Walk the sdist to find the egg-info with matching PKG-INFO.
386 |     for directory in directory_iterator:
387 |         egg_pkg_info = _read_pkg_info(directory)
388 |         if egg_pkg_info == pkg_info:
389 |             return directory
390 | 
391 |     # Nothing matches...? Use the first one we found, I guess.
392 |     return top_egg_info
393 | 
394 | 
395 | def read_sdist_metadata(ireq):
396 |     egg_info_dir = _find_egg_info(ireq)
397 |     if not egg_info_dir:
398 |         return None
399 |     distribution = distlib.database.EggInfoDistribution(egg_info_dir)
400 |     return distribution.metadata
401 | 
--------------------------------------------------------------------------------
/src/passa/internals/_pip_shims.py:
--------------------------------------------------------------------------------
1 | # -*- coding=utf-8 -*-
2 | 
3 | """Shims to make the pip interface more consistent across versions.
4 | 
5 | There are currently three members:
6 | 
7 | * VCS_SUPPORT is an instance of VcsSupport.
8 | * build_wheel abstracts the process to build a wheel out of a bunch of parameters.
9 | * unpack_url wraps the actual function in pip to accept modern parameters.
10 | """
11 | 
12 | from __future__ import absolute_import, unicode_literals
13 | 
14 | import pip_shims
15 | 
16 | 
17 | def _build_wheel_pre10(ireq, output_dir, finder, wheel_cache, kwargs):
18 |     kwargs.update({"wheel_cache": wheel_cache, "session": finder.session})
19 |     reqset = pip_shims.RequirementSet(**kwargs)
20 |     builder = pip_shims.WheelBuilder(reqset, finder)
21 |     return builder._build_one(ireq, output_dir)
22 | 
23 | 
24 | def _build_wheel_modern(ireq, output_dir, finder, wheel_cache, kwargs):
25 |     """Build a wheel.
26 | 
27 |     * ireq: The InstallRequirement object to build
28 |     * output_dir: The directory to build the wheel in.
29 |     * finder: pip's internal Finder object to find the source out of ireq.
30 |     * kwargs: Various keyword arguments from `_prepare_wheel_building_kwargs`.
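
    Like pip's WheelBuilder._build_one, this returns the path to the built
    wheel, or None if the build failed.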
31 | """ 32 | kwargs.update({"progress_bar": "off", "build_isolation": False}) 33 | with pip_shims.RequirementTracker() as req_tracker: 34 | if req_tracker: 35 | kwargs["req_tracker"] = req_tracker 36 | preparer = pip_shims.RequirementPreparer(**kwargs) 37 | builder = pip_shims.WheelBuilder(finder, preparer, wheel_cache) 38 | return builder._build_one(ireq, output_dir) 39 | 40 | 41 | def _unpack_url_pre10(*args, **kwargs): 42 | """Shim for unpack_url in various pip versions. 43 | 44 | pip before 10.0 does not accept `progress_bar` here. Simply drop it. 45 | """ 46 | kwargs.pop("progress_bar", None) 47 | return pip_shims.unpack_url(*args, **kwargs) 48 | 49 | 50 | PIP_VERSION = pip_shims.utils._parse(pip_shims.pip_version) 51 | VERSION_10 = pip_shims.utils._parse("10") 52 | 53 | 54 | VCS_SUPPORT = pip_shims.VcsSupport() 55 | 56 | build_wheel = _build_wheel_modern 57 | unpack_url = pip_shims.unpack_url 58 | 59 | if PIP_VERSION < VERSION_10: 60 | build_wheel = _build_wheel_pre10 61 | unpack_url = _unpack_url_pre10 62 | -------------------------------------------------------------------------------- /src/passa/internals/candidates.py: -------------------------------------------------------------------------------- 1 | # -*- coding=utf-8 -*- 2 | 3 | from __future__ import absolute_import, unicode_literals 4 | 5 | import packaging.specifiers 6 | import packaging.version 7 | import requirementslib 8 | 9 | from ._pip import find_installation_candidates, get_vcs_ref 10 | 11 | 12 | def _filter_matching_python_requirement(candidates, required_python): 13 | # TODO: This should also takes the parent's python_version and 14 | # python_full_version markers, and only return matches with valid 15 | # intersections. For example, if parent requires `python_version >= '3.0'`, 16 | # this should not return entries with "Requires-Python: <3". 17 | for c in candidates: 18 | try: 19 | requires_python = c.requires_python 20 | except AttributeError: 21 | requires_python = c.location.requires_python 22 | if required_python and requires_python: 23 | # Old specifications had people setting this to single digits 24 | # which is effectively the same as '>=digit, 0 and elements[i - 1] == "and": 27 | # Remove the "and" before it. 28 | del elements[i - 1] 29 | elif elements: 30 | # This shouldn't ever happen, but is included for completeness. 31 | # If there is not an "and" before this element, try to remove the 32 | # operator after it. 33 | del elements[0] 34 | return (not elements) 35 | 36 | 37 | def get_without_extra(marker): 38 | """Build a new marker without the `extra == ...` part. 39 | 40 | The implementation relies very deep into packaging's internals, but I don't 41 | have a better way now (except implementing the whole thing myself). 42 | 43 | This could return `None` if the `extra == ...` part is the only one in the 44 | input marker. 45 | """ 46 | # TODO: Why is this very deep in the internals? Why is a better solution 47 | # implementing it yourself when someone is already maintaining a codebase 48 | # for this? It's literally a grammar implementation that is required to 49 | # meet the demands of a pep... -d 50 | if not marker: 51 | return None 52 | marker = Marker(str(marker)) 53 | elements = marker._markers 54 | _strip_extra(elements) 55 | if elements: 56 | return marker 57 | return None 58 | 59 | 60 | def _markers_collect_extras(markers, collection): 61 | # Optimization: the marker element is usually appended at the end. 
62 |     for el in reversed(markers):
63 |         if (isinstance(el, tuple) and
64 |                 el[0].value == "extra" and
65 |                 el[1].value == "=="):
66 |             collection.add(el[2].value)
67 |         elif isinstance(el, list):
68 |             _markers_collect_extras(el, collection)
69 | 
70 | 
71 | def get_contained_extras(marker):
72 |     """Collect "extra == ..." operands from a marker.
73 | 
74 |     Returns a set of str. Each str is a specified extra in this marker.
75 |     """
76 |     if not marker:
77 |         return set()
78 |     marker = Marker(str(marker))
79 |     extras = set()
80 |     _markers_collect_extras(marker._markers, extras)
81 |     return extras
82 | 
83 | 
84 | def _markers_contains_extra(markers):
85 |     # Optimization: the marker element is usually appended at the end.
86 |     for element in reversed(markers):
87 |         if isinstance(element, tuple) and element[0].value == "extra":
88 |             return True
89 |         elif isinstance(element, list):
90 |             if _markers_contains_extra(element):
91 |                 return True
92 |     return False
93 | 
94 | 
95 | def contains_extra(marker):
96 |     """Check whether a marker contains an "extra == ..." operand.
97 |     """
98 |     if not marker:
99 |         return False
100 |     marker = Marker(str(marker))
101 |     return _markers_contains_extra(marker._markers)
102 | 
--------------------------------------------------------------------------------
/src/passa/internals/reporters.py:
--------------------------------------------------------------------------------
1 | # -*- coding=utf-8 -*-
2 | 
3 | from __future__ import absolute_import, print_function, unicode_literals
4 | 
5 | import resolvelib
6 | 
7 | from .traces import trace_graph
8 | 
9 | 
10 | def print_title(text):
11 |     print('\n{:=^84}\n'.format(text))
12 | 
13 | 
14 | def print_requirement(r, end='\n'):
15 |     print('{:>40}'.format(r.as_line(include_hashes=False)), end=end)
16 | 
17 | 
18 | def print_dependency(state, key):
19 |     print_requirement(state.mapping[key], end='')
20 |     parents = sorted(
21 |         state.graph.iter_parents(key),
22 |         key=lambda n: (-1, '') if n is None else (ord(n[0].lower()), n),
23 |     )
24 |     for i, p in enumerate(parents):
25 |         if p is None:
26 |             line = '(user)'
27 |         else:
28 |             line = state.mapping[p].as_line(include_hashes=False)
29 |         if i == 0:
30 |             padding = ' <= '
31 |         else:
32 |             padding = ' ' * 44
33 |         print('{pad}{line}'.format(pad=padding, line=line))
34 | 
35 | 
36 | class StdOutReporter(resolvelib.BaseReporter):
37 |     """Simple reporter that prints things to stdout.
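
    The hooks below are callbacks; resolvelib's resolver invokes them as
    resolution progresses.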
38 | """ 39 | def __init__(self, requirements): 40 | super(StdOutReporter, self).__init__() 41 | self.requirements = requirements 42 | 43 | def starting(self): 44 | self._prev = None 45 | print_title(' User requirements ') 46 | for r in self.requirements: 47 | print_requirement(r) 48 | 49 | def ending_round(self, index, state): 50 | print_title(' Round {} '.format(index)) 51 | mapping = state.mapping 52 | if self._prev is None: 53 | difference = set(mapping.keys()) 54 | changed = set() 55 | else: 56 | difference = set(mapping.keys()) - set(self._prev.keys()) 57 | changed = set( 58 | k for k, v in mapping.items() 59 | if k in self._prev and self._prev[k] != v 60 | ) 61 | self._prev = mapping 62 | 63 | if difference: 64 | print('New pins: ') 65 | for k in difference: 66 | print_dependency(state, k) 67 | print() 68 | 69 | if changed: 70 | print('Changed pins:') 71 | for k in changed: 72 | print_dependency(state, k) 73 | print() 74 | 75 | def ending(self, state): 76 | print_title(" STABLE PINS ") 77 | path_lists = trace_graph(state.graph) 78 | for k in sorted(state.mapping): 79 | print(state.mapping[k].as_line(include_hashes=False)) 80 | paths = path_lists[k] 81 | for path in paths: 82 | if path == [None]: 83 | print(' User requirement') 84 | continue 85 | print(' ', end='') 86 | for v in reversed(path[1:]): 87 | line = state.mapping[v].as_line(include_hashes=False) 88 | print(' <=', line, end='') 89 | print() 90 | print() 91 | -------------------------------------------------------------------------------- /src/passa/internals/specifiers.py: -------------------------------------------------------------------------------- 1 | # -*- coding=utf-8 -*- 2 | 3 | from __future__ import absolute_import, unicode_literals 4 | 5 | import itertools 6 | import operator 7 | 8 | from packaging.specifiers import SpecifierSet, Specifier 9 | from vistir.misc import dedup 10 | 11 | 12 | def _tuplize_version(version): 13 | return tuple(int(x) for x in version.split(".")) 14 | 15 | 16 | def _format_version(version): 17 | return ".".join(str(i) for i in version) 18 | 19 | 20 | # Prefer [x,y) ranges. 
21 | REPLACE_RANGES = {">": ">=", "<=": "<"}
22 | 
23 | 
24 | def _format_pyspec(specifier):
25 |     if isinstance(specifier, str):
26 |         if not any(op in specifier for op in Specifier._operators.keys()):
27 |             specifier = "=={0}".format(specifier)
28 |         specifier = Specifier(specifier)
29 |     if specifier.operator == "==" and specifier.version.endswith(".*"):
30 |         specifier = Specifier("=={0}".format(specifier.version[:-2]))
31 |     try:
32 |         op = REPLACE_RANGES[specifier.operator]
33 |     except KeyError:
34 |         return specifier
35 |     version = specifier.version.replace(".*", "")
36 |     curr_tuple = _tuplize_version(version)
37 |     try:
38 |         next_tuple = (curr_tuple[0], curr_tuple[1] + 1)
39 |     except IndexError:
40 |         next_tuple = (curr_tuple[0], 1)
41 |     specifier = Specifier("{0}{1}".format(op, _format_version(next_tuple)))
42 |     return specifier
43 | 
44 | 
45 | def _get_specs(specset):
46 |     if isinstance(specset, Specifier):
47 |         specset = str(specset)
48 |     if isinstance(specset, str):
49 |         specset = SpecifierSet(specset.replace(".*", ""))
50 |     return [
51 |         (spec._spec[0], _tuplize_version(spec._spec[1]))
52 |         for spec in getattr(specset, "_specs", [])
53 |     ]
54 | 
55 | 
56 | def _group_by_op(specs):
57 |     specs = [_get_specs(x) for x in list(specs)]
58 |     flattened = [(op, version) for spec in specs for op, version in spec]
59 |     specs = sorted(flattened, key=operator.itemgetter(0, 1))
60 |     grouping = itertools.groupby(specs, key=operator.itemgetter(0))
61 |     return grouping
62 | 
63 | 
64 | def cleanup_pyspecs(specs, joiner="or"):
65 |     specs = {_format_pyspec(spec) for spec in specs}
66 |     # for != operator we want to group by version
67 |     # if all are consecutive, join as a list
68 |     results = set()
69 |     for op, versions in _group_by_op(specs):
70 |         versions = [version[1] for version in versions]
71 |         versions = sorted(dedup(versions))
72 |         # if we are doing an or operation, we need to use the min for >=
73 |         # this way OR(>=2.6, >=2.7, >=3.6) picks >=2.6
74 |         # if we do an AND operation we need to use MAX to be more selective
75 |         if op in (">", ">="):
76 |             if joiner == "or":
77 |                 results.add((op, _format_version(min(versions))))
78 |             else:
79 |                 results.add((op, _format_version(max(versions))))
80 |         # we use inverse logic here so we will take the max value if we are
81 |         # using OR but the min value if we are using AND
82 |         elif op in ("<=", "<"):
83 |             if joiner == "or":
84 |                 results.add((op, _format_version(max(versions))))
85 |             else:
86 |                 results.add((op, _format_version(min(versions))))
87 |         # leave these the same no matter what operator we use
88 |         elif op in ("!=", "==", "~="):
89 |             version_list = sorted(
90 |                 "{0}".format(_format_version(version))
91 |                 for version in versions
92 |             )
93 |             version = ", ".join(version_list)
94 |             if len(version_list) == 1:
95 |                 results.add((op, version))
96 |             elif op == "!=":
97 |                 results.add(("not in", version))
98 |             elif op == "==":
99 |                 results.add(("in", version))
100 |             else:
101 |                 # Multiple "~=" entries cannot be collapsed into a single
102 |                 # range; keep each specifier as-is.
103 |                 for v in version_list:
104 |                     results.add((op, v))
105 |         else:
106 |             # Unknown operator (e.g. "==="); pass the specs through.
107 |             for version in versions:
108 |                 results.add((op, _format_version(version)))
109 |     return results
110 | 
111 | 
112 | def pyspec_from_markers(marker):
113 |     if marker._markers[0][0].value != 'python_version':
114 |         return
115 |     op = marker._markers[0][1].value
116 |     version = marker._markers[0][2].value
117 |     specset = set()
118 |     if op == "in":
119 |         specset.update(
120 |             Specifier("=={0}".format(v.strip()))
121 |             for v in version.split(",")
122 |         )
123 |     elif op == "not in":
124 |         specset.update(
125 |             Specifier("!={0}".format(v.strip()))
126 |             for v in version.split(",")
127 |         )
128 |     else:
129 |         specset.add(Specifier("".join([op, version])))
130 |     if specset:
131 |         return specset
132 |     return None
133 | 
--------------------------------------------------------------------------------
/src/passa/internals/traces.py:
--------------------------------------------------------------------------------
1 | # -*- coding=utf-8 -*-
2 | 
3 | from __future__ import absolute_import, unicode_literals
4 | 
5 | 
6 | def _trace_visit_vertex(graph, current, target, visited, path, paths):
7 |     if current == target:
8 |         paths.append(path)
9 |         return
10 |     for v in graph.iter_children(current):
11 |         if v == current or v in visited:
12 |             continue
13 |         next_path = path + [current]
14 |         next_visited = visited | {current}
15 |         _trace_visit_vertex(graph, v, target, next_visited, next_path, paths)
16 | 
17 | 
18 | def trace_graph(graph):
19 |     """Build a collection of "traces" for each package.
20 | 
21 |     A trace is a list of names that eventually leads to the package. For
22 |     example, if A and B are root dependencies, A depends on C and D, B
23 |     depends on C, and C depends on D, the return value would be like::
24 | 
25 |         {
26 |             None: [],
27 |             "A": [[None]],
28 |             "B": [[None]],
29 |             "C": [[None, "A"], [None, "B"]],
30 |             "D": [[None, "A"], [None, "A", "C"], [None, "B", "C"]],
31 |         }
32 |     """
33 |     result = {None: []}
34 |     for vertex in graph:
35 |         result[vertex] = []
36 |         for root in graph.iter_children(None):
37 |             paths = []
38 |             _trace_visit_vertex(graph, root, vertex, {None}, [None], paths)
39 |             result[vertex].extend(paths)
40 |     return result
41 | 
--------------------------------------------------------------------------------
/src/passa/internals/utils.py:
--------------------------------------------------------------------------------
1 | # -*- coding=utf-8 -*-
2 | 
3 | from __future__ import absolute_import, unicode_literals
4 | 
5 | 
6 | def identify_requirment(r):
7 |     """Produce an identifier for a requirement to use in the resolver.
8 | 
9 |     Note that we are treating the same package with different extras as
10 |     distinct. This allows semantics like "I only want this extra in
11 |     development, not production".
12 | 
13 |     This also makes the resolver's implementation much simpler, with the minor
14 |     costs of possibly needing a few extra resolution steps if we happen to have
15 |     the same package appearing multiple times.
16 |     """
17 |     return "{0}{1}".format(r.normalized_name, r.extras_as_pip)
18 | 
19 | 
20 | def get_pinned_version(ireq):
21 |     """Get the pinned version of an InstallRequirement.
22 | 
23 |     An InstallRequirement is considered pinned if:
24 | 
25 |     - Is not editable
26 |     - It has exactly one specifier
27 |     - That specifier is "=="
28 |     - The version does not contain a wildcard
29 | 
30 |     Examples:
31 |         django==1.8   # pinned
32 |         django>1.8    # NOT pinned
33 |         django~=1.8   # NOT pinned
34 |         django==1.*   # NOT pinned
35 | 
36 |     Raises `TypeError` if the input is not a valid InstallRequirement, or
37 |     `ValueError` if the InstallRequirement is not pinned.
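
    Example, treating the input as pip's InstallRequirement::

        >>> get_pinned_version(InstallRequirement.from_line("django==1.8"))
        '1.8'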
38 | """ 39 | try: 40 | specifier = ireq.specifier 41 | except AttributeError: 42 | raise TypeError("Expected InstallRequirement, not {}".format( 43 | type(ireq).__name__, 44 | )) 45 | 46 | if ireq.editable: 47 | raise ValueError("InstallRequirement is editable") 48 | if not specifier: 49 | raise ValueError("InstallRequirement has no version specification") 50 | if len(specifier._specs) != 1: 51 | raise ValueError("InstallRequirement has multiple specifications") 52 | 53 | op, version = next(iter(specifier._specs))._spec 54 | if op not in ('==', '===') or version.endswith('.*'): 55 | raise ValueError("InstallRequirement not pinned (is {0!r})".format( 56 | op + version, 57 | )) 58 | 59 | return version 60 | 61 | 62 | def is_pinned(ireq): 63 | """Returns whether an InstallRequirement is a "pinned" requirement. 64 | 65 | An InstallRequirement is considered pinned if: 66 | 67 | - Is not editable 68 | - It has exactly one specifier 69 | - That specifier is "==" 70 | - The version does not contain a wildcard 71 | 72 | Examples: 73 | django==1.8 # pinned 74 | django>1.8 # NOT pinned 75 | django~=1.8 # NOT pinned 76 | django==1.* # NOT pinned 77 | """ 78 | try: 79 | get_pinned_version(ireq) 80 | except (TypeError, ValueError): 81 | return False 82 | return True 83 | 84 | 85 | def filter_sources(requirement, sources): 86 | """Returns a filtered list of sources for this requirement. 87 | 88 | This considers the index specified by the requirement, and returns only 89 | matching source entries if there is at least one. 90 | """ 91 | if not sources or not requirement.index: 92 | return sources 93 | filtered_sources = [ 94 | source for source in sources 95 | if source.get("name") == requirement.index 96 | ] 97 | return filtered_sources or sources 98 | 99 | 100 | def get_allow_prereleases(requirement, global_setting): 101 | # TODO: Implement per-package prereleases flag. (pypa/pipenv#1696) 102 | return global_setting 103 | 104 | 105 | def are_requirements_equal(this, that): 106 | return ( 107 | this.as_line(include_hashes=False) == 108 | that.as_line(include_hashes=False) 109 | ) 110 | 111 | 112 | def strip_extras(requirement): 113 | """Returns a new requirement object with extras removed. 114 | """ 115 | line = requirement.as_line() 116 | new = type(requirement).from_line(line) 117 | new.extras = None 118 | return new 119 | -------------------------------------------------------------------------------- /src/passa/models/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/sarugaku/passa/bf5e1dbba15363a5f705381f6fe2f86424d7e1ff/src/passa/models/__init__.py -------------------------------------------------------------------------------- /src/passa/models/caches.py: -------------------------------------------------------------------------------- 1 | # -*- coding=utf-8 -*- 2 | 3 | from __future__ import absolute_import, unicode_literals 4 | 5 | import copy 6 | import hashlib 7 | import json 8 | import os 9 | import sys 10 | 11 | import appdirs 12 | import pip_shims 13 | import requests 14 | import vistir 15 | 16 | from ..internals._pip_shims import VCS_SUPPORT 17 | from ..internals.utils import get_pinned_version 18 | 19 | 20 | CACHE_DIR = os.environ.get("PASSA_CACHE_DIR", appdirs.user_cache_dir("passa")) 21 | 22 | 23 | class HashCache(pip_shims.SafeFileCache): 24 | """Caches hashes of PyPI artifacts so we do not need to re-download them. 
25 | 
26 |     Hashes are only cached when the URL appears to contain a hash in it, and
27 |     the cache key includes the hash value returned from the server. This
28 |     ought to avoid issues where the location on the server changes.
29 |     """
30 |     def __init__(self, *args, **kwargs):
31 |         session = kwargs.pop('session', requests.session())
32 |         self.session = session
33 |         kwargs.setdefault('directory', os.path.join(CACHE_DIR, 'hash-cache'))
34 |         super(HashCache, self).__init__(*args, **kwargs)
35 | 
36 |     def get_hash(self, location):
37 |         # If there is no location hash (e.g., md5, sha256), we don't want
38 |         # to store it.
39 |         hash_value = None
40 |         orig_scheme = location.scheme
41 |         new_location = copy.deepcopy(location)
42 |         if orig_scheme in VCS_SUPPORT.all_schemes:
43 |             new_location.url = new_location.url.split("+", 1)[-1]
44 |         can_hash = new_location.hash
45 |         if can_hash:
46 |             # hash url WITH fragment
47 |             hash_value = self.get(new_location.url)
48 |         if not hash_value:
49 |             hash_value = self._get_file_hash(new_location)
50 |             hash_value = hash_value.encode('utf8')
51 |         if can_hash:
52 |             self.set(new_location.url, hash_value)
53 |         return hash_value.decode('utf8')
54 | 
55 |     def _get_file_hash(self, location):
56 |         h = hashlib.new(pip_shims.FAVORITE_HASH)
57 |         with vistir.open_file(location, self.session) as fp:
58 |             for chunk in iter(lambda: fp.read(8096), b""):
59 |                 h.update(chunk)
60 |         return ":".join([h.name, h.hexdigest()])
61 | 
62 | 
63 | # pip-tools's dependency cache implementation.
64 | class CorruptCacheError(Exception):
65 |     def __init__(self, path):
66 |         self.path = path
67 | 
68 |     def __str__(self):
69 |         lines = [
70 |             'The dependency cache seems to have been corrupted.',
71 |             'Inspect, or delete, the following file:',
72 |             '  {}'.format(self.path),
73 |         ]
74 |         return os.linesep.join(lines)
75 | 
76 | 
77 | def _key_from_req(req):
78 |     """Get an all-lowercase version of the requirement's name."""
79 |     if hasattr(req, 'key'):
80 |         # from pkg_resources, such as installed dists for pip-sync
81 |         key = req.key
82 |     else:
83 |         # from packaging, such as install requirements from requirements.txt
84 |         key = req.name
85 | 
86 |     key = key.replace('_', '-').lower()
87 |     return key
88 | 
89 | 
90 | def _read_cache_file(cache_file_path):
91 |     with open(cache_file_path, 'r') as cache_file:
92 |         try:
93 |             doc = json.load(cache_file)
94 |         except ValueError:
95 |             raise CorruptCacheError(cache_file_path)
96 | 
97 |     # Check version and load the contents
98 |     assert doc['__format__'] == 1, 'Unknown cache file format'
99 |     return doc['dependencies']
100 | 
101 | 
102 | class _JSONCache(object):
103 |     """A persistent cache backed by a JSON file.
104 | 
105 |     The cache file is written to the appropriate user cache dir for the
106 |     current platform, e.g.
107 | 
108 |         ~/.cache/passa/depcache-pyX.Y.json
109 | 
110 |     Where X.Y indicates the Python version.
111 |     """
112 |     filename_format = None
113 | 
114 |     def __init__(self, cache_dir=CACHE_DIR):
115 |         vistir.mkdir_p(cache_dir)
116 |         python_version = ".".join(str(digit) for digit in sys.version_info[:2])
117 |         cache_filename = self.filename_format.format(
118 |             python_version=python_version,
119 |         )
120 |         self._cache_file = os.path.join(cache_dir, cache_filename)
121 |         self._cache = None
122 | 
123 |     @property
124 |     def cache(self):
125 |         """The dictionary that is the actual in-memory cache.
126 | 
127 |         This property lazily loads the cache from disk.
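
        The loaded mapping is keyed by package name, then by
        "version[extras]" (see `as_cache_key()` below); for example, with
        values elided::

            {"ipython": {"2.1.0": [...]}}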
128 | """ 129 | if self._cache is None: 130 | self.read_cache() 131 | return self._cache 132 | 133 | def as_cache_key(self, ireq): 134 | """Given a requirement, return its cache key. 135 | 136 | This behavior is a little weird in order to allow backwards 137 | compatibility with cache files. For a requirement without extras, this 138 | will return, for example:: 139 | 140 | ("ipython", "2.1.0") 141 | 142 | For a requirement with extras, the extras will be comma-separated and 143 | appended to the version, inside brackets, like so:: 144 | 145 | ("ipython", "2.1.0[nbconvert,notebook]") 146 | """ 147 | extras = tuple(sorted(ireq.extras)) 148 | if not extras: 149 | extras_string = "" 150 | else: 151 | extras_string = "[{}]".format(",".join(extras)) 152 | name = _key_from_req(ireq.req) 153 | version = get_pinned_version(ireq) 154 | return name, "{}{}".format(version, extras_string) 155 | 156 | def read_cache(self): 157 | """Reads the cached contents into memory. 158 | """ 159 | if os.path.exists(self._cache_file): 160 | self._cache = _read_cache_file(self._cache_file) 161 | else: 162 | self._cache = {} 163 | 164 | def write_cache(self): 165 | """Writes the cache to disk as JSON. 166 | """ 167 | doc = { 168 | '__format__': 1, 169 | 'dependencies': self._cache, 170 | } 171 | with open(self._cache_file, 'w') as f: 172 | json.dump(doc, f, sort_keys=True) 173 | 174 | def clear(self): 175 | self._cache = {} 176 | self.write_cache() 177 | 178 | def __contains__(self, ireq): 179 | pkgname, pkgversion_and_extras = self.as_cache_key(ireq) 180 | return pkgversion_and_extras in self.cache.get(pkgname, {}) 181 | 182 | def __getitem__(self, ireq): 183 | pkgname, pkgversion_and_extras = self.as_cache_key(ireq) 184 | return self.cache[pkgname][pkgversion_and_extras] 185 | 186 | def __setitem__(self, ireq, values): 187 | pkgname, pkgversion_and_extras = self.as_cache_key(ireq) 188 | self.cache.setdefault(pkgname, {}) 189 | self.cache[pkgname][pkgversion_and_extras] = values 190 | self.write_cache() 191 | 192 | def __delitem__(self, ireq): 193 | pkgname, pkgversion_and_extras = self.as_cache_key(ireq) 194 | try: 195 | del self.cache[pkgname][pkgversion_and_extras] 196 | except KeyError: 197 | return 198 | self.write_cache() 199 | 200 | def get(self, ireq, default=None): 201 | pkgname, pkgversion_and_extras = self.as_cache_key(ireq) 202 | return self.cache.get(pkgname, {}).get(pkgversion_and_extras, default) 203 | 204 | 205 | class DependencyCache(_JSONCache): 206 | """Cache the dependency of cancidates. 207 | """ 208 | filename_format = "depcache-py{python_version}.json" 209 | 210 | 211 | class RequiresPythonCache(_JSONCache): 212 | """Cache a candidate's Requires-Python information. 
213 | """ 214 | filename_format = "pyreqcache-py{python_version}.json" 215 | -------------------------------------------------------------------------------- /src/passa/models/lockers.py: -------------------------------------------------------------------------------- 1 | # -*- coding=utf-8 -*- 2 | 3 | from __future__ import absolute_import, unicode_literals 4 | 5 | import itertools 6 | 7 | import resolvelib 8 | 9 | import plette 10 | import requirementslib 11 | import vistir 12 | 13 | from ..internals.hashes import get_hashes 14 | from ..internals.reporters import StdOutReporter 15 | from ..internals.traces import trace_graph 16 | from ..internals.utils import identify_requirment 17 | from .caches import HashCache 18 | from .metadata import set_metadata 19 | from .providers import BasicProvider, EagerUpgradeProvider, PinReuseProvider 20 | 21 | 22 | def _get_requirements(model, section_name): 23 | """Produce a mapping of identifier: requirement from the section. 24 | """ 25 | if not model: 26 | return {} 27 | return {identify_requirment(r): r for r in ( 28 | requirementslib.Requirement.from_pipfile(name, package._data) 29 | for name, package in model.get(section_name, {}).items() 30 | )} 31 | 32 | 33 | def _get_requires_python(pipfile): 34 | try: 35 | requires = pipfile.requires 36 | except AttributeError: 37 | return "" 38 | try: 39 | return requires.python_full_version 40 | except AttributeError: 41 | pass 42 | try: 43 | return requires.python_version 44 | except AttributeError: 45 | return "" 46 | 47 | 48 | def _collect_derived_entries(state, traces, identifiers): 49 | """Produce a mapping containing all candidates derived from `identifiers`. 50 | 51 | `identifiers` should provide a collection of requirement identifications 52 | from a section (i.e. `packages` or `dev-packages`). This function uses 53 | `trace` to filter out candidates in the state that are present because of 54 | an entry in that collection. 55 | """ 56 | identifiers = set(identifiers) 57 | if not identifiers: 58 | return {} 59 | 60 | entries = {} 61 | extras = {} 62 | for identifier, requirement in state.mapping.items(): 63 | routes = {trace[1] for trace in traces[identifier] if len(trace) > 1} 64 | if identifier not in identifiers and not (identifiers & routes): 65 | continue 66 | name = requirement.normalized_name 67 | if requirement.extras: 68 | # Aggregate extras from multiple routes so we can produce their 69 | # union in the lock file. (sarugaku/passa#24) 70 | try: 71 | extras[name].extend(requirement.extras) 72 | except KeyError: 73 | extras[name] = list(requirement.extras) 74 | entries[name] = next(iter(requirement.as_pipfile().values())) 75 | for name, ext in extras.items(): 76 | entries[name]["extras"] = ext 77 | 78 | return entries 79 | 80 | 81 | class AbstractLocker(object): 82 | """Helper class to produce a new lock file for a project. 83 | 84 | This is not intended for instantiation. You should use one of its concrete 85 | subclasses instead. 
The class contains logic to:
86 | 
87 |     * Prepare a project for locking
88 |     * Perform the actual resolver invocation
89 |     * Convert resolver output into lock file format
90 |     * Update the project to have the new lock file
91 |     """
92 |     def __init__(self, project):
93 |         self.project = project
94 |         self.default_requirements = _get_requirements(
95 |             project.pipfile, "packages",
96 |         )
97 |         self.develop_requirements = _get_requirements(
98 |             project.pipfile, "dev-packages",
99 |         )
100 | 
101 |         # This comprehension dance ensures we merge packages from both
102 |         # sections, and definitions in the default section win.
103 |         self.requirements = {k: r for k, r in itertools.chain(
104 |             self.develop_requirements.items(),
105 |             self.default_requirements.items(),
106 |         )}.values()
107 | 
108 |         self.sources = [s._data.copy() for s in project.pipfile.sources]
109 |         self.allow_prereleases = bool(
110 |             project.pipfile.get("pipenv", {}).get("allow_prereleases", False),
111 |         )
112 |         self.requires_python = _get_requires_python(project.pipfile)
113 | 
114 |     def __repr__(self):
115 |         return "<{0} @ {1!r}>".format(type(self).__name__, self.project.root)
116 | 
117 |     def get_provider(self):
118 |         raise NotImplementedError
119 | 
120 |     def get_reporter(self):
121 |         # TODO: Build SpinnerReporter, and use this only in verbose mode.
122 |         return StdOutReporter(self.requirements)
123 | 
124 |     def lock(self):
125 |         """Lock specified (abstract) requirements into (concrete) candidates.
126 | 
127 |         The locking procedure consists of four stages:
128 | 
129 |         * Resolve versions and dependency graph (powered by ResolveLib).
130 |         * Walk the graph to determine "why" each candidate came to be, i.e.
131 |           what top-level requirements result in a given candidate.
132 |         * Populate hashes for resolved candidates.
133 |         * Populate markers based on dependency specifications of each
134 |           candidate, and the dependency graph.
135 |         """
136 |         provider = self.get_provider()
137 |         reporter = self.get_reporter()
138 |         resolver = resolvelib.Resolver(provider, reporter)
139 | 
140 |         with vistir.cd(self.project.root):
141 |             state = resolver.resolve(self.requirements)
142 | 
143 |         traces = trace_graph(state.graph)
144 | 
145 |         hash_cache = HashCache()
146 |         for r in state.mapping.values():
147 |             if not r.hashes:
148 |                 r.hashes = get_hashes(hash_cache, r)
149 | 
150 |         set_metadata(
151 |             state.mapping, traces,
152 |             provider.fetched_dependencies,
153 |             provider.collected_requires_pythons,
154 |         )
155 | 
156 |         lockfile = plette.Lockfile.with_meta_from(self.project.pipfile)
157 |         lockfile["default"] = _collect_derived_entries(
158 |             state, traces, self.default_requirements,
159 |         )
160 |         lockfile["develop"] = _collect_derived_entries(
161 |             state, traces, self.develop_requirements,
162 |         )
163 |         self.project.lockfile = lockfile
164 | 
165 | 
166 | class BasicLocker(AbstractLocker):
167 |     """Basic concrete locker.
168 | 
169 |     This takes a project, generates a lock file from its Pipfile, and sets
170 |     the lock file property to the project.
171 |     """
172 |     def get_provider(self):
173 |         return BasicProvider(
174 |             self.requirements, self.sources,
175 |             self.requires_python, self.allow_prereleases,
176 |         )
177 | 
178 | 
179 | class PinReuseLocker(AbstractLocker):
180 |     """A specialized locker to handle re-locking based on existing pins.
181 | 
182 |     See :class:`.providers.PinReuseProvider` for more information.
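
    Pins are collected from both lockfile sections (develop first, so a
    default-section pin wins), their markers are cleared, and the result is
    handed to the provider as preferred candidates.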
183 | """ 184 | def __init__(self, project): 185 | super(PinReuseLocker, self).__init__(project) 186 | pins = _get_requirements(project.lockfile, "develop") 187 | pins.update(_get_requirements(project.lockfile, "default")) 188 | for pin in pins.values(): 189 | pin.markers = None 190 | self.preferred_pins = pins 191 | 192 | def get_provider(self): 193 | return PinReuseProvider( 194 | self.preferred_pins, self.requirements, self.sources, 195 | self.requires_python, self.allow_prereleases, 196 | ) 197 | 198 | 199 | class EagerUpgradeLocker(PinReuseLocker): 200 | """A specialized locker to handle the "eager" upgrade strategy. 201 | 202 | See :class:`.providers.EagerUpgradeProvider` for more 203 | information. 204 | """ 205 | def __init__(self, tracked_names, *args, **kwargs): 206 | super(EagerUpgradeLocker, self).__init__(*args, **kwargs) 207 | self.tracked_names = tracked_names 208 | 209 | def get_provider(self): 210 | return EagerUpgradeProvider( 211 | self.tracked_names, self.preferred_pins, 212 | self.requirements, self.sources, 213 | self.requires_python, self.allow_prereleases, 214 | ) 215 | -------------------------------------------------------------------------------- /src/passa/models/metadata.py: -------------------------------------------------------------------------------- 1 | # -*- coding=utf-8 -*- 2 | 3 | from __future__ import absolute_import, unicode_literals 4 | 5 | import copy 6 | import itertools 7 | 8 | import packaging.markers 9 | import packaging.specifiers 10 | import vistir 11 | import vistir.misc 12 | 13 | from ..internals.markers import get_without_extra 14 | from ..internals.specifiers import cleanup_pyspecs, pyspec_from_markers 15 | 16 | 17 | def dedup_markers(s): 18 | # TODO: Implement better logic. 19 | deduped = sorted(vistir.misc.dedup(s)) 20 | return deduped 21 | 22 | 23 | class MetaSet(object): 24 | """Representation of a "metadata set". 25 | 26 | This holds multiple metadata representaions. Each metadata representation 27 | includes a marker, and a specifier set of Python versions required. 28 | """ 29 | def __init__(self): 30 | self.markerset = frozenset() 31 | self.pyspecset = packaging.specifiers.SpecifierSet() 32 | 33 | def __repr__(self): 34 | return "MetaSet(markerset={0!r}, pyspecset={1!r})".format( 35 | ",".join(sorted(self.markerset)), str(self.pyspecset), 36 | ) 37 | 38 | def __str__(self): 39 | pyspecs = set() 40 | markerset = set() 41 | for m in self.markerset: 42 | marker_specs = pyspec_from_markers(packaging.markers.Marker(m)) 43 | if marker_specs: 44 | pyspecs.add(marker_specs) 45 | else: 46 | markerset.add(m) 47 | if pyspecs: 48 | self.pyspecset._specs &= pyspecs 49 | self.markerset = frozenset(markerset) 50 | return " and ".join(dedup_markers(itertools.chain( 51 | # Make sure to always use the same quotes so we can dedup properly. 52 | ( 53 | "{0}".format(ms) if " or " in ms else ms 54 | for ms in (str(m).replace('"', "'") for m in self.markerset) 55 | ), 56 | ( 57 | "python_version {0[0]} '{0[1]}'".format(spec) 58 | for spec in cleanup_pyspecs(self.pyspecset) 59 | ), 60 | ))) 61 | 62 | def __bool__(self): 63 | return bool(self.markerset or self.pyspecset) 64 | 65 | def __nonzero__(self): # Python 2. 
66 |         return self.__bool__()
67 | 
68 |     def __or__(self, pair):
69 |         marker, specset = pair
70 |         markerset = set(self.markerset)
71 |         if marker:
72 |             marker_specs = pyspec_from_markers(marker)
73 |             if not marker_specs:
74 |                 markerset.add(str(marker))
75 |             else:
76 |                 specset._specs &= marker_specs
77 |         metaset = MetaSet()
78 |         metaset.markerset = frozenset(markerset)
79 |         # TODO: Implement some logic to clean up dups like '3.0.*' and '3.0'.
80 |         metaset.pyspecset &= self.pyspecset & specset
81 |         return metaset
82 | 
83 | 
84 | def _build_metasets(dependencies, pythons, key, trace, all_metasets):
85 |     all_parent_metasets = []
86 |     for route in trace:
87 |         parent = route[-1]
88 |         try:
89 |             parent_metasets = all_metasets[parent]
90 |         except KeyError:  # Parent not calculated yet. Wait for it.
91 |             return
92 |         all_parent_metasets.append((parent, parent_metasets))
93 | 
94 |     metaset_iters = []
95 |     for parent, parent_metasets in all_parent_metasets:
96 |         r = dependencies[parent][key]
97 |         python = pythons[key]
98 |         metaset = (
99 |             get_without_extra(r.markers),
100 |             packaging.specifiers.SpecifierSet(python),
101 |         )
102 |         metaset_iters.append([
103 |             parent_metaset | metaset
104 |             for parent_metaset in parent_metasets
105 |         ])  # A list, not a generator: `metaset` is rebound each iteration.
106 |     return list(itertools.chain.from_iterable(metaset_iters))
107 | 
108 | 
109 | def _calculate_metasets_mapping(dependencies, pythons, traces):
110 |     all_metasets = {None: [MetaSet()]}
111 | 
112 |     del traces[None]
113 |     while traces:
114 |         new_metasets = {}
115 |         for key, trace in traces.items():
116 |             assert key not in all_metasets, key  # Sanity check for debug.
117 |             metasets = _build_metasets(
118 |                 dependencies, pythons, key, trace, all_metasets,
119 |             )
120 |             if metasets is None:
121 |                 continue
122 |             new_metasets[key] = metasets
123 |         if not new_metasets:
124 |             break  # No progress? Deadlocked. Give up.
125 |         all_metasets.update(new_metasets)
126 |         for key in new_metasets:
127 |             del traces[key]
128 | 
129 |     return all_metasets
130 | 
131 | 
132 | def _format_metasets(metasets):
133 |     # If there is an unconditional route, this needs to be unconditional.
134 |     if not metasets or not all(metasets):
135 |         return None
136 | 
137 |     # This extra str(Marker()) call helps simplify the expression.
138 |     return str(packaging.markers.Marker(" or ".join(
139 |         "({0})".format(s) if " and " in s else s
140 |         for s in dedup_markers(str(metaset) for metaset in metasets
141 |                                if metaset)
142 |     )))
143 | 
144 | 
145 | def set_metadata(candidates, traces, dependencies, pythons):
146 |     """Add "metadata" to candidates based on the dependency tree.
147 | 
148 |     Metadata for a candidate includes markers and a specifier for Python
149 |     version requirements.
150 | 
151 |     :param candidates: A key-candidate mapping. Candidates in the mapping will
152 |         have their markers set.
153 |     :param traces: A graph trace (produced by `traces.trace_graph`) providing
154 |         information about dependency relationships between candidates.
155 |     :param dependencies: A key-collection mapping containing what dependencies
156 |         each candidate in `candidates` requested.
157 |     :param pythons: A key-str mapping containing Requires-Python information
158 |         of each candidate.
159 | 
160 |     Keys in mappings and entries in the trace are identifiers of a package, as
161 |     implemented by the `identify` method of the resolver's provider.
162 | 
163 |     The candidates are modified in-place.
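
    This mirrors how ``AbstractLocker.lock`` invokes it (sketch)::

        set_metadata(
            state.mapping, trace_graph(state.graph),
            provider.fetched_dependencies,
            provider.collected_requires_pythons,
        )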
164 | """ 165 | metasets_mapping = _calculate_metasets_mapping( 166 | dependencies, pythons, copy.deepcopy(traces), 167 | ) 168 | for key, candidate in candidates.items(): 169 | candidate.markers = _format_metasets(metasets_mapping[key]) 170 | -------------------------------------------------------------------------------- /src/passa/models/projects.py: -------------------------------------------------------------------------------- 1 | # -*- coding=utf-8 -*- 2 | 3 | from __future__ import absolute_import, unicode_literals 4 | 5 | import collections 6 | import io 7 | import os 8 | 9 | import attr 10 | import packaging.markers 11 | import packaging.utils 12 | import plette 13 | import plette.models 14 | import six 15 | import tomlkit 16 | 17 | 18 | SectionDifference = collections.namedtuple("SectionDifference", [ 19 | "inthis", "inthat", 20 | ]) 21 | FileDifference = collections.namedtuple("FileDifference", [ 22 | "default", "develop", 23 | ]) 24 | 25 | 26 | def _are_pipfile_entries_equal(a, b): 27 | a = {k: v for k, v in a.items() if k not in ("markers", "hashes", "hash")} 28 | b = {k: v for k, v in b.items() if k not in ("markers", "hashes", "hash")} 29 | if a != b: 30 | return False 31 | try: 32 | marker_eval_a = packaging.markers.Marker(a["markers"]).evaluate() 33 | except (AttributeError, KeyError, TypeError, ValueError): 34 | marker_eval_a = True 35 | try: 36 | marker_eval_b = packaging.markers.Marker(b["markers"]).evaluate() 37 | except (AttributeError, KeyError, TypeError, ValueError): 38 | marker_eval_b = True 39 | return marker_eval_a == marker_eval_b 40 | 41 | 42 | DEFAULT_NEWLINES = "\n" 43 | 44 | 45 | def preferred_newlines(f): 46 | if isinstance(f.newlines, six.text_type): 47 | return f.newlines 48 | return DEFAULT_NEWLINES 49 | 50 | 51 | @attr.s 52 | class ProjectFile(object): 53 | """A file in the Pipfile project. 
54 | """ 55 | location = attr.ib() 56 | line_ending = attr.ib() 57 | model = attr.ib() 58 | 59 | @classmethod 60 | def read(cls, location, model_cls, invalid_ok=False): 61 | try: 62 | with io.open(location, encoding="utf-8") as f: 63 | model = model_cls.load(f) 64 | line_ending = preferred_newlines(f) 65 | except Exception: 66 | if not invalid_ok: 67 | raise 68 | model = None 69 | line_ending = DEFAULT_NEWLINES 70 | return cls(location=location, line_ending=line_ending, model=model) 71 | 72 | def write(self): 73 | kwargs = {"encoding": "utf-8", "newline": self.line_ending} 74 | with io.open(self.location, "w", **kwargs) as f: 75 | self.model.dump(f) 76 | 77 | def dumps(self): 78 | strio = six.StringIO() 79 | self.model.dump(strio) 80 | return strio.getvalue() 81 | 82 | 83 | @attr.s 84 | class Project(object): 85 | 86 | root = attr.ib() 87 | _p = attr.ib(init=False) 88 | _l = attr.ib(init=False) 89 | 90 | def __attrs_post_init__(self): 91 | self.root = root = os.path.abspath(self.root) 92 | self._p = ProjectFile.read( 93 | os.path.join(root, "Pipfile"), 94 | plette.Pipfile, 95 | ) 96 | self._l = ProjectFile.read( 97 | os.path.join(root, "Pipfile.lock"), 98 | plette.Lockfile, 99 | invalid_ok=True, 100 | ) 101 | 102 | @property 103 | def pipfile(self): 104 | return self._p.model 105 | 106 | @property 107 | def pipfile_location(self): 108 | return self._p.location 109 | 110 | @property 111 | def lockfile(self): 112 | return self._l.model 113 | 114 | @property 115 | def lockfile_location(self): 116 | return self._l.location 117 | 118 | @lockfile.setter 119 | def lockfile(self, new): 120 | self._l.model = new 121 | 122 | def is_synced(self): 123 | return self.lockfile and self.lockfile.is_up_to_date(self.pipfile) 124 | 125 | def _get_pipfile_section(self, develop, insert=True): 126 | name = "dev-packages" if develop else "packages" 127 | try: 128 | section = self.pipfile[name] 129 | except KeyError: 130 | section = plette.models.PackageCollection(tomlkit.table()) 131 | if insert: 132 | self.pipfile[name] = section 133 | return section 134 | 135 | def contains_key_in_pipfile(self, key): 136 | sections = [ 137 | self._get_pipfile_section(develop=False, insert=False), 138 | self._get_pipfile_section(develop=True, insert=False), 139 | ] 140 | return any( 141 | (packaging.utils.canonicalize_name(name) == 142 | packaging.utils.canonicalize_name(key)) 143 | for section in sections 144 | for name in section 145 | ) 146 | 147 | def add_line_to_pipfile(self, line, develop): 148 | from requirementslib import Requirement 149 | requirement = Requirement.from_line(line) 150 | section = self._get_pipfile_section(develop=develop) 151 | key = requirement.normalized_name 152 | entry = next(iter(requirement.as_pipfile().values())) 153 | if isinstance(entry, dict): 154 | # HACK: TOMLKit prefers to expand tables by default, but we 155 | # always want inline tables here. Also tomlkit.inline_table 156 | # does not have `update()`. 
157 |             table = tomlkit.inline_table()
158 |             for k, v in entry.items():
159 |                 table[k] = v
160 |             entry = table
161 |         section[key] = entry
162 | 
163 |     def remove_keys_from_pipfile(self, keys, default, develop):
164 |         keys = {packaging.utils.canonicalize_name(key) for key in keys}
165 |         sections = []
166 |         if default:
167 |             sections.append(self._get_pipfile_section(
168 |                 develop=False, insert=False,
169 |             ))
170 |         if develop:
171 |             sections.append(self._get_pipfile_section(
172 |                 develop=True, insert=False,
173 |             ))
174 |         for section in sections:
175 |             removals = set()
176 |             for name in section:
177 |                 if packaging.utils.canonicalize_name(name) in keys:
178 |                     removals.add(name)
179 |             for key in removals:
180 |                 del section._data[key]
181 | 
182 |     def remove_keys_from_lockfile(self, keys):
183 |         keys = {packaging.utils.canonicalize_name(key) for key in keys}
184 |         removed = False
185 |         for section_name in ("default", "develop"):
186 |             try:
187 |                 section = self.lockfile[section_name]
188 |             except KeyError:
189 |                 continue
190 |             removals = set()
191 |             for name in section:
192 |                 if packaging.utils.canonicalize_name(name) in keys:
193 |                     removals.add(name)
194 |             removed = removed or bool(removals)
195 |             for key in removals:
196 |                 del section._data[key]
197 | 
198 |         if removed:
199 |             # HACK: The lock file no longer represents the Pipfile at this
200 |             # point. Set the hash to an arbitrary invalid value.
201 |             self.lockfile.meta.hash = plette.models.Hash({"__invalid__": ""})
202 | 
203 |     def difference_lockfile(self, lockfile):
204 |         """Generate a difference between the current and given lockfiles.
205 | 
206 |         Returns a 2-tuple containing differences in the default and develop
207 |         sections.
208 | 
209 |         Each element is a 2-tuple of dicts. The first, `inthis`, contains
210 |         entries only present in the current lockfile; the second, `inthat`,
211 |         contains entries only present in the given one.
212 | 
213 |         If a key exists in both this and that, but the values differ, the key
214 |         is present in both dicts, pointing to values from each file.
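
        A sketch of typical use (``other`` is a hypothetical lockfile)::

            diff = project.difference_lockfile(other)
            only_here = diff.default.inthis    # entries unique to this file
            only_there = diff.default.inthat   # entries unique to ``other``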
215 | """ 216 | diff_data = { 217 | "default": SectionDifference({}, {}), 218 | "develop": SectionDifference({}, {}), 219 | } 220 | for section_name, section_diff in diff_data.items(): 221 | try: 222 | this = self.lockfile[section_name]._data 223 | except (KeyError, TypeError): 224 | this = {} 225 | try: 226 | that = lockfile[section_name]._data 227 | except (KeyError, TypeError): 228 | that = {} 229 | for key, this_value in this.items(): 230 | try: 231 | that_value = that[key] 232 | except KeyError: 233 | section_diff.inthis[key] = this_value 234 | continue 235 | if not _are_pipfile_entries_equal(this_value, that_value): 236 | section_diff.inthis[key] = this_value 237 | section_diff.inthat[key] = that_value 238 | for key, that_value in that.items(): 239 | if key not in this: 240 | section_diff.inthat[key] = that_value 241 | return FileDifference(**diff_data) 242 | -------------------------------------------------------------------------------- /src/passa/models/providers.py: -------------------------------------------------------------------------------- 1 | # -*- coding=utf-8 -*- 2 | 3 | from __future__ import absolute_import, print_function, unicode_literals 4 | 5 | import os 6 | 7 | import resolvelib 8 | 9 | from ..internals.candidates import find_candidates 10 | from ..internals.dependencies import get_dependencies 11 | from ..internals.utils import ( 12 | filter_sources, get_allow_prereleases, identify_requirment, strip_extras, 13 | ) 14 | 15 | 16 | PROTECTED_PACKAGE_NAMES = {"pip", "setuptools"} 17 | 18 | 19 | class BasicProvider(resolvelib.AbstractProvider): 20 | """Provider implementation to interface with `requirementslib.Requirement`. 21 | """ 22 | def __init__(self, root_requirements, sources, 23 | requires_python, allow_prereleases): 24 | self.sources = sources 25 | self.requires_python = requires_python 26 | self.allow_prereleases = bool(allow_prereleases) 27 | self.invalid_candidates = set() 28 | 29 | # Remember requirements of each pinned candidate. The resolver calls 30 | # `get_dependencies()` only when it wants to repin, so the last time 31 | # the dependencies we got when it is last called on a package, are 32 | # the set used by the resolver. We use this later to trace how a given 33 | # dependency is specified by a package. 34 | self.fetched_dependencies = {None: { 35 | self.identify(r): r for r in root_requirements 36 | }} 37 | 38 | # Should Pipfile's requires.python_[full_]version be included? 39 | self.collected_requires_pythons = {None: ""} 40 | 41 | def identify(self, dependency): 42 | return identify_requirment(dependency) 43 | 44 | def get_preference(self, resolution, candidates, information): 45 | # TODO: Provide better sorting logic. This simply resolve the ones with 46 | # less choices first. Not sophisticated, but sounds reasonable? 47 | return len(candidates) 48 | 49 | def find_matches(self, requirement): 50 | sources = filter_sources(requirement, self.sources) 51 | candidates = find_candidates( 52 | requirement, sources, self.requires_python, 53 | get_allow_prereleases(requirement, self.allow_prereleases), 54 | ) 55 | return candidates 56 | 57 | def is_satisfied_by(self, requirement, candidate): 58 | # A non-named requirement has exactly one candidate, as implemented in 59 | # `find_matches()`. Since pip does not yet implement URL based lookup 60 | # (PEP 508) yet, it must match unless there are duplicated entries in 61 | # Pipfile. If there is, the user takes the blame. 
62 |         if not requirement.is_named:
63 |             return True
64 | 
65 |         # A non-named candidate can only come from a non-named requirement,
66 |         # which, since pip does not implement URL-based lookup (PEP 508) yet,
67 |         # can only come from Pipfile. Assume the user knows what they're doing,
68 |         # and use it without checking. (sarugaku/passa#34)
69 |         if not candidate.is_named:
70 |             return True
71 | 
72 |         # Optimization: Everything matches if there are no specifiers.
73 |         if not requirement.specifiers:
74 |             return True
75 | 
76 |         # We can't handle old version strings before PEP 440. Drop them all.
77 |         # Practically this shouldn't be a problem if the user is specifying a
78 |         # remotely reasonable dependency not from before 2013.
79 |         candidate_line = candidate.as_line(include_hashes=False)
80 |         if candidate_line in self.invalid_candidates:
81 |             return False
82 |         try:
83 |             version = candidate.get_specifier().version
84 |         except (TypeError, ValueError):
85 |             print('ignoring invalid version from {!r}'.format(candidate_line))
86 |             self.invalid_candidates.add(candidate_line)
87 |             return False
88 | 
89 |         return requirement.as_ireq().specifier.contains(version)
90 | 
91 |     def get_dependencies(self, candidate):
92 |         sources = filter_sources(candidate, self.sources)
93 |         try:
94 |             dependencies, requires_python = get_dependencies(
95 |                 candidate, sources=sources,
96 |             )
97 |         except Exception as e:
98 |             if os.environ.get("PASSA_NO_SUPPRESS_EXCEPTIONS"):
99 |                 raise
100 |             print("failed to get dependencies for {0!r}: {1}".format(
101 |                 candidate.as_line(include_hashes=False), e,
102 |             ))
103 |             dependencies = []
104 |             requires_python = ""
105 |         # Exclude protected packages from the list. This prevents those
106 |         # packages from being locked, unless the user is actually working on
107 |         # them, and explicitly lists them as top-level requirements -- those
108 |         # packages are not added via this code path. (sarugaku/passa#15)
109 |         dependencies = [
110 |             dependency for dependency in dependencies
111 |             if dependency.normalized_name not in PROTECTED_PACKAGE_NAMES
112 |         ]
113 |         if candidate.extras:
114 |             # HACK: If this candidate has extras, add the original candidate
115 |             # (same pinned version, no extras) as its dependency. This ensures
116 |             # the same package with different extras (treated as distinct by
117 |             # the resolver) resolves to the same version. (sarugaku/passa#4)
118 |             dependencies.append(strip_extras(candidate))
119 |         candidate_key = self.identify(candidate)
120 |         self.fetched_dependencies[candidate_key] = {
121 |             self.identify(r): r for r in dependencies
122 |         }
123 |         self.collected_requires_pythons[candidate_key] = requires_python
124 |         return dependencies
125 | 
126 | 
127 | class PinReuseProvider(BasicProvider):
128 |     """A provider that reuses preferred pins if possible.
129 | 
130 |     This is used to implement "add", "remove", and "only-if-needed upgrade",
131 |     where already-pinned candidates in Pipfile.lock should be preferred.
132 |     """
133 |     def __init__(self, preferred_pins, *args, **kwargs):
134 |         super(PinReuseProvider, self).__init__(*args, **kwargs)
135 |         self.preferred_pins = preferred_pins
136 | 
137 |     def find_matches(self, requirement):
138 |         candidates = super(PinReuseProvider, self).find_matches(requirement)
139 |         try:
140 |             # Add the preferred pin. Remember the resolver prefers candidates
141 |             # at the end of the list, so the most preferred should be last.
142 |             candidates.append(self.preferred_pins[self.identify(requirement)])
143 |         except KeyError:
144 |             pass
145 |         return candidates
146 | 
147 | 
148 | class EagerUpgradeProvider(PinReuseProvider):
149 |     """A specialized provider to handle an "eager" upgrade strategy.
150 | 
151 |     An eager upgrade tries to upgrade not only packages specified, but also
152 |     their dependencies (recursively). This contrasts with the
153 |     "only-if-needed" default, which only promises to upgrade the specified
154 |     package, and prevents touching anything else if at all possible.
155 | 
156 |     The provider keeps track of all dependencies of the specified packages
157 |     to upgrade, and frees their pins when it has a chance.
158 |     """
159 |     def __init__(self, tracked_names, *args, **kwargs):
160 |         super(EagerUpgradeProvider, self).__init__(*args, **kwargs)
161 |         self.tracked_names = set(tracked_names)
162 |         for name in tracked_names:
163 |             self.preferred_pins.pop(name, None)
164 | 
165 |         # HACK: Set this special flag to distinguish preferred pins from
166 |         # regular, to tell the resolver to NOT use them for tracked packages.
167 |         for pin in self.preferred_pins.values():
168 |             pin._preferred_by_provider = True
169 | 
170 |     def is_satisfied_by(self, requirement, candidate):
171 |         # If this is a tracked package, steer the resolver away from the
172 |         # preferred pin, and into a "normal" candidate selection process.
173 |         if (self.identify(requirement) in self.tracked_names and
174 |                 getattr(candidate, "_preferred_by_provider", False)):
175 |             return False
176 |         return super(EagerUpgradeProvider, self).is_satisfied_by(
177 |             requirement, candidate,
178 |         )
179 | 
180 |     def get_dependencies(self, candidate):
181 |         # If this package is being tracked for upgrade, remove pins of its
182 |         # dependencies, and start tracking these new packages.
183 |         dependencies = super(EagerUpgradeProvider, self).get_dependencies(
184 |             candidate,
185 |         )
186 |         if self.identify(candidate) in self.tracked_names:
187 |             for dependency in dependencies:
188 |                 name = self.identify(dependency)
189 |                 self.tracked_names.add(name)
190 |                 self.preferred_pins.pop(name, None)
191 |         return dependencies
192 | 
193 |     def get_preference(self, resolution, candidates, information):
194 |         # Resolve tracked packages first so we have a chance to unpin them.
195 |         name = self.identify(candidates[0])
196 |         if name in self.tracked_names:
197 |             return -1
198 |         return len(candidates)
199 | 
--------------------------------------------------------------------------------
/src/passa/models/synchronizers.py:
--------------------------------------------------------------------------------
1 | # -*- coding=utf-8 -*-
2 | 
3 | from __future__ import absolute_import, unicode_literals
4 | 
5 | import collections
6 | import contextlib
7 | import os
8 | import sys
9 | import sysconfig
10 | 
11 | import pkg_resources
12 | 
13 | import packaging.markers
14 | import packaging.version
15 | import requirementslib
16 | 
17 | from ..internals._pip import uninstall, EditableInstaller, WheelInstaller
18 | 
19 | 
20 | def _is_installation_local(name):
21 |     """Check whether the distribution is in the current Python installation.
22 | 
23 |     This is used to distinguish packages seen by a virtual environment. A venv
24 |     may be able to see global packages, but we don't want to mess with them.
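
    (A naive prefix check: a distribution whose install location is under
    ``sys.prefix`` counts as local; anything else, such as global
    site-packages seen from inside a venv, does not.)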
25 | """ 26 | loc = os.path.normcase(pkg_resources.working_set.by_key[name].location) 27 | pre = os.path.normcase(sys.prefix) 28 | return os.path.commonprefix([loc, pre]) == pre 29 | 30 | 31 | def _is_up_to_date(distro, version): 32 | # This is done in strings to avoid type mismatches caused by vendering. 33 | return str(version) == str(packaging.version.parse(distro.version)) 34 | 35 | 36 | GroupCollection = collections.namedtuple("GroupCollection", [ 37 | "uptodate", "outdated", "noremove", "unneeded", 38 | ]) 39 | 40 | 41 | def _group_installed_names(packages): 42 | """Group locally installed packages based on given specifications. 43 | 44 | `packages` is a name-package mapping that are used as baseline to 45 | determine how the installed package should be grouped. 46 | 47 | Returns a 3-tuple of disjoint sets, all containing names of installed 48 | packages: 49 | 50 | * `uptodate`: These match the specifications. 51 | * `outdated`: These installations are specified, but don't match the 52 | specifications in `packages`. 53 | * `unneeded`: These are installed, but not specified in `packages`. 54 | """ 55 | groupcoll = GroupCollection(set(), set(), set(), set()) 56 | 57 | for distro in pkg_resources.working_set: 58 | name = distro.key 59 | try: 60 | package = packages[name] 61 | except KeyError: 62 | groupcoll.unneeded.add(name) 63 | continue 64 | 65 | r = requirementslib.Requirement.from_pipfile(name, package) 66 | if not r.is_named: 67 | # Always mark non-named. I think pip does something similar? 68 | groupcoll.outdated.add(name) 69 | elif not _is_up_to_date(distro, r.get_version()): 70 | groupcoll.outdated.add(name) 71 | else: 72 | groupcoll.uptodate.add(name) 73 | 74 | return groupcoll 75 | 76 | 77 | @contextlib.contextmanager 78 | def _remove_package(name): 79 | if name is None or not _is_installation_local(name): 80 | yield None 81 | return 82 | with uninstall(name, auto_confirm=True, verbose=False) as uninstaller: 83 | yield uninstaller 84 | 85 | 86 | def _get_packages(lockfile, default, develop): 87 | # Don't need to worry about duplicates because only extras can differ. 88 | # Extras don't matter because they only affect dependencies, and we 89 | # don't install dependencies anyway! 90 | packages = {} 91 | if default: 92 | packages.update(lockfile.default._data) 93 | if develop: 94 | packages.update(lockfile.develop._data) 95 | return packages 96 | 97 | 98 | def _build_paths(): 99 | """Prepare paths for distlib.wheel.Wheel to install into. 100 | """ 101 | paths = sysconfig.get_paths() 102 | return { 103 | "prefix": sys.prefix, 104 | "data": paths["data"], 105 | "scripts": paths["scripts"], 106 | "headers": paths["include"], 107 | "purelib": paths["purelib"], 108 | "platlib": paths["platlib"], 109 | } 110 | 111 | 112 | PROTECTED_FROM_CLEAN = {"setuptools", "pip", "wheel"} 113 | 114 | 115 | def _clean(names): 116 | cleaned = set() 117 | for name in names: 118 | if name in PROTECTED_FROM_CLEAN: 119 | continue 120 | with _remove_package(name) as uninst: 121 | if uninst: 122 | cleaned.add(name) 123 | return cleaned 124 | 125 | 126 | class Synchronizer(object): 127 | """Helper class to install packages from a project's lock file. 128 | """ 129 | def __init__(self, project, default, develop, clean_unneeded): 130 | self._root = project.root # Only for repr. 
131 |         self.packages = _get_packages(project.lockfile, default, develop)
132 |         self.sources = project.lockfile.meta.sources._data
133 |         self.paths = _build_paths()
134 |         self.clean_unneeded = clean_unneeded
135 | 
136 |     def __repr__(self):
137 |         return "<{0} @ {1!r}>".format(type(self).__name__, self._root)
138 | 
139 |     def sync(self):
140 |         groupcoll = _group_installed_names(self.packages)
141 | 
142 |         installed = set()
143 |         updated = set()
144 |         cleaned = set()
145 | 
146 |         # TODO: Show a prompt to confirm cleaning. We will need to implement a
147 |         # reporter pattern for this as well.
148 |         if self.clean_unneeded:
149 |             names = _clean(groupcoll.unneeded)
150 |             cleaned.update(names)
151 | 
152 |         # TODO: Specify installation order? (pypa/pipenv#2274)
153 |         installers = []
154 |         for name, package in self.packages.items():
155 |             r = requirementslib.Requirement.from_pipfile(name, package)
156 |             name = r.normalized_name
157 |             if name in groupcoll.uptodate:
158 |                 continue
159 |             markers = r.markers
160 |             if markers and not packaging.markers.Marker(markers).evaluate():
161 |                 continue
162 |             r.markers = None
163 |             if r.editable:
164 |                 installer = EditableInstaller(r)
165 |             else:
166 |                 installer = WheelInstaller(r, self.sources, self.paths)
167 |             try:
168 |                 installer.prepare()
169 |             except Exception as e:
170 |                 if os.environ.get("PASSA_NO_SUPPRESS_EXCEPTIONS"):
171 |                     raise
172 |                 print("failed to prepare {0!r}: {1}".format(
173 |                     r.as_line(include_hashes=False), e,
174 |                 ))
175 |             else:
176 |                 installers.append((name, r, installer))  # r for reporting.
177 | 
178 |         for name, r, installer in installers:
179 |             if name in groupcoll.outdated:
180 |                 name_to_remove = name
181 |             else:
182 |                 name_to_remove = None
183 |             try:
184 |                 with _remove_package(name_to_remove):
185 |                     installer.install()
186 |             except Exception as e:
187 |                 if os.environ.get("PASSA_NO_SUPPRESS_EXCEPTIONS"):
188 |                     raise
189 |                 print("failed to install {0!r}: {1}".format(
190 |                     r.as_line(include_hashes=False), e,
191 |                 ))
192 |                 continue
193 |             if name in groupcoll.outdated or name in groupcoll.noremove:
194 |                 updated.add(name)
195 |             else:
196 |                 installed.add(name)
197 | 
198 |         return installed, updated, cleaned
199 | 
200 | 
201 | class Cleaner(object):
202 |     """Helper class to clean packages not in a project's lock file.
203 |     """
204 |     def __init__(self, project, default, develop):
205 |         self._root = project.root  # Only for repr.
206 | self.packages = _get_packages(project.lockfile, default, develop) 207 | 208 | def __repr__(self): 209 | return "<{0} @ {1!r}>".format(type(self).__name__, self._root) 210 | 211 | def clean(self): 212 | groupcoll = _group_installed_names(self.packages) 213 | cleaned = _clean(groupcoll.unneeded) 214 | return cleaned 215 | -------------------------------------------------------------------------------- /src/passa/operations/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/sarugaku/passa/bf5e1dbba15363a5f705381f6fe2f86424d7e1ff/src/passa/operations/__init__.py -------------------------------------------------------------------------------- /src/passa/operations/lock.py: -------------------------------------------------------------------------------- 1 | # -*- coding=utf-8 -*- 2 | 3 | from __future__ import absolute_import, print_function, unicode_literals 4 | 5 | from resolvelib import NoVersionsAvailable, ResolutionImpossible 6 | 7 | from passa.internals.reporters import print_requirement 8 | 9 | 10 | def lock(locker): 11 | success = False 12 | try: 13 | locker.lock() 14 | except NoVersionsAvailable as e: 15 | print("\nCANNOT RESOLVE. NO CANDIDATES FOUND FOR:") 16 | print("{:>40}".format(e.requirement.as_line(include_hashes=False))) 17 | if e.parent: 18 | line = e.parent.as_line(include_hashes=False) 19 | print("{:>41}".format("(from {})".format(line))) 20 | else: 21 | print("{:>41}".format("(user)")) 22 | except ResolutionImpossible as e: 23 | print("\nCANNOT RESOLVE.\nOFFENDING REQUIREMENTS:") 24 | for r in e.requirements: 25 | print_requirement(r) 26 | else: 27 | success = True 28 | return success 29 | -------------------------------------------------------------------------------- /src/passa/operations/sync.py: -------------------------------------------------------------------------------- 1 | # -*- coding=utf-8 -*- 2 | 3 | from __future__ import absolute_import, print_function, unicode_literals 4 | 5 | 6 | def sync(syncer): 7 | print("Starting synchronization") 8 | installed, updated, cleaned = syncer.sync() 9 | if cleaned: 10 | print("Uninstalled: {}".format(", ".join(sorted(cleaned)))) 11 | if installed: 12 | print("Installed: {}".format(", ".join(sorted(installed)))) 13 | if updated: 14 | print("Updated: {}".format(", ".join(sorted(updated)))) 15 | return True 16 | 17 | 18 | def clean(cleaner): 19 | print("Cleaning") 20 | cleaned = cleaner.clean() 21 | if cleaned: 22 | print("Uninstalled: {}".format(", ".join(sorted(cleaned)))) 23 | return True 24 | -------------------------------------------------------------------------------- /tasks/CHANGELOG.rst.jinja2: -------------------------------------------------------------------------------- 1 | {% for section in sections %} 2 | {% set underline = "-" %} 3 | {% if section %} 4 | {{section}} 5 | {{ underline * section|length }}{% set underline = "~" %} 6 | 7 | {% endif %} 8 | {% if sections[section] %} 9 | {% for category, val in definitions.items() if category in sections[section] and category != 'trivial' %} 10 | 11 | {{ definitions[category]['name'] }} 12 | {{ underline * definitions[category]['name']|length }} 13 | 14 | {% if definitions[category]['showcontent'] %} 15 | {% for text, values in sections[section][category]|dictsort(by='value') %} 16 | - {{ text }}{% if category != 'process' %} 17 | {{ values|sort|join(',\n ') }} 18 | {% endif %} 19 | 20 | {% endfor %} 21 | {% else %} 22 | - {{ sections[section][category]['']|sort|join(', ') }} 23 | 24 | 
25 | {% endif %} 26 | {% if sections[section][category]|length == 0 %} 27 | 28 | No significant changes. 29 | 30 | 31 | {% else %} 32 | {% endif %} 33 | {% endfor %} 34 | {% else %} 35 | 36 | No significant changes. 37 | 38 | 39 | {% endif %} 40 | {% endfor %} 41 | -------------------------------------------------------------------------------- /tasks/__init__.py: -------------------------------------------------------------------------------- 1 | import invoke 2 | 3 | from . import admin, package 4 | 5 | 6 | def add_tasks(module, prefix=None): 7 | if prefix is None: 8 | prefix = module.__name__.rsplit('.', 1)[-1] 9 | child_namespace = invoke.Collection.from_module(module) 10 | for name in child_namespace.task_names: 11 | if name in namespace.task_names: 12 | raise ValueError('duplicate task {}'.format(name)) 13 | namespace.add_task(child_namespace[name], name=name) 14 | 15 | 16 | namespace = invoke.Collection() 17 | add_tasks(admin) 18 | add_tasks(package) 19 | -------------------------------------------------------------------------------- /tasks/admin.py: -------------------------------------------------------------------------------- 1 | import ast 2 | import pathlib 3 | import shutil 4 | import subprocess 5 | 6 | import invoke 7 | import parver 8 | 9 | 10 | ROOT = pathlib.Path(__file__).resolve().parent.parent 11 | 12 | PACKAGE_NAME = 'passa' 13 | 14 | INIT_PY = ROOT.joinpath('src', PACKAGE_NAME, '__init__.py') 15 | 16 | 17 | @invoke.task() 18 | def clean(ctx): 19 | """Clean previously built package artifacts. 20 | """ 21 | ctx.run(f'python setup.py clean') 22 | dist = ROOT.joinpath('dist') 23 | print(f'[clean] Removing {dist}') 24 | if dist.exists(): 25 | shutil.rmtree(str(dist)) 26 | 27 | 28 | def _read_version(): 29 | with INIT_PY.open() as f: 30 | for line in f: 31 | if not line.startswith('__version__ = '): 32 | continue 33 | value = ast.literal_eval(line.split('=', 1)[-1].strip()) 34 | return parver.Version.parse(value) 35 | 36 | 37 | def _write_version(v): 38 | lines = [] 39 | with INIT_PY.open() as f: 40 | for line in f: 41 | if line.startswith('__version__ = '): 42 | line = f'__version__ = {repr(str(v))}\n' 43 | lines.append(line) 44 | with INIT_PY.open('w', newline='\n') as f: 45 | f.write(''.join(lines)) 46 | 47 | 48 | REL_TYPES = ('major', 'minor', 'patch',) 49 | 50 | 51 | @invoke.task() 52 | def unprebump(ctx): 53 | out = subprocess.check_output(['git', 'tag'], encoding='ascii') 54 | try: 55 | version = max(parver.Version.parse(v).normalize() for v in ( 56 | line.strip() for line in out.split('\n') 57 | ) if v) 58 | except ValueError: 59 | print('[unprebump] No tag to undo prebump') 60 | return 61 | print(f'[unprebump] Undo prebump back to {version}') 62 | _write_version(version) 63 | 64 | 65 | @invoke.task() 66 | def bump_release(ctx, type_): 67 | if type_ not in REL_TYPES: 68 | raise ValueError(f'{type_} not in {REL_TYPES}') 69 | index = REL_TYPES.index(type_) 70 | prev_version = _read_version() 71 | 72 | if prev_version.is_prerelease: 73 | print(f'[bump] Bumping {type_} from prerelease ({prev_version}).') 74 | print('[bump] Did you forget to undo prebumping?') 75 | try: 76 | input('[bump] ENTER to continue, CTRL-C to abort: ') 77 | except KeyboardInterrupt: 78 | print('\nAborted!') 79 | return 1 80 | 81 | next_version = prev_version.base_version().bump_release(index) 82 | print(f'[bump] {prev_version} -> {next_version}') 83 | _write_version(next_version) 84 | 85 | 86 | @invoke.task(pre=[clean]) 87 | def build(ctx): 88 | ctx.run(f'python setup.py sdist bdist_wheel') 89 
| 90 | 91 | @invoke.task(pre=[build]) 92 | def upload(ctx, repo): 93 | dist_pattern = f'{PACKAGE_NAME.replace("-", "[-_]")}-*' 94 | artifacts = list(ROOT.joinpath('dist').glob(dist_pattern)) 95 | filename_display = '\n'.join(f' {a}' for a in artifacts) 96 | 97 | print(f'[release] Will upload:\n{filename_display}') 98 | try: 99 | input('[release] Release ready. ENTER to upload, CTRL-C to abort: ') 100 | except KeyboardInterrupt: 101 | print('\nAborted!') 102 | return 1 103 | 104 | arg_display = ' '.join(f'"{n}"' for n in artifacts) 105 | ctx.run(f'twine upload --repository="{repo}" {arg_display}') 106 | 107 | 108 | @invoke.task() 109 | def prebump(ctx, type_): 110 | if type_ not in REL_TYPES: 111 | raise ValueError(f'{type_} not in {REL_TYPES}') 112 | index = REL_TYPES.index(type_) 113 | prev_version = _read_version() 114 | next_version = prev_version.bump_release(index).bump_dev() 115 | print(f'[bump] {prev_version} -> {next_version}') 116 | _write_version(next_version) 117 | 118 | 119 | PREBUMP = 'patch' 120 | 121 | 122 | @invoke.task() 123 | def release(ctx, type_, repo=None, prebump_to=PREBUMP): 124 | """Make a new release. 125 | """ 126 | unprebump(ctx) 127 | if bump_release(ctx, type_=type_): 128 | return 129 | 130 | this_version = _read_version() 131 | ctx.run('towncrier') 132 | ctx.run(f'git commit -am "Release {this_version}"') 133 | ctx.run(f'git tag -fa {this_version} -m "Version {this_version}"') 134 | 135 | if repo: 136 | if upload(ctx, repo=repo): 137 | return 138 | else: 139 | print('[release] Missing --repo, skip uploading') 140 | 141 | prebump(ctx, type_=prebump_to) 142 | 143 | next_version = _read_version() 144 | ctx.run(f'git commit -am "Prebump to {next_version}"') 145 | -------------------------------------------------------------------------------- /tasks/pack/__main__.py: -------------------------------------------------------------------------------- 1 | # -*- coding=utf-8 -*- 2 | 3 | import os 4 | import sys 5 | import sysconfig 6 | 7 | 8 | LIBPATH = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'lib') 9 | 10 | 11 | def get_site_packages(): 12 | prefixes = {sys.prefix, sysconfig.get_config_var('prefix')} 13 | try: 14 | prefixes.add(sys.real_prefix) 15 | except AttributeError: 16 | pass 17 | form = sysconfig.get_path('purelib', expand=False) 18 | py_version_short = '{0[0]}.{0[1]}'.format(sys.version_info) 19 | return { 20 | form.format(base=prefix, py_version_short=py_version_short) 21 | for prefix in prefixes 22 | } 23 | 24 | 25 | def insert_before_site_packages(*paths): 26 | site_packages = get_site_packages() 27 | index = None 28 | for i, path in enumerate(sys.path): 29 | if path in site_packages: 30 | index = i 31 | break 32 | if index is None: 33 | sys.path += list(paths) 34 | else: 35 | sys.path = sys.path[:index] + list(paths) + sys.path[index:] 36 | 37 | 38 | def main(): 39 | insert_before_site_packages(LIBPATH) 40 | from passa.cli import main 41 | main() 42 | 43 | 44 | if __name__ == '__main__': 45 | main() 46 | -------------------------------------------------------------------------------- /tasks/pack/lib/typing.py: -------------------------------------------------------------------------------- 1 | # -*- coding=utf-8 -*- 2 | 3 | from __future__ import absolute_import 4 | 5 | import sys 6 | 7 | 8 | class Typing(object): 9 | def __getattr__(self, key): 10 | return None 11 | 12 | 13 | sys.modules[__name__] = Typing() 14 | -------------------------------------------------------------------------------- /tasks/package.py: 
--------------------------------------------------------------------------------
1 | import pathlib
2 | import shutil
3 | import zipfile
4 | 
5 | import distlib.scripts
6 | import distlib.wheel
7 | import invoke
8 | import passa.internals._pip
9 | import plette
10 | import requirementslib
11 | 
12 | 
13 | ROOT = pathlib.Path(__file__).resolve().parent.parent
14 | 
15 | OUTPUT_DIR = ROOT.joinpath('pack')
16 | 
17 | STUBFILES_DIR = pathlib.Path(__file__).resolve().with_name('pack')
18 | 
19 | DONT_PACKAGE = {
20 |     # Rely on the client for them.
21 |     'pip', 'setuptools',
22 | 
23 |     'importlib',  # We only support 2.7 so this is not needed.
24 |     'modutil',    # This breaks <3.7.
25 |     'toml',       # Why is requirementslib still not dropping it?
26 |     'typing',     # This breaks 2.7. We'll provide a special stub for it.
27 | }
28 | 
29 | IGNORE_LIB_PATTERNS = {
30 |     '*.pyd',  # Binary on Windows.
31 |     '*.so',   # Binary on POSIX.
32 | }
33 | 
34 | 
35 | @invoke.task()
36 | def clean_pack(ctx):
37 |     if OUTPUT_DIR.exists():
38 |         print(f'[clean-pack] Removing {OUTPUT_DIR}')
39 |         shutil.rmtree(str(OUTPUT_DIR))
40 | 
41 | 
42 | def _recursive_write_to_zip(zf, path, root=None):
43 |     if path == pathlib.Path(zf.filename):
44 |         return
45 |     if root is None:
46 |         if not path.is_dir():
47 |             raise ValueError('root is required for non-directory path')
48 |         root = path
49 |     if not path.is_dir():
50 |         zf.write(str(path), str(path.relative_to(root)))
51 |         return
52 |     for c in path.iterdir():
53 |         _recursive_write_to_zip(zf, c, root)
54 | 
55 | 
56 | @invoke.task(pre=[clean_pack])
57 | def pack(ctx, remove_lib=True):
58 |     """Build an isolated runnable package.
59 |     """
60 |     OUTPUT_DIR.mkdir(parents=True, exist_ok=True)
61 |     with ROOT.joinpath('Pipfile.lock').open() as f:
62 |         lockfile = plette.Lockfile.load(f)
63 | 
64 |     libdir = OUTPUT_DIR.joinpath('lib')
65 | 
66 |     paths = {'purelib': libdir, 'platlib': libdir}
67 |     sources = lockfile.meta.sources._data
68 |     maker = distlib.scripts.ScriptMaker(None, None)
69 | 
70 |     # Install packages from Pipfile.lock.
71 |     for name, package in lockfile.default._data.items():
72 |         if name in DONT_PACKAGE:
73 |             continue
74 |         print(f'[pack] Installing {name}')
75 |         package.pop('editable', None)  # Don't install things as editable.
76 |         package.pop('markers', None)   # Always install everything.
77 |         r = requirementslib.Requirement.from_pipfile(name, package)
78 |         wheel = passa.internals._pip.build_wheel(
79 |             r.as_ireq(), sources, r.hashes or None,
80 |         )
81 |         wheel.install(paths, maker, lib_only=True)
82 | 
83 |     for pattern in IGNORE_LIB_PATTERNS:
84 |         for path in libdir.rglob(pattern):
85 |             print(f'[pack] Removing {path}')
86 |             path.unlink()
87 | 
88 |     # Pack everything into ZIP.
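    # The stub files (tasks/pack/__main__.py and its lib/) make the archive
    # directly runnable, e.g. `python passa.zip`, by prepending the bundled
    # lib/ directory to sys.path before importing passa.cli.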
89 |     zipname = OUTPUT_DIR.joinpath('passa.zip')
90 |     with zipfile.ZipFile(zipname, 'w') as zf:
91 |         _recursive_write_to_zip(zf, OUTPUT_DIR)
92 |         _recursive_write_to_zip(zf, STUBFILES_DIR)
93 |     print(f'[pack] Wrote archive {zipname}')
94 | 
95 |     if remove_lib and libdir.exists():
96 |         print(f'[pack] Removing {libdir}')
97 |         shutil.rmtree(str(libdir))
--------------------------------------------------------------------------------
/tests/test_markers.py:
--------------------------------------------------------------------------------
1 | from packaging.markers import Marker
2 | 
3 | from passa.internals.markers import get_without_extra
4 | 
5 | 
6 | def test_strip_marker_extra_noop():
7 |     marker = get_without_extra(
8 |         Marker('os_name == "nt" or sys_platform == "Windows"'),
9 |     )
10 |     assert str(marker) == 'os_name == "nt" or sys_platform == "Windows"'
11 | 
12 | 
13 | def test_strip_marker_none():
14 |     marker = get_without_extra(None)
15 |     assert marker is None
16 | 
17 | 
18 | def test_strip_marker_extra_only():
19 |     marker = get_without_extra(Marker('extra == "sock"'))
20 |     assert marker is None
21 | 
22 | 
23 | def test_strip_marker_extra_simple():
24 |     marker = get_without_extra(Marker('os_name == "nt" and extra == "sock"'))
25 |     assert str(marker) == 'os_name == "nt"'
26 | 
27 | 
28 | def test_strip_marker_extra_in_front():
29 |     marker = get_without_extra(Marker('extra == "sock" or os_name == "nt"'))
30 |     assert str(marker) == 'os_name == "nt"'
31 | 
32 | 
33 | def test_strip_marker_extra_nested():
34 |     marker = get_without_extra(Marker(
35 |         '(os_name == "nt" or sys_platform == "Windows") '
36 |         'and extra == "sock"',
37 |     ))
38 |     assert str(marker) == 'os_name == "nt" or sys_platform == "Windows"'
39 | 
40 | 
41 | def test_strip_marker_extra_crazy():
42 |     marker = get_without_extra(Marker(
43 |         '(os_name == "nt" or sys_platform == "Windows" and extra == "huh") '
44 |         'and extra == "sock"',
45 |     ))
46 |     assert str(marker) == 'os_name == "nt" or sys_platform == "Windows"'
47 | 
48 | 
49 | def test_strip_marker_extra_cancelled():
50 |     marker = get_without_extra(Marker('extra == "sock" or extra == "huh"'))
51 |     assert marker is None
52 | 
53 | 
54 | def test_strip_marker_extra_parenthesized_cancelled():
55 |     marker = get_without_extra(Marker(
56 |         '(extra == "sock") or (extra == "huh") or (sys_platform == "Windows")',
57 |     ))
58 |     assert str(marker) == 'sys_platform == "Windows"'
59 | 
60 | 
61 | def test_strip_marker_extra_crazy_cancelled():
62 |     marker = get_without_extra(Marker(
63 |         '(extra == "foo" or extra == "sock") or '
64 |         '(extra == "huh" or extra == "bar")',
65 |     ))
66 |     assert marker is None
--------------------------------------------------------------------------------
/tests/test_specifiers.py:
--------------------------------------------------------------------------------
1 | import pytest
2 | 
3 | from packaging.specifiers import SpecifierSet
4 | 
5 | from passa.internals.specifiers import cleanup_pyspecs
6 | 
7 | 
8 | @pytest.mark.parametrize("spec, cleaned", [
9 |     # == and ~= don't matter.
10 |     ("~=3.0", [("~=", "3.0")]),
11 |     ("==2.7", [("==", "2.7")]),
12 | 
13 |     # >= and < should be kept.
14 |     (">=3.0", [(">=", "3.0")]),
15 |     ("<3.5", [("<", "3.5")]),
16 | 
17 |     # > and <= should be converted to >= and <.
18 |     (">3.0", [(">=", "3.1")]),
19 |     (">=2.7,<=3.3", [(">=", "2.7"), ("<", "3.4")]),
20 |     (">2.6,!=3.0,!=3.1,!=3.2", [(">=", "2.7"), ("not in", "3.0, 3.1, 3.2")]),
21 | 
22 |     # The result should be dedup-ed.
23 | ( 24 | ">2.6,>=2.7,<=3.3,<3.4,!=3.0,!=3.1,!=3.2", 25 | [(">=", "2.7"), ("<", "3.4"), ("not in", "3.0, 3.1, 3.2")], 26 | ), 27 | ]) 28 | def test_cleanup_pyspecs(spec, cleaned): 29 | cleaned_specifierset = frozenset(s for s in cleaned) 30 | assert cleanup_pyspecs(SpecifierSet(spec)) == cleaned_specifierset 31 | -------------------------------------------------------------------------------- /tox.ini: -------------------------------------------------------------------------------- 1 | [tox] 2 | envlist = 3 | docs, packaging, py27, py35, py36, py37, coverage-report 4 | 5 | [testenv] 6 | passenv = CI GIT_SSL_CAINFO 7 | setenv = 8 | LC_ALL = en_US.UTF-8 9 | deps = 10 | coverage 11 | -e .[tests] 12 | commands = coverage run --parallel -m pytest --timeout 300 [] 13 | install_command = python -m pip install {opts} {packages} 14 | usedevelop = True 15 | 16 | [testenv:coverage-report] 17 | deps = coverage 18 | skip_install = true 19 | commands = 20 | coverage combine 21 | coverage report 22 | 23 | [testenv:docs] 24 | deps = 25 | -r{toxinidir}/docs/requirements.txt 26 | -e .[tests] 27 | commands = 28 | sphinx-build -d {envtmpdir}/doctrees -b html docs docs/build/html 29 | sphinx-build -d {envtmpdir}/doctrees -b man docs docs/build/man 30 | 31 | [testenv:packaging] 32 | deps = 33 | check-manifest 34 | readme_renderer 35 | commands = 36 | check-manifest 37 | python setup.py check -m -r -s 38 | --------------------------------------------------------------------------------