├── docs ├── changelog.rst ├── _static │ ├── press.ico │ ├── press.png │ ├── numpy_friendly.css_t │ └── press.svg ├── dependencies.rst ├── api │ ├── index.rst │ ├── blank.rst │ ├── wheel.rst │ ├── main.rst │ └── condatools.rst ├── confs │ └── pydata-nyc2019.md ├── Makefile ├── index.rst ├── devguide.rst └── conf.py ├── conda_press ├── __main__.py ├── __init__.py ├── config.py ├── main.xsh ├── condatools.xsh └── wheel.xsh ├── scripts ├── conda-press └── conda-press.bat ├── tests ├── data │ ├── test-deps-0.0.1-py_0.zip │ ├── test-deps-0.0.1-py_0.tar.bz2 │ ├── test-deps-0.0.1-py_0.tar.gz │ └── test-deps-0.0.1-py_0.tar ├── test_main.py ├── test_config.py ├── conftest.py └── test_condatools.py ├── AUTHORS.md ├── news └── TEMPLATE.md ├── .mailmap ├── azure-steps.yml ├── .authors.yml ├── rever.xsh ├── setup.py ├── LICENSE ├── azure-pipelines.yml ├── .gitignore ├── README.md └── CHANGELOG.md /docs/changelog.rst: -------------------------------------------------------------------------------- 1 | .. include:: ../CHANGELOG.md -------------------------------------------------------------------------------- /conda_press/__main__.py: -------------------------------------------------------------------------------- 1 | from conda_press.main import main 2 | 3 | main() 4 | -------------------------------------------------------------------------------- /scripts/conda-press: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 -u 2 | from conda_press.main import main 3 | 4 | main() 5 | -------------------------------------------------------------------------------- /docs/_static/press.ico: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/conda-incubator/conda-press/HEAD/docs/_static/press.ico -------------------------------------------------------------------------------- /docs/_static/press.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/conda-incubator/conda-press/HEAD/docs/_static/press.png -------------------------------------------------------------------------------- /conda_press/__init__.py: -------------------------------------------------------------------------------- 1 | from xonsh.main import setup 2 | 3 | setup() 4 | del setup 5 | 6 | __version__ = "0.0.6" 7 | -------------------------------------------------------------------------------- /tests/data/test-deps-0.0.1-py_0.zip: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/conda-incubator/conda-press/HEAD/tests/data/test-deps-0.0.1-py_0.zip -------------------------------------------------------------------------------- /docs/_static/numpy_friendly.css_t: -------------------------------------------------------------------------------- 1 | @import url("cloud.css"); 2 | 3 | table.docutils th.field-name { 4 | white-space: nowrap; 5 | } 6 | -------------------------------------------------------------------------------- /tests/data/test-deps-0.0.1-py_0.tar.bz2: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/conda-incubator/conda-press/HEAD/tests/data/test-deps-0.0.1-py_0.tar.bz2 -------------------------------------------------------------------------------- /tests/data/test-deps-0.0.1-py_0.tar.gz: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/conda-incubator/conda-press/HEAD/tests/data/test-deps-0.0.1-py_0.tar.gz -------------------------------------------------------------------------------- /AUTHORS.md: -------------------------------------------------------------------------------- 1 | All of the people who have made at least one contribution to conda-press. 2 | Authors are sorted by number of commits. 3 | 4 | * Anthony Scopatz 5 | * Marcelo Duarte Trevisani 6 | * Isaiah Norton 7 | * P. L. Lim 8 | * Julien Schueller 9 | * Brian Kelley 10 | -------------------------------------------------------------------------------- /scripts/conda-press.bat: -------------------------------------------------------------------------------- 1 | @echo off 2 | call :s_which py.exe 3 | if not "%_path%" == "" ( 4 | py -3 -m conda_press %* 5 | ) else ( 6 | python -m conda_press %* 7 | ) 8 | 9 | goto :eof 10 | 11 | :s_which 12 | setlocal 13 | endlocal & set _path=%~$PATH:1 14 | goto :eof 15 | -------------------------------------------------------------------------------- /news/TEMPLATE.md: -------------------------------------------------------------------------------- 1 | **Added:** 2 | 3 | * 4 | 5 | **Changed:** 6 | 7 | * 8 | 9 | **Deprecated:** 10 | 11 | * 12 | 13 | **Removed:** 14 | 15 | * 16 | 17 | **Fixed:** 18 | 19 | * 20 | 21 | **Security:** 22 | 23 | * 24 | 25 | -------------------------------------------------------------------------------- /docs/dependencies.rst: -------------------------------------------------------------------------------- 1 | Dependencies 2 | ------------ 3 | Conda-press currently has the following external dependencies, 4 | 5 | *Run Time:* 6 | 7 | #. xonsh 8 | #. lazyasd 9 | #. ruamel.yaml 10 | #. tqdm 11 | #. requests 12 | #. conda (installed in the current environment) 13 | 14 | All of these dependencies are available in conda-forge. -------------------------------------------------------------------------------- /docs/api/index.rst: -------------------------------------------------------------------------------- 1 | .. _api: 2 | 3 | ================= 4 | Conda-Press API 5 | ================= 6 | For those of you who want the gritty details. 7 | 8 | **Core Iterface:** 9 | 10 | .. toctree:: 11 | :maxdepth: 1 12 | 13 | condatools 14 | wheel 15 | 16 | **Helpers:** 17 | 18 | .. toctree:: 19 | :maxdepth: 1 20 | 21 | main 22 | -------------------------------------------------------------------------------- /docs/api/blank.rst: -------------------------------------------------------------------------------- 1 | .. _conda_press_mod: 2 | 3 | ******************************************************************************** 4 | (``conda_press.mod``) 5 | ******************************************************************************** 6 | 7 | .. automodule:: conda_press.mod 8 | :members: 9 | :undoc-members: 10 | :inherited-members: 11 | 12 | -------------------------------------------------------------------------------- /docs/api/wheel.rst: -------------------------------------------------------------------------------- 1 | .. _conda_press_wheel: 2 | 3 | ******************************************************************************** 4 | Wheel (``conda_press.wheel``) 5 | ******************************************************************************** 6 | 7 | .. 
automodule:: conda_press.wheel 8 | :members: 9 | :undoc-members: 10 | :inherited-members: 11 | 12 | -------------------------------------------------------------------------------- /docs/api/main.rst: -------------------------------------------------------------------------------- 1 | .. _conda_press_main: 2 | 3 | ******************************************************************************** 4 | Main Entry Points (``conda_press.main``) 5 | ******************************************************************************** 6 | 7 | .. automodule:: conda_press.main 8 | :members: 9 | :undoc-members: 10 | :inherited-members: 11 | 12 | -------------------------------------------------------------------------------- /docs/api/condatools.rst: -------------------------------------------------------------------------------- 1 | .. _conda_press_condatools: 2 | 3 | ******************************************************************************** 4 | Conda Tools (``conda_press.condatools``) 5 | ******************************************************************************** 6 | 7 | .. automodule:: conda_press.condatools 8 | :members: 9 | :undoc-members: 10 | :inherited-members: 11 | 12 | -------------------------------------------------------------------------------- /tests/test_main.py: -------------------------------------------------------------------------------- 1 | import os 2 | 3 | from conda_press import main 4 | 5 | 6 | def test_main(script_runner, data_folder): 7 | # Sanity test for main to see if empty options are being handled 8 | conda_pkg = os.path.join(data_folder, "test-deps-0.0.1-py_0.tar.bz2") 9 | response = script_runner.run(main.__file__, conda_pkg) 10 | assert response.success, response.stderr 11 | 12 | 13 | def test_main_empty_config_yaml(tmpdir, script_runner, data_folder): 14 | conda_pkg = os.path.join(data_folder, "test-deps-0.0.1-py_0.tar.bz2") 15 | cp_yaml = tmpdir.join("conda-press.yaml") 16 | cp_yaml.write("") 17 | response = script_runner.run( 18 | main.__file__, conda_pkg, "--config", str(cp_yaml) 19 | ) 20 | assert response.success, response.stderr 21 | -------------------------------------------------------------------------------- /.mailmap: -------------------------------------------------------------------------------- 1 | # This file was autogenerated by rever: https://regro.github.io/rever-docs/ 2 | # This prevent git from showing duplicates with various logging commands. 3 | # See the git documentation for more details. The syntax is: 4 | # 5 | # good-name bad-name 6 | # 7 | # You can skip bad-name if it is the same as good-name and is unique in the repo. 8 | # 9 | # This file is up-to-date if the command git log --format="%aN <%aE>" | sort -u 10 | # gives no duplicates. 11 | 12 | Anthony Scopatz 13 | Marcelo Duarte Trevisani Marcelo Duarte Trevisani 14 | Isaiah Norton 15 | P. L. Lim <2090236+pllim@users.noreply.github.com> 16 | Julien Schueller 17 | Brian Kelley 18 | -------------------------------------------------------------------------------- /azure-steps.yml: -------------------------------------------------------------------------------- 1 | steps: 2 | - task: UsePythonVersion@0 3 | inputs: 4 | versionSpec: '$(python.version)' 5 | architecture: 'x64' 6 | 7 | # Conda Environment 8 | # Create and activate a Conda environment. 
9 | - task: CondaEnvironment@1 10 | inputs: 11 | packageSpecs: 'python=$(python.version) $(deps.space)${{ parameters.platform_deps_space }}' 12 | installOptions: '--strict-channel-priority -c conda-forge' 13 | updateConda: true 14 | displayName: 'Conda Environment (conda-forge)' 15 | - script: | 16 | pip install --no-deps ${{ parameters.pip_flags }} . 17 | pytest -sv --timeout=300 --junitxml=junit/test-results.xml 18 | displayName: 'Tests' 19 | 20 | # Publish build results 21 | - task: PublishTestResults@2 22 | inputs: 23 | testResultsFiles: '**/test-*.xml' 24 | testRunTitle: 'Publish test results for Python $(python.version)' 25 | -------------------------------------------------------------------------------- /.authors.yml: -------------------------------------------------------------------------------- 1 | - name: P. L. Lim 2 | email: 2090236+pllim@users.noreply.github.com 3 | num_commits: 1 4 | first_commit: 2019-05-05 09:56:53 5 | github: pllim 6 | - name: Julien Schueller 7 | email: schueller@phimeca.com 8 | num_commits: 1 9 | first_commit: 2019-05-30 11:40:35 10 | github: jschueller 11 | - name: Anthony Scopatz 12 | email: scopatz@gmail.com 13 | num_commits: 148 14 | first_commit: 2019-04-05 16:53:05 15 | - name: Isaiah Norton 16 | email: isaiah.norton@gmail.com 17 | num_commits: 5 18 | first_commit: 2019-08-09 09:21:03 19 | github: ihnorton 20 | - name: Marcelo Duarte Trevisani 21 | email: marceloduartetrevisani@gmail.com 22 | alternate_emails: 23 | - marcelotrevisani@users.noreply.github.com 24 | num_commits: 39 25 | first_commit: 2019-09-04 04:28:53 26 | github: marcelotrevisani 27 | - name: Brian Kelley 28 | email: fustigator@gmail.com 29 | num_commits: 1 30 | first_commit: 2019-10-29 09:29:01 31 | -------------------------------------------------------------------------------- /rever.xsh: -------------------------------------------------------------------------------- 1 | $PROJECT = $GITHUB_REPO = 'conda-press' 2 | $GITHUB_ORG = 'regro' 3 | $PYPI_SIGN = False 4 | 5 | $ACTIVITIES = ['authors', 'version_bump', 'changelog', 6 | 'sphinx', 'tag', 'push_tag', 7 | 'ghrelease', 'ghpages', 'pypi', 8 | 'conda_forge', 9 | ] 10 | 11 | $AUTHORS_FILENAME = 'AUTHORS.md' 12 | $VERSION_BUMP_PATTERNS = [ 13 | ('conda_press/__init__.py', r'__version__\s*=.*', '__version__ = "$VERSION"'), 14 | ('setup.py', r'version\s*=.*', "version='$VERSION',"), 15 | ('docs/conf.py', r'release\s*=.*', "release = '$VERSION'"), 16 | ('docs/Makefile', r'RELEASE\s*=.*', "RELEASE = v$VERSION"), 17 | ] 18 | $CHANGELOG_FILENAME = 'CHANGELOG.md' 19 | $CHANGELOG_TEMPLATE = 'TEMPLATE.md' 20 | $CHANGELOG_PATTERN = "" 21 | $CHANGELOG_HEADER = """ 22 | 23 | 24 | ## v$VERSION 25 | """ 26 | 27 | $GHPAGES_REPO = 'git@github.com:regro/conda-press-docs.git' 28 | 29 | $DOCKER_CONDA_DEPS = [ 30 | 'sphinx', 'recommonmark', 'ruamel.yaml', 'numpydoc', 'xonsh', 'conda', 'tqdm', 31 | 'lazyasd', 'virtualenv', 'requests', 'cloud_sptheme', 32 | ] 33 | $DOCKER_INSTALL_COMMAND = 'git clean -fdx && pip install --no-deps .' 
34 | $DOCKER_GIT_NAME = 'conda-press' 35 | $DOCKER_GIT_EMAIL = 'conda-press@example.com' -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | import os 3 | import sys 4 | 5 | from setuptools import setup 6 | 7 | 8 | def main(): 9 | """The main entry point.""" 10 | with open(os.path.join(os.path.dirname(__file__), 'README.md'), 'r') as f: 11 | readme = f.read() 12 | if sys.platform == "win32": 13 | scripts = ['scripts/conda-press.bat'] 14 | else: 15 | scripts = ['scripts/conda-press'] 16 | skw = dict( 17 | name='conda-press', 18 | description='Press conda packages into wheels', 19 | long_description=readme, 20 | long_description_content_type='text/markdown', 21 | license='BSD', 22 | version='0.0.6', 23 | author='Anthony Scopatz', 24 | maintainer='Anthony Scopatz', 25 | author_email='scopatz@gmail.com', 26 | url='https://github.com/regro/conda-press', 27 | platforms='Cross Platform', 28 | classifiers=['Programming Language :: Python :: 3'], 29 | packages=['conda_press'], 30 | package_dir={'conda_press': 'conda_press'}, 31 | package_data={'conda_press': ['*.xsh']}, 32 | scripts=scripts, 33 | install_requires=['xonsh', 'lazyasd', 'ruamel.yaml', 'tqdm', 'requests', 'dataclasses'], 34 | python_requires=">=3.6", 35 | zip_safe=False, 36 | ) 37 | setup(**skw) 38 | 39 | 40 | if __name__ == '__main__': 41 | main() 42 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | BSD 3-Clause License 2 | 3 | Copyright (c) 2019, Re(search) Gro(up) 4 | All rights reserved. 5 | 6 | Redistribution and use in source and binary forms, with or without 7 | modification, are permitted provided that the following conditions are met: 8 | 9 | 1. Redistributions of source code must retain the above copyright notice, this 10 | list of conditions and the following disclaimer. 11 | 12 | 2. Redistributions in binary form must reproduce the above copyright notice, 13 | this list of conditions and the following disclaimer in the documentation 14 | and/or other materials provided with the distribution. 15 | 16 | 3. Neither the name of the copyright holder nor the names of its 17 | contributors may be used to endorse or promote products derived from 18 | this software without specific prior written permission. 19 | 20 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" 21 | AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE 22 | IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE 23 | DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE 24 | FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL 25 | DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR 26 | SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER 27 | CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, 28 | OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 29 | OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
30 | -------------------------------------------------------------------------------- /azure-pipelines.yml: -------------------------------------------------------------------------------- 1 | variables: 2 | deps.space: 'conda requests xonsh lazyasd tqdm ruamel.yaml pytest pytest-timeout virtualenv pip pytest-azurepipelines dataclasses pytest-console-scripts' 3 | deps.comma: 'conda,requests,xonsh,lazyasd,tqdm,ruamel.yaml,pytest,pytest-timeout,virtualenv,pip,pytest-azurepipelines,dataclasses,pytest-console-scripts' 4 | 5 | jobs: 6 | - job: windows 7 | pool: 8 | vmImage: 'VS2017-Win2016' 9 | strategy: 10 | matrix: 11 | python36: 12 | python.version: '3.6' 13 | # There is some weird issue with pytest-timeout not working 14 | # on Python 3.7 on Windows 15 | #python37: 16 | # python.version: '3.7' 17 | maxParallel: 4 18 | steps: 19 | - template: azure-steps.yml 20 | parameters: 21 | platform_deps_space: "" 22 | platform_deps_comma: "" 23 | pip_flags: "" 24 | - job: mac 25 | pool: 26 | vmImage: 'macOS-10.13' 27 | strategy: 28 | matrix: 29 | python36: 30 | python.version: '3.6' 31 | python37: 32 | python.version: '3.7' 33 | maxParallel: 4 34 | steps: 35 | - script: | 36 | sudo chmod -R 777 /Users/runner/.conda 37 | displayName: Fix for OSX directory permission 38 | 39 | - template: azure-steps.yml 40 | parameters: 41 | platform_deps_space: "" 42 | platform_deps_comma: "" 43 | pip_flags: "--user" 44 | - job: linux 45 | pool: 46 | vmImage: 'ubuntu-16.04' 47 | strategy: 48 | matrix: 49 | python36: 50 | python.version: '3.6' 51 | python37: 52 | python.version: '3.7' 53 | maxParallel: 4 54 | steps: 55 | - template: azure-steps.yml 56 | parameters: 57 | platform_deps_space: " patchelf" 58 | platform_deps_comma: ",patchelf" 59 | pip_flags: "" 60 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # conda-press ignores 2 | tests/artifact-cache/ 3 | *.whl 4 | 5 | # Byte-compiled / optimized / DLL files 6 | __pycache__/ 7 | *.py[cod] 8 | *$py.class 9 | 10 | # C extensions 11 | *.so 12 | 13 | # Distribution / packaging 14 | .Python 15 | build/ 16 | develop-eggs/ 17 | dist/ 18 | downloads/ 19 | eggs/ 20 | .eggs/ 21 | lib/ 22 | lib64/ 23 | parts/ 24 | sdist/ 25 | var/ 26 | wheels/ 27 | .idea/ 28 | *.egg-info/ 29 | .installed.cfg 30 | *.egg 31 | MANIFEST 32 | 33 | # PyInstaller 34 | # Usually these files are written by a python script from a template 35 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 
36 | *.manifest 37 | *.spec 38 | 39 | # Installer logs 40 | pip-log.txt 41 | pip-delete-this-directory.txt 42 | 43 | # Unit test / coverage reports 44 | htmlcov/ 45 | .tox/ 46 | .coverage 47 | .coverage.* 48 | .cache 49 | nosetests.xml 50 | coverage.xml 51 | *.cover 52 | .hypothesis/ 53 | .pytest_cache/ 54 | 55 | # Translations 56 | *.mo 57 | *.pot 58 | 59 | # Django stuff: 60 | *.log 61 | local_settings.py 62 | db.sqlite3 63 | 64 | # Flask stuff: 65 | instance/ 66 | .webassets-cache 67 | 68 | # Scrapy stuff: 69 | .scrapy 70 | 71 | # Sphinx documentation 72 | docs/_build/ 73 | 74 | # PyBuilder 75 | target/ 76 | 77 | # Jupyter Notebook 78 | .ipynb_checkpoints 79 | 80 | # pyenv 81 | .python-version 82 | 83 | # celery beat schedule file 84 | celerybeat-schedule 85 | 86 | # SageMath parsed files 87 | *.sage.py 88 | 89 | # Environments 90 | .env 91 | .venv 92 | env/ 93 | venv/ 94 | ENV/ 95 | env.bak/ 96 | venv.bak/ 97 | 98 | # Spyder project settings 99 | .spyderproject 100 | .spyproject 101 | 102 | # Rope project settings 103 | .ropeproject 104 | 105 | # mkdocs documentation 106 | /site 107 | 108 | # mypy 109 | .mypy_cache/ 110 | 111 | # Rever 112 | rever/ 113 | -------------------------------------------------------------------------------- /docs/_static/press.svg: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /docs/confs/pydata-nyc2019.md: -------------------------------------------------------------------------------- 1 | # PyData NYC 2019 2 | 3 | ## Conda-press, or Reinventing the Wheel 4 | 5 | Conda-press (https://github.com/regro/conda-press) is a new tool that 6 | lets you transform conda packages (artifacts) into Python wheels. This talk will: 7 | 8 | * discuss why in the world you would want to do such a terrible thing, 9 | * demonstrate that you can do such a terrible thing (live!), 10 | * dive-in to how such as terrible thing is done, and 11 | * define some safety precautions when doing such a terrible thing on your own. 12 | 13 | ### Discuss 14 | 15 | Building software is hard. Luckily, conda-forge is a huge community (1.5k+) 16 | dedicated to building software, focused on the PyData stack. Unfortunately, 17 | some users still want to be able to `pip install` packages. Double unfortunately, 18 | creating binary wheels across many different platforms is often extremely difficult 19 | for any package with a C-extension. 20 | 21 | The central idea behind conda-press is that if there is already a conda-forge 22 | package, all of the hard work has already been done! To provide wheels, we 23 | should just be able to massage those artifacts into a more circular shape. 24 | 25 | ### Demonstrate 26 | 27 | Because we conda-press is just shuffling bits around, managing metadata, 28 | and not compiling anything new, it is quite fast! This talk will demo 29 | creating and installing wheels for a few different packages. For example, 30 | packages like numpy, scipy, or uvloop are all good candidates. This talk 31 | may also demonstrate generating wheels for more esoteric packages that are 32 | not related to Python, such as cmake, R, or even Python itself! 33 | 34 | ### Dive-in 35 | 36 | This talk will discuss the underlying layout of the wheels that are 37 | created and how these wheels are built to work well with other wheels 38 | created by conda-press. 
39 | 40 | This talk will also explain the underlying architecture of conda-press, and how 41 | typical workflows are implemented. Conda-press relies on a number of external, 42 | platform-specific command line utilities. Conda-press is largely written in 43 | the xonsh language to enable this. 44 | 45 | ### Defense 46 | 47 | This talk will also offer guidance against common pitfalls when creating 48 | wheels with conda-press. This includes the distinction between fat and skinny 49 | wheels, namespace differences between PyPI and conda-forge, and issues with 50 | prefix substitutions. 51 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # conda-press 2 | 3 | Press conda packages into wheels. 4 | 5 | The wheels created by conda-press are usable in a general Python 6 | setting, i.e. outside of a conda managed environment. 7 | 8 | ## Quick start 9 | 10 | Run the `conda press` command and point it at either an artifact 11 | file or spec. For example: 12 | 13 | ``` 14 | # from artifact spec, produces single wheel, including all non-Python requirements 15 | $ conda press --subdir linux-64 --skip-python --fatten scikit-image=0.15.0=py37hb3f55d8_2 16 | 17 | # from artifact file, produces a single wheel 18 | $ conda press numpy-1.14.6-py36he5ce36f_1201.tar.bz2 19 | 20 | # from artifact spec, produces wheels for package and all requirements 21 | $ conda press --subdir linux-64 xz=5.2.4=h14c3975_1001 22 | 23 | # merge many wheels into a single wheel 24 | $ conda press --merge *.whl --output scikit_image-0.15.0-2_py37hb3f55d8-cp37-cp37m-linux_x86_64.whl 25 | ``` 26 | 27 | ## What we are solving 28 | 29 | conda-press allows us to build out a pip-usable package index which is 30 | ABI compatible with conda packages. This can help address the following 31 | issues / workflows: 32 | 33 | **Issue 1:** 34 | 35 | It can be very difficult to build wheels for packages that have C extensions. 36 | Also, the provenance of wheels with C extentions can be hard to know (who built it, 37 | how it was built, etc.). Conda-press enables community building of wheels, 38 | based on conda-forge provided packages. This should make it very easy to build a 39 | valid wheel. 40 | 41 | **Issue 2:** 42 | 43 | Many packages with compiled extensions do not have wheels available on one or more 44 | popular platforms (Windows, Mac, Linux). This is because building wheels can 45 | be very difficult. Conda has a lot of packages that are not available as wheels otherwise. 46 | Conda-press allows these packages to easily become generally usable wheels. 47 | 48 | **Issue 3:** Some people want a package index built on newer ABIs than `manylinux` 49 | 50 | 51 | ## How to install 52 | 53 | From conda: 54 | 55 | ``` 56 | conda install -c conda-forge conda-press 57 | ``` 58 | 59 | From the source code: 60 | 61 | ``` 62 | $ pip install --no-deps . 63 | ``` 64 | 65 | ## More technical details about what we are doing 66 | 67 | What conda-press does is take an artifact or spec, and turn it into wheel(s). 68 | When using pip to install such a wheel, it shoves the root level of the artifact 69 | into site-packages. It then provides wrapper / proxy scripts that point to 70 | site-packages/bin so that you may run executables and scripts. 71 | 72 | ## How to get involved 73 | 74 | Please feel free to open up a PR or open an issue! 
75 | -------------------------------------------------------------------------------- /CHANGELOG.md: -------------------------------------------------------------------------------- 1 | ====================== 2 | conda-press Change Log 3 | ====================== 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | ## v0.0.6 14 | **Fixed:** 15 | 16 | * When `conda-press` was executed with some empty values for `--add-deps` and `--exclude-deps` it raises and error 17 | of None type is not iterable. 18 | * Fixed issue with running `--merge` on a list of wheels. 19 | 20 | **Authors:** 21 | 22 | * Anthony Scopatz 23 | * Marcelo Duarte Trevisani 24 | * Brian Kelley 25 | 26 | 27 | 28 | ## v0.0.5 29 | **Added:** 30 | 31 | * Added option `--config` which accepts a path to a yaml file with the configuration to run `conda-press`. 32 | * The `YAML` file passed using the option `--config` also accepts the 33 | configuration to be inside of the key `conda_press`. 34 | 35 | **Changed:** 36 | 37 | * Add dataclass `Config` following the `Introduce Parameter Object` design pattern. 38 | `Config` is responsible to hold the `conda-press` configuration. 39 | Refactored internal classes/functions to use the new approach. 40 | 41 | **Authors:** 42 | 43 | * Anthony Scopatz 44 | * Marcelo Duarte Trevisani 45 | 46 | 47 | 48 | ## v0.0.4 49 | **Added:** 50 | 51 | * `wheels.fatten_from_seen()` now has a `skipped_deps` keyword argument 52 | * Add new option `--only-pypi` which will remove any dependency which is not available on PyPi. 53 | 54 | **Changed:** 55 | 56 | * Fattened wheels now respect `--exclude-deps` 57 | 58 | **Fixed:** 59 | 60 | * Fix file types which might be uncompressed when using `from_tarball` method. 61 | * Removed `WHEEL`, `METADATA`, and `RECORD` files from fat wheels. 62 | 63 | **Authors:** 64 | 65 | * Anthony Scopatz 66 | * Marcelo Duarte Trevisani 67 | 68 | 69 | 70 | ## v0.0.3 71 | **Added:** 72 | 73 | * Add plugin ``pytest-azurepipelines`` to show test reports on Azure Pipelines 74 | * Add option `--add-deps` to be able to add new dependencies to the wheel. 75 | * Add option `--exclude-deps`. With this option the user will be able to exclude dependencies from the artifacts. 76 | 77 | **Fixed:** 78 | 79 | * Removed unused imports 80 | 81 | **Authors:** 82 | 83 | * Anthony Scopatz 84 | * Marcelo Duarte Trevisani 85 | 86 | 87 | 88 | ## v0.0.2 89 | **Added:** 90 | 91 | * Initial support for RPATH fix-up on macOS 92 | 93 | **Fixed:** 94 | 95 | * Requirements listed in the wheel metadata are now removed approriately 96 | if Python was skipped or if the wheel is a merger of other wheels. 97 | * Don't list python as a runtime requirement when building with '--skip-python' 98 | * Apply RPATH fixups to '.so' libraries on macOS, because that is CPython extension default 99 | * Fixed issue with noarch Python version reading. 100 | 101 | **Authors:** 102 | 103 | * Anthony Scopatz 104 | * Isaiah Norton 105 | 106 | 107 | 108 | ## v0.0.1 109 | **Added:** 110 | 111 | * Initial version of conda-press! 112 | 113 | **Authors:** 114 | 115 | * Anthony Scopatz 116 | * P. L. 
Lim 117 | * Julien Schueller 118 | 119 | 120 | 121 | -------------------------------------------------------------------------------- /tests/test_config.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | from ruamel import yaml 3 | 4 | from conda_press.config import Config, get_config_by_yaml 5 | 6 | 7 | @pytest.fixture 8 | def config_obj(tmpdir): 9 | return Config( 10 | subdir="SUBDIR", 11 | output="OUTPUT", 12 | channels=["FOO-CHANNEL", "CHANNEL2"], 13 | fatten=True, 14 | skip_python=True, 15 | strip_symbols=False, 16 | merge=True, 17 | exclude_deps={"EXCLUDE1", "EXCLUDE2"}, 18 | add_deps={"ADD1", "ADD2"}, 19 | only_pypi=True, 20 | include_requirements=False, 21 | ) 22 | 23 | 24 | def test_fields(config_obj): 25 | assert config_obj.get_all_channels() == [ 26 | "FOO-CHANNEL", 27 | "CHANNEL2", 28 | "conda-forge", 29 | "anaconda", 30 | "main", 31 | "r", 32 | ] 33 | assert config_obj.channels == ["FOO-CHANNEL", "CHANNEL2"] 34 | assert config_obj.get_all_subdir() == ["SUBDIR", "noarch"] 35 | assert config_obj.subdir == "SUBDIR" 36 | assert config_obj.output == "OUTPUT" 37 | assert config_obj.fatten 38 | assert config_obj.skip_python 39 | assert not config_obj.strip_symbols 40 | assert config_obj.merge 41 | assert config_obj.exclude_deps == {"EXCLUDE1", "EXCLUDE2"} 42 | assert config_obj.add_deps == {"ADD1", "ADD2"} 43 | assert config_obj.only_pypi 44 | assert not config_obj.include_requirements 45 | 46 | 47 | def test_clean_deps(config_obj): 48 | config_obj.add_deps = {"DEP1", "DEP2", "DEP3", "DEP4"} 49 | config_obj.exclude_deps = {"DEP2", "DEP4"} 50 | all_deps = ["DEP0", "DEP1", "DEP2", "DEP5"] 51 | assert config_obj.clean_deps(all_deps) == {"DEP0", "DEP1", "DEP3", "DEP5"} 52 | 53 | 54 | DICT_CONFIG_CONTENT = { 55 | "subdir": "SUBDIR", 56 | "output": "OUTPUT", 57 | "channels": ["FOO-CHANNEL", "CHANNEL2"], 58 | "fatten": True, 59 | "skip_python": True, 60 | "strip_symbols": False, 61 | "merge": True, 62 | "exclude_deps": ["EXCLUDE1", "EXCLUDE2"], 63 | "add_deps": ["ADD1", "ADD2"], 64 | "only_pypi": True, 65 | "include_requirements": False, 66 | } 67 | 68 | 69 | @pytest.mark.parametrize( 70 | "config_content", [DICT_CONFIG_CONTENT, {"conda_press": DICT_CONFIG_CONTENT}] 71 | ) 72 | def test_populate_config_by_yaml(config_content, tmpdir): 73 | yaml_path = tmpdir.join("TEST.yaml") 74 | yaml_path.write(yaml.dump(config_content)) 75 | config_read = get_config_by_yaml(str(yaml_path)) 76 | assert config_read.channels == ["FOO-CHANNEL", "CHANNEL2"] 77 | assert config_read.get_all_channels() == [ 78 | "FOO-CHANNEL", 79 | "CHANNEL2", 80 | "conda-forge", 81 | "anaconda", 82 | "main", 83 | "r", 84 | ] 85 | assert config_read.subdir == "SUBDIR" 86 | assert config_read.get_all_subdir() == ["SUBDIR", "noarch"] 87 | assert config_read.output == "OUTPUT" 88 | assert config_read.fatten 89 | assert config_read.skip_python 90 | assert not config_read.strip_symbols 91 | assert config_read.merge 92 | assert config_read.exclude_deps == {"EXCLUDE1", "EXCLUDE2"} 93 | assert config_read.add_deps == {"ADD1", "ADD2"} 94 | assert config_read.only_pypi 95 | assert not config_read.include_requirements 96 | -------------------------------------------------------------------------------- /conda_press/config.py: -------------------------------------------------------------------------------- 1 | import os 2 | import platform 3 | import tempfile 4 | from dataclasses import asdict, dataclass, field 5 | from typing import List, Set, Union 6 | 7 | CACHE_DIR = 
os.path.join(tempfile.gettempdir(), "artifact-cache") 8 | DEFAULT_CHANNELS = ("conda-forge", "anaconda", "main", "r") 9 | SYSTEM = platform.system() 10 | if SYSTEM == "Linux": 11 | SO_EXT = ".so" 12 | elif SYSTEM == "Darwin": 13 | SO_EXT = ".dylib" 14 | elif SYSTEM == "Windows": 15 | SO_EXT = ".dll" 16 | else: 17 | raise ValueError(f"System {SYSTEM} is not supported.") 18 | 19 | 20 | @dataclass(init=True, repr=True, eq=True, order=False) 21 | class Config: 22 | subdir: Union[str, List[str]] = field(default_factory=list) 23 | channels: List[str] = field(default_factory=list) 24 | output: str = field(default=None) 25 | exclude_deps: Set[str] = field(default_factory=set) 26 | add_deps: Set[str] = field(default_factory=set) 27 | skip_python: bool = False 28 | strip_symbols: bool = True 29 | fatten: bool = False 30 | merge: bool = False 31 | only_pypi: bool = False 32 | include_requirements: bool = True 33 | 34 | def get_all_channels(self): 35 | return self.channels + list(DEFAULT_CHANNELS) 36 | 37 | def get_all_subdir(self): 38 | if isinstance(self.subdir, str): 39 | return [self.subdir, "noarch"] 40 | return self.subdir + ["noarch"] 41 | 42 | def clean_deps(self, list_deps: Union[Set[str], List[str]]) -> Set[str]: 43 | """This method is responsible to remove the excluded dependencies and 44 | add the new dependencies in a list of dependencies received. 45 | 46 | Parameters 47 | ---------- 48 | list_deps : array_like 49 | Receives a set or a list of dependencies 50 | 51 | Returns 52 | ------- 53 | set 54 | Returns a set with the dependencies. 55 | """ 56 | return set(list_deps).union(self.add_deps).difference(self.exclude_deps) 57 | 58 | 59 | def get_config_by_yaml(yaml_path, config=None): 60 | """Free function responsible to create or fill a `Config` object 61 | with the content of a yaml file. 62 | 63 | Parameters 64 | ---------- 65 | yaml_path : str 66 | Path to the YAML file 67 | config : Config, optional 68 | If it is received a Config object it will be filled otherwise 69 | this function will create a new Config object. 
70 | 71 | Returns 72 | ------- 73 | Config 74 | Config object with the yaml configuration 75 | 76 | """ 77 | from ruamel.yaml import YAML 78 | 79 | if config is None: 80 | config = Config() 81 | 82 | with open(yaml_path, "r") as config_file: 83 | yaml = YAML(typ="safe").load(config_file) 84 | 85 | if yaml is None: 86 | yaml = dict() 87 | 88 | if "conda_press" in yaml: 89 | yaml = yaml["conda_press"] 90 | 91 | def convert_to_list(yaml_var): 92 | if isinstance(yaml_var, str): 93 | return [yaml_var] 94 | return yaml_var 95 | 96 | def yaml_attr(attr): 97 | if yaml.get(attr) is not None: 98 | return yaml.get(attr) 99 | return asdict(config)[attr] 100 | 101 | config.subdir = yaml_attr("subdir") 102 | config.output = yaml_attr("output") 103 | config.channels = convert_to_list(yaml_attr("channels")) 104 | config.fatten = yaml_attr("fatten") 105 | config.skip_python = yaml_attr("skip_python") 106 | config.strip_symbols = yaml_attr("strip_symbols") 107 | config.merge = yaml_attr("merge") 108 | 109 | def convert_to_set(yaml_var): 110 | if isinstance(yaml_var, str): 111 | return {yaml_var} 112 | if isinstance(yaml_var, list): 113 | return set(yaml_var) 114 | return yaml_var 115 | 116 | config.add_deps = convert_to_set(yaml_attr("add_deps")) 117 | config.exclude_deps = convert_to_set(yaml_attr("exclude_deps")) 118 | config.only_pypi = yaml_attr("only_pypi") 119 | config.include_requirements = yaml_attr("include_requirements") 120 | return config 121 | -------------------------------------------------------------------------------- /conda_press/main.xsh: -------------------------------------------------------------------------------- 1 | """CLI entry point for conda-press""" 2 | import os 3 | from argparse import ArgumentParser 4 | 5 | from conda_press.config import Config, get_config_by_yaml 6 | from conda_press.wheel import Wheel, merge, fatten_from_seen 7 | from conda_press.condatools import ( 8 | artifact_to_wheel, 9 | artifact_ref_dependency_tree_to_wheels, 10 | ) 11 | 12 | 13 | def main(args=None): 14 | p = ArgumentParser("conda-press") 15 | p.add_argument("files", nargs="+") 16 | p.add_argument("--subdir", dest="subdir", default=None) 17 | p.add_argument("--skip-python", dest="skip_python", default=False, 18 | action="store_true", help="Skips Python packages and " 19 | "their dependencies.") 20 | p.add_argument("--strip-symbols", dest="strip_symbols", default=True, 21 | action="store_true", help="strips symbols from libraries (default)") 22 | p.add_argument("--no-strip-symbols", "--dont-strip-symbols", dest="strip_symbols", 23 | action="store_false", help="don't strip symbols from libraries") 24 | p.add_argument("--channels", dest="channels", nargs="+", default=()) 25 | p.add_argument("--fatten", dest="fatten", default=False, action="store_true", 26 | help="merges the wheel with all of its dependencies.") 27 | p.add_argument("--merge", dest="merge", default=False, action="store_true", 28 | help="merges a list of wheels into a single wheel") 29 | p.add_argument("-o", "--output", dest="output", default=None, 30 | help="Output file name for merge/fatten. 
If not given, " 31 | "this will be the last wheel listed.") 32 | p.add_argument("--exclude-deps", dest="exclude_deps", default=None, nargs="+", 33 | help="Exclude dependencies from conda package.") 34 | p.add_argument("--add-deps", dest="add_deps", default=None, nargs="+", 35 | help="Add dependencies to the wheel.") 36 | p.add_argument( 37 | "--only-pypi", 38 | dest="only_pypi", 39 | default=False, 40 | action="store_true", 41 | help="Remove dependencies which are not on PyPi when converting conda " 42 | "package to Python wheel.", 43 | ) 44 | p.add_argument( 45 | "--config", 46 | dest="config_file", 47 | default=None, 48 | help="Receives an yaml configuration file which will set the options for conda-press.\n" 49 | "This option has high priority over the others to configure conda-press.", 50 | ) 51 | ns = p.parse_args(args=args) 52 | 53 | config = Config( 54 | output=ns.output, 55 | subdir=ns.subdir, 56 | channels=list(ns.channels), 57 | exclude_deps=set(ns.exclude_deps) if ns.exclude_deps else set(), 58 | add_deps=set(ns.add_deps) if ns.add_deps else set(), 59 | merge=ns.merge, 60 | fatten=ns.fatten, 61 | strip_symbols=ns.strip_symbols, 62 | skip_python=ns.skip_python, 63 | only_pypi=ns.only_pypi, 64 | ) 65 | 66 | if ns.config_file: 67 | get_config_by_yaml(ns.config_file, config) 68 | 69 | if ns.merge: 70 | wheels = {f: Wheel.from_file(f) for f in ns.files} 71 | output = ns.files[-1] if ns.output is None else ns.output 72 | if output in wheels: 73 | wheels[output]._top = True 74 | merge(wheels, output=output) 75 | return 76 | 77 | run_convert_wheel(ns.files, config) 78 | 79 | 80 | def run_convert_wheel(files, config): 81 | for fname in files: 82 | if "=" in fname: 83 | print(f'Converting {fname} tree to wheels') 84 | seen = artifact_ref_dependency_tree_to_wheels(fname, config=config) 85 | if config.fatten: 86 | fatten_from_seen( 87 | seen, output=config.output, skipped_deps=config.exclude_deps 88 | ) 89 | elif os.path.isfile(fname): 90 | print(f'Converting {fname} to wheel') 91 | artifact_to_wheel(fname, config=config) 92 | else: 93 | raise ValueError(f"File receive is not valid.\nFiles: {fname}\n") 94 | 95 | 96 | if __name__ == "__main__": 97 | main() 98 | -------------------------------------------------------------------------------- /docs/Makefile: -------------------------------------------------------------------------------- 1 | # Makefile for Sphinx documentation 2 | # 3 | 4 | # You can set these variables from the command line. 5 | SPHINXOPTS = 6 | SPHINXBUILD = sphinx-build 7 | PAPER = 8 | BUILDDIR = _build 9 | RELEASE = v0.0.6 10 | 11 | # Internal variables. 12 | PAPEROPT_a4 = -D latex_paper_size=a4 13 | PAPEROPT_letter = -D latex_paper_size=letter 14 | ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . 
15 | 16 | DOCREPONAME = rever-docs 17 | DOCREPOURL = git@github.com:ergs/rever-docs.git 18 | DOCREPOBRANCH = gh-pages 19 | 20 | .PHONY: help clean html dirhtml pickle json htmlhelp qthelp latex changes linkcheck doctest 21 | 22 | help: 23 | @echo "Please use \`make ' where is one of" 24 | @echo " html to make standalone HTML files" 25 | @echo " dirhtml to make HTML files named index.html in directories" 26 | @echo " pickle to make pickle files" 27 | @echo " json to make JSON files" 28 | @echo " htmlhelp to make HTML files and a HTML help project" 29 | @echo " qthelp to make HTML files and a qthelp project" 30 | @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" 31 | @echo " changes to make an overview of all changed/added/deprecated items" 32 | @echo " linkcheck to check all external links for integrity" 33 | @echo " doctest to run all doctests embedded in the documentation (if enabled)" 34 | 35 | clean: 36 | -rm -rf $(BUILDDIR)/* 37 | 38 | html: 39 | $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html 40 | @echo 41 | @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." 42 | 43 | dirhtml: 44 | $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml 45 | @echo 46 | @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml." 47 | 48 | pickle: 49 | $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle 50 | @echo 51 | @echo "Build finished; now you can process the pickle files." 52 | 53 | json: 54 | $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json 55 | @echo 56 | @echo "Build finished; now you can process the JSON files." 57 | 58 | htmlhelp: 59 | $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp 60 | @echo 61 | @echo "Build finished; now you can run HTML Help Workshop with the" \ 62 | ".hhp project file in $(BUILDDIR)/htmlhelp." 63 | 64 | qthelp: 65 | $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp 66 | @echo 67 | @echo "Build finished; now you can run "qcollectiongenerator" with the" \ 68 | ".qhcp project file in $(BUILDDIR)/qthelp, like this:" 69 | @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/metasci.qhcp" 70 | @echo "To view the help file:" 71 | @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/metasci.qhc" 72 | 73 | latex: 74 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 75 | @echo 76 | @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex." 77 | @echo "Run \`make all-pdf' or \`make all-ps' in that directory to" \ 78 | "run these through (pdf)latex." 79 | 80 | changes: 81 | $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes 82 | @echo 83 | @echo "The overview file is in $(BUILDDIR)/changes." 84 | 85 | linkcheck: 86 | $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck 87 | @echo 88 | @echo "Link check complete; look for any errors in the above output " \ 89 | "or in $(BUILDDIR)/linkcheck/output.txt." 90 | 91 | doctest: 92 | $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest 93 | @echo "Testing of doctests in the sources finished, look at the " \ 94 | "results in $(BUILDDIR)/doctest/output.txt." 
95 | 96 | push-latest: 97 | cd $(BUILDDIR) && \ 98 | test -d $(DOCREPONAME) || git clone $(DOCREPOURL) $(DOCREPONAME) && \ 99 | cd $(DOCREPONAME) && \ 100 | git pull origin $(DOCREPOBRANCH) && \ 101 | test -d latest || mkdir latest && touch latest/_ && \ 102 | rm -r latest/* && \ 103 | cp -r ../html/* latest/ && \ 104 | git add latest/ && \ 105 | git commit -am "Pushed latest docs at $(date)" && \ 106 | git push 107 | 108 | push-release: 109 | cd $(BUILDDIR) && \ 110 | test -d $(DOCREPONAME) || git clone $(DOCREPOURL) $(DOCREPONAME) && \ 111 | cd $(DOCREPONAME) && \ 112 | git pull origin $(DOCREPOBRANCH) && \ 113 | test -d $(RELEASE) || mkdir $(RELEASE) && touch $(RELEASE)/_ && \ 114 | rm -r $(RELEASE)/* && \ 115 | cp -r ../html/* $(RELEASE)/ && \ 116 | git add $(RELEASE)/ && \ 117 | git commit -am "Pushed $(RELEASE) docs at $(date)" && \ 118 | git push 119 | 120 | push-root: 121 | cd $(BUILDDIR) && \ 122 | test -d $(DOCREPONAME) || git clone $(DOCREPOURL) $(DOCREPONAME) && \ 123 | cd $(DOCREPONAME) && \ 124 | git pull origin $(DOCREPOBRANCH) && \ 125 | rm -rf api previous _sources _static *.html *.js *.inv && \ 126 | cp -r ../html/* . && \ 127 | git add . && \ 128 | git commit -am "Pushed root-level docs at $(date)" && \ 129 | git push 130 | 131 | -------------------------------------------------------------------------------- /docs/index.rst: -------------------------------------------------------------------------------- 1 | Conda-Press 2 | ============================ 3 | Press conda packages into wheels. 4 | 5 | The wheels created by conda-press are usable in a general Python 6 | setting, i.e. outside of a conda managed environment. 7 | 8 | Quick start 9 | ----------- 10 | 11 | Run the ``conda press`` command and point it at either an artifact 12 | file or spec. For example: 13 | 14 | .. code-block:: sh 15 | 16 | # from artifact spec, produces single wheel, including all non-Python requirements 17 | $ conda press --subdir linux-64 --skip-python --fatten scikit-image=0.15.0=py37hb3f55d8_2 18 | 19 | # from artifact file, produces a single wheel 20 | $ conda press numpy-1.14.6-py36he5ce36f_1201.tar.bz2 21 | 22 | # from artifact spec, produces wheels for package and all requirements 23 | $ conda press --subdir linux-64 xz=5.2.4=h14c3975_1001 24 | 25 | # merge many wheels into a single wheel 26 | $ conda press --merge *.whl -output scikit_image-0.15.0-2_py37hb3f55d8-cp37-cp37m-linux_x86_64.whl 27 | 28 | What we are solving 29 | ------------------- 30 | conda-press allows us to build out a pip-usable package index which is 31 | ABI compatible with conda packages. This can help address the following 32 | issues / workflows: 33 | 34 | **Issue 1:** 35 | 36 | It can be very difficult to build wheels for packages that have C extensions. 37 | Also, the provenance of wheels with C extentions can be hard to know (who built it, 38 | how it was built, etc.). Conda-press enables community building of wheels, 39 | based on conda-forge provided packages. This should make it very easy to build a 40 | valid wheel. 41 | 42 | **Issue 2:** 43 | 44 | Many packages with compiled extensions do not have wheels available on one or more 45 | popular platforms (Windows, Mac, Linux). This is because building wheels can 46 | be very difficult. Conda has a lot of packages that are not available as wheels otherwise. 47 | Conda-press allows these packages to easily become generally usable wheels. 48 | 49 | **Issue 3:** 50 | 51 | Some people want a package index built on newer ABIs than `manylinux`. 
52 | 53 | **Issue 4:** 54 | 55 | Conda-Press addresses the issue of making shared library dependencies loadable at runtime 56 | by having a unix-like directory structure inside of the `site-packages/` directory. This 57 | allows wheels to have a common `$RPATH` that they can all point to. 58 | 59 | 60 | How to install 61 | -------------- 62 | 63 | From conda: 64 | 65 | .. code-block:: sh 66 | 67 | conda install -c conda-forge conda-press 68 | 69 | From the source code: 70 | 71 | .. code-block:: sh 72 | 73 | $ pip install --no-deps . 74 | 75 | More technical details about what we are doing 76 | ---------------------------------------------- 77 | What conda-press does is take an artifact or spec, and turn it into wheel(s). 78 | When using pip to install such a wheel, it shoves the root level of the artifact 79 | into site-packages. It then provides wrapper / proxy scripts that point to 80 | site-packages/bin so that you may run executables and scripts. 81 | 82 | Contents 83 | -------- 84 | **Installation:** 85 | 86 | .. toctree:: 87 | :titlesonly: 88 | :maxdepth: 1 89 | 90 | dependencies 91 | 92 | 93 | **Conferences:** 94 | 95 | .. toctree:: 96 | :titlesonly: 97 | :maxdepth: 1 98 | 99 | confs/pydata-nyc2019.md 100 | 101 | **Development Spiral:** 102 | 103 | .. toctree:: 104 | :titlesonly: 105 | :maxdepth: 1 106 | 107 | api/index 108 | devguide/ 109 | changelog 110 | 111 | 112 | .. include:: dependencies.rst 113 | 114 | 115 | Contributing 116 | ------------- 117 | We highly encourage contributions to conda-press! If you would like to contribute, 118 | it is as easy as forking the repository on GitHub, making your changes, and 119 | issuing a pull request. If you have any questions about this process don't 120 | hesitate to ask on the `Gitter `_ channel. 121 | 122 | See the `Developer's Guide `_ for more information about contributing. 123 | 124 | ============= 125 | Helpful Links 126 | ============= 127 | 128 | * `Documentation `_ 129 | * `Gitter `_ 130 | * `GitHub Repository `_ 131 | * :ref:`genindex` 132 | * :ref:`modindex` 133 | * :ref:`search` 134 | 135 | .. 
raw:: html 136 | 137 | Fork me on GitHub 138 | 139 | 197 | -------------------------------------------------------------------------------- /tests/conftest.py: -------------------------------------------------------------------------------- 1 | import os 2 | import sys 3 | import glob 4 | import tempfile 5 | import builtins 6 | import subprocess 7 | 8 | import pytest 9 | import requests 10 | 11 | from lazyasd import lazyobject 12 | from xonsh.lib.os import rmtree 13 | 14 | from conda.api import SubdirData 15 | 16 | from conda_press.config import Config 17 | from conda_press.wheel import fatten_from_seen 18 | from conda_press.condatools import artifact_to_wheel, CACHE_DIR, artifact_ref_dependency_tree_to_wheels 19 | 20 | 21 | PLATFORM_TO_SUBDIR = { 22 | "linux": "linux-64", 23 | "win32": "win-64", 24 | "darwin": "osx-64", 25 | } 26 | 27 | 28 | @lazyobject 29 | def subdir_data_arch(): 30 | subdir = PLATFORM_TO_SUBDIR[sys.platform] 31 | return SubdirData('conda-forge/' + subdir) 32 | 33 | 34 | @lazyobject 35 | def subdir_data_noarch(): 36 | return SubdirData('conda-forge/noarch') 37 | 38 | 39 | def download_artifact(artifact_ref): 40 | if artifact_ref.startswith('noarch/'): 41 | noarch = True 42 | subdir_data = subdir_data_noarch 43 | _, _, artifact_ref = artifact_ref.partition("/") 44 | else: 45 | noarch = False 46 | subdir_data = subdir_data_arch 47 | pkg_records = subdir_data.query(artifact_ref) 48 | 49 | # if a python package, get only the ones matching this versuon of python 50 | pytag = "py{vi.major}{vi.minor}".format(vi=sys.version_info) 51 | if noarch: 52 | pass 53 | else: 54 | filtered_records = [] 55 | for r in pkg_records: 56 | if 'py' in r.build: 57 | if pytag in r.build: 58 | filtered_records.append(r) 59 | else: 60 | filtered_records.append(r) 61 | pkg_records = filtered_records 62 | if pkg_records: 63 | pkg_record = pkg_records[-1] 64 | else: 65 | raise RuntimeError(f"could not find {artifact_ref} on conda-forge") 66 | os.makedirs(CACHE_DIR, exist_ok=True) 67 | local_fn = os.path.join(CACHE_DIR, pkg_record.fn) 68 | if os.path.isfile(local_fn): 69 | return local_fn 70 | resp = requests.get(pkg_record.url) 71 | with open(local_fn, 'wb') as f: 72 | f.write(resp.content) 73 | return local_fn 74 | 75 | 76 | @pytest.fixture() 77 | def pip_install_artifact(request): 78 | wheel = None 79 | test_env = tempfile.mkdtemp(prefix="test-env") 80 | def create_wheel_and_install(artifact_ref, include_requirements=True): 81 | nonlocal wheel 82 | artifact_path = download_artifact(artifact_ref) 83 | wheel = artifact_to_wheel(artifact_path, Config(include_requirements=include_requirements)) 84 | subprocess.run(['virtualenv', test_env], check=True) 85 | if sys.platform.startswith('win'): 86 | site_packages = os.path.join(test_env, 'Lib', 'site-packages') 87 | code = f"{test_env}\\Scripts\\activate & pip install {wheel.filename}" 88 | subprocess.run(code, check=True, shell=True) 89 | else: 90 | site_packages = glob.glob(os.path.join(test_env, 'lib', 'python*', 'site-packages'))[0] 91 | code = f"source {test_env}/bin/activate; pip install {wheel.filename}" 92 | # uncomment the following when we handle dependencies 93 | #import_tests = os.path.join(wheel.basedir, 'info', 'test', 'run_test.py') 94 | #if os.path.isfile(import_tests): 95 | # code += f"; python {import_tests}" 96 | subprocess.run(["bash", "-c", code], check=True) 97 | return wheel, test_env, site_packages 98 | 99 | yield create_wheel_and_install 100 | if wheel is not None: 101 | wheel.clean() 102 | rmtree(test_env, force=True) 103 | wheels 
= glob.glob(os.path.join(os.path.dirname(__file__), "*.whl")) 104 | for w in wheels: 105 | os.remove(w) 106 | 107 | 108 | @pytest.fixture() 109 | def pip_install_artifact_tree(request): 110 | wheels = {} 111 | test_env = tempfile.mkdtemp(prefix="test-env") 112 | def create_wheels_and_install(artifact_ref, include_requirements=True, 113 | skip_python=False, fatten=False, skipped_deps=None): 114 | nonlocal wheels 115 | seen = artifact_ref_dependency_tree_to_wheels( 116 | artifact_ref, 117 | seen=wheels, 118 | config=Config( 119 | skip_python=skip_python, 120 | include_requirements=include_requirements, 121 | fatten=fatten, 122 | subdir=PLATFORM_TO_SUBDIR[sys.platform], 123 | ), 124 | ) 125 | if fatten: 126 | wheels = fatten_from_seen(seen, skipped_deps=skipped_deps) 127 | subprocess.run(['virtualenv', test_env], check=True) 128 | wheel_filenames = " ".join(reversed([w.filename for w in wheels.values() 129 | if w is not None])) 130 | if sys.platform.startswith('win'): 131 | site_packages = os.path.join(test_env, 'Lib', 'site-packages') 132 | code = f"{test_env}\\Scripts\\activate & pip install {wheel_filenames}" 133 | print("Running:\n " + code) 134 | subprocess.run(code, check=True, shell=True) 135 | else: 136 | site_packages = glob.glob(os.path.join(test_env, 'lib', 'python*', 'site-packages'))[0] 137 | code = f"source {test_env}/bin/activate; pip install {wheel_filenames}" 138 | # uncomment the following when we handle dependencies 139 | #import_tests = os.path.join(wheel.basedir, 'info', 'test', 'run_test.py') 140 | #if os.path.isfile(import_tests): 141 | # code += f"; python {import_tests}" 142 | print("Running:\n " + code) 143 | subprocess.run(["bash", "-c", code], check=True) 144 | return wheels, test_env, site_packages 145 | 146 | yield create_wheels_and_install 147 | for wheel in wheels.values(): 148 | if wheel is None: 149 | continue 150 | wheel.clean() 151 | rmtree(test_env, force=True) 152 | wheel_names = glob.glob(os.path.join(os.path.dirname(__file__), "*.whl")) 153 | for w in wheel_names: 154 | os.remove(w) 155 | 156 | 157 | @pytest.fixture() 158 | def xonsh(request): 159 | sess = builtins.__xonsh__ 160 | if sess.shell is None: 161 | from xonsh.shell import Shell 162 | sess.shell = Shell(sess.execer, ctx=sess.ctx, shell_type="none") 163 | return sess 164 | 165 | 166 | @pytest.fixture 167 | def data_folder(request): 168 | return os.path.join(os.path.dirname(request.module.__file__), "data") 169 | -------------------------------------------------------------------------------- /docs/devguide.rst: -------------------------------------------------------------------------------- 1 | .. _devguide: 2 | 3 | ================= 4 | Developer's Guide 5 | ================= 6 | Welcome to the conda-press developer's guide! This is a place for developers to 7 | place information that does not belong in the user's guide or the library 8 | reference but is useful or necessary for the next people that come along to 9 | develop conda-press. 10 | 11 | .. note:: All code changes must go through the pull request review procedure. 12 | 13 | 14 | Changelog 15 | ========= 16 | Pull requests will often have CHANGELOG entries associated with. However, 17 | to avoid excessive merge conflicts, please follow the following procedure: 18 | 19 | 1. Go into the ``news/`` directory, 20 | 2. Copy the ``TEMPLATE.rst`` file to another file in the ``news/`` directory. 21 | We suggest using the branchname:: 22 | 23 | $ cp TEMPLATE.rst branch.rst 24 | 25 | 3. 
Add your entries as a bullet pointed lists in your ``branch.rst`` file in 26 | the appropriate category. It is OK to leave the ``None`` entries for later 27 | use. 28 | 4. Commit your ``branch.rst``. 29 | 30 | Feel free to update this file whenever you want! Please don't use someone 31 | else's file name. All of the files in this ``news/`` directory will be merged 32 | automatically at release time. The ``None`` entries will be automatically 33 | filtered out too! 34 | 35 | 36 | Style Guide 37 | =========== 38 | Conda-press is a Xonsh & Python project, and so we use PEP8 (with some additions) to 39 | ensure consistency throughout the code base. 40 | 41 | ---------------------------------- 42 | Rules to Write By 43 | ---------------------------------- 44 | It is important to refer to things and concepts by their most specific name. 45 | When writing conda-press code or documentation please use technical terms 46 | appropriately. The following rules help provide needed clarity. 47 | 48 | ********** 49 | Interfaces 50 | ********** 51 | * User-facing APIs should be as generic and robust as possible. 52 | * Tests belong in the top-level ``tests`` directory. 53 | * Documentation belongs in the top-level ``docs`` directory. 54 | 55 | ************ 56 | Expectations 57 | ************ 58 | * Code must have associated tests and adequate documentation. 59 | * Have *extreme* empathy for your users. 60 | * Be selfish. Since you will be writing tests you will be your first user. 61 | 62 | ------------------- 63 | Python Style Guide 64 | ------------------- 65 | Conda-press uses `PEP8`_ for all Python code. The following rules apply where `PEP8`_ 66 | is open to interpretation. 67 | 68 | * Use absolute imports (``import conda-press.tools``) rather than explicit 69 | relative imports (``import .tools``). Implicit relative imports 70 | (``import tools``) are never allowed. 71 | * We use sphinx with the numpydoc extension to autogenerate API documentation. Follow 72 | the `numpydoc`_ standard for docstrings. 73 | * Simple functions should have simple docstrings. 74 | * Lines should be at most 80 characters long. The 72 and 79 character 75 | recommendations from PEP8 are not required here. 76 | * All Python code should be compliant with Python 3.6+. 77 | * Tests should be written with pytest using a procedural style. Do not use 78 | unittest directly or write tests in an object-oriented style. 79 | * Test generators make more dots and the dots must flow! 80 | 81 | How to Test 82 | ================ 83 | 84 | ---------------------------------- 85 | Dependencies 86 | ---------------------------------- 87 | 88 | Prep your environment for running the tests by installing ``pytest`` 89 | 90 | ---------------------------------- 91 | Running the Tests - Basic 92 | ---------------------------------- 93 | 94 | Run all the tests using pytest:: 95 | 96 | $ pytest 97 | 98 | Use "-q" to keep pytest from outputting a bunch of info for every test. 99 | 100 | ---------------------------------- 101 | Running the Tests - Advanced 102 | ---------------------------------- 103 | 104 | To perform all unit tests:: 105 | 106 | $ pytest 107 | 108 | If you want to run specific tests you can specify the test names to 109 | execute. For example to run test_aliases:: 110 | 111 | $ pytest test_aliases.py 112 | 113 | Note that you can pass multiple test names in the above examples:: 114 | 115 | $ pytest test_aliases.py test_environ.py 116 | 117 | Happy Testing! 
118 | 119 | 120 | How to Document 121 | ==================== 122 | Documentation takes many forms. This will guide you through the steps of 123 | successful documentation. 124 | 125 | ---------- 126 | Docstrings 127 | ---------- 128 | No matter what language you are writing in, you should always have 129 | documentation strings along with you code. This is so important that it is 130 | part of the style guide. When writing in Python, your docstrings should be 131 | in reStructured Text using the `numpydoc`_ format. 132 | 133 | ------------------------ 134 | Auto-Documentation Hooks 135 | ------------------------ 136 | The docstrings that you have written will automatically be connected to the 137 | website, once the appropriate hooks have been setup. At this stage, all 138 | documentation lives within conda-press's top-level ``docs`` directory. 139 | We uses the sphinx tool to manage and generate the documentation, which 140 | you can learn about from `the sphinx website `_. 141 | If you want to generate the documentation, first conda-press itself must be installed 142 | and then you may run the following command from the ``docs`` dir: 143 | 144 | .. code-block:: console 145 | 146 | ~/conda-press/docs $ make html 147 | 148 | For each new 149 | module, you will have to supply the appropriate hooks. This should be done the 150 | first time that the module appears in a pull request. From here, call the 151 | new module ``mymod``. The following explains how to add hooks. 152 | 153 | ------------------------ 154 | Python Hooks 155 | ------------------------ 156 | Python documentation lives in the ``docs/api`` directory. 157 | First, create a file in this directory that represents the new module called 158 | ``mymod.rst``. 159 | The ``docs/api`` directory matches the structure of the ``conda_press/`` directory. 160 | So if your module is in a sub-package, you'll need to go into the sub-package's 161 | directory before creating ``mymod.rst``. 162 | The contents of this file should be as follows: 163 | 164 | **mymod.rst:** 165 | 166 | .. code-block:: rst 167 | 168 | .. _conda_press_mymod: 169 | 170 | ======================================= 171 | My Awesome Module -- :mod:`conda_press.mymod` 172 | ======================================= 173 | 174 | .. currentmodule:: conda_press.mymod 175 | 176 | .. automodule:: conda_press.mymod 177 | :members: 178 | 179 | This will discover all of the docstrings in ``mymod`` and create the 180 | appropriate webpage. Now, you need to hook this page up to the rest of the 181 | website. 182 | 183 | Go into the ``index.rst`` file in ``docs/api/`` or other subdirectory and add 184 | ``mymod`` to the appropriate ``toctree`` (which stands for table-of-contents 185 | tree). Note that every sub-package has its own ``index.rst`` file. 186 | 187 | 188 | Building the Website 189 | =========================== 190 | 191 | Building the website/documentation requires the following dependencies: 192 | 193 | #. `Sphinx `_ 194 | #. `Cloud Sphinx Theme `_ 195 | #. recommonmark 196 | 197 | ----------------------------------- 198 | Procedure for modifying the website 199 | ----------------------------------- 200 | The conda-press website source files are located in the ``docs`` directory. 201 | A developer first makes necessary changes, then rebuilds the website locally 202 | by executing the command:: 203 | 204 | $ make html 205 | 206 | This will generate html files for the website in the ``_build/html/`` folder. 
207 | The developer may view the local changes by opening these files with their 208 | favorite browser, e.g.:: 209 | 210 | $ google-chrome _build/html/index.html 211 | 212 | Once the developer is satisfied with the changes, the changes should be 213 | committed and pull-requested per usual. Once the pull request is accepted, the 214 | developer can push their local changes directly to the website by:: 215 | 216 | $ make push-root 217 | 218 | Branches and Releases 219 | ============================= 220 | Mainline conda-press development occurs on the ``master`` branch. Other branches 221 | may be used for feature development (topical branches) or to represent 222 | past and upcoming releases. 223 | 224 | 225 | Document History 226 | =================== 227 | Portions of this page have been forked from the PyNE and Xonsh documentation, 228 | Copyright 2015-2016, the xonsh developers. All rights reserved. 229 | Copyright 2011-2015, the PyNE Development Team. All rights reserved. 230 | 231 | .. _PEP8: https://www.python.org/dev/peps/pep-0008/ 232 | .. _numpydoc: https://github.com/numpy/numpy/blob/master/doc/HOWTO_DOCUMENT.rst.txt 233 | -------------------------------------------------------------------------------- /docs/conf.py: -------------------------------------------------------------------------------- 1 | import os 2 | import sys 3 | import glob 4 | import inspect 5 | import importlib 6 | 7 | 8 | def setup(sphinx): 9 | from xonsh.main import setup 10 | setup() 11 | from xonsh.pyghooks import XonshConsoleLexer 12 | sphinx.add_lexer("xonshcon", XonshConsoleLexer()) 13 | 14 | 15 | # -- General configuration ----------------------------------------------------- 16 | 17 | # Documentation is being built on readthedocs, this will be true. 18 | on_rtd = os.environ.get('READTHEDOCS', None) == 'True' 19 | 20 | 21 | # Add any Sphinx extension module names here, as strings. They can be extensions 22 | # coming with Sphinx (named 'sphinx.ext.*') or your custom ones. 23 | extensions = ['sphinx.ext.autodoc', 'sphinx.ext.doctest', 'sphinx.ext.imgmath', 24 | 'sphinx.ext.inheritance_diagram', 'sphinx.ext.viewcode', 25 | #'sphinx.ext.autosummary', 26 | 'numpydoc', 27 | 'recommonmark', 28 | ] 29 | 30 | # Add any paths that contain templates here, relative to this directory. 31 | templates_path = ['_templates'] 32 | 33 | # The suffix of source filenames. 34 | source_suffix = { 35 | '.rst': 'restructuredtext', 36 | '.txt': 'markdown', 37 | '.md': 'markdown', 38 | } 39 | 40 | # The encoding of source files. 41 | #source_encoding = 'utf-8' 42 | 43 | # The master toctree document. 44 | master_doc = 'index' 45 | 46 | # General information about the project. 47 | project = u'conda-press' 48 | copyright = u'2019, Anthony Scopatz' 49 | 50 | # The version info for the project you're documenting, acts as replacement for 51 | # |version| and |release|, also used in various other places throughout the 52 | # built documents. 53 | # 54 | # The full version, including alpha/beta/rc tags. 55 | release = '0.0.6' 56 | 57 | # The short X.Y version. 58 | version = release.rsplit('.',1)[0] 59 | 60 | # The language for content autogenerated by Sphinx. Refer to documentation 61 | # for a list of supported languages. 62 | #language = None 63 | 64 | # There are two options for replacing |today|: either, you set today to some 65 | # non-false value, then it is used: 66 | #today = '' 67 | # Else, today_fmt is used as the format for a strftime call. 
68 | #today_fmt = '%B %d, %Y' 69 | 70 | # List of documents that shouldn't be included in the build. 71 | exclude_patterns = ['api/blank.rst'] 72 | 73 | # List of directories, relative to source directory, that shouldn't be searched 74 | # for source files. 75 | exclude_trees = [] 76 | 77 | # The reST default role (used for this markup: `text`) to use for all documents. 78 | #default_role = None 79 | 80 | # If true, '()' will be appended to :func: etc. cross-reference text. 81 | add_function_parentheses = True 82 | 83 | # If true, the current module name will be prepended to all description 84 | # unit titles (such as .. function::). 85 | #add_module_names = True 86 | 87 | # If true, sectionauthor and moduleauthor directives will be shown in the 88 | # output. They are ignored by default. 89 | #show_authors = False 90 | 91 | # The name of the Pygments (syntax highlighting) style to use. 92 | #pygments_style = 'sphinx' 93 | #pygments_style = 'friendly' 94 | #pygments_style = 'bw' 95 | #pygments_style = 'fruity' 96 | pygments_style = 'manni' 97 | #pygments_style = 'tango' 98 | #pygments_style = 'pastie' 99 | 100 | # A list of ignored prefixes for module index sorting. 101 | modindex_common_prefix = ['conda_press.'] 102 | 103 | 104 | # -- Options for HTML output --------------------------------------------------- 105 | 106 | # The theme to use for HTML and HTML Help pages. Major themes that come with 107 | # Sphinx are currently 'default' and 'sphinxdoc'. 108 | #html_theme = 'default' 109 | #html_theme = 'altered_nature' 110 | #html_theme = 'sphinxdoc' 111 | 112 | # Theme options are theme-specific and customize the look and feel of a theme 113 | # further. For a list of options available for each theme, see the 114 | # documentation. 115 | 116 | # Pallette Colors: 117 | # 110C1A 118 | # 5F5250 119 | # A6A58D 120 | # C8B7B8 121 | # BB9A95 122 | # 7B675E 123 | # 19182A 124 | # D0CACA 125 | # B7A695 126 | # A9A48B 127 | 128 | if not on_rtd: 129 | import cloud_sptheme as csp 130 | 131 | html_theme = 'cloud' 132 | 133 | html_theme_options = { 134 | 'max_width': '1250px', 135 | 'minimal_width': '700px', 136 | 'relbarbgcolor': '#110C1A', 137 | 'footerbgcolor': '#A6A58D', 138 | 'sidebarwidth': '322px', 139 | 'sidebarbgcolor': '#D0CACA', 140 | 'linkcolor': '#7B675E', 141 | 'link_hover_bg_color': '#D0CACA', 142 | 'headtrimcolor': '#110C1A', 143 | 'textcolor': '#110C1A', 144 | 'sectionbgcolor': '#A9A48B', 145 | 'codebgcolor': '#efefef', 146 | 'sidebarlinkcolor': '#7B675E', 147 | 'object_default_color': '#D0CACA', 148 | #'googleanalytics_id': 'UA-41934829-1', 149 | 'stickysidebar': False, 150 | 'highlighttoc': False, 151 | 'externalrefs': False, 152 | 'collapsiblesidebar': True, 153 | 'default_layout_text_size': "100%", # prevents division by zero error 154 | } 155 | 156 | # Add any paths that contain custom themes here, relative to this directory. 157 | html_theme_path = ["_theme", csp.get_theme_dir()] 158 | templates_path = ["_templates_overwrite"] 159 | 160 | # The name for this set of Sphinx documents. If None, it defaults to 161 | # " v documentation". 162 | #html_title = None 163 | 164 | # A shorter title for the navigation bar. Default is the same as html_title. 165 | #html_short_title = None 166 | 167 | # The name of an image file (relative to this directory) to place at the top 168 | # of the sidebar. 169 | html_logo = '_static/press.png' 170 | 171 | # The name of an image file (within the static path) to use as favicon of the 172 | # docs. 
This file should be a Windows icon file (.ico) being 16x16 or 32x32 173 | # pixels large. 174 | html_favicon = '_static/press.ico' 175 | 176 | # Add any paths that contain custom static files (such as style sheets) here, 177 | # relative to this directory. They are copied after the builtin static files, 178 | # so a file named "default.css" will overwrite the builtin "default.css". 179 | html_static_path = ['_static'] 180 | html_style = "numpy_friendly.css" 181 | 182 | # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, 183 | # using the given strftime format. 184 | #html_last_updated_fmt = '%b %d, %Y' 185 | 186 | # If true, SmartyPants will be used to convert quotes and dashes to 187 | # typographically correct entities. 188 | #html_use_smartypants = True 189 | 190 | # Custom sidebar templates, maps document names to template names. 191 | #html_sidebars = {} 192 | 193 | # Additional templates that should be rendered to pages, maps page names to 194 | # template names. 195 | #html_additional_pages = {} 196 | 197 | # If false, no module index is generated. 198 | #html_use_modindex = True 199 | 200 | # If false, no index is generated. 201 | #html_use_index = True 202 | 203 | # If true, the index is split into individual pages for each letter. 204 | #html_split_index = False 205 | 206 | # If true, links to the reST sources are added to the pages. 207 | #html_show_sourcelink = True 208 | 209 | # If true, an OpenSearch description file will be output, and all pages will 210 | # contain a tag referring to it. The value of this option must be the 211 | # base URL from which the finished HTML is served. 212 | #html_use_opensearch = '' 213 | 214 | # If nonempty, this is the file name suffix for HTML files (e.g. ".xhtml"). 215 | #html_file_suffix = '' 216 | 217 | # Output file base name for HTML help builder. 218 | htmlhelp_basename = 'conda_pressdoc' 219 | 220 | 221 | # -- Options for LaTeX output -------------------------------------------------- 222 | 223 | # The paper size ('letter' or 'a4'). 224 | #latex_paper_size = 'letter' 225 | 226 | # The font size ('10pt', '11pt' or '12pt'). 227 | #latex_font_size = '10pt' 228 | 229 | # Grouping the document tree into LaTeX files. List of tuples 230 | # (source start file, target name, title, author, documentclass [howto/manual]). 231 | latex_documents = [ 232 | ('index', 'conda-press.tex', u'conda-press documentation', 233 | u'Anthony Scopatz', 'manual'), 234 | ] 235 | 236 | # The name of an image file (relative to this directory) to place at the top of 237 | # the title page. 238 | #latex_logo = None 239 | 240 | # For "manual" documents, if this is true, then toplevel headings are parts, 241 | # not chapters. 242 | #latex_use_parts = False 243 | 244 | # Additional stuff for the LaTeX preamble. 245 | #latex_preamble = '' 246 | 247 | # Documents to append as an appendix to all manuals. 248 | #latex_appendices = [] 249 | 250 | # If false, no module index is generated. 
251 | #latex_use_modindex = True 252 | 253 | #Autodocumentation Flags 254 | autodoc_member_order = "groupwise" 255 | autoclass_content = "both" 256 | autosummary_generate = [] 257 | 258 | # Prevent numpy from making silly tables 259 | numpydoc_show_class_members = False 260 | -------------------------------------------------------------------------------- /tests/test_condatools.py: -------------------------------------------------------------------------------- 1 | import os 2 | import sys 3 | import ast 4 | import stat 5 | import glob 6 | import subprocess 7 | 8 | import pytest 9 | 10 | from conda_press.condatools import artifact_to_wheel, ArtifactInfo, get_only_deps_on_pypi 11 | from conda_press.config import Config, SYSTEM, SO_EXT 12 | 13 | ON_LINUX = (SYSTEM == "Linux") 14 | ON_WINDOWS = (SYSTEM == "Windows") 15 | ON_MAC = (SYSTEM == "Darwin") 16 | PY_VER_STR = "".join(map(str, sys.version_info[:2])) 17 | 18 | skip_if_not_on_linux = pytest.mark.skipif(not ON_LINUX, reason="can only be run on Linux") 19 | skip_if_on_windows = pytest.mark.skipif(ON_WINDOWS, reason="can not be run on Windows") 20 | 21 | 22 | def isexecutable(filepath): 23 | if ON_WINDOWS: 24 | # punt on this assert for now 25 | return True 26 | st = os.stat(filepath) 27 | return bool(st.st_mode & stat.S_IXUSR) 28 | 29 | 30 | def test_no_symlinks(pip_install_artifact, xonsh): 31 | # pip cannot unpack real symlinks, so insure it isn't 32 | wheel, test_env, sp = pip_install_artifact("re2=2016.11.01", include_requirements=False) 33 | if ON_WINDOWS: 34 | should_be_symlink = os.path.join(sp, 'Library', 'bin', 're2' + SO_EXT) 35 | else: 36 | should_be_symlink = os.path.join(sp, 'lib', 'libre2' + SO_EXT) 37 | assert os.path.isfile(should_be_symlink) 38 | assert not os.path.islink(should_be_symlink) 39 | # check the license file 40 | assert os.path.isfile(os.path.join(sp, 're2-2016.11.01.dist-info/LICENSE')) 41 | 42 | 43 | @skip_if_not_on_linux 44 | def test_scripts_to_bin(pip_install_artifact): 45 | wheel, test_env, sp = pip_install_artifact("patchelf=0.9", include_requirements=False) 46 | exc = os.path.join(test_env, 'bin', 'patchelf') 47 | assert os.path.isfile(exc) 48 | assert isexecutable(exc) 49 | proc = subprocess.run([exc, "--version"], check=True, encoding="utf-8", stdout=subprocess.PIPE) 50 | assert proc.stdout.strip() == "patchelf 0.9" 51 | 52 | 53 | def test_entrypoints(pip_install_artifact): 54 | wheel, test_env, sp = pip_install_artifact("noarch/conda-smithy=3.3.2", include_requirements=False) 55 | if ON_WINDOWS: 56 | exc = os.path.join(test_env, 'Scripts', 'conda-smithy.exe') 57 | else: 58 | exc = os.path.join(test_env, 'bin', 'conda-smithy') 59 | assert os.path.isfile(exc) 60 | assert isexecutable(exc) 61 | 62 | 63 | def test_numpy(pip_install_artifact): 64 | wheel, test_env, sp = pip_install_artifact("numpy=1.14.6", include_requirements=False) 65 | if ON_WINDOWS: 66 | exc = os.path.join(sp, 'Scripts', 'f2py.py') 67 | else: 68 | exc = os.path.join(sp, 'bin', 'f2py') 69 | assert os.path.isfile(exc) 70 | assert isexecutable(exc) 71 | with open(exc, 'r') as f: 72 | shebang = f.readline() 73 | assert shebang.startswith('#!') 74 | assert 'conda' not in shebang 75 | assert 'python' in shebang 76 | # check rpath changes 77 | if ON_LINUX: 78 | multiarray = glob.glob(os.path.join(sp, 'numpy', 'core', 'multiarray.*so')) 79 | malib = multiarray[-1] 80 | proc = subprocess.run(['patchelf', '--print-rpath', malib], check=True, encoding="utf-8", stdout=subprocess.PIPE) 81 | assert "lib" in proc.stdout 82 | 83 | 84 | def 
test_libcblas(pip_install_artifact): 85 | wheel, test_env, sp = pip_install_artifact("libcblas=3.8.0=4_mkl", include_requirements=False) 86 | if SYSTEM == "Linux": 87 | fname = os.path.join(sp, 'lib', 'libcblas.so.3') 88 | elif SYSTEM == "Darwin": 89 | fname = os.path.join(sp, 'lib', "libcblas.3.dylib") 90 | elif SYSTEM == "Windows": 91 | fname = os.path.join(sp, 'Library', 'bin', "libcblas.dll") 92 | else: 93 | fname = None 94 | assert os.path.isfile(fname) 95 | 96 | 97 | def test_nasm_executes(pip_install_artifact): 98 | wheel, test_env, sp = pip_install_artifact("nasm=2.13.02", include_requirements=False) 99 | if ON_WINDOWS: 100 | exc = os.path.join(test_env, 'Scripts', 'nasm.bat') 101 | else: 102 | exc = os.path.join(test_env, 'bin', 'nasm') 103 | assert os.path.isfile(exc) 104 | assert isexecutable(exc) 105 | proc = subprocess.run([exc, "-v"], check=True, encoding="utf-8", stdout=subprocess.PIPE) 106 | assert proc.stdout.strip().startswith("NASM version 2.13.02") 107 | 108 | 109 | def test_xz_tree(pip_install_artifact_tree): 110 | # tests that execuatbles which link to lib work 111 | wheels, test_env, sp = pip_install_artifact_tree("xz=5.2.4") 112 | if ON_WINDOWS: 113 | exc = os.path.join(test_env, 'Scripts', 'xz.bat') 114 | else: 115 | exc = os.path.join(test_env, 'bin', 'xz') 116 | assert os.path.isfile(exc) 117 | assert isexecutable(exc) 118 | proc = subprocess.run([exc, "--version"], check=True, encoding="utf-8", stdout=subprocess.PIPE) 119 | assert proc.stdout.strip().startswith("xz (XZ Utils) 5.2.4") 120 | 121 | 122 | def test_python(pip_install_artifact_tree, xonsh): 123 | # this tests that PYTHONPATH is getting set properly 124 | spec = "python={0}.{1}.{2}".format(*sys.version_info[:3]) 125 | wheels, test_env, sp = pip_install_artifact_tree(spec) 126 | if ON_WINDOWS: 127 | exc = os.path.join(test_env, 'Scripts', 'python.bat') 128 | else: 129 | exc = os.path.join(test_env, 'bin', 'python') 130 | assert os.path.isfile(exc) 131 | assert isexecutable(exc) 132 | proc = subprocess.run([exc, "--version"], check=True, encoding="utf-8", stdout=subprocess.PIPE) 133 | assert proc.stdout.strip().startswith("Python {0}.{1}.{2}".format(*sys.version_info[:3])) 134 | # now check that site-packages is in sys.path 135 | proc = subprocess.run([exc, "-c", "import sys; print(sys.path)"], check=True, encoding="utf-8", stdout=subprocess.PIPE) 136 | out = proc.stdout.strip() 137 | sys_path = ast.literal_eval(out) 138 | norm_sys_path = [os.path.normpath(p) for p in sys_path] 139 | norm_sp = os.path.normpath(sp) 140 | assert norm_sp in norm_sys_path 141 | 142 | 143 | def test_click(pip_install_artifact_tree, xonsh): 144 | # tests that we can create a click package 145 | # see https://github.com/regro/conda-press/issues/15 146 | wheels, test_env, sp = pip_install_artifact_tree("click=7.0=py_0", skip_python=True) 147 | 148 | 149 | @skip_if_on_windows 150 | def test_uvloop(pip_install_artifact_tree, xonsh): 151 | wheel, test_env, sp = pip_install_artifact_tree("uvloop=0.12.2=py" + PY_VER_STR + "*", 152 | skip_python=True, fatten=True) 153 | 154 | 155 | def test_exclude_add_deps(xonsh, data_folder, tmpdir): 156 | with tmpdir.as_cwd(): 157 | conda_pkg = os.path.join(data_folder, "test-deps-0.0.1-py_0.tar.bz2") 158 | wheel = artifact_to_wheel(conda_pkg) 159 | assert "opencv" in wheel.artifact_info.run_requirements 160 | 161 | config = Config(exclude_deps={"opencv"}) 162 | wheel = artifact_to_wheel(conda_pkg, config=config) 163 | assert "opencv" not in wheel.artifact_info.run_requirements 164 | 165 | 
config.add_deps = {"opencv-python"} 166 | wheel = artifact_to_wheel(conda_pkg, config=config) 167 | assert "opencv" not in wheel.artifact_info.run_requirements 168 | assert "opencv-python" in wheel.artifact_info.run_requirements 169 | 170 | config = Config(add_deps={"six"}) 171 | wheel = artifact_to_wheel(conda_pkg, config=config) 172 | assert "opencv" in wheel.artifact_info.run_requirements 173 | assert "six" in wheel.artifact_info.run_requirements 174 | 175 | 176 | 177 | @pytest.mark.parametrize("extension", [".tar", ".tar.gz", ".tar.bz2", ".zip"]) 178 | def test_from_tarballs(xonsh, tmpdir, data_folder, extension): 179 | ArtifactInfo.from_tarball(os.path.join(data_folder, f"test-deps-0.0.1-py_0{extension}")) 180 | 181 | 182 | def test_get_only_deps_on_pypi_by_artifact(tmpdir, xonsh, data_folder): 183 | with tmpdir.as_cwd(): 184 | conda_pkg = os.path.join(data_folder, "test-deps-0.0.1-py_0.tar.bz2") 185 | wheel = artifact_to_wheel( 186 | conda_pkg, Config(add_deps={"pytest"}, only_pypi=True) 187 | ) 188 | assert "opencv" not in wheel.artifact_info.run_requirements 189 | assert "pytest" in wheel.artifact_info.run_requirements 190 | 191 | 192 | def test_get_only_deps_on_pypi(): 193 | assert get_only_deps_on_pypi(["pytest", "pytest-xdist"]) == {"pytest", "pytest-xdist"} 194 | assert get_only_deps_on_pypi(["pytest", "NOT_PACKAGE_000"]) == {"pytest"} 195 | assert get_only_deps_on_pypi(["pytest", "requests"]) == {"pytest", "requests"} 196 | 197 | 198 | def test_xeus_python(pip_install_artifact_tree, xonsh): 199 | wheel, test_env, sp = pip_install_artifact_tree("xeus-python=0.5.1", skip_python=True, fatten=True) 200 | 201 | 202 | @skip_if_not_on_linux 203 | def test_pygobject(pip_install_artifact_tree, xonsh): 204 | wheel, test_env, sp = pip_install_artifact_tree( 205 | "pygobject=3.30.4", skip_python=True, fatten=True, skipped_deps={"gobject-introspection"}, 206 | ) 207 | -------------------------------------------------------------------------------- /tests/data/test-deps-0.0.1-py_0.tar: -------------------------------------------------------------------------------- 1 | info/0000775000175000017500000000000013540017274012610 5ustar trevisanitrevisaniinfo/index.json0000644000175000017500000000044313533666332014620 0ustar trevisanitrevisani{ 2 | "arch": null, 3 | "build": "py_0", 4 | "build_number": 0, 5 | "depends": [ 6 | "opencv", 7 | "python" 8 | ], 9 | "license": "MIT", 10 | "license_family": "MIT", 11 | "name": "test-deps", 12 | "noarch": "python", 13 | "platform": null, 14 | "subdir": "noarch", 15 | "timestamp": 1567583450623, 16 | "version": "0.0.1" 17 | }info/git0000644000175000017500000000000013533666332013311 0ustar trevisanitrevisaniinfo/paths.json0000644000175000017500000000004713533666332014630 0ustar trevisanitrevisani{ 18 | "paths": [], 19 | "paths_version": 1 20 | }info/recipe/0000775000175000017500000000000013540017274014057 5ustar trevisanitrevisaniinfo/recipe/conda_build_config.yaml0000644000175000017500000000076413533666332020547 0ustar trevisanitrevisaniCONDA_BUILD_SYSROOT: /opt/MacOSX10.10.sdk 21 | c_compiler: gcc 22 | cpu_optimization_target: nocona 23 | cran_mirror: https://cran.r-project.org 24 | cxx_compiler: gxx 25 | extend_keys: 26 | - pin_run_as_build 27 | - ignore_version 28 | - extend_keys 29 | - ignore_build_only_deps 30 | fortran_compiler: gfortran 31 | ignore_build_only_deps: 32 | - python 33 | - numpy 34 | lua: '5' 35 | numpy: '1.11' 36 | perl: 5.26.0 37 | pin_run_as_build: 38 | python: 39 | min_pin: x.x 40 | max_pin: x.x 41 | r-base: 42 | min_pin: x.x 43 | 
max_pin: x.x 44 | python: '3.7' 45 | r_base: '3.5' 46 | target_platform: linux-64 47 | info/recipe/meta.yaml.template0000644000175000017500000000051213533660050017475 0ustar trevisanitrevisani{% set name = "test-deps" %} 48 | {% set version = "0.0.1" %} 49 | 50 | package: 51 | name: "{{ name|lower }}" 52 | version: "{{ version }}" 53 | 54 | build: 55 | number: 0 56 | noarch: python 57 | 58 | requirements: 59 | host: 60 | - pip 61 | - python 62 | run: 63 | - python 64 | - opencv 65 | 66 | about: 67 | home: https://github.com/regro/conda-press 68 | license: MIT 69 | license_family: MIT 70 | 71 | 72 | 73 | info/recipe/meta.yaml0000644000175000017500000000403513533666332015700 0ustar trevisanitrevisani# This file created by conda-build 3.18.6 74 | # meta.yaml template originally from: 75 | # /home/trevisani/projetos/recipe-test, last modified Wed Sep 4 07:56:40 2019 76 | # ------------------------------------------------ 77 | 78 | package: 79 | name: test-deps 80 | version: 0.0.1 81 | build: 82 | noarch: python 83 | number: '0' 84 | string: py_0 85 | requirements: 86 | host: 87 | - _libgcc_mutex 0.1 main 88 | - astroid 1.6.5 py37_0 89 | - bzip2 1.0.8 h516909a_0 90 | - ca-certificates 2019.6.16 hecc5488_0 91 | - certifi 2019.6.16 py37_1 92 | - docutils 0.15.2 py37_0 93 | - dodgy 0.1.9 py_1 94 | - flake8 3.5.0 py37_1000 95 | - flake8-polyfill 1.0.2 py_0 96 | - frosted 1.4.1 py_1 97 | - isort 4.3.21 py37_0 98 | - lazy-object-proxy 1.4.2 py37h516909a_0 99 | - libffi 3.2.1 he1b5a44_1006 100 | - libgcc-ng 9.1.0 hdf63c60_0 101 | - libstdcxx-ng 9.1.0 hdf63c60_0 102 | - mccabe 0.6.1 py_1 103 | - ncurses 6.1 hf484d3e_1002 104 | - openssl 1.1.1c h516909a_0 105 | - pep8-naming 0.8.2 py_0 106 | - pies 2.6.7 py37_1000 107 | - pip 19.2.3 py37_0 108 | - prospector 0.12.10 py_1 109 | - pycodestyle 2.3.1 py_1 110 | - pydocstyle 4.0.1 py_0 111 | - pyflakes 1.6.0 py_1 112 | - pylint 1.9.2 py37_0 113 | - pylint-common 0.2.5 py_1 114 | - pylint-plugin-utils 0.5 py_0 115 | - pyroma 2.4 py_0 116 | - python 3.7.3 h33d41f4_1 117 | - pyyaml 5.1.2 py37h516909a_0 118 | - readline 8.0 hf8c457e_0 119 | - requirements-detector 0.6 py37_1000 120 | - setoptconf 0.2.0 py37_1001 121 | - setuptools 41.2.0 py37_0 122 | - six 1.12.0 py37_1000 123 | - snowballstemmer 1.9.0 py_0 124 | - sqlite 3.29.0 hcee41ef_1 125 | - tk 8.6.9 hed695b0_1002 126 | - vulture 1.0 py_0 127 | - wheel 0.33.6 py37_0 128 | - wrapt 1.11.2 py37h516909a_0 129 | - xz 5.2.4 h14c3975_1001 130 | - yaml 0.1.7 h14c3975_1001 131 | - zlib 1.2.11 h516909a_1005 132 | run: 133 | - opencv 134 | - python 135 | about: 136 | home: https://github.com/regro/conda-press 137 | license: MIT 138 | license_family: MIT 139 | extra: 140 | copy_test_source_files: true 141 | final: true 142 | info/about.json0000644000175000017500000001062613533666332014627 0ustar trevisanitrevisani{ 143 | "channels": [ 144 | "conda-forge", 145 | "defaults" 146 | ], 147 | "conda_build_version": "3.18.6", 148 | "conda_private": false, 149 | "conda_version": "4.7.11", 150 | "env_vars": { 151 | "CIO_TEST": "" 152 | }, 153 | "extra": { 154 | "copy_test_source_files": true, 155 | "final": true 156 | }, 157 | "home": "https://github.com/regro/conda-press", 158 | "identifiers": [], 159 | "keywords": [], 160 | "license": "MIT", 161 | "license_family": "MIT", 162 | "root_pkgs": [ 163 | "msrest 0.6.9 py_0", 164 | "xorg-libxau 1.0.9 h14c3975_0", 165 | "psutil 5.6.3 py37h516909a_0", 166 | "libblas 3.8.0 11_openblas", 167 | "wrapt 1.11.2 py37h516909a_0", 168 | "libstdcxx-ng 9.1.0 hdf63c60_0", 169 | "vsts-python-api 
0.1.22 py_0", 170 | "zipp 0.5.2 py_0", 171 | "gitpython 2.1.13 py_0", 172 | "sqlite 3.28.0 h8b20d00_0", 173 | "requests 2.21.0 py37_0", 174 | "libcblas 3.8.0 11_openblas", 175 | "blinker 1.4 py_1", 176 | "idna 2.8 py37_1000", 177 | "libopenblas 0.3.6 h6e990d7_6", 178 | "libgcc-ng 9.1.0 hdf63c60_0", 179 | "qt 5.9.7 h52cfd70_2", 180 | "oauthlib 3.0.1 py_0", 181 | "pkginfo 1.5.0.1 py_0", 182 | "libedit 3.1.20170329 hf8c457e_1001", 183 | "pandas 0.24.2 py37he6710b0_0", 184 | "sip 4.19.8 py37hf484d3e_1000", 185 | "ncurses 6.1 hf484d3e_1002", 186 | "python-dateutil 2.8.0 py_0", 187 | "expat 2.2.5 he1b5a44_1003", 188 | "readline 7.0 hf8c457e_1001", 189 | "icu 58.2 hf484d3e_1000", 190 | "libpng 1.6.37 hed695b0_0", 191 | "libxml2 2.9.9 h13577e0_2", 192 | "pyjwt 1.7.1 py_0", 193 | "tqdm 4.32.2 py_0", 194 | "conda 4.7.11 py37_0", 195 | "ccache 3.3.2 0", 196 | "dbus 1.13.6 he372182_0", 197 | "libiconv 1.15 h516909a_1005", 198 | "ruamel_yaml 0.15.71 py37h14c3975_1000", 199 | "urllib3 1.24.3 py37_0", 200 | "cryptography 2.7 py37h72c5cf5_0", 201 | "liblief 0.9.0 hf8a498c_1", 202 | "glob2 0.7 py_0", 203 | "isodate 0.6.0 py_1", 204 | "liblapack 3.8.0 11_openblas", 205 | "libuuid 2.32.1 h14c3975_1000", 206 | "gstreamer 1.14.5 h36ae1b5_0", 207 | "apipkg 1.5 py_0", 208 | "six 1.12.0 py37_1000", 209 | "zlib 1.2.11 h516909a_1005", 210 | "libcurl 7.65.3 hda55be3_0", 211 | "pygithub 1.43.6 py37_0", 212 | "krb5 1.16.3 h05b26f9_1001", 213 | "libgfortran-ng 7.3.0 hdf63c60_0", 214 | "smmap2 2.0.5 py_0", 215 | "numpy 1.17.0 py37h95a1406_0", 216 | "pluggy 0.12.0 py_0", 217 | "ruamel.yaml 0.16.0 py37h516909a_0", 218 | "pycrypto 2.6.1 py37h14c3975_1002", 219 | "_libgcc_mutex 0.1 main", 220 | "chardet 3.0.4 py37_1003", 221 | "responses 0.10.5 py37_0", 222 | "tk 8.6.9 hed695b0_1002", 223 | "curl 7.65.3 hf8cf82a_0", 224 | "filelock 3.0.10 py_0", 225 | "conda-forge-pinning 2019.07.26 0", 226 | "pytz 2019.2 py_0", 227 | "freetype 2.10.0 he983fc9_0", 228 | "pyqt 5.9.2 py37h05f1152_2", 229 | "lz4-c 1.8.3 he1b5a44_1001", 230 | "libarchive 3.3.3 hb44662c_1005", 231 | "cython 0.29.13 py37he1b5a44_0", 232 | "bzip2 1.0.8 h516909a_0", 233 | "python 3.7.3 h0371630_0", 234 | "lzo 2.10 h14c3975_1000", 235 | "beautifulsoup4 4.8.0 py37_0", 236 | "pthread-stubs 0.4 h14c3975_1001", 237 | "jpeg 9c h14c3975_1001", 238 | "openssl 1.1.1c h516909a_0", 239 | "asn1crypto 0.24.0 py37_1003", 240 | "requests-oauthlib 1.2.0 py_0", 241 | "xz 5.2.4 h14c3975_1001", 242 | "conda-smithy 3.4.0 py_0", 243 | "xorg-libxdmcp 1.1.3 h516909a_0", 244 | "pytest-forked 1.0.2 py_0", 245 | "cffi 1.12.3 py37h8022711_0", 246 | "libxcb 1.13 h14c3975_1002", 247 | "gst-plugins-base 1.14.5 h0935bb2_0", 248 | "markupsafe 1.1.1 py37h14c3975_0", 249 | "ca-certificates 2019.6.16 hecc5488_0", 250 | "attrs 19.1.0 py_0", 251 | "pytest-xdist 1.29.0 py_0", 252 | "pcre 8.41 hf484d3e_1003", 253 | "pycparser 2.19 py37_1", 254 | "patchelf 0.10 he1b5a44_0", 255 | "libssh2 1.8.2 h22169c7_2", 256 | "pyyaml 5.1.2 py37h516909a_0", 257 | "pytest 5.0.1 py37_1", 258 | "gitdb2 2.0.5 py_0", 259 | "conda-build 3.18.6 py37_0", 260 | "fontconfig 2.13.1 he4413a7_1000", 261 | "importlib_metadata 0.18 py37_0", 262 | "soupsieve 1.9.2 py37_0", 263 | "git 2.22.0 pl526hce37bd2_0", 264 | "pyopenssl 19.0.0 py37_0", 265 | "certifi 2019.6.16 py37_1", 266 | "pysocks 1.7.0 py37_0", 267 | "deprecated 1.2.5 py_0", 268 | "zstd 1.4.0 h3b9ef0a_0", 269 | "pyparsing 2.4.2 py_0", 270 | "pip 19.2.1 py37_0", 271 | "wcwidth 0.1.7 py_1", 272 | "jinja2 2.10.1 py_0", 273 | "atomicwrites 1.3.0 py_0", 274 | "wheel 0.33.4 
py37_0", 275 | "pycosat 0.6.3 py37h14c3975_1001", 276 | "glib 2.58.3 h6f030ca_1002", 277 | "py-lief 0.9.0 py37he1b5a44_1", 278 | "ruamel 1.0 py37_0", 279 | "python-libarchive-c 2.8 py37_1004", 280 | "setuptools 41.0.1 py37_0", 281 | "execnet 1.6.1 py_0", 282 | "py 1.8.0 py_0", 283 | "yaml 0.1.7 h14c3975_1001", 284 | "libffi 3.2.1 he1b5a44_1006", 285 | "gettext 0.19.8.1 hc5be6a0_1002", 286 | "more-itertools 7.2.0 py_0", 287 | "packaging 19.0 py_0", 288 | "perl 5.26.2 h516909a_1006", 289 | "conda-package-handling 1.3.11 py37_0" 290 | ], 291 | "tags": [] 292 | }info/hash_input.json0000644000175000017500000000000213533666332015642 0ustar trevisanitrevisani{}info/files0000644000175000017500000000000013533666332013630 0ustar trevisanitrevisaniinfo/link.json0000644000175000017500000000011313533666332014440 0ustar trevisanitrevisani{ 293 | "noarch": { 294 | "type": "python" 295 | }, 296 | "package_metadata_version": 1 297 | } -------------------------------------------------------------------------------- /conda_press/condatools.xsh: -------------------------------------------------------------------------------- 1 | """Some tools for converting conda packages to wheels""" 2 | import os 3 | import re 4 | import sys 5 | import json 6 | import shutil 7 | import tarfile 8 | import tempfile 9 | 10 | from lazyasd import lazyobject 11 | from xonsh.platform import ON_LINUX 12 | from xonsh.tools import print_color 13 | from xonsh.lib.os import rmtree, indir 14 | 15 | from ruamel.yaml import YAML 16 | import requests 17 | 18 | from conda.api import SubdirData, Solver 19 | 20 | from conda_press.config import CACHE_DIR, DEFAULT_CHANNELS, Config 21 | from conda_press.wheel import Wheel 22 | 23 | 24 | def wheel_safe_build(build, build_string=None): 25 | if build is None: 26 | pass 27 | elif build_string is None: 28 | pass 29 | elif not build.isdigit(): 30 | while build and not build.isdigit(): 31 | build = build[1:] 32 | if not build: 33 | build = None 34 | elif build_string.endswith('_' + build): 35 | build = build + '_' + build_string[:-(len(build) + 1)] 36 | else: 37 | build = build + '_' + build_string 38 | return build 39 | 40 | 41 | def index_json_exists(info=None): 42 | return info.index_json is not None 43 | 44 | 45 | def package_spec_from_index_json(info=None): 46 | idx = info.index_json 47 | build = wheel_safe_build(str(idx.get("build_number", "0")), idx.get("build", None)) 48 | return idx["name"], idx["version"], build 49 | 50 | 51 | def meta_yaml_exists(info=None): 52 | return info.meta_yaml is not None 53 | 54 | 55 | def package_spec_from_meta_yaml(info=None): 56 | meta_yaml = info.meta_yaml 57 | name = meta_yaml['package']['name'] 58 | version = meta_yaml['package']['version'] 59 | build = meta_yaml['build'].get('number', '0') 60 | build_string = meta_yaml['build'].get('string', None) 61 | build = wheel_safe_build(build, build_string) 62 | return name, version, build 63 | 64 | 65 | def valid_package_name(info=None): 66 | fname = os.path.basename(info.artifactdir) 67 | return fname.count('-') >= 3 68 | 69 | 70 | def package_spec_from_filename(info=None): 71 | fname = os.path.basename(info.artifactdir) 72 | extra, _, build = fname.rpartition('-') 73 | name, _, version = extra.rpartition('-') 74 | build = os.path.splitext(build)[0] 75 | if '_' in build: 76 | build_string, _, build = build.rpartition('_') 77 | build = wheel_safe_build(build, build_string) 78 | return name, version, build 79 | 80 | 81 | PACKAGE_SPEC_GETTERS = ( 82 | # (checker, getter) tuples in priority order 83 | (index_json_exists, 
package_spec_from_index_json), 84 | (package_spec_from_index_json, package_spec_from_meta_yaml), 85 | (valid_package_name, package_spec_from_filename), 86 | ) 87 | 88 | 89 | def _defer_symbolic_links(files): 90 | first = [] 91 | defer = [] 92 | for f in files: 93 | if os.path.islink(f): 94 | defer.append(f) 95 | else: 96 | first.append(f) 97 | return first + defer 98 | 99 | def _group_files(wheel, info): 100 | scripts = [] 101 | includes = [] 102 | files = [] 103 | bindir = "Scripts/" if info.subdir.startswith("win") else "bin/" 104 | for fname in info.files: 105 | if fname.startswith(bindir): 106 | scripts.append(fname) 107 | #elif fname.startswith('include/'): 108 | # pip places files into "include/site/pythonX.Y/package/" rather 109 | # than "includes/" This should be reserved for python packages that 110 | # expect this behavior, and we'll dump the other includes into 111 | # site-packages, like with lib, etc. 112 | # includes.append(fname) 113 | else: 114 | files.append(fname) 115 | wheel.scripts = _defer_symbolic_links(scripts) 116 | wheel.includes = _defer_symbolic_links(includes) 117 | wheel.files = _defer_symbolic_links(files) 118 | 119 | 120 | def root_ext(s): 121 | """gets the extention of the root directory""" 122 | # in info/files, the path separator is always "/" 123 | # even on windows 124 | return os.path.splitext(s.split("/")[0])[1] 125 | 126 | 127 | BAD_ROOT_EXTS = frozenset([".egg-info", ".dist-info"]) 128 | 129 | 130 | def _remap_noarch_python(wheel, info): 131 | new_files = [] 132 | for fsname, arcname in wheel.files: 133 | if arcname.startswith('site-packages/'): 134 | new_arcname = arcname[14:] 135 | if root_ext(new_arcname) in BAD_ROOT_EXTS: 136 | # skip other pip metadata 137 | continue 138 | else: 139 | new_arcname = arcname 140 | new_files.append((fsname, new_arcname)) 141 | wheel.files = new_files 142 | 143 | 144 | @lazyobject 145 | def re_site_packages_file_unix(): 146 | return re.compile(r'lib/python\d\.\d/site-packages/(.*)') 147 | 148 | 149 | @lazyobject 150 | def re_site_packages_file_win(): 151 | return re.compile(r'Lib/site-packages/(.*)') 152 | 153 | 154 | def is_shared_lib(fname): 155 | _, ext = os.path.splitext(fname) 156 | if sys.platform.startswith('linux'): 157 | rtn = (ext == '.so') 158 | elif sys.platform.startswith('darwin'): 159 | rtn = (ext == '.dylib') || (ext == '.so') # cpython extensions use .so because ...? 
160 | elif sys.platform.startswith('win'): 161 | rtn = (ext == '.dll') 162 | else: 163 | rtn = False 164 | return rtn 165 | 166 | 167 | def is_elf(fname): 168 | """Whether or not a file is an ELF binary file.""" 169 | if not ON_LINUX: 170 | return False 171 | with ${...}.swap(RAISE_SUBPROC_ERROR=False): 172 | return bool(!(patchelf @(fname) e>o)) 173 | 174 | 175 | def _remap_site_packages(wheel, info): 176 | new_files = [] 177 | moved_so = [] 178 | re_site_packages_file = re_site_packages_file_win if info.subdir.startswith("win") else re_site_packages_file_unix 179 | for fsname, arcname in wheel.files: 180 | m = re_site_packages_file.match(arcname) 181 | if m is None: 182 | new_arcname = arcname 183 | moved = False 184 | else: 185 | new_arcname = m.group(1) 186 | if root_ext(new_arcname) in BAD_ROOT_EXTS: 187 | # skip other pip metadata 188 | continue 189 | moved = True 190 | elem = (fsname, new_arcname) 191 | new_files.append(elem) 192 | if moved and is_shared_lib(new_arcname): 193 | moved_so.append(elem) 194 | wheel.files = new_files 195 | wheel.moved_shared_libs = moved_so 196 | 197 | 198 | def major_minor(ver): 199 | entry, _, _ = ver.partition(',') 200 | major, _, extra = entry.partition('.') 201 | minor, _, extra = extra.partition('.') 202 | return major, minor 203 | 204 | 205 | @lazyobject 206 | def re_name_from_ref(): 207 | return re.compile("^([A-Za-z0-9_-]+).*?") 208 | 209 | 210 | def name_from_ref(ref): 211 | """Gets an artifact name from a ref spec string.""" 212 | return re_name_from_ref.match(ref).group(1).lower() 213 | 214 | 215 | 216 | PLATFORM_SUBDIRS_TO_TAGS = { 217 | "noarch": "any", 218 | "linux-32": "linux_i386", 219 | "linux-64": "linux_x86_64", 220 | "osx-64": "macosx_10_9_x86_64", 221 | "win-32": "win32", 222 | "win-64": "win_amd64", 223 | } 224 | 225 | 226 | def download_package_rec(pkg_record): 227 | """Downloads a package record, returning the local filename.""" 228 | os.makedirs(CACHE_DIR, exist_ok=True) 229 | local_fn = os.path.join(CACHE_DIR, pkg_record.fn) 230 | if os.path.isfile(local_fn): 231 | return local_fn 232 | print(f"Downloading {pkg_record.url}") 233 | resp = requests.get(pkg_record.url) 234 | with open(local_fn, 'wb') as f: 235 | f.write(resp.content) 236 | print("Download complete") 237 | return local_fn 238 | 239 | 240 | def download_artifact_ref(artifact_ref, channels=None, subdir=None): 241 | """Searches for an artifact on a variety of channels. If subdir is not 242 | given, only "noarch" is used. Noarch is searched after the given subdit. 
243 | """ 244 | channels = DEFAULT_CHANNELS if channels is None else channels 245 | for channel in channels: 246 | # check subdir 247 | if subdir is not None: 248 | subdir_data = SubdirData(channel + "/" + subdir) 249 | pkg_records = subdir_data.query(artifact_ref) 250 | if pkg_records: 251 | noarch = False 252 | break 253 | # check noarch 254 | subdir_data = SubdirData(channel + "/noarch") 255 | pkg_records = subdir_data.query(artifact_ref) 256 | if pkg_records: 257 | noarch = True 258 | break 259 | else: 260 | raise RuntimeError(f"could not find {artifact_ref} on {channels} for {subdir}") 261 | 262 | # if a python package, get only the ones matching this versuon of python 263 | pytag = "py{vi.major}{vi.minor}".format(vi=sys.version_info) 264 | if noarch: 265 | pass 266 | else: 267 | filtered_records = [] 268 | for r in pkg_records: 269 | if 'py' in r.build: 270 | if pytag in r.build: 271 | filtered_records.append(r) 272 | else: 273 | filtered_records.append(r) 274 | pkg_records = filtered_records 275 | if pkg_records: 276 | print("package records:", pkg_records) 277 | pkg_record = pkg_records[-1] 278 | else: 279 | return None 280 | raise RuntimeError(f"could not find {artifact_ref} on {channels}") 281 | return download_package_rec(pkg_record) 282 | 283 | 284 | def download_artifact(artifact_ref_or_rec, channels=None, subdir=None): 285 | """Downloads an artifact from a ref spec or a PackageRecord.""" 286 | if isinstance(artifact_ref_or_rec, str): 287 | return download_artifact_ref(artifact_ref_or_rec, channels=channels, subdir=subdir) 288 | else: 289 | return download_package_rec(artifact_ref_or_rec) 290 | 291 | 292 | def all_deps(package_rec, names_recs, seen=None): 293 | """Computes the set of all dependency names for a package.""" 294 | package_deps = set(map(name_from_ref, package_rec.depends)) 295 | seen = set() if seen is None else seen 296 | if package_rec.name in seen: 297 | return package_deps 298 | seen.add(package_rec.name) 299 | for dep_name in list(package_deps): 300 | package_deps |= all_deps(names_recs[dep_name], names_recs, seen=seen) 301 | return package_deps 302 | 303 | 304 | def ref_name(name, ver_build=None): 305 | if not ver_build: 306 | rtn = name 307 | elif ver_build[0].isdigit(): 308 | rtn = name + "=" + ver_build.replace(" ", "=") 309 | else: 310 | rtn = name + ver_build.replace(" ", "=") 311 | return rtn 312 | 313 | 314 | def _find_file_in_artifact(relative_source, info=None, channels=None, deps_cache=None, 315 | strip_symbols=True): 316 | tgtfile = None 317 | for name, ver_build in info.run_requirements.items(): 318 | dep_ref = ref_name(name, ver_build=ver_build) 319 | if dep_ref in deps_cache: 320 | dep = deps_cache[dep_ref] 321 | else: 322 | depfile = download_artifact(dep_ref, channels=channels, subdir=info.subdir) 323 | if depfile is None: 324 | print(f"skipping {dep_ref}") 325 | continue 326 | dep = ArtifactInfo.from_tarball(depfile, replace_symlinks=False, config=Config(strip_symbols=strip_symbols)) 327 | deps_cache[dep_ref] = dep 328 | tgtdep = os.path.join(dep.artifactdir, relative_source) 329 | print(f"Searching {dep.artifactdir} for link target of {relative_source} -> {tgtdep}") 330 | if os.path.isfile(tgtdep) or os.path.islink(tgtdep): 331 | tgtfile = tgtdep 332 | else: 333 | tgtfile = find_link_target(tgtdep, info=dep, channels=channels, 334 | deps_cache=deps_cache, 335 | relative_source=relative_source, 336 | strip_symbols=strip_symbols) 337 | if tgtfile and os.path.islink(tgtfile): 338 | # recurse even farther down, if what we got is also a link 
339 | tgtfile = find_link_target(tgtfile, info=dep, channels=channels, 340 | deps_cache=deps_cache, 341 | relative_source=relative_source, 342 | strip_symbols=strip_symbols) 343 | if tgtfile is not None: 344 | break 345 | else: 346 | tgtfile = None 347 | return tgtfile 348 | 349 | 350 | def find_link_target(source, info=None, channels=None, deps_cache=None, 351 | relative_source=None, strip_symbols=True): 352 | dc = {} if deps_cache is None else deps_cache 353 | if os.path.islink(source): 354 | target = os.readlink(source) 355 | start = os.path.dirname(source) 356 | tgtfile = os.path.join(start, target) 357 | else: 358 | # this dep doesn't have the target, so search recursively 359 | if relative_source is None: 360 | relative_source = os.path.relpath(source, info.artifactdir) 361 | tgtfile = _find_file_in_artifact(relative_source, info=info, channels=channels, deps_cache=dc, 362 | strip_symbols=strip_symbols) 363 | if tgtfile is None: 364 | print(f"{relative_source} is None") 365 | return None 366 | if not os.path.exists(tgtfile): 367 | # not in this artifact, need to do dependency search 368 | tgtrel = os.path.relpath(tgtfile, info.artifactdir) 369 | tgtfile = _find_file_in_artifact(tgtrel, info=info, channels=channels, 370 | deps_cache=dc, strip_symbols=strip_symbols) 371 | if deps_cache is None: 372 | # clean up, if we are the last call 373 | for key, dep in dc.items(): 374 | dep.clean() 375 | elif os.path.islink(tgtfile): 376 | # target is another symlink! need to go further 377 | rtn = find_link_target(tgtfile, info=info, channels=channels, deps_cache=dc, 378 | strip_symbols=strip_symbols) 379 | else: 380 | rtn = tgtfile 381 | return tgtfile 382 | 383 | 384 | class ArtifactInfo: 385 | """Representation of artifact info/ directory.""" 386 | 387 | def __init__(self, artifactdir, config=None): 388 | self._artifactdir = None 389 | self._python_tag = None 390 | self._abi_tag = None 391 | self._platform_tag = None 392 | self._run_requirements = None 393 | self._noarch = None 394 | self._entry_points = None 395 | self.index_json = None 396 | self.link_json = None 397 | self.recipe_json = None 398 | self.about_json = None 399 | self.meta_yaml = None 400 | self.files = None 401 | self.artifactdir = artifactdir 402 | self._config = config if config else Config() 403 | 404 | def clean(self): 405 | rmtree(self._artifactdir, force=True) 406 | 407 | @property 408 | def config(self) -> Config: 409 | return self._config 410 | 411 | @config.setter 412 | def config(self, config_obj: Config): 413 | self._config = config_obj 414 | 415 | @property 416 | def artifactdir(self): 417 | return self._artifactdir 418 | 419 | @artifactdir.setter 420 | def artifactdir(self, value): 421 | if self._artifactdir is not None: 422 | self.clean() 423 | self._artifactdir = value 424 | # load index.json 425 | idxfile = os.path.join(value, 'info', 'index.json') 426 | if os.path.isfile(idxfile): 427 | with open(idxfile, 'r') as f: 428 | self.index_json = json.load(f) 429 | else: 430 | self.index_json = None 431 | # load link.json 432 | lnkfile = os.path.join(value, 'info', 'link.json') 433 | if os.path.isfile(lnkfile): 434 | with open(lnkfile, 'r') as f: 435 | self.link_json = json.load(f) 436 | else: 437 | self.link_json = None 438 | # load recipe.json 439 | recfile = os.path.join(value, 'info', 'recipe.json') 440 | if os.path.isfile(recfile): 441 | with open(recfile, 'r') as f: 442 | self.recipe_json = json.load(f) 443 | else: 444 | self.recipe_json = None 445 | # load about.json 446 | abtfile = os.path.join(value, 'info', 
'about.json') 447 | if os.path.isfile(abtfile): 448 | with open(abtfile, 'r') as f: 449 | self.about_json = json.load(f) 450 | else: 451 | self.about_json = None 452 | # load meta.yaml 453 | metafile = os.path.join(value, 'info', 'recipe', 'meta.yaml.rendered') 454 | if not os.path.exists(metafile): 455 | metafile = os.path.join(value, 'info', 'recipe', 'meta.yaml') 456 | if os.path.isfile(metafile): 457 | yaml = YAML(typ='safe') 458 | with open(metafile) as f: 459 | try: 460 | self.meta_yaml = yaml.load(f) 461 | except Exception: 462 | print("failed to load meta.yaml") 463 | self.meta_yaml = None 464 | else: 465 | self.meta_yaml = None 466 | # load file listing 467 | self._load_files() 468 | # clean up lazy values 469 | self._python_tag = None 470 | self._abi_tag = None 471 | self._platform_tag = None 472 | self._run_requirements = None 473 | self._noarch = None 474 | self._entry_points = None 475 | 476 | def _load_files(self): 477 | filesname = os.path.join(self._artifactdir, 'info', 'files') 478 | if os.path.isfile(filesname): 479 | with open(filesname, 'r') as f: 480 | raw = f.read().strip() 481 | self.files = raw.splitlines() 482 | else: 483 | with indir(self._artifactdir): 484 | self.files = set(g`**`) - set(g`info/**`) 485 | 486 | @property 487 | def run_requirements(self): 488 | if self._run_requirements is not None: 489 | return self._run_requirements 490 | if "depends" in self.index_json: 491 | reqs = self.index_json["depends"] 492 | else: 493 | reqs = self.meta_yaml.get('requirements', {}).get('run', ()) 494 | 495 | reqs = self.config.clean_deps(reqs) 496 | 497 | if self.config.only_pypi: 498 | reqs = get_only_deps_on_pypi(reqs) 499 | 500 | self._run_requirements = dict([x.partition(' ')[::2] for x in reqs]) 501 | return self._run_requirements 502 | 503 | @property 504 | def noarch(self): 505 | if self._noarch is not None: 506 | return self._noarch 507 | if self.index_json is not None: 508 | na = self.index_json.get('noarch', False) 509 | elif self.meta_yaml is not None: 510 | na = self.meta_yaml.get('build', {}).get('noarch', False) 511 | else: 512 | # couldn't find, assume noarch 513 | na = False 514 | self._noarch = na 515 | return self._noarch 516 | 517 | @property 518 | def python_tag(self): 519 | if self._python_tag is not None: 520 | return self._python_tag 521 | if 'python' in self.run_requirements: 522 | pyver = self.run_requirements['python'] 523 | if self.noarch == "python": 524 | if pyver.startswith('=='): 525 | pytag = 'py' + ''.join(major_minor(pyver[2:])) 526 | elif pyver[:1].isdigit(): 527 | pytag = 'py' + ''.join(major_minor(pyver)) 528 | elif pyver.startswith('>=') and ',<' in pyver: 529 | # pinned to a single python version 530 | pytag = 'py' + ''.join(major_minor(pyver[2:])) 531 | elif pyver.startswith('>='): 532 | pytag = 'py' + major_minor(pyver[2:])[0] 533 | else: 534 | # couldn't choose, pick no-arch 535 | pytag = 'py2.py3' 536 | elif pyver: 537 | if pyver.startswith('=='): 538 | pytag = 'cp' + ''.join(major_minor(pyver[2:])) 539 | elif pyver[:1].isdigit(): 540 | pytag = 'cp' + ''.join(major_minor(pyver)) 541 | elif pyver.startswith('>=') and ',<' in pyver: 542 | # pinned to a single python version 543 | pytag = 'cp' + ''.join(major_minor(pyver[2:])) 544 | elif pyver.startswith('>='): 545 | pytag = 'cp' + major_minor(pyver[2:])[0] 546 | else: 547 | # couldn't choose, pick no-arch 548 | pytag = 'py2.py3' 549 | else: 550 | # noarch python, effectively 551 | pytag = 'py2.py3' 552 | else: 553 | # no python dependence, so valid for all Pythons 554 | pytag = 
'py2.py3' 555 | self._python_tag = pytag 556 | return self._python_tag 557 | 558 | @property 559 | def abi_tag(self): 560 | # explanation of ABI tag at https://www.python.org/dev/peps/pep-0425/#abi-tag 561 | if self._abi_tag is not None: 562 | return self._abi_tag 563 | if self.noarch: 564 | atag = "none" 565 | elif self.python_tag == 'py2.py3': 566 | # no arch or no Python dependnce 567 | atag = 'none' 568 | elif self.python_tag == "cp3": 569 | atag = "abi3" 570 | elif self.python_tag.startswith('cp'): 571 | # explanation of ABI suffix at https://www.python.org/dev/peps/pep-3149/ 572 | atag = self.python_tag + 'm' 573 | else: 574 | # could not determine, use no-arch setting 575 | atag = "none" 576 | self._abi_tag = atag 577 | return self._abi_tag 578 | 579 | @property 580 | def platform_tag(self): 581 | if self._platform_tag is not None: 582 | return self._platform_tag 583 | if self.noarch: 584 | ptag = 'any' 585 | else: 586 | platform_subdir = self.index_json["subdir"] 587 | ptag = PLATFORM_SUBDIRS_TO_TAGS[platform_subdir] 588 | self._platform_tag = ptag 589 | return self._platform_tag 590 | 591 | @property 592 | def entry_points(self): 593 | if self._entry_points is not None: 594 | return self._entry_points 595 | if self.link_json is None: 596 | ep = [] 597 | else: 598 | ep = self.link_json.get("noarch", {}).get("entry_points", []) 599 | self._entry_points = ep 600 | return self._entry_points 601 | 602 | @property 603 | def subdir(self): 604 | return self.index_json["subdir"] 605 | 606 | @classmethod 607 | def from_tarball(cls, path, config=None, replace_symlinks=True): 608 | if config is None: 609 | config = Config() 610 | base = os.path.basename(path) 611 | if base.endswith('.tar.bz2'): 612 | mode = 'r:bz2' 613 | canonical_name = base[:-8] 614 | elif base.endswith(".tar.gz"): 615 | mode = "r:gz" 616 | canonical_name = base[:-7] 617 | elif base.endswith('.tar'): 618 | mode = 'r:' 619 | canonical_name = base[:-4] 620 | else: 621 | mode = 'r' 622 | canonical_name = base 623 | tmpdir = tempfile.mkdtemp(prefix=canonical_name) 624 | with tarfile.TarFile.open(path, mode=mode) as tf: 625 | tf.extractall(path=tmpdir) 626 | info = cls(tmpdir, config) 627 | if config.skip_python and "python" in info.run_requirements: 628 | return info 629 | if config.strip_symbols: 630 | info.strip_symbols() 631 | if replace_symlinks: 632 | info.replace_symlinks(strip_symbols=config.strip_symbols) 633 | return info 634 | 635 | def strip_symbols(self): 636 | """Strips symbols out of binary files""" 637 | if not ON_LINUX: 638 | print_color("{RED}Skipping symbol stripping, not on linux!{NO_COLOR}") 639 | for f in self.files: 640 | absname = os.path.join(self.artifactdir, f) 641 | if not is_elf(absname): 642 | continue 643 | print_color("striping symbols from {CYAN}" + absname + "{NO_COLOR}") 644 | with ${...}.swap(RAISE_SUBPROC_ERROR=True): 645 | ![strip --strip-all --preserve-dates --enable-deterministic-archives @(absname)] 646 | 647 | def replace_symlinks(self, strip_symbols=None): 648 | # this is needed because of https://github.com/pypa/pip/issues/5919 649 | # this has to walk the package deps in some cases. 
650 | if strip_symbols is None: 651 | strip_symbols = self.config.strip_symbols 652 | for f in self.files: 653 | absname = os.path.join(self.artifactdir, f) 654 | if not os.path.islink(absname): 655 | # file is not a symlink, we can skip 656 | continue 657 | deps_cache = {} 658 | target = find_link_target(absname, info=self, deps_cache=deps_cache, strip_symbols=strip_symbols) 659 | if target is None: 660 | raise RuntimeError(f"Could not find link target of {absname}") 661 | print(f"Replacing {absname} with {target}") 662 | if os.path.isdir(absname): 663 | os.remove(absname) 664 | shutil.copytree(target, absname) 665 | else: 666 | try: 667 | shutil.copy2(target, absname, follow_symlinks=False) 668 | except shutil.SameFileError: 669 | os.remove(absname) 670 | shutil.copy2(target, absname, follow_symlinks=False) 671 | # clean up after the copy 672 | for key, dep in deps_cache.items(): 673 | dep.clean() 674 | 675 | 676 | def get_only_deps_on_pypi(list_deps): 677 | """Based on a set of dependencies this function will check if those 678 | dependencies are on PyPi, if it is not available it will be removed. 679 | 680 | Attributes 681 | ---------- 682 | list_deps: set of `str` 683 | List of dependencies as a string values 684 | 685 | Returns 686 | ------- 687 | set 688 | List of packages present on PyPi 689 | """ 690 | new_deps = set() 691 | for pkg in list_deps: 692 | pkg_name_url = pkg.lower().replace("-", "_") 693 | response = requests.get(f"https://pypi.python.org/pypi/{pkg_name_url}/json") 694 | if response.status_code == 200: 695 | new_deps.add(pkg) 696 | else: 697 | print(f"Package {pkg} was not found on PyPi.") 698 | return new_deps 699 | 700 | 701 | def artifact_to_wheel(path, config=None): 702 | """Converts an artifact to a wheel. The clean option will remove 703 | the temporary artifact directory before returning. 
704 | """ 705 | # unzip the artifact 706 | if path is None: 707 | return 708 | if config is None: 709 | config = Config() 710 | if isinstance(path, ArtifactInfo): 711 | path.config = config 712 | info = path 713 | else: 714 | info = ArtifactInfo.from_tarball( 715 | path, config=config 716 | ) 717 | # get names from meta.yaml 718 | for checker, getter in PACKAGE_SPEC_GETTERS: 719 | if checker(info=info): 720 | name, version, build = getter(info=info) 721 | break 722 | else: 723 | raise RuntimeError(f'could not compute name, version, and build for {path!r}') 724 | # create wheel 725 | wheel = Wheel(name, version, build_tag=build, python_tag=info.python_tag, 726 | abi_tag=info.abi_tag, platform_tag=info.platform_tag) 727 | wheel.artifact_info = info 728 | wheel.basedir = info.artifactdir 729 | wheel.derived_from = "artifact" 730 | _group_files(wheel, info) 731 | if info.noarch == "python": 732 | wheel.noarch_python = True 733 | _remap_noarch_python(wheel, info) 734 | elif "python" in info.run_requirements: 735 | _remap_site_packages(wheel, info) 736 | if config.skip_python: 737 | info.run_requirements.pop('python') 738 | wheel.rewrite_python_shebang() 739 | wheel.rewrite_rpaths() 740 | wheel.rewrite_scripts_linking() 741 | wheel.entry_points = info.entry_points 742 | wheel.write( 743 | include_requirements=config.include_requirements, 744 | skip_python=config.skip_python 745 | ) 746 | return wheel 747 | 748 | 749 | def package_to_wheel(ref_or_rec, config=None, _top=True): 750 | """Converts a package ref spec or a PackageRecord into a wheel.""" 751 | if config is None: 752 | config = Config() 753 | path = download_artifact( 754 | ref_or_rec, channels=config.get_all_channels(), subdir=config.get_all_subdir() 755 | ) 756 | if path is None: 757 | # happens for cloudpickle>=0.2.1 758 | return None 759 | info = ArtifactInfo.from_tarball(path, config=config) 760 | if config.skip_python and not _top and "python" in info.run_requirements: 761 | return None 762 | wheel = artifact_to_wheel(info, config=config) 763 | wheel._top = _top 764 | return wheel 765 | 766 | 767 | def artifact_ref_dependency_tree_to_wheels(artifact_ref, config=None, seen=None): 768 | """Converts all artifact dependencies to wheels for a ref spec string""" 769 | if config is None: 770 | config = Config() 771 | seen = {} if seen is None else seen 772 | top_name = name_from_ref(artifact_ref) 773 | top_found = False 774 | 775 | solver = Solver("", config.get_all_channels(), subdirs=config.get_all_subdir(), specs_to_add=(artifact_ref,)) 776 | package_recs = solver.solve_final_state() 777 | 778 | if config.skip_python: 779 | names_recs = {pr.name: pr for pr in package_recs} 780 | top_package_rec = names_recs[top_name] 781 | python_deps = set() 782 | non_python_deps = set() 783 | direct_deps = set(map(name_from_ref, top_package_rec.depends)) 784 | for direct_name in direct_deps: 785 | direct_all_deps = all_deps(names_recs[direct_name], names_recs) 786 | if "python" in direct_all_deps: 787 | python_deps |= direct_all_deps 788 | python_deps.add(direct_name) 789 | else: 790 | non_python_deps |= direct_all_deps 791 | non_python_deps.add(direct_name) 792 | python_deps -= non_python_deps 793 | else: 794 | python_deps = set() 795 | 796 | is_top = False 797 | for package_rec in package_recs: 798 | if not top_found and package_rec.name == top_name: 799 | is_top = top_found = True 800 | else: 801 | is_top = False 802 | 803 | match_spec_str = str(package_rec.to_match_spec()) 804 | if match_spec_str in seen: 805 | print_color("Have already seen 
806 |             continue
807 | 
808 |         if config.skip_python and not is_top and package_rec.name in python_deps:
809 |             print_color("Skipping Python package dependency {YELLOW}" + match_spec_str + "{NO_COLOR}")
810 |             seen[match_spec_str] = None
811 |             continue
812 | 
813 |         print_color("Building {YELLOW}" + match_spec_str + "{NO_COLOR} as dependency of {GREEN}" + artifact_ref + "{NO_COLOR}")
814 |         wheel = package_to_wheel(
815 |             package_rec,
816 |             _top=is_top,
817 |             config=config
818 |         )
819 |         seen[match_spec_str] = wheel
820 | 
821 |     return seen
822 | 
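# --- Illustrative end-to-end sketch (added for exposition; not part of the
# original source). Ties the functions above together: solve a ref spec,
# convert the whole dependency tree to wheels, then merge the results into a
# single fat wheel with conda_press.wheel.fatten_from_seen. The ref spec
# string is an example only, and the default Config() is assumed to be usable
# as-is here.
def _example_tree_to_wheels():
    from conda_press.wheel import fatten_from_seen
    seen = artifact_ref_dependency_tree_to_wheels("scikit-image=0.15", config=Config())
    # `seen` maps match-spec strings to Wheel objects (or None for skipped
    # packages); fatten_from_seen merges them into one wheel file.
    fatten_from_seen(seen)
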
--------------------------------------------------------------------------------
/conda_press/wheel.xsh:
--------------------------------------------------------------------------------
1 | """Tools for representing wheels in-memory"""
2 | import os
3 | import re
4 | import sys
5 | import shutil
6 | import base64
7 | import tempfile
8 | import configparser
9 | from hashlib import sha256
10 | from zipfile import ZipFile, ZipInfo, ZIP_DEFLATED
11 | from collections import defaultdict
12 | from collections.abc import Sequence, MutableSequence
13 | 
14 | from tqdm import tqdm
15 | from lazyasd import lazyobject
16 | from xonsh.lib.os import indir, rmtree
17 | 
18 | from conda_press import __version__ as VERSION
19 | 
20 | 
21 | DYNAMIC_SP_UNIX_PROXY_SCRIPT = """#!/bin/bash
22 | current_dir="$( cd "$( dirname "${{BASH_SOURCE[0]}}" )" >/dev/null 2>&1 && pwd )"
23 | declare -a targets
24 | targets=$(echo "${{current_dir}}"/../lib/python*/site-packages/bin/{basename})
25 | #!PROXY_ENVVARS!#
26 | exec "${{targets[0]}}" "$@"
27 | """
28 | DYNAMIC_SP_PY_UNIX_PROXY_SCRIPT = """#!/bin/bash
29 | current_dir="$( cd "$( dirname "${{BASH_SOURCE[0]}}" )" >/dev/null 2>&1 && pwd )"
30 | declare -a targets
31 | targets=$(echo "${{current_dir}}"/../lib/python*/site-packages/bin/{basename})
32 | #!PROXY_ENVVARS!#
33 | exec "${{current_dir}}/python" "${{targets[0]}}" "$@"
34 | """
35 | KNOWN_SP_UNIX_PROXY_SCRIPT = """#!/bin/bash
36 | current_dir="$( cd "$( dirname "${{BASH_SOURCE[0]}}" )" >/dev/null 2>&1 && pwd )"
37 | #!PROXY_ENVVARS!#
38 | exec "${{current_dir}}/../lib/python{pymajor}.{pyminor}/site-packages/bin/{basename}" "$@"
39 | """
40 | KNOWN_SP_PY_UNIX_PROXY_SCRIPT = """#!/bin/bash
41 | current_dir="$( cd "$( dirname "${{BASH_SOURCE[0]}}" )" >/dev/null 2>&1 && pwd )"
42 | #!PROXY_ENVVARS!#
43 | exec "${{current_dir}}/python{pymajor}.{pyminor}" "${{current_dir}}/../lib/python{pymajor}.{pyminor}/site-packages/bin/{basename}" "$@"
44 | """
45 | WIN_PROXY_SCRIPT = """@echo off
46 | #!PROXY_ENVVARS!#
47 | call "%~dp0\\\\..\\\\Lib\\\\site-packages\\\\{path_to_exe}\\\\{basename}" %*
48 | exit /B %ERRORLEVEL%
49 | """
50 | 
51 | # Maps package names to a dict of environment variables (name -> value) to set
52 | # in the proxy scripts. These can use the following format-name expansions (on Unix):
53 | #   {current_dir}  -> ${{current_dir}}
54 | #   {sitepackages} -> ${{current_dir}}/../lib/python{pymajor}.{pyminor}/site-packages
55 | PROXY_ENVVARS = defaultdict(dict, {
56 |     'python': {"PYTHONPATH": "{sitepackages}"},
57 | })
58 | 
59 | WIN_EXE_WEIGHTS = defaultdict(int, {
60 |     ".com": 1,
61 |     ".bat": 2,
62 |     ".cmd": 3,
63 |     ".exe": 4,
64 | })
65 | 
66 | 
67 | @lazyobject
68 | def re_wheel_filename():
69 |     return re.compile(r'(?P<distribution>[^-]+)[-](?P<version>[^-]+)'
70 |                       r'([-](?P<build_tag>[^-]+))?[-](?P<python_tag>[^-]+)'
71 |                       r'[-](?P<abi_tag>[^-]+)[-](?P<platform_tag>[^-]+)\.whl')
72 | 
73 | def distinfo_from_filename(filename):
74 |     """Returns a dict of wheel information from a filename."""
75 |     basename = os.path.basename(filename)
76 |     m = re_wheel_filename.match(basename)
77 |     if m is None:
78 |         raise ValueError(
79 |             f"{filename} is malformed, needs to match " +
80 |             "'{distribution}-{version}(-{build tag})?-{python tag}-"
81 |             "{abi tag}-{platform tag}.whl'"
82 |         )
83 |     return m.groupdict()
84 | 
85 | 
86 | @lazyobject
87 | def re_python_ver():
88 |     return re.compile(r'^[cp]y(\d)(\d)$')
89 | 
90 | 
91 | @lazyobject
92 | def re_dist_escape():
93 |     return re.compile(r"[^\w\d.]+", flags=re.UNICODE)
94 | 
95 | 
96 | def dist_escape(distribution):
97 |     """Safely escapes a distribution string."""
98 |     return re_dist_escape.sub("_", distribution)
99 | 
100 | 
101 | def urlsafe_b64encode_nopad(data):
102 |     return base64.urlsafe_b64encode(data).rstrip(b'=')
103 | 
104 | 
105 | def urlsafe_b64decode_nopad(data):
106 |     pad = b'=' * (4 - (len(data) & 3))
107 |     return base64.urlsafe_b64decode(data + pad)
108 | 
109 | 
110 | def record_hash(data):
111 |     dig = sha256(data).digest()
112 |     b64 = urlsafe_b64encode_nopad(dig)
113 |     return 'sha256=' + b64.decode('utf8')
114 | 
115 | 
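# --- Illustrative sketch (added for exposition; not part of the original
# source). Shows how the helpers above behave on concrete inputs; the wheel
# filename here is made up.
def _example_distinfo_and_record_hash():
    info = distinfo_from_filename("scikit_learn-0.21.3-3-cp37-cp37m-linux_x86_64.whl")
    # -> {'distribution': 'scikit_learn', 'version': '0.21.3', 'build_tag': '3',
    #     'python_tag': 'cp37', 'abi_tag': 'cp37m', 'platform_tag': 'linux_x86_64'}
    print(info)
    # RECORD entries pair each archived file with an urlsafe, unpadded
    # base64-encoded sha256 digest:
    print(record_hash(b"print('hello')\n"))  # -> 'sha256=...'

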
116 | def _normalize_path_mappings(value, basedir, arcbase='.'):
117 |     # try to operate in place if we can.
118 |     if isinstance(value, Sequence) and not isinstance(value, MutableSequence):
119 |         value = list(value)
120 |     elif isinstance(value, MutableSequence):
121 |         pass
122 |     else:
123 |         raise TypeError(f'cannot convert pathlist, wrong type for {value!r}')
124 |     # make sure base dir is a path
125 |     if basedir is None:
126 |         raise TypeError('basedir must be a str, cannot be None')
127 |     # make alterations and return
128 |     for i in range(len(value)):
129 |         elem = value[i]
130 |         if isinstance(elem, str):
131 |             fsname = arcname = elem
132 |             norm_arcname = True
133 |         elif isinstance(elem, Sequence) and len(elem) == 2:
134 |             fsname, arcname = elem
135 |             norm_arcname = False
136 |         else:
137 |             raise TypeError(f'{elem!r} (value[{i}]) has the wrong type')
138 |         # normalize fsname
139 |         if os.path.isabs(fsname):
140 |             fsname = os.path.relpath(fsname, basedir)
141 |         # normalize arcpath
142 |         if norm_arcname:
143 |             if arcbase == '.':
144 |                 arcname = fsname
145 |             else:
146 |                 arcname = os.path.join(arcbase, os.path.basename(arcname))
147 |         # repack
148 |         value[i] = (fsname, arcname)
149 |     return value
150 | 
151 | 
152 | def normalize_version(version):
153 |     """Normalizes a version string from conda to PEP-440 style
154 |     """
155 |     parts = []
156 |     for ver in version.split(","):
157 |         ver = ver.strip()
158 |         if not ver:
159 |             continue
160 |         if ver[0].isdigit():
161 |             if ver[-1] == "*":
162 |                 ver = "==" + ver
163 |             else:
164 |                 ver = "~=" + ver
165 |         parts.append(ver)
166 |     return ",".join(parts)
167 | 
168 | 
169 | def parse_entry_points(wheel_or_file):
170 |     """Returns a list of entry points from a Wheel or an entry_points.txt filename."""
171 |     if isinstance(wheel_or_file, Wheel):
172 |         filename = os.path.join(
173 |             wheel_or_file.basedir,
174 |             f"{wheel_or_file.distribution}-{wheel_or_file.version}.dist-info",
175 |             "entry_points.txt"
176 |         )
177 |     else:
178 |         filename = wheel_or_file
179 |     if not os.path.isfile(filename):
180 |         print(f"entry points file {filename!r} does not exist!")
181 |         return []
182 |     config = configparser.ConfigParser()
183 |     config.read(filename)
184 |     return [f"{name} = {target}" for name, target in config.items("console_scripts")] if config.has_section("console_scripts") else []
185 | 
186 | 
187 | def parse_records(wheel_or_file):
188 |     """Returns a list of record tuples from a Wheel or a RECORD filename."""
189 |     if isinstance(wheel_or_file, Wheel):
190 |         filename = os.path.join(
191 |             wheel_or_file.basedir,
192 |             f"{wheel_or_file.distribution}-{wheel_or_file.version}.dist-info",
193 |             "RECORD"
194 |         )
195 |     else:
196 |         filename = wheel_or_file
197 |     if not os.path.isfile(filename):
198 |         print(f"RECORD file {filename!r} does not exist!")
199 |         return []
200 |     with open(filename) as f:
201 |         raw = f.read()
202 |     records = [line.split(",") for line in raw.splitlines() if line]
203 |     return records
204 | 
205 | 
206 | def parse_files(wheel_or_file):
207 |     """Returns a list of files from a Wheel or a RECORD filename."""
208 |     return [r[0] for r in parse_records(wheel_or_file)]
209 | 
210 | 
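# --- Illustrative sketch (added for exposition; not part of the original
# source). Worked examples of the conda -> PEP 440 version translation
# implemented by normalize_version above.
def _example_normalize_version():
    print(normalize_version("1.16.*"))        # -> '==1.16.*'  (wildcard pins become ==)
    print(normalize_version("2.7"))           # -> '~=2.7'     (bare versions become compatible releases)
    print(normalize_version(">=1.0,<2.0a0"))  # -> '>=1.0,<2.0a0'  (explicit operators pass through)

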
"numpy" 221 | version : str 222 | The version string for the package 223 | build_tag : str or int, optional 224 | The build number, must start with a digit, See PEP #427 225 | python_tag : str, optional 226 | The Python version tag, see PEP #425 227 | abi_tag : str, optional 228 | The Python ABI tag, see PEP #425 229 | platform_tag : str, optional 230 | The platform tag, see PEP #425 231 | 232 | Attributes 233 | ---------- 234 | noarch_python : bool 235 | Whether the package is a 'noarch: python' conda package. 236 | basedir : str or None, 237 | Location on filesystem where real files exist. 238 | derived_from : str 239 | This is a flag representing where this wheel came from. Valid values 240 | are: 241 | 242 | * "none": wheel object created from nothing 243 | * "artifact": wheel object created from conda artifact 244 | * "wheel": wheel object created from a wheel file. 245 | component_wheels : dict or None 246 | Mapping of component wheels when merging many wheels into one. 247 | This is only non-None valued during the actual merge operation. 248 | skipped_deps : set 249 | A set of dependency names we know that are excluded from the 250 | requirements. 251 | scripts : sequence of (filesystem-str, archive-str) tuples or None 252 | This maps filesystem paths to the scripts/filename.ext in the archive. 253 | If an entry is a filesystem path, it will be converted to the correct 254 | tuple. The filesystem path will be relative to the basedir. 255 | includes : sequence of (filesystem-str, archive-str) tuples or None 256 | This maps filesystem paths to the includes/filename.ext in the archive. 257 | If an entry is a filesystem path, it will be converted to the correct 258 | tuple. The filesystem path will be relative to the basedir. 259 | files : sequence of (filesystem-str, archive-str) tuples or None 260 | This maps filesystem paths to the path/to/filename.ext in the archive. 261 | If an entry is a filesystem path, it will be converted to the correct 262 | tuple. The filesystem path will be relative to the basedir. 
263 |         """
264 |         self.distribution = dist_escape(distribution)
265 |         self.version = version
266 |         self.build_tag = build_tag
267 |         self.python_tag = python_tag
268 |         self.abi_tag = abi_tag
269 |         self.platform_tag = platform_tag
270 |         self.noarch_python = False
271 |         self.basedir = None
272 |         self.derived_from = "none"
273 |         self.artifact_info = None
274 |         self.entry_points = []
275 |         self.moved_shared_libs = []
276 |         self.component_wheels = None
277 |         self.skipped_deps = frozenset()
278 |         self._records = [(f"{self.distribution}-{self.version}.dist-info/RECORD", "", "")]
279 |         self._scripts = []
280 |         self._includes = []
281 |         self._files = []
282 | 
283 |     def __repr__(self):
284 |         return f'{self.__class__.__name__}({self.filename})'
285 | 
286 |     def clean(self):
287 |         if self.artifact_info is not None:
288 |             self.artifact_info.clean()
289 | 
290 |     @classmethod
291 |     def from_file(cls, filename):
292 |         """Creates a wheel object from an existing wheel file."""
293 |         basename = os.path.basename(filename)
294 |         distinfo = distinfo_from_filename(filename)
295 |         whl = cls(**distinfo)
296 |         whl.basedir = tempfile.mkdtemp(prefix=basename + "-")
297 |         whl.derived_from = "wheel"
298 |         with ZipFile(filename) as zf:
299 |             zf.extractall(path=whl.basedir)
300 |         whl.entry_points.extend(parse_entry_points(whl))
301 |         whl._files.extend([(os.path.join(whl.basedir, x), x) for x in parse_files(whl)])
302 |         return whl
303 | 
304 |     @property
305 |     def filename(self):
306 |         parts = [self.distribution, self.version]
307 |         if self.build_tag is not None and not self.noarch_python:
308 |             parts.append(str(self.build_tag))
309 |         parts.extend([self.python_tag, self.abi_tag, self.platform_tag])
310 |         return '-'.join(parts) + '.whl'
311 | 
312 |     @property
313 |     def compatibility_tag(self):
314 |         return "-".join([self.python_tag, self.abi_tag, self.platform_tag])
315 | 
316 |     @property
317 |     def scripts(self):
318 |         return self._scripts
319 | 
320 |     @scripts.setter
321 |     def scripts(self, value):
322 |         arcdir = f"{self.distribution}-{self.version}.data/scripts"
323 |         self._scripts = _normalize_path_mappings(value, self.basedir, arcdir)
324 | 
325 |     @scripts.deleter
326 |     def scripts(self):
327 |         self._scripts = None
328 | 
329 |     @property
330 |     def includes(self):
331 |         return self._includes
332 | 
333 |     @includes.setter
334 |     def includes(self, value):
335 |         arcdir = f"{self.distribution}-{self.version}.data/headers"
336 |         self._includes = _normalize_path_mappings(value, self.basedir, arcdir)
337 | 
338 |     @includes.deleter
339 |     def includes(self):
340 |         self._includes = None
341 | 
342 |     @property
343 |     def files(self):
344 |         return self._files
345 | 
346 |     @files.setter
347 |     def files(self, value):
348 |         self._files = _normalize_path_mappings(value, self.basedir)
349 | 
350 |     @files.deleter
351 |     def files(self):
352 |         self._files = None
353 | 
354 |     def write(self, include_requirements=True, skip_python=False):
355 |         """Writes out the wheel file to disk.
356 | 
357 |         Parameters
358 |         ----------
359 |         include_requirements : bool, optional
360 |             Whether to include the requirements in the wheel metadata; normally True.
361 |         skip_python : bool, optional (drops the "python" requirement when True)
362 |         """
363 |         cl = {'compresslevel': 1} if sys.version_info[:2] >= (3, 7) else {}
364 |         with ZipFile(self.filename, 'w', compression=ZIP_DEFLATED, **cl) as zf:
365 |             self.zf = zf
366 |             self.write_from_filesystem('scripts')
367 |             self.write_from_filesystem('includes')
368 |             self.write_from_filesystem('files')
369 |             self.write_entry_points()
370 |             self.write_top_level()
371 |             self.write_metadata(
372 |                 include_requirements=include_requirements,
373 |                 skip_python=skip_python,
374 |             )
375 |             self.write_license_file()
376 |             self.write_wheel_metadata()
377 |             self.write_record()  # This *has* to be the last write
378 |         del self.zf
379 | 
380 |     def _writestr_and_record(self, arcname, data, zinfo=None):
381 |         if isinstance(data, str):
382 |             data = data.encode('utf-8')
383 |         if zinfo is None:
384 |             self.zf.writestr(arcname, data, compress_type=ZIP_DEFLATED)
385 |         else:
386 |             self.zf.writestr(zinfo, data, compress_type=ZIP_DEFLATED)
387 |         record = (arcname, record_hash(data), len(data))
388 |         self._records.append(record)
389 | 
390 |     def write_metadata(self, **kwargs):
391 |         """Writes out metadata"""
392 |         meth = getattr(self, "write_metadata_from_" + self.derived_from)
393 |         return meth(**kwargs)
394 | 
395 |     def write_metadata_from_wheel(self, **kwargs):
396 |         """Writes metadata from a wheel"""
397 |         print('Filtering metadata for merged-in wheels.')
398 |         arcname = f"{self.distribution}-{self.version}.dist-info/METADATA"
399 |         top_wheel = [w for w in self.component_wheels.values()
400 |                      if w is not None and getattr(w, "_top", False)][0]
401 |         with open(os.path.join(top_wheel.basedir, arcname), 'r') as f:
402 |             lines = f.readlines()
403 |         requires_lines = [(i, line.split()[1]) for i, line in enumerate(lines)
404 |                           if line.startswith('Requires-Dist:')]
405 |         merged_dists = {w.distribution for w in self.component_wheels.values()
406 |                         if w is not None}
407 |         merged_dists |= self.skipped_deps
408 |         merged_dists |= {d.replace("-", "_") for d in self.skipped_deps}
409 |         for i, dist in reversed(requires_lines):
410 |             if dist in merged_dists:
411 |                 print("Removing dependence on " + dist)
412 |                 del lines[i]
413 |         content = "".join(lines)
414 |         self._writestr_and_record(arcname, content)
415 | 
416 |     def write_metadata_from_artifact(self, include_requirements=True, skip_python=False,
417 |                                      **kwargs):
418 |         """Writes metadata from a conda artifact"""
419 |         print('Writing metadata from artifact')
420 |         lines = ["Metadata-Version: 2.1", "Name: " + self.distribution,
421 |                  "Version: " + self.version]
422 |         info = self.artifact_info
423 |         # add license
424 |         license = info.index_json.get("license", None)
425 |         if license:
426 |             lines.append("License: " + license)
427 |         # add requirements
428 |         if include_requirements and info is not None:
429 |             for name, ver_build in info.run_requirements.items():
430 |                 name = dist_escape(name)
431 |                 if skip_python and name == "python":
432 |                     continue
433 |                 ver, _, build = ver_build.partition(" ")
434 |                 ver = normalize_version(ver)
435 |                 line = f"Requires-Dist: {name} {ver}"
436 |                 lines.append(line)
437 |         # add about data
438 |         if info is not None and info.about_json is not None:
439 |             # add summary
440 |             if "summary" in info.about_json:
441 |                 summary = info.about_json["summary"].replace("\n", " ").replace("\\n", " ")
442 |                 lines.append("Summary: " + summary)
443 |             # add home page
444 |             if "home" in info.about_json:
445 |                 lines.append("Home-page: " + info.about_json["home"])
446 |             # add other project URLs
447 |             if "doc_url" in info.about_json:
448 |                 lines.append("Project-URL: Documentation, " + info.about_json["doc_url"])
449 |             if "dev_url" in info.about_json:
450 |                 lines.append("Project-URL: Development, " + info.about_json["dev_url"])
451 |             # add description
452 |             if "description" in info.about_json:
453 |                 desc = info.about_json["description"].replace("\\n", "\n")
454 |                 lines.append("")  # add a blank line
455 |                 lines.extend(desc.splitlines())
456 |         # write it out!
457 |         content = "\n".join(lines) + "\n"
458 |         arcname = f"{self.distribution}-{self.version}.dist-info/METADATA"
459 |         self._writestr_and_record(arcname, content)
460 | 
461 |     def write_license_file(self, **kwargs):
462 |         """Writes out the license file"""
463 |         meth = getattr(self, "write_license_file_from_" + self.derived_from)
464 |         return meth(**kwargs)
465 | 
466 |     def write_license_file_from_wheel(self, include_requirements=True):
467 |         """Writes the license from a wheel"""
468 |         print('License (presumably) already in wheel, skipping.')
469 | 
470 |     def write_license_file_from_artifact(self):
471 |         license_file = os.path.join(self.basedir, 'info', 'LICENSE.txt')
472 |         if not os.path.isfile(license_file):
473 |             return
474 |         print("Writing license file")
475 |         with open(license_file, 'rb') as f:
476 |             content = f.read()
477 |         arcname = f"{self.distribution}-{self.version}.dist-info/LICENSE"
478 |         self._writestr_and_record(arcname, content)
479 | 
480 |     def write_wheel_metadata(self):
481 |         print('Writing wheel metadata')
482 |         lines = ["Wheel-Version: 1.0", "Generator: conda-press " + VERSION]
483 |         lines.append("Root-Is-Purelib: " + str(self.noarch_python).lower())
484 |         lines.append("Tag: " + self.compatibility_tag)
485 |         if self.build_tag is not None:
486 |             lines.append("Build: " + str(self.build_tag))
487 |         content = "\n".join(lines) + "\n"
488 |         arcname = f"{self.distribution}-{self.version}.dist-info/WHEEL"
489 |         self._writestr_and_record(arcname, content)
490 | 
491 |     def write_from_filesystem(self, name):
492 |         print(f'Writing {name}')
493 |         files = getattr(self, name)
494 |         if not files:
495 |             print('Nothing to write!')
496 |             return
497 |         for fsname, arcname in tqdm(files):
498 |             if os.path.isabs(fsname):
499 |                 absname = fsname
500 |             else:
501 |                 absname = os.path.join(self.basedir, fsname)
502 |             if not os.path.isfile(absname):
503 |                 continue
504 |             elif False and os.path.islink(absname):
505 |                 # symbolic link, see https://gist.github.com/kgn/610907
506 |                 # unfortunately, pip doesn't extract symbolic links
507 |                 # properly. If this is ever fixed, replace the "False and"
508 |                 # above. Until then, we have to make a copy in the archive.
509 |                 data = os.readlink(absname).encode('utf-8')
510 |                 zinfo = ZipInfo.from_file(absname, arcname=arcname)
511 |                 zinfo.external_attr = 0xA1ED0000
512 |             else:
513 |                 with open(absname, 'br') as f:
514 |                     data = f.read()
515 |                 zinfo = ZipInfo.from_file(absname, arcname=arcname)
516 |                 zinfo.compress_type = ZIP_DEFLATED
517 |             self._writestr_and_record(arcname, data, zinfo=zinfo)
518 | 
519 |     def write_record(self):
520 |         print('Writing record')
521 |         lines = [f"{f},{h},{s}" for f, h, s in reversed(self._records)]
522 |         content = "\n".join(lines)
523 |         arcname = f"{self.distribution}-{self.version}.dist-info/RECORD"
524 |         self.zf.writestr(arcname, content)
525 | 
526 |     def write_entry_points(self):
527 |         if not self.entry_points:
528 |             return
529 |         print('Writing entry points')
530 |         lines = ["[console_scripts]"]
531 |         lines.extend(self.entry_points)
532 |         content = "\n".join(lines)
533 |         arcname = f"{self.distribution}-{self.version}.dist-info/entry_points.txt"
534 |         self._writestr_and_record(arcname, content)
535 | 
536 |     def write_top_level(self):
537 |         inits = []
538 |         for fsname, arcname in self.files:
539 |             if arcname.endswith('__init__.py'):
540 |                 pkg, _, _ = arcname.rpartition('/')
541 |                 inits.append(pkg)
542 |         if not inits:
543 |             return
544 |         inits.sort(key=len)
545 |         top_level = inits[0]
546 |         print(f"Writing {top_level} to top_level.txt")
547 |         arcname = f"{self.distribution}-{self.version}.dist-info/top_level.txt"
548 |         self._writestr_and_record(arcname, top_level + "\n")
549 | 
550 |     #
551 |     # rewrite the actual files going in to the Wheel, as needed
552 |     #
553 | 
554 |     def rewrite_python_shebang(self):
555 |         for fsname, arcname in self.scripts:
556 |             fspath = os.path.join(self.basedir, fsname)
557 |             with open(fspath, 'rb') as f:
558 |                 first = f.readline()
559 |                 if not first.startswith(b'#!'):
560 |                     continue
561 |                 elif b'pythonw' in first:
562 |                     shebang = b'#!pythonw\n'
563 |                 elif b'python' in first:
564 |                     shebang = b'#!python\n'
565 |                 else:
566 |                     continue
567 |                 remainder = f.read()
568 |             print(f"rewriting shebang for {fsname}")
569 |             replacement = shebang + remainder
570 |             with open(fspath, 'wb') as f:
571 |                 f.write(replacement)
572 | 
573 |     def rewrite_rpaths(self):
574 |         """Rewrite shared library relative (run) paths, as needed."""
575 |         for fsname, arcname in self.moved_shared_libs:
576 |             print(f'rewriting RPATH for {fsname}')
577 |             fspath = os.path.join(self.basedir, fsname)
578 |             containing_dir = os.path.dirname(arcname)
579 |             relpath_to_lib = os.path.relpath("lib/", containing_dir)
580 |             if sys.platform.startswith("linux"):
581 |                 rpath_to_lib = "$ORIGIN/" + relpath_to_lib
582 |                 current_rpath = $(patchelf --print-rpath @(fspath)).strip()
583 |                 new_rpath = rpath_to_lib + ":" + current_rpath if current_rpath else rpath_to_lib
584 |                 print(f'  new RPATH is {new_rpath}')
585 |                 $(patchelf --set-rpath @(new_rpath) @(fspath))
586 |             elif sys.platform == 'darwin':
587 |                 rpath_to_lib = "@loader_path/" + relpath_to_lib
588 |                 $(install_name_tool -add_rpath @(rpath_to_lib) @(fspath))
589 |             else:
590 |                 raise RuntimeError(f'cannot rewrite RPATHs on {sys.platform}')
591 | 
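    # --- Illustrative sketch (added for exposition; not part of the original
    # source). Shows the RPATH entry computed by rewrite_rpaths above for a
    # shared library relocated under site-packages; the archive path here is
    # an example only.
    @staticmethod
    def _example_rpath_for_moved_lib():
        arcname = "lib/python3.7/site-packages/foo/_ext.so"
        containing_dir = os.path.dirname(arcname)
        relpath_to_lib = os.path.relpath("lib/", containing_dir)
        # on Linux this is the $ORIGIN-relative entry handed to patchelf
        return "$ORIGIN/" + relpath_to_lib  # -> '$ORIGIN/../../..'
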
592 |     def rewrite_scripts_linking(self):
593 |         """Write wrapper scripts so that dynamic linkings in the
594 |         site-packages/lib/ directory will be picked up. These are
595 |         platform specific.
596 |         """
597 |         subdir = self.artifact_info.subdir
598 |         if subdir == "noarch":
599 |             pass
600 |         elif subdir.startswith("linux") or subdir.startswith("osx"):
601 |             self.rewrite_scripts_linking_unix()
602 |         elif subdir.startswith("win"):
603 |             self.rewrite_scripts_linking_win()
604 |         else:
605 |             raise NotImplementedError("subdir not recognized")
606 | 
607 |     def write_unix_script_proxy(self, absname):
608 |         root, ext = os.path.splitext(absname)
609 |         proxyname = root + '-proxy' + ext
610 |         basename = os.path.basename(absname)
611 |         # choose the template to fill based on whether we have Python's major/minor
612 |         # version numbers, or if we have to find the site-packages directory at
613 |         # run time.
614 |         with open(absname, 'rb') as f:
615 |             shebang = f.readline(12).strip()
616 |         is_py_script = (shebang == b"#!python")
617 |         m = re_python_ver.match(self.artifact_info.python_tag)
618 |         if m is None and is_py_script:
619 |             pymajor = pyminor = None
620 |             proxy_script = DYNAMIC_SP_PY_UNIX_PROXY_SCRIPT
621 |         elif m is None and not is_py_script:
622 |             pymajor = pyminor = None
623 |             proxy_script = DYNAMIC_SP_UNIX_PROXY_SCRIPT
624 |         elif m is not None and is_py_script:
625 |             pymajor, pyminor = m.groups()
626 |             proxy_script = KNOWN_SP_PY_UNIX_PROXY_SCRIPT
627 |             env_prefix = ""
628 |         else:
629 |             pymajor, pyminor = m.groups()
630 |             proxy_script = KNOWN_SP_UNIX_PROXY_SCRIPT
631 |             env_prefix = ""
632 |         sitepackages = ""
633 |         # format environment variables
634 |         proxy_envvars = PROXY_ENVVARS.get(self.distribution)
635 |         if proxy_envvars:
636 |             env = ""
637 |             if m is None:
638 |                 env += 'sitepackages="${{targets[0]}}/../.."\n'
639 |                 sitepackages = "${{sitepackages}}"
640 |             else:
641 |                 sitepackages = "${{current_dir}}/../lib/python{pymajor}.{pyminor}/site-packages"
642 |             for name, val in proxy_envvars.items():
643 |                 env += 'export ' + name + '="' + val.format(sitepackages=sitepackages,
644 |                                                             current_dir="${{current_dir}}")
645 |                 env += '"\n'
646 |         else:
647 |             env = ""
648 |         proxy_script = proxy_script.replace("#!PROXY_ENVVARS!#\n", env)
649 |         src = proxy_script.format(basename=basename, pymajor=pymajor, pyminor=pyminor)
650 |         with open(proxyname, 'w') as f:
651 |             f.write(src)
652 |         os.chmod(proxyname, 0o755)
653 |         return proxyname
654 | 
655 |     def rewrite_scripts_linking_unix(self):
656 |         # relocate the binaries inside the archive, write the proxy scripts
657 |         new_scripts = []
658 |         new_files = []
659 |         for fsname, arcname in self.scripts:
660 |             absname = os.path.join(self.basedir, fsname)
661 |             basename = os.path.basename(absname)
662 |             proxyname = self.write_unix_script_proxy(absname)
663 |             new_files.append((fsname, 'bin/' + basename))
664 |             new_scripts.append((proxyname, arcname))
665 |         self.files.extend(new_files)
666 |         self.scripts.clear()
667 |         self.scripts.extend(new_scripts)
668 | 
669 |     def write_win_script_proxy(self, proxyname, basename, path_to_exe="Scripts"):
670 |         # Windows does not need to choose a template based on whether we have
671 |         # Python's major/minor version numbers; we can just format the environment.
672 |         proxy_envvars = PROXY_ENVVARS.get(self.distribution)
673 |         env = ""
674 |         if proxy_envvars:
675 |             for name, val in proxy_envvars.items():
676 |                 val = val.replace("/", "\\\\")  # swap unix path separators for windows ones
677 |                 env += 'set ' + name + '=' + val.format(sitepackages="%~dp0\\\\..\\\\Lib\\\\site-packages",
678 |                                                         current_dir="%~dp0\\\\..")
679 |                 env += '\n'
680 |         proxy_script = WIN_PROXY_SCRIPT
681 |         proxy_script = proxy_script.replace("#!PROXY_ENVVARS!#\n", env)
682 |         src = proxy_script.format(basename=basename, path_to_exe=path_to_exe)
683 |         with open(proxyname, 'w', newline="\r\n") as f:
684 |             f.write(src)
685 |         return proxyname
686 | 
687 |     def rewrite_scripts_linking_win(self):
688 |         # relocate the binaries inside the archive, write the proxy scripts
689 |         new_scripts_map = {}
690 |         new_files = []
691 |         for fsname, arcname in self.scripts:
692 |             absname = os.path.join(self.basedir, fsname)
693 |             basename = os.path.basename(absname)
694 |             root, ext = os.path.splitext(absname)
695 |             proxyname = root + '-proxy.bat'
696 |             new_files.append((fsname, 'Scripts/' + basename))
697 |             arcroot, _ = os.path.splitext(arcname)
698 |             if proxyname not in new_scripts_map or WIN_EXE_WEIGHTS[ext] > WIN_EXE_WEIGHTS[new_scripts_map[proxyname][2]]:
699 |                 new_scripts_map[proxyname] = (arcroot + ".bat", basename, ext, "Scripts")
700 |         # add proxies to executables in non-standard places
701 |         arcdir = f"{self.distribution}-{self.version}.data/scripts"
702 |         for fsname in self.artifact_info.files:
703 |             if fsname.startswith("Scripts/"):
704 |                 # in the standard location
705 |                 continue
706 |             absname = os.path.join(self.basedir, fsname)
707 |             basename = os.path.basename(absname)
708 |             root, ext = os.path.splitext(absname)
709 |             if ext not in WIN_EXE_WEIGHTS:
710 |                 # not an executable
711 |                 continue
712 |             proxyname = root + '-proxy.bat'
713 |             arcname = arcdir + "/" + os.path.basename(root) + ".bat"
714 |             if proxyname not in new_scripts_map or WIN_EXE_WEIGHTS[ext] > WIN_EXE_WEIGHTS[new_scripts_map[proxyname][2]]:
715 |                 path_to_exe = os.path.dirname(fsname).replace("/", "\\\\")
716 |                 new_scripts_map[proxyname] = (arcname, basename, ext, path_to_exe)
717 |         # write the proxy files
718 |         new_scripts = []
719 |         for proxyname, (arcname, basename, _, path_to_exe) in new_scripts_map.items():
720 |             proxyname = self.write_win_script_proxy(proxyname, basename, path_to_exe)
721 |             new_scripts.append((proxyname, arcname))
722 |         # fix the script files themselves
723 |         for fsname, _ in new_files:
724 |             absname = os.path.join(self.basedir, fsname)
725 |             root, ext = os.path.splitext(fsname)
726 |             if ext == ".bat":
727 |                 print("munging path in " + fsname)
728 |                 with open(absname, 'r') as f:
729 |                     fsfile = f.read()
730 |                 fsfile = fsfile.replace(r'@SET "PYTHON_EXE=%~dp0\..\python.exe"',
731 |                                         r'@SET "PYTHON_EXE=%~dp0\..\..\..\Scripts\python.exe"')
732 |                 with open(absname, 'w') as f:
733 |                     f.write(fsfile)
734 |         # lock in the real values
735 |         self.files.extend(new_files)
736 |         self.scripts.clear()
737 |         self.scripts.extend(new_scripts)
738 | 
739 | 
740 | def _merge_file_filter(files, distinfo):
741 |     filtered = []
742 |     bad_arcnames = {
743 |         f"{distinfo['distribution']}-{distinfo['version']}.dist-info/WHEEL",
744 |         f"{distinfo['distribution']}-{distinfo['version']}.dist-info/METADATA",
745 |         f"{distinfo['distribution']}-{distinfo['version']}.dist-info/top_level.txt",
746 |     }
747 |     bad_arcbases = {"WHEEL", "METADATA", "RECORD"}
748 |     for f in files:
749 |         fsname, arcname = f
750 |         arcdir, arcbase = os.path.split(arcname)
751 |         if arcname in bad_arcnames:
752 |             continue
753 |         elif arcdir.endswith(".dist-info") and arcbase in bad_arcbases:
754 |             continue
755 |         filtered.append(f)
756 |     return filtered
757 | 
758 | 
759 | def merge(files, output=None, skipped_deps=None):
760 |     """Merges wheels together."""
761 |     if output is None:
762 |         distinfo = {"distribution": "package", "version": "1.0"}
763 |     else:
764 |         distinfo = distinfo_from_filename(output)
765 |     whl = Wheel(**distinfo)
766 |     whl.derived_from = "wheel"
767 |     whl.component_wheels = files
768 |     whl.skipped_deps = skipped_deps or set()
769 |     for ref, w in files.items():
770 |         if w is None:
771 |             continue
772 |         whl.entry_points += w.entry_points
773 |         whl._scripts += w._scripts
774 |         whl._includes += w._includes
775 |         whl._files += _merge_file_filter(w._files, distinfo)
776 |     whl._files.sort()
777 |     outdir = '.' if output is None else os.path.dirname(output)
778 |     with indir(outdir or '.'):
779 |         whl.write()
780 |     whl.component_wheels = None
781 |     return whl
782 | 
783 | 
784 | def fatten_from_seen(seen, output=None, skipped_deps=None):
785 |     """Merges wheels from a dict of seen wheels.
786 |     Returns a dict mapping the name of the created file to the Wheel.
787 |     """
788 |     wheels = {}
789 |     skipped_deps = skipped_deps or set()
790 |     os.makedirs('tmp-wheels', exist_ok=True)
791 |     for k, w in seen.items():
792 |         if w is None:
793 |             continue
794 |         fname = w.filename
795 |         istop = getattr(w, '_top', False)
796 |         if output is None and istop:
797 |             output = fname
798 |         reloc = os.path.join('tmp-wheels', fname)
799 |         shutil.move(fname, reloc)
800 |         wheels[reloc] = Wheel.from_file(reloc)
801 |         wheels[reloc]._top = istop
802 |     whl = merge(wheels, output=output, skipped_deps=skipped_deps)
803 |     rmtree('tmp-wheels')
804 |     print("Created fat wheel: " + output)
805 |     return {output: whl}
806 | 
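# --- Illustrative usage sketch (added for exposition; not part of the
# original source). Merges two previously built wheel files into one; the
# filenames here are examples only and must exist on disk.
def _example_merge_wheels():
    files = {
        "foo": Wheel.from_file("foo-1.0-py2.py3-none-linux_x86_64.whl"),
        "bar": Wheel.from_file("bar-2.1-py2.py3-none-linux_x86_64.whl"),
    }
    # write_metadata_from_wheel filters requirements relative to the wheel
    # flagged as _top, so exactly one component should carry that flag
    files["foo"]._top = True
    merge(files, output="foo-1.0-py2.py3-none-linux_x86_64.whl")
--------------------------------------------------------------------------------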