├── example_packages ├── npm │ ├── bin │ │ └── script │ ├── lib │ │ └── index.js │ ├── conda.yaml │ └── package.json ├── setuptools │ ├── package1 │ │ ├── __init__.py │ │ └── scripts.py │ ├── conda.yaml │ └── setup.py └── noarch_python │ ├── build.sh │ ├── run_test.py │ └── meta.yaml ├── tests ├── test-recipes │ ├── fail │ │ ├── symlinks │ │ │ ├── file1 │ │ │ ├── meta.yaml │ │ │ └── build.sh │ │ ├── conda-meta │ │ │ ├── build.sh │ │ │ └── meta.yaml │ │ ├── recursive-build2 │ │ │ └── meta.yaml │ │ └── recursive-build │ │ │ └── meta.yaml │ ├── metadata │ │ ├── TODO.md │ │ ├── post_build_versioning │ │ │ ├── build.sh │ │ │ ├── meta.yaml │ │ │ └── run_test.sh │ │ ├── binary_has_prefix_files │ │ │ ├── bld.bat │ │ │ ├── build.sh │ │ │ ├── meta.yaml │ │ │ ├── write_binary_has_prefix.py │ │ │ └── run_test.py │ │ ├── detect_binary_files_with_prefix │ │ │ ├── build.sh │ │ │ ├── run_test.sh │ │ │ ├── meta.yaml │ │ │ └── write_binary_has_prefix.py │ │ ├── build_skip │ │ │ ├── run_test.py │ │ │ └── meta.yaml │ │ ├── entry_points │ │ │ ├── build.sh │ │ │ ├── bld.bat │ │ │ ├── run_test.sh │ │ │ ├── meta.yaml │ │ │ └── run_test.bat │ │ ├── osx_is_app │ │ │ ├── build.sh │ │ │ ├── run_test.sh │ │ │ └── meta.yaml │ │ ├── source_path │ │ │ ├── build.sh │ │ │ ├── bld.bat │ │ │ └── meta.yaml │ │ ├── build_number │ │ │ ├── meta.yaml │ │ │ ├── run_test.sh │ │ │ └── run_test.bat │ │ ├── build_string │ │ │ ├── meta.yaml │ │ │ ├── run_test.sh │ │ │ └── run_test.bat │ │ ├── python_run │ │ │ ├── meta.yaml │ │ │ ├── run_test.sh │ │ │ ├── run_test.bat │ │ │ └── run_test.py │ │ ├── python_build │ │ │ ├── meta.yaml │ │ │ ├── run_test.sh │ │ │ ├── run_test.bat │ │ │ └── run_test.py │ │ ├── source_url │ │ │ ├── bld.bat │ │ │ ├── build.sh │ │ │ └── meta.yaml │ │ ├── numpy_run │ │ │ ├── meta.yaml │ │ │ ├── run_test.bat │ │ │ ├── run_test.sh │ │ │ └── run_test.py │ │ ├── numpy_build │ │ │ ├── meta.yaml │ │ │ ├── run_test.sh │ │ │ ├── run_test.bat │ │ │ └── run_test.py │ │ ├── extra_freeform_metadata │ 
│ │ ├── meta.yaml │ │ │ └── run_test.py │ │ ├── has_prefix_files │ │ │ ├── meta.yaml │ │ │ ├── build.sh │ │ │ ├── write_forward_slash_prefix.py │ │ │ ├── write_binary_has_prefix.py │ │ │ ├── bld.bat │ │ │ └── run_test.py │ │ ├── python_build_run │ │ │ ├── meta.yaml │ │ │ ├── run_test.sh │ │ │ ├── run_test.bat │ │ │ └── run_test.py │ │ ├── source_hg │ │ │ ├── build.sh │ │ │ ├── bld.bat │ │ │ └── meta.yaml │ │ ├── numpy_build_run │ │ │ ├── meta.yaml │ │ │ ├── run_test.sh │ │ │ ├── run_test.bat │ │ │ └── run_test.py │ │ ├── jinja2 │ │ │ └── meta.yaml │ │ ├── source_svn │ │ │ ├── bld.bat │ │ │ ├── meta.yaml │ │ │ └── build.sh │ │ ├── source_git │ │ │ ├── meta.yaml │ │ │ ├── build.sh │ │ │ └── bld.bat │ │ └── always_include_files_glob │ │ │ ├── meta.yaml │ │ │ └── run_test.py │ ├── TODO.md │ ├── test-package │ │ ├── conda_build_test │ │ │ ├── __init__.py │ │ │ └── manual_entry.py │ │ ├── bin │ │ │ └── test-script-setup.py │ │ └── setup.py │ └── build_recipes.sh ├── __init__.py ├── test-skeleton │ ├── sympy-0.7.5 │ │ ├── bld.bat │ │ ├── build.sh │ │ └── meta.yaml │ ├── sympy-0.7.5-url │ │ ├── bld.bat │ │ ├── build.sh │ │ └── meta.yaml │ └── test-skeleton.sh ├── test_misc.py ├── test_metadata.py ├── test_utils.py ├── test_main_develop.py └── install_miniconda.py ├── .gitattributes ├── setup.cfg ├── .gitignore ├── conda_build ├── cli-32.exe ├── cli-64.exe ├── __main__.py ├── header_test.py ├── __init__.py ├── elf.py ├── templates │ ├── npm.yaml │ └── setuptools.yaml ├── external.py ├── exceptions.py ├── jinja_context.py ├── main_index.py ├── scripts.py ├── tarcheck.py ├── ldd.py ├── _link.py ├── convert_gohlke.py ├── main_sign.py ├── config.py ├── macho.py ├── create_test.py ├── main_metapackage.py ├── noarch_python.py ├── index.py ├── windows.py ├── environ.py ├── utils.py ├── main_convert.py ├── main_develop.py ├── main_skeleton.py ├── _version.py ├── convert.py └── source.py ├── bin ├── conda-build ├── conda-index ├── conda-sign ├── conda-convert ├── conda-develop ├── 
conda-inspect ├── conda-pipbuild ├── conda-skeleton └── conda-metapackage ├── conda_build.recipe ├── run_test.py ├── build.sh ├── bld.bat └── meta.yaml ├── .travis.yml ├── setup.py ├── LICENSE.txt ├── README.rst └── .binstar.yml /example_packages/npm/bin/script: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /tests/test-recipes/fail/symlinks/file1: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /example_packages/npm/lib/index.js: -------------------------------------------------------------------------------- 1 | // Hello! 2 | -------------------------------------------------------------------------------- /example_packages/setuptools/package1/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /.gitattributes: -------------------------------------------------------------------------------- 1 | conda_build/_version.py export-subst 2 | -------------------------------------------------------------------------------- /setup.cfg: -------------------------------------------------------------------------------- 1 | [pytest] 2 | norecursedirs= tests/test-recipes -------------------------------------------------------------------------------- /example_packages/setuptools/conda.yaml: -------------------------------------------------------------------------------- 1 | {% extends "setuptools.yaml" %} 2 | -------------------------------------------------------------------------------- /tests/test-recipes/metadata/TODO.md: -------------------------------------------------------------------------------- 1 | - tests 2 | - about 3 | - app 4 | - selectors 5 | 
-------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | *~ 2 | *.pyc 3 | *.egg-info 4 | build/ 5 | dist/ 6 | docs/build 7 | tags 8 | .idea/ 9 | -------------------------------------------------------------------------------- /conda_build/cli-32.exe: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mwcraig/conda-build/master/conda_build/cli-32.exe -------------------------------------------------------------------------------- /conda_build/cli-64.exe: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mwcraig/conda-build/master/conda_build/cli-64.exe -------------------------------------------------------------------------------- /tests/test-recipes/metadata/post_build_versioning/build.sh: -------------------------------------------------------------------------------- 1 | echo "12.345.67" > __conda_version__.txt 2 | -------------------------------------------------------------------------------- /conda_build/__main__.py: -------------------------------------------------------------------------------- 1 | import sys 2 | from conda_build.main_build import main 3 | 4 | sys.exit(main()) 5 | -------------------------------------------------------------------------------- /tests/__init__.py: -------------------------------------------------------------------------------- 1 | # This is just here so that tests is a package, so that dotted relative 2 | # imports work. 
3 | -------------------------------------------------------------------------------- /tests/test-recipes/TODO.md: -------------------------------------------------------------------------------- 1 | - build environment variables 2 | - pre/post link/unlink 3 | - post-build version 4 | -------------------------------------------------------------------------------- /tests/test-recipes/fail/conda-meta/build.sh: -------------------------------------------------------------------------------- 1 | mkdir -p $PREFIX/conda-meta 2 | touch $PREFIX/conda-meta/nope 3 | -------------------------------------------------------------------------------- /tests/test-recipes/fail/recursive-build2/meta.yaml: -------------------------------------------------------------------------------- 1 | package: 2 | name: recursive-build2 3 | version: 1.0 4 | -------------------------------------------------------------------------------- /tests/test-recipes/metadata/binary_has_prefix_files/bld.bat: -------------------------------------------------------------------------------- 1 | python "%RECIPE_DIR%\write_binary_has_prefix.py" 2 | -------------------------------------------------------------------------------- /tests/test-recipes/metadata/binary_has_prefix_files/build.sh: -------------------------------------------------------------------------------- 1 | python $RECIPE_DIR/write_binary_has_prefix.py 2 | -------------------------------------------------------------------------------- /tests/test-recipes/fail/symlinks/meta.yaml: -------------------------------------------------------------------------------- 1 | package: 2 | name: conda-build-test-symlinks-fail 3 | version: 1.0 4 | -------------------------------------------------------------------------------- /tests/test-recipes/metadata/detect_binary_files_with_prefix/build.sh: -------------------------------------------------------------------------------- 1 | python $RECIPE_DIR/write_binary_has_prefix.py 2 | 
-------------------------------------------------------------------------------- /tests/test-recipes/fail/conda-meta/meta.yaml: -------------------------------------------------------------------------------- 1 | package: 2 | name: conda-build-test-conda-meta-fail 3 | version: 1.0 4 | -------------------------------------------------------------------------------- /tests/test-recipes/metadata/build_skip/run_test.py: -------------------------------------------------------------------------------- 1 | raise ValueError("This shouldn't have built. We skipped it! :(") 2 | -------------------------------------------------------------------------------- /tests/test-recipes/metadata/entry_points/build.sh: -------------------------------------------------------------------------------- 1 | cd tests/test-recipes/test-package 2 | 3 | python setup.py install 4 | -------------------------------------------------------------------------------- /tests/test-recipes/metadata/osx_is_app/build.sh: -------------------------------------------------------------------------------- 1 | cd tests/test-recipes/test-package 2 | 3 | python setup.py install 4 | -------------------------------------------------------------------------------- /bin/conda-build: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | import sys 3 | from conda_build.main_build import main 4 | 5 | sys.exit(main()) 6 | -------------------------------------------------------------------------------- /bin/conda-index: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | import sys 3 | from conda_build.main_index import main 4 | 5 | sys.exit(main()) 6 | -------------------------------------------------------------------------------- /bin/conda-sign: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | import sys 3 | from conda_build.main_sign import 
main 4 | 5 | sys.exit(main()) 6 | -------------------------------------------------------------------------------- /conda_build.recipe/run_test.py: -------------------------------------------------------------------------------- 1 | import conda_build 2 | 3 | print('conda_build.__version__: %s' % conda_build.__version__) 4 | -------------------------------------------------------------------------------- /bin/conda-convert: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | import sys 3 | from conda_build.main_convert import main 4 | 5 | sys.exit(main()) 6 | -------------------------------------------------------------------------------- /bin/conda-develop: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | import sys 3 | from conda_build.main_develop import main 4 | 5 | sys.exit(main()) 6 | -------------------------------------------------------------------------------- /bin/conda-inspect: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | import sys 3 | from conda_build.main_inspect import main 4 | 5 | sys.exit(main()) 6 | -------------------------------------------------------------------------------- /bin/conda-pipbuild: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | import sys 3 | from conda_build.main_pipbuild import main 4 | 5 | sys.exit(main()) 6 | -------------------------------------------------------------------------------- /bin/conda-skeleton: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | import sys 3 | from conda_build.main_skeleton import main 4 | 5 | sys.exit(main()) 6 | -------------------------------------------------------------------------------- /tests/test-recipes/metadata/source_path/build.sh: 
-------------------------------------------------------------------------------- 1 | # If directory exists, we did it right 2 | cd tests/test-recipes/test-package 3 | -------------------------------------------------------------------------------- /bin/conda-metapackage: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | import sys 3 | from conda_build.main_metapackage import main 4 | 5 | sys.exit(main()) 6 | -------------------------------------------------------------------------------- /tests/test-recipes/metadata/entry_points/bld.bat: -------------------------------------------------------------------------------- 1 | cd tests\test-recipes\test-package 2 | python setup.py install 3 | if errorlevel 1 exit 1 4 | -------------------------------------------------------------------------------- /conda_build.recipe/build.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | $PYTHON setup.py install 4 | 5 | cp bdist_conda.py ${PREFIX}/lib/python${PY_VER}/distutils/command 6 | -------------------------------------------------------------------------------- /tests/test-recipes/metadata/detect_binary_files_with_prefix/run_test.sh: -------------------------------------------------------------------------------- 1 | cd $PREFIX 2 | cat binary-has-prefix 3 | cat binary-has-prefix | grep $PREFIX 4 | -------------------------------------------------------------------------------- /tests/test-recipes/test-package/conda_build_test/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | conda build test package 3 | """ 4 | print("conda_build_test has been imported") 5 | -------------------------------------------------------------------------------- /tests/test-recipes/metadata/build_number/meta.yaml: -------------------------------------------------------------------------------- 1 | package: 2 | name: 
conda-build-test-build-number 3 | version: 1.0 4 | 5 | build: 6 | number: 1 7 | -------------------------------------------------------------------------------- /tests/test-recipes/metadata/build_string/meta.yaml: -------------------------------------------------------------------------------- 1 | package: 2 | name: conda-build-test-build-string 3 | version: 1.0 4 | 5 | build: 6 | string: abc 7 | -------------------------------------------------------------------------------- /tests/test-recipes/metadata/source_path/bld.bat: -------------------------------------------------------------------------------- 1 | rem If directory exists, we did it right 2 | cd tests/test-recipes/test-package 3 | if errorlevel 1 exit 1 4 | -------------------------------------------------------------------------------- /tests/test-recipes/metadata/source_path/meta.yaml: -------------------------------------------------------------------------------- 1 | package: 2 | name: conda-build-test-source-path 3 | version: 1.0 4 | 5 | source: 6 | path: ../../../../ 7 | -------------------------------------------------------------------------------- /tests/test-recipes/metadata/python_run/meta.yaml: -------------------------------------------------------------------------------- 1 | package: 2 | name: conda-build-test-python-run 3 | version: 1.0 4 | 5 | requirements: 6 | run: 7 | - python 8 | -------------------------------------------------------------------------------- /example_packages/noarch_python/build.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | $PYTHON setup.py install 4 | 5 | EXAMPLES=$PREFIX/Examples 6 | mkdir $EXAMPLES 7 | mv examples $EXAMPLES/bokeh 8 | -------------------------------------------------------------------------------- /tests/test-recipes/metadata/python_build/meta.yaml: -------------------------------------------------------------------------------- 1 | package: 2 | name: conda-build-test-python-build 3 | 
version: 1.0 4 | 5 | requirements: 6 | build: 7 | - python 8 | -------------------------------------------------------------------------------- /tests/test-recipes/metadata/source_url/bld.bat: -------------------------------------------------------------------------------- 1 | set PYTHONPATH=. 2 | python -c "import conda_build; assert conda_build.__version__ == 'tag: 1.8.1'" 3 | if errorlevel 1 exit 1 4 | -------------------------------------------------------------------------------- /tests/test-recipes/metadata/numpy_run/meta.yaml: -------------------------------------------------------------------------------- 1 | package: 2 | name: conda-build-test-numpy-run 3 | version: 1.0 4 | 5 | requirements: 6 | run: 7 | - python 8 | - numpy 9 | -------------------------------------------------------------------------------- /tests/test-recipes/metadata/numpy_build/meta.yaml: -------------------------------------------------------------------------------- 1 | package: 2 | name: conda-build-test-numpy-build 3 | version: 1.0 4 | 5 | requirements: 6 | build: 7 | - python 8 | - numpy 9 | -------------------------------------------------------------------------------- /tests/test-recipes/metadata/extra_freeform_metadata/meta.yaml: -------------------------------------------------------------------------------- 1 | package: 2 | name: conda-build-test-extra-metadata 3 | version: 0.1 4 | 5 | extra: 6 | custom: metadata 7 | however: {we: want} 8 | -------------------------------------------------------------------------------- /tests/test-recipes/metadata/has_prefix_files/meta.yaml: -------------------------------------------------------------------------------- 1 | package: 2 | name: conda-build-test-has-prefix-files 3 | version: 1.0 4 | 5 | build: 6 | has_prefix_files: 7 | - binary-has-prefix 8 | -------------------------------------------------------------------------------- /tests/test-recipes/metadata/source_url/build.sh: 
-------------------------------------------------------------------------------- 1 | # Not sure how versioneer comes up with this version 2 | PYTHONPATH=. python -c "import conda_build; assert conda_build.__version__ == 'tag: 1.8.1'" 3 | -------------------------------------------------------------------------------- /tests/test-recipes/metadata/build_skip/meta.yaml: -------------------------------------------------------------------------------- 1 | package: 2 | name: conda-build-skip 3 | version: 1.0 4 | 5 | build: 6 | skip: True 7 | 8 | requirements: 9 | run: 10 | - python 11 | -------------------------------------------------------------------------------- /example_packages/setuptools/package1/scripts.py: -------------------------------------------------------------------------------- 1 | ''' 2 | Created on Jan 16, 2014 3 | 4 | @author: sean 5 | ''' 6 | from __future__ import print_function 7 | 8 | def main(): 9 | print('hello!') 10 | -------------------------------------------------------------------------------- /tests/test-recipes/metadata/entry_points/run_test.sh: -------------------------------------------------------------------------------- 1 | test-script-setup.py 2 | test-script-setup.py | grep "Test script setup\.py" 3 | 4 | test-script-manual 5 | test-script-manual | grep "Manual entry point" 6 | -------------------------------------------------------------------------------- /tests/test-recipes/metadata/has_prefix_files/build.sh: -------------------------------------------------------------------------------- 1 | echo $PREFIX > $PREFIX/automatic-prefix 2 | echo /opt/anaconda1anaconda2anaconda3 > $PREFIX/has-prefix 3 | python $RECIPE_DIR/write_binary_has_prefix.py 4 | -------------------------------------------------------------------------------- /tests/test-recipes/metadata/post_build_versioning/meta.yaml: -------------------------------------------------------------------------------- 1 | package: 2 | name: post-build-versioning 3 | 4 | source: 5 
| fn: master.zip 6 | url: https://github.com/conda/conda-build/archive/master.zip 7 | -------------------------------------------------------------------------------- /tests/test-recipes/metadata/binary_has_prefix_files/meta.yaml: -------------------------------------------------------------------------------- 1 | package: 2 | name: conda-build-test-binary-has-prefix-files 3 | version: 1.0 4 | 5 | build: 6 | binary_has_prefix_files: 7 | - binary-has-prefix 8 | -------------------------------------------------------------------------------- /tests/test-recipes/metadata/detect_binary_files_with_prefix/meta.yaml: -------------------------------------------------------------------------------- 1 | package: 2 | name: conda-build-test-detect-binary-files-with-prefix 3 | version: 1.0 4 | 5 | build: 6 | detect_binary_files_with_prefix: true 7 | -------------------------------------------------------------------------------- /tests/test-recipes/metadata/python_build_run/meta.yaml: -------------------------------------------------------------------------------- 1 | package: 2 | name: conda-build-test-python-build-run 3 | version: 1.0 4 | 5 | requirements: 6 | build: 7 | - python 8 | run: 9 | - python 10 | -------------------------------------------------------------------------------- /tests/test-recipes/metadata/numpy_build/run_test.sh: -------------------------------------------------------------------------------- 1 | conda list -p $PREFIX --canonical 2 | # Test the build string. 
Should not contain Numpy 3 | conda list -p $PREFIX --canonical | grep "conda-build-test-numpy-build-1.0-0" 4 | -------------------------------------------------------------------------------- /tests/test-recipes/metadata/post_build_versioning/run_test.sh: -------------------------------------------------------------------------------- 1 | cat $PREFIX/conda-meta/post-build-versioning-12.345.67-0.json 2 | cat $PREFIX/conda-meta/post-build-versioning-12.345.67-0.json | grep '"version": "12.345.67"' 3 | -------------------------------------------------------------------------------- /tests/test-recipes/metadata/python_build/run_test.sh: -------------------------------------------------------------------------------- 1 | conda list -p $PREFIX --canonical 2 | # Test the build string. Should not contain Python 3 | conda list -p $PREFIX --canonical | grep "conda-build-test-python-build-1.0-0" 4 | -------------------------------------------------------------------------------- /tests/test-recipes/metadata/python_run/run_test.sh: -------------------------------------------------------------------------------- 1 | conda list -p $PREFIX --canonical 2 | # Test the build string. 
Should contain Python 3 | conda list -p $PREFIX --canonical | grep "conda-build-test-python-run-1\.0-py.._0" 4 | -------------------------------------------------------------------------------- /conda_build.recipe/bld.bat: -------------------------------------------------------------------------------- 1 | python setup.py install 2 | if errorlevel 1 exit 1 3 | 4 | del %SCRIPTS%\conda-init 5 | if errorlevel 1 exit 1 6 | 7 | copy bdist_conda.py %PREFIX%\Lib\distutils\command\ 8 | if errorlevel 1 exit 1 9 | -------------------------------------------------------------------------------- /tests/test-recipes/metadata/numpy_build/run_test.bat: -------------------------------------------------------------------------------- 1 | conda list -p "%PREFIX%" --canonical 2 | if errorlevel 1 exit 1 3 | conda list -p "%PREFIX%" --canonical | grep "conda-build-test-numpy-build-1.0-0" 4 | if errorlevel 1 exit 1 5 | -------------------------------------------------------------------------------- /tests/test-recipes/metadata/source_hg/build.sh: -------------------------------------------------------------------------------- 1 | # We test the environment variables in a different recipe 2 | 3 | # Ensure we are in a git repo 4 | [ -d .hg ] 5 | hg id 6 | [ "$(hg id)" = "6364a674cc15 test" ] 7 | [ -e test ] 8 | -------------------------------------------------------------------------------- /tests/test-recipes/metadata/numpy_run/run_test.bat: -------------------------------------------------------------------------------- 1 | conda list -p "%PREFIX%" --canonical 2 | if errorlevel 1 exit 1 3 | conda list -p "%PREFIX%" --canonical | grep "conda-build-test-numpy-run-1\.0-py.._0" 4 | if errorlevel 1 exit 1 5 | -------------------------------------------------------------------------------- /tests/test-recipes/metadata/python_build/run_test.bat: -------------------------------------------------------------------------------- 1 | conda list -p "%PREFIX%" --canonical 2 | if errorlevel 1 exit 1 3 
| conda list -p "%PREFIX%" --canonical | grep "conda-build-test-python-build-1.0-0" 4 | if errorlevel 1 exit 1 5 | -------------------------------------------------------------------------------- /tests/test-recipes/metadata/python_build_run/run_test.sh: -------------------------------------------------------------------------------- 1 | conda list -p $PREFIX --canonical 2 | # Test the build string. Should contain Python 3 | conda list -p $PREFIX --canonical | grep "conda-build-test-python-build-run-1\.0-py.._0" 4 | -------------------------------------------------------------------------------- /tests/test-recipes/metadata/python_run/run_test.bat: -------------------------------------------------------------------------------- 1 | conda list -p "%PREFIX%" --canonical 2 | if errorlevel 1 exit 1 3 | conda list -p "%PREFIX%" --canonical | grep "conda-build-test-python-run-1\.0-py.._0" 4 | if errorlevel 1 exit 1 5 | -------------------------------------------------------------------------------- /tests/test-recipes/metadata/has_prefix_files/write_forward_slash_prefix.py: -------------------------------------------------------------------------------- 1 | import os 2 | 3 | prefix = os.environ['PREFIX'] 4 | fn = '%s/forward-slash-prefix' % prefix 5 | 6 | with open(fn, 'w') as f: 7 | f.write(prefix.replace('\\', '/')) 8 | -------------------------------------------------------------------------------- /tests/test-recipes/metadata/numpy_run/run_test.sh: -------------------------------------------------------------------------------- 1 | conda list -p $PREFIX --canonical 2 | # Test the build string. 
Should contian NumPy, but not the version 3 | conda list -p $PREFIX --canonical | grep "conda-build-test-numpy-run-1\.0-py.._0" 4 | -------------------------------------------------------------------------------- /tests/test-recipes/metadata/python_build_run/run_test.bat: -------------------------------------------------------------------------------- 1 | conda list -p "%PREFIX%" --canonical 2 | if errorlevel 1 exit 1 3 | conda list -p "%PREFIX%" --canonical | grep "conda-build-test-python-build-run-1\.0-py.._0" 4 | if errorlevel 1 exit 1 5 | -------------------------------------------------------------------------------- /example_packages/noarch_python/run_test.py: -------------------------------------------------------------------------------- 1 | import sys 2 | import bokeh 3 | 4 | if sys.platform != 'win32': 5 | bokeh.test(verbosity=2, exit=False) 6 | 7 | print('bokeh.__version__: %s' % bokeh.__version__) 8 | #assert bokeh.__version__ == '0.7.1' 9 | -------------------------------------------------------------------------------- /tests/test-recipes/metadata/numpy_build_run/meta.yaml: -------------------------------------------------------------------------------- 1 | package: 2 | name: conda-build-test-numpy-build-run 3 | version: 1.0 4 | 5 | requirements: 6 | build: 7 | - python 8 | - numpy 9 | run: 10 | - python 11 | - numpy 12 | -------------------------------------------------------------------------------- /tests/test-recipes/metadata/numpy_build_run/run_test.sh: -------------------------------------------------------------------------------- 1 | conda list -p $PREFIX --canonical 2 | # Test the build string. 
Should contain NumPy, but not the version 3 | conda list -p $PREFIX --canonical | grep "conda-build-test-numpy-build-run-1\.0-py.._0" 4 | -------------------------------------------------------------------------------- /tests/test-recipes/metadata/has_prefix_files/write_binary_has_prefix.py: -------------------------------------------------------------------------------- 1 | import os 2 | 3 | prefix = os.environ['PREFIX'] 4 | fn = os.path.join(prefix, 'binary-has-prefix') 5 | 6 | with open(fn, 'wb') as f: 7 | f.write(prefix.encode('utf-8') + b'\x00') 8 | -------------------------------------------------------------------------------- /tests/test-recipes/metadata/numpy_build_run/run_test.bat: -------------------------------------------------------------------------------- 1 | @echo on 2 | conda list -p "%PREFIX%" --canonical 3 | if errorlevel 1 exit 1 4 | conda list -p "%PREFIX%" --canonical | grep "conda-build-test-numpy-build-run-1\.0-py.._0" 5 | if errorlevel 1 exit 1 6 | -------------------------------------------------------------------------------- /tests/test-recipes/metadata/binary_has_prefix_files/write_binary_has_prefix.py: -------------------------------------------------------------------------------- 1 | import os 2 | 3 | prefix = os.environ['PREFIX'] 4 | fn = os.path.join(prefix, 'binary-has-prefix') 5 | 6 | with open(fn, 'wb') as f: 7 | f.write(prefix.encode('utf-8') + b'\x00') 8 | -------------------------------------------------------------------------------- /tests/test-recipes/metadata/detect_binary_files_with_prefix/write_binary_has_prefix.py: -------------------------------------------------------------------------------- 1 | import os 2 | 3 | prefix = os.environ['PREFIX'] 4 | fn = '%s/binary-has-prefix' % prefix 5 | 6 | with open(fn, 'wb') as f: 7 | f.write(prefix.encode('utf-8') + b'\x00') 8 | -------------------------------------------------------------------------------- /tests/test-recipes/metadata/has_prefix_files/bld.bat: 
-------------------------------------------------------------------------------- 1 | echo %PREFIX% > "%PREFIX%\automatic-prefix" 2 | echo /opt/anaconda1anaconda2anaconda3 > "%PREFIX%\has-prefix" 3 | python "%RECIPE_DIR%\write_binary_has_prefix.py" 4 | python "%RECIPE_DIR%\write_forward_slash_prefix.py" 5 | -------------------------------------------------------------------------------- /example_packages/npm/conda.yaml: -------------------------------------------------------------------------------- 1 | {% extends "npm.yaml" %} 2 | 3 | {% block requirements %} 4 | 5 | requirements: 6 | build: 7 | - nodejs 8 | - coffee-script 9 | - cakehelper 10 | - grunt-cli 11 | 12 | run: 13 | - nodejs 14 | 15 | {% endblock %} 16 | -------------------------------------------------------------------------------- /tests/test-recipes/metadata/source_hg/bld.bat: -------------------------------------------------------------------------------- 1 | if not exist .hg exit 1 2 | hg id 3 | if errorlevel 1 exit 1 4 | for /f "delims=" %%i in ('hg id') do set hgid=%%i 5 | if errorlevel 1 exit 1 6 | echo "%hgid%" 7 | if not "%hgid%"=="6364a674cc15 test" exit 1 8 | if not exist test exit 1 9 | -------------------------------------------------------------------------------- /tests/test-recipes/metadata/jinja2/meta.yaml: -------------------------------------------------------------------------------- 1 | package: 2 | name: conda-build-test-jinja2-in-recipe 3 | version: 1.0 4 | 5 | build: 6 | number: 0 7 | 8 | requirements: 9 | build: 10 | - jinja2 11 | - python 12 | 13 | test: 14 | commands: 15 | - echo 16 | -------------------------------------------------------------------------------- /tests/test-recipes/test-package/bin/test-script-setup.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | import conda_build_test 3 | conda_build_test 4 | 5 | print("Test script setup.py") 6 | 7 | if __name__ == "__main__": 8 | from conda_build_test 
import manual_entry 9 | manual_entry.main() 10 | -------------------------------------------------------------------------------- /tests/test-recipes/metadata/source_svn/bld.bat: -------------------------------------------------------------------------------- 1 | if not exist trunk exit 1 2 | cd trunk 3 | svn info 4 | if errorlevel 1 exit 1 5 | for /f "delims=" %%i in ('svn info ^| grep "Revision"') do set svnrev=%%i 6 | if errorlevel 1 exit 1 7 | echo %svnrev% 8 | if not "%svnrev%"=="Revision: 1157" exit 1 9 | -------------------------------------------------------------------------------- /tests/test-skeleton/sympy-0.7.5/bld.bat: -------------------------------------------------------------------------------- 1 | "%PYTHON%" setup.py install 2 | if errorlevel 1 exit 1 3 | 4 | :: Add more build steps here, if they are necessary. 5 | 6 | :: See 7 | :: http://docs.continuum.io/conda/build.html 8 | :: for a list of environment variables that are set during the build process. 9 | -------------------------------------------------------------------------------- /tests/test-skeleton/sympy-0.7.5/build.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | $PYTHON setup.py install 4 | 5 | # Add more build steps here, if they are necessary. 6 | 7 | # See 8 | # http://docs.continuum.io/conda/build.html 9 | # for a list of environment variables that are set during the build process. 10 | -------------------------------------------------------------------------------- /tests/test-skeleton/sympy-0.7.5-url/bld.bat: -------------------------------------------------------------------------------- 1 | "%PYTHON%" setup.py install 2 | if errorlevel 1 exit 1 3 | 4 | :: Add more build steps here, if they are necessary. 5 | 6 | :: See 7 | :: http://docs.continuum.io/conda/build.html 8 | :: for a list of environment variables that are set during the build process. 
9 | -------------------------------------------------------------------------------- /tests/test-skeleton/sympy-0.7.5-url/build.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | $PYTHON setup.py install 4 | 5 | # Add more build steps here, if they are necessary. 6 | 7 | # See 8 | # http://docs.continuum.io/conda/build.html 9 | # for a list of environment variables that are set during the build process. 10 | -------------------------------------------------------------------------------- /tests/test-recipes/metadata/source_git/meta.yaml: -------------------------------------------------------------------------------- 1 | package: 2 | name: conda-build-test-source-git 3 | version: 1.0 4 | 5 | source: 6 | git_url: https://github.com/conda/conda-build 7 | git_tag: 1.8.1 8 | 9 | requirements: 10 | build: 11 | # To test the conda_build version 12 | - python 13 | -------------------------------------------------------------------------------- /tests/test-recipes/fail/recursive-build/meta.yaml: -------------------------------------------------------------------------------- 1 | package: 2 | name: conda-build-test-recursive-build-fail 3 | version: 1.0 4 | 5 | # This is a version of the package that doesn't exist. The test is that this 6 | # doesn't run infinitely. 
def main():
    """Entry point exercised by the entry_points test recipes.

    Parses (and discards) whatever is on ``sys.argv``, then prints a
    marker string that the recipe test scripts grep for.
    """
    import argparse

    # argparse reads sys.argv implicitly; no arguments are defined.
    arg_parser = argparse.ArgumentParser(
        description="Basic parser."
    )
    arg_parser.parse_args()

    print("Manual entry point")
import os


def main():
    """Verify the build prefix was binary-patched into the
    ``binary-has-prefix`` file written at build time."""
    env_prefix = os.environ['PREFIX']
    target = os.path.join(env_prefix, 'binary-has-prefix')

    with open(target, 'rb') as handle:
        payload = handle.read()

    print(payload)
    assert env_prefix.encode('utf-8') in payload


if __name__ == '__main__':
    main()
import os
import json


def main():
    """Check that the installed python-build test package records no
    runtime dependencies (python was a build-only requirement)."""
    env_prefix = os.environ['PREFIX']
    record = os.path.join(env_prefix, 'conda-meta',
                          'conda-build-test-python-build-1.0-0.json')
    with open(record, 'r') as handle:
        metadata = json.load(handle)

    assert len(metadata['depends']) == 0


if __name__ == '__main__':
    main()
/tests/test-recipes/metadata/entry_points/run_test.bat: -------------------------------------------------------------------------------- 1 | rem We have to use the absolute path because there is no "shebang line" in Windows 2 | python "%PREFIX%\Scripts\test-script-setup.py" 3 | if errorlevel 1 exit 1 4 | python "%PREFIX%\Scripts\test-script-setup.py" | grep "Test script setup\.py" 5 | if errorlevel 1 exit 1 6 | 7 | test-script-manual 8 | if errorlevel 1 exit 1 9 | test-script-manual | grep "Manual entry point" 10 | if errorlevel 1 exit 1 11 | -------------------------------------------------------------------------------- /tests/test-recipes/metadata/source_url/meta.yaml: -------------------------------------------------------------------------------- 1 | package: 2 | name: conda-build-test-source-url 3 | version: 1.0 4 | 5 | source: 6 | fn: conda-build-1.8.1.tar.gz 7 | url: https://github.com/conda/conda-build/archive/1.8.1.tar.gz 8 | md5: 0bf1f3598a659a0e8fb5ee6bbb3fd9fd 9 | sha1: c464a8995ad6bbf0480abd2883876cc9b4913fa7 10 | sha256: f82b0bd5c809c9a7c7256c26364a0065e57732788b7a74c7ea2169135ed2f598 11 | 12 | requirements: 13 | build: 14 | - python 15 | -------------------------------------------------------------------------------- /tests/test-recipes/metadata/build_string/run_test.sh: -------------------------------------------------------------------------------- 1 | conda list -p $PREFIX --canonical 2 | # This is actually the build string. 
We test the build number below 3 | [ "$(conda list -p $PREFIX --canonical)" = "conda-build-test-build-string-1.0-abc" ] 4 | 5 | cat $PREFIX/conda-meta/conda-build-test-build-string-1.0-abc.json 6 | cat $PREFIX/conda-meta/conda-build-test-build-string-1.0-abc.json | grep '"build_number": 0' 7 | cat $PREFIX/conda-meta/conda-build-test-build-string-1.0-abc.json | grep '"build": "abc"' 8 | -------------------------------------------------------------------------------- /example_packages/setuptools/setup.py: -------------------------------------------------------------------------------- 1 | from setuptools import setup, find_packages 2 | 3 | setup( 4 | name='Test Package', 5 | version="1.1.0", 6 | author='Continuum Analytics', 7 | author_email='sean.ross-ross@continuum.io', 8 | description='Testing the conda build', 9 | packages=find_packages(), 10 | install_requires=['Flask', 'werkzeug'], 11 | entry_points={ 12 | 'console_scripts' : [ 13 | 'script1 = package1.scripts:main' 14 | ] 15 | } 16 | ) 17 | -------------------------------------------------------------------------------- /tests/test-recipes/metadata/build_number/run_test.bat: -------------------------------------------------------------------------------- 1 | conda list -p "%PREFIX%" --canonical 2 | if errorlevel 1 exit 1 3 | for /f "delims=" %%i in ('conda list -p "%PREFIX%" --canonical') do set condalist=%%i 4 | if errorlevel 1 exit 1 5 | echo "%condalist%" 6 | if not "%condalist%"=="conda-build-test-build-number-1.0-1" exit 1 7 | cat "%PREFIX%\conda-meta\conda-build-test-build-number-1.0-1.json" 8 | if errorlevel 1 exit 1 9 | cat "%PREFIX%\conda-meta\conda-build-test-build-number-1.0-1.json" | grep '"build_number": 1' 10 | if errorlevel 1 exit 1 11 | -------------------------------------------------------------------------------- /tests/test-recipes/metadata/source_git/bld.bat: -------------------------------------------------------------------------------- 1 | if not exist .git exit 1 2 | git config 
import os
import json
import glob


def main():
    """Check that exactly one conda-meta record exists for the
    python-run test package and that its only runtime dependency is a
    versioned python."""
    env_prefix = os.environ['PREFIX']
    records = glob.glob(os.path.join(env_prefix, 'conda-meta',
                                     'conda-build-test-python-run-1.0-py*0.json'))
    assert len(records) == 1
    with open(records[0], 'r') as handle:
        metadata = json.load(handle)

    assert len(metadata['depends']) == 1
    assert metadata['depends'][0].startswith('python ')


if __name__ == '__main__':
    main()
-------------------------------------------------------------------------------- /conda_build/header_test.py: -------------------------------------------------------------------------------- 1 | from __future__ import absolute_import, division, print_function 2 | 3 | import sys 4 | import subprocess 5 | from distutils.spawn import find_executable 6 | import shlex 7 | 8 | def call_args(string): 9 | args = shlex.split(string) 10 | arg0 = args[0] 11 | args[0] = find_executable(arg0) 12 | if not args[0]: 13 | sys.exit("Command not found: '%s'" % arg0) 14 | 15 | try: 16 | subprocess.check_call(args) 17 | except subprocess.CalledProcessError: 18 | sys.exit('Error: command failed: %s' % ' '.join(args)) 19 | 20 | # --- end header 21 | -------------------------------------------------------------------------------- /tests/test_misc.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | 3 | import conda_build._link as _link 4 | 5 | 6 | 7 | class TestLink(unittest.TestCase): 8 | 9 | def test_pyc_f_2(self): 10 | self.assertEqual(_link.pyc_f('sp/utils.py', (2, 7, 9)), 11 | 'sp/utils.pyc') 12 | 13 | def test_pyc_f_3(self): 14 | for f, r in [ 15 | ('sp/utils.py', 16 | 'sp/__pycache__/utils.cpython-34.pyc'), 17 | ('sp/foo/utils.py', 18 | 'sp/foo/__pycache__/utils.cpython-34.pyc'), 19 | ]: 20 | self.assertEqual(_link.pyc_f(f, (3, 4, 2)), r) 21 | -------------------------------------------------------------------------------- /conda_build/__init__.py: -------------------------------------------------------------------------------- 1 | # (c) Continuum Analytics, Inc. / http://continuum.io 2 | # All Rights Reserved 3 | # 4 | # conda is distributed under the terms of the BSD 3-clause license. 5 | # Consult LICENSE.txt or http://opensource.org/licenses/BSD-3-Clause. 
import os
import json
import glob


def main():
    """Check that the numpy-run test package depends on exactly an
    unversioned numpy plus a versioned python."""
    env_prefix = os.environ['PREFIX']
    records = glob.glob(os.path.join(env_prefix, 'conda-meta',
                                     'conda-build-test-numpy-run-1.0-py*0.json'))
    assert len(records) == 1
    with open(records[0], 'r') as handle:
        metadata = json.load(handle)

    assert len(metadata['depends']) == 2
    dependencies = sorted(metadata['depends'])
    # numpy was requested with no version pin
    assert dependencies[0] == 'numpy'
    assert dependencies[1].startswith('python ')


if __name__ == '__main__':
    main()
import os
import json
import glob


def main():
    """Check that the numpy-build-run test package depends on exactly
    an unversioned numpy plus a versioned python."""
    env_prefix = os.environ['PREFIX']

    records = glob.glob(os.path.join(env_prefix, 'conda-meta',
                                     'conda-build-test-numpy-build-run-1.0-py*0.json'))
    assert len(records) == 1
    with open(records[0], 'r') as handle:
        metadata = json.load(handle)

    assert len(metadata['depends']) == 2
    dependencies = sorted(metadata['depends'])
    # numpy was requested with no version pin
    assert dependencies[0] == 'numpy'
    assert dependencies[1].startswith('python ')


if __name__ == '__main__':
    main()
%} 12 | number: {{ environ.get('GIT_DESCRIBE_NUMBER', 0) }} 13 | string: py{{ environ.get('PY_VER').replace('.', '') }}_{{ environ.get('GIT_BUILD_STR', 'GIT_STUB') }} 14 | {% endif %} 15 | 16 | requirements: 17 | build: 18 | - python 19 | run: 20 | - python 21 | - conda 22 | - jinja2 23 | - patchelf [linux] 24 | 25 | test: 26 | commands: 27 | - conda-build -h 28 | imports: 29 | - conda_build 30 | 31 | about: 32 | home: https://github.com/conda/conda-build 33 | license: BSD 34 | -------------------------------------------------------------------------------- /conda_build/elf.py: -------------------------------------------------------------------------------- 1 | from __future__ import absolute_import, division, print_function 2 | 3 | import sys 4 | from os.path import islink, isfile 5 | 6 | 7 | # extensions which are assumed to belong to non-ELF files 8 | NO_EXT = ( 9 | '.py', '.pyc', '.pyo', '.h', '.a', '.c', '.txt', '.html', 10 | '.xml', '.png', '.jpg', '.gif', 11 | '.o' # ELF but not what we are looking for 12 | ) 13 | 14 | MAGIC = b'\x7fELF' 15 | 16 | 17 | def is_elf(path): 18 | if path.endswith(NO_EXT) or islink(path) or not isfile(path): 19 | return False 20 | with open(path, 'rb') as fi: 21 | head = fi.read(4) 22 | return bool(head == MAGIC) 23 | 24 | 25 | if __name__ == '__main__': 26 | if sys.platform.startswith('linux'): 27 | for path in '/usr/bin/ls', '/etc/mtab': 28 | print(path, is_elf(path)) 29 | -------------------------------------------------------------------------------- /conda_build/templates/npm.yaml: -------------------------------------------------------------------------------- 1 | #Automaticly generating npm conda package 2 | {% set data = load_npm()%} 3 | {% block body -%} 4 | {% block package -%} 5 | package: 6 | name: {{data.get('name').lower()}} 7 | version: {{data.get('version')}} 8 | {%- endblock %} 9 | {% block build -%} 10 | build: 11 | number: {% block build_number -%}1{%- endblock %} 12 | script: 13 | - cd $RECIPE_DIR 14 | - npm 
--prefix $PREFIX install . -g --production 15 | {%- endblock %} 16 | {% block requirements -%} 17 | requirements: 18 | build: 19 | - nodejs 20 | 21 | run: 22 | - nodejs 23 | 24 | {%- endblock %} 25 | {% block about -%} 26 | about: 27 | home: {{data.get('url')}} 28 | license: {{data.get('license')}} 29 | {%- endblock %} 30 | {%- endblock %} 31 | 32 | # See 33 | # http://docs.continuum.io/conda/build.html for 34 | # more information about meta.yaml 35 | -------------------------------------------------------------------------------- /tests/test-recipes/test-package/setup.py: -------------------------------------------------------------------------------- 1 | from distutils.core import setup 2 | 3 | setup( 4 | name="conda-build-test-project", 5 | version='1.0', 6 | author="Continuum Analytics, Inc.", 7 | url="https://github.com/conda/conda-build", 8 | license="BSD", 9 | classifiers=[ 10 | "Development Status :: 4 - Beta", 11 | "Intended Audience :: Developers", 12 | "Operating System :: OS Independent", 13 | "Programming Language :: Python :: 2", 14 | "Programming Language :: Python :: 2.7", 15 | "Programming Language :: Python :: 3", 16 | "Programming Language :: Python :: 3.3", 17 | "Programming Language :: Python :: 3.4", 18 | ], 19 | description="test package for testing conda-build", 20 | packages=['conda_build_test'], 21 | scripts=[ 22 | 'bin/test-script-setup.py', 23 | ], 24 | ) 25 | -------------------------------------------------------------------------------- /example_packages/noarch_python/meta.yaml: -------------------------------------------------------------------------------- 1 | package: 2 | name: bokeh 3 | version: 0.7.1 4 | 5 | source: 6 | fn: bokeh-0.7.1.tar.gz 7 | url: https://pypi.python.org/packages/source/b/bokeh/bokeh-0.7.1.tar.gz 8 | md5: 426f2b0850018fab1407f2e7ed129544 9 | 10 | build: 11 | noarch_python: True 12 | number: 1 13 | 14 | requirements: 15 | build: 16 | - python 17 | run: 18 | - python 19 | - numpy 20 | - pandas 21 | - flask 22 | 
import os
import sys
import json


def main():
    """Verify the packaged file list contains exactly the libpng
    libraries matched by the recipe's always_include_files globs for
    the current platform."""
    env_prefix = os.environ['PREFIX']
    record = os.path.join(env_prefix, 'conda-meta',
                          'conda-build-test-always_include_files-glob-1.0-0.json')
    with open(record, 'r') as handle:
        metadata = json.load(handle)

    packaged = metadata['files']
    if sys.platform == 'darwin':
        expected = {'lib/libpng.dylib', 'lib/libpng16.16.dylib',
                    'lib/libpng16.dylib'}
        assert set(packaged) == expected, packaged
    elif sys.platform.startswith('linux'):
        expected = {'lib/libpng.so', 'lib/libpng16.so',
                    'lib/libpng16.so.16', 'lib/libpng16.so.16.17.0'}
        assert set(packaged) == expected, packaged
    elif sys.platform == 'win32':
        expected = ['Library/lib/libpng.lib', 'Library/lib/libpng16.lib',
                    'Library/lib/libpng16_static.lib',
                    'Library/lib/libpng_static.lib']
        assert sorted(packaged) == expected

if __name__ == '__main__':
    main()
import sys
import os
from os.path import join


def main():
    """Validate that each has_prefix test file written at build time
    had its placeholder replaced with the real install prefix."""
    env_prefix = os.environ['PREFIX']

    def read_and_show(name):
        # Text files must contain the prefix verbatim.
        with open(join(env_prefix, name)) as handle:
            content = handle.read()
        print(name)
        print(content)
        return content

    assert env_prefix in read_and_show('automatic-prefix')
    assert env_prefix in read_and_show('has-prefix')

    with open(join(env_prefix, 'binary-has-prefix'), 'rb') as handle:
        blob = handle.read()
    print('binary-has-prefix')
    print(blob)
    assert env_prefix.encode('utf-8') in blob

    if sys.platform == 'win32':
        # On Windows the prefix may also be embedded with forward slashes.
        slash_prefix = env_prefix.replace('\\', '/')
        with open(join(env_prefix, 'forward-slash-prefix')) as handle:
            content = handle.read()
        print('forward-slash-prefix')
        print(content)
        assert slash_prefix in content

if __name__ == '__main__':
    main()
-------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | set -e 4 | set -x 5 | 6 | cd "$(dirname "${BASH_SOURCE[0]}")" 7 | 8 | # Recipes that should fail and give some error 9 | 10 | for recipe in metadata/*/; do 11 | if [[ $(ls -A "$recipe") ]]; then 12 | if [[ $recipe =~ .*osx_is_app.* && $(uname) != "Darwin" ]]; then 13 | continue 14 | fi 15 | conda build --no-anaconda-upload $recipe 16 | fi 17 | done 18 | 19 | cd fail 20 | 21 | # We use 2>&1 as the error is printed to stderr. We could do >/dev/null to 22 | # ensure it is printed to stderr, but then we would hide the output of the 23 | # command from the test output. The ! ensures that the command fails. 24 | ! OUTPUT=$(conda build --no-anaconda-upload symlinks/ 2>&1) 25 | echo "$OUTPUT" | grep "Error" | wc -l | grep 6 26 | 27 | ! OUTPUT=$(conda build --no-anaconda-upload conda-meta/ 2>&1) 28 | echo "$OUTPUT" | grep 'Error: Untracked file(s) ('\''conda-meta/nope'\'',)' 29 | 30 | ! OUTPUT=$(conda build --no-anaconda-upload recursive-build/ 2>&1) 31 | echo "$OUTPUT" | grep 'No packages found in current .* channels matching: recursive-build2 2\.0' 32 | 33 | echo "TESTS PASSED" 34 | -------------------------------------------------------------------------------- /conda_build/templates/setuptools.yaml: -------------------------------------------------------------------------------- 1 | {% set data = load_setuptools()%} 2 | {% block body -%} 3 | {% block package -%} 4 | package: 5 | name: {{data.get('name').lower().replace(' ', '_')}} 6 | version: {{data.get('version')}} 7 | {%- endblock %} 8 | {% block build -%} 9 | build: 10 | number: {% block build_number -%}1{%- endblock %} 11 | script: 12 | - cd $RECIPE_DIR 13 | - $PYTHON setup.py install --single-version-externally-managed --record=record.txt 14 | 15 | {% block entry_points -%} 16 | entry_points: {{data.get('entry_points', {}).get('console_scripts', [])}} 17 | {%- endblock %} 18 | 19 | {%- endblock %} 20 | 21 | {% 
def find_executable(executable):
    """Locate *executable* and return its full path, or None.

    Searches the build prefix's script/bin directories, the conda root's,
    and finally every entry of PATH.  On Windows the .exe and .bat
    extensions are tried as well (plus the conventional cygwin bin dir).
    """
    # dir_paths is referenced as a module-level variable
    # in other code
    global dir_paths
    on_windows = sys.platform == 'win32'
    if on_windows:
        dir_paths = [join(config.build_prefix, 'Scripts'),
                     join(config.build_prefix, 'Library\\bin'),
                     join(cc.root_dir, 'Scripts'),
                     join(cc.root_dir, 'Library\\bin'),
                     'C:\\cygwin\\bin']
    else:
        dir_paths = [join(config.build_prefix, 'bin'),
                     join(cc.root_dir, 'bin')]

    dir_paths.extend(os.environ['PATH'].split(os.pathsep))

    for dir_path in dir_paths:
        if on_windows:
            for ext in ('.exe', '.bat', ''):
                candidate = join(dir_path, executable + ext)
                if isfile(candidate):
                    return candidate
        else:
            candidate = expanduser(join(dir_path, executable))
            if isfile(candidate):
                return candidate
    return None
import textwrap

SEPARATOR = "-" * 70


def indent(s):
    # Dedent then re-wrap a block of explanatory text for display.
    return textwrap.fill(textwrap.dedent(s))


class CondaBuildException(Exception):
    """Root of the conda-build exception hierarchy."""
    pass


class YamlParsingError(CondaBuildException):
    """Raised when a recipe's YAML cannot be processed."""
    pass


class UnableToParse(YamlParsingError):
    """Wraps the original parser exception and formats a friendly report."""

    def __init__(self, original, *args, **kwargs):
        super(UnableToParse, self).__init__(*args, **kwargs)
        # Keep the underlying exception so callers can inspect it.
        self.original = original

    def error_msg(self):
        """Full report: separator, summary body, then the indented cause."""
        return "\n".join([
            SEPARATOR,
            self.error_body(),
            self.indented_exception(),
        ])

    def error_body(self):
        # Subclasses extend this summary line.
        return "Unable to parse meta.yaml file\n"

    def indented_exception(self):
        # Prefix each line of the original exception text with an arrow.
        message = str(self.original).replace("\n", "\n--> ")
        return "Error Message:\n--> {}\n\n".format(message)


class UnableToParseMissingJinja2(UnableToParse):
    """Parse failure most likely caused by jinja2 not being installed."""

    def error_body(self):
        return "\n".join([
            super(UnableToParseMissingJinja2, self).error_body(),
            indent("""\
                It appears you are missing jinja2. Please install that
                package, then attempt to build.
            """),
        ])
# Cache for the kwargs captured from the package's setup() call; populated
# lazily by load_setuptools() and shared module-wide.
_setuptools_data = None


def load_setuptools(setup_file='setup.py'):
    """Execute *setup_file* with a patched ``setuptools.setup`` and return
    the keyword arguments it was called with.

    The result is cached module-wide, so only the first call actually
    executes the setup script.

    :param setup_file: path to the setup script, relative to the cwd
    :return: dict of the keyword arguments passed to setup()
    """
    global _setuptools_data

    if _setuptools_data is None:
        _setuptools_data = {}

        def setup(**kw):
            # Capture, don't install.
            _setuptools_data.update(kw)

        import setuptools
        # Add current directory to path so setup.py can import local modules
        import sys
        sys.path.append('.')

        # Patch setuptools; use try/finally so the real setup() and sys.path
        # are restored even when the setup script raises.
        setuptools_setup = setuptools.setup
        setuptools.setup = setup
        try:
            # open via a context manager so the file handle is closed
            # (the old exec(open(...).read()) leaked it)
            with open(setup_file) as fi:
                exec(fi.read())
        finally:
            setuptools.setup = setuptools_setup
            del sys.path[-1]
    return _setuptools_data


def load_npm():
    """Return the parsed contents of ``package.json`` from the cwd.

    :return: the decoded JSON object (normally a dict)
    """
    import sys
    # json expects text in Python 3 and accepts bytes in Python 2; checking
    # sys.version_info directly removes the conda.compat.PY3 dependency
    # while keeping behavior identical.
    if sys.version_info[0] >= 3:
        kwargs = {'mode': 'r', 'encoding': 'utf-8'}
    else:
        kwargs = {'mode': 'rb'}
    with open('package.json', **kwargs) as pkg:
        return json.load(pkg)


def context_processor():
    """Build the Jinja template context: build-environment values plus a
    merged copy of os.environ, and the loader helpers above.
    """
    ctx = get_environ()
    environ = dict(os.environ)
    environ.update(get_environ())

    ctx.update(load_setuptools=load_setuptools,
               load_npm=load_npm,
               environ=environ)
    return ctx
scripts=glob('bin/*'), 45 | install_requires=['conda'], 46 | package_data={'conda_build': ['templates/*', 'cli-*.exe']}, 47 | ) 48 | -------------------------------------------------------------------------------- /LICENSE.txt: -------------------------------------------------------------------------------- 1 | Except where noted below, conda is released under the following terms: 2 | 3 | (c) 2012 Continuum Analytics, Inc. / http://continuum.io 4 | All Rights Reserved 5 | 6 | Redistribution and use in source and binary forms, with or without 7 | modification, are permitted provided that the following conditions are met: 8 | * Redistributions of source code must retain the above copyright 9 | notice, this list of conditions and the following disclaimer. 10 | * Redistributions in binary form must reproduce the above copyright 11 | notice, this list of conditions and the following disclaimer in the 12 | documentation and/or other materials provided with the distribution. 13 | * Neither the name of Continuum Analytics, Inc. nor the 14 | names of its contributors may be used to endorse or promote products 15 | derived from this software without specific prior written permission. 16 | 17 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND 18 | ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED 19 | WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE 20 | DISCLAIMED. IN NO EVENT SHALL CONTINUUM ANALYTICS BE LIABLE FOR ANY 21 | DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES 22 | (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; 23 | LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND 24 | ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT 25 | (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS 26 | SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
27 | 28 | 29 | Exceptions 30 | ========== 31 | 32 | versioneer.py is Public Domain 33 | -------------------------------------------------------------------------------- /README.rst: -------------------------------------------------------------------------------- 1 | =========== 2 | conda-build 3 | =========== 4 | 5 | Installation 6 | -------------- 7 | .. code:: bash 8 | 9 | # Display information about current conda install 10 | conda info 11 | 12 | # Install conda-build in the current env 13 | conda install -n root conda-build 14 | 15 | 16 | Building Your Own Packages 17 | -------------------------- 18 | 19 | You can easily build your own packages for conda, and upload them to `anaconda.org 20 | `_, a free service for hosting packages for conda, as 21 | well as other package managers. To build a package, create a recipe. See 22 | http://github.com/conda/conda-recipes for many example recipes, and 23 | http://conda.pydata.org/docs/build.html for documentation on how to build 24 | recipes. 25 | 26 | To upload to anaconda.org, create an account. Then, install the anaconda-client 27 | and login 28 | 29 | .. code-block:: bash 30 | 31 | $ conda install anaconda-client 32 | $ anaconda login 33 | 34 | Then, after you build your recipe 35 | 36 | .. code-block:: bash 37 | 38 | $ conda build 39 | 40 | you will be prompted to upload to anaconda.org. 41 | 42 | To add your anaconda.org channel, or the channel of others to conda so that ``conda 43 | install`` will find and install their packages, run 44 | 45 | .. code-block:: bash 46 | 47 | $ conda config --add channels https://conda.anaconda.org/username 48 | 49 | (replacing ``username`` with the user name of the person whose channel you want 50 | to add). 51 | 52 | Getting Help 53 | ------------ 54 | 55 | The documentation for conda is at http://conda.pydata.org/docs/. You can 56 | subscribe to the `conda mailing list 57 | `_. The source 58 | code and issue tracker for conda are on `GitHub `_. 
def main():
    """Entry point for ``conda index``: refresh the package index metadata
    in each given directory (default: the current working directory)."""
    parser = ArgumentParser(
        description="Update package index metadata files in given directories.")

    parser.add_argument(
        'dir',
        help='Directory that contains an index to be updated.',
        nargs='*',
        default=[os.getcwd()],
    )
    parser.add_argument(
        '-c', "--check-md5",
        action="store_true",
        help="""Use MD5 values instead of file modification times for determining if a
package's metadata needs to be updated.""",
    )
    parser.add_argument(
        '-f', "--force",
        action="store_true",
        help="Force reading all files.",
    )
    parser.add_argument(
        '-q', "--quiet",
        action="store_true",
        help="Don't show any output.",
    )
    parser.add_argument(
        '--no-remove',
        action="store_false",
        dest="remove",
        default=True,
        help="Don't remove entries for files that don't exist.",
    )

    args = parser.parse_args()

    index_dirs = [abspath(d) for d in args.dir]
    # Don't use byte strings in Python 2
    if not PY3:
        index_dirs = [d.decode(getpreferredencoding()) for d in index_dirs]

    for index_dir in index_dirs:
        update_index(index_dir, verbose=(not args.quiet), force=args.force,
                     check_md5=args.check_md5, remove=args.remove)


if __name__ == '__main__':
    main()
# Template written for each entry-point script; the __main__ guard keeps
# the generated module importable without running the command.
PY_TMPL = """\
if __name__ == '__main__':
    import sys
    from %s import %s

    sys.exit(%s())
"""

# Entry-point scripts live in Scripts\ on Windows, bin/ elsewhere.
bin_dirname = 'Scripts' if sys.platform == 'win32' else 'bin'

# Matches "name = module.path:function" entry-point specs.
# Raw string: the old non-raw literal produced invalid-escape-sequence
# DeprecationWarnings on modern Pythons.
entry_pat = re.compile(r'\s*([\w\-\.]+)\s*=\s*([\w.]+):(\w+)\s*$')


def iter_entry_points(items):
    """Yield ``(command, module, function)`` for each entry-point spec.

    :param items: iterable of "name = module:function" strings
    :raises SystemExit: when a spec does not match the expected form
    """
    for item in items:
        m = entry_pat.match(item)
        if m is None:
            # fixed typo in the message: "cound" -> "could"
            sys.exit("Error could not match entry point: %r" % item)
        yield m.groups()


def create_entry_point(path, module, func):
    """Write one entry-point script at *path*.

    On Windows this writes ``path-script.py`` plus a launcher ``path.exe``;
    elsewhere it writes an executable script with a shebang for the build
    environment's Python.
    """
    pyscript = PY_TMPL % (module, func, func)
    if sys.platform == 'win32':
        with open(path + '-script.py', 'w') as fo:
            fo.write(pyscript)
        shutil.copyfile(join(dirname(__file__), 'cli-%d.exe' % cc.bits),
                        path + '.exe')
    else:
        with open(path, 'w') as fo:
            fo.write('#!%s\n' % config.build_python)
            fo.write(pyscript)
        # rwxr-xr-x; 0o755 is clearer than int('755', 8) and identical
        os.chmod(path, 0o755)


def create_entry_points(items):
    """Create a script in the build prefix for every entry point in *items*."""
    if not items:
        return
    bin_dir = join(config.build_prefix, bin_dirname)
    if not isdir(bin_dir):
        os.mkdir(bin_dir)
    for cmd, module, func in iter_entry_points(items):
        create_entry_point(join(bin_dir, cmd), module, func)
def dist_fn(fn):
    """Strip the archive suffix from *fn*, yielding the dist name.

    Raises for any filename that is not a .tar or .tar.bz2 archive.
    """
    for ext in ('.tar.bz2', '.tar'):
        if fn.endswith(ext):
            return fn[:-len(ext)]
    raise Exception('did not expect filename: %r' % fn)


class TarCheck(object):
    """Consistency checks on a built conda package tarball."""

    def __init__(self, path):
        self.t = tarfile.open(path)
        self.paths = {member.path for member in self.t.getmembers()}
        self.dist = dist_fn(basename(path))
        # dist is "<name>-<version>-<build>"
        self.name, self.version, self.build = self.dist.rsplit('-', 2)

    def info_files(self):
        """Check that info/files agrees exactly with the tarball contents."""
        # noarch python builds are exempt from this check
        if 'py_' in self.build:
            return
        declared = [line.strip().decode('utf-8') for line in
                    self.t.extractfile('info/files').readlines()]
        declared_set = set(declared)
        if len(declared) != len(declared_set):
            raise Exception('info/files: duplicates')

        actual = [member.path for member in self.t.getmembers()
                  if not (member.path.startswith('info/') or member.isdir())]
        actual_set = set(actual)
        if len(actual) != len(actual_set):
            raise Exception('info_files: duplicate members')

        if declared_set == actual_set:
            return
        # report every path present on only one side before failing
        for p in sorted(declared_set | actual_set):
            if p not in declared_set:
                print('%r not in info/files' % p)
            if p not in actual_set:
                print('%r not in tarball' % p)
        raise Exception('info/files')

    def index_json(self):
        """Check that info/index.json matches the filename-derived metadata."""
        raw = self.t.extractfile('info/index.json').read().decode('utf-8')
        info = json.loads(raw)
        for varname in 'name', 'version', 'build':
            if info[varname] != getattr(self, varname):
                raise Exception('%s: %r != %r' % (varname, info[varname],
                                                  getattr(self, varname)))
        assert isinstance(info['build_number'], int)
LDD_RE = re.compile(r'\s*(.*?)\s*=>\s*(.*?)\s*\(.*\)')
LDD_NOT_FOUND_RE = re.compile(r'\s*(.*?)\s*=>\s*not found')


def ldd(path):
    "thin wrapper around ldd"
    out = subprocess.check_output(['ldd', path]).decode('utf-8')
    entries = []
    for line in out.splitlines():
        # only "name => target" lines carry linkage information
        if '=>' not in line:
            continue
        assert line[0] == '\t', (path, line)
        hit = LDD_RE.match(line)
        if hit:
            entries.append(hit.groups())
            continue
        hit = LDD_NOT_FOUND_RE.match(line)
        if hit:
            entries.append((hit.group(1), 'not found'))
            continue
        if 'ld-linux' in line:
            # the dynamic loader itself is not an interesting dependency
            continue
        raise RuntimeError("Unexpected output from ldd: %s" % line)
    return entries


@memoized
def get_linkages(obj_files, prefix):
    """Map each object file (relative to *prefix*) to its shared-library
    dependencies, using ldd on Linux and otool on macOS."""
    linkages = {}
    for f in obj_files:
        obj_path = join(prefix, f)
        if sys.platform.startswith('linux'):
            linkages[f] = ldd(obj_path)
        elif sys.platform.startswith('darwin'):
            linkages[f] = [(basename(lib), lib) for lib in otool(obj_path)]
    return linkages


@memoized
def get_package_obj_files(dist, prefix):
    """Return the object files belonging to the installed package *dist*,
    as paths relative to *prefix*."""
    with open(join(prefix, 'conda-meta', dist + '.json')) as f:
        meta = json.load(f)
    return [f2 for f2 in meta['files'] if post.is_obj(join(prefix, f2))]
[] 73 | files = untracked(prefix) 74 | for f in files: 75 | path = join(prefix, f) 76 | if post.is_obj(path): 77 | res.append(f) 78 | 79 | return res 80 | -------------------------------------------------------------------------------- /tests/test_metadata.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | 3 | from conda.resolve import MatchSpec 4 | 5 | from conda_build.metadata import select_lines, handle_config_version 6 | 7 | 8 | def test_select_lines(): 9 | lines = """ 10 | test 11 | test [abc] no 12 | 13 | test [abc] 14 | test # [abc] 15 | test # [abc] yes 16 | test # stuff [abc] yes 17 | """ 18 | 19 | assert select_lines(lines, {'abc': True}) == """ 20 | test 21 | test [abc] no 22 | 23 | test 24 | test 25 | test 26 | test 27 | """ 28 | assert select_lines(lines, {'abc': False}) == """ 29 | test 30 | test [abc] no 31 | 32 | """ 33 | 34 | class HandleConfigVersionTests(unittest.TestCase): 35 | 36 | def test_python(self): 37 | for spec, ver, res_spec in [ 38 | ('python', '3.4', 'python 3.4*'), 39 | ('python 2.7.8', '2.7', 'python 2.7.8'), 40 | ('python 2.7.8', '3.5', 'python 2.7.8'), 41 | ('python 2.7.8', None, 'python 2.7.8'), 42 | ('python', None, 'python'), 43 | ('python x.x', '2.7', 'python 2.7*'), 44 | ('python', '27', 'python 2.7*'), 45 | ('python', 27, 'python 2.7*'), 46 | ]: 47 | ms = MatchSpec(spec) 48 | self.assertEqual(handle_config_version(ms, ver), 49 | MatchSpec(res_spec)) 50 | 51 | self.assertRaises(RuntimeError, 52 | handle_config_version, 53 | MatchSpec('python x.x'), None) 54 | 55 | def test_numpy(self): 56 | for spec, ver, res_spec in [ 57 | ('numpy', None, 'numpy'), 58 | ('numpy', 18, 'numpy'), 59 | ('numpy', 110, 'numpy'), 60 | ('numpy x.x', 17, 'numpy 1.7*'), 61 | ('numpy x.x', 110, 'numpy 1.10*'), 62 | ('numpy 1.9.1', 18, 'numpy 1.9.1'), 63 | ('numpy 1.9.0 py27_2', None, 'numpy 1.9.0 py27_2'), 64 | ]: 65 | ms = MatchSpec(spec) 66 | self.assertEqual(handle_config_version(ms, ver), 67 
| MatchSpec(res_spec)) 68 | 69 | self.assertRaises(RuntimeError, 70 | handle_config_version, 71 | MatchSpec('numpy x.x'), None) 72 | -------------------------------------------------------------------------------- /.binstar.yml: -------------------------------------------------------------------------------- 1 | ## The package attribure specifies a binstar package namespace to build the package to. 2 | ## This can be specified here or on the command line 3 | package: conda-build 4 | 5 | ## You can also specify the account to upload to, 6 | ## you must be an admin of that account, this 7 | ## defaults to your user account 8 | # user: USERNAME 9 | 10 | #=============================================================================== 11 | # Build Matrix Options 12 | # Thes options may be a single item, a list or empty 13 | # The resulting number of builds is [platform * engine * env] 14 | #=============================================================================== 15 | 16 | ## The platforms to build on. 17 | ## platform defaults to linux-64 18 | platform: 19 | - win-64 20 | - win-32 21 | ## The engine are the inital conda packages you want to run with 22 | engine: 23 | - python=2 24 | - python=3 25 | ## The env param is an environment variable list 26 | # env: 27 | # - MY_ENV=A CC=gcc 28 | # - MY_ENV=B 29 | 30 | #=============================================================================== 31 | # Script options 32 | # These options may be broken out into the before_script, script and after_script 33 | # or not, that is up to you 34 | #=============================================================================== 35 | 36 | ## Run before the script 37 | # before_script: 38 | # - echo "before_script!" 39 | ## Put your main computations here! 40 | 41 | install: 42 | # Use the provided conda and Python to run the install script. The order of these commands matters. 
43 | - conda install requests 44 | - set "CONDA_DEFAULT_ENV=" 45 | - python tests\install_miniconda.py 46 | 47 | test: 48 | - cd tests\test-recipes\metadata 49 | # This will be effectively a no-op for recipes without bld.bat 50 | - for /D %%f in (*) do (C:\Users\binstar\conda-build-miniconda\Scripts\conda-build.exe --no-binstar-upload %%~nf) 51 | 52 | # script: 53 | # - echo "This is my binstar build!" 54 | 55 | ## This will run after the script regardless of the result of script 56 | ## BINSTAR_BUILD_RESULT=[succcess|failure] 57 | # after_script: 58 | # - echo "The build was a $BINSTAR_BUILD_RESULT" | tee artifact1.txt 59 | ## This will be run only after a successfull build 60 | # after_success: 61 | # - echo "after_success!" 62 | ## This will be run only after a build failure 63 | # after_failure: 64 | # - echo "after_failure!" 65 | 66 | #=============================================================================== 67 | # Build Results 68 | # Build results are split into two categories: artifacts and targets 69 | # You may omit either key and stiff have a successfull build 70 | # They may be a string, list and contain any bash glob 71 | #=============================================================================== 72 | 73 | ## Build Targets: Upload these files to your binstar package 74 | ## build targets may be a list of files (globs allows) to upload 75 | ## The special build targets 'conda' and 'pypi' may be used to 76 | ## upload conda builds 77 | ## e.g. conda is an alias for /opt/anaconda/conda-bld//*.tar.bz2 78 | # build_targets: 79 | # - conda 80 | -------------------------------------------------------------------------------- /conda_build/_link.py: -------------------------------------------------------------------------------- 1 | """ 2 | This is code that is added to noarch Python packages. See 3 | conda_build/noarch_python.py. 
4 | 5 | """ 6 | import os 7 | import sys 8 | import shutil 9 | from os.path import dirname, exists, isdir, join, normpath 10 | 11 | # Silence pyflakes. This variable is added when link.py is written by 12 | # conda_build.noarch_python. 13 | if False: 14 | DATA = None 15 | 16 | THIS_DIR = dirname(__file__) 17 | PREFIX = normpath(sys.prefix) 18 | if sys.platform == 'win32': 19 | BIN_DIR = join(PREFIX, 'Scripts') 20 | SITE_PACKAGES = 'Lib/site-packages' 21 | else: 22 | BIN_DIR = join(PREFIX, 'bin') 23 | SITE_PACKAGES = 'lib/python%s/site-packages' % sys.version[:3] 24 | 25 | # the list of these files is going to be store in info/_files 26 | FILES = [] 27 | 28 | 29 | def _link(src, dst): 30 | try: 31 | os.link(src, dst) 32 | # on Windows os.link raises AttributeError 33 | except (OSError, AttributeError): 34 | shutil.copy2(src, dst) 35 | 36 | 37 | def _unlink(path): 38 | try: 39 | os.unlink(path) 40 | except OSError: 41 | pass 42 | 43 | 44 | def pyc_f(f, version_info=sys.version_info): 45 | if version_info[0] == 2: 46 | return f + 'c' 47 | dn, fn = f.rsplit('/', 1) 48 | return '%s/__pycache__/%s.cpython-%d%d.pyc' % ( 49 | dn, fn[:-3], version_info[0], version_info[1]) 50 | 51 | 52 | def link_files(src_root, dst_root, files): 53 | for f in files: 54 | src = join(THIS_DIR, src_root, f) 55 | dst = join(PREFIX, dst_root, f) 56 | dst_dir = dirname(dst) 57 | if not isdir(dst_dir): 58 | os.makedirs(dst_dir) 59 | if exists(dst): 60 | _unlink(dst) 61 | _link(src, dst) 62 | f = '%s/%s' % (dst_root, f) 63 | FILES.append(f) 64 | if f.endswith('.py'): 65 | FILES.append(pyc_f(f)) 66 | 67 | 68 | def create_script(fn): 69 | src = join(THIS_DIR, 'python-scripts', fn) 70 | dst = join(BIN_DIR, fn) 71 | if sys.platform == 'win32': 72 | shutil.copyfile(src, dst + '-script.py') 73 | FILES.append('Scripts/%s-script.py' % fn) 74 | shutil.copyfile(join(THIS_DIR, 75 | 'cli-%d.exe' % (8 * tuple.__itemsize__)), 76 | dst + '.exe') 77 | FILES.append('Scripts/%s.exe' % fn) 78 | else: 79 | with 
# Filename pattern of bdist_wininst installers,
# e.g. "numpy-1.9.2.win-amd64-py2.7.exe"
fn_pat = re.compile(
    r'([\w\.-]+)-([\w\.]+)\.(win32|win-amd64)-py(\d\.\d)\.exe$')

arch_map = {'win32': 'x86', 'win-amd64': 'x86_64'}

subdir_map = {'x86': 'win-32', 'x86_64': 'win-64'}

# installer directory layout -> conda package layout
file_map = [
    ('PLATLIB/', 'Lib/site-packages/'),
    ('PURELIB/', 'Lib/site-packages/'),
    ('SCRIPTS/', 'Scripts/'),
    ('DATA/', ''),
]


def info_from_fn(fn):
    """Derive conda index metadata from an installer filename.

    Returns None when *fn* does not look like a wininst installer.
    """
    m = fn_pat.match(fn)
    if m is None:
        return None
    name, version, plat, py_ver = m.groups()
    return {
        "name": name.lower(),
        "version": version,
        "build": "py" + py_ver.replace('.', ''),
        "build_number": 0,
        "depends": ['python %s*' % py_ver],
        "platform": "win",
        "arch": arch_map[plat],
    }


def repack(src_path, t, verbose=False):
    """Copy the payload of the installer (a zip) into tarfile *t*,
    remapping each path through file_map."""
    z = zipfile.ZipFile(src_path)
    for src in z.namelist():
        if src.endswith(('/', '\\')):
            # skip directory entries
            continue
        for zip_prefix, tar_prefix in file_map:
            if src.startswith(zip_prefix):
                dst = tar_prefix + src[len(zip_prefix):]
                break
        else:
            raise RuntimeError("Don't know how to handle file %s" % src)

        if verbose:
            print('  %s -> %s' % (src, dst))
        zinfo = z.getinfo(src)
        payload = z.read(src)
        ti = tarfile.TarInfo(dst)
        ti.size = len(payload)
        # preserve the zip entry's timestamp
        ti.mtime = calendar.timegm(zinfo.date_time)
        t.addfile(ti, BytesIO(payload))
    z.close()


def write_info(t, info):
    """Append info/files and info/index.json entries to tarfile *t*."""
    tmp_dir = tempfile.mkdtemp()
    with open(join(tmp_dir, 'files'), 'w') as fo:
        for m in t.getmembers():
            fo.write('%s\n' % m.path)
    with open(join(tmp_dir, 'index.json'), 'w') as fo:
        json.dump(info, fo, indent=2, sort_keys=True)
    for fn in os.listdir(tmp_dir):
        t.add(join(tmp_dir, fn), 'info/' + fn)
    shutil.rmtree(tmp_dir)


def convert(path, repo_dir='.', add_depends=None, verbose=False):
    """Convert the wininst installer at *path* into a conda package laid
    out under *repo_dir*/<subdir>/."""
    fn1 = basename(path)
    info = info_from_fn(fn1)
    if info is None:
        print("WARNING: Invalid .exe filename '%s', skipping" % fn1)
        return
    fn2 = '%(name)s-%(version)s-%(build)s.tar.bz2' % info
    subdir = subdir_map[info['arch']]
    if verbose:
        print('%s -> %s/%s' % (fn1, subdir, fn2))

    if add_depends:
        info['depends'].extend(add_depends)

    output_dir = join(repo_dir, subdir)
    if not isdir(output_dir):
        os.makedirs(output_dir)
    output_path = join(output_dir, fn2)

    tar = tarfile.open(output_path, 'w:bz2')
    repack(path, tar, verbose)
    write_info(tar, info)
    tar.close()
    if verbose:
        from pprint import pprint
        pprint(info)

    print("Wrote: %s" % output_path)
-------------------------------------------------------------------------------- /tests/test_utils.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | 3 | import conda_build.utils as utils 4 | 5 | 6 | 7 | class TestUtils(unittest.TestCase): 8 | 9 | def test_relative_default(self): 10 | for f, r in [ 11 | ('bin/python', '../lib'), 12 | ('lib/libhdf5.so', '.'), 13 | ('lib/python2.6/foobar.so', '..'), 14 | ('lib/python2.6/lib-dynload/zlib.so', '../..'), 15 | ('lib/python2.6/site-packages/pyodbc.so', '../..'), 16 | ('lib/python2.6/site-packages/bsdiff4/core.so', '../../..'), 17 | ('xyz', './lib'), 18 | ('bin/somedir/cmd', '../../lib'), 19 | ]: 20 | self.assertEqual(utils.relative(f), r) 21 | 22 | def test_relative_lib(self): 23 | for f, r in [ 24 | ('bin/python', '../lib'), 25 | ('lib/libhdf5.so', '.'), 26 | ('lib/python2.6/foobar.so', '..'), 27 | ('lib/python2.6/lib-dynload/zlib.so', '../..'), 28 | ('lib/python2.6/site-packages/pyodbc.so', '../..'), 29 | ('lib/python2.6/site-packages/bsdiff3/core.so', '../../..'), 30 | ('xyz', './lib'), 31 | ('bin/somedir/cmd', '../../lib'), 32 | ('bin/somedir/somedir2/cmd', '../../../lib'), 33 | ]: 34 | self.assertEqual(utils.relative(f, 'lib'), r) 35 | 36 | def test_relative_subdir(self): 37 | for f, r in [ 38 | ('lib/libhdf5.so', './sub'), 39 | ('lib/sub/libhdf5.so', '.'), 40 | ('bin/python', '../lib/sub'), 41 | ('bin/somedir/cmd', '../../lib/sub'), 42 | ]: 43 | self.assertEqual(utils.relative(f, 'lib/sub'), r) 44 | 45 | def test_relative_prefix(self): 46 | for f, r in [ 47 | ('xyz', '.'), 48 | ('a/xyz', '..'), 49 | ('a/b/xyz', '../..'), 50 | ('a/b/c/xyz', '../../..'), 51 | ('a/b/c/d/xyz', '../../../..'), 52 | ]: 53 | self.assertEqual(utils.relative(f, '.'), r) 54 | 55 | def test_relative_2(self): 56 | for f, r in [ 57 | ('a/b/c/d/libhdf5.so', '../..'), 58 | ('a/b/c/libhdf5.so', '..'), 59 | ('a/b/libhdf5.so', '.'), 60 | ('a/libhdf5.so', './b'), 61 | ('x/x/libhdf5.so', 
'../../a/b'), 62 | ('x/b/libhdf5.so', '../../a/b'), 63 | ('x/libhdf5.so', '../a/b'), 64 | ('libhdf5.so', './a/b'), 65 | ]: 66 | self.assertEqual(utils.relative(f, 'a/b'), r) 67 | 68 | def test_relative_3(self): 69 | for f, r in [ 70 | ('a/b/c/d/libhdf5.so', '..'), 71 | ('a/b/c/libhdf5.so', '.'), 72 | ('a/b/libhdf5.so', './c'), 73 | ('a/libhdf5.so', './b/c'), 74 | ('libhdf5.so', './a/b/c'), 75 | ('a/b/x/libhdf5.so', '../c'), 76 | ('a/x/x/libhdf5.so', '../../b/c'), 77 | ('x/x/x/libhdf5.so', '../../../a/b/c'), 78 | ('x/x/libhdf5.so', '../../a/b/c'), 79 | ('x/libhdf5.so', '../a/b/c'), 80 | ]: 81 | self.assertEqual(utils.relative(f, 'a/b/c'), r) 82 | 83 | def test_relative_4(self): 84 | for f, r in [ 85 | ('a/b/c/d/libhdf5.so', '.'), 86 | ('a/b/c/x/libhdf5.so', '../d'), 87 | ('a/b/x/x/libhdf5.so', '../../c/d'), 88 | ('a/x/x/x/libhdf5.so', '../../../b/c/d'), 89 | ('x/x/x/x/libhdf5.so', '../../../../a/b/c/d'), 90 | ]: 91 | self.assertEqual(utils.relative(f, 'a/b/c/d'), r) 92 | -------------------------------------------------------------------------------- /tests/test_main_develop.py: -------------------------------------------------------------------------------- 1 | ''' 2 | Simple tests for testing functions in main_develop module 3 | ''' 4 | import os 5 | import shutil 6 | from os.path import dirname, join, exists 7 | 8 | from conda_build.main_develop import uninstall, write_to_conda_pth 9 | 10 | import pytest 11 | 12 | 13 | @pytest.fixture(scope="session") 14 | def sp_dir(request): 15 | ''' 16 | create site-packges/ directory in same place where test is located. This 17 | is where tests look conda.pth file. 
@pytest.fixture(scope="function")
def conda_pth(sp_dir):
    '''
    Return the path to conda.pth inside the session-scoped site-packages
    directory.  The name is not expected to change from conda.pth, but
    keeping it in one place avoids repetition.

    Any pre-existing conda.pth is deleted first so each test starts
    without one.
    '''
    path = join(sp_dir, 'conda.pth')
    if exists(path):
        os.remove(path)

    return path


# Note: following list is data used for testing - do not change it
_path_in_dev_mode = ['/Users/jsandhu/Documents/projects/CythonExample',
                     '/Users/jsandhu/Documents/projects/TestOne',
                     '/Users/jsandhu/Documents/projects/TestOne',
                     '/Users/jsandhu/Documents/projects/TestTwo']
# following list of tuples contains the path and the number of lines
# added/remaining after invoking develop/uninstall.
# These are used to make assertions
_toadd_and_num_after_install = zip(_path_in_dev_mode, (1, 2, 2, 3))
_torm_and_num_after_uninstall = zip(_path_in_dev_mode, (2, 1, 1, 0))


def test_write_to_conda_pth(sp_dir, conda_pth):
    '''
    `conda develop pkg_path` invokes write_to_conda_pth() to write/append to
    conda.pth - this is a basic unit test for write_to_conda_pth

    :param str sp_dir: path to site-packages directory returned by fixture
    :param str conda_pth: path to conda.pth returned by fixture
    '''
    assert not exists(conda_pth)

    for dev_path, expected_count in _toadd_and_num_after_install:
        write_to_conda_pth(sp_dir, dev_path)
        assert exists(conda_pth)
        # write the same path twice but ensure it is only recorded once
        write_to_conda_pth(sp_dir, dev_path)
        with open(conda_pth, 'r') as fh:
            entries = fh.readlines()
        assert (dev_path + '\n') in entries
        assert len(entries) == expected_count


def test_uninstall(sp_dir, conda_pth, request):
    '''
    `conda develop --uninstall pkg_path` invokes uninstall() to remove path
    from conda.pth - this is a unit test for uninstall

    It also includes a cleanup function that deletes the conda.pth file

    :param str sp_dir: path to site-packages directory returned by fixture
    :param str conda_pth: path to conda.pth returned by fixture
    '''
    # seed conda.pth if it doesn't yet exist; when the whole module runs,
    # the previous test has already created it
    if not exists(conda_pth):
        for dev_path in _path_in_dev_mode:
            write_to_conda_pth(sp_dir, dev_path)

    for dev_path, expected_count in _torm_and_num_after_uninstall:
        # here's where the testing begins
        uninstall(sp_dir, dev_path)
        assert exists(conda_pth)

        with open(conda_pth, 'r') as fh:
            entries = fh.readlines()
        assert dev_path + '\n' not in entries
        assert len(entries) == expected_count
def keygen(name, size=2048):
    """Generate an RSA key pair of `size` bits and store it in KEYS_DIR.

    The private key is written to KEYS_DIR/<name> (mode 0600) and the
    public key to KEYS_DIR/<name>.pub.
    """
    print("Generating public/private key pair (%d bits)..." % size)
    random_generator = Random.new().read
    key = RSA.generate(size, random_generator)

    if not isdir(KEYS_DIR):
        os.makedirs(KEYS_DIR)

    path = join(KEYS_DIR, name)
    print("Storing private key: %s" % path)
    with open(path, 'wb') as fo:
        fo.write(key.exportKey())
        fo.write(b'\n')
    os.chmod(path, 0o600)

    path = join(KEYS_DIR, '%s.pub' % name)
    print("Storing public key : %s" % path)
    with open(path, 'wb') as fo:
        fo.write(key.publickey().exportKey())
        fo.write(b'\n')


def get_default_keyname():
    """Return the first private key file name found in KEYS_DIR, or None.

    Any file not ending in '.pub' is treated as a private key.
    """
    if isdir(KEYS_DIR):
        for fn in os.listdir(KEYS_DIR):
            if not fn.endswith('.pub'):
                return fn
    return None


def sign(path, key):
    """Return the base64-encoded PKCS1-PSS signature of the file at `path`."""
    signer = PKCS1_PSS.new(key)
    sig = signer.sign(hash_file(path))
    return base64.b64encode(sig).decode('utf-8')


def main():
    """Command-line entry point for `conda sign`.

    Supports --keygen (create a key pair), --verify (check signatures),
    or the default mode of signing FILE arguments with the default key.
    """
    from conda.cli.conda_argparse import ArgumentParser

    p = ArgumentParser(
        description="""\
Tool for signing conda packages. Signatures will be written alongside the
files as FILE.sig.""")

    p.add_argument('files',
                   help="Files to sign.",
                   nargs='*',
                   metavar="FILE",
                   )
    p.add_argument('-k', '--keygen',
                   action="store",
                   help="Generate a public-private "
                        "key pair ~/.conda/keys/(.pub).",
                   metavar="NAME")
    p.add_argument('--size',
                   action="store",
                   help="Size of generated RSA public-private key pair in bits "
                        "(defaults to 2048).",
                   metavar="BITS")
    p.add_argument('-v', '--verify',
                   action="store_true",
                   help="Verify FILE(s).")

    args = p.parse_args()

    if args.keygen:
        if args.files:
            p.error('no arguments expected for --keygen')
        try:
            keygen(args.keygen, int(2048 if args.size is None else args.size))
        except ValueError as e:
            sys.exit('Error: %s' % e)
        return

    if args.size is not None:
        p.error('--size option is only allowed with --keygen option')

    if args.verify:
        for path in args.files:
            try:
                disp = 'VALID' if verify(path) else 'INVALID'
            except SignatureError as e:
                disp = 'ERROR: %s' % e
            print('%-40s %s' % (path, disp))
        return

    key_name = get_default_keyname()
    if key_name is None:
        sys.exit("Error: no private key found in %s" % KEYS_DIR)
    print("Using private key '%s' for signing." % key_name)
    # FIX: the key file was previously opened without ever being closed
    # (RSA.importKey(open(...).read())); use a context manager instead.
    with open(join(KEYS_DIR, key_name)) as key_file:
        key = RSA.importKey(key_file.read())
    for path in args.files:
        print('signing: %s' % path)
        with open('%s.sig' % path, 'w') as fo:
            fo.write('%s ' % key_name)
            fo.write(sign(path, key))
            fo.write('\n')


if __name__ == '__main__':
    main()
'''
Module to store conda build settings.
'''
from __future__ import absolute_import, division, print_function

import os
import sys
from os.path import abspath, expanduser, join

import conda.config as cc

# Don't "save" an attribute of this module for later, like build_prefix =
# conda_build.config.config.build_prefix, as that won't reflect any mutated
# changes.

class Config(object):
    # Mirror the module's dunder attributes on the class so the singleton
    # instance can stand in for the module itself.
    __file__ = __path__ = __file__
    __package__ = __package__
    __doc__ = __doc__

    # Target Perl version for builds; overridable via the CONDA_PERL env var.
    CONDA_PERL = os.getenv('CONDA_PERL', '5.18.2')
    # Target Python version as an int (e.g. '2.7' -> 27, '3.5' -> 35); taken
    # from CONDA_PY, falling back to conda's default python.  The second
    # .replace handles a dotted value passed via the env var.
    CONDA_PY = int(os.getenv('CONDA_PY', cc.default_python.replace('.',
                  '')).replace('.', ''))
    # Target NumPy version as an int (e.g. '1.11' -> 111); None when unset
    # or when the env var is empty/evaluates to 0.
    CONDA_NPY = os.getenv("CONDA_NPY")
    if not CONDA_NPY:
        CONDA_NPY = None
    else:
        CONDA_NPY = int(CONDA_NPY.replace('.', '')) or None
    # Target R version; overridable via the CONDA_R env var.
    CONDA_R = os.getenv("CONDA_R", "3.2.2")

    @property
    def PY3K(self):
        # 1 when targeting Python 3 (CONDA_PY >= 30), else 0.
        return int(bool(self.CONDA_PY >= 30))

    @property
    def use_MSVC2015(self):
        """Returns whether python version is above 3.4

        (3.5 is compiler switch to MSVC 2015)"""
        return bool(self.CONDA_PY >= 35)

    # True when building a noarch package; see bldpkgs_dir below.
    noarch = False

    def get_conda_py(self):
        # Accessor for the target Python version int.
        return self.CONDA_PY

    # Resolve the build root ("croot"), in priority order: the
    # CONDA_BLD_PATH env var, the conda-build/root-dir .condarc setting,
    # <conda root>/conda-bld when the root is writable, else ~/conda-bld.
    _bld_root_env = os.getenv('CONDA_BLD_PATH')
    _bld_root_rc = cc.rc.get('conda-build', {}).get('root-dir')
    if _bld_root_env:
        croot = abspath(expanduser(_bld_root_env))
    elif _bld_root_rc:
        croot = abspath(expanduser(_bld_root_rc))
    elif cc.root_writable:
        croot = join(cc.root_dir, 'conda-bld')
    else:
        croot = abspath(expanduser('~/conda-bld'))

    short_build_prefix = join(cc.envs_dirs[0], '_build')
    # Pad the prefix with '_placehold' up to 80 characters so embedded
    # prefixes in built binaries can later be rewritten to longer paths.
    long_build_prefix = max(short_build_prefix, (short_build_prefix + 8 * '_placehold')[:80])
    # XXX: Make this None to be more rigorous about requiring the build_prefix
    # to be known before it is used.
    use_long_build_prefix = False
    test_prefix = join(cc.envs_dirs[0], '_test')

    def _get_python(self, prefix):
        # Platform-specific path of the python executable inside `prefix`.
        if sys.platform == 'win32':
            res = join(prefix, 'python.exe')
        else:
            res = join(prefix, 'bin/python')
        return res

    def _get_perl(self, prefix):
        # Platform-specific path of the perl executable inside `prefix`.
        if sys.platform == 'win32':
            res = join(prefix, 'perl.exe')
        else:
            res = join(prefix, 'bin/perl')
        return res

    @property
    def build_prefix(self):
        # Environment prefix used during builds; which one depends on the
        # use_long_build_prefix flag (None means "not decided yet").
        if self.use_long_build_prefix is None:
            raise Exception("I don't know which build prefix to use yet")
        if self.use_long_build_prefix:
            return self.long_build_prefix
        return self.short_build_prefix

    @property
    def build_python(self):
        return self._get_python(self.build_prefix)

    @property
    def test_python(self):
        return self._get_python(self.test_prefix)

    @property
    def build_perl(self):
        return self._get_perl(self.build_prefix)

    @property
    def test_perl(self):
        return self._get_perl(self.test_prefix)

    @property
    def info_dir(self):
        # Package metadata directory inside the build prefix.
        return join(self.build_prefix, 'info')

    @property
    def meta_dir(self):
        # conda's package metadata directory inside the build prefix.
        return join(self.build_prefix, 'conda-meta')

    @property
    def broken_dir(self):
        # Destination for packages that failed their tests.
        return join(self.croot, "broken")

    @property
    def bldpkgs_dir(self):
        # Output directory for built packages: 'noarch' or the platform
        # subdir, under croot.
        if self.noarch:
            return join(self.croot, "noarch")
        else:
            return join(self.croot, cc.subdir)

# Singleton settings object; mutate attributes on this instance.
config = Config()

# Module-level alias, captured once at import time.
croot = config.croot


def show():
    # Print the effective build configuration.
    print('CONDA_PY:', config.CONDA_PY)
    print('CONDA_NPY:', config.CONDA_NPY)
    print('subdir:', cc.subdir)
    print('croot:', croot)
    print('build packages directory:', config.bldpkgs_dir)


if __name__ == '__main__':
    show()
# Extensions that can never be Mach-O binaries; checked before reading.
NO_EXT = (
    '.py', '.pyc', '.pyo', '.h', '.a', '.c', '.txt', '.html',
    '.xml', '.png', '.jpg', '.gif', '.class',
)

# Leading 4-byte magic numbers of the Mach-O container formats.
MAGIC = {
    b'\xca\xfe\xba\xbe': 'MachO-universal',
    b'\xce\xfa\xed\xfe': 'MachO-i386',
    b'\xcf\xfa\xed\xfe': 'MachO-x86_64',
    b'\xfe\xed\xfa\xce': 'MachO-ppc',
    b'\xfe\xed\xfa\xcf': 'MachO-ppc64',
}

# Mach-O header filetype field values, as named by the loader headers.
FILETYPE = {
    1: 'MH_OBJECT',
    2: 'MH_EXECUTE',
    3: 'MH_FVMLIB',
    4: 'MH_CORE',
    5: 'MH_PRELOAD',
    6: 'MH_DYLIB',
    7: 'MH_DYLINKER',
    8: 'MH_BUNDLE',
    9: 'MH_DYLIB_STUB',
    10: 'MH_DSYM',
    11: 'MH_KEXT_BUNDLE',
}


def is_macho(path):
    """Return True when the regular file at `path` starts with a Mach-O magic.

    Symlinks, non-files, and paths with known non-binary extensions are
    rejected without reading the file.
    """
    if path.endswith(NO_EXT) or islink(path) or not isfile(path):
        return False
    with open(path, 'rb') as stream:
        magic = stream.read(4)
    return bool(magic in MAGIC)


def is_dylib(path):
    """Return True when `path` is a Mach-O dynamic shared library."""
    return human_filetype(path) == 'DYLIB'


def human_filetype(path):
    """Return the Mach-O filetype of `path` (e.g. 'DYLIB'), via `otool -h`."""
    output, _ = utils.execute(['otool', '-h', path], check_exit_code=True)
    lines = output.splitlines()
    assert lines[0].startswith(path), path

    for line in lines:
        if line.strip().startswith('0x'):
            # The header line's fifth field is the numeric filetype.
            code = int(line.split()[4])
            return FILETYPE[code][3:]  # drop the 'MH_' prefix


def otool(path):
    "thin wrapper around otool -L"
    output, _ = utils.execute(['otool', '-L', path], check_exit_code=True)
    lines = output.splitlines()
    assert lines[0].startswith(path), path
    deps = []
    for entry in lines[1:]:
        assert entry[0] == '\t', path
        deps.append(entry.split()[0])
    return deps
def install_name_change(path, cb_func):
    """
    change dynamic shared library install names of Mach-O binary `path`.

    `cb_func` is a callback function which called for each shared library name.
    It is called with `path` and the current shared library install name,
    and return the new name (or None if the name should be unchanged).

    Returns False when `path` is a Mach-O stub file that install_name_tool
    refuses to rewrite, True otherwise.  Raises RuntimeError when
    install_name_tool fails for any other reason.
    """
    changes = []
    for link in otool(path):
        # The first link may be the install name of the library itself, but
        # this isn't a big deal because install_name_tool -change is a no-op
        # if given a dependent install name that doesn't exist.
        new_link = cb_func(path, link)
        if new_link:
            changes.append((link, new_link))

    ret = True
    for old, new in changes:
        return_code = 0
        args = ['install_name_tool', '-change', old, new, path]
        print(' '.join(args))

        try:
            stdout, stderr = utils.execute(args, check_exit_code=True)
        except subprocess.CalledProcessError as exc:
            # NOTE(review): assumes utils.execute packs (stdout, stderr)
            # into exc.output - confirm against conda_build.utils.execute.
            stdout, stderr = exc.output
            # BUG FIX: CalledProcessError exposes `returncode`; the previous
            # `exc.return_code` raised AttributeError on every failure,
            # masking the intended RuntimeError below.
            return_code = exc.returncode

        if "Mach-O dynamic shared library stub file" in stderr:
            print("Skipping Mach-O dynamic shared library stub file %s" % path)
            ret = False
            continue
        else:
            print(stderr, file=sys.stderr)

        if return_code:
            raise RuntimeError("install_name_tool failed with exit "
                               "status %d" % return_code)

    return ret
-------------------------------------------------------------------------------- /conda_build/create_test.py: -------------------------------------------------------------------------------- 1 | ''' 2 | Module to handle generating test files. 3 | ''' 4 | 5 | from __future__ import absolute_import, division, print_function 6 | 7 | import shutil 8 | import sys 9 | 10 | from os.path import dirname, join, isdir, exists 11 | 12 | def create_files(dir_path, m): 13 | """ 14 | Create the test files for pkg in the directory given. The resulting 15 | test files are configuration (i.e. platform, architecture, Python and 16 | numpy version, ...) independent. 17 | Return False, if the package has no tests (for any configuration), and 18 | True if it has. 19 | """ 20 | has_files = False 21 | for fn in m.get_value('test/files', []): 22 | has_files = True 23 | path = join(m.path, fn) 24 | if isdir(path): 25 | shutil.copytree(path, join(dir_path, fn)) 26 | else: 27 | shutil.copy(path, dir_path) 28 | return has_files 29 | 30 | 31 | def create_shell_files(dir_path, m): 32 | has_tests = False 33 | if sys.platform == 'win32': 34 | name = 'run_test.bat' 35 | else: 36 | name = 'run_test.sh' 37 | if exists(join(m.path, name)): 38 | shutil.copy(join(m.path, name), dir_path) 39 | has_tests = True 40 | 41 | with open(join(dir_path, name), 'a') as f: 42 | f.write('\n\n') 43 | for cmd in m.get_value('test/commands', []): 44 | f.write(cmd) 45 | f.write('\n') 46 | if sys.platform == 'win32': 47 | f.write("if errorlevel 1 exit 1\n") 48 | has_tests = True 49 | 50 | return has_tests 51 | 52 | 53 | def create_py_files(dir_path, m): 54 | has_tests = False 55 | with open(join(dir_path, 'run_test.py'), 'w') as fo: 56 | fo.write("# tests for %s (this is a generated file)\n" % m.dist()) 57 | with open(join(dirname(__file__), 'header_test.py')) as fi: 58 | fo.write(fi.read() + '\n') 59 | fo.write("print('===== testing package: %s =====')\n" % m.dist()) 60 | 61 | for name in m.get_value('test/imports', 
def create_pl_files(dir_path, m):
    """Generate run_test.pl in dir_path for the package described by `m`.

    Emits a `use` statement (plus a version check for simple module names)
    for every test/imports entry, then appends the recipe's own run_test.pl
    when one exists.  Returns True when any Perl test was generated.
    """
    has_tests = False
    with open(join(dir_path, 'run_test.pl'), 'w') as fo:
        fo.write(r'# tests for %s (this is a generated file)' % m.dist() + '\n')
        fo.write(r'print("===== testing package: %s =====\n");' % m.dist() +
                 '\n')
        fo.write(r'my $expected_version = "%s";' % m.version().rstrip('0') +
                 '\n')
        for mod in m.get_value('test/imports'):
            fo.write(r'print("import: %s\n");' % mod + '\n')
            fo.write('use %s;\n' % mod + '\n')
            # Don't try to print version for complex imports
            if ' ' not in mod:
                fo.write(("if (defined {0}->VERSION) {{\n" +
                          "\tmy $given_version = {0}->VERSION;\n" +
                          "\t$given_version =~ s/0+$//;\n" +
                          "\tdie('Expected version ' . $expected_version . ' but" +
                          " found ' . $given_version) unless ($expected_version " +
                          "eq $given_version);\n" +
                          "\tprint('\tusing version ' . {0}->VERSION . '\n');\n" +
                          "\n}}").format(mod) + '\n')
            has_tests = True

        try:
            with open(join(m.path, 'run_test.pl')) as fi:
                fo.write("# --- run_test.pl (begin) ---" + '\n')
                fo.write(fi.read())
                fo.write("# --- run_test.pl (end) ---" + '\n')
            has_tests = True
        except IOError:
            fo.write("# no run_test.pl exists for this package\n")
        fo.write('\nprint("===== %s OK =====\\n");' % m.dist() + '\n')

    return has_tests
30 | ''', 31 | ) 32 | 33 | p.add_argument( 34 | "--no-anaconda-upload", 35 | action="store_false", 36 | help="Do not ask to upload the package to anaconda.org.", 37 | dest='binstar_upload', 38 | default=conda.config.binstar_upload, 39 | ) 40 | p.add_argument( 41 | "--no-binstar-upload", 42 | action="store_false", 43 | help=argparse.SUPPRESS, 44 | dest='binstar_upload', 45 | default=conda.config.binstar_upload, 46 | ) 47 | p.add_argument( 48 | "name", 49 | action="store", 50 | help="Name of the created package.", 51 | ) 52 | p.add_argument( 53 | "version", 54 | action="store", 55 | help="Version of the created package.", 56 | ) 57 | p.add_argument( 58 | "--build-number", 59 | action="store", 60 | type=int, 61 | default=0, 62 | help="Build number for the package (default is 0).", 63 | ) 64 | p.add_argument( 65 | "--build-string", 66 | action="store", 67 | default=None, 68 | help="Build string for the package (default is automatically generated).", 69 | ) 70 | p.add_argument( 71 | "--dependencies", "-d", 72 | nargs='*', 73 | default=(), 74 | help="""The dependencies of the package. To specify a version restriction for a 75 | dependency, wrap the dependency in quotes, like 'package >=2.0'.""", 76 | ) 77 | p.add_argument( 78 | "--home", 79 | action="store", 80 | help="The homepage for the metapackage." 81 | ) 82 | p.add_argument( 83 | "--license", 84 | action="store", 85 | help="The license of the metapackage.", 86 | ) 87 | p.add_argument( 88 | "--summary", 89 | action="store", 90 | help="""Summary of the package. Pass this in as a string on the command 91 | line, like --summary 'A metapackage for X'. It is recommended to use 92 | single quotes if you are not doing variable substitution to avoid 93 | interpretation of special characters.""", 94 | ) 95 | p.add_argument( 96 | "--entry-points", 97 | nargs='*', 98 | default=(), 99 | help="""Python entry points to create automatically. 
def execute(args, parser):
    """Build a metapackage from the parsed command-line arguments.

    Assembles a MetaData dict from the CLI options, builds the package,
    and offers to upload the result.
    """
    meta = {
        'package': {
            'name': args.name,
            'version': args.version,
        },
        'build': {
            'number': args.build_number,
            'entry_points': args.entry_points,
            # MetaData does the auto stuff if the build string is None
            'string': args.build_string,
        },
        'requirements': {
            'run': args.dependencies,
        },
        'about': {
            'home': args.home,
            'license': args.license,
            'summary': args.summary,
        },
    }
    m = MetaData.fromdict(meta)

    build(m)
    handle_binstar_upload(bldpkg_path(m), args)
def handle_file(f, d):
    """Process a file for inclusion in a noarch python package.
    """
    path = join(config.build_prefix, f)

    # Ignore egg-info and pyc files.
    if f.endswith(('.egg-info', '.pyc')):
        os.unlink(path)

    # The presence of .so indicated this is not a noarch package
    elif f.endswith('.so'):
        _error_exit("Error: Shared object file found: %s" % f)

    elif 'site-packages' in f:
        _force_dir(join(config.build_prefix, 'site-packages'))

        # Move the file to a top-level site-packages/ tree and record its
        # path relative to site-packages/ in the metadata dict.
        rel = f[f.find('site-packages'):]
        dst = join(config.build_prefix, rel)
        _force_dir(dirname(dst))
        os.rename(path, dst)
        d['site-packages'].append(rel[14:])

    # Treat scripts specially with the logic from above
    elif f.startswith('bin/'):
        script_name = basename(path)
        rewrite_script(script_name)
        d['python-scripts'].append(script_name)

    # Include examples in the metadata doc
    elif f.startswith('Examples/'):
        d['Examples'].append(f[9:])

    else:
        _error_exit("Error: Don't know how to handle file: %s" % f)
systems.") 91 | 92 | prefix = config.build_prefix 93 | name = m.name() 94 | 95 | # Create *nix prelink script 96 | with open(join(prefix, 'bin/.%s-pre-link.sh' % name), 'w') as fo: 97 | fo.write('''\ 98 | #!/bin/bash 99 | $PREFIX/bin/python $SOURCE_DIR/link.py 100 | ''') 101 | 102 | scripts_dir = join(prefix, 'Scripts') 103 | _force_dir(scripts_dir) 104 | 105 | # Create windows prelink script 106 | with open(join(scripts_dir, '.%s-pre-link.bat' % name), 'w') as fo: 107 | fo.write('''\ 108 | @echo off 109 | "%PREFIX%\\python.exe" "%SOURCE_DIR%\\link.py" 110 | ''') 111 | 112 | d = {'dist': m.dist(), 113 | 'site-packages': [], 114 | 'python-scripts': [], 115 | 'Examples': []} 116 | 117 | # Populate site-package, python-scripts, and Examples into above 118 | for f in files: 119 | handle_file(f, d) 120 | 121 | # Find our way to this directory 122 | this_dir = dirname(__file__) 123 | 124 | # copy in windows exe shims if there are any python-scripts 125 | if d['python-scripts']: 126 | for fn in 'cli-32.exe', 'cli-64.exe': 127 | shutil.copyfile(join(this_dir, fn), join(prefix, fn)) 128 | 129 | # Read the local _link.py 130 | with open(join(this_dir, '_link.py')) as fi: 131 | link_code = fi.read() 132 | 133 | # Write the package metadata, and bumper with code for linking 134 | with open(join(prefix, 'link.py'), 'w') as fo: 135 | fo.write('DATA = ') 136 | json.dump(d, fo, indent=2, sort_keys=True) 137 | fo.write('\n## END DATA\n\n') 138 | fo.write(link_code) 139 | -------------------------------------------------------------------------------- /conda_build/index.py: -------------------------------------------------------------------------------- 1 | ''' 2 | Functions related to creating repodata index files. 
def read_index_tar(tar_path):
    """ Returns the index.json dict inside the given package tarball. """
    try:
        with tarfile.open(tar_path) as t:
            try:
                return json.loads(t.extractfile('info/index.json').read().decode('utf-8'))
            except EOFError:
                raise RuntimeError("Could not extract %s. File probably corrupt."
                                   % tar_path)
    except OSError as e:
        raise RuntimeError("Could not extract %s (%s)" % (tar_path, e))
    except tarfile.ReadError:
        raise RuntimeError("Could not extract metadata from %s. File probably corrupt." % tar_path)


def write_repodata(repodata, dir_path):
    """ Write updated repodata.json and repodata.json.bz2 """
    data = json.dumps(repodata, indent=2, sort_keys=True)
    # strip trailing whitespace
    data = '\n'.join(line.rstrip() for line in data.splitlines())
    # make sure we have newline at the end
    if not data.endswith('\n'):
        data += '\n'
    with open(join(dir_path, 'repodata.json'), 'w') as fo:
        fo.write(data)
    with open(join(dir_path, 'repodata.json.bz2'), 'wb') as fo:
        fo.write(bz2.compress(data.encode('utf-8')))


def update_index(dir_path, verbose=False, force=False, check_md5=False, remove=True):
    """
    Update all index files in dir_path with changed packages.

    :param verbose: Should detailed status messages be output?
    :type verbose: bool
    :param force: Whether to re-index all packages (including those that
                  haven't changed) or not.
    :type force: bool
    :param check_md5: Whether to check MD5s instead of mtimes for determining
                      if a package changed.
    :type check_md5: bool
    :param remove: Whether to drop index entries whose package file is gone.
    :type remove: bool
    """
    if verbose:
        print("updating index in:", dir_path)
    index_path = join(dir_path, '.index.json')
    if force:
        index = {}
    else:
        try:
            mode_dict = {'mode': 'r', 'encoding': 'utf-8'} if PY3 else {'mode': 'rb'}
            with open(index_path, **mode_dict) as fi:
                index = json.load(fi)
        except (IOError, ValueError):
            # missing or corrupt cache: rebuild from scratch
            index = {}

    files = set(fn for fn in os.listdir(dir_path) if fn.endswith('.tar.bz2'))
    if any(fn.startswith('_license-') for fn in files):
        # FIX: "If you which" -> "If you wish" in the user-facing message.
        sys.exit("""\
Error:
    Indexing a copy of the Anaconda conda package channel is neither
    necessary nor supported.  If you wish to add your own packages,
    you can do so by adding them to a separate channel.
""")
    for fn in files:
        path = join(dir_path, fn)
        if fn in index:
            # skip unchanged packages, judged by md5 or mtime
            if check_md5:
                if index[fn]['md5'] == md5_file(path):
                    continue
            elif index[fn]['mtime'] == getmtime(path):
                continue
        if verbose:
            print('updating:', fn)
        d = read_index_tar(path)
        d.update(file_info(path))
        index[fn] = d

    for fn in files:
        # record whether a detached signature file exists for the package
        index[fn]['sig'] = '.' if isfile(join(dir_path, fn + '.sig')) else None

    if remove:
        # remove files from the index which are not on disk
        for fn in set(index) - files:
            if verbose:
                print("removing:", fn)
            del index[fn]

    # Deal with Python 2 and 3's different json module type reqs
    mode_dict = {'mode': 'w', 'encoding': 'utf-8'} if PY3 else {'mode': 'wb'}
    with open(index_path, **mode_dict) as fo:
        json.dump(index, fo, indent=2, sort_keys=True, default=str)

    # --- new repodata: strip fields repodata.json does not carry
    for fn in index:
        info = index[fn]
        for varname in 'arch', 'platform', 'mtime', 'ucs':
            try:
                del info[varname]
            except KeyError:
                pass

        # legacy packages used 'requires' instead of 'depends'
        if 'requires' in info and 'depends' not in info:
            info['depends'] = info['requires']

    repodata = {'packages': index, 'info': {}}
    write_repodata(repodata, dir_path)
26 | # number: 1 27 | 28 | requirements: 29 | build: 30 | - python 31 | 32 | run: 33 | - python 34 | 35 | test: 36 | # Python imports 37 | imports: 38 | - sympy 39 | - sympy.assumptions 40 | - sympy.assumptions.handlers 41 | - sympy.assumptions.tests 42 | - sympy.calculus 43 | - sympy.calculus.tests 44 | - sympy.categories 45 | - sympy.categories.tests 46 | - sympy.combinatorics 47 | - sympy.combinatorics.tests 48 | - sympy.concrete 49 | - sympy.concrete.tests 50 | - sympy.core 51 | - sympy.core.tests 52 | - sympy.crypto 53 | - sympy.crypto.tests 54 | - sympy.diffgeom 55 | - sympy.diffgeom.tests 56 | - sympy.external 57 | - sympy.external.tests 58 | - sympy.functions 59 | - sympy.functions.combinatorial 60 | - sympy.functions.combinatorial.tests 61 | - sympy.functions.elementary 62 | - sympy.functions.elementary.tests 63 | - sympy.functions.special 64 | - sympy.functions.special.tests 65 | - sympy.galgebra 66 | - sympy.galgebra.tests 67 | - sympy.geometry 68 | - sympy.geometry.tests 69 | - sympy.integrals 70 | - sympy.integrals.tests 71 | - sympy.interactive 72 | - sympy.interactive.tests 73 | - sympy.liealgebras 74 | - sympy.liealgebras.tests 75 | - sympy.logic 76 | - sympy.logic.algorithms 77 | - sympy.logic.tests 78 | - sympy.logic.utilities 79 | - sympy.matrices 80 | - sympy.matrices.expressions 81 | - sympy.matrices.expressions.tests 82 | - sympy.matrices.tests 83 | - sympy.mpmath 84 | - sympy.mpmath.calculus 85 | - sympy.mpmath.functions 86 | - sympy.mpmath.libmp 87 | - sympy.mpmath.matrices 88 | - sympy.mpmath.tests 89 | - sympy.ntheory 90 | - sympy.ntheory.tests 91 | - sympy.parsing 92 | - sympy.parsing.tests 93 | - sympy.physics 94 | - sympy.physics.hep 95 | - sympy.physics.hep.tests 96 | - sympy.physics.mechanics 97 | - sympy.physics.mechanics.tests 98 | - sympy.physics.quantum 99 | - sympy.physics.quantum.tests 100 | - sympy.physics.tests 101 | - sympy.physics.vector 102 | - sympy.physics.vector.tests 103 | - sympy.plotting 104 | - 
sympy.plotting.intervalmath 105 | - sympy.plotting.intervalmath.tests 106 | - sympy.plotting.pygletplot 107 | - sympy.plotting.pygletplot.tests 108 | - sympy.plotting.tests 109 | - sympy.polys 110 | - sympy.polys.agca 111 | - sympy.polys.agca.tests 112 | - sympy.polys.domains 113 | - sympy.polys.domains.tests 114 | - sympy.polys.tests 115 | - sympy.printing 116 | - sympy.printing.pretty 117 | - sympy.printing.pretty.tests 118 | - sympy.printing.tests 119 | - sympy.series 120 | - sympy.series.tests 121 | - sympy.sets 122 | - sympy.sets.tests 123 | - sympy.simplify 124 | - sympy.simplify.tests 125 | - sympy.solvers 126 | - sympy.solvers.tests 127 | - sympy.statistics 128 | - sympy.statistics.tests 129 | - sympy.stats 130 | - sympy.stats.tests 131 | - sympy.strategies 132 | - sympy.strategies.branch 133 | - sympy.strategies.branch.tests 134 | - sympy.strategies.tests 135 | - sympy.tensor 136 | - sympy.tensor.tests 137 | - sympy.unify 138 | - sympy.unify.tests 139 | - sympy.utilities 140 | - sympy.utilities.mathml 141 | - sympy.utilities.tests 142 | 143 | #commands: 144 | # You can put test commands to be run here. Use this to test that the 145 | # entry points work. 146 | 147 | 148 | # You can also put a file called run_test.py in the recipe that will be run 149 | # at test time. 150 | 151 | # requires: 152 | # Put any additional test requirements here. 
For example 153 | # - nose 154 | 155 | about: 156 | home: http://code.google.com/p/sympy 157 | license: BSD License 158 | summary: 'Computer algebra system (CAS) in Python' 159 | 160 | # See 161 | # http://docs.continuum.io/conda/build.html for 162 | # more information about meta.yaml 163 | -------------------------------------------------------------------------------- /tests/test-skeleton/sympy-0.7.5-url/meta.yaml: -------------------------------------------------------------------------------- 1 | package: 2 | name: sympy 3 | version: !!str 0.7.5 4 | 5 | source: 6 | fn: sympy-0.7.5.tar.gz 7 | url: https://pypi.python.org/packages/source/s/sympy/sympy-0.7.5.tar.gz#md5=7de1adb49972a15a3dd975e879a2bea9 8 | md5: 7de1adb49972a15a3dd975e879a2bea9 9 | # patches: 10 | # List any patch files here 11 | # - fix.patch 12 | 13 | # build: 14 | #preserve_egg_dir: True 15 | #entry_points: 16 | # Put any entry points (scripts to be generated automatically) here. The 17 | # syntax is module:function. For example 18 | # 19 | # - sympy = sympy:main 20 | # 21 | # Would create an entry point called sympy that calls sympy.main() 22 | 23 | 24 | # If this is a new build for the same version, increment the build 25 | # number. If you do not include this key, it defaults to 0. 
26 | # number: 1 27 | 28 | requirements: 29 | build: 30 | - python 31 | 32 | run: 33 | - python 34 | 35 | test: 36 | # Python imports 37 | imports: 38 | - sympy 39 | - sympy.assumptions 40 | - sympy.assumptions.handlers 41 | - sympy.assumptions.tests 42 | - sympy.calculus 43 | - sympy.calculus.tests 44 | - sympy.categories 45 | - sympy.categories.tests 46 | - sympy.combinatorics 47 | - sympy.combinatorics.tests 48 | - sympy.concrete 49 | - sympy.concrete.tests 50 | - sympy.core 51 | - sympy.core.tests 52 | - sympy.crypto 53 | - sympy.crypto.tests 54 | - sympy.diffgeom 55 | - sympy.diffgeom.tests 56 | - sympy.external 57 | - sympy.external.tests 58 | - sympy.functions 59 | - sympy.functions.combinatorial 60 | - sympy.functions.combinatorial.tests 61 | - sympy.functions.elementary 62 | - sympy.functions.elementary.tests 63 | - sympy.functions.special 64 | - sympy.functions.special.tests 65 | - sympy.galgebra 66 | - sympy.galgebra.tests 67 | - sympy.geometry 68 | - sympy.geometry.tests 69 | - sympy.integrals 70 | - sympy.integrals.tests 71 | - sympy.interactive 72 | - sympy.interactive.tests 73 | - sympy.liealgebras 74 | - sympy.liealgebras.tests 75 | - sympy.logic 76 | - sympy.logic.algorithms 77 | - sympy.logic.tests 78 | - sympy.logic.utilities 79 | - sympy.matrices 80 | - sympy.matrices.expressions 81 | - sympy.matrices.expressions.tests 82 | - sympy.matrices.tests 83 | - sympy.mpmath 84 | - sympy.mpmath.calculus 85 | - sympy.mpmath.functions 86 | - sympy.mpmath.libmp 87 | - sympy.mpmath.matrices 88 | - sympy.mpmath.tests 89 | - sympy.ntheory 90 | - sympy.ntheory.tests 91 | - sympy.parsing 92 | - sympy.parsing.tests 93 | - sympy.physics 94 | - sympy.physics.hep 95 | - sympy.physics.hep.tests 96 | - sympy.physics.mechanics 97 | - sympy.physics.mechanics.tests 98 | - sympy.physics.quantum 99 | - sympy.physics.quantum.tests 100 | - sympy.physics.tests 101 | - sympy.physics.vector 102 | - sympy.physics.vector.tests 103 | - sympy.plotting 104 | - 
sympy.plotting.intervalmath 105 | - sympy.plotting.intervalmath.tests 106 | - sympy.plotting.pygletplot 107 | - sympy.plotting.pygletplot.tests 108 | - sympy.plotting.tests 109 | - sympy.polys 110 | - sympy.polys.agca 111 | - sympy.polys.agca.tests 112 | - sympy.polys.domains 113 | - sympy.polys.domains.tests 114 | - sympy.polys.tests 115 | - sympy.printing 116 | - sympy.printing.pretty 117 | - sympy.printing.pretty.tests 118 | - sympy.printing.tests 119 | - sympy.series 120 | - sympy.series.tests 121 | - sympy.sets 122 | - sympy.sets.tests 123 | - sympy.simplify 124 | - sympy.simplify.tests 125 | - sympy.solvers 126 | - sympy.solvers.tests 127 | - sympy.statistics 128 | - sympy.statistics.tests 129 | - sympy.stats 130 | - sympy.stats.tests 131 | - sympy.strategies 132 | - sympy.strategies.branch 133 | - sympy.strategies.branch.tests 134 | - sympy.strategies.tests 135 | - sympy.tensor 136 | - sympy.tensor.tests 137 | - sympy.unify 138 | - sympy.unify.tests 139 | - sympy.utilities 140 | - sympy.utilities.mathml 141 | - sympy.utilities.tests 142 | 143 | #commands: 144 | # You can put test commands to be run here. Use this to test that the 145 | # entry points work. 146 | 147 | 148 | # You can also put a file called run_test.py in the recipe that will be run 149 | # at test time. 150 | 151 | # requires: 152 | # Put any additional test requirements here. 
For example 153 | # - nose 154 | 155 | about: 156 | home: http://code.google.com/p/sympy 157 | license: BSD License 158 | summary: 'Computer algebra system (CAS) in Python' 159 | 160 | # See 161 | # http://docs.continuum.io/conda/build.html for 162 | # more information about meta.yaml 163 | -------------------------------------------------------------------------------- /tests/install_miniconda.py: -------------------------------------------------------------------------------- 1 | import sys 2 | import os 3 | import hashlib 4 | import subprocess 5 | 6 | import requests 7 | 8 | tempdir = os.path.expanduser("~") 9 | 10 | 11 | def rm_rf(path, max_retries=5): 12 | """ 13 | Completely delete path 14 | 15 | max_retries is the number of times to retry on failure. The default is 16 | 5. This only applies to deleting a directory. 17 | 18 | """ 19 | import sys 20 | import logging 21 | import shutil 22 | import stat 23 | import time 24 | from os.path import islink, isfile, isdir 25 | 26 | on_win = bool(sys.platform == 'win32') 27 | log = logging.getLogger(__name__) 28 | 29 | if islink(path) or isfile(path): 30 | # Note that we have to check if the destination is a link because 31 | # exists('/path/to/dead-link') will return False, although 32 | # islink('/path/to/dead-link') is True. 33 | os.unlink(path) 34 | 35 | elif isdir(path): 36 | for i in range(max_retries): 37 | try: 38 | shutil.rmtree(path) 39 | return 40 | except OSError as e: 41 | msg = "Unable to delete %s\n%s\n" % (path, e) 42 | if on_win: 43 | try: 44 | def remove_readonly(func, path, excinfo): 45 | os.chmod(path, stat.S_IWRITE) 46 | func(path) 47 | shutil.rmtree(path, onerror=remove_readonly) 48 | return 49 | except OSError as e1: 50 | msg += "Retry with onerror failed (%s)\n" % e1 51 | 52 | try: 53 | subprocess.check_call(['cmd', '/c', 'rd', '/s', '/q', path]) 54 | return 55 | except subprocess.CalledProcessError as e2: 56 | msg += '%s\n' % e2 57 | log.debug(msg + "Retrying after %s seconds..." 
def download_file(url, md5):
    """Download *url* into ~/download_cache and return the local file path.

    If a previously downloaded copy exists and its MD5 checksum matches
    *md5*, the cached file is reused; on a mismatch it is re-downloaded.
    """
    urlparts = requests.packages.urllib3.util.url.parse_url(url)
    # The last path component of the URL becomes the local file name.
    local_filename = urlparts.path.split('/')[-1]

    r = requests.get(url, stream=True)
    r.raise_for_status()

    dir_path = os.path.join(tempdir, 'download_cache')
    file_path = os.path.join(dir_path, local_filename)

    print("Downloading %s to %s" % (local_filename, file_path))
    if not os.path.exists(dir_path):
        os.makedirs(dir_path)

    if os.path.exists(file_path):
        if hashsum_file(file_path) == md5:
            print("File %s already exists at %s" % (local_filename,
                                                    file_path))
            return file_path
        else:
            print("MD5 mismatch. Downloading again.")

    # NOTE(review): assumes the server always sends Content-Length --
    # int(None) would raise a TypeError otherwise; confirm for all mirrors.
    size = int(r.headers.get('Content-Length'))
    with open(file_path, 'wb') as f:
        # Stream in 1 MB chunks so large installers never load fully into memory.
        for i, chunk in enumerate(r.iter_content(chunk_size=2**20)):
            if chunk:  # filter out keep-alive new chunks
                print("writing %s/%s MB" % (r.raw.tell()/2**20, size/2**20))
                f.write(chunk)
                f.flush()
    return file_path


def hashsum_file(path, mode='md5'):
    """Return the hex digest of the file at *path* using hash algorithm *mode*."""
    with open(path, 'rb') as fi:
        h = hashlib.new(mode)
        while True:
            # 256 KB read chunks keep memory use flat for big files.
            chunk = fi.read(262144)
            if not chunk:
                break
            h.update(chunk)
        return h.hexdigest()
def main():
    """Pick the Miniconda installer that matches this machine's
    BINSTAR_PLATFORM and Python major version, download it, and install it."""
    wanted = '%s-%s' % (os.environ['BINSTAR_PLATFORM'],
                        str(sys.version_info[0]))
    installers = (
        ('http://repo.continuum.io/miniconda/Miniconda-3.5.5-Windows-x86_64.exe',
         'b6285db92cc042a44b2afaaf1a99b8cc', 'win-64-2'),
        ('http://repo.continuum.io/miniconda/Miniconda-3.5.5-Windows-x86.exe',
         '67a6efb324491928f9aaa447ab5491ac', 'win-32-2'),
        ('http://repo.continuum.io/miniconda/Miniconda3-3.5.5-Windows-x86_64.exe',
         '6c6643ae90028d89e3ef72889bf8bb36', 'win-64-3'),
        ('http://repo.continuum.io/miniconda/Miniconda3-3.5.5-Windows-x86.exe',
         '2aae7daffbbd4a3f2b775c85a1500a47', 'win-32-3'),
    )
    for url, md5, plat in installers:
        if plat == wanted:
            install_miniconda(download_file(url, md5))


if __name__ == '__main__':
    main()
def msvc_env_cmd(override=None):
    """Return batch-file lines that activate the MSVC toolchain matching the
    target Python (VS 9.0 for py2, 10.0 for py3, 14.0 when use_MSVC2015),
    or an empty string if no vcvarsall.bat can be located.

    :param override: explicit VS version string (e.g. '9.0') from the
        recipe's build/msvc_compiler; also sets DISTUTILS_USE_SDK/MSSdk.
    """
    if 'ProgramFiles(x86)' in os.environ:
        program_files = os.environ['ProgramFiles(x86)']
    else:
        program_files = os.environ['ProgramFiles']

    msvc_env_lines = []

    if config.PY3K and config.use_MSVC2015:
        version = '14.0'
    elif config.PY3K:
        version = '10.0'
    else:
        version = '9.0'

    if override is not None:
        version = override
        msvc_env_lines.append('set DISTUTILS_USE_SDK=1')
        msvc_env_lines.append('set MSSdk=1')

    vcvarsall = os.path.join(program_files,
                             r'Microsoft Visual Studio {version}'.format(version=version),
                             'VC', 'vcvarsall.bat')

    # Fall back to the "Microsoft Visual C++ Compiler for Python 2.7"
    # installs when the full Visual Studio is absent.
    # Bug fix: the original computed `not_vcvars = not isfile(vcvarsall)`
    # once and never refreshed it after reassigning `vcvarsall`, so the
    # fallback paths could never actually be used and the function always
    # warned/returned '' when the main VS install was missing.
    localappdata = os.environ.get("localappdata")
    if not isfile(vcvarsall) and localappdata and not config.PY3K:
        vcvarsall = os.path.join(localappdata, "Programs", "Common",
            "Microsoft", "Visual C++ for Python", "9.0", "vcvarsall.bat")
    if not isfile(vcvarsall) and program_files and not config.PY3K:
        vcvarsall = os.path.join(program_files, 'Common Files',
            'Microsoft', 'Visual C++ for Python', "9.0", "vcvarsall.bat")
    if not isfile(vcvarsall):
        print("Warning: Couldn't find Visual Studio: %r" % vcvarsall)
        return ''

    msvc_env_lines.append('call "%s" %s' % (vcvarsall, 'x86' if cc.bits == 32 else 'amd64'))
    return '\n'.join(msvc_env_lines)


def kill_processes(process_names=["msbuild.exe"]):
    """Terminate running processes whose executable name is in *process_names*.

    Used so lingering build tools (msbuild.exe by default) do not keep locks
    on files in the build prefix. Best-effort: inaccessible or already-exited
    processes are skipped.
    """
    import psutil
    # The pid-listing API changed between psutil v1 and v2.
    try:
        from psutil import get_pid_list           # v1 API
    except ImportError:
        try:
            from psutil import pids as get_pid_list   # v2 API
        except ImportError:
            raise ImportError("psutil failed to import.")
    targets = set(name.lower() for name in process_names)
    for pid in get_pid_list():
        try:
            proc = psutil.Process(pid)
            # psutil v2 turned the `name` attribute into a method.
            name = proc.name() if callable(proc.name) else proc.name
            if name.lower() in targets:
                print('Terminating:', name)
                proc.terminate()
        except Exception:
            # Process may have exited or be inaccessible; skip it.
            continue


def build(m):
    """Build the package described by metadata *m*: prepend a generated MSVC
    environment header to the recipe's bld.bat and run it in the source dir."""
    env = dict(os.environ)
    env.update(environ.get_dict(m))
    env = environ.prepend_bin_path(env, config.build_prefix, True)

    # Make sure the Library\bin / include / lib destinations exist.
    for name in 'BIN', 'INC', 'LIB':
        path = env['LIBRARY_' + name]
        if not isdir(path):
            os.makedirs(path)

    src_dir = source.get_dir()
    bld_bat = join(m.path, 'bld.bat')
    if exists(bld_bat):
        with open(bld_bat) as fi:
            data = fi.read()
        with open(join(src_dir, 'bld.bat'), 'w') as fo:
            fo.write(msvc_env_cmd(override=m.get_value('build/msvc_compiler', None)))
            fo.write('\n')
            # more debuggable with echo on
            fo.write('@echo on\n')
            fo.write("set INCLUDE={};%INCLUDE%\n".format(env["LIBRARY_INC"]))
            fo.write("set LIB={};%LIB%\n".format(env["LIBRARY_LIB"]))
            fo.write("REM ===== end generated header =====\n")
            fo.write(data)

        cmd = [os.environ['COMSPEC'], '/c', 'call', 'bld.bat']
        _check_call(cmd, cwd=src_dir, env={str(k): str(v) for k, v in env.items()})
        kill_processes()
        fix_staged_scripts()
def _dotted_version(num):
    """Turn a packed version like 27 or 110 into '2.7' / '1.10'.

    First digit is the major version; everything after it is the minor.
    """
    s = str(num)
    return s[0] + '.' + s[1:]

def get_perl_ver():
    """Perl version string from the build configuration."""
    return str(config.CONDA_PERL)

def get_py_ver():
    """Python version string (e.g. '2.7') for the build environment.

    Bug fix: the original used '.'.join(str(config.CONDA_PY)), which turns
    a three-digit value such as 310 into '3.1.0'.  Use the same major/rest
    split that get_npy_ver already uses.
    """
    return _dotted_version(config.CONDA_PY)

def get_npy_ver():
    """NumPy version string, or '' when no CONDA_NPY is configured."""
    if config.CONDA_NPY:
        # Convert int -> string, e.g.
        #   17 -> '1.7'
        #   110 -> '1.10'
        return _dotted_version(config.CONDA_NPY)
    return ''

def get_stdlib_dir():
    """Path of the Python standard library inside the build prefix."""
    return join(config.build_prefix, 'Lib' if sys.platform == 'win32' else
                'lib/python%s' % get_py_ver())

def get_sp_dir():
    """Path of site-packages inside the build prefix."""
    return join(get_stdlib_dir(), 'site-packages')
def get_dict(m=None, prefix=None):
    """Assemble the environment-variable dict exported to build scripts.

    :param m: recipe metadata object (optional); adds PKG_* / RECIPE_DIR and
        any variables whitelisted in build/script_env.
    :param prefix: build prefix to use; defaults to config.build_prefix.
    :return: dict of str -> str environment variables.
    """
    if not prefix:
        prefix = config.build_prefix

    python = config.build_python
    d = {'CONDA_BUILD': '1', 'PYTHONNOUSERSITE': '1'}
    d['CONDA_DEFAULT_ENV'] = config.build_prefix
    d['ARCH'] = str(cc.bits)
    d['PREFIX'] = prefix
    d['PYTHON'] = python
    d['PY3K'] = str(config.PY3K)
    d['STDLIB_DIR'] = get_stdlib_dir()
    d['SP_DIR'] = get_sp_dir()
    d['SYS_PREFIX'] = sys.prefix
    d['SYS_PYTHON'] = sys.executable
    d['PERL_VER'] = get_perl_ver()
    d['PY_VER'] = get_py_ver()
    # NPY_VER is only exported when a NumPy version is configured.
    if get_npy_ver():
        d['NPY_VER'] = get_npy_ver()
    d['SRC_DIR'] = source.get_dir()
    # Pass selected locale/proxy settings through from the outer environment.
    if "LANG" in os.environ:
        d['LANG'] = os.environ['LANG']
    if "HTTPS_PROXY" in os.environ:
        d['HTTPS_PROXY'] = os.environ['HTTPS_PROXY']
    if "HTTP_PROXY" in os.environ:
        d['HTTP_PROXY'] = os.environ['HTTP_PROXY']

    if m:
        # Whitelisted variables from build/script_env; missing ones become ''.
        for var_name in m.get_value('build/script_env', []):
            value = os.getenv(var_name)
            if value is None:
                value = ''
            d[var_name] = value

    try:
        d['CPU_COUNT'] = str(multiprocessing.cpu_count())
    except NotImplementedError:
        # cpu_count() can be unimplemented on exotic platforms.
        d['CPU_COUNT'] = "1"

    # GIT_* variables describing the checked-out source, if it is a git repo.
    d.update(**get_git_build_info(d['SRC_DIR']))
    d['PATH'] = dict(os.environ)['PATH']
    d = prepend_bin_path(d, prefix)

    if sys.platform == 'win32':         # -------- Windows
        d['SCRIPTS'] = join(prefix, 'Scripts')
        d['LIBRARY_PREFIX'] = join(prefix, 'Library')
        d['LIBRARY_BIN'] = join(d['LIBRARY_PREFIX'], 'bin')
        d['LIBRARY_INC'] = join(d['LIBRARY_PREFIX'], 'include')
        d['LIBRARY_LIB'] = join(d['LIBRARY_PREFIX'], 'lib')
        # This probably should be done more generally
        d['CYGWIN_PREFIX'] = prefix.replace('\\', '/').replace('C:', '/cygdrive/c')

        d['R'] = join(prefix, 'Scripts', 'R.exe')
    else:                               # -------- Unix
        d['HOME'] = os.getenv('HOME', 'UNKNOWN')
        d['PKG_CONFIG_PATH'] = join(prefix, 'lib', 'pkgconfig')
        d['R'] = join(prefix, 'bin', 'R')

    if sys.platform == 'darwin':         # -------- OSX
        d['OSX_ARCH'] = 'i386' if cc.bits == 32 else 'x86_64'
        d['CFLAGS'] = '-arch %(OSX_ARCH)s' % d
        d['CXXFLAGS'] = d['CFLAGS']
        d['LDFLAGS'] = d['CFLAGS']
        d['MACOSX_DEPLOYMENT_TARGET'] = '10.6'

    elif sys.platform.startswith('linux'):   # -------- Linux
        d['LD_RUN_PATH'] = prefix + '/lib'

    if m:
        d['PKG_NAME'] = m.name()
        d['PKG_VERSION'] = m.version()
        d['PKG_BUILDNUM'] = str(m.build_number())
        d['RECIPE_DIR'] = m.path

    return d


if __name__ == '__main__':
    # Smoke test: print the environment that would be exported, verifying
    # every value is a plain string.
    e = get_dict()
    for k in sorted(e):
        assert isinstance(e[k], str), k
        print('%s=%s' % (k, e[k]))
def copy_into(src, dst):
    """Copy *src* into the directory *dst*.

    If *src* is a directory, every entry inside it is copied into *dst*
    (subdirectories via copytree); if *src* is a single file it is copied
    into *dst* under its own basename.
    """
    if not isdir(src):
        # Bug fix: the original put the file itself into the to-copy list
        # and then computed os.path.join(src, afile) == join(src, src),
        # yielding a bogus source path (or, for absolute paths, copying the
        # file onto itself).
        shutil.copy2(src, os.path.join(dst, os.path.basename(src)))
        return

    for afile in os.listdir(src):
        srcname = os.path.join(src, afile)
        dstname = os.path.join(dst, afile)

        if os.path.isdir(srcname):
            shutil.copytree(srcname, dstname)
        else:
            shutil.copy2(srcname, dstname)


def relative(f, d='lib'):
    """Return the relative path from the directory containing *f* to the
    directory *d*, both given relative to the same prefix root.

    Used to build RPATH-style '$ORIGIN/..' chains, e.g.
    relative('bin/python', 'lib') == '../lib'.
    """
    assert not f.startswith('/'), f
    assert not d.startswith('/'), d
    d = d.strip('/').split('/')
    if d == ['.']:
        d = []
    f = dirname(f).split('/')
    if f == ['']:
        f = []
    # Drop the common leading components, then go up one '..' per remaining
    # component of f's directory and down into what is left of d.
    while d and f and d[0] == f[0]:
        d.pop(0)
        f.pop(0)
    return '/'.join(((['..'] * len(f)) if f else ['.']) + d)


def _check_call(args, **kwargs):
    """subprocess.check_call that exits the interpreter with a readable
    message instead of raising on a non-zero exit status."""
    try:
        subprocess.check_call(args, **kwargs)
    except subprocess.CalledProcessError:
        sys.exit('Command failed: %s' % ' '.join(args))
81 | """) 82 | 83 | subprocess.check_call([unxz, '-f', '-k', tarball]) 84 | tarball = tarball[:-3] 85 | t = tarfile.open(tarball, mode) 86 | t.extractall(path=dir_path) 87 | t.close() 88 | 89 | 90 | def unzip(zip_path, dir_path): 91 | z = zipfile.ZipFile(zip_path) 92 | for name in z.namelist(): 93 | if name.endswith('/'): 94 | continue 95 | path = join(dir_path, *name.split('/')) 96 | dp = dirname(path) 97 | if not isdir(dp): 98 | os.makedirs(dp) 99 | with open(path, 'wb') as fo: 100 | fo.write(z.read(name)) 101 | z.close() 102 | 103 | 104 | def file_info(path): 105 | return {'size': getsize(path), 106 | 'md5': md5_file(path), 107 | 'mtime': getmtime(path)} 108 | 109 | # Taken from toolz 110 | 111 | def groupby(key, seq): 112 | """ Group a collection by a key function 113 | >>> names = ['Alice', 'Bob', 'Charlie', 'Dan', 'Edith', 'Frank'] 114 | >>> groupby(len, names) # doctest: +SKIP 115 | {3: ['Bob', 'Dan'], 5: ['Alice', 'Edith', 'Frank'], 7: ['Charlie']} 116 | >>> iseven = lambda x: x % 2 == 0 117 | >>> groupby(iseven, [1, 2, 3, 4, 5, 6, 7, 8]) # doctest: +SKIP 118 | {False: [1, 3, 5, 7], True: [2, 4, 6, 8]} 119 | Non-callable keys imply grouping on a member. 120 | >>> groupby('gender', [{'name': 'Alice', 'gender': 'F'}, 121 | ... {'name': 'Bob', 'gender': 'M'}, 122 | ... 
def getter(index):
    """Return a function extracting *index* from its argument.

    A list index always yields a tuple-returning getter (even for a single
    element); an empty list yields a constant-() getter; any other index is
    passed straight to operator.itemgetter.
    """
    if isinstance(index, list):
        if len(index) == 1:
            index = index[0]
            return lambda x: (x[index],)
        elif index:
            return operator.itemgetter(*index)
        else:
            return lambda x: ()
    else:
        return operator.itemgetter(index)

def comma_join(items):
    """
    Like ', '.join(items) but with and

    Examples:

    >>> comma_join(['a'])
    'a'
    >>> comma_join(['a', 'b'])
    'a and b'
    >>> comma_join(['a', 'b', 'c'])
    'a, b, and c'
    """
    # (Docstring fix: the last example above was missing its closing quote,
    # which made the doctest a syntax error.)
    return ' and '.join(items) if len(items) <= 2 else ', '.join(items[:-1]) + ', and ' + items[-1]


def execute(command, **kwargs):
    """Helper method to shell out and execute a command through subprocess.

    :param attempts: How many times to retry running the command.
    :param binary: On Python 3, return stdout and stderr as bytes if
                   binary is True, as Unicode otherwise.
    :param check_exit_code: Single bool, int, or list of allowed exit
                            codes.  Defaults to [0].  Raise
                            :class:`CalledProcessError` unless
                            program exits with one of these code.
    :param command: The command passed to the subprocess.Popen.
    :param cwd: Set the current working directory
    :param env_variables: Environment variables and their values that
                          will be set for the process.
    :param retry_interval: Interval between execute attempts, in seconds
    :param shell: whether or not there should be a shell used to
                  execute this command.

    :raises: :class:`subprocess.CalledProcessError`
    """
    # pylint: disable=too-many-locals

    attempts = kwargs.pop("attempts", ATTEMPTS)
    binary = kwargs.pop('binary', False)
    check_exit_code = kwargs.pop('check_exit_code', False)
    cwd = kwargs.pop('cwd', None)
    env_variables = kwargs.pop("env_variables", None)
    retry_interval = kwargs.pop("retry_interval", RETRY_INTERVAL)
    shell = kwargs.pop("shell", False)

    command = [str(argument) for argument in command]
    ignore_exit_code = False

    # Normalize check_exit_code: bool toggles checking at all; a single int
    # becomes a one-element allowed list.
    if isinstance(check_exit_code, bool):
        ignore_exit_code = not check_exit_code
        check_exit_code = [0]
    elif isinstance(check_exit_code, int):
        check_exit_code = [check_exit_code]

    while attempts > 0:
        attempts = attempts - 1
        try:
            process = subprocess.Popen(command,
                                       stdin=subprocess.PIPE,
                                       stdout=subprocess.PIPE,
                                       stderr=subprocess.PIPE, shell=shell,
                                       cwd=cwd, env=env_variables)
            result = process.communicate()
            return_code = process.returncode

            if PY3 and not binary and result is not None:
                # pylint: disable=no-member

                # Decode from the locale using using the surrogate escape error
                # handler (decoding cannot fail)
                (stdout, stderr) = result
                stdout = os.fsdecode(stdout)
                stderr = os.fsdecode(stderr)
            else:
                stdout, stderr = result

            if not ignore_exit_code and return_code not in check_exit_code:
                raise subprocess.CalledProcessError(returncode=return_code,
                                                    cmd=command,
                                                    output=(stdout, stderr))
            else:
                return (stdout, stderr)
        except subprocess.CalledProcessError:
            # Retry after a pause; re-raise once all attempts are exhausted.
            if attempts:
                time.sleep(retry_interval)
            else:
                raise
# (c) Continuum Analytics, Inc. / http://continuum.io
# All Rights Reserved
#
# conda is distributed under the terms of the BSD 3-clause license.
# Consult LICENSE.txt or http://opensource.org/licenses/BSD-3-Clause.

from __future__ import absolute_import, division, print_function

import json
import pprint
import re
import sys
import os
import tarfile
from locale import getpreferredencoding
from os.path import abspath, expanduser, isdir, join, split

from conda.compat import PY3
from conda.cli.conda_argparse import ArgumentParser
from conda_build.main_build import args_func

from conda_build.convert import (has_cext, tar_update, get_pure_py_file_map,
                                 has_nonpy_entry_points)


epilog = """

Tool to convert packages

conda convert converts pure Python packages to other platforms, and converts
Gohlke's .exe packages into conda packages.

Packages are automatically organized in subdirectories according to platform,
e.g.,

osx-64/
    package-1.0-py33.tar.bz2
win-32/
    package-1.0-py33.tar.bz2

Examples:

Convert a package built with conda build to Windows 64-bit, and place the
resulting package in the current directory (supposing a default Anaconda
install on Mac OS X):

    conda convert package-1.0-py33.tar.bz2 -p win-64

Convert a Gohlke .exe to a conda package, and add make it depend on numpy 1.8
or higher:

    conda convert cvxopt-1.1.7.win-amd64-py2.7.exe -d 'numpy >=1.8'

"""


def main():
    """Entry point for ``conda convert``: build the argument parser and hand
    the parsed arguments to ``execute`` via ``args_func``."""
    p = ArgumentParser(
        description="""
Various tools to convert conda packages. Takes a pure Python package build for
one platform and converts it to work on one or more other platforms, or
all.""",
        epilog=epilog,
    )

    # TODO: Factor this into a subcommand, since it's python package specific
    p.add_argument(
        'package_files',
        metavar='package-files',
        action="store",
        nargs='+',
        help="Package files to convert."
    )
    p.add_argument(
        '-p', "--platform",
        dest='platforms',
        action="append",
        choices=['osx-64', 'linux-32', 'linux-64', 'win-32', 'win-64', 'all'],
        help="Platform to convert the packages to."
    )
    p.add_argument(
        "--dependencies", "-d",
        nargs='*',
        help="""Additional (besides python) dependencies of the converted
        package.  To specify a version restriction for a dependency, wrap
        the dependency in quotes, like 'package >=2.0'.""",
    )
    p.add_argument(
        '--show-imports',
        action='store_true',
        default=False,
        help="Show Python imports for compiled parts of the package.",
    )
    p.add_argument(
        '-f', "--force",
        action="store_true",
        help="Force convert, even when a package has compiled C extensions.",
    )
    p.add_argument(
        '-o', '--output-dir',
        default='.',
        help="""Directory to write the output files.  The packages will be
        organized in platform/ subdirectories, e.g.,
        win-32/package-1.0-py27_0.tar.bz2."""
    )
    p.add_argument(
        '-v', '--verbose',
        default=False,
        action='store_true',
        help="Print verbose output."
    )
    p.add_argument(
        "--dry-run",
        action="store_true",
        help="Only display what would have been done.",
    )
    p.add_argument(
        "-q", "--quiet",
        action="store_true",
        help="Don't print as much output."
    )

    # `execute` (defined later in this module) does the actual conversion.
    p.set_defaults(func=execute)

    args = p.parse_args()
    args_func(args, p)


# Mapping of install locations between the unix and windows package layouts.
path_mapping = [# (unix, windows)
                ('lib/python{pyver}', 'Lib'),
                ('bin', 'Scripts')]

# Extracts the version from a dependency spec like "python 2.7".
pyver_re = re.compile(r'python\s+(\d.\d)')
Same as input file" % (platform, fn)) 166 | continue 167 | if not PY3: 168 | platform = platform.decode('utf-8') 169 | dest_plat = platform.split('-')[0] 170 | dest_type = 'unix' if dest_plat in {'osx', 'linux'} else 'win' 171 | 172 | if source_type == 'unix' and dest_type == 'win': 173 | nonpy_unix = nonpy_unix or has_nonpy_entry_points(t, 174 | unix_to_win=True, 175 | show=args.verbose, 176 | quiet=args.quiet) 177 | if source_type == 'win' and dest_type == 'unix': 178 | nonpy_win = nonpy_win or has_nonpy_entry_points(t, 179 | unix_to_win=False, 180 | show=args.verbose, 181 | quiet=args.quiet) 182 | 183 | if nonpy_unix and not args.force: 184 | print(("WARNING: Package %s has non-Python entry points, " 185 | "skipping %s to %s conversion. Use -f to force.") % 186 | (file, info['platform'], platform), file=sys.stderr) 187 | continue 188 | 189 | if nonpy_win and not args.force: 190 | print(("WARNING: Package %s has entry points, which are not " 191 | "supported yet. Skipping %s to %s conversion. 
Use -f to force.") % 192 | (file, info['platform'], platform), file=sys.stderr) 193 | continue 194 | 195 | file_map = get_pure_py_file_map(t, platform) 196 | 197 | if args.dry_run: 198 | if not args.quiet: 199 | print("Would convert %s from %s to %s" % 200 | (file, info['platform'], dest_plat)) 201 | if args.verbose: 202 | pprint.pprint(file_map) 203 | continue 204 | else: 205 | if not args.quiet: 206 | print("Converting %s from %s to %s" % 207 | (file, info['platform'], platform)) 208 | 209 | if not isdir(output_dir): 210 | os.makedirs(output_dir) 211 | tar_update(t, join(output_dir, fn), file_map, 212 | verbose=args.verbose, quiet=args.quiet) 213 | 214 | 215 | def execute(args, parser): 216 | files = args.package_files 217 | 218 | for file in files: 219 | # Don't use byte literals for paths in Python 2 220 | if not PY3: 221 | file = file.decode(getpreferredencoding()) 222 | 223 | file = abspath(expanduser(file)) 224 | if file.endswith('.tar.bz2'): 225 | conda_convert(file, args) 226 | 227 | elif file.endswith('.exe'): 228 | from conda_build.convert_gohlke import convert 229 | 230 | if args.platforms: 231 | raise RuntimeError('--platform option not allowed for Gohlke ' 232 | '.exe package conversion') 233 | convert(file, args.output_dir, add_depends=args.dependencies, 234 | verbose=args.verbose) 235 | 236 | elif file.endswith('.whl'): 237 | raise RuntimeError('Conversion from wheel packages is not ' 238 | 'implemented yet, stay tuned.') 239 | 240 | else: 241 | raise RuntimeError("cannot convert: %s" % file) 242 | 243 | 244 | if __name__ == '__main__': 245 | main() 246 | -------------------------------------------------------------------------------- /conda_build/main_develop.py: -------------------------------------------------------------------------------- 1 | # (c) Continuum Analytics, Inc. / http://continuum.io 2 | # All Rights Reserved 3 | # 4 | # conda is distributed under the terms of the BSD 3-clause license. 
5 | # Consult LICENSE.txt or http://opensource.org/licenses/BSD-3-Clause. 6 | 7 | from __future__ import absolute_import, division, print_function 8 | 9 | import sys 10 | from os.path import join, isdir, abspath, expanduser, exists 11 | from os import walk 12 | import fnmatch 13 | import shutil 14 | 15 | from conda.cli.common import add_parser_prefix, get_prefix 16 | from conda.cli.conda_argparse import ArgumentParser 17 | from conda_build.main_build import args_func 18 | from conda_build.post import mk_relative_osx 19 | from conda_build.utils import _check_call 20 | 21 | from conda.install import linked 22 | 23 | 24 | def main(): 25 | p = ArgumentParser( 26 | description=""" 27 | 28 | Install a Python package in 'development mode'. 29 | 30 | This works by creating a conda.pth file in site-packages.""" 31 | # TODO: Use setup.py to determine any entry-points to install. 32 | ) 33 | 34 | p.add_argument( 35 | 'source', 36 | action="store", 37 | metavar='PATH', 38 | nargs='+', 39 | help="Path to the source directory." 40 | ) 41 | p.add_argument('-npf', '--no-pth-file', 42 | action='store_true', 43 | help=("Relink compiled extension dependencies against " 44 | "libraries found in current conda env. " 45 | "Do not add source to conda.pth.")) 46 | p.add_argument('-b', '--build_ext', 47 | action='store_true', 48 | help=("Build extensions inplace, invoking: " 49 | "python setup.py build_ext --inplace; " 50 | "add to conda.pth; relink runtime libraries to " 51 | "environment's lib/.")) 52 | p.add_argument('-c', '--clean', 53 | action='store_true', 54 | help=("Invoke clean on setup.py: " 55 | "python setup.py clean " 56 | "use with build_ext to clean before building.")) 57 | p.add_argument('-u', '--uninstall', 58 | action='store_true', 59 | help=("Removes package if installed in 'development mode' " 60 | "by deleting path from conda.pth file. 
Ignore other " 61 | "options - just uninstall and exit")) 62 | 63 | add_parser_prefix(p) 64 | p.set_defaults(func=execute) 65 | 66 | args = p.parse_args() 67 | args_func(args, p) 68 | 69 | 70 | def sharedobjects_list(pkg_path): 71 | ''' 72 | return list of shared objects (*.so) found in pkg_path. 73 | 74 | :param pkg_path: look for shared objects to relink in pkg_path 75 | ''' 76 | bin_files = [] 77 | 78 | # only relevant for mac/linux 79 | pattern = '*.so' 80 | 81 | for d_f in walk(pkg_path): 82 | m = fnmatch.filter(d_f[2], pattern) 83 | if m: 84 | # list is not empty, append full path to binary, then add it 85 | # to bin_files list 86 | bin_files.extend([join(d_f[0], f) for f in m]) 87 | 88 | return bin_files 89 | 90 | 91 | def relink_sharedobjects(pkg_path, build_prefix): 92 | ''' 93 | invokes functions in post module to relink to libraries in conda env 94 | 95 | :param pkg_path: look for shared objects to relink in pkg_path 96 | :param build_prefix: path to conda environment which contains lib/. to find 97 | runtime libraries. 98 | 99 | .. note:: develop mode builds the extensions in place and makes a link to 100 | package in site-packages/. The build_prefix points to conda environment 101 | since runtime libraries should be loaded from environment's lib/. first 102 | ''' 103 | # find binaries in package dir and make them relocatable 104 | bin_files = sharedobjects_list(pkg_path) 105 | for b_file in bin_files: 106 | if sys.platform == 'darwin': 107 | mk_relative_osx(b_file, build_prefix) 108 | else: 109 | print("Nothing to do on Linux or Windows.") 110 | 111 | 112 | def write_to_conda_pth(sp_dir, pkg_path): 113 | ''' 114 | Append pkg_path to conda.pth in site-packages directory for current 115 | environment. Only add path if it doens't already exist. 116 | 117 | :param sp_dir: path to site-packages/. directory 118 | :param pkg_path: the package path to append to site-packes/. dir. 
119 | ''' 120 | c_file = join(sp_dir, 'conda.pth') 121 | with open(c_file, 'a') as f: 122 | with open(c_file, 'r') as cf: 123 | # make sure file exists, before we try to read from it hence nested 124 | # in append with block 125 | # expect conda.pth to be small so read it all in at once 126 | pkgs_in_dev_mode = cf.readlines() 127 | 128 | # only append pkg_path if it doesn't already exist in conda.pth 129 | if pkg_path + '\n' in pkgs_in_dev_mode: 130 | print("path exits, skipping " + pkg_path) 131 | else: 132 | f.write(pkg_path + '\n') 133 | print("added " + pkg_path) 134 | 135 | 136 | def get_site_pkg(prefix, py_ver): 137 | ''' 138 | Given the path to conda environment, find the site-packages directory 139 | 140 | :param prefix: path to conda environment. Look here for current 141 | environment's site-packages 142 | :returns: absolute path to site-packages directory 143 | ''' 144 | # get site-packages directory 145 | stdlib_dir = join(prefix, 'Lib' if sys.platform == 'win32' else 146 | 'lib/python%s' % py_ver) 147 | sp_dir = join(stdlib_dir, 'site-packages') 148 | 149 | return sp_dir 150 | 151 | 152 | def get_setup_py(path_): 153 | ''' Return full path to setup.py or exit if not found ''' 154 | # build path points to source dir, builds are placed in the 155 | setup_py = join(path_, 'setup.py') 156 | 157 | if not exists(setup_py): 158 | sys.exit("No setup.py found in {0}. 
Exiting.".format(path_)) 159 | 160 | return setup_py 161 | 162 | 163 | def clean(setup_py): 164 | ''' 165 | This invokes: 166 | $ python setup.py clean 167 | 168 | :param setup_py: path to setup.py 169 | ''' 170 | # first call setup.py clean 171 | cmd = ['python', setup_py, 'clean'] 172 | _check_call(cmd) 173 | print("Completed: " + " ".join(cmd)) 174 | print("===============================================") 175 | 176 | 177 | def build_ext(setup_py): 178 | ''' 179 | Define a develop function - similar to build function 180 | todo: need to test on win32 and linux 181 | 182 | It invokes: 183 | $ python setup.py build_ext --inplace 184 | 185 | :param setup_py: path to setup.py 186 | ''' 187 | 188 | # next call setup.py develop 189 | cmd = ['python', setup_py, 'build_ext', '--inplace'] 190 | _check_call(cmd) 191 | print("Completed: " + " ".join(cmd)) 192 | print("===============================================") 193 | 194 | 195 | def uninstall(sp_dir, pkg_path): 196 | ''' 197 | Look for pkg_path in conda.pth file in site-packages directory and remove 198 | it. If pkg_path is not found in conda.pth, it means package is not 199 | installed in 'development mode' via conda develop. 200 | 201 | :param sp_dir: path to site-packages/. directory 202 | :param pkg_path: the package path to be uninstalled. 
203 | ''' 204 | o_c_pth = join(sp_dir, 'conda.pth') 205 | n_c_pth = join(sp_dir, 'conda.pth.temp') 206 | found = False 207 | with open(n_c_pth, 'w') as new_c: 208 | with open(o_c_pth, 'r') as orig_c: 209 | for line in orig_c: 210 | if line != pkg_path + '\n': 211 | new_c.write(line) 212 | else: 213 | print("uninstalled: " + pkg_path) 214 | found = True 215 | 216 | if not found: 217 | print("conda.pth does not contain path: " + pkg_path) 218 | print("package not installed via conda develop") 219 | 220 | shutil.move(n_c_pth, o_c_pth) 221 | 222 | 223 | def execute(args, parser): 224 | prefix = get_prefix(args) 225 | if not isdir(prefix): 226 | sys.exit("""\ 227 | Error: environment does not exist: %s 228 | # 229 | # Use 'conda create' to create the environment first. 230 | #""" % prefix) 231 | for package in linked(prefix): 232 | name, ver, _ = package .rsplit('-', 2) 233 | if name == 'python': 234 | py_ver = ver[:3] # x.y 235 | break 236 | else: 237 | raise RuntimeError("python is not installed in %s" % prefix) 238 | 239 | # current environment's site-packages directory 240 | sp_dir = get_site_pkg(prefix, py_ver) 241 | 242 | for path in args.source: 243 | pkg_path = abspath(expanduser(path)) 244 | 245 | if args.uninstall: 246 | # uninstall then exit - does not do any other operations 247 | uninstall(sp_dir, pkg_path) 248 | sys.exit(0) 249 | 250 | if args.clean or args.build_ext: 251 | setup_py = get_setup_py(pkg_path) 252 | if args.clean: 253 | clean(setup_py) 254 | if not args.build_ext: 255 | sys.exit(0) 256 | 257 | # build extensions before adding to conda.pth 258 | if args.build_ext: 259 | build_ext(setup_py) 260 | 261 | if not args.no_pth_file: 262 | write_to_conda_pth(sp_dir, pkg_path) 263 | 264 | # go through the source looking for compiled extensions and make sure 265 | # they use the conda environment for loading libraries at runtime 266 | relink_sharedobjects(pkg_path, prefix) 267 | print("completed operation for: " + pkg_path) 268 | 269 | if __name__ == 
# ---------------------------------------------------------------------------
# conda_build/main_skeleton.py
# ---------------------------------------------------------------------------
# (c) Continuum Analytics, Inc. / http://continuum.io
# All Rights Reserved
#
# conda is distributed under the terms of the BSD 3-clause license.
# Consult LICENSE.txt or http://opensource.org/licenses/BSD-3-Clause.

from __future__ import absolute_import, division, print_function

from conda.config import default_python
from conda_build.main_build import args_func
from conda.cli.conda_argparse import ArgumentParser
from conda.cli.common import Completer


class PyPIPackagesCompleter(Completer):
    """Tab-completion source: the full lower-cased PyPI package list."""

    def __init__(self, prefix, parsed_args, **kwargs):
        self.prefix = prefix
        self.parsed_args = parsed_args

    def _get_items(self):
        # Imported lazily so completion setup does not pay for the client.
        from conda_build.pypi import get_xmlrpc_client
        args = self.parsed_args
        client = get_xmlrpc_client(getattr(args, 'pypi_url', 'https://pypi.python.org/pypi'))
        return [i.lower() for i in client.list_packages()]


class CRANPackagesCompleter(Completer):
    """Tab-completion source: CRAN package names, plus 'r-' prefixed forms."""

    def __init__(self, prefix, parsed_args, **kwargs):
        self.prefix = prefix
        self.parsed_args = parsed_args

    def _get_items(self):
        from conda_build.cran import get_cran_metadata
        args = self.parsed_args
        cran_url = getattr(args, 'cran_url', 'http://cran.r-project.org/')
        output_dir = getattr(args, 'output_dir', '.')
        cran_metadata = get_cran_metadata(cran_url, output_dir, verbose=False)
        return [i.lower() for i in cran_metadata] + ['r-%s' % i.lower() for i
                                                     in cran_metadata]


def main():
    """Build the ``conda skeleton`` parser (pypi/cpan/cran) and dispatch."""
    p = ArgumentParser(
        description="""
Generates a boilerplate/skeleton recipe, which you can then edit to create a
full recipe. Some simple skeleton recipes may not even need edits.
        """,
        epilog="""
Run --help on the subcommands like 'conda skeleton pypi --help' to see the
options available.
        """,
    )

    repos = p.add_subparsers(
        dest="repo"
    )

    pypi_example = """
Examples:

Create a recipe for the sympy package:

    conda skeleton pypi sympy

Create a recipes for the flake8 package and all its dependencies:

    conda skeleton pypi --recursive flake8

Use the --pypi-url flag to point to a PyPI mirror url:

    conda skeleton pypi --pypi-url <mirror-url> package_name
    """

    pypi = repos.add_parser(
        "pypi",
        help="""
Create recipe skeleton for packages hosted on the Python Packaging Index
(PyPI) (pypi.python.org).
        """,
        epilog=pypi_example,
    )
    pypi.add_argument(
        "packages",
        action="store",
        nargs='+',
        help="""PyPi packages to create recipe skeletons for.
        You can also specify package[extra,...] features.""",
    ).completer = PyPIPackagesCompleter
    # NOTE(review): nargs=1 makes the parsed value a one-element list while
    # the default is a plain string; downstream code must handle both shapes.
    # Consider dropping nargs=1 here and on the other single-value options --
    # verify against conda_build.pypi/cpan/cran before changing.
    pypi.add_argument(
        "--output-dir",
        action="store",
        nargs=1,
        help="Directory to write recipes to (default: %(default)s).",
        default=".",
    )
    pypi.add_argument(
        "--version",
        action="store",
        nargs=1,
        help="Version to use. Applies to all packages.",
    )
    pypi.add_argument(
        "--all-urls",
        action="store_true",
        help="""Look at all URLs, not just source URLs. Use this if it can't
        find the right URL.""",
    )
    pypi.add_argument(
        "--pypi-url",
        action="store",
        default='https://pypi.python.org/pypi',
        help="URL to use for PyPI (default: %(default)s).",
    )
    pypi.add_argument(
        "--no-prompt",
        action="store_true",
        default=False,
        dest="noprompt",
        help="""Don't prompt the user on ambiguous choices. Instead, make the
        best possible choice and continue."""
    )
    pypi.add_argument(
        "--all-extras",
        action="store_true",
        default=False,
        help="Add all extra feature requirements. Applies to all packages.",
    )
    pypi.add_argument(
        "--recursive",
        action='store_true',
        help='Create recipes for dependencies if they do not already exist.'
    )
    pypi.add_argument(
        "--version-compare",
        action='store_true',
        help="""Compare the package version of the recipe with the one available
        on PyPI."""
    )
    pypi.add_argument(
        "--python-version",
        action='store',
        default=default_python,
        help="""Version of Python to use to run setup.py. Default is %(default)s.""",
        choices=['2.6', '2.7', '3.3', '3.4'],
    )

    pypi.add_argument(
        "--manual-url",
        action='store_true',
        default=False,
        help="Manually choose source url when more than one urls are present." +
             "Default is the one with least source size."
    )

    pypi.add_argument(
        "--noarch-python",
        action='store_true',
        default=False,
        help="Creates recipe as noarch python"
    )

    cpan = repos.add_parser(
        "cpan",
        help="""
Create recipe skeleton for packages hosted on the Comprehensive Perl Archive
Network (CPAN) (cpan.org).
        """,
    )
    cpan.add_argument(
        "packages",
        action="store",
        nargs='+',
        help="CPAN packages to create recipe skeletons for.",
    )
    cpan.add_argument(
        "--output-dir",
        help="Directory to write recipes to (default: %(default)s).",
        default=".",
    )
    cpan.add_argument(
        "--version",
        help="Version to use. Applies to all packages.",
    )
    cpan.add_argument(
        "--meta-cpan-url",
        action="store",
        nargs=1,
        default='http://api.metacpan.org',
        help="URL to use for MetaCPAN API.",
    )
    cpan.add_argument(
        "--recursive",
        action='store_true',
        help='Create recipes for dependencies if they do not already exist.')

    cran = repos.add_parser(
        "cran",
        help="""
Create recipe skeleton for packages hosted on the Comprehensive R Archive
Network (CRAN) (cran.r-project.org).
        """,
    )
    cran.add_argument(
        "packages",
        action="store",
        nargs='*',
        help="""CRAN packages to create recipe skeletons for.""",
    ).completer = CRANPackagesCompleter
    cran.add_argument(
        "--output-dir",
        action="store",
        nargs=1,
        help="Directory to write recipes to (default: %(default)s).",
        default=".",
    )
    cran.add_argument(
        "--version",
        action="store",
        nargs=1,
        help="Version to use. Applies to all packages.",
    )
    cran.add_argument(
        "--git-tag",
        action="store",
        nargs=1,
        help="Git tag to use for GitHub recipes.",
    )
    cran.add_argument(
        "--all-urls",
        action="store_true",
        help="""Look at all URLs, not just source URLs. Use this if it can't
        find the right URL.""",
    )
    cran.add_argument(
        "--cran-url",
        action="store",
        default='http://cran.r-project.org/',
        help="URL to use for CRAN (default: %(default)s).",
    )
    cran.add_argument(
        "--recursive",
        action='store_true',
        dest='recursive',
        help='Create recipes for dependencies if they do not already exist.',
    )
    cran.add_argument(
        "--no-recursive",
        action='store_false',
        dest='recursive',
        help="Don't create recipes for dependencies if they do not already exist.",
    )
    cran.add_argument(
        '--no-archive',
        action='store_false',
        dest='archive',
        help="Don't include an Archive download url.",
    )
    cran.add_argument(
        "--version-compare",
        action='store_true',
        help="""Compare the package version of the recipe with the one available
        on CRAN. Exits 1 if a newer version is available and 0 otherwise."""
    )
    cran.add_argument(
        "--update-outdated",
        action="store_true",
        help="""Update outdated packages in the output directory (set by
        --output-dir). If packages are given, they are updated; otherwise, all
        recipes in the output directory are updated.""",
    )
    p.set_defaults(func=execute)

    args = p.parse_args()
    args_func(args, p)


def execute(args, parser):
    """Dispatch to the repository-specific skeleton generator."""
    import conda_build.pypi as pypi
    import conda_build.cpan as cpan
    import conda_build.cran as cran
    from conda.lock import Locked
    from conda_build.config import config

    if not args.repo:
        parser.print_help()
        # No subcommand given: bail out here instead of falling through and
        # taking the build lock only to do nothing (the original fell through).
        return
    with Locked(config.croot):
        if args.repo == "pypi":
            pypi.main(args, parser)
        elif args.repo == "cpan":
            cpan.main(args, parser)
        elif args.repo == 'cran':
            cran.main(args, parser)


if __name__ == '__main__':
    main()


# ---------------------------------------------------------------------------
# conda_build/_version.py
# ---------------------------------------------------------------------------

# This file helps to compute a version number in source trees obtained from
# git-archive tarball (such as those provided by githubs download-from-tag
# feature). Distribution tarballs (built by setup.py sdist) and build
# directories (produced by setup.py build) will contain a much shorter file
# that just contains the computed version number.

# This file is released into the public domain.
# Generated by
# versioneer-0.14 (https://github.com/warner/python-versioneer)

import errno
import os
import re
import subprocess
import sys

# these strings will be replaced by git during git-archive
git_refnames = " (HEAD -> master)"
git_full = "7ac50f50b746f0ef88e4e7085605e051933214d8"

# these strings are filled in when 'setup.py versioneer' creates _version.py
tag_prefix = ""
parentdir_prefix = "conda-build-"
versionfile_source = "conda_build/_version.py"


def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False):
    """Try each candidate command name in *commands* with *args*.

    Returns the decoded, stripped stdout of the first command that can be
    spawned, or None when none can be found or the command exits non-zero.
    """
    assert isinstance(commands, list)
    p = None
    for c in commands:
        try:
            # remember shell=False, so use git.cmd on windows, not just git
            p = subprocess.Popen([c] + args, cwd=cwd, stdout=subprocess.PIPE,
                                 stderr=(subprocess.PIPE if hide_stderr
                                         else None))
            break
        except EnvironmentError:
            e = sys.exc_info()[1]
            if e.errno == errno.ENOENT:
                # this candidate is not installed; try the next one
                continue
            if verbose:
                print("unable to run %s" % args[0])
                print(e)
            return None
    else:
        if verbose:
            print("unable to find command, tried %s" % (commands,))
        return None
    stdout = p.communicate()[0].strip()
    if sys.version_info[0] >= 3:
        stdout = stdout.decode()
    if p.returncode != 0:
        if verbose:
            print("unable to run %s (error)" % args[0])
        return None
    return stdout


def versions_from_parentdir(parentdir_prefix, root, verbose=False):
    """Derive the version from the name of the directory containing *root*.

    Source tarballs conventionally unpack into a directory that includes
    both the project name and a version string.
    """
    dirname = os.path.basename(root)
    if not dirname.startswith(parentdir_prefix):
        if verbose:
            print("guessing rootdir is '%s', but '%s' doesn't start with "
                  "prefix '%s'" % (root, dirname, parentdir_prefix))
        return None
    return {"version": dirname[len(parentdir_prefix):], "full": ""}


def git_get_keywords(versionfile_abs):
    """Extract the git_refnames/git_full keyword values from a _version.py.

    The code embedded in _version.py can just fetch the value of these
    keywords. When used from setup.py, we don't want to import _version.py,
    so we do it with a regexp instead. This function is not used from
    _version.py.
    """
    keywords = {}
    try:
        # 'with' guarantees the handle is closed even if a non-EnvironmentError
        # escapes mid-read (the original open/close pair could leak it).
        with open(versionfile_abs, "r") as f:
            for line in f.readlines():
                if line.strip().startswith("git_refnames ="):
                    mo = re.search(r'=\s*"(.*)"', line)
                    if mo:
                        keywords["refnames"] = mo.group(1)
                if line.strip().startswith("git_full ="):
                    mo = re.search(r'=\s*"(.*)"', line)
                    if mo:
                        keywords["full"] = mo.group(1)
    except EnvironmentError:
        pass
    return keywords


def git_versions_from_keywords(keywords, tag_prefix, verbose=False):
    """Compute a version dict from expanded git-archive keyword values."""
    if not keywords:
        return {}  # keyword-finding function failed to find keywords
    refnames = keywords["refnames"].strip()
    if refnames.startswith("$Format"):
        if verbose:
            print("keywords are unexpanded, not using")
        return {}  # unexpanded, so not in an unpacked git-archive tarball
    refs = set([r.strip() for r in refnames.strip("()").split(",")])
    # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of
    # just "foo-1.0". If we see a "tag: " prefix, prefer those.
    TAG = "tag: "
    tags = set([r[len(TAG):] for r in refs if r.startswith(TAG)])
    if not tags:
        # Either we're using git < 1.8.3, or there really are no tags. We use
        # a heuristic: assume all version tags have a digit. The old git %d
        # expansion behaves like git log --decorate=short and strips out the
        # refs/heads/ and refs/tags/ prefixes that would let us distinguish
        # between branches and tags. By ignoring refnames without digits, we
        # filter out many common branch names like "release" and
        # "stabilization", as well as "HEAD" and "master".
        tags = set([r for r in refs if re.search(r'\d', r)])
        if verbose:
            print("discarding '%s', no digits" % ",".join(refs - tags))
    if verbose:
        print("likely tags: %s" % ",".join(sorted(tags)))
    for ref in sorted(tags):
        # sorting will prefer e.g. "2.0" over "2.0rc1"
        if ref.startswith(tag_prefix):
            r = ref[len(tag_prefix):]
            if verbose:
                print("picking %s" % r)
            return {"version": r,
                    "full": keywords["full"].strip()}
    # no suitable tags, so version is "0+unknown", but full hex is still there
    if verbose:
        print("no suitable tags, using unknown + full revision id")
    return {"version": "0+unknown",
            "full": keywords["full"].strip()}


def git_parse_vcs_describe(git_describe, tag_prefix, verbose=False):
    """Parse 'git describe' output into a (version, dirty) pair.

    Input is TAG-NUM-gHEX[-dirty] or HEX[-dirty]; TAG might have hyphens.
    """
    # dirty
    dirty = git_describe.endswith("-dirty")
    if dirty:
        git_describe = git_describe[:git_describe.rindex("-dirty")]
    dirty_suffix = ".dirty" if dirty else ""

    # now we have TAG-NUM-gHEX or HEX

    if "-" not in git_describe:  # just HEX
        return "0+untagged.g" + git_describe + dirty_suffix, dirty

    # just TAG-NUM-gHEX
    mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe)
    if not mo:
        # unparseable. Maybe git-describe is misbehaving?
        return "0+unparseable" + dirty_suffix, dirty

    # tag
    full_tag = mo.group(1)
    if not full_tag.startswith(tag_prefix):
        if verbose:
            fmt = "tag '%s' doesn't start with prefix '%s'"
            print(fmt % (full_tag, tag_prefix))
        return None, dirty
    tag = full_tag[len(tag_prefix):]

    # distance: number of commits since tag
    distance = int(mo.group(2))

    # commit: short hex revision ID
    commit = mo.group(3)

    # now build up version string, with post-release "local version
    # identifier". Our goal: TAG[+NUM.gHEX[.dirty]] . Note that if you get a
    # tagged build and then dirty it, you'll get TAG+0.gHEX.dirty . So you
    # can always test version.endswith(".dirty").
    version = tag
    if distance or dirty:
        version += "+%d.g%s" % (distance, commit) + dirty_suffix

    return version, dirty


def git_versions_from_vcs(tag_prefix, root, verbose=False):
    """Ask git itself for a version dict; {} if not in a git checkout.

    This runs 'git' from the root of the source tree. This only gets called
    if the git-archive 'subst' keywords were *not* expanded, and
    _version.py hasn't already been rewritten with a short version string,
    meaning we're inside a checked out source tree.
    """
    if not os.path.exists(os.path.join(root, ".git")):
        if verbose:
            print("no .git in %s" % root)
        return {}  # get_versions() will try next method

    GITS = ["git"]
    if sys.platform == "win32":
        GITS = ["git.cmd", "git.exe"]
    # if there is a tag, this yields TAG-NUM-gHEX[-dirty]
    # if there are no tags, this yields HEX[-dirty] (no NUM)
    stdout = run_command(GITS, ["describe", "--tags", "--dirty",
                                "--always", "--long"],
                         cwd=root)
    # --long was added in git-1.5.5
    if stdout is None:
        return {}  # try next method
    version, dirty = git_parse_vcs_describe(stdout, tag_prefix, verbose)

    # build "full", which is FULLHEX[.dirty]
    stdout = run_command(GITS, ["rev-parse", "HEAD"], cwd=root)
    if stdout is None:
        return {}
    full = stdout.strip()
    if dirty:
        full += ".dirty"

    return {"version": version, "full": full}
232 | for i in versionfile_source.split('/'): 233 | root = os.path.dirname(root) 234 | except NameError: 235 | return default 236 | 237 | return (git_versions_from_vcs(tag_prefix, root, verbose) 238 | or versions_from_parentdir(parentdir_prefix, root, verbose) 239 | or default) 240 | -------------------------------------------------------------------------------- /conda_build/convert.py: -------------------------------------------------------------------------------- 1 | # (c) 2012-2014 Continuum Analytics, Inc. / http://continuum.io 2 | # All Rights Reserved 3 | # 4 | # conda is distributed under the terms of the BSD 3-clause license. 5 | # Consult LICENSE.txt or http://opensource.org/licenses/BSD-3-Clause. 6 | 7 | """ 8 | Tools for converting conda packages 9 | 10 | """ 11 | from __future__ import absolute_import, division, print_function 12 | import re 13 | import tarfile 14 | import json 15 | 16 | from copy import deepcopy 17 | 18 | from conda.compat import PY3 19 | if PY3: 20 | from io import StringIO, BytesIO 21 | else: 22 | from cStringIO import StringIO 23 | BytesIO = StringIO 24 | 25 | 26 | BAT_PROXY = """\ 27 | @echo off 28 | set PYFILE=%~f0 29 | set PYFILE=%PYFILE:~0,-4%-script.py 30 | "%~f0\\..\\..\\python.exe" "%PYFILE%" %* 31 | """ 32 | 33 | libpy_pat = re.compile( 34 | r'(lib/python\d\.\d|Lib)' 35 | r'/(site-packages|lib-dynload)/(\S+?)(\.cpython-\d\dm)?\.(so|pyd)') 36 | 37 | def has_cext(t, show=False): 38 | matched = False 39 | for m in t.getmembers(): 40 | match = libpy_pat.match(m.path) 41 | if match: 42 | if show: 43 | x = match.group(3) 44 | print("import", x.replace('/', '.')) 45 | matched = True 46 | else: 47 | return True 48 | return matched 49 | 50 | def has_nonpy_entry_points(t, unix_to_win=True, show=False, quiet=False): 51 | """ 52 | If unix_to_win=True, assumes a Unix type package (i.e., entry points 53 | are in the bin directory). 
54 | 55 | unix_to_win=False means win to unix, which is not implemented yet, so it 56 | will only succeed if there are no entry points. 57 | """ 58 | if not quiet: 59 | print("Checking entry points") 60 | bindir = 'bin/' if unix_to_win else 'Scripts/' 61 | matched = False 62 | for m in t.getmembers(): 63 | if m.path.startswith(bindir): 64 | if not unix_to_win: 65 | if show: 66 | print("Entry points with Windows to Unix are not yet " + 67 | "supported") 68 | return True 69 | r = t.extractfile(m).read() 70 | try: 71 | r = r.decode('utf-8') 72 | except UnicodeDecodeError: 73 | if show: 74 | print("Binary file %s" % m.path) 75 | matched = True 76 | else: 77 | firstline = r.splitlines()[0] 78 | if 'python' not in firstline: 79 | if show: 80 | print("Non-Python plaintext file %s" % m.path) 81 | matched = True 82 | else: 83 | if show: 84 | print("Python plaintext file %s" % m.path) 85 | return matched 86 | 87 | 88 | def tar_update(source, dest, file_map, verbose=True, quiet=False): 89 | """ 90 | update a tarball, i.e. repack it and insert/update or remove some 91 | archives according file_map, which is a dictionary mapping archive names 92 | to either: 93 | 94 | - None: meaning the archive will not be contained in the new tarball 95 | 96 | - a file path: meaning the archive in the new tarball will be this 97 | file. Should point to an actual file on the filesystem. 98 | 99 | - a TarInfo object: Useful when mapping from an existing archive. The 100 | file path in the archive will be the path in the TarInfo object. To 101 | change the path, mutate its .path attribute. The data will be used 102 | from the source tar file. 103 | 104 | - a tuple (TarInfo, data): Use this is you want to add new data to the 105 | dest tar file. 
106 | 107 | Files in the source that aren't in the map will moved without any changes 108 | """ 109 | 110 | # s -> t 111 | if isinstance(source, tarfile.TarFile): 112 | s = source 113 | else: 114 | if not source.endswith(('.tar', '.tar.bz2')): 115 | raise TypeError("path must be a .tar or .tar.bz2 path") 116 | s = tarfile.open(source) 117 | if isinstance(dest, tarfile.TarFile): 118 | t = dest 119 | else: 120 | t = tarfile.open(dest, 'w:bz2') 121 | 122 | try: 123 | for m in s.getmembers(): 124 | p = m.path 125 | if p in file_map: 126 | if file_map[p] is None: 127 | if verbose: 128 | print('removing %r' % p) 129 | else: 130 | if verbose: 131 | print('updating %r with %r' % (p, file_map[p])) 132 | if isinstance(file_map[p], tarfile.TarInfo): 133 | t.addfile(file_map[p], s.extractfile(file_map[p])) 134 | elif isinstance(file_map[p], tuple): 135 | t.addfile(*file_map[p]) 136 | else: 137 | t.add(file_map[p], p) 138 | continue 139 | if not quiet: 140 | print("keeping %r" % p) 141 | t.addfile(m, s.extractfile(p)) 142 | 143 | s_names_set = set(m.path for m in s.getmembers()) 144 | # This sorted is important! 145 | for p in sorted(file_map): 146 | if p not in s_names_set: 147 | if verbose: 148 | print('inserting %r with %r' % (p, file_map[p])) 149 | if isinstance(file_map[p], tarfile.TarInfo): 150 | t.addfile(file_map[p], s.extractfile(file_map[p])) 151 | elif isinstance(file_map[p], tuple): 152 | t.addfile(*file_map[p]) 153 | else: 154 | t.add(file_map[p], p) 155 | finally: 156 | t.close() 157 | 158 | path_mapping_bat_proxy = [ 159 | (re.compile(r'bin/(.*)(\.py)'), r'Scripts/\1.bat'), 160 | (re.compile(r'bin/(.*)'), r'Scripts/\1.bat'), 161 | ] 162 | 163 | path_mapping_unix_windows = [ 164 | (r'lib/python{pyver}/', r'Lib/'), 165 | # Handle entry points already ending in .py. This is OK because these are 166 | # parsed in order. 
Only concern is if there are both script and script.py, 167 | # which seems unlikely 168 | (r'bin/(.*)(\.py)', r'Scripts/\1-script.py'), 169 | (r'bin/(.*)', r'Scripts/\1-script.py'), 170 | ] 171 | 172 | path_mapping_windows_unix = [ 173 | (r'Lib/', r'lib/python{pyver}/'), 174 | (r'Scripts/', r'bin/'), # Not supported right now anyway 175 | ] 176 | 177 | pyver_re = re.compile(r'python\s+(\d.\d)') 178 | 179 | def get_pure_py_file_map(t, platform): 180 | info = json.loads(t.extractfile('info/index.json').read().decode('utf-8')) 181 | source_plat = info['platform'] 182 | source_type = 'unix' if source_plat in {'osx', 'linux'} else 'win' 183 | dest_plat, dest_arch = platform.split('-') 184 | dest_type = 'unix' if dest_plat in {'osx', 'linux'} else 'win' 185 | 186 | files = t.extractfile('info/files').read().decode("utf-8") 187 | 188 | if source_type == 'unix' and dest_type == 'win': 189 | mapping = path_mapping_unix_windows 190 | elif source_type == 'win' and dest_type == 'unix': 191 | mapping = path_mapping_windows_unix 192 | else: 193 | mapping = [] 194 | 195 | newinfo = info.copy() 196 | newinfo['platform'] = dest_plat 197 | newinfo['arch'] = 'x86_64' if dest_arch == '64' else 'x86' 198 | newinfo['subdir'] = platform 199 | 200 | pythons = list(filter(None, [pyver_re.match(p) for p in info['depends']])) 201 | if len(pythons) > 1: 202 | raise RuntimeError("Found more than one Python dependency in package %s" 203 | % t.name) 204 | if len(pythons) == 0: 205 | # not a Python package 206 | mapping = [] 207 | else: 208 | pyver = pythons[0].group(1) 209 | 210 | mapping = [(re.compile(i[0].format(pyver=pyver)), 211 | i[1].format(pyver=pyver)) for i in mapping] 212 | 213 | members = t.getmembers() 214 | file_map = {} 215 | for member in members: 216 | # Update metadata 217 | if member.path == 'info/index.json': 218 | newmember = tarfile.TarInfo('info/index.json') 219 | if PY3: 220 | newbytes = bytes(json.dumps(newinfo), 'utf-8') 221 | else: 222 | newbytes = json.dumps(newinfo) 
223 | newmember.size = len(newbytes) 224 | file_map['info/index.json'] = (newmember, BytesIO(newbytes)) 225 | continue 226 | elif member.path == 'info/files': 227 | # We have to do this at the end when we have all the files 228 | filemember = deepcopy(member) 229 | continue 230 | elif member.path == 'info/has_prefix': 231 | if source_type == 'unix' and dest_type == 'win': 232 | # has_prefix is not needed on Windows 233 | file_map['info/has_prefix'] = None 234 | 235 | # Move paths 236 | oldpath = member.path 237 | for old, new in mapping: 238 | newpath = old.sub(new, oldpath) 239 | if oldpath in file_map: 240 | # Already been handled 241 | break 242 | if newpath != oldpath: 243 | newmember = deepcopy(member) 244 | newmember.path = newpath 245 | assert member.path == oldpath 246 | file_map[oldpath] = None 247 | file_map[newpath] = newmember 248 | files = files.replace(oldpath, newpath) 249 | 250 | # Make Windows compatible entry-points 251 | batseen = set() 252 | if source_type == 'unix' and dest_type == 'win': 253 | for old, new in path_mapping_bat_proxy: 254 | newpath = old.sub(new, oldpath) 255 | if oldpath in batseen: 256 | break 257 | if newpath != oldpath: 258 | newmember = tarfile.TarInfo(newpath) 259 | if PY3: 260 | data = bytes(BAT_PROXY.replace('\n', '\r\n'), 'ascii') 261 | else: 262 | data = BAT_PROXY.replace('\n', '\r\n') 263 | newmember.size = len(data) 264 | file_map[newpath] = newmember, BytesIO(data) 265 | batseen.add(oldpath) 266 | files = files + newpath + "\n" 267 | 268 | files = '\n'.join(sorted(files.splitlines())) + '\n' 269 | if PY3: 270 | files = bytes(files, 'utf-8') 271 | filemember.size = len(files) 272 | file_map['info/files'] = filemember, BytesIO(files) 273 | 274 | return file_map 275 | -------------------------------------------------------------------------------- /conda_build/source.py: -------------------------------------------------------------------------------- 1 | from __future__ import absolute_import, division, print_function 
2 | 3 | import os 4 | import sys 5 | from os.path import join, isdir, isfile, abspath, expanduser 6 | from shutil import copytree, copy2 7 | 8 | from conda.fetch import download 9 | from conda.utils import hashsum_file 10 | 11 | from conda_build import external 12 | from conda_build.config import config 13 | from conda_build.utils import rm_rf, tar_xf, unzip, execute 14 | 15 | 16 | SRC_CACHE = join(config.croot, 'src_cache') 17 | GIT_CACHE = join(config.croot, 'git_cache') 18 | HG_CACHE = join(config.croot, 'hg_cache') 19 | SVN_CACHE = join(config.croot, 'svn_cache') 20 | WORK_DIR = join(config.croot, 'work') 21 | 22 | 23 | def get_dir(): 24 | if not isdir(WORK_DIR): 25 | os.makedirs(WORK_DIR) 26 | lst = [fn for fn in os.listdir(WORK_DIR) if not fn.startswith('.')] 27 | if len(lst) == 1: 28 | dir_path = join(WORK_DIR, lst[0]) 29 | if isdir(dir_path): 30 | return dir_path 31 | return WORK_DIR 32 | 33 | 34 | def download_to_cache(meta): 35 | ''' Download a source to the local cache. ''' 36 | print('Source cache directory is: %s' % SRC_CACHE) 37 | if not isdir(SRC_CACHE): 38 | os.makedirs(SRC_CACHE) 39 | 40 | fn = meta['fn'] 41 | path = join(SRC_CACHE, fn) 42 | 43 | if isfile(path): 44 | print('Found source in cache: %s' % fn) 45 | else: 46 | print('Downloading source to cache: %s' % fn) 47 | if not isinstance(meta['url'], list): 48 | meta['url'] = [meta['url']] 49 | 50 | for url in meta['url']: 51 | try: 52 | print("Downloading %s" % url) 53 | download(url, path) 54 | except RuntimeError as e: 55 | print("Error: %s" % str(e).strip(), file=sys.stderr) 56 | else: 57 | print("Success") 58 | break 59 | else: # no break 60 | raise RuntimeError("Could not download %s" % fn) 61 | 62 | for tp in 'md5', 'sha1', 'sha256': 63 | if meta.get(tp) and hashsum_file(path, tp) != meta[tp]: 64 | raise RuntimeError("%s mismatch: '%s' != '%s'" % 65 | (tp.upper(), hashsum_file(path, tp), meta[tp])) 66 | 67 | return path 68 | 69 | 70 | def unpack(meta): 71 | ''' Uncompress a downloaded 
source. ''' 72 | src_path = download_to_cache(meta) 73 | 74 | os.makedirs(WORK_DIR) 75 | print("Extracting download") 76 | if src_path.lower().endswith(('.tar.gz', '.tar.bz2', '.tgz', '.tar.xz', 77 | '.tar', 'tar.z')): 78 | tar_xf(src_path, WORK_DIR) 79 | elif src_path.lower().endswith('.zip'): 80 | unzip(src_path, WORK_DIR) 81 | else: 82 | # In this case, the build script will need to deal with unpacking the source 83 | print("Warning: Unrecognized source format. Source file will be copied to the SRC_DIR") 84 | copy2(src_path, WORK_DIR) 85 | 86 | 87 | def git_source(meta, recipe_dir): 88 | ''' Download a source from Git repo. ''' 89 | if not isdir(GIT_CACHE): 90 | os.makedirs(GIT_CACHE) 91 | 92 | git = external.find_executable('git') 93 | if not git: 94 | sys.exit("Error: git is not installed") 95 | git_url = meta['git_url'] 96 | if git_url.startswith('.'): 97 | # It's a relative path from the conda recipe 98 | os.chdir(recipe_dir) 99 | git_dn = abspath(expanduser(git_url)) 100 | git_dn = "_".join(git_dn.split(os.path.sep)[1:]) 101 | else: 102 | git_dn = git_url.split(':')[-1].replace('/', '_') 103 | cache_repo = cache_repo_arg = join(GIT_CACHE, git_dn) 104 | if sys.platform == 'win32': 105 | is_cygwin = 'cygwin' in git.lower() 106 | cache_repo_arg = cache_repo_arg.replace('\\', '/') 107 | if is_cygwin: 108 | cache_repo_arg = '/cygdrive/c/' + cache_repo_arg[3:] 109 | 110 | # update (or create) the cache repo 111 | if isdir(cache_repo): 112 | execute([git, 'fetch'], cwd=cache_repo, check_exit_code=True) 113 | else: 114 | execute([git, 'clone', '--mirror', git_url, cache_repo_arg], 115 | cwd=recipe_dir, check_exit_code=True) 116 | assert isdir(cache_repo) 117 | 118 | # now clone into the work directory 119 | checkout = meta.get('git_rev') 120 | # if rev is not specified, and the git_url is local, 121 | # assume the user wants the current HEAD 122 | if not checkout and git_url.startswith('.'): 123 | stdout, _ = execute(["git", "rev-parse", "HEAD"], cwd=git_url) 124 | 
checkout = stdout.strip() 125 | 126 | if checkout: 127 | print('checkout: %r' % checkout) 128 | 129 | execute([git, 'clone', '--recursive', cache_repo_arg, WORK_DIR], 130 | check_exit_code=True) 131 | if checkout: 132 | execute([git, 'checkout', checkout], 133 | cwd=WORK_DIR, check_exit_code=True) 134 | 135 | git_info() 136 | return WORK_DIR 137 | 138 | 139 | def git_info(fo=None): 140 | ''' Print info about a Git repo. ''' 141 | assert isdir(WORK_DIR) 142 | 143 | # Ensure to explicitly set GIT_DIR as some Linux machines will not 144 | # properly execute without it. 145 | env = os.environ.copy() 146 | env['GIT_DIR'] = join(WORK_DIR, '.git') 147 | env = {str(key): str(value) for key, value in env.items()} 148 | for cmd, check_error in [('git log -n1', True), 149 | ('git describe --tags --dirty', False), 150 | ('git status', True)]: 151 | stdout, stderr = execute(cmd.split(), cwd=WORK_DIR, env=env, 152 | check_exit_code=check_error) 153 | if check_error and stderr and stderr.strip(): 154 | raise Exception("git error: %s" % stderr) 155 | if fo: 156 | fo.write(u'==> %s <==\n' % cmd) 157 | fo.write(stdout + u'\n') 158 | else: 159 | print(u'==> %s <==\n' % cmd) 160 | print(stdout + u'\n') 161 | 162 | 163 | def hg_source(meta): 164 | ''' Download a source from Mercurial repo. 
''' 165 | hg = external.find_executable('hg') 166 | if not hg: 167 | sys.exit('Error: hg not installed') 168 | hg_url = meta['hg_url'] 169 | if not isdir(HG_CACHE): 170 | os.makedirs(HG_CACHE) 171 | hg_dn = hg_url.split(':')[-1].replace('/', '_') 172 | cache_repo = join(HG_CACHE, hg_dn) 173 | if isdir(cache_repo): 174 | execute([hg, 'pull'], cwd=cache_repo, check_exit_code=True) 175 | else: 176 | execute([hg, 'clone', hg_url, cache_repo], check_exit_code=True) 177 | assert isdir(cache_repo) 178 | 179 | # now clone in to work directory 180 | update = meta.get('hg_tag') or 'tip' 181 | print('checkout: %r' % update) 182 | 183 | execute([hg, 'clone', cache_repo, WORK_DIR], 184 | check_exit_code=True) 185 | execute([hg, 'update', '-C', update], cwd=WORK_DIR, 186 | check_exit_code=True) 187 | return WORK_DIR 188 | 189 | 190 | 191 | def svn_source(meta): 192 | ''' Download a source from SVN repo. ''' 193 | def parse_bool(s): 194 | return str(s).lower().strip() in ('yes', 'true', '1', 'on') 195 | 196 | svn = external.find_executable('svn') 197 | if not svn: 198 | sys.exit("Error: svn is not installed") 199 | svn_url = meta['svn_url'] 200 | svn_revision = meta.get('svn_rev') or 'head' 201 | svn_ignore_externals = parse_bool(meta.get('svn_ignore_externals') or 'no') 202 | if not isdir(SVN_CACHE): 203 | os.makedirs(SVN_CACHE) 204 | svn_dn = svn_url.split(':', 1)[-1].replace('/', '_').replace(':', '_') 205 | cache_repo = join(SVN_CACHE, svn_dn) 206 | if svn_ignore_externals: 207 | extra_args = ['--ignore-externals'] 208 | else: 209 | extra_args = [] 210 | if isdir(cache_repo): 211 | execute([svn, 'up', '-r', svn_revision] + extra_args, 212 | cwd=cache_repo, check_exit_code=True) 213 | else: 214 | execute([svn, 'co', '-r', svn_revision] + extra_args + 215 | [svn_url, cache_repo], check_exit_code=True) 216 | assert isdir(cache_repo) 217 | 218 | # now copy into work directory 219 | copytree(cache_repo, WORK_DIR) 220 | return WORK_DIR 221 | 222 | 223 | def 
_ensure_unix_line_endings(path): 224 | """Replace windows line endings with Unix. Return path to modified file.""" 225 | out_path = path + "_unix" 226 | with open(path) as inputfile: 227 | with open(out_path, "w") as outputfile: 228 | for line in inputfile: 229 | outputfile.write(line.replace("\r\n", "\n")) 230 | return out_path 231 | 232 | def apply_patch(src_dir, path): 233 | print('Applying patch: %r' % path) 234 | if not isfile(path): 235 | sys.exit('Error: no such patch: %s' % path) 236 | 237 | patch = external.find_executable('patch') 238 | if patch is None: 239 | sys.exit("""\ 240 | Error: 241 | Did not find 'patch' in: %s 242 | You can install 'patch' using apt-get, yum (Linux), Xcode (MacOSX), 243 | or conda, cygwin (Windows), 244 | """ % (os.pathsep.join(external.dir_paths))) 245 | patch_args = ['-p0', '-i', path] 246 | if sys.platform == 'win32': 247 | patch_args[-1] = _ensure_unix_line_endings(path) 248 | 249 | execute([patch, ] + patch_args, cwd=src_dir, check_exit_code=True) 250 | if sys.platform == 'win32' and os.path.exists(patch_args[-1]): 251 | os.remove(patch_args[-1]) # clean up .patch_unix file 252 | 253 | 254 | def provide(recipe_dir, meta, patch=True): 255 | """ 256 | given a recipe_dir: 257 | - download (if necessary) 258 | - unpack 259 | - apply patches (if any) 260 | """ 261 | print("Removing old work directory") 262 | rm_rf(WORK_DIR) 263 | if 'fn' in meta: 264 | unpack(meta) 265 | elif 'git_url' in meta: 266 | git_source(meta, recipe_dir) 267 | elif 'hg_url' in meta: 268 | hg_source(meta) 269 | elif 'svn_url' in meta: 270 | svn_source(meta) 271 | elif 'path' in meta: 272 | print("Copying %s to %s" % (abspath(join(recipe_dir, meta.get('path'))), WORK_DIR)) 273 | copytree(abspath(join(recipe_dir, meta.get('path'))), WORK_DIR) 274 | else: # no source 275 | os.makedirs(WORK_DIR) 276 | 277 | if patch: 278 | src_dir = get_dir() 279 | for patch in meta.get('patches', []): 280 | apply_patch(src_dir, join(recipe_dir, patch)) 281 | 282 | 283 | if 
__name__ == '__main__': 284 | print(provide('.', 285 | {'url': 'http://pypi.python.org/packages/source/b/bitarray/bitarray-0.8.0.tar.gz', 286 | 'git_url': 'git@github.com:ilanschnell/bitarray.git', 287 | 'git_tag': '0.5.2'})) 288 | --------------------------------------------------------------------------------