├── .gitattributes ├── .gitignore ├── .travis.yml ├── CONTRIBUTING.md ├── LICENSE ├── MANIFEST.in ├── README.md ├── conda-build-all.recipe └── meta.yaml ├── conda_build_all ├── __init__.py ├── _version.py ├── artefact_destination.py ├── build.py ├── builder.py ├── cli.py ├── conda_interface.py ├── inspect_binstar.py ├── order_deps.py ├── resolved_distribution.py ├── tests │ ├── __init__.py │ ├── integration │ │ ├── __init__.py │ │ ├── test_builder.py │ │ ├── test_cli.py │ │ └── test_inspect_binstar.py │ └── unit │ │ ├── __init__.py │ │ ├── dummy_index.py │ │ ├── test_artefact_destination.py │ │ ├── test_builder.py │ │ ├── test_order_deps.py │ │ ├── test_resolved_distribution.py │ │ └── test_version_matrix.py └── version_matrix.py ├── requirements.txt ├── setup.cfg ├── setup.py └── versioneer.py /.gitattributes: -------------------------------------------------------------------------------- 1 | conda_build_all/_version.py export-subst 2 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | *.py[co] 2 | __pycache__ 3 | *.egg-info 4 | -------------------------------------------------------------------------------- /.travis.yml: -------------------------------------------------------------------------------- 1 | # The language in this case has no bearing - we are going to be making use of conda for a 2 | # python distribution for the scientific python stack. 
3 | language: python 4 | 5 | sudo: false 6 | 7 | env: 8 | global: 9 | - CONDA_INSTALL_LOCN="${HOME}/conda" 10 | # Secure BINSTAR_TOKEN for scitools/conda-build-all which can write to Obvious-CI-tests 11 | - secure: Jdnn46HEYC+3FdsK4HK4SBJhV6lFKhgD/ei1Tf5pgYXOs7Vw5kpKHdmRjyNjUCWk7Lbjuud5ILWZSV6+1Zn/kHCqIT4UDMbF/GtXRIaQYEf1P7R2WTw+OghcSM90RNBi1Aj996LHGRvCDR/VRArNyiTN8kjUsdp8mUyxuP1JM3duAdBd0SmWugVLUTKvb9sOzifO90kzBUtqeiHG4t6/8m3052pUWUiSxWJ10KVomuV0BsBvgcGajWOHvovHjqE+kTCpTxZaeTJL77ewN1ICtN2ytK5ZtnITwJbZiQ/MRybsZkQ6C+AEa8YwZZYoruGGome3KAgCuj+DVsx0klAfu4fVcAjGyMEYXsye1/Gu+2dXq2xkfLrz56cMYK3jG+pxMrUiRomXhrrUhvzqsDcnTHLDhJYixhAt8/T8IiiunENrapOP0jdE8Z+MS2hiPZNlnMbpQRac2XnQ+qHBn+618/s83eEtl3UA1YGd8AkaQtvaASoMwjBU5PBYScQrA0RPEdxgcX/eFnRFCFgZkioB5jo25rKuxmd5xufAQQzdc1ke3Bh4bp8pgB59s6paz6ucXSy06pBt1vVqXGHpEPkWx+6LIpk15PwwkDXhpeLNmAc4p+JBTATz3Rlhc70CAapt05P7bDTbCmGukDviSIrg5I7aWU81j4js48mxfRpXK1U= 12 | # Secure BINSTAR_TOKEN for pelson/conda-build-all which can write to Obvious-CI-tests 13 | - secure: Dz1ZWH583kXz725IUSU32en048qGnweQZqrONg9CmDZuBwb3hE6CZ/qpXlKxLD6pqrV1fRJLxhoJo+7wS828GVdg5nfGdw1TyJdSsIDVjXjVMKRGx4fJagodE83E1sjBgyFrh7z/giHmT65+mExS4lzGBM56fOoVrNk2+WTF31xsPAMimF1qLvNA9vBpOExjpxaugFaZwRdhWqYMHT2RtLUnFPjtOWvj0Q0MlLUjoXznTRTl6YLN2jyRbEg42R62goXgp6hAVQ7Dn/OfV4ax8pofiStwmMSFdyfx3Y60xEGAwopO5DzYNVqwO4t2lvoIq9jkQH3RQCtNjEZ8KcewuLym+RapRcidAwUwhaW3K7IDqbiSmaD5x6wgwZripfl+LHcWgXup0gK/eYVwrVh3hsa6ATwpc7/qMMSR1AEhHooUv6bNzASSeZmiBh/2gvq7Kob+WAa+RgwdCc1wOGsmWdicvLWSTwvnriHDrUYqS/uixV5A7C+o8VsnQC2Lcx24pO50NBhSSBpp73yHNmaQIwwEmxDEBUiFqtYz/V0peck+UQMHi+sgR/V8xB2dVQiyxaZI3/UAGtBqf3qtOGRiLQ3kFW8DAbi/2t19xk8c0FLf17ELTxURiFjTPQ78qmDDaI6MmK2bhi9IjTFBLLJU4m2ziW2l0MrTm4w3dwU7kls= 14 | matrix: 15 | - PYTHON=2.7 16 | - PYTHON=3.4 17 | # test against older conda-build 18 | - PYTHON=3.5 19 | EXTRA_DEPS='conda-build=2.0.*' 20 | - PYTHON=3.5 21 | CONDA_BUILD_ALL_TEST_ANACONDA_CLOUD=1 22 | - PYTHON=3.5 23 | CONDA_ORIGIN=https://repo.continuum.io/pkgs/main 24 | 25 | install: 26 | - mkdir -p 
${HOME}/cache/pkgs 27 | - "[ ! -f ${HOME}/cache/miniconda.sh ] && wget https://repo.continuum.io/miniconda/Miniconda3-latest-Linux-x86_64.sh -O ${HOME}/cache/miniconda.sh || :" 28 | - bash ${HOME}/cache/miniconda.sh -b -p ${CONDA_INSTALL_LOCN} && export PATH=${CONDA_INSTALL_LOCN}/bin:$PATH 29 | 30 | # Re-use the pacakges in the cache, and download any new ones into that location. 31 | - rm -rf ${CONDA_INSTALL_LOCN}/pkgs && ln -s ${HOME}/cache/pkgs ${CONDA_INSTALL_LOCN}/pkgs 32 | 33 | # Disable integration (not unit) testing if there is no BINSTAR_TOKEN defined. 34 | - if [ -z "$BINSTAR_TOKEN" -a -n "$CONDA_BUILD_ALL_TEST_ANACONDA_CLOUD" ]; then 35 | echo "BINSTAR_TOKEN is not defined, some of the intergation tests have been disabled. These will be re-enabled when merged." && 36 | export CONDA_BUILD_ALL_TEST_ANACONDA_CLOUD=0; 37 | fi 38 | 39 | # Now do the things we need to do to install it. 40 | - conda install -c conda-forge --file requirements.txt nose mock python=${PYTHON} ${EXTRA_DEPS} --yes --quiet 41 | - if [[ -n ${CONDA_ORIGIN} ]]; then conda install -yq -c ${CONDA_ORIGIN} conda conda-build; fi 42 | - python setup.py install 43 | - mkdir not_the_source_root && cd not_the_source_root 44 | 45 | script: 46 | - nosetests conda_build_all 47 | 48 | 49 | # We store the files that are downloaded from continuum.io, but not the environments that are created. 50 | cache: 51 | directories: 52 | - $HOME/cache 53 | before_cache: 54 | # Remove all untarred directories. 55 | - find $CONDA_INSTALL_LOCN/pkgs/ -mindepth 1 -maxdepth 1 -type d -exec rm -r {} \; 56 | -------------------------------------------------------------------------------- /CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | Contributing to conda-build-all 2 | 3 | In proposing contributions to this project, you are agreeing to the following contribution terms. 4 | 5 | Grant of Copyright Licence. 
Subject to the terms and conditions of this Agreement, You hereby grant to the Met Office and to recipients of software distributed by the Met Office a perpetual, worldwide, non-exclusive, royalty-free, irrevocable copyright licence to reproduce, prepare derivative works of, publicly display, publicly perform, sublicence, and distribute Your Contributions and such derivative works under the terms of the BSD 3-Clause licence: http://opensource.org/licenses/BSD-3-Clause 6 | 7 | Intellectual Property Infringement. If any third party makes any claim against You or any other entity, alleging that your Contribution, or the Work to which you have contributed, infringes the intellectual property rights of that third party , then You shall inform the Met Office within 5 Working Days of such claim in order for the Met Office to take all appropriate action it deems necessary in relation to the claim. 8 | 9 | You represent that you are legally entitled to grant the above licence. If your employer(s) has rights to intellectual property that you create that includes your Contributions, you represent that you have received permission to make Contributions on behalf of that employer, or that your employer has waived such rights for your Contributions to the Met Office. 10 | 11 | You represent that each of Your Contributions is Your original creation and that you have not assigned or otherwise given up your interest in the Contribution to any third party. You represent that Your Contribution submissions include complete details of any third-party licence or other restriction (including, but not limited to, related patents and trademarks) of which you are personally aware and which are associated with any part of Your Contributions. 12 | 13 | You (or Your employer(s)) agree to fully indemnify the Met Office in the event that either of the above representations are untrue. 
14 | 15 | 16 | Definitions: 17 | "You" (or "Your") shall mean the copyright owner or legal entity authorised by the copyright 18 | owner that is making this Agreement with the Met Office. For legal entities, the entity 19 | making a Contribution and all other entities that control, are controlled by, or are under 20 | common control with that entity are considered to be a single Contributor. 21 | 22 | For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause 23 | the direction or management of such entity, whether by contract or otherwise, or (ii) 24 | ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial 25 | ownership of such entity. 26 | 27 | "Contribution" shall mean any original work of authorship, including any modifications or 28 | additions to an existing work, that is intentionally submitted by You to the Met Office for 29 | inclusion in, or documentation of, any of the products owned or managed by the Met Office 30 | (the "Work"). For the purposes of this definition, "submitted" means any form of electronic, 31 | verbal, or written communication sent to the Met Office or its representatives, including but 32 | not limited to communication on electronic mailing lists, source code control systems, and 33 | issue tracking systems that are managed by, or on behalf of, the Met Office for the purpose 34 | of discussing and improving the Work, but excluding communication that is conspicuously 35 | marked or otherwise designated in writing by You as "Not a Contribution." 36 | 37 | "Parties" shall mean The Met Office and You. 38 | 39 | "Met Office" shall mean the Met Office, an Executive Agency of the Department of 40 | Business, Innovation and Skills of the United Kingdom of Great Britain and Northern 41 | Ireland ("BIS"), whose principal place of business is situated at FitzRoy Road, Exeter, 42 | Devon EX1 3PB, United Kingdom, for an on behalf of BIS. 
43 | 44 | "Working Day" shall mean a day other than a Saturday, Sunday or public holiday in England 45 | when banks in London are open for business. 46 | 47 | "Data" shall have the same meaning as set out in the Data Protection Act 1998. 48 | 49 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | BSD 3-Clause License 2 | Copyright (c) 2015, Met Office. 3 | All rights reserved. 4 | 5 | Redistribution and use in source and binary forms, with or without 6 | modification, are permitted provided that the following conditions are met: 7 | 8 | * Redistributions of source code must retain the above copyright notice, this 9 | list of conditions and the following disclaimer. 10 | 11 | * Redistributions in binary form must reproduce the above copyright notice, 12 | this list of conditions and the following disclaimer in the documentation 13 | and/or other materials provided with the distribution. 14 | 15 | * Neither the name of conda-buildall nor the names of its 16 | contributors may be used to endorse or promote products derived from 17 | this software without specific prior written permission. 18 | 19 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" 20 | AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE 21 | IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE 22 | DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE 23 | FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL 24 | DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR 25 | SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER 26 | CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, 27 | OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 28 | OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 29 | 30 | -------------------------------------------------------------------------------- /MANIFEST.in: -------------------------------------------------------------------------------- 1 | include versioneer.py 2 | include conda_build_all/_version.py 3 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | conda build-all 2 | =============== 3 | 4 | ``conda build-all`` is a conda subcommand which allows multiple distributions to be built (and uploaded) in a single command. 5 | It makes use of the underlying machinery developed for ``conda build``, but has a number of advantages: 6 | 7 | * Automatically computes a build matrix for a single package, and builds all possible combinations. 8 | * Can be given a directory of recipes, each of which will be identified and built (with each having their own build matrix). 9 | * Will resolve the build order based on the dependency graph from both build and run time dependencies. 10 | * Ability to re-build everything, or only build distributions that don't already exist in a conda channel and/or folder. 11 | * Since the build matrix is computed, ``conda build-all`` avoids the need for special environment variables which control the build. 12 | * Provides a Python API for building programmatically. 
13 | 14 | 15 | Installation 16 | ============ 17 | 18 | The easiest way of installing ``conda-build-all`` is with conda, and the ``conda-forge`` channel: 19 | 20 | ``` 21 | conda install conda-build-all --channel conda-forge 22 | ``` 23 | 24 | Building from source is trivial with the pre-requisite dependencies (see ``requirements.txt``). 25 | 26 | 27 | Usage 28 | ====== 29 | 30 | ``` 31 | usage: conda-build-all [-h] [--version] 32 | [--inspect-channels [INSPECT_CHANNELS [INSPECT_CHANNELS ...]]] 33 | [--inspect-directories [INSPECT_DIRECTORIES [INSPECT_DIRECTORIES ...]]] 34 | [--no-inspect-conda-bld-directory] 35 | [--artefact-directory ARTEFACT_DIRECTORY] 36 | [--upload-channels [UPLOAD_CHANNELS [UPLOAD_CHANNELS ...]]] 37 | [--matrix-conditions [MATRIX_CONDITIONS [MATRIX_CONDITIONS ...]]] 38 | [--matrix-max-n-major-versions MATRIX_MAX_N_MAJOR_VERSIONS] 39 | [--matrix-max-n-minor-versions MATRIX_MAX_N_MINOR_VERSIONS] 40 | recipes 41 | 42 | Build many conda distributions. 43 | 44 | positional arguments: 45 | recipes The folder containing conda recipes to build. 46 | 47 | optional arguments: 48 | -h, --help show this help message and exit 49 | --version Show conda-build-all's version, and exit. 50 | --inspect-channels [INSPECT_CHANNELS [INSPECT_CHANNELS ...]] 51 | Skip a build if the equivalent disribution is already 52 | available in the specified channel. 53 | --inspect-directories [INSPECT_DIRECTORIES [INSPECT_DIRECTORIES ...]] 54 | Skip a build if the equivalent disribution is already 55 | available in the specified directory. 56 | --no-inspect-conda-bld-directory 57 | Do not add the conda-build directory to the inspection 58 | list. 59 | --artefact-directory ARTEFACT_DIRECTORY 60 | A directory for any newly built distributions to be 61 | placed. 62 | --upload-channels [UPLOAD_CHANNELS [UPLOAD_CHANNELS ...]] 63 | The channel(s) to upload built distributions to 64 | (requires BINSTAR_TOKEN envioronment variable). 
65 | --matrix-conditions [MATRIX_CONDITIONS [MATRIX_CONDITIONS ...]] 66 | Extra conditions for computing the build matrix (e.g. 67 | 'python 2.7.*'). When set, the defaults for matrix- 68 | max-n-major-versions and matrix-max-n-minor-versions 69 | are set to 0 (i.e. no limit on the max n versions). 70 | --matrix-max-n-major-versions MATRIX_MAX_N_MAJOR_VERSIONS 71 | When computing the build matrix, limit to the latest n 72 | major versions (0 makes this unlimited). For example, 73 | if Python 1, 2 and Python 3 are resolved by the recipe 74 | and associated matrix conditions, only the latest N 75 | major version will be used for the build matrix. 76 | (default: 2 if no matrix conditions) 77 | --matrix-max-n-minor-versions MATRIX_MAX_N_MINOR_VERSIONS 78 | When computing the build matrix, limit to the latest n 79 | minor versions (0 makes this unlimited). Note that 80 | this does not limit the number of major versions (see 81 | also matrix-max-n-major-version). For example, if 82 | Python 2 and Python 3 are resolved by the recipe and 83 | associated matrix conditions, a total of Nx2 builds 84 | will be identified. 
(default: 2 if no matrix 85 | conditions) 86 | ``` 87 | 88 | 89 | Example 90 | ======= 91 | 92 | Supposing we have two moderately complex conda recipes in a directory: 93 | 94 | ``` 95 | $ mkdir -p my_recipes/recipe_a my_recipes/recipe_b 96 | $ cat < my_recipes/recipe_a/meta.yaml 97 | package: 98 | name: recipe_a 99 | version: 2.4 100 | 101 | requirements: 102 | build: 103 | - python 104 | run: 105 | - python 106 | 107 | EOF 108 | 109 | $ cat < my_recipes/recipe_b/meta.yaml 110 | package: 111 | name: recipe_b 112 | version: 3.2 113 | 114 | requirements: 115 | build: 116 | - recipe_a 117 | - numpy x.x 118 | run: 119 | - recipe_a 120 | - python 121 | - numpy x.x 122 | 123 | EOF 124 | ``` 125 | 126 | If we wish to build the lot, we can simply run: 127 | 128 | ``` 129 | $ conda-build-all my_recipes 130 | 131 | conda-build-all my_recipes 132 | Fetching package metadata: ........ 133 | Resolving distributions from 2 recipes... 134 | Computed that there are 11 distributions from the 2 recipes: 135 | Resolved dependencies, will be built in the following order: 136 | recipe_a-2.4-py26_0 (will be built: True) 137 | recipe_a-2.4-py27_0 (will be built: True) 138 | recipe_a-2.4-py34_0 (will be built: True) 139 | recipe_a-2.4-py35_0 (will be built: True) 140 | recipe_b-3.2-np19py26_0 (will be built: True) 141 | recipe_b-3.2-np110py27_0 (will be built: True) 142 | recipe_b-3.2-np19py27_0 (will be built: True) 143 | recipe_b-3.2-np110py34_0 (will be built: True) 144 | recipe_b-3.2-np19py34_0 (will be built: True) 145 | recipe_b-3.2-np110py35_0 (will be built: True) 146 | recipe_b-3.2-np19py35_0 (will be built: True) 147 | 148 | BUILD START: recipe_a-2.4-py26_0 149 | ... 150 | 151 | ``` 152 | 153 | As you can see, these two unassuming recipes will result in more than 2 builds. 154 | In this case, ``recipe_a`` has been identified to be built against the top two minor versions of the top two major versions of Python - that is, py26, py27, py34, py35 (at the time of writing). 
155 | Next, ``recipe_b`` has been identified to be built against the top two minor versions of the top two major versions of Python *and* numpy. 156 | If all built distributions of python and numpy were available, there would be ``4 x 2`` permutations (4 being the number of Python versions available, and 2 being the number of numpy versions, assuming there exists only 1 major version of numpy, otherwise this would double to 4). 157 | 158 | We've seen that we can build a *lot* of distributions for our simple recipes. We can tighten the build matrix somewhat by adding or own conditions: 159 | 160 | ``` 161 | $ conda-build-all my_recipes --matrix-condition "python 3.5.*" "numpy >=1.8" 162 | Fetching package metadata: ........ 163 | Resolving distributions from 2 recipes... 164 | Computed that there are 3 distributions from the 2 recipes: 165 | Resolved dependencies, will be built in the following order: 166 | recipe_a-2.4-py35_0 (will be built: True) 167 | recipe_b-3.2-np110py35_0 (will be built: True) 168 | recipe_b-3.2-np19py35_0 (will be built: True) 169 | ... 170 | ``` 171 | 172 | Here we've used the language provided to us by conda to limit the build matrix to a smaller number of combinations. Alternatively we could use the max ``N`` major and minor arguments to limit the scope: 173 | 174 | ``` 175 | $ conda-build-all my_recipes --matrix-max-n-minor-versions=1 --matrix-max-n-major-versions=1 176 | Fetching package metadata: ........ 177 | Resolving distributions from 2 recipes... 178 | Computed that there are 2 distributions from the 2 recipes: 179 | Resolved dependencies, will be built in the following order: 180 | recipe_a-2.4-py35_0 (will be built: True) 181 | recipe_b-3.2-np110py35_0 (will be built: True) 182 | ... 
183 | 184 | ``` 185 | 186 | 187 | -------------------------------------------------------------------------------- /conda-build-all.recipe/meta.yaml: -------------------------------------------------------------------------------- 1 | {% set data = load_setup_py_data() %} 2 | 3 | package: 4 | name: conda-build-all 5 | version: {{data.get('version')}} 6 | 7 | source: 8 | path: ../ 9 | 10 | build: 11 | script: python setup.py install --single-version-externally-managed --record=record.txt 12 | 13 | requirements: 14 | build: 15 | - python 16 | - setuptools 17 | 18 | run: 19 | - python 20 | - setuptools 21 | - conda >=4 22 | - conda-build >=1.21.7 23 | - anaconda-client 24 | - mock # [py<33] 25 | 26 | test: 27 | imports: 28 | - conda_build_all 29 | commands: 30 | - conda build-all --help 31 | - conda-build-all --version 32 | 33 | about: 34 | license: BSD-3 35 | home: https://github.com/scitools/conda-build-all 36 | -------------------------------------------------------------------------------- /conda_build_all/__init__.py: -------------------------------------------------------------------------------- 1 | from ._version import get_versions 2 | __version__ = get_versions()['version'] 3 | del get_versions 4 | -------------------------------------------------------------------------------- /conda_build_all/_version.py: -------------------------------------------------------------------------------- 1 | 2 | # This file helps to compute a version number in source trees obtained from 3 | # git-archive tarball (such as those provided by githubs download-from-tag 4 | # feature). Distribution tarballs (built by setup.py sdist) and build 5 | # directories (produced by setup.py build) will contain a much shorter file 6 | # that just contains the computed version number. 7 | 8 | # This file is released into the public domain. 
Generated by 9 | # versioneer-0.15+dev (https://github.com/warner/python-versioneer) 10 | 11 | """Git implementation of _version.py.""" 12 | 13 | import errno 14 | import os 15 | import re 16 | import subprocess 17 | import sys 18 | 19 | 20 | def get_keywords(): 21 | """Get the keywords needed to look up the version information.""" 22 | # these strings will be replaced by git during git-archive. 23 | # setup.py/versioneer.py will grep for the variable names, so they must 24 | # each be defined on a line of their own. _version.py will just call 25 | # get_keywords(). 26 | git_refnames = " (HEAD -> master, tag: v1.1.3)" 27 | git_full = "742efce8c7928bac9291034bc629bde2d3b49e09" 28 | keywords = {"refnames": git_refnames, "full": git_full} 29 | return keywords 30 | 31 | 32 | class VersioneerConfig: 33 | 34 | """Container for Versioneer configuration parameters.""" 35 | 36 | 37 | def get_config(): 38 | """Create, populate and return the VersioneerConfig() object.""" 39 | # these strings are filled in when 'setup.py versioneer' creates 40 | # _version.py 41 | cfg = VersioneerConfig() 42 | cfg.VCS = "git" 43 | cfg.style = "pep440-branch-based" 44 | cfg.tag_prefix = "v" 45 | cfg.parentdir_prefix = "conda-build-all-" 46 | cfg.versionfile_source = "conda_build_all/_version.py" 47 | cfg.verbose = False 48 | return cfg 49 | 50 | 51 | class NotThisMethod(Exception): 52 | 53 | """Exception raised if a method is not valid for the current scenario.""" 54 | 55 | 56 | LONG_VERSION_PY = {} 57 | HANDLERS = {} 58 | 59 | 60 | def register_vcs_handler(vcs, method): # decorator 61 | """Decorator to mark a method as the handler for a particular VCS.""" 62 | def decorate(f): 63 | """Store f in HANDLERS[vcs][method].""" 64 | if vcs not in HANDLERS: 65 | HANDLERS[vcs] = {} 66 | HANDLERS[vcs][method] = f 67 | return f 68 | return decorate 69 | 70 | 71 | def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False): 72 | """Call the given command(s).""" 73 | assert 
isinstance(commands, list) 74 | p = None 75 | for c in commands: 76 | try: 77 | dispcmd = str([c] + args) 78 | # remember shell=False, so use git.cmd on windows, not just git 79 | p = subprocess.Popen([c] + args, cwd=cwd, stdout=subprocess.PIPE, 80 | stderr=(subprocess.PIPE if hide_stderr 81 | else None)) 82 | break 83 | except EnvironmentError: 84 | e = sys.exc_info()[1] 85 | if e.errno == errno.ENOENT: 86 | continue 87 | if verbose: 88 | print("unable to run %s" % dispcmd) 89 | print(e) 90 | return None 91 | else: 92 | if verbose: 93 | print("unable to find command, tried %s" % (commands,)) 94 | return None 95 | stdout = p.communicate()[0].strip() 96 | if sys.version_info[0] >= 3: 97 | stdout = stdout.decode() 98 | if p.returncode != 0: 99 | if verbose: 100 | print("unable to run %s (error)" % dispcmd) 101 | return None 102 | return stdout 103 | 104 | 105 | def versions_from_parentdir(parentdir_prefix, root, verbose): 106 | """Try to determine the version from the parent directory name. 107 | 108 | Source tarballs conventionally unpack into a directory that includes 109 | both the project name and a version string. 110 | """ 111 | dirname = os.path.basename(root) 112 | if not dirname.startswith(parentdir_prefix): 113 | if verbose: 114 | print("guessing rootdir is '%s', but '%s' doesn't start with " 115 | "prefix '%s'" % (root, dirname, parentdir_prefix)) 116 | raise NotThisMethod("rootdir doesn't start with parentdir_prefix") 117 | return {"version": dirname[len(parentdir_prefix):], 118 | "full-revisionid": None, 119 | "dirty": False, "error": None} 120 | 121 | 122 | @register_vcs_handler("git", "get_keywords") 123 | def git_get_keywords(versionfile_abs): 124 | """Extract version information from the given file.""" 125 | # the code embedded in _version.py can just fetch the value of these 126 | # keywords. When used from setup.py, we don't want to import _version.py, 127 | # so we do it with a regexp instead. This function is not used from 128 | # _version.py. 
129 | keywords = {} 130 | try: 131 | f = open(versionfile_abs, "r") 132 | for line in f.readlines(): 133 | if line.strip().startswith("git_refnames ="): 134 | mo = re.search(r'=\s*"(.*)"', line) 135 | if mo: 136 | keywords["refnames"] = mo.group(1) 137 | if line.strip().startswith("git_full ="): 138 | mo = re.search(r'=\s*"(.*)"', line) 139 | if mo: 140 | keywords["full"] = mo.group(1) 141 | f.close() 142 | except EnvironmentError: 143 | pass 144 | return keywords 145 | 146 | 147 | @register_vcs_handler("git", "keywords") 148 | def git_versions_from_keywords(keywords, tag_prefix, verbose): 149 | """Get version information from git keywords.""" 150 | if not keywords: 151 | raise NotThisMethod("no keywords at all, weird") 152 | refnames = keywords["refnames"].strip() 153 | if refnames.startswith("$Format"): 154 | if verbose: 155 | print("keywords are unexpanded, not using") 156 | raise NotThisMethod("unexpanded keywords, not a git-archive tarball") 157 | refs = [r.strip() for r in refnames.strip("()").split(",")] 158 | # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of 159 | # just "foo-1.0". If we see a "tag: " prefix, prefer those. 160 | TAG = "tag: " 161 | tags = set([r[len(TAG):] for r in refs if r.startswith(TAG)]) 162 | if not tags: 163 | # Either we're using git < 1.8.3, or there really are no tags. We use 164 | # a heuristic: assume all version tags have a digit. The old git %d 165 | # expansion behaves like git log --decorate=short and strips out the 166 | # refs/heads/ and refs/tags/ prefixes that would let us distinguish 167 | # between branches and tags. By ignoring refnames without digits, we 168 | # filter out many common branch names like "release" and 169 | # "stabilization", as well as "HEAD" and "master". 
170 | tags = set([r for r in refs if re.search(r'\d', r)]) 171 | if verbose: 172 | print("discarding '%s', no digits" % ",".join(set(refs) - tags)) 173 | if verbose: 174 | print("likely tags: %s" % ",".join(sorted(tags))) 175 | for ref in sorted(tags): 176 | # sorting will prefer e.g. "2.0" over "2.0rc1" 177 | if ref.startswith(tag_prefix): 178 | r = ref[len(tag_prefix):] 179 | if verbose: 180 | print("picking %s" % r) 181 | return {"version": r, 182 | "full-revisionid": keywords["full"].strip(), 183 | "dirty": False, "error": None, "branch": None 184 | } 185 | # no suitable tags, so version is "0+unknown", but full hex is still there 186 | if verbose: 187 | print("no suitable tags, using unknown + full revision id") 188 | return {"version": "0+unknown", 189 | "full-revisionid": keywords["full"].strip(), 190 | "dirty": False, "error": "no suitable tags", 191 | "branch": None} 192 | 193 | 194 | @register_vcs_handler("git", "pieces_from_vcs") 195 | def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command): 196 | """Get version from 'git describe' in the root of the source tree. 197 | 198 | This only gets called if the git-archive 'subst' keywords were *not* 199 | expanded, and _version.py hasn't already been rewritten with a short 200 | version string, meaning we're inside a checked out source tree. 201 | """ 202 | if not os.path.exists(os.path.join(root, ".git")): 203 | if verbose: 204 | print("no .git in %s" % root) 205 | raise NotThisMethod("no .git directory") 206 | 207 | GITS = ["git"] 208 | if sys.platform == "win32": 209 | GITS = ["git.cmd", "git.exe"] 210 | # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty] 211 | # if there isn't one, this yields HEX[-dirty] (no NUM). Note, for git v1.7 212 | # and below, it is necessary to run "git update-index --refresh" first. 
213 | describe_out = run_command(GITS, ["describe", "--tags", "--dirty", 214 | "--always", "--long", 215 | "--match", "%s*" % tag_prefix], 216 | cwd=root) 217 | # --long was added in git-1.5.5 218 | if describe_out is None: 219 | raise NotThisMethod("'git describe' failed") 220 | describe_out = describe_out.strip() 221 | full_out = run_command(GITS, ["rev-parse", "HEAD"], cwd=root) 222 | if full_out is None: 223 | raise NotThisMethod("'git rev-parse' failed") 224 | full_out = full_out.strip() 225 | 226 | pieces = {} 227 | pieces["long"] = full_out 228 | pieces["short"] = full_out[:7] # maybe improved later 229 | pieces["error"] = None 230 | 231 | # abbrev-ref available with git >= 1.7 232 | branch_name = run_command(GITS, ["rev-parse", "--abbrev-ref", "HEAD"], 233 | cwd=root).strip() 234 | if branch_name == 'HEAD': 235 | # If we aren't exactly on a branch, pick a branch which represents 236 | # the current commit. If all else fails, we are on a branchless 237 | # commit. 238 | branches = run_command(GITS, ["branch", "--contains"], 239 | cwd=root).split('\n') 240 | # Strip off the leading "* " from the list of branches. 241 | branches = [branch[2:] for branch in branches 242 | if branch and branch[4:5] != '('] 243 | if 'master' in branches: 244 | branch_name = 'master' 245 | elif not branches: 246 | branch_name = None 247 | else: 248 | # Pick the first branch that is returned. Good or bad. 249 | branch_name = branches[0] 250 | 251 | pieces['branch'] = branch_name 252 | 253 | # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty] 254 | # TAG might have hyphens. 
255 | git_describe = describe_out 256 | 257 | # look for -dirty suffix 258 | dirty = git_describe.endswith("-dirty") 259 | pieces["dirty"] = dirty 260 | if dirty: 261 | git_describe = git_describe[:git_describe.rindex("-dirty")] 262 | 263 | # now we have TAG-NUM-gHEX or HEX 264 | 265 | if "-" in git_describe: 266 | # TAG-NUM-gHEX 267 | mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe) 268 | if not mo: 269 | # unparseable. Maybe git-describe is misbehaving? 270 | pieces["error"] = ("unable to parse git-describe output: '%s'" 271 | % describe_out) 272 | return pieces 273 | 274 | # tag 275 | full_tag = mo.group(1) 276 | if not full_tag.startswith(tag_prefix): 277 | if verbose: 278 | fmt = "tag '%s' doesn't start with prefix '%s'" 279 | print(fmt % (full_tag, tag_prefix)) 280 | pieces["error"] = ("tag '%s' doesn't start with prefix '%s'" 281 | % (full_tag, tag_prefix)) 282 | return pieces 283 | pieces["closest-tag"] = full_tag[len(tag_prefix):] 284 | 285 | # distance: number of commits since tag 286 | pieces["distance"] = int(mo.group(2)) 287 | 288 | # commit: short hex revision ID 289 | pieces["short"] = mo.group(3) 290 | 291 | else: 292 | # HEX: no tags 293 | pieces["closest-tag"] = None 294 | count_out = run_command(GITS, ["rev-list", "HEAD", "--count"], 295 | cwd=root) 296 | pieces["distance"] = int(count_out) # total number of commits 297 | 298 | return pieces 299 | 300 | 301 | # Default matches v1.2.x, maint/1.2.x, 1.2.x, 1.x etc. 302 | default_maint_branch_regexp = ".*([0-9]+\.)+x$" 303 | 304 | 305 | def plus_or_dot(pieces): 306 | """Return a + if we don't already have one, else return a .""" 307 | if "+" in pieces.get("closest-tag", ""): 308 | return "." 309 | return "+" 310 | 311 | 312 | def render_pep440(pieces): 313 | """Build up version string, with post-release "local version identifier". 314 | 315 | Our goal: TAG[+DISTANCE.gHEX[.dirty]] . 
Note that if you 316 | get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty 317 | 318 | Exceptions: 319 | 1: no tags. git_describe was just HEX. 0+untagged.DISTANCE.gHEX[.dirty] 320 | """ 321 | if pieces["closest-tag"]: 322 | rendered = pieces["closest-tag"] 323 | if pieces["distance"] or pieces["dirty"]: 324 | rendered += plus_or_dot(pieces) 325 | rendered += "%d.g%s" % (pieces["distance"], pieces["short"]) 326 | if pieces["dirty"]: 327 | rendered += ".dirty" 328 | else: 329 | # exception #1 330 | rendered = "0+untagged.%d.g%s" % (pieces["distance"], 331 | pieces["short"]) 332 | if pieces["dirty"]: 333 | rendered += ".dirty" 334 | return rendered 335 | 336 | 337 | def render_pep440_pre(pieces): 338 | """TAG[.post.devDISTANCE] -- No -dirty. 339 | 340 | Exceptions: 341 | 1: no tags. 0.post.devDISTANCE 342 | """ 343 | if pieces["closest-tag"]: 344 | rendered = pieces["closest-tag"] 345 | if pieces["distance"]: 346 | rendered += ".post.dev%d" % pieces["distance"] 347 | else: 348 | # exception #1 349 | rendered = "0.post.dev%d" % pieces["distance"] 350 | return rendered 351 | 352 | 353 | def render_pep440_post(pieces): 354 | """TAG[.postDISTANCE[.dev0]+gHEX] . 355 | 356 | The ".dev0" means dirty. Note that .dev0 sorts backwards 357 | (a dirty tree will appear "older" than the corresponding clean one), 358 | but you shouldn't be releasing software with -dirty anyways. 359 | 360 | Exceptions: 361 | 1: no tags. 
0.postDISTANCE[.dev0] 362 | """ 363 | if pieces["closest-tag"]: 364 | rendered = pieces["closest-tag"] 365 | if pieces["distance"] or pieces["dirty"]: 366 | rendered += ".post%d" % pieces["distance"] 367 | if pieces["dirty"]: 368 | rendered += ".dev0" 369 | rendered += plus_or_dot(pieces) 370 | rendered += "g%s" % pieces["short"] 371 | else: 372 | # exception #1 373 | rendered = "0.post%d" % pieces["distance"] 374 | if pieces["dirty"]: 375 | rendered += ".dev0" 376 | rendered += "+g%s" % pieces["short"] 377 | return rendered 378 | 379 | 380 | def render_pep440_old(pieces): 381 | """TAG[.postDISTANCE[.dev0]] . 382 | 383 | The ".dev0" means dirty. 384 | 385 | Eexceptions: 386 | 1: no tags. 0.postDISTANCE[.dev0] 387 | """ 388 | if pieces["closest-tag"]: 389 | rendered = pieces["closest-tag"] 390 | if pieces["distance"] or pieces["dirty"]: 391 | rendered += ".post%d" % pieces["distance"] 392 | if pieces["dirty"]: 393 | rendered += ".dev0" 394 | else: 395 | # exception #1 396 | rendered = "0.post%d" % pieces["distance"] 397 | if pieces["dirty"]: 398 | rendered += ".dev0" 399 | return rendered 400 | 401 | 402 | def render_git_describe(pieces): 403 | """TAG[-DISTANCE-gHEX][-dirty]. 404 | 405 | Like 'git describe --tags --dirty --always'. 406 | 407 | Exceptions: 408 | 1: no tags. HEX[-dirty] (note: no 'g' prefix) 409 | """ 410 | if pieces["closest-tag"]: 411 | rendered = pieces["closest-tag"] 412 | if pieces["distance"]: 413 | rendered += "-%d-g%s" % (pieces["distance"], pieces["short"]) 414 | else: 415 | # exception #1 416 | rendered = pieces["short"] 417 | if pieces["dirty"]: 418 | rendered += "-dirty" 419 | return rendered 420 | 421 | 422 | def render_git_describe_long(pieces): 423 | """TAG-DISTANCE-gHEX[-dirty]. 424 | 425 | Like 'git describe --tags --dirty --always -long'. 426 | The distance/hash is unconditional. 427 | 428 | Exceptions: 429 | 1: no tags. 
HEX[-dirty] (note: no 'g' prefix) 430 | """ 431 | if pieces["closest-tag"]: 432 | rendered = pieces["closest-tag"] 433 | rendered += "-%d-g%s" % (pieces["distance"], pieces["short"]) 434 | else: 435 | # exception #1 436 | rendered = pieces["short"] 437 | if pieces["dirty"]: 438 | rendered += "-dirty" 439 | return rendered 440 | 441 | 442 | def add_one_to_version(version_string, number_index_to_increment=-1): 443 | """ 444 | Add one to a version string at the given numeric indices. 445 | 446 | >>> add_one_to_version('v1.2.3') 447 | 'v1.2.4' 448 | 449 | """ 450 | # Break up the tag by number groups (preserving multi-digit 451 | # numbers as multidigit) 452 | parts = re.split("([0-9]+)", version_string) 453 | 454 | digit_parts = [(i, part) for i, part in enumerate(parts) 455 | if part.isdigit()] 456 | 457 | # Deal with negative indexing. 458 | increment_at_index = ((number_index_to_increment + len(digit_parts)) 459 | % len(digit_parts)) 460 | for n_seen, (i, part) in enumerate(digit_parts): 461 | if n_seen == increment_at_index: 462 | parts[i] = str(int(part) + 1) 463 | elif n_seen > increment_at_index: 464 | parts[i] = '0' 465 | return ''.join(parts) 466 | 467 | 468 | def render_pep440_branch_based(pieces): 469 | # [TAG+1 of minor number][.devDISTANCE][+gHEX]. The git short is 470 | # included for dirty. 471 | 472 | # exceptions: 473 | # 1: no tags. 0.0.0.devDISTANCE[+gHEX] 474 | 475 | replacements = ([' ', '.'], ['(', ''], [')', '']) 476 | branch_name = pieces.get('branch') or '' 477 | for old, new in replacements: 478 | branch_name = branch_name.replace(old, new) 479 | master = branch_name == 'master' 480 | maint = re.match(default_maint_branch_regexp, branch_name) 481 | 482 | # If we are on a tag, just pep440-pre it. 483 | if pieces["closest-tag"] and not (pieces["distance"] or 484 | pieces["dirty"]): 485 | rendered = pieces["closest-tag"] 486 | else: 487 | # Put a default closest-tag in. 
488 | if not pieces["closest-tag"]: 489 | pieces["closest-tag"] = '0.0.0' 490 | 491 | if pieces["distance"] or pieces["dirty"]: 492 | if maint: 493 | rendered = pieces["closest-tag"] 494 | if pieces["distance"]: 495 | rendered += ".post%d" % pieces["distance"] 496 | else: 497 | rendered = add_one_to_version(pieces["closest-tag"]) 498 | if pieces["distance"]: 499 | rendered += ".dev%d" % pieces["distance"] 500 | 501 | suffix = [] 502 | # Put the branch name in if it isn't master nor a 503 | # maintenance branch. 504 | if not (master or maint): 505 | suffix.append('%s' % (branch_name or 'unknown_branch')) 506 | 507 | if pieces["dirty"]: 508 | suffix.append('g%s' % pieces["short"]) 509 | if suffix: 510 | rendered += '+%s' % '_'.join(suffix) 511 | else: 512 | rendered = pieces["closest-tag"] 513 | return rendered 514 | 515 | 516 | STYLES = {'default': render_pep440, 517 | 'pep440': render_pep440, 518 | 'pep440-pre': render_pep440_pre, 519 | 'pep440-post': render_pep440_post, 520 | 'pep440-old': render_pep440_old, 521 | 'git-describe': render_git_describe, 522 | 'git-describe-long': render_git_describe_long, 523 | 'pep440-old': render_pep440_old, 524 | 'pep440-branch-based': render_pep440_branch_based, 525 | } 526 | 527 | 528 | def render(pieces, style): 529 | """Render the given version pieces into the requested style.""" 530 | if pieces["error"]: 531 | return {"version": "unknown", 532 | "full-revisionid": pieces.get("long"), 533 | "dirty": None, 534 | "error": pieces["error"]} 535 | 536 | if not style: 537 | style = 'default' 538 | 539 | renderer = STYLES.get(style) 540 | 541 | if not renderer: 542 | raise ValueError("unknown style '%s'" % style) 543 | 544 | rendered = renderer(pieces) 545 | 546 | return {"version": rendered, "full-revisionid": pieces["long"], 547 | "dirty": pieces["dirty"], "error": None} 548 | 549 | 550 | def get_versions(): 551 | """Get version information or return default if unable to do so.""" 552 | # I am in _version.py, which lives at 
ROOT/VERSIONFILE_SOURCE. If we have 553 | # __file__, we can work backwards from there to the root. Some 554 | # py2exe/bbfreeze/non-CPython implementations don't do __file__, in which 555 | # case we can only use expanded keywords. 556 | 557 | cfg = get_config() 558 | verbose = cfg.verbose 559 | 560 | try: 561 | return git_versions_from_keywords(get_keywords(), cfg.tag_prefix, 562 | verbose) 563 | except NotThisMethod: 564 | pass 565 | 566 | try: 567 | root = os.path.realpath(__file__) 568 | # versionfile_source is the relative path from the top of the source 569 | # tree (where the .git directory might live) to this file. Invert 570 | # this to find the root from __file__. 571 | for i in cfg.versionfile_source.split('/'): 572 | root = os.path.dirname(root) 573 | except NameError: 574 | return {"version": "0+unknown", "full-revisionid": None, 575 | "dirty": None, 576 | "error": "unable to find root of source tree"} 577 | 578 | try: 579 | pieces = git_pieces_from_vcs(cfg.tag_prefix, root, verbose) 580 | return render(pieces, cfg.style) 581 | except NotThisMethod: 582 | pass 583 | 584 | try: 585 | if cfg.parentdir_prefix: 586 | return versions_from_parentdir(cfg.parentdir_prefix, root, verbose) 587 | except NotThisMethod: 588 | pass 589 | 590 | return {"version": "0+unknown", "full-revisionid": None, 591 | "dirty": None, 592 | "error": "unable to compute version"} 593 | -------------------------------------------------------------------------------- /conda_build_all/artefact_destination.py: -------------------------------------------------------------------------------- 1 | """ 2 | Build all the conda recipes in the given directory sequentially if they do not 3 | already exist on the given binstar channel. 4 | Building is done in order of dependencies (circular dependencies are not supported). 5 | Once a build is complete, the distribution will be uploaded (provided BINSTAR_TOKEN is 6 | defined), and the next package will be processed. 
7 | 8 | """ 9 | from __future__ import print_function 10 | 11 | import logging 12 | import os 13 | import shutil 14 | import subprocess 15 | from argparse import Namespace 16 | import posixpath as urlpath 17 | 18 | import binstar_client.utils 19 | import binstar_client 20 | from .conda_interface import get_index 21 | from conda_build.metadata import MetaData 22 | from conda_build.build import bldpkg_path 23 | 24 | from . import inspect_binstar 25 | from . import build 26 | 27 | 28 | log = logging.getLogger('artefact_destination') 29 | 30 | 31 | class ArtefactDestination(object): 32 | def __init__(self): 33 | pass 34 | 35 | def make_available(self, meta, built_dist_path, just_built, config=None): 36 | """ 37 | Put the built distribution on this destination. 38 | 39 | Parameters 40 | ---------- 41 | meta : MetaData 42 | The metadata of the thing to make available. 43 | built_dist_path 44 | The location of the built distribution for this artefact. 45 | just_built : bool 46 | Whether this artefact was just built, or was already available. 47 | config 48 | The conda-build configuration for the build. 
49 | 50 | """ 51 | pass 52 | 53 | 54 | class DirectoryDestination(ArtefactDestination): 55 | def __init__(self, directory): 56 | self.directory = os.path.abspath(os.path.expanduser(directory)) 57 | if not os.path.exists(self.directory): 58 | os.makedirs(self.directory) 59 | if not os.path.isdir(self.directory): 60 | raise IOError("The destination provided is not a directory.") 61 | 62 | def make_available(self, meta, built_dist_path, just_built, config=None): 63 | if just_built: 64 | if type(built_dist_path) not in (list, tuple): 65 | built_dist_path = [built_dist_path] 66 | for path in built_dist_path: 67 | print(meta, path, just_built) 68 | shutil.copy(path, self.directory) 69 | 70 | 71 | class AnacondaClientChannelDest(ArtefactDestination): 72 | def __init__(self, token, owner, channel): 73 | self.token = token 74 | self.owner = owner 75 | self.channel = channel 76 | self._cli = None 77 | 78 | @classmethod 79 | def from_spec(cls, spec): 80 | """ 81 | Create an AnacondaClientChannelDest given the channel specification. 82 | 83 | Useful for command line arguments to be able to specify the owner 84 | and channel in a single string. 
85 | 86 | """ 87 | token = os.environ.get("BINSTAR_TOKEN", None) 88 | if '/' in spec: 89 | owner, _, channel = spec.split('/') 90 | else: 91 | owner, channel = spec, 'main' 92 | return cls(token, owner, channel) 93 | 94 | def make_available(self, meta, built_dist_path, just_built, config=None): 95 | if self._cli is None: 96 | self._cli = binstar_client.utils.get_binstar(Namespace(token=self.token, site=None)) 97 | 98 | already_with_owner = inspect_binstar.distribution_exists(self._cli, self.owner, meta) 99 | already_on_channel = inspect_binstar.distribution_exists_on_channel(self._cli, 100 | self.owner, 101 | meta, 102 | channel=self.channel) 103 | if already_on_channel and not just_built: 104 | log.info('Nothing to be done for {} - it is already on {}/{}.'.format(meta.name(), self.owner, self.channel)) 105 | elif already_on_channel and just_built: 106 | # We've just built, and the owner already has a distribution on this channel. 107 | log.warn("Assuming the distribution we've just built and the one on {}/{} are the same.".format(self.owner, self.channel)) 108 | 109 | elif already_with_owner: 110 | if just_built: 111 | log.warn("Assuming the distribution we've just built and the one owned by {} are the same.".format(self.owner)) 112 | # Link a distribution. 113 | log.info('Adding existing {} to the {}/{} channel.'.format(meta.dist(), self.owner, self.channel)) 114 | inspect_binstar.add_distribution_to_channel(self._cli, self.owner, meta, channel=self.channel) 115 | 116 | elif just_built: 117 | # Upload the distribution 118 | log.info('Uploading {} to the {} channel.'.format(meta.name(), self.channel)) 119 | build.upload(self._cli, meta, self.owner, channels=[self.channel], 120 | config=config) 121 | 122 | elif not just_built: 123 | # The distribution already existed, but not under the target owner. 
124 | if 'http://' in built_dist_path or 'https://' in built_dist_path: 125 | source_owner = urlpath.basename(urlpath.dirname(built_dist_path.rstrip('/'))) 126 | inspect_binstar.copy_distribution_to_owner(self._cli, source_owner, self.owner, meta, 127 | channel=self.channel) 128 | -------------------------------------------------------------------------------- /conda_build_all/build.py: -------------------------------------------------------------------------------- 1 | # NOTE: This module has no unit tests. 2 | 3 | from __future__ import print_function 4 | 5 | import os 6 | import shutil 7 | 8 | import conda_build.build as build_module 9 | from conda_build.metadata import MetaData 10 | import conda_build.config 11 | from .conda_interface import Locked 12 | 13 | try: 14 | from conda_build.api import get_output_file_path 15 | except ImportError: 16 | from conda_build.build import bldpkg_path as get_output_file_path 17 | 18 | import conda_build.source 19 | import binstar_client 20 | from binstar_client.utils.detect import detect_package_type, get_attrs 21 | 22 | from . import inspect_binstar 23 | 24 | 25 | def build(meta, test=True): 26 | """Build (and optionally test) a recipe directory.""" 27 | with Locked(conda_build.config.croot): 28 | # Check whether verbose is a module-level variable in 29 | # this version of conda_build and set it properly if it is. 
30 | if 'verbose' in dir(build_module): 31 | build_module.verbose = False 32 | kwd = {} 33 | else: 34 | kwd = {'verbose': False} 35 | meta.check_fields() 36 | 37 | if os.path.exists(conda_build.source.WORK_DIR): 38 | shutil.rmtree(conda_build.source.WORK_DIR) 39 | build_module.build(meta, post=None, need_source_download=True, **kwd) 40 | if test: 41 | build_module.test(meta, **kwd) 42 | return meta 43 | 44 | 45 | def upload(cli, meta, owner, channels=['main'], config=None): 46 | """Upload a distribution, given the build metadata.""" 47 | fname = get_output_file_path(meta) 48 | package_type = detect_package_type(fname) 49 | package_attrs, release_attrs, file_attrs = get_attrs(package_type, fname) 50 | package_name = package_attrs['name'] 51 | version = release_attrs['version'] 52 | 53 | # Check the package exists, otherwise create one. 54 | try: 55 | cli.package(owner, package_name) 56 | except binstar_client.NotFound: 57 | print('Creating the {} package on {}'.format(package_name, owner)) 58 | summary = package_attrs['summary'] 59 | cli.add_package(owner, package_name, summary, package_attrs.get('license'), public=True) 60 | 61 | # Check the release exists, otherwise create one. 62 | try: 63 | cli.release(owner, package_name, version) 64 | except binstar_client.NotFound: 65 | # TODO: Add readme.md support for descriptions? 66 | 67 | # The signature for add_release changed in anaconda-client 1.6.3. 68 | # First try the old signature, and if that fails, use the new. 69 | try: 70 | cli.add_release(owner, package_name, version, requirements=[], 71 | announce=None, description='') 72 | except TypeError: 73 | cli.add_release(owner, package_name, version, requirements=[], 74 | announce=None, release_attrs={'description': ''}) 75 | 76 | try: 77 | cli.distribution(owner, package_name, version, file_attrs['basename']) 78 | except binstar_client.NotFound: 79 | # The file doesn't exist. 80 | pass 81 | else: 82 | print('Distribution %s already exists ... 
removing' % (file_attrs['basename'],)) 83 | cli.remove_dist(owner, package_name, version, file_attrs['basename']) 84 | 85 | with open(fname, 'rb') as fd: 86 | print('\nUploading file %s/%s/%s/%s to %s...' % (owner, package_name, version, file_attrs['basename'], channels)) 87 | upload_info = cli.upload(owner, package_name, version, file_attrs['basename'], 88 | fd, package_type, description='', 89 | dependencies=file_attrs.get('dependencies'), 90 | attrs=file_attrs['attrs'], 91 | channels=channels) 92 | return upload_info 93 | -------------------------------------------------------------------------------- /conda_build_all/builder.py: -------------------------------------------------------------------------------- 1 | """ 2 | Build all the conda recipes in the given directory sequentially if they do not 3 | already exist on the given binstar channel. 4 | Building is done in order of dependencies (circular dependencies are not supported). 5 | Once a build is complete, the distribution will be uploaded (provided BINSTAR_TOKEN is 6 | defined), and the next package will be processed. 7 | 8 | """ 9 | from __future__ import print_function 10 | 11 | from copy import deepcopy 12 | import glob 13 | import logging 14 | try: 15 | from unittest import mock 16 | except ImportError: 17 | import mock 18 | import os 19 | 20 | from binstar_client.utils import get_binstar 21 | import binstar_client 22 | from .conda_interface import (Resolve, get_index, subdir, copy_index, 23 | string_types) 24 | 25 | try: 26 | import conda_build.api 27 | except ImportError: 28 | import conda_build.config 29 | import conda_build 30 | from conda_build.metadata import MetaData 31 | import conda_build.render 32 | from conda_build.build import bldpkg_path 33 | 34 | from . import order_deps 35 | from . import build 36 | from . import inspect_binstar 37 | from . import version_matrix as vn_matrix 38 | from . 
import resolved_distribution 39 | 40 | 41 | def package_built_name(package, root_dir): 42 | package_dir = os.path.join(root_dir, package) 43 | if hasattr(conda_build, 'api'): 44 | return conda_build.api.get_output_file_path(package_dir) 45 | else: 46 | meta = MetaData(package_dir) 47 | return bldpkg_path(meta) 48 | 49 | 50 | def distribution_exists(binstar_cli, owner, metadata): 51 | fname = '{}/{}.tar.bz2'.format(subdir, metadata.dist()) 52 | try: 53 | r = binstar_cli.distribution(owner, metadata.name(), metadata.version(), 54 | fname) 55 | exists = True 56 | except binstar_client.errors.NotFound: 57 | exists = False 58 | return exists 59 | 60 | 61 | def list_metas(directory, max_depth=0, config=None): 62 | """ 63 | Get the build metadata of all recipes in a directory. 64 | 65 | The order of metas from this function is not guaranteed. 66 | 67 | Parameters 68 | ---------- 69 | directory 70 | Where to start looking for metas using os.walk. 71 | max_depth : int 72 | How deep to recurse when looking for recipes. 73 | A value ``<=0`` will recurse indefinitely. A value of 1 74 | will look in the given directory for a meta.yaml. 75 | (default: 0) 76 | 77 | """ 78 | packages = [] 79 | current_depth = max_depth 80 | root = os.path.normpath(directory) 81 | for new_root, dirs, files in os.walk(root, followlinks=True): 82 | depth = new_root[len(root):].count(os.path.sep) + 1 83 | if max_depth > 0 and depth >= max_depth: 84 | del dirs[:] 85 | 86 | if 'meta.yaml' in files: 87 | if hasattr(conda_build, 'api'): 88 | pkgs = conda_build.api.render(new_root, config=config, 89 | finalize=False, bypass_env_check=True) 90 | # cb2 returns a tuple, with the metadata object as the first 91 | # element. That's all we care about. 92 | if hasattr(pkgs[0], 'config'): 93 | pkgs = [pkgs[0]] 94 | # cb3 returns a list of tuples, each with the metadata object 95 | # as the first element. Collect them up. 
96 | else: 97 | pkgs = [pkg[0] for pkg in pkgs] 98 | packages.extend(pkgs) 99 | else: 100 | packages.append(MetaData(new_root)) 101 | 102 | return packages 103 | 104 | 105 | def sort_dependency_order(metas, config): 106 | """Sort the metas into the order that they must be built.""" 107 | meta_named_deps = {} 108 | buildable = [meta.name() for meta in metas] 109 | for meta in metas: 110 | meta = deepcopy(meta) 111 | 112 | # In order to deal with selectors impacting sort order, we completely 113 | # ignore them for the sake of ordering. This decision was taken in the 114 | # light of https://github.com/SciTools/conda-build-all/issues/30 as a 115 | # pragmatic performance choice. 116 | def select_lines(data, *args, **kwargs): 117 | # Just return the data without removing any of the lines. This 118 | # is only a suitable solution when selectors are also comments. 119 | return data 120 | 121 | meta.final = False 122 | with mock.patch('conda_build.metadata.select_lines', new=select_lines): 123 | try: 124 | with mock.patch('conda_build.jinja_context.select_lines', new=select_lines): 125 | try: 126 | meta.parse_again(config, permit_undefined_jinja=True) 127 | except TypeError: 128 | meta.parse_again(permit_undefined_jinja=True) 129 | except AttributeError: 130 | try: 131 | meta.parse_again(config, permit_undefined_jinja=True) 132 | except TypeError: 133 | meta.parse_again(permit_undefined_jinja=True) 134 | 135 | # Now that we have re-parsed the metadata with selectors unconditionally 136 | # included, we can get the run and build dependencies and do a toposort. 137 | all_deps = ((meta.get_value('requirements/run', []) or []) + 138 | (meta.get_value('requirements/build', []) or [])) 139 | # Remove version information from the name. 
140 | all_deps = [dep.split(' ', 1)[0] for dep in all_deps] 141 | meta_named_deps[meta.name()] = [dep for dep in all_deps if dep in buildable] 142 | sorted_names = list(order_deps.resolve_dependencies(meta_named_deps)) 143 | return sorted(metas, key=lambda meta: sorted_names.index(meta.name())) 144 | 145 | 146 | class Builder(object): 147 | def __init__(self, conda_recipes_directory, 148 | inspection_channels, inspection_directories, 149 | artefact_destinations, 150 | matrix_conditions, matrix_max_n_major_minor_versions=(2, 2), 151 | dry_run=False): 152 | """ 153 | Build a directory of conda recipes sequentially, if they don't already exist in the inspection locations. 154 | 155 | Parameters 156 | ---------- 157 | conda_recipes_directory : string 158 | The path to the directory in which to look for conda recipes. 159 | inspection_channels : iterable 160 | The conda channels to inspect to determine whether a recipe has already been built. 161 | inspection_directories : iterable 162 | The local directories to inspect to determine whether a recipe has already been built. 163 | artefact_destinations : iterable of conda_build_all.artefact_destination.ArtefactDestination 164 | The destinations for the built artefact to go to. 165 | matrix_conditions : iterable of conda specifications 166 | The conditions to apply when determining whether a recipe should be built 167 | matrix_max_n_major_minor_versions : pair of ints 168 | The number of major and minor versions to preserve for each resolved recipe. For instance, 169 | if a recipe can be built against np 1.7, 1.8 and 1.9, and the number of minor versions is 2, 170 | the build matrix will prune the 1.7 option. 171 | dry_run : bool 172 | True to stop before building recipes but after determining which 173 | recipes to build. 
174 | 175 | """ 176 | self.conda_recipes_directory = conda_recipes_directory 177 | self.inspection_channels = inspection_channels or [] 178 | self.inspection_directories = inspection_directories 179 | self.artefact_destinations = artefact_destinations 180 | self.matrix_conditions = matrix_conditions 181 | self.matrix_max_n_major_minor_versions = matrix_max_n_major_minor_versions 182 | self.dry_run = dry_run 183 | 184 | def fetch_all_metas(self, config): 185 | """ 186 | Return the conda recipe metas, in the order they should be built. 187 | 188 | """ 189 | conda_recipes_directory = os.path.abspath(os.path.expanduser(self.conda_recipes_directory)) 190 | recipe_metas = list_metas(conda_recipes_directory, config=config) 191 | recipe_metas = sort_dependency_order(recipe_metas, config=config) 192 | return recipe_metas 193 | 194 | def find_existing_built_dists(self, recipe_metas): 195 | recipes = tuple([meta, None] for meta in recipe_metas) 196 | if self.inspection_channels: 197 | # For an unknown reason we are unable to cache the get_index call. There is a 198 | # test which fails against v3.18.6 if use_cache is True. 199 | index = get_index(self.inspection_channels, prepend=False, use_cache=False) 200 | # We look to see if a distribution exists in the channel. Note: This is not checking 201 | # there is a distribution for this platform. This isn't a big deal, as channels are 202 | # typically split by platform. If this changes, we would need to re-consider how this 203 | # is implemented. 204 | 205 | # We temporarily workaround the index containing the channel information in the key. 206 | # We should deal with this properly though. 
207 | index = {meta['fn']: meta for meta in index.values()} 208 | 209 | for recipe_pair in recipes: 210 | meta, dist_location = recipe_pair 211 | if meta.pkg_fn() in index: 212 | recipe_pair[1] = index[meta.pkg_fn()]['channel'] 213 | if self.inspection_directories: 214 | for directory in self.inspection_directories: 215 | files = glob.glob(os.path.join(directory, '*.tar.bz2')) 216 | fnames = [os.path.basename(fpath) for fpath in files] 217 | for recipe_pair in recipes: 218 | meta, dist_location = recipe_pair 219 | if dist_location is None and meta.pkg_fn() in fnames: 220 | recipe_pair[1] = directory 221 | return recipes 222 | 223 | def build(self, meta, config): 224 | print('Building ', meta.dist()) 225 | config = meta.vn_context(config=config) 226 | try: 227 | output_paths = conda_build.api.build(meta.meta, config=config) 228 | except AttributeError: 229 | with meta.vn_context(): 230 | output_paths = bldpkg_path(build.build(meta.meta)) 231 | if isinstance(output_paths, string_types): 232 | output_paths = [output_paths] 233 | return output_paths 234 | 235 | def compute_build_distros(self, index, recipes, config): 236 | """ 237 | Given the recipes which are to be built, return a list of BakedDistribution instances 238 | for all distributions that should be built. 239 | 240 | """ 241 | all_distros = [] 242 | index = copy_index(index) 243 | 244 | for meta in recipes: 245 | distros = resolved_distribution.ResolvedDistribution.resolve_all(meta, index, 246 | self.matrix_conditions) 247 | cases = [distro.special_versions for distro in distros] 248 | cases = list(vn_matrix.keep_top_n_major_versions(cases, n=self.matrix_max_n_major_minor_versions[0])) 249 | cases = list(vn_matrix.keep_top_n_minor_versions(cases, n=self.matrix_max_n_major_minor_versions[1])) 250 | for distro in distros: 251 | if distro.special_versions in cases: 252 | # Update the index with this distribution so that it can be considered by the version matrix. 
253 | if distro.pkg_fn() not in index: 254 | index[distro.pkg_fn()] = distro.info_index() 255 | all_distros.append(distro) 256 | 257 | return all_distros 258 | 259 | def main(self): 260 | index = get_index(use_cache=False) 261 | if hasattr(conda_build, 'api'): 262 | build_config = conda_build.api.Config() 263 | else: 264 | build_config = conda_build.config.config 265 | 266 | # If it is not already defined with environment variables, we set the CONDA_NPY 267 | # to the latest possible value. Since we compute a build matrix anyway, this is 268 | # useful to prevent conda-build bailing if the recipe depends on it (e.g. 269 | # ``numpy x.x``), and to ensure that recipes that don't care which version they want 270 | # at build/test time get a sensible version. 271 | if build_config.CONDA_NPY is None: 272 | resolver = Resolve(index) 273 | npy = resolver.get_pkgs('numpy', emptyok=True) 274 | if npy: 275 | version = ''.join(max(npy).version.split('.')[:2]) 276 | build_config.CONDA_NPY = version 277 | 278 | recipe_metas = self.fetch_all_metas(build_config) 279 | print('Resolving distributions from {} recipes... 
'.format(len(recipe_metas))) 280 | 281 | all_distros = self.compute_build_distros(index, recipe_metas, build_config) 282 | print('Computed that there are {} distributions from the {} ' 283 | 'recipes:'.format(len(all_distros), len(recipe_metas))) 284 | recipes_and_dist_locn = self.find_existing_built_dists(all_distros) 285 | 286 | print('Resolved dependencies, will be built in the following order: \n\t{}'.format( 287 | '\n\t'.join(['{} (will be built: {})'.format(meta.dist(), dist_locn is None) 288 | for meta, dist_locn in recipes_and_dist_locn]))) 289 | 290 | if self.dry_run: 291 | print('Dry run: no distributions built') 292 | return 293 | 294 | for meta, built_dist_location in recipes_and_dist_locn: 295 | was_built = built_dist_location is None 296 | if was_built: 297 | built_dist_location = self.build(meta, build_config) 298 | self.post_build(meta, built_dist_location, was_built, 299 | config=build_config) 300 | 301 | def post_build(self, meta, built_dist_location, was_built, config=None): 302 | """ 303 | The post build phase occurs whether or not a build has actually taken place. 304 | It is the point at which a distribution is transfered to the desired artefact 305 | location. 306 | 307 | Parameters 308 | ---------- 309 | meta : MetaData 310 | The distribution for which we are running the post-build phase 311 | build_dist_location : str 312 | The location of the built .tar.bz2 file for the given meta. 313 | config 314 | The conda-build configuration for the build. 
315 | 316 | """ 317 | for artefact_destination in self.artefact_destinations: 318 | artefact_destination.make_available(meta, built_dist_location, was_built, 319 | config=config) 320 | -------------------------------------------------------------------------------- /conda_build_all/cli.py: -------------------------------------------------------------------------------- 1 | import argparse 2 | import logging 3 | import os 4 | 5 | import conda_build.config 6 | 7 | import conda_build_all 8 | import conda_build_all.builder 9 | import conda_build_all.artefact_destination as artefact_dest 10 | 11 | 12 | def main(): 13 | parser = argparse.ArgumentParser( 14 | description='Build many conda distributions.') 15 | 16 | parser.add_argument('--version', action='version', 17 | version=conda_build_all.__version__, 18 | help="Show conda-build-all's version, and exit.") 19 | 20 | parser.add_argument('recipes', 21 | help='The folder containing conda recipes to build.') 22 | parser.add_argument('--inspect-channels', nargs='*', 23 | help=('Skip a build if the equivalent disribution is already ' 24 | 'available in the specified channel.')) 25 | parser.add_argument('--inspect-directories', nargs='*', 26 | help=('Skip a build if the equivalent disribution is already ' 27 | 'available in the specified directory.')) 28 | parser.add_argument('--no-inspect-conda-bld-directory', default=False, 29 | action='store_true', 30 | help='Do not add the conda-build directory to the inspection list.') 31 | parser.add_argument('--dry-run', default=False, 32 | action='store_true', 33 | help='Skip all builds, just list what distribution would be built.') 34 | 35 | parser.add_argument('--artefact-directory', 36 | help='A directory for any newly built distributions to be placed.') 37 | parser.add_argument('--upload-channels', nargs='*', default=[], 38 | help=('The channel(s) to upload built distributions to (requires ' 39 | 'BINSTAR_TOKEN envioronment variable).')) 40 | 41 | 
parser.add_argument("--matrix-conditions", nargs='*', default=[], 42 | help=("Extra conditions for computing the build matrix " 43 | "(e.g. 'python 2.7.*'). Note, your build matrix may also be being " 44 | "limited by --matrix-max-n-major-versions and " 45 | "--matrix-max-n-minor-versions.")) 46 | parser.add_argument("--matrix-max-n-major-versions", default=2, type=int, 47 | help=("When computing the build matrix, limit to the latest n major " 48 | "versions (0 makes this unlimited). For example, if Python 1, " 49 | "2 and Python 3 are resolved by the recipe and associated " 50 | "matrix conditions, only the latest N major version will be " 51 | "used for the build matrix. (default: 2) ")) 52 | parser.add_argument("--matrix-max-n-minor-versions", default=2, type=int, 53 | help=("When computing the build matrix, limit to the latest n minor " 54 | "versions (0 makes this unlimited). Note that this does not " 55 | "limit the number of major versions (see also " 56 | "matrix-max-n-major-version). For example, if Python 2 and " 57 | "Python 3 are resolved by the recipe and associated matrix " 58 | "conditions, a total of Nx2 builds will be identified. 
# -*- coding: utf-8 -*-
# Compatibility shim: expose a single, stable interface over the conda
# internals that moved/changed between conda 4.2 and conda 4.3.
from __future__ import absolute_import, division, print_function

from conda import __version__ as CONDA_VERSION

# e.g. "4.3.11" -> (4, 3); used below to select the matching import set.
CONDA_VERSION_MAJOR_MINOR = tuple(int(x) for x in CONDA_VERSION.split('.')[:2])

if (4, 3) <= CONDA_VERSION_MAJOR_MINOR < (4, 4):
    from conda.lock import Locked
    from conda.exports import get_index
    from conda.exports import subdir
    from conda.exports import MatchSpec
    from conda.exports import Unsatisfiable
    from conda.exports import NoPackagesFound
    from conda.exports import Resolve
    from conda.exports import string_types
    from conda.models.dist import Dist as _Dist

    def get_key(dist_or_filename):
        # conda 4.3 indexes are keyed by Dist objects, which are usable
        # directly as keys.
        return dist_or_filename

    def copy_index(index):
        # Rebuild the mapping with Dist keys so mutating the copy cannot
        # affect the original index.
        return {_Dist(key): index[key] for key in index.keys()}

    def ensure_dist_or_dict(fn):
        # Convert a plain filename into the Dist object conda 4.3 expects.
        return _Dist.from_string(fn)

    # Side-effect imports/calls: trigger creation of conda's log handlers.
    from conda.console import setup_verbose_handlers
    setup_verbose_handlers()
    from conda.gateways.logging import initialize_logging
    initialize_logging()

elif (4, 2) <= CONDA_VERSION_MAJOR_MINOR < (4, 3):
    from conda.lock import Locked
    from conda.exports import get_index
    from conda.exports import subdir
    from conda.exports import MatchSpec
    from conda.exports import Unsatisfiable
    from conda.exports import NoPackagesFound
    from conda.exports import Resolve
    from conda.exports import string_types

    def get_key(dist_or_filename):
        # conda 4.2 indexes are keyed by the record's filename.
        return dist_or_filename.fn

    def copy_index(index):
        # A shallow copy suffices for conda 4.2's plain dict index.
        index = index.copy()
        return index

    def ensure_dist_or_dict(fn):
        # conda 4.2 works directly with filenames; nothing to convert.
        return fn

    # We need to import conda.fetch and conda.resolve to trigger the
    # creation of the loggers.
    import conda.fetch
    import conda.resolve

else:
    # Anything outside the 4.2 <= version < 4.4 window is unsupported.
    raise NotImplementedError("CONDA_VERSION: %s CONDA_VERSION_MAJOR_MINOR: %s"
                              % (CONDA_VERSION, str(CONDA_VERSION_MAJOR_MINOR)))


# Re-bind the imported names at module level so they read as this module's
# public interface (and are not flagged as unused imports).
subdir = subdir
Locked = Locked
Resolve, get_index = Resolve, get_index
MatchSpec = MatchSpec
Unsatisfiable, NoPackagesFound = Unsatisfiable, NoPackagesFound
string_types = string_types
# Helpers for querying anaconda.org (binstar) about already-built
# distributions. NOTE: This module has no unit tests.

import binstar_client
# NOTE(review): bldpkg_path appears unused by the functions below - confirm
# against the rest of the module before removing.
from conda_build.build import bldpkg_path
from .conda_interface import get_index, subdir


def distribution_exists(binstar_cli, owner, metadata):
    """
    Determine whether a distribution exists.

    This does not check specific channels - it is either on binstar or it is not.

    Parameters
    ----------
    binstar_cli
        An authenticated binstar_client session.
    owner : str
        The anaconda.org user/organisation owning the package.
    metadata : MetaData
        The distribution to look for.

    """
    # Distributions are stored under their platform subdir, e.g.
    # "linux-64/pkg-1.0-0.tar.bz2".
    fname = '{}/{}.tar.bz2'.format(subdir, metadata.dist())
    try:
        # Only whether this call raises matters; the return value is unused.
        r = binstar_cli.distribution(owner, metadata.name(), metadata.version(),
                                     fname)
        exists = True
    except binstar_client.NotFound:
        exists = False
    return exists


def distribution_exists_on_channel(binstar_cli, owner, metadata, channel='main'):
    """
    Determine whether a distribution exists on a specific channel.

    Note from @pelson: As far as I can see, there is no easy way to do this on binstar.

    """
    fname = '{}.tar.bz2'.format(metadata.dist())
    channel_url = '/'.join([owner, 'label', channel])

    # Fetch the channel's own package index rather than asking the binstar
    # API (see docstring note above).
    distributions_on_channel = get_index([channel_url],
                                         prepend=False, use_cache=False)

    try:
        # The same filename can exist for several platforms, so the record's
        # subdir must match ours.
        on_channel = (distributions_on_channel[fname]['subdir'] ==
                      subdir)
    except KeyError:
        on_channel = False
    return on_channel
def resolve_dependencies(package_dependencies):
    """
    Given a dictionary mapping a package to its dependencies, return a
    generator of packages to install, sorted by the required install
    order.

    >>> deps = resolve_dependencies({'a': ['b', 'c'], 'b': ['c'],
    ...                              'c': ['d'], 'd': []})
    >>> list(deps)
    ['d', 'c', 'b', 'a']

    Raises
    ------
    ValueError
        If a dependency is not itself a key of ``package_dependencies``,
        or if the dependencies cannot be resolved (e.g. a cycle).

    """
    remaining_dependencies = package_dependencies.copy()
    completed_packages = []

    # Repeatedly sweep the unresolved packages (in sorted order, so the
    # output is deterministic), emitting any package whose dependencies
    # have all been satisfied. An empty input simply yields nothing.
    while remaining_dependencies:
        progress_made = False
        for package, deps in sorted(remaining_dependencies.copy().items()):
            if all(dependency in completed_packages for dependency in deps):
                completed_packages.append(package)
                remaining_dependencies.pop(package)
                progress_made = True
                yield package
            else:
                # Ensure that all of this package's dependencies were
                # defined as packages, otherwise we can never succeed.
                # (Previously only the last package of each sweep was
                # checked, because the for/else ran unconditionally.)
                for dependency in deps:
                    if dependency not in package_dependencies:
                        msg = ('The package {} depends on {}, but it was not '
                               'part of the package_dependencies dictionary.'
                               ''.format(package, dependency))
                        raise ValueError(msg)

        # A full sweep with no progress means the remaining packages form a
        # cycle; raise immediately instead of spinning through a failsafe
        # iteration cap.
        if not progress_made:
            raise ValueError('Dependencies could not be resolved. '
                             'Remaining dependencies: {}'
                             ''.format(remaining_dependencies))
# Methods of ResolvedDistribution (the class statement and its docstring are
# above this chunk). A ResolvedDistribution pins the special-case versions
# (e.g. CONDA_PY, CONDA_NPY) for an underlying conda-build MetaData.

def __init__(self, meta, special_versions=()):
    # meta : conda_build.metadata.MetaData -- the package being resolved.
    # special_versions : iterable of (name, version) pairs,
    #   e.g. (['python', '27'],).
    self.meta = meta
    self.special_versions = special_versions

def __repr__(self):
    # NOTE(review): the repr still says "BakedDistribution", apparently a
    # leftover from an earlier class name - confirm before relying on it.
    return 'BakedDistribution({}, {})'.format(self.meta,
                                              self.special_versions)

def __str__(self):
    # The distribution string, e.g. 'a-1.0-0' (dist() is delegated to the
    # underlying meta via __getattr__ below).
    return self.dist()

def vn_context(self, config=None):
    # The version-matrix context (or config) with this distribution's
    # special-case versions applied.
    return setup_vn_mtx_case(self.special_versions, config)

def __getattr__(self, name):
    # Delegate attribute access to the underlying MetaData, but re-parse the
    # recipe with this distribution's special-case versions applied first so
    # that version-dependent attributes are correct.
    if hasattr(self.meta, 'config'):
        # Newer conda-build: versions are carried on a Config object.
        config = setup_vn_mtx_case(self.special_versions,
                                   config=self.meta.config)
        self.meta.parse_again(config)
    else:
        # Older conda-build: versions are global state, so parse inside the
        # context manager.
        with setup_vn_mtx_case(self.special_versions):
            self.meta.parse_again()
    result = getattr(self.meta, name)

    # Wrap any callable such that it is called within the appropriate
    # environment.
    # callable exists in python 2.* and >=3.2
    if callable(result):
        orig_result = result
        import functools
        @functools.wraps(result)
        def with_vn_mtx_setup(*args, **kwargs):
            if hasattr(self.meta, 'config'):
                config = setup_vn_mtx_case(self.special_versions,
                                           config=self.meta.config)
                self.meta.parse_again(config=config)
                return orig_result(*args, **kwargs)
            else:
                with setup_vn_mtx_case(self.special_versions):
                    self.meta.parse_again()
                    return orig_result(*args, **kwargs)
        result = with_vn_mtx_setup
    return result

@classmethod
def resolve_all(cls, meta, index=None, extra_conditions=None):
    """
    Given a package, return a list of ResolvedDistributions, one for each
    possible (necessary) version permutation.

    Parameters
    ----------
    meta : conda_build.metadata.MetaData
        The recipe to resolve.
    index : dict, optional
        A conda package index. Fetched via get_index() when omitted.
    extra_conditions : iterable, optional
        Extra specs (e.g. 'python 2.7.*') used to filter the computed
        version matrix.

    """
    if index is None:
        with vn_matrix.override_conda_logging('WARN'):
            index = get_index()

    # Sorted so the resulting build order is deterministic.
    cases = sorted(vn_matrix.special_case_version_matrix(meta, index))

    if extra_conditions:
        cases = list(vn_matrix.filter_cases(cases, extra_conditions))
    result = []
    for case in cases:
        dist = cls(meta, case)
        # Distributions whose recipe resolves to "skip" are dropped.
        if not dist.skip():
            result.append(dist)
    return result
class RecipeCreatingUnit(unittest.TestCase):
    """
    Base test case providing a dummy package index plus helpers for writing
    throw-away conda recipes into self-cleaning temporary directories.

    """
    def setUp(self):
        self.index = DummyIndex()
        self.directories_to_remove = []
        # The root under which write_meta() creates per-recipe directories.
        self.recipes_root_dir = self.tmp_dir(prefix='recipes')

    def tearDown(self):
        # Remove every directory registered through tmp_dir().
        for directory_path in self.directories_to_remove:
            shutil.rmtree(directory_path)

    def tmp_dir(self, **mkdtemp_kwargs):
        """Create a temporary directory and schedule it for removal."""
        new_directory = tempfile.mkdtemp(**mkdtemp_kwargs)
        self.directories_to_remove.append(new_directory)
        return new_directory

    def write_meta(self, recipe_dir_name, spec):
        """Write a (dedented) meta.yaml under the recipes root and load it."""
        recipe_dir = os.path.join(self.recipes_root_dir, recipe_dir_name)
        if not os.path.exists(recipe_dir):
            os.makedirs(recipe_dir)
        meta_path = os.path.join(recipe_dir, 'meta.yaml')
        with open(meta_path, 'w') as fh:
            fh.write(textwrap.dedent(spec))
        return MetaData(os.path.dirname(meta_path))
def test_numpy_dep(self):
    # A recipe using the "numpy x.x" build-time pin should produce a build
    # string encoding both the numpy and python versions.
    pkg1 = self.write_meta('pkg1', """
                package:
                    name: pkg1
                    version: 1.0
                requirements:
                    build:
                        - python
                        - numpy x.x
                    run:
                        - python
                        - numpy x.x
                """)
    pkg1_resolved = ResolvedDistribution(pkg1, (['python', '3.5'], ['numpy', '1.11']))
    builder = Builder(None, None, None, None, None)
    # conda-build >= 2 exposes an api module; otherwise fall back to the
    # legacy global config.
    if hasattr(conda_build, 'api'):
        rs = builder.build(pkg1_resolved, conda_build.api.Config())
    else:
        rs = builder.build(pkg1_resolved, conda_build.config.config)
    for r in rs:
        self.assertTrue(os.path.exists(r))
        self.assertEqual(os.path.abspath(r), r)
        # e.g. pkg1-1.0-np111py35_0.tar.bz2 (the hash fragment is optional,
        # only present on newer conda-build).
        self.assertTrue(bool(re.match('pkg1-1.0-np111py35(h[0-9a-f]{7})?_0.tar.bz2',
                                      os.path.basename(r))))
# Methods of Test__find_existing_built_dists (the class statement and its
# make_channel helper are above this chunk).

def setUp(self):
    # A small family of metas: two versions of "a", a build-number-1 variant
    # of a-2.0, and a "b" that depends on a 2.*.
    super(Test__find_existing_built_dists, self).setUp()
    self.metas = {'a1': self.write_meta('a vn1', """
                package:
                    name: a
                    version: 1.0
                """),
                  'a2': self.write_meta('a vn2', """
                package:
                    name: a
                    version: 2.0
                """),
                  'a2_1': self.write_meta('a vn2 bld1', """
                package:
                    name: a
                    version: 2.0
                build:
                    number: 1
                """),
                  'b2': self.write_meta('b vn2', """
                package:
                    name: b
                    version: 2.0
                requirements:
                    run:
                        - a 2.*
                """)}

def test_exist_on_channel(self):
    # Both a-1.0 and a-2.0 were published, so both should be located on the
    # file:// channel (under its platform subdir).
    channel = self.make_channel(self.metas.values())
    channel_url = 'file://' + channel
    builder = Builder('.', [channel_url], [], [], [])
    expected_channel = '{}/{}'.format(channel_url, self.metas['a1'].info_index()['subdir'])
    existing = builder.find_existing_built_dists([self.metas['a1'], self.metas['a2']])
    dists = [(meta.dist(), locn) for meta, locn in existing]
    self.assertEqual(dists, [('a-1.0-0', expected_channel),
                             ('a-2.0-0', expected_channel)])

def test_full_version_exists_on_channel(self):
    # Only a vn2.0 build 1 is available, we want to assert that nothing is
    # found for a1 and a2 build 0 (the build number has to match as well).
    channel = self.make_channel([self.metas['a2_1']])
    builder = Builder('.', ['file://' + channel], [], [], [])
    existing = builder.find_existing_built_dists([self.metas['a1'], self.metas['a2']])
    self.assertEqual([(meta.dist(), locn) for meta, locn in existing],
                     [('a-1.0-0', None), ('a-2.0-0', None)])
class Test_compute_build_distros(RecipeCreatingUnit):
    """Tests for Builder.compute_build_distros."""

    def test_added_to_index(self):
        # Four python recipes, a numpy recipe, and a package depending on
        # both; the dependents should be expanded across the allowed python
        # versions without the supplied index being mutated.
        metas = [self.write_meta('py2k', """
                package:
                    name: python
                    version: 2.7.0
                """),
                 self.write_meta('py33', """
                package:
                    name: python
                    version: 3.3.0
                """),
                 self.write_meta('py34', """
                package:
                    name: python
                    version: 3.4.24
                """),
                 self.write_meta('py35', """
                package:
                    name: python
                    version: 3.5.2
                build:
                    number: 1
                """),
                 self.write_meta('np110', """
                package:
                    name: numpy
                    version: 1.10
                requirements:
                    build:
                        - python
                    run:
                        - python
                """),
                 self.write_meta('py_package', """
                package:
                    name: my_py_package
                    version: 2.0
                requirements:
                    build:
                        - python
                    run:
                        - python
                        - numpy
                """)]
        builder = Builder(None, None, None, None, None)
        index = {}
        if hasattr(conda_build, 'api'):
            config = conda_build.api.Config()
        else:
            config = conda_build.config.config
        distributions = builder.compute_build_distros(index, metas, config)
        # All python recipes build, but the dependent packages are only
        # expanded for py27/py34/py35 (no py33 variants).
        expected = ['python-2.7.0-0', 'python-3.3.0-0', 'python-3.4.24-0',
                    'python-3.5.2-1',
                    'numpy-1.10-py27_0', 'numpy-1.10-py34_0', 'numpy-1.10-py35_0',
                    'my_py_package-2.0-py27_0', 'my_py_package-2.0-py34_0',
                    'my_py_package-2.0-py35_0']
        self.assertEqual([meta.dist() for meta in distributions], expected)
        # Check that we didn't change the index.
        self.assertEqual(index, {})
@contextmanager
def configure_conda(self):
    """
    Yield the path of a minimal condarc that confines conda-build to this
    test's temporary root directory (no channels, no implicit pip).

    """
    rc_path = os.path.join(self.conda_bld_root, 'condarc')
    rc_content = textwrap.dedent("""
        channels: []
        add_pip_as_python_dependency: False
        conda-build:
            root-dir: {}
        """.format(self.conda_bld_root))
    with open(rc_path, 'w') as fh:
        fh.write(rc_content)
    yield rc_path
79 | with self.configure_conda() as rc_path: 80 | environ['CONDARC'] = rc_path 81 | p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, 82 | env=environ, bufsize=0) 83 | line = p.stdout.readline() 84 | while line: 85 | print(str(line.rstrip().decode('utf8'))) 86 | line = p.stdout.readline() 87 | # Wait for the return code. 88 | p.communicate() 89 | self.assertEqual(p.returncode, 0, 'Exit code was not 0 (got {})'.format(p.returncode)) 90 | 91 | def test(self): 92 | # Build a recipe. 93 | py2 = self.write_meta('py1', """ 94 | package: 95 | name: python 96 | version: 1.2.3 97 | """) 98 | py2 = self.write_meta('py2', """ 99 | package: 100 | name: python 101 | version: 2.1.10 102 | """) 103 | a = self.write_meta('a', """ 104 | package: 105 | name: a 106 | version: 3.1.4 107 | requirements: 108 | build: 109 | - python 110 | run: 111 | - python 112 | """) 113 | 114 | a_py12 = ResolvedDistribution(a, (('python', '12', ), )) 115 | a_py21 = ResolvedDistribution(a, (('python', '21', ), )) 116 | a_py99 = ResolvedDistribution(a, (('python', '99', ), )) 117 | 118 | testing_channel = '{}/channel/{}'.format(OWNER, 'testing') 119 | self.call([self.recipes_root_dir, '--upload-channel', testing_channel]) 120 | 121 | # Check that we have started on the right footing - the distribution should be on testing, 122 | # but not on main. 123 | self.assertTrue(distribution_exists_on_channel(CLIENT, OWNER, py2, channel='testing')) 124 | self.assertFalse(distribution_exists_on_channel(CLIENT, OWNER, py2, channel='main')) 125 | 126 | # Check that we've had a py21 and py12, but not a py99 for a. 
127 | self.assertTrue(distribution_exists_on_channel(CLIENT, OWNER, a_py12, channel='testing')) 128 | self.assertTrue(distribution_exists_on_channel(CLIENT, OWNER, a_py21, channel='testing')) 129 | self.assertFalse(distribution_exists_on_channel(CLIENT, OWNER, a_py99, channel='testing')) 130 | 131 | # Remove the built distribution, re-run, and assert that we didn't bother re-building. 132 | dist_path = os.path.join(self.conda_bld_root, subdir, a_py21.pkg_fn()) 133 | self.assertTrue(os.path.exists(dist_path)) 134 | os.remove(dist_path) 135 | self.call([self.recipes_root_dir, '--inspect-channel', testing_channel, '--upload-channel', testing_channel]) 136 | self.assertFalse(os.path.exists(dist_path)) 137 | 138 | # Now put a condition in. In this case, only build dists for py<2 139 | CLIENT.remove_dist(OWNER, a_py21.name(), a_py21.version(), '{}/{}'.format(subdir, a_py21.pkg_fn())) 140 | self.assertFalse(distribution_exists_on_channel(CLIENT, OWNER, a_py21, channel='testing')) 141 | self.call([self.recipes_root_dir, '--inspect-channel', testing_channel, '--upload-channel', testing_channel, 142 | '--matrix-condition', 'python <2']) 143 | self.assertFalse(distribution_exists_on_channel(CLIENT, OWNER, a_py21, channel='testing')) 144 | self.assertFalse(os.path.exists(dist_path)) 145 | 146 | # Without the condition, we should be re-building the distribution 147 | self.call([self.recipes_root_dir, '--inspect-channel', testing_channel, '--upload-channel', testing_channel]) 148 | self.assertTrue(os.path.exists(dist_path)) 149 | self.assertTrue(distribution_exists_on_channel(CLIENT, OWNER, a_py21, channel='testing')) 150 | 151 | 152 | if __name__ == '__main__': 153 | unittest.main() 154 | -------------------------------------------------------------------------------- /conda_build_all/tests/integration/test_inspect_binstar.py: -------------------------------------------------------------------------------- 1 | import os 2 | import unittest 3 | 4 | from binstar_client.utils 
import get_binstar 5 | from argparse import Namespace 6 | try: 7 | import conda_build.api 8 | except ImportError: 9 | import conda_build.config 10 | 11 | from conda_build_all.build import build, upload 12 | from conda_build_all.inspect_binstar import (distribution_exists, 13 | distribution_exists_on_channel, 14 | add_distribution_to_channel, 15 | copy_distribution_to_owner) 16 | from conda_build_all.tests.integration.test_builder import RecipeCreatingUnit 17 | 18 | 19 | 20 | def clear_binstar(cli, owner): 21 | """ 22 | Empty all distributions for a user. 23 | 24 | The "rm -rf *" of the binstar world. 25 | 26 | """ 27 | for channel in cli.list_channels(owner): 28 | cli.remove_channel(owner, channel) 29 | 30 | for package in cli.user_packages(owner): 31 | cli.remove_package(owner, package['name']) 32 | 33 | 34 | OWNER = 'Obvious-ci-tests' 35 | CLIENT = get_binstar(Namespace(token=os.environ.get('BINSTAR_TOKEN', None), site=None)) 36 | 37 | 38 | @unittest.skipIf(os.environ.get('CONDA_BUILD_ALL_TEST_ANACONDA_CLOUD', False) != '1', 39 | "Not testing real binstar usage as the " 40 | "CONDA_BUILD_ALL_TEST_ANACONDA_CLOUD environment variable is not " 41 | "set to '1'.") 42 | class Test(RecipeCreatingUnit): 43 | # Note: These tests upload things to anaconda.org and are completely global. That is, 44 | # if somebody else in the world is running the tests at the same time anywhere on the planet, 45 | # they will behave in very strange ways (highly likely to fail). 46 | def setUp(self): 47 | clear_binstar(CLIENT, OWNER) 48 | super(Test, self).setUp() 49 | 50 | def tearDown(self): 51 | clear_binstar(CLIENT, OWNER) 52 | super(Test, self).tearDown() 53 | 54 | def test_distribution_exists(self): 55 | # Build a recipe. 
56 | meta = self.write_meta('test_recipe_1', """ 57 | package: 58 | name: test_recipe_1 59 | version: 'determined_at_build_time' 60 | build: 61 | script: echo "v0.1.0.dev1" > __conda_version__.txt 62 | """) 63 | meta = build(meta) 64 | if hasattr(conda_build, 'api'): 65 | build_config = conda_build.api.Config() 66 | else: 67 | build_config = conda_build.config.config 68 | 69 | # Check distribution exists returns false when there is no distribution. 70 | self.assertFalse(distribution_exists(CLIENT, OWNER, meta)) 71 | 72 | # upload the distribution 73 | upload(CLIENT, meta, OWNER, channels=['testing'], config=build_config) 74 | 75 | # Check the distribution exists. Notice there is no channel being supplied here. 76 | self.assertTrue(distribution_exists(CLIENT, OWNER, meta)) 77 | 78 | # Check the distribution is on testing but not on main. 79 | self.assertTrue(distribution_exists_on_channel(CLIENT, OWNER, meta, channel='testing')) 80 | self.assertFalse(distribution_exists_on_channel(CLIENT, OWNER, meta, channel='main')) 81 | 82 | add_distribution_to_channel(CLIENT, OWNER, meta, channel='main') 83 | # Check that the distribution has been added. 
84 | self.assertTrue(distribution_exists_on_channel(CLIENT, OWNER, meta, channel='main')) 85 | 86 | # Add the meta for a recipe known to exist on conda-forge 87 | meta2 = self.write_meta('conda_build_all', """ 88 | package: 89 | name: conda-build-all 90 | version: 0.12.0 91 | """) 92 | copy_distribution_to_owner(CLIENT, 'conda-forge', OWNER, meta2, channel='main') 93 | self.assertTrue(distribution_exists_on_channel(CLIENT, OWNER, meta2)) 94 | 95 | 96 | if __name__ == '__main__': 97 | unittest.main() 98 | -------------------------------------------------------------------------------- /conda_build_all/tests/unit/__init__.py: -------------------------------------------------------------------------------- 1 | import os 2 | import tempfile 3 | import textwrap 4 | import shutil 5 | import unittest 6 | 7 | from conda_build.metadata import MetaData 8 | 9 | from conda_build_all.tests.unit.dummy_index import DummyIndex 10 | 11 | 12 | class RecipeCreatingUnit(unittest.TestCase): 13 | def setUp(self): 14 | self.index = DummyIndex() 15 | self.recipe_dir = tempfile.mkdtemp(prefix='tmp_recipe_') 16 | 17 | def tearDown(self): 18 | shutil.rmtree(self.recipe_dir) 19 | 20 | def write_meta(self, spec): 21 | with open(os.path.join(self.recipe_dir, 'meta.yaml'), 'w') as fh: 22 | fh.write(textwrap.dedent(spec)) 23 | return MetaData(os.path.join(self.recipe_dir)) 24 | -------------------------------------------------------------------------------- /conda_build_all/tests/unit/dummy_index.py: -------------------------------------------------------------------------------- 1 | import collections 2 | import os 3 | 4 | from conda_build.index import write_repodata 5 | try: 6 | import conda_build.api 7 | from conda_build.utils import get_lock 8 | extra_config = False 9 | except ImportError: 10 | import conda_build 11 | extra_config = True 12 | 13 | from conda_build_all.conda_interface import subdir 14 | 15 | _DummyPackage = collections.namedtuple('_DummyPackage', 16 | ['pkg_name', 
'build_deps', 17 | 'run_deps', 'vn']) 18 | 19 | 20 | class DummyPackage(_DummyPackage): 21 | def __new__(cls, name, build_deps=None, run_deps=None, version='0.0'): 22 | return super(DummyPackage, cls).__new__(cls, name, build_deps or (), 23 | run_deps or (), version) 24 | 25 | def name(self): 26 | return self.pkg_name 27 | 28 | def version(self): 29 | return self.vn 30 | 31 | def dist(self): 32 | return '{}-{}-{}'.format(self.name(), self.version(), '0') 33 | 34 | def get_value(self, item, default): 35 | if item == 'requirements/run': 36 | return self.run_deps 37 | elif item == 'requirements/build': 38 | return self.build_deps 39 | else: 40 | raise AttributeError(item) 41 | 42 | def __repr__(self): 43 | # For testing purposes, this is particularly convenient. 44 | return self.name() 45 | 46 | 47 | class DummyIndex(dict): 48 | def add_pkg(self, name, version, build_string='', 49 | depends=(), build_number='0', 50 | **extra_items): 51 | if build_string: 52 | build_string = '{}_{}'.format(build_string, build_number) 53 | else: 54 | build_string = build_number 55 | pkg_info = dict(name=name, version=version, build_number=build_number, 56 | build=build_string, subdir=subdir, 57 | depends=tuple(depends), **extra_items) 58 | self['{}-{}-{}.tar.bz2'.format(name, version, build_string)] = pkg_info 59 | 60 | def add_pkg_meta(self, meta): 61 | # Add a package given its MetaData instance. This may include a DummyPackage 62 | # instance in the future. 63 | if isinstance(meta, DummyPackage): 64 | raise NotImplementedError('') 65 | self['{}.tar.bz2'.format(meta.dist())] = meta.info_index() 66 | 67 | def write_to_channel(self, dest): 68 | # Write the index to a channel. Useful to get conda to read it back in again 69 | # using conda.api.get_index(). 
70 | channel_subdir = os.path.join(dest, subdir) 71 | if not os.path.exists(channel_subdir): 72 | os.mkdir(channel_subdir) 73 | if hasattr(conda_build, 'api'): 74 | lock = get_lock(channel_subdir) 75 | write_repodata({'packages': self, 'info': {}}, channel_subdir, lock, config=conda_build.api.Config()) 76 | else: 77 | write_repodata({'packages': self, 'info': {}}, channel_subdir) 78 | 79 | return channel_subdir 80 | 81 | -------------------------------------------------------------------------------- /conda_build_all/tests/unit/test_artefact_destination.py: -------------------------------------------------------------------------------- 1 | from argparse import Namespace 2 | from contextlib import contextmanager 3 | import logging 4 | try: 5 | from unittest import mock 6 | except ImportError: 7 | import mock 8 | import os 9 | import shutil 10 | import sys 11 | import tempfile 12 | import unittest 13 | 14 | 15 | from conda_build_all.tests.unit.dummy_index import DummyIndex, DummyPackage 16 | from conda_build_all.artefact_destination import (ArtefactDestination, 17 | AnacondaClientChannelDest, 18 | DirectoryDestination) 19 | import conda_build_all.artefact_destination 20 | 21 | 22 | class Test_AnacondaClientChannelDest(unittest.TestCase): 23 | # These tests make extensive use of mock to avoid the need to contact the 24 | # conda.anaconda.org server. 25 | # Integration tests which do use the server are available for inspect_binstar. 26 | def setUp(self): 27 | self.logger_patch = mock.patch('conda_build_all.artefact_destination.log') 28 | self.logger = self.logger_patch.start() 29 | 30 | def tearDown(self): 31 | self.logger_patch.stop() 32 | 33 | def _get_config(self): 34 | # Provide an object that will behave like a conda_build config object. 
35 | config = mock.Mock() 36 | config.bldpkgs_dir = mock.Mock(return_value='') 37 | return config 38 | 39 | @contextmanager 40 | def dist_exists_setup(self, on_owner, on_channel): 41 | dist_exists = mock.patch('conda_build_all.inspect_binstar.distribution_exists', return_value=on_owner) 42 | dist_exists_on_channel = mock.patch('conda_build_all.inspect_binstar.distribution_exists_on_channel', return_value=on_channel) 43 | with dist_exists: 44 | with dist_exists_on_channel: 45 | yield 46 | 47 | def test_not_already_available_not_just_built(self): 48 | client, owner, channel = [mock.sentinel.client, mock.sentinel.owner, 49 | mock.sentinel.channel] 50 | ad = AnacondaClientChannelDest(mock.sentinel.token, owner, channel) 51 | ad._cli = client 52 | meta = DummyPackage('a', '2.1.0') 53 | config = self._get_config() 54 | with self.dist_exists_setup(on_owner=True, on_channel=False): 55 | with mock.patch('conda_build_all.inspect_binstar.add_distribution_to_channel') as add_to_channel: 56 | ad.make_available(meta, mock.sentinel.dist_path, 57 | just_built=False, config=config) 58 | add_to_channel.assert_called_once_with(client, owner, meta, channel=channel) 59 | self.logger.info.assert_called_once_with('Adding existing a-0.0-0 to the sentinel.owner/sentinel.channel channel.') 60 | 61 | def test_not_already_available_just_built(self): 62 | client, owner, channel = [mock.sentinel.client, mock.sentinel.owner, 63 | mock.sentinel.channel] 64 | ad = AnacondaClientChannelDest(mock.sentinel.token, owner, channel) 65 | ad._cli = client 66 | meta = DummyPackage('a', '2.1.0') 67 | config = self._get_config() 68 | with self.dist_exists_setup(on_owner=False, on_channel=False): 69 | with mock.patch('conda_build_all.build.upload') as upload: 70 | ad.make_available(meta, mock.sentinel.dist_path, 71 | just_built=True, config=config) 72 | upload.assert_called_once_with(client, meta, owner, 73 | channels=[channel], config=config) 74 | self.logger.info.assert_called_once_with('Uploading a to the 
sentinel.channel channel.') 75 | 76 | def test_already_available_not_just_built(self): 77 | # Note, we exercise the use of get_binstar here too. 78 | 79 | client, owner, channel = [mock.sentinel.client, mock.sentinel.owner, 80 | mock.sentinel.channel] 81 | ad = AnacondaClientChannelDest(mock.sentinel.token, owner, channel) 82 | meta = DummyPackage('a', '2.1.0') 83 | config = self._get_config() 84 | with self.dist_exists_setup(on_owner=True, on_channel=True): 85 | with mock.patch('binstar_client.utils.get_binstar') as get_binstar: 86 | ad.make_available(meta, mock.sentinel.dist_path, 87 | just_built=False, config=config) 88 | get_binstar.assert_called_once_with(Namespace(site=None, token=mock.sentinel.token)) 89 | # Nothing happens, we just get a message. 90 | self.logger.info.assert_called_once_with('Nothing to be done for a - it is already on sentinel.owner/sentinel.channel.') 91 | 92 | def test_already_available_just_built(self): 93 | client, owner, channel = [mock.sentinel.client, mock.sentinel.owner, 94 | mock.sentinel.channel] 95 | ad = AnacondaClientChannelDest(mock.sentinel.token, owner, channel) 96 | ad._cli = client 97 | meta = DummyPackage('a', '2.1.0') 98 | config = self._get_config() 99 | with self.dist_exists_setup(on_owner=True, on_channel=True): 100 | ad.make_available(meta, mock.sentinel.dist_path, 101 | just_built=True, config=config) 102 | # Nothing happens, we just get a message. 
103 | self.logger.warn.assert_called_once_with("Assuming the distribution we've just built and the one on sentinel.owner/sentinel.channel are the same.") 104 | 105 | def test_already_available_elsewhere(self): 106 | client, owner, channel = [mock.sentinel.client, mock.sentinel.owner, 107 | mock.sentinel.channel] 108 | ad = AnacondaClientChannelDest(mock.sentinel.token, owner, channel) 109 | ad._cli = client 110 | meta = DummyPackage('a', '2.1.0') 111 | config = self._get_config() 112 | source_owner = 'fake_owner' 113 | # The osx-64 subdirectory at the end of the URL is not important to the test. 114 | for url in ['http://foo.bar/{}/osx-64/'.format(source_owner), 115 | 'https://foo.bar/wibble/{}/osx-64/'.format(source_owner), 116 | 'https://foo.bar/wibble/{}/osx-64'.format(source_owner)]: 117 | with self.dist_exists_setup(on_owner=False, on_channel=False): 118 | with mock.patch('conda_build_all.inspect_binstar.copy_distribution_to_owner') as copy: 119 | ad.make_available(meta, url, just_built=False, 120 | config=config) 121 | copy.assert_called_once_with(ad._cli, source_owner, owner, meta, channel=channel) 122 | 123 | def test_from_spec_owner(self): 124 | spec = 'testing' 125 | os.environ['BINSTAR_TOKEN'] = 'a test token' 126 | dest = AnacondaClientChannelDest.from_spec(spec) 127 | self.assertEqual(dest.token, 'a test token') 128 | self.assertEqual(dest.owner, 'testing') 129 | self.assertEqual(dest.channel, 'main') 130 | 131 | def test_from_spec_owner_and_channel(self): 132 | spec = 'testing_owner/channels/my_channel' 133 | os.environ['BINSTAR_TOKEN'] = 'a test token' 134 | dest = AnacondaClientChannelDest.from_spec(spec) 135 | self.assertEqual(dest.token, 'a test token') 136 | self.assertEqual(dest.owner, 'testing_owner') 137 | self.assertEqual(dest.channel, 'my_channel') 138 | 139 | 140 | class Test_DirectoryDestination(unittest.TestCase): 141 | def setUp(self): 142 | self.tmp_dir = tempfile.mkdtemp(prefix='recipes') 143 | self.dd = 
DirectoryDestination(self.tmp_dir) 144 | self.dummy_meta = mock.sentinel.dummy_meta 145 | self.dummy_path1 = mock.sentinel.dummy_path1 146 | self.dummy_path2 = mock.sentinel.dummy_path2 147 | 148 | def tearDown(self): 149 | shutil.rmtree(self.tmp_dir) 150 | 151 | def test_not_copying(self): 152 | with mock.patch('shutil.copy') as copy: 153 | self.dd.make_available(self.dummy_meta, 154 | self.dummy_path1, 155 | just_built=False) 156 | self.assertEqual(copy.call_count, 0) 157 | 158 | def test_copying(self): 159 | with mock.patch('shutil.copy') as copy: 160 | self.dd.make_available(self.dummy_meta, 161 | self.dummy_path1, 162 | just_built=True) 163 | copy.assert_called_once_with(self.dummy_path1, self.tmp_dir) 164 | 165 | def test_copying_multi(self): 166 | paths = (self.dummy_path1, self.dummy_path2) 167 | with mock.patch('shutil.copy') as copy: 168 | self.dd.make_available(self.dummy_meta, 169 | paths, 170 | just_built=True) 171 | calls = [mock.call(path, self.tmp_dir) for path in paths] 172 | copy.assert_has_calls(calls) 173 | 174 | 175 | if __name__ == '__main__': 176 | unittest.main() 177 | -------------------------------------------------------------------------------- /conda_build_all/tests/unit/test_builder.py: -------------------------------------------------------------------------------- 1 | import os 2 | import unittest 3 | 4 | try: 5 | import conda_build.api 6 | except ImportError: 7 | import conda_build.config 8 | 9 | from conda_build_all.builder import list_metas 10 | from conda_build_all.tests.integration.test_builder import RecipeCreatingUnit 11 | 12 | 13 | class Test_list_metas(RecipeCreatingUnit): 14 | def setUp(self): 15 | super(Test_list_metas, self).setUp() 16 | m1 = self.write_meta('m1', """ 17 | package: 18 | name: m1 19 | """) 20 | m2 = self.write_meta('.', """ 21 | package: 22 | name: m2 23 | """) 24 | m3 = self.write_meta('d1/d2/d3/meta3', """ 25 | package: 26 | name: m3 27 | """) 28 | m4 = self.write_meta('da1/da2/da3/meta4', """ 29 | 
package: 30 | name: m4 31 | """) 32 | 33 | def test_depth_0(self): 34 | metas = list_metas(self.recipes_root_dir, max_depth=0) 35 | names = [meta.name() for meta in metas] 36 | self.assertEqual(sorted(names), ['m1', 'm2', 'm3', 'm4']) 37 | 38 | def test_depth_m1(self): 39 | metas = list_metas(self.recipes_root_dir, max_depth=-1) 40 | names = [meta.name() for meta in metas] 41 | self.assertEqual(sorted(names), ['m1', 'm2', 'm3', 'm4']) 42 | 43 | def test_depth_1(self): 44 | metas = list_metas(self.recipes_root_dir, max_depth=1) 45 | names = [meta.name() for meta in metas] 46 | self.assertEqual(sorted(names), ['m2']) 47 | 48 | def test_depth_2(self): 49 | metas = list_metas(self.recipes_root_dir, max_depth=2) 50 | names = [meta.name() for meta in metas] 51 | self.assertEqual(sorted(names), ['m1', 'm2']) 52 | 53 | def test_default_depth(self): 54 | metas = list_metas(self.recipes_root_dir) 55 | names = [meta.name() for meta in metas] 56 | self.assertEqual(sorted(names), ['m1', 'm2', 'm3', 'm4']) 57 | 58 | def test_follow_symlink(self): 59 | link_dir = self.tmp_dir(prefix='recipes_through_links') 60 | os.symlink(os.path.join(self.recipes_root_dir, 'd1'), 61 | os.path.join(link_dir, 'd1')) 62 | os.symlink(os.path.join(self.recipes_root_dir, 'm1'), 63 | os.path.join(link_dir, 'm1')) 64 | metas = list_metas(link_dir) 65 | names = [meta.name() for meta in metas] 66 | self.assertEqual(sorted(names), ['m1', 'm3']) 67 | 68 | 69 | class Test_sort_dependency_order(RecipeCreatingUnit): 70 | def setUp(self): 71 | super(Test_sort_dependency_order, self).setUp() 72 | a = self.write_meta('a', """ 73 | package: 74 | name: a 75 | requirements: 76 | build: 77 | - c 78 | """) 79 | 80 | b = self.write_meta('b', """ 81 | package: 82 | name: b 83 | requirements: 84 | run: 85 | - a # [False] 86 | """) 87 | c = self.write_meta('c', """ 88 | package: 89 | name: c 90 | """) 91 | 92 | def test_order_dependent_selector(self): 93 | # If we listen to the selectors, we would get a different build 
order. 94 | # As a result of https://github.com/SciTools/conda-build-all/issues/30 95 | # we know that we either have to resolve all dependencies up-front, 96 | # or simply ignore all selectors when dealing with sort order (but 97 | # emphatically not when building!). 98 | if hasattr(conda_build, 'api'): 99 | config = conda_build.api.Config() 100 | else: 101 | config = conda_build.config.config 102 | 103 | metas = list_metas(self.recipes_root_dir) 104 | from conda_build_all.builder import sort_dependency_order 105 | names = [m.name() for m in sort_dependency_order(metas, config)] 106 | self.assertEqual(names, ['c', 'a', 'b']) 107 | 108 | 109 | if __name__ == '__main__': 110 | unittest.main() 111 | -------------------------------------------------------------------------------- /conda_build_all/tests/unit/test_order_deps.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | 3 | from conda_build_all.order_deps import resolve_dependencies 4 | 5 | 6 | class Test_resolve_dependencies(unittest.TestCase): 7 | def test_example(self): 8 | deps = resolve_dependencies({'a': ['b', 'c'], 'b': ['c'], 9 | 'c': ['d'], 'd': []}) 10 | self.assertEqual(list(deps), ['d', 'c', 'b', 'a']) 11 | 12 | def test_unresolvable(self): 13 | deps = resolve_dependencies({'a': 'b', 'b': 'a'}) 14 | with self.assertRaises(ValueError): 15 | list(deps) 16 | 17 | def test_missing_link(self): 18 | deps = resolve_dependencies({'a': 'b', 'c': 'd'}) 19 | with self.assertRaises(ValueError): 20 | list(deps) 21 | 22 | -------------------------------------------------------------------------------- /conda_build_all/tests/unit/test_resolved_distribution.py: -------------------------------------------------------------------------------- 1 | import os 2 | import shutil 3 | import tempfile 4 | import unittest 5 | import textwrap 6 | 7 | try: 8 | import conda_build.api 9 | except ImportError: 10 | import conda_build.config 11 | from conda_build.metadata import 
MetaData 12 | 13 | from conda_build_all.resolved_distribution import (ResolvedDistribution, 14 | setup_vn_mtx_case) 15 | from conda_build_all.tests.unit import RecipeCreatingUnit 16 | from conda_build_all.tests.unit.dummy_index import DummyIndex, DummyPackage 17 | 18 | 19 | class Test_BakedDistribution(RecipeCreatingUnit): 20 | # Tests cases where a recipe changes based on external 21 | # conditions, such as the definition of the PYTHON version. 22 | def test_py_version_selector(self): 23 | meta = self.write_meta(""" 24 | package: 25 | name: recipe_which_depends_on_py_version 26 | version: 3 # [py3k] 27 | version: 2 # [not py3k] 28 | """) 29 | dist1 = ResolvedDistribution(meta, (('python', '27', ), )) 30 | dist2 = ResolvedDistribution(meta, (('python', '35', ), )) 31 | 32 | self.assertEqual(dist1.version(), u'2') 33 | self.assertEqual(dist2.version(), u'3') 34 | 35 | def test_py_version_selector_skip(self): 36 | meta = self.write_meta(""" 37 | package: 38 | name: recipe_which_depends_on_py_version 39 | build: # [py35] 40 | skip: True # [py3k] 41 | """) 42 | dist1 = ResolvedDistribution(meta, (('python', '35', ), )) 43 | dist2 = ResolvedDistribution(meta, (('python', '34', ), )) 44 | 45 | self.assertEqual(dist1.skip(), True) 46 | self.assertEqual(dist2.skip(), False) 47 | 48 | 49 | class Test_BakedDistribution_resolve_all(RecipeCreatingUnit): 50 | def test_py_xx_version(self): 51 | meta = self.write_meta(""" 52 | package: 53 | name: recipe_which_depends_on_py_version 54 | version: 2 55 | requirements: 56 | build: 57 | - python >=2.7 58 | - numpy x.x 59 | run: 60 | - python x.x 61 | - numpy x.x 62 | """) 63 | self.index.add_pkg('python', '2.7.2') 64 | self.index.add_pkg('python', '2.6.2') 65 | self.index.add_pkg('python', '3.5.0') 66 | self.index.add_pkg('numpy', '1.8.0', depends=['python']) 67 | resolved = ResolvedDistribution.resolve_all(meta, self.index) 68 | ids = [dist.build_id() for dist in resolved] 69 | self.assertEqual(ids, ['np18py27_0', 'np18py35_0']) 70 | 
71 | def test_skip_build(self): 72 | meta = self.write_meta(""" 73 | package: 74 | name: recipe_which_depends_on_py_version 75 | version: 2 76 | build: # [py3k] 77 | skip: True # [py3k] 78 | requirements: 79 | build: 80 | - python 81 | run: 82 | - python 83 | """) 84 | self.index.add_pkg('python', '2.7.2') 85 | self.index.add_pkg('python', '2.6.2') 86 | self.index.add_pkg('python', '3.5.0') 87 | resolved = ResolvedDistribution.resolve_all(meta, self.index) 88 | ids = [dist.build_id() for dist in resolved] 89 | self.assertEqual(ids, ['py26_0', 'py27_0']) 90 | 91 | def test_extra_conditions(self): 92 | meta = self.write_meta(""" 93 | package: 94 | name: test_recipe 95 | requirements: 96 | build: 97 | - python 98 | run: 99 | - python 100 | """) 101 | self.index.add_pkg('python', '2.7.2') 102 | self.index.add_pkg('python', '2.6.2') 103 | self.index.add_pkg('python', '3.5.0') 104 | resolved = ResolvedDistribution.resolve_all(meta, self.index, 105 | extra_conditions=['python 2.6.*|>=3']) 106 | ids = [dist.build_id() for dist in resolved] 107 | self.assertEqual(ids, ['py26_0', 'py35_0']) 108 | 109 | 110 | class Test_setup_vn_mtx_case(unittest.TestCase): 111 | def test_perl_case(self): 112 | if hasattr(conda_build, 'api'): 113 | config = setup_vn_mtx_case([('perl', '9.10.11.12'), ('numpy', '1.23'), 114 | ('python', '2.7'), ('r-base', '4.5.6')], 115 | conda_build.api.Config()) 116 | self.assertEqual(config.CONDA_PERL, '9.10.11.12') 117 | self.assertEqual(config.CONDA_NPY, 123) 118 | self.assertEqual(config.CONDA_PY, 27) 119 | self.assertEqual(config.CONDA_R, '4.5.6') 120 | else: 121 | with setup_vn_mtx_case([('perl', '9.10.11.12'), ('numpy', '1.23'), 122 | ('python', '2.7'), ('r-base', '4.5.6')]): 123 | config = conda_build.config.config 124 | self.assertEqual(config.CONDA_PERL, '9.10.11.12') 125 | self.assertEqual(config.CONDA_NPY, 123) 126 | self.assertEqual(config.CONDA_PY, 27) 127 | self.assertEqual(config.CONDA_R, '4.5.6') 128 | 129 | 130 | 131 | 132 | if __name__ == 
'__main__': 133 | unittest.main() 134 | -------------------------------------------------------------------------------- /conda_build_all/tests/unit/test_version_matrix.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | 3 | from conda_build_all.conda_interface import MatchSpec 4 | 5 | from conda_build_all.version_matrix import (parse_specifications, 6 | special_case_version_matrix, 7 | filter_cases, 8 | keep_top_n_major_versions, 9 | keep_top_n_minor_versions) 10 | from conda_build_all.tests.unit.dummy_index import DummyPackage, DummyIndex 11 | 12 | 13 | class Test_special_case_version_matrix(unittest.TestCase): 14 | def setUp(self): 15 | self.pkgs = {'a': DummyPackage('pkgA', ['python', 'numpy']), 16 | 'b': DummyPackage('b', ['c']), 17 | 'c': DummyPackage('c'), 18 | 'b_alt': DummyPackage('b', ['c', 'd']), 19 | 'd': DummyPackage('d')} 20 | self.index = DummyIndex() 21 | 22 | def test_no_case(self): 23 | # No cases should still give us a result with a single case in it. 
24 | a = DummyPackage('pkgA', ['wibble']) 25 | self.index.add_pkg('python', '2.7.2') 26 | self.index.add_pkg('wibble', '3.5.0') 27 | r = special_case_version_matrix(a, self.index) 28 | self.assertEqual(r, set([()])) 29 | 30 | def test_python_itself(self): 31 | a = DummyPackage('python', version="a.b.c") 32 | r = special_case_version_matrix(a, self.index) 33 | self.assertEqual(r, set(((('python', 'a.b'),), 34 | )) 35 | ) 36 | 37 | def test_python(self): 38 | a = DummyPackage('pkgA', ['python']) 39 | self.index.add_pkg('python', '2.7.2') 40 | self.index.add_pkg('python', '3.5.0') 41 | r = special_case_version_matrix(a, self.index) 42 | self.assertEqual(r, set(((('python', '2.7'),), 43 | (('python', '3.5'),), 44 | )) 45 | ) 46 | 47 | def test_noarch_python(self): 48 | a = DummyPackage('pkgA', ['python']) 49 | a.noarch = 'python' 50 | self.index.add_pkg('python', '2.7.2') 51 | self.index.add_pkg('python', '3.5.0') 52 | r = special_case_version_matrix(a, self.index) 53 | self.assertEqual(r, set(((), ))) 54 | 55 | def test_constrained_python(self): 56 | a = DummyPackage('pkgA', ['python <3']) 57 | self.index.add_pkg('python', '2.7.2') 58 | self.index.add_pkg('python', '3.5.0') 59 | r = special_case_version_matrix(a, self.index) 60 | self.assertEqual(r, set(((('python', '2.7'), 61 | ), 62 | )) 63 | ) 64 | 65 | def test_numpy_simplest_case(self): 66 | a = DummyPackage('pkgA', ['python', 'numpy']) 67 | self.index.add_pkg('numpy', '1.8.0', 'py27', depends=['python']) 68 | self.index.add_pkg('python', '2.7.2') 69 | r = special_case_version_matrix(a, self.index) 70 | self.assertEqual(r, set([(('python', '2.7'), ('numpy', '1.8')), 71 | ]) 72 | ) 73 | 74 | def test_numpy_without_python(self): 75 | # Conda recipes which do not depend on python, but do on python, do 76 | # not have the full conda metadata, but still need to be handled. 
77 | a = DummyPackage('pkgA', ['numpy']) 78 | self.index.add_pkg('numpy', '1.8.0', 'py27', depends=['python']) 79 | self.index.add_pkg('python', '2.7.2') 80 | r = special_case_version_matrix(a, self.index) 81 | self.assertEqual(r, set([(('python', '2.7'), ('numpy', '1.8')), 82 | ]) 83 | ) 84 | 85 | def test_numpy_repeated_python27(self): 86 | # Repeating python 2.7 will result in the latest version being found 87 | a = DummyPackage('pkgA', ['python', 'numpy']) 88 | self.index.add_pkg('numpy', '1.8.0', 'py27', depends=['python <3']) 89 | self.index.add_pkg('python', '2.7.2') 90 | self.index.add_pkg('python', '2.7.0') 91 | r = special_case_version_matrix(a, self.index) 92 | self.assertEqual(r, set([(('python', '2.7'), ('numpy', '1.8')), 93 | ]) 94 | ) 95 | 96 | def test_numpy_repeated_python(self): 97 | a = DummyPackage('pkgA', ['python', 'numpy']) 98 | self.index.add_pkg('numpy', '1.8.0', 'py27', depends=['python <3']) 99 | self.index.add_pkg('numpy', '1.8.0', 'py35', depends=['python']) 100 | self.index.add_pkg('numpy', '1.9.0', 'py35', depends=['python >=3']) 101 | self.index.add_pkg('python', '2.7.2') 102 | self.index.add_pkg('python', '3.5.0') 103 | r = special_case_version_matrix(a, self.index) 104 | self.assertEqual(r, set(((('python', '2.7'), ('numpy', '1.8')), 105 | (('python', '3.5'), ('numpy', '1.8')), 106 | (('python', '3.5'), ('numpy', '1.9')), 107 | )) 108 | ) 109 | 110 | def test_dependency_on_py27(self): 111 | # If a dependency can't hit the python version, it should not 112 | # be considered a case. 113 | a = DummyPackage('pkgA', ['python', 'oldschool']) 114 | self.index.add_pkg('oldschool', '1.8.0', 'py27', depends=['python <3']) 115 | self.index.add_pkg('python', '2.7.2') 116 | self.index.add_pkg('python', '3.5.0') 117 | r = special_case_version_matrix(a, self.index) 118 | # No python 3 should be here. 
119 | self.assertEqual(r, set([(('python', '2.7'), 120 | ), 121 | ] 122 | )) 123 | 124 | def construct_numpy_index(self, python_versions, numpy_versions): 125 | """ 126 | Set up an index with several versions of python and numpy. 127 | """ 128 | for python_version in python_versions: 129 | python_build_string = 'py' + python_version.replace('.', '') 130 | self.index.add_pkg('python', python_version) 131 | for numpy_version in numpy_versions: 132 | # Add a patch version to each numpy since that is how the 133 | # versions are numbered. 134 | self.index.add_pkg('numpy', 135 | numpy_version + '.2', 136 | python_build_string, 137 | depends=['python ' + python_version]) 138 | 139 | def test_numpy_xx_only(self): 140 | # Only a numpy x.x spec. 141 | 142 | # Build an index that contains numpy 1.9 and 1.10 on python 2.7 and 143 | # 3.5 for a total of 4 numpy/python combinations. 144 | pythons = ['2.7', '3.5'] 145 | # Only major/minor in the numpy list here because that is what is used 146 | # for the build matrix. 147 | numpys = ['1.9', '1.10'] 148 | self.construct_numpy_index(pythons, numpys) 149 | 150 | # Case 1: Only a numpy x.x spec... 151 | 152 | numpy_dep_case = ('numpy x.x', 'python') 153 | 154 | # ...expect all four cases to be in the matrix. 155 | expect_result = [] 156 | for python in pythons: 157 | for numpy in numpys: 158 | expect_result.append((('python', python), ('numpy', numpy))) 159 | 160 | a = DummyPackage('pkgA', numpy_dep_case, numpy_dep_case) 161 | 162 | r = special_case_version_matrix(a, self.index) 163 | self.assertEqual(set(r), set(expect_result), 164 | msg='got: {}\nexpected: {}'.format(r, expect_result)) 165 | 166 | def test_numpy_xx_and_nonrestrictive_specifciation(self): 167 | # Case 2: 168 | # A numpy x.x spec and a numpy version restriction which does NOT 169 | # exclude any of the cases in the DummyIndex. 170 | 171 | # Build an index that contains numpy 1.9 and 1.10 on python 2.7 and 172 | # 3.5 for a total of 4 numpy/python combinations. 
def test_numpy_xx_and_restrictive_specifcation(self):
    # Case 3:
    # A numpy x.x spec and a numpy version restriction which does
    # eliminate one of the numpy versions in the DummyIndex.

    # Build an index that contains numpy 1.9 and 1.10 on python 2.7 and
    # 3.5 for a total of 4 numpy/python combinations.
    pythons = ['2.7', '3.5']
    # Only major/minor in the numpy list here because that is what is used
    # for the build matrix.
    numpys = ['1.9', '1.10']
    self.construct_numpy_index(pythons, numpys)

    numpy_dep_case = ('numpy x.x', 'numpy >=1.10', 'python')

    # Expect only the numpy 1.10 case to survive the >=1.10 restriction.
    # NOTE(fix): the previous comment claimed 1.9 survives, but
    # numpys[1:] keeps only 1.10, matching the >=1.10 spec.
    expected = [(('python', py), ('numpy', np))
                for py in pythons for np in numpys[1:]]

    pkg = DummyPackage('pkgA', numpy_dep_case, numpy_dep_case)
    cases = special_case_version_matrix(pkg, self.index)
    self.assertEqual(set(cases), set(expected),
                     msg='got: {}\nexpected: {}'.format(cases, expected))

def test_perl_matrix(self):
    # Perl versions are matrixed at full version granularity.
    pkg = DummyPackage('pkgA', ['perl'])
    self.index.add_pkg('perl', '4.5.6')
    self.index.add_pkg('perl', '4.5.7')
    cases = special_case_version_matrix(pkg, self.index)
    self.assertEqual(cases, set([(('perl', '4.5.6'),),
                                 (('perl', '4.5.7'),)]))

def test_perl_and_python_matrix(self):
    # Perl and python axes combine as a full cross-product.
    pkg = DummyPackage('pkgA', ['perl', 'python'])
    self.index.add_pkg('perl', '4.5.6')
    self.index.add_pkg('perl', '4.5.7')
    self.index.add_pkg('python', '2.7')
    self.index.add_pkg('python', '3.5')

    cases = special_case_version_matrix(pkg, self.index)
    expected = set((('python', py), ('perl', perl))
                   for py in ('2.7', '3.5')
                   for perl in ('4.5.6', '4.5.7'))
    self.assertEqual(cases, expected)

def test_r_matrix(self):
    # R versions are matrixed at full version granularity too.
    pkg = DummyPackage('pkgA', ['r-base'])
    self.index.add_pkg('r-base', '4.5.6')
    self.index.add_pkg('r-base', '4.5.7')
    cases = special_case_version_matrix(pkg, self.index)
    self.assertEqual(cases, set([(('r-base', '4.5.6'),),
                                 (('r-base', '4.5.7'),)]))
special_case_version_matrix(a, self.index) 268 | expected = set(((('python', '2.7'), ('perl', '4.5.6'), ('r-base', '1.2.3')), 269 | (('python', '2.7'), ('perl', '4.5.6'), ('r-base', '4.5.6')), 270 | (('python', '3.5'), ('perl', '4.5.6'), ('r-base', '1.2.3')), 271 | (('python', '3.5'), ('perl', '4.5.7'), ('r-base', '1.2.3')), 272 | (('python', '3.5'), ('perl', '4.5.7'), ('r-base', '4.5.6')), 273 | (('python', '2.7'), ('perl', '4.5.7'), ('r-base', '4.5.6')), 274 | (('python', '2.7'), ('perl', '4.5.7'), ('r-base', '1.2.3')), 275 | (('python', '3.5'), ('perl', '4.5.6'), ('r-base', '4.5.6')), 276 | )) 277 | self.assertEqual(r, expected) 278 | 279 | 280 | class Test_parse_specification(unittest.TestCase): 281 | def test_specification_no_duplicates(self): 282 | # Do specifications that are all on one-liners get handled correctly? 283 | input_spec = ['numpy', 'scipy', 'python'] 284 | expected_match_spec = [MatchSpec(spec) for spec in input_spec] 285 | expected_output = {ms.name: ms for ms in expected_match_spec} 286 | 287 | output_spec = parse_specifications(input_spec) 288 | self.assertEqual(expected_output, output_spec) 289 | 290 | def test_specification_duplicates_with_version(self): 291 | # If there are duplicates lines in the specifications, and each 292 | # contains a non-trivial version specification, do they get combined 293 | # as expected? 294 | input_spec = ['numpy >=1.7', 'numpy <1.10', 'python'] 295 | expected_match_spec = [MatchSpec(spec) for spec in ['numpy >=1.7,<1.10', 'python']] 296 | expected_output = {ms.name: ms for ms in expected_match_spec} 297 | output_spec = parse_specifications(input_spec) 298 | self.assertEqual(expected_output, output_spec) 299 | 300 | def test_three_part_spec_preserved(self): 301 | # A conda specification may contain up to three parts. Make sure those 302 | # are preserved. 
303 | 304 | input_spec = ['numpy 1.8.1 py27_0', 'python'] 305 | expected_match_spec = [MatchSpec(spec) for spec in input_spec] 306 | expected_output = {ms.name: ms for ms in expected_match_spec} 307 | output_spec = parse_specifications(input_spec) 308 | self.assertEqual(expected_output, output_spec) 309 | 310 | def test_multiline_spec_with_one_three_part_spec(self): 311 | # The expected output here is to have the specifications combined 312 | # with a comma even though the result is not a valid conda version 313 | # specification. However, the original multi-line version is not 314 | # valid either. 315 | 316 | input_spec = ['numpy 1.8.1 py27_0', 'numpy 1.8*', 'python'] 317 | expected_match_spec = [MatchSpec(spec) for spec 318 | in ['numpy 1.8.1 py27_0,1.8*', 'python']] 319 | expected_output = {ms.name: ms for ms in expected_match_spec} 320 | output_spec = parse_specifications(input_spec) 321 | self.assertEqual(expected_output, output_spec) 322 | 323 | def test_specification_with_blank(self): 324 | # Does a multiline specification, one of which is just the package 325 | # name, properly combine the other specifications on the other lines? 326 | 327 | input_spec = ('numpy 1.9', 'numpy', 'numpy <1.11', 'python 2.7') 328 | 329 | expected_match_spec = [MatchSpec(spec) for spec 330 | in ['numpy 1.9,<1.11', 'python 2.7']] 331 | expected_output = {ms.name: ms for ms in expected_match_spec} 332 | output_spec = parse_specifications(input_spec) 333 | self.assertEqual(expected_output, output_spec) 334 | 335 | 336 | class CasesTestCase(unittest.TestCase): 337 | def setUp(self): 338 | self.item = { 339 | 'py26': ('python', '2.6'), 340 | 'py27': ('python', '2.7'), 341 | 'py34': ('python', '3.4'), 342 | 'py35': ('python', '3.5'), 343 | 'o12': ('other', '1.2'), 344 | 'o13': ('other', '1.3'), 345 | 'np19': ('numpy', '1.9'), 346 | 'np110': ('numpy', '1.10'), 347 | 'np21': ('numpy', '2.1'), 348 | } 349 | 350 | 351 | class Test_filter_cases(CasesTestCase): 352 | # n.b. 
We should be careful not to test MatchSpec functionality here. 353 | 354 | def test_nothing(self): 355 | self.assertEqual(list(filter_cases([], [])), []) 356 | 357 | def test_no_filter(self): 358 | cases = ([self.item['py26']], 359 | [self.item['py35']]) 360 | self.assertEqual(tuple(filter_cases(cases, [])), cases) 361 | 362 | def test_single_filter(self): 363 | cases = ([self.item['py26']], 364 | [self.item['py35']]) 365 | self.assertEqual(tuple(filter_cases(cases, ['python >=3'])), cases[1:]) 366 | 367 | def test_multiple_filter(self): 368 | cases = ([self.item['py26']], 369 | [self.item['py34']], 370 | [self.item['py35']]) 371 | self.assertEqual(tuple(filter_cases(cases, ['python >=3', 'python <=3.4'])), cases[1:2]) 372 | 373 | def test_multiple_filter_with_numpy(self): 374 | cases = ([self.item['py26'], self.item['np110']], 375 | [self.item['py34'], self.item['np19']], 376 | [self.item['py35'], self.item['np110']]) 377 | self.assertEqual(tuple(filter_cases(cases, ['python >=3', 'numpy 1.10.*'])), cases[2:]) 378 | 379 | def test_other_cases(self): 380 | cases = ([self.item['py26'], self.item['o12']], 381 | [self.item['py34'], self.item['o12']], 382 | [self.item['py35'], self.item['o13']]) 383 | self.assertEqual(tuple(filter_cases(cases, ['other 1.2.*'])), cases[:2]) 384 | 385 | 386 | class Test_keep_top_n_major_versions(CasesTestCase): 387 | def test_keep_less_than_n(self): 388 | cases = ([self.item['py26']],) 389 | self.assertEqual(tuple(keep_top_n_major_versions(cases, 2)), 390 | cases) 391 | 392 | def test_keep_1(self): 393 | cases = ([self.item['py27']], 394 | [self.item['py35']]) 395 | self.assertEqual(tuple(keep_top_n_major_versions(cases, 1)), 396 | cases[1:]) 397 | 398 | def test_keep_2(self): 399 | cases = ([self.item['py27']], 400 | [self.item['py35']]) 401 | self.assertEqual(tuple(keep_top_n_major_versions(cases, 2)), 402 | cases) 403 | 404 | def test_keep_0(self): 405 | cases = ([self.item['py27']], 406 | [self.item['py35']]) 407 | 
self.assertEqual(tuple(keep_top_n_major_versions(cases, 0)), 408 | cases) 409 | 410 | def test_multiple_packages(self): 411 | cases = ([self.item['py35'], self.item['np110']], 412 | [self.item['py35'], self.item['np21']], 413 | [self.item['py27'], self.item['np110']], 414 | [self.item['py27'], self.item['np21']]) 415 | self.assertEqual(tuple(keep_top_n_major_versions(cases, 1)), 416 | cases[1:2]) 417 | 418 | def test_multiple_packages_leaves_nothing(self): 419 | cases = ([self.item['py35'], self.item['np110']], 420 | [self.item['py27'], self.item['np21']]) 421 | self.assertEqual(tuple(keep_top_n_major_versions(cases, 1)), 422 | ()) 423 | 424 | 425 | class Test_keep_top_n_minor_versions(CasesTestCase): 426 | def test_keep_less_than_n(self): 427 | cases = ([self.item['py26']],) 428 | self.assertEqual(tuple(keep_top_n_minor_versions(cases, 2)), 429 | cases) 430 | 431 | def test_keep_1(self): 432 | cases = ([self.item['py26']], 433 | [self.item['py27']], 434 | [self.item['py34']], 435 | [self.item['py35']]) 436 | self.assertEqual(tuple(keep_top_n_minor_versions(cases, 1)), 437 | cases[1::2]) 438 | 439 | def test_keep_2(self): 440 | cases = ([self.item['py26']], 441 | [self.item['py27']], 442 | [self.item['py35']]) 443 | self.assertEqual(tuple(keep_top_n_minor_versions(cases, 2)), 444 | cases) 445 | 446 | def test_keep_0(self): 447 | cases = ([self.item['py26']], 448 | [self.item['py35']]) 449 | self.assertEqual(tuple(keep_top_n_minor_versions(cases, 0)), 450 | cases) 451 | 452 | def test_multiple_packages(self): 453 | cases = ([self.item['py26'], self.item['np19']], 454 | [self.item['py26'], self.item['np110']], 455 | [self.item['py27'], self.item['np110']]) 456 | self.assertEqual(tuple(keep_top_n_minor_versions(cases, 1)), 457 | cases[2:]) 458 | 459 | 460 | 461 | # def test_keep_0(self): 462 | 463 | 464 | 465 | if __name__ == '__main__': 466 | unittest.main() 467 | -------------------------------------------------------------------------------- 
from contextlib import contextmanager
from collections import defaultdict
import logging
import sys

from .conda_interface import (MatchSpec, Unsatisfiable, NoPackagesFound, Resolve,
                              get_key, copy_index, ensure_dist_or_dict)

try:
    import conda_build.api
except ImportError:
    import conda_build.config

# Both exception types mean "this combination cannot be resolved"; they are
# handled identically throughout this module.
NO_PACKAGES_EXCEPTION = (Unsatisfiable, NoPackagesFound)


class StdoutNewline(logging.Handler):
    """
    Logging handler which writes each record to stdout, newline terminated.
    """
    def emit(self, record):
        # Build the message locally instead of mutating record.msg in place.
        # The previous "record.msg += '\n'" leaked the appended newline into
        # every other handler attached to the same logger.
        try:
            sys.stdout.write(record.getMessage() + '\n')
            sys.stdout.flush()
        except IOError:
            pass


stdout = logging.getLogger('conda_build_all.version_matrix.stdoutlog')
stdout.addHandler(StdoutNewline())
stdout.setLevel(logging.WARNING)


@contextmanager
def override_conda_logging(level):
    """
    Temporarily force conda's loggers to ``level`` and strip their handlers,
    restoring both on exit (even if the body raises).
    """
    levels = {}
    handlers = {}
    loggers = ['progress', 'progress.start', 'progress.update',
               'progress.stop', 'stdoutlog', 'stderrlog',
               'conda.resolve', 'dotupdate']

    for logger_name in loggers:
        logger = logging.getLogger(logger_name)
        levels[logger_name] = logger.level
        handlers[logger_name] = logger.handlers
        logger.setLevel(level)
        logger.handlers = []
    # The restore step must run even when the with-body raises; without the
    # try/finally an exception would permanently clobber conda's logging.
    try:
        yield
    finally:
        for logger_name in loggers:
            logger = logging.getLogger(logger_name)
            logger.setLevel(levels[logger_name])
            logger.handlers = handlers[logger_name]


def conda_special_versions(meta, index, version_matrix=None):
    """
    Returns a generator which configures conda build's PY and NPY versions
    according to the given version matrix. If no version matrix is given, it
    will be computed by :func:`special_case_version_matrix`.

    """
    if version_matrix is None:
        version_matrix = special_case_version_matrix(meta, index)

    for case in version_matrix:
        # NOTE(review): in the conda_build.api branch setup_vn_mtx_case takes
        # (case, config); confirm this single-argument call is only reached on
        # the legacy (context-manager) branch.
        config = setup_vn_mtx_case(case)
        yield config


def parse_specifications(requirements):
    """
    Parse a list of specifications, turning multi-line specifications into
    a single specification per package name.

    Parameters
    ----------
    requirements : iterable of str
        Conda requirement strings, e.g. ``['numpy >=1.7', 'numpy <1.10']``.

    Returns
    -------
    dict
        Mapping of package name to a single combined :class:`MatchSpec`.

    """
    requirement_specs = defaultdict(list)
    # Generate a list of requirements for each spec name to ensure that
    # multi-line specs are handled.
    for spec in requirements:
        spec_details = spec.split(None, 1)
        if len(spec_details) == 2:
            # Package name and version spec were given, append the
            # version spec.
            requirement_specs[MatchSpec(spec).name].append(spec_details[1])
        elif spec_details[0] not in requirement_specs:
            # Only package name given (e.g. 'numpy'), and the package name is
            # not in the requirements yet, so add an empty list.
            requirement_specs[MatchSpec(spec).name] = []

    # Combine multi-line specs into a single line by assuming the requirements
    # should be and-ed.
    for spec_name, spec_list in requirement_specs.items():
        requirement_specs[spec_name] = ','.join(spec_list)

    # Turn these into MatchSpecs.
    requirement_specs = {name: MatchSpec(' '.join([name, spec]).strip())
                         for name, spec in requirement_specs.items()}

    return requirement_specs


def special_case_version_matrix(meta, index):
    """
    Return the non-orthogonal version matrix for special software within conda
    (numpy, python).

    For example, supposing a meta depended on numpy and python, and that there
    was a numpy 1.8 & 1.9 for python 2.7 but only a numpy 1.9 for python 3.5,
    the matrix should be:

        ([('python', '2.7'), ('numpy', '1.8')],
         [('python', '2.7'), ('numpy', '1.9')],
         [('python', '3.5'), ('numpy', '1.9')])

    Packages which don't depend on any of the special cases will return an
    iterable with an empty tuple. This is analogous to saying "a build is
    needed, but there are no special cases". Thus, code may reliably implement
    a loop such as:

    for case in special_case_version_matrix(...):
        ... setup the case ...
        ... build ...

    """
    index = copy_index(index)
    r = Resolve(index)

    requirements = meta.get_value('requirements/build', [])
    requirement_specs = parse_specifications(requirements)

    run_requirements = meta.get_value('requirements/run', [])
    run_requirement_specs = parse_specifications(run_requirements)

    # Thanks to https://github.com/conda/conda-build/pull/493 we no longer
    # need to compute the complex matrix for numpy versions unless a specific
    # version has been defined.
    np_spec = requirement_specs.get('numpy')
    np_run_spec = run_requirement_specs.get('numpy')
    if np_spec and np_run_spec and 'x.x' not in np_run_spec.spec:
        # A simple spec (just numpy) has been defined, so we can drop it from
        # the special cases.
        requirement_specs.pop('numpy')

    for pkg in requirement_specs:
        spec = requirement_specs[pkg]
        # We want to bake the version in, but we don't know what it is yet.
        if 'x.x' in spec.spec:
            # Remove the x.x part of the specification, assuming that if it
            # is present with other specifications they are and-ed together,
            # i.e. comma-separated.
            name, specification = spec.spec.split()
            spec_list = specification.split(',')
            no_xx = [s for s in spec_list if s != 'x.x']
            new_spec = ','.join(no_xx)
            if new_spec:
                ms = MatchSpec(' '.join([name, new_spec]))
            else:
                ms = MatchSpec(name)
            requirement_specs[pkg] = ms

    def minor_vn(version_str):
        """
        Reduce a version string such as '1.8.2' to its major.minor form '1.8'.
        """
        return '.'.join(version_str.split('.')[:2])

    cases = set()
    unsolvable_cases = set()

    def get_pkgs(spec):
        # All index packages matching ``spec``, or [] if the channel has none.
        try:
            # should be r.get_dists_for_spec(spec) for conda-4.3+
            return r.get_pkgs(spec)
        except NO_PACKAGES_EXCEPTION:
            # If no package is found in the channel, we do nothing;
            # this is reasonable because add_case_if_soluble does the same
            # for concrete cases.
            # This behavior is important because otherwise this will crash if
            # a package is not available for a certain platform (e.g. win).
            return []

    def add_case_if_soluble(case):
        # Whilst we strictly don't need to, shortcutting cases we've already
        # seen makes a *huge* performance difference.
        if case in cases | unsolvable_cases:
            return

        specs = ([ms.spec for ms in requirement_specs.values()] +
                 ['{} {}*'.format(pkg, version) for pkg, version in case])

        try:
            # Figure out if this case is actually resolvable. We don't care
            # how, just that it could be.
            r.solve(specs)
        except NO_PACKAGES_EXCEPTION:
            unsolvable_cases.add(case)
        else:
            cases.add(case)

    with override_conda_logging(logging.WARN):
        if 'numpy' in requirement_specs:
            np_spec = requirement_specs.pop('numpy')
            py_spec = requirement_specs.pop('python', None)
            for numpy_pkg in get_pkgs(np_spec):
                np_vn = minor_vn(index[get_key(numpy_pkg)]['version'])
                numpy_deps = index[get_key(numpy_pkg)]['depends']
                numpy_deps = {MatchSpec(spec).name: MatchSpec(spec)
                              for spec in numpy_deps}
                # This would be problematic if python wasn't a dep of numpy.
                for python_pkg in get_pkgs(numpy_deps['python']):
                    if py_spec and not py_spec.match(get_key(python_pkg)):
                        continue
                    py_vn = minor_vn(index[get_key(python_pkg)]['version'])
                    case = (('python', py_vn),
                            ('numpy', np_vn),
                            )
                    add_case_if_soluble(case)
        elif 'python' in requirement_specs:
            if getattr(meta, 'noarch', None) == 'python':
                # no python version dependency on noarch: python recipes
                add_case_if_soluble(())
            else:
                py_spec = requirement_specs.pop('python')
                for python_pkg in get_pkgs(py_spec):
                    py_vn = minor_vn(index[get_key(python_pkg)]['version'])
                    case = (('python', py_vn), )
                    add_case_if_soluble(case)

        if 'perl' in requirement_specs:
            pl_spec = requirement_specs.pop('perl')
            # Extend every existing case (or start from the empty case) with
            # each available perl version, dropping the un-extended base.
            for case_base in list(cases or [()]):
                for perl_pkg in get_pkgs(pl_spec):
                    pl_vn = index[get_key(perl_pkg)]['version']
                    case = case_base + (('perl', pl_vn), )
                    add_case_if_soluble(case)
                if case_base in cases:
                    cases.remove(case_base)

        if 'r-base' in requirement_specs:
            r_spec = requirement_specs.pop('r-base')
            # Same extension strategy as for perl, with r-base versions.
            for case_base in list(cases or [()]):
                for r_pkg in get_pkgs(r_spec):
                    r_vn = index[get_key(r_pkg)]['version']
                    case = case_base + (('r-base', r_vn), )
                    add_case_if_soluble(case)
                if case_base in cases:
                    cases.remove(case_base)

    # Deal with the fact that building a Python recipe itself requires a
    # special case version. This comes down to the odd decision in
    # https://github.com/conda/conda-build/commit/3dddeaf3cf5e85369e28c8f96e24c2dd655e36f0.
    if meta.name() == 'python' and not cases:
        cases.add((('python', '.'.join(meta.version().split('.', 2)[:2])),))

    # Put an empty case in to allow simple iteration of the results.
    if not cases:
        cases.add(())

    return set(cases)


def filter_cases(cases, extra_specs):
    """
    Yield only the cases consistent with every spec in ``extra_specs``.

    cases might look like:

        cases = ([('python', '2.7'), ('numpy', '1.8')],
                 [('python', '2.7'), ('numpy', '1.9')],
                 [('python', '3.5'), ('numpy', '1.8')],
                 )

    Typically extra_specs comes from the environment specification.

    """
    specs = [MatchSpec(spec) for spec in extra_specs]

    for case in cases:
        # Invent a sensible "tar.bz2" name which we can use to invoke conda's
        # MatchSpec matching.
        cases_by_pkg_name = {name: '{}-{}.0-0.tar.bz2'.format(name, version)
                             for name, version in case}
        match = []
        for spec in specs:
            # Only run the filter on the packages in cases.
            if spec.name in cases_by_pkg_name:
                match.append(bool(spec.match(ensure_dist_or_dict(cases_by_pkg_name[spec.name]))))
        if all(match):
            yield case


def keep_top_n_major_versions(cases, n=2):
    """
    Remove all but the top n major version cases for each package in cases.

    Parameters
    ----------
    cases
        The cases to filter. See filter_cases for a definition of cases.
    n : integer >= 0
        The number of major versions to keep. Default is ``2``. 0 results in
        all major versions being kept.

    """
    name_to_major_versions = {}
    for case in cases:
        for name, version in case:
            name_to_major_versions.setdefault(name, set()).add(int(version.split('.')[0]))
    # Note: with n == 0 the slice [-0:] keeps the whole list, i.e. no filter.
    cutoff = {name: sorted(majors)[-n:] for name, majors in name_to_major_versions.items()}
    for case in cases:
        keeper = True
        for name, version in case:
            if int(version.split('.')[0]) not in cutoff[name]:
                keeper = False
        if keeper:
            yield case


def keep_top_n_minor_versions(cases, n=2):
    """
    Remove all but the top n minor version cases for each package in cases.
    This will not do any major version filtering, so two major versions with
    many minor versions will result in n x 2 cases returned.

    Parameters
    ----------
    cases
        The cases to filter. See filter_cases for a definition of cases.
    n : integer >= 0
        The number of minor versions to keep. Default is ``2``. 0 results in
        all minor versions being kept.

    """
    mapping = {}
    for case in cases:
        for name, version in case:
            major = int(version.split('.')[0])
            minor = int(version.split('.')[1])
            mapping.setdefault((name, major), set()).add(minor)
    # Note: with n == 0 the slice [-0:] keeps the whole list, i.e. no filter.
    cutoff = {key: sorted(minors)[-n:] for key, minors in mapping.items()}
    for case in cases:
        keeper = True
        for name, version in case:
            major = int(version.split('.')[0])
            minor = int(version.split('.')[1])
            if minor not in cutoff[(name, major)]:
                keeper = False
        if keeper:
            yield case


if hasattr(conda_build, 'api'):
    def setup_vn_mtx_case(case, config):
        """
        Apply a version-matrix case to a conda-build (api-style) config and
        return it.
        """
        for pkg, version in case:
            if pkg == 'python':
                version = int(version.replace('.', ''))
                config.CONDA_PY = version
            elif pkg == 'numpy':
                version = int(version.replace('.', ''))
                config.CONDA_NPY = version
            elif pkg == 'perl':
                config.CONDA_PERL = version
            elif pkg == 'r-base':
                config.CONDA_R = version
            else:
                raise NotImplementedError('Package {} not yet implemented.'
                                          ''.format(pkg))
        return config

else:
    @contextmanager
    def setup_vn_mtx_case(case, config=None):
        """
        Context manager applying a version-matrix case to the legacy global
        conda_build config, restoring the previous values on exit.
        """
        config = conda_build.config.config
        orig_npy = conda_build.config.config.CONDA_NPY
        orig_py = conda_build.config.config.CONDA_PY
        orig_r = conda_build.config.config.CONDA_R
        orig_perl = conda_build.config.config.CONDA_PERL
        for pkg, version in case:
            if pkg == 'python':
                version = int(version.replace('.', ''))
                config.CONDA_PY = version
            elif pkg == 'numpy':
                version = int(version.replace('.', ''))
                config.CONDA_NPY = version
            elif pkg == 'perl':
                config.CONDA_PERL = version
            elif pkg == 'r-base':
                config.CONDA_R = version
            else:
                raise NotImplementedError('Package {} not yet implemented.'
                                          ''.format(pkg))
        yield
        conda_build.config.config.CONDA_NPY = orig_npy
        conda_build.config.config.CONDA_PY = orig_py
        conda_build.config.config.CONDA_R = orig_r
        conda_build.config.config.CONDA_PERL = orig_perl