├── .gitattributes ├── .gitignore ├── .travis.yml ├── LICENSE ├── MANIFEST.in ├── README.md ├── appveyor.yml ├── bootstrap-obvious-ci-and-miniconda.py ├── obvci ├── __init__.py ├── _version.py ├── cli │ ├── __init__.py │ └── conda_build_dir.py ├── conda_tools │ ├── __init__.py │ ├── build.py │ ├── build_directory.py │ ├── from_conda_manifest_core_vn_matrix.py │ ├── inspect_binstar.py │ └── order_deps.py └── tests │ ├── __init__.py │ ├── recipes │ ├── recipe1_dev │ │ ├── bld.bat │ │ ├── build.sh │ │ └── meta.yaml │ └── recipes_directory │ │ ├── recipe1 │ │ ├── build.sh │ │ └── meta.yaml │ │ ├── recipe2 │ │ ├── build.sh │ │ └── meta.yaml │ │ └── recipe3 │ │ ├── build.sh │ │ └── meta.yaml │ └── unit │ ├── __init__.py │ └── conda │ ├── __init__.py │ ├── dummy_index.py │ ├── test_BakedDistribution.py │ └── test_build_directory__channels.py ├── obvious-ci.conda ├── bld.bat ├── build.sh └── meta.yaml ├── scripts ├── obvci_appveyor_python_build_env.cmd ├── obvci_conda_build_dir.py ├── obvci_install_conda_build_tools.py ├── obvci_install_miniconda.ps1 ├── obvci_install_miniconda.sh └── obvci_substitute_conda_recipe_version.py ├── setup.cfg ├── setup.py └── versioneer.py /.gitattributes: -------------------------------------------------------------------------------- 1 | obvci/_version.py export-subst 2 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | build/ 2 | *.pyc 3 | .project 4 | .pydevproject 5 | .DS_Store 6 | -------------------------------------------------------------------------------- /.travis.yml: -------------------------------------------------------------------------------- 1 | # The language in this case has no bearing - we are going to be making use of "conda" for a 2 | # python distribution for the scientific python stack. 3 | language: c 4 | 5 | sudo: false 6 | 7 | env: 8 | global: 9 | - TARGET_ARCH="x64" 10 | - CONDA_INSTALL_LOCN="${HOME}/conda" 11 | # Defines BINSTAR_TOKEN (for pelson) 12 | - secure: "NlB/4DcLa/aj4SssKXxW/XLz/6Eg8UWYNixHnvSd4l5TbrX+yJjlxRuaOo5DEVCP/qBvg1m0lPtGjZ4frmqTRkhPO59JDC8ICjjykTXbN07pgpogKkWdk2XrXVUsWvo17+SZISCNZc6QBtk5iLfzjZgDZu+yDrs3BCv11Byuiao=" 13 | matrix: 14 | # These items are used by the miniconda installer, but also by the conda build phase. 15 | - CONDA_PY=27 16 | - CONDA_PY=34 17 | - CONDA_PY=35 18 | 19 | install: 20 | - python ./bootstrap-obvious-ci-and-miniconda.py ${CONDA_INSTALL_LOCN} ${TARGET_ARCH} ${CONDA_PY::1} --without-obvci 21 | - source ${CONDA_INSTALL_LOCN}/bin/activate root 22 | - scripts/obvci_install_conda_build_tools.py 23 | 24 | script: 25 | - conda build obvious-ci.conda 26 | 27 | after_success: 28 | - if [[ ( "$TRAVIS_PULL_REQUEST" == 'false' ) && ( "$TRAVIS_BRANCH" == 'master' ) ]] ; then anaconda -t ${BINSTAR_TOKEN} upload $(conda build obvious-ci.conda --output) -u pelson -c development --force; fi 29 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Copyright (c) 2015, Phil Elson 2 | All rights reserved. 3 | 4 | Redistribution and use in source and binary forms, with or without 5 | modification, are permitted provided that the following conditions are met: 6 | 7 | * Redistributions of source code must retain the above copyright notice, this 8 | list of conditions and the following disclaimer. 
9 | 10 | * Redistributions in binary form must reproduce the above copyright notice, 11 | this list of conditions and the following disclaimer in the documentation 12 | and/or other materials provided with the distribution. 13 | 14 | * Neither the name of Obvious-CI nor the names of its 15 | contributors may be used to endorse or promote products derived from 16 | this software without specific prior written permission. 17 | 18 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" 19 | AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE 20 | IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE 21 | DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE 22 | FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL 23 | DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR 24 | SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER 25 | CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, 26 | OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 27 | OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 28 | 29 | -------------------------------------------------------------------------------- /MANIFEST.in: -------------------------------------------------------------------------------- 1 | include LICENSE 2 | include versioneer.py 3 | include obvci/_version.py 4 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | Obvious-CI 2 | ========== 3 | 4 | Obvious-CI is not a continuous integration tool in itself, but can be used 5 | to simplify the implementation of continuous integration using free tools 6 | such as Travis CI and appveyor. 7 | 8 | Obvious-CI aims to simplify the CI setup of a repository by providing 9 | infrastructural setup scripts so that your repository can focus on the 10 | continuous integration actions and outputs rather than on the repetitive 11 | setup of the environment itself. 12 | -------------------------------------------------------------------------------- /appveyor.yml: -------------------------------------------------------------------------------- 1 | branches: 2 | only: 3 | - master 4 | 5 | environment: 6 | MINICONDA_VERSION: "3.5.5" 7 | CONDA_INSTALL_LOCN: "C:\\conda" 8 | CMD_IN_ENV: "cmd /E:ON /V:ON /C scripts\\obvci_appveyor_python_build_env.cmd" 9 | 10 | BINSTAR_TOKEN: 11 | secure: rCl3tR9qKVjNlG9MVyY2E6ZZsj6Wrhwe5ytWWXtbUaJWpIiLkY6ItyJkAyEpNJCO 12 | 13 | matrix: 14 | - TARGET_ARCH: "x64" 15 | CONDA_PY: "27" 16 | - TARGET_ARCH: "x86" 17 | CONDA_PY: "27" 18 | - TARGET_ARCH: "x64" 19 | CONDA_PY: "34" 20 | - TARGET_ARCH: "x86" 21 | CONDA_PY: "34" 22 | - TARGET_ARCH: "x64" 23 | CONDA_PY: "35" 24 | - TARGET_ARCH: "x86" 25 | CONDA_PY: "35" 26 | 27 | # We always use a 64-bit machine, but can build x86 distributions 28 | # with the TARGET_ARCH variable. 29 | platform: 30 | - x64 31 | 32 | install: 33 | - "python bootstrap-obvious-ci-and-miniconda.py %CONDA_INSTALL_LOCN% %TARGET_ARCH% %CONDA_PY:~0,1% --without-obvci" 34 | - "SET PATH=%CONDA_INSTALL_LOCN%;%CONDA_INSTALL_LOCN%\\Scripts;%PATH%" 35 | 36 | - "python scripts\\obvci_install_conda_build_tools.py" 37 | - "conda info" 38 | 39 | # Skip .NET project specific build phase. 
40 | build: off 41 | 42 | test_script: 43 | - "%CMD_IN_ENV% conda build obvious-ci.conda" 44 | 45 | on_success: 46 | - "conda build obvious-ci.conda --output > obvci_fname.txt" 47 | - "set /p FNAME= < obvci_fname.txt" 48 | 49 | - "python -c \"import os; tag = os.environ.get('APPVEYOR_REPO_TAG', 'false'); is_master = os.environ.get('APPVEYOR_REPO_BRANCH', None) == 'master'; is_pr = os.environ.get('APPVEYOR_PULL_REQUEST_NUMBER', ''); print(tag not in ['false', ''] or (is_master and not is_pr));\" > is_tag_or_master.txt" 50 | - "set /p TAG_OR_MASTER= < is_tag_or_master.txt" 51 | - "ECHO '%TAG_OR_MASTER%'" 52 | - "if '%TAG_OR_MASTER%' == 'True' (anaconda -t %BINSTAR_TOKEN% upload %FNAME% -u pelson -c development --force)" 53 | 54 | -------------------------------------------------------------------------------- /bootstrap-obvious-ci-and-miniconda.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | """ 3 | Installs Miniconda with the latest version of Obvious-CI. 4 | 5 | This script supports Python 2 and 3 (>=2.6 and >=3.2+ respectively) and is 6 | designed to run on OSX, Linux and Windows. 7 | 8 | """ 9 | from __future__ import print_function 10 | 11 | import argparse 12 | import os 13 | import platform 14 | import subprocess 15 | import sys 16 | 17 | try: 18 | from urllib.request import urlretrieve 19 | except ImportError: 20 | from urllib import urlretrieve 21 | 22 | MINICONDA_URL_TEMPLATE = ('https://repo.continuum.io/miniconda/Miniconda{major_py_version}-' 23 | '{miniconda_version}-{OS}-{arch}.{ext}') 24 | 25 | 26 | def miniconda_url(target_system, target_arch, major_py_version, miniconda_version): 27 | template_values = {'miniconda_version': miniconda_version} 28 | 29 | if target_arch == 'x86': 30 | template_values['arch'] = "x86" 31 | elif target_arch == 'x64': 32 | template_values['arch'] = "x86_64" 33 | else: 34 | raise ValueError('Unexpected target arch.') 35 | 36 | system_to_miniconda_os = {'Linux': 'Linux', 37 | 'Darwin': 'MacOSX', 38 | 'Windows': 'Windows'} 39 | if target_system not in system_to_miniconda_os: 40 | raise ValueError('Unexpected system {!r}.'.format(target_system)) 41 | template_values['OS'] = system_to_miniconda_os[target_system] 42 | 43 | miniconda_os_ext = {'Linux': 'sh', 'MacOSX': 'sh', 44 | 'Windows': 'exe'} 45 | template_values['ext'] = miniconda_os_ext[template_values['OS']] 46 | 47 | if major_py_version not in ['2', '3']: 48 | raise ValueError('Unexpected major Python version {!r}.'.format(major_py_version)) 49 | template_values['major_py_version'] = major_py_version 50 | 51 | return MINICONDA_URL_TEMPLATE.format(**template_values) 52 | 53 | 54 | def main(target_dir, target_arch, major_py_version, miniconda_version='latest', install_obvci=True): 55 | system = platform.system() 56 | URL = miniconda_url(system, target_arch, major_py_version, miniconda_version) 57 | basename = URL.rsplit('/', 1)[1] 58 | if system in ['Linux', 'Darwin']: 59 | cmd = ['bash', basename, '-b', '-p', target_dir] 60 | bin_dir = 'bin' 61 | elif system in ['Windows']: 62 | cmd = ['powershell', 'Start-Process', '-FilePath', basename, '-ArgumentList', 63 | '/S,/D=' + target_dir, 64 | '-Wait', ]#'-Passthru'] 65 | bin_dir = 'scripts' 66 | else: 67 | raise ValueError('Unsupported operating system.') 68 | 69 | if not os.path.exists(basename): 70 | print('Downloading from {}'.format(URL)) 71 | urlretrieve(URL, basename) 72 | else: 73 | print('Using cached version of {}'.format(URL)) 74 | 75 | # Install with powershell. 
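# For example, on 64-bit Linux with major_py_version '3' and the default 'latest'
# miniconda_version, URL resolves to
# https://repo.continuum.io/miniconda/Miniconda3-latest-Linux-x86_64.sh and cmd is
# ['bash', 'Miniconda3-latest-Linux-x86_64.sh', '-b', '-p', target_dir]; on Windows
# the downloaded .exe installer is driven through 'powershell Start-Process' instead.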
76 | if os.path.exists(target_dir): 77 | raise IOError('Installation directory already exists') 78 | subprocess.check_call(cmd) 79 | 80 | if not os.path.isdir(target_dir): 81 | raise RuntimeError('Failed to install miniconda :(') 82 | 83 | if install_obvci: 84 | conda_path = os.path.join(target_dir, bin_dir, 'conda') 85 | subprocess.check_call([conda_path, 'install', '--yes', '--quiet', '-c', 'pelson', 'obvious-ci']) 86 | 87 | 88 | if __name__ == '__main__': 89 | parser = argparse.ArgumentParser(description="""A script to download and install miniconda for Linux/OSX/Windows.""") 90 | parser.add_argument("installation_directory", help="""Where miniconda should be installed.""") 91 | parser.add_argument("arch", help="""The target architecture of this build. (must be either "x86" or "x64").""", 92 | choices=['x86', 'x64']) 93 | parser.add_argument("major_py_version", help="""The major Python version for the miniconda root env (may 94 | still subsequently use another Python version).""", 95 | choices=['2', '3']) 96 | parser.add_argument('--without-obvci', help="Disable the installation of Obvious-ci.", 97 | action='store_true') 98 | parser.add_argument('--miniconda-version', default='latest') 99 | 100 | args = parser.parse_args() 101 | main(args.installation_directory, args.arch, args.major_py_version, 102 | miniconda_version=args.miniconda_version, 103 | install_obvci=not args.without_obvci) 104 | -------------------------------------------------------------------------------- /obvci/__init__.py: -------------------------------------------------------------------------------- 1 | 2 | from ._version import get_versions 3 | __version__ = get_versions()['version'] 4 | del get_versions 5 | -------------------------------------------------------------------------------- /obvci/_version.py: -------------------------------------------------------------------------------- 1 | 2 | # This file helps to compute a version number in source trees obtained from 3 | # git-archive tarball (such as those provided by githubs download-from-tag 4 | # feature). Distribution tarballs (built by setup.py sdist) and build 5 | # directories (produced by setup.py build) will contain a much shorter file 6 | # that just contains the computed version number. 7 | 8 | # This file is released into the public domain. Generated by 9 | # versioneer-0.15+dev (https://github.com/warner/python-versioneer) 10 | 11 | """Git implementation of _version.py.""" 12 | 13 | import errno 14 | import os 15 | import re 16 | import subprocess 17 | import sys 18 | 19 | 20 | def get_keywords(): 21 | """Get the keywords needed to look up the version information.""" 22 | # these strings will be replaced by git during git-archive. 23 | # setup.py/versioneer.py will grep for the variable names, so they must 24 | # each be defined on a line of their own. _version.py will just call 25 | # get_keywords(). 
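# These two values are substituted by 'git archive' thanks to the export-subst
# attribute set on this file in .gitattributes; in an unexpanded tree they would
# still be "$Format:...$" placeholders, which git_versions_from_keywords below
# detects and treats as unusable.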
26 | git_refnames = " (HEAD -> master)" 27 | git_full = "a11cee26e8578bc1146fa5f25201bdf35096fbc7" 28 | keywords = {"refnames": git_refnames, "full": git_full} 29 | return keywords 30 | 31 | 32 | class VersioneerConfig: 33 | 34 | """Container for Versioneer configuration parameters.""" 35 | 36 | 37 | def get_config(): 38 | """Create, populate and return the VersioneerConfig() object.""" 39 | # these strings are filled in when 'setup.py versioneer' creates 40 | # _version.py 41 | cfg = VersioneerConfig() 42 | cfg.VCS = "git" 43 | cfg.style = "pep440-branch-based" 44 | cfg.tag_prefix = "v" 45 | cfg.parentdir_prefix = "obvci" 46 | cfg.versionfile_source = "obvci/_version.py" 47 | cfg.verbose = False 48 | return cfg 49 | 50 | 51 | class NotThisMethod(Exception): 52 | 53 | """Exception raised if a method is not valid for the current scenario.""" 54 | 55 | 56 | LONG_VERSION_PY = {} 57 | HANDLERS = {} 58 | 59 | 60 | def register_vcs_handler(vcs, method): # decorator 61 | """Decorator to mark a method as the handler for a particular VCS.""" 62 | def decorate(f): 63 | """Store f in HANDLERS[vcs][method].""" 64 | if vcs not in HANDLERS: 65 | HANDLERS[vcs] = {} 66 | HANDLERS[vcs][method] = f 67 | return f 68 | return decorate 69 | 70 | 71 | def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False): 72 | """Call the given command(s).""" 73 | assert isinstance(commands, list) 74 | p = None 75 | for c in commands: 76 | try: 77 | dispcmd = str([c] + args) 78 | # remember shell=False, so use git.cmd on windows, not just git 79 | p = subprocess.Popen([c] + args, cwd=cwd, stdout=subprocess.PIPE, 80 | stderr=(subprocess.PIPE if hide_stderr 81 | else None)) 82 | break 83 | except EnvironmentError: 84 | e = sys.exc_info()[1] 85 | if e.errno == errno.ENOENT: 86 | continue 87 | if verbose: 88 | print("unable to run %s" % dispcmd) 89 | print(e) 90 | return None 91 | else: 92 | if verbose: 93 | print("unable to find command, tried %s" % (commands,)) 94 | return None 95 | stdout = p.communicate()[0].strip() 96 | if sys.version_info[0] >= 3: 97 | stdout = stdout.decode() 98 | if p.returncode != 0: 99 | if verbose: 100 | print("unable to run %s (error)" % dispcmd) 101 | return None 102 | return stdout 103 | 104 | 105 | def versions_from_parentdir(parentdir_prefix, root, verbose): 106 | """Try to determine the version from the parent directory name. 107 | 108 | Source tarballs conventionally unpack into a directory that includes 109 | both the project name and a version string. 110 | """ 111 | dirname = os.path.basename(root) 112 | if not dirname.startswith(parentdir_prefix): 113 | if verbose: 114 | print("guessing rootdir is '%s', but '%s' doesn't start with " 115 | "prefix '%s'" % (root, dirname, parentdir_prefix)) 116 | raise NotThisMethod("rootdir doesn't start with parentdir_prefix") 117 | return {"version": dirname[len(parentdir_prefix):], 118 | "full-revisionid": None, 119 | "dirty": False, "error": None} 120 | 121 | 122 | @register_vcs_handler("git", "get_keywords") 123 | def git_get_keywords(versionfile_abs): 124 | """Extract version information from the given file.""" 125 | # the code embedded in _version.py can just fetch the value of these 126 | # keywords. When used from setup.py, we don't want to import _version.py, 127 | # so we do it with a regexp instead. This function is not used from 128 | # _version.py. 
129 | keywords = {} 130 | try: 131 | f = open(versionfile_abs, "r") 132 | for line in f.readlines(): 133 | if line.strip().startswith("git_refnames ="): 134 | mo = re.search(r'=\s*"(.*)"', line) 135 | if mo: 136 | keywords["refnames"] = mo.group(1) 137 | if line.strip().startswith("git_full ="): 138 | mo = re.search(r'=\s*"(.*)"', line) 139 | if mo: 140 | keywords["full"] = mo.group(1) 141 | f.close() 142 | except EnvironmentError: 143 | pass 144 | return keywords 145 | 146 | 147 | @register_vcs_handler("git", "keywords") 148 | def git_versions_from_keywords(keywords, tag_prefix, verbose): 149 | """Get version information from git keywords.""" 150 | if not keywords: 151 | raise NotThisMethod("no keywords at all, weird") 152 | refnames = keywords["refnames"].strip() 153 | if refnames.startswith("$Format"): 154 | if verbose: 155 | print("keywords are unexpanded, not using") 156 | raise NotThisMethod("unexpanded keywords, not a git-archive tarball") 157 | refs = [r.strip() for r in refnames.strip("()").split(",")] 158 | # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of 159 | # just "foo-1.0". If we see a "tag: " prefix, prefer those. 160 | TAG = "tag: " 161 | tags = set([r[len(TAG):] for r in refs if r.startswith(TAG)]) 162 | if not tags: 163 | # Either we're using git < 1.8.3, or there really are no tags. We use 164 | # a heuristic: assume all version tags have a digit. The old git %d 165 | # expansion behaves like git log --decorate=short and strips out the 166 | # refs/heads/ and refs/tags/ prefixes that would let us distinguish 167 | # between branches and tags. By ignoring refnames without digits, we 168 | # filter out many common branch names like "release" and 169 | # "stabilization", as well as "HEAD" and "master". 170 | tags = set([r for r in refs if re.search(r'\d', r)]) 171 | if verbose: 172 | print("discarding '%s', no digits" % ",".join(set(refs) - tags)) 173 | if verbose: 174 | print("likely tags: %s" % ",".join(sorted(tags))) 175 | for ref in sorted(tags): 176 | # sorting will prefer e.g. "2.0" over "2.0rc1" 177 | if ref.startswith(tag_prefix): 178 | r = ref[len(tag_prefix):] 179 | if verbose: 180 | print("picking %s" % r) 181 | return {"version": r, 182 | "full-revisionid": keywords["full"].strip(), 183 | "dirty": False, "error": None, "branch": None 184 | } 185 | # no suitable tags, so version is "0+unknown", but full hex is still there 186 | if verbose: 187 | print("no suitable tags, using unknown + full revision id") 188 | return {"version": "0+unknown", 189 | "full-revisionid": keywords["full"].strip(), 190 | "dirty": False, "error": "no suitable tags", 191 | "branch": None} 192 | 193 | 194 | @register_vcs_handler("git", "pieces_from_vcs") 195 | def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command): 196 | """Get version from 'git describe' in the root of the source tree. 197 | 198 | This only gets called if the git-archive 'subst' keywords were *not* 199 | expanded, and _version.py hasn't already been rewritten with a short 200 | version string, meaning we're inside a checked out source tree. 201 | """ 202 | if not os.path.exists(os.path.join(root, ".git")): 203 | if verbose: 204 | print("no .git in %s" % root) 205 | raise NotThisMethod("no .git directory") 206 | 207 | GITS = ["git"] 208 | if sys.platform == "win32": 209 | GITS = ["git.cmd", "git.exe"] 210 | # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty] 211 | # if there isn't one, this yields HEX[-dirty] (no NUM). 
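# For example, with tag_prefix 'v' the describe output could look like
# 'v0.2.0-4-g1a2b3c4-dirty': closest tag v0.2.0, 4 commits on top of it,
# short hash 1a2b3c4, with uncommitted changes present.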
Note, for git v1.7 212 | # and below, it is necessary to run "git update-index --refresh" first. 213 | describe_out = run_command(GITS, ["describe", "--tags", "--dirty", 214 | "--always", "--long", 215 | "--match", "%s*" % tag_prefix], 216 | cwd=root) 217 | # --long was added in git-1.5.5 218 | if describe_out is None: 219 | raise NotThisMethod("'git describe' failed") 220 | describe_out = describe_out.strip() 221 | full_out = run_command(GITS, ["rev-parse", "HEAD"], cwd=root) 222 | if full_out is None: 223 | raise NotThisMethod("'git rev-parse' failed") 224 | full_out = full_out.strip() 225 | 226 | pieces = {} 227 | pieces["long"] = full_out 228 | pieces["short"] = full_out[:7] # maybe improved later 229 | pieces["error"] = None 230 | 231 | # abbrev-ref available with git >= 1.7 232 | branch_name = run_command(GITS, ["rev-parse", "--abbrev-ref", "HEAD"], 233 | cwd=root).strip() 234 | if branch_name == 'HEAD': 235 | branches = run_command(GITS, ["branch", "--contains"], 236 | cwd=root).split('\n') 237 | branches = [branch[2:] for branch in branches if branch[4:5] != '('] 238 | if 'master' in branches: 239 | branch_name = 'master' 240 | elif not branches: 241 | branch_name = None 242 | else: 243 | # Pick the first branch that is returned. Good or bad. 244 | branch_name = branches[0] 245 | 246 | branch_name = branch_name.replace(' ', '.').replace('(', '').replace(')', '') 247 | 248 | pieces['branch'] = branch_name 249 | 250 | # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty] 251 | # TAG might have hyphens. 252 | git_describe = describe_out 253 | 254 | # look for -dirty suffix 255 | dirty = git_describe.endswith("-dirty") 256 | pieces["dirty"] = dirty 257 | if dirty: 258 | git_describe = git_describe[:git_describe.rindex("-dirty")] 259 | 260 | # now we have TAG-NUM-gHEX or HEX 261 | 262 | if "-" in git_describe: 263 | # TAG-NUM-gHEX 264 | mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe) 265 | if not mo: 266 | # unparseable. Maybe git-describe is misbehaving? 267 | pieces["error"] = ("unable to parse git-describe output: '%s'" 268 | % describe_out) 269 | return pieces 270 | 271 | # tag 272 | full_tag = mo.group(1) 273 | if not full_tag.startswith(tag_prefix): 274 | if verbose: 275 | fmt = "tag '%s' doesn't start with prefix '%s'" 276 | print(fmt % (full_tag, tag_prefix)) 277 | pieces["error"] = ("tag '%s' doesn't start with prefix '%s'" 278 | % (full_tag, tag_prefix)) 279 | return pieces 280 | pieces["closest-tag"] = full_tag[len(tag_prefix):] 281 | 282 | # distance: number of commits since tag 283 | pieces["distance"] = int(mo.group(2)) 284 | 285 | # commit: short hex revision ID 286 | pieces["short"] = mo.group(3) 287 | 288 | else: 289 | # HEX: no tags 290 | pieces["closest-tag"] = None 291 | count_out = run_command(GITS, ["rev-list", "HEAD", "--count"], 292 | cwd=root) 293 | pieces["distance"] = int(count_out) # total number of commits 294 | 295 | return pieces 296 | 297 | 298 | # Default matches v1.2.x, maint/1.2.x, 1.2.x, 1.x etc. 299 | default_maint_branch_regexp = ".*([0-9]+\.)+x$" 300 | 301 | 302 | def plus_or_dot(pieces): 303 | """Return a + if we don't already have one, else return a .""" 304 | if "+" in pieces.get("closest-tag", ""): 305 | return "." 306 | return "+" 307 | 308 | 309 | def render_pep440(pieces): 310 | """Build up version string, with post-release "local version identifier". 311 | 312 | Our goal: TAG[+DISTANCE.gHEX[.dirty]] . 
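For instance, closest-tag '1.2.0' with distance 3, short 'abc1234' and a
dirty tree renders as '1.2.0+3.gabc1234.dirty'.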
Note that if you 313 | get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty 314 | 315 | Exceptions: 316 | 1: no tags. git_describe was just HEX. 0+untagged.DISTANCE.gHEX[.dirty] 317 | """ 318 | if pieces["closest-tag"]: 319 | rendered = pieces["closest-tag"] 320 | if pieces["distance"] or pieces["dirty"]: 321 | rendered += plus_or_dot(pieces) 322 | rendered += "%d.g%s" % (pieces["distance"], pieces["short"]) 323 | if pieces["dirty"]: 324 | rendered += ".dirty" 325 | else: 326 | # exception #1 327 | rendered = "0+untagged.%d.g%s" % (pieces["distance"], 328 | pieces["short"]) 329 | if pieces["dirty"]: 330 | rendered += ".dirty" 331 | return rendered 332 | 333 | 334 | def render_pep440_pre(pieces): 335 | """TAG[.post.devDISTANCE] -- No -dirty. 336 | 337 | Exceptions: 338 | 1: no tags. 0.post.devDISTANCE 339 | """ 340 | if pieces["closest-tag"]: 341 | rendered = pieces["closest-tag"] 342 | if pieces["distance"]: 343 | rendered += ".post.dev%d" % pieces["distance"] 344 | else: 345 | # exception #1 346 | rendered = "0.post.dev%d" % pieces["distance"] 347 | return rendered 348 | 349 | 350 | def render_pep440_post(pieces): 351 | """TAG[.postDISTANCE[.dev0]+gHEX] . 352 | 353 | The ".dev0" means dirty. Note that .dev0 sorts backwards 354 | (a dirty tree will appear "older" than the corresponding clean one), 355 | but you shouldn't be releasing software with -dirty anyways. 356 | 357 | Exceptions: 358 | 1: no tags. 0.postDISTANCE[.dev0] 359 | """ 360 | if pieces["closest-tag"]: 361 | rendered = pieces["closest-tag"] 362 | if pieces["distance"] or pieces["dirty"]: 363 | rendered += ".post%d" % pieces["distance"] 364 | if pieces["dirty"]: 365 | rendered += ".dev0" 366 | rendered += plus_or_dot(pieces) 367 | rendered += "g%s" % pieces["short"] 368 | else: 369 | # exception #1 370 | rendered = "0.post%d" % pieces["distance"] 371 | if pieces["dirty"]: 372 | rendered += ".dev0" 373 | rendered += "+g%s" % pieces["short"] 374 | return rendered 375 | 376 | 377 | def render_pep440_old(pieces): 378 | """TAG[.postDISTANCE[.dev0]] . 379 | 380 | The ".dev0" means dirty. 381 | 382 | Eexceptions: 383 | 1: no tags. 0.postDISTANCE[.dev0] 384 | """ 385 | if pieces["closest-tag"]: 386 | rendered = pieces["closest-tag"] 387 | if pieces["distance"] or pieces["dirty"]: 388 | rendered += ".post%d" % pieces["distance"] 389 | if pieces["dirty"]: 390 | rendered += ".dev0" 391 | else: 392 | # exception #1 393 | rendered = "0.post%d" % pieces["distance"] 394 | if pieces["dirty"]: 395 | rendered += ".dev0" 396 | return rendered 397 | 398 | 399 | def render_git_describe(pieces): 400 | """TAG[-DISTANCE-gHEX][-dirty]. 401 | 402 | Like 'git describe --tags --dirty --always'. 403 | 404 | Exceptions: 405 | 1: no tags. HEX[-dirty] (note: no 'g' prefix) 406 | """ 407 | if pieces["closest-tag"]: 408 | rendered = pieces["closest-tag"] 409 | if pieces["distance"]: 410 | rendered += "-%d-g%s" % (pieces["distance"], pieces["short"]) 411 | else: 412 | # exception #1 413 | rendered = pieces["short"] 414 | if pieces["dirty"]: 415 | rendered += "-dirty" 416 | return rendered 417 | 418 | 419 | def render_git_describe_long(pieces): 420 | """TAG-DISTANCE-gHEX[-dirty]. 421 | 422 | Like 'git describe --tags --dirty --always -long'. 423 | The distance/hash is unconditional. 424 | 425 | Exceptions: 426 | 1: no tags. 
HEX[-dirty] (note: no 'g' prefix) 427 | """ 428 | if pieces["closest-tag"]: 429 | rendered = pieces["closest-tag"] 430 | rendered += "-%d-g%s" % (pieces["distance"], pieces["short"]) 431 | else: 432 | # exception #1 433 | rendered = pieces["short"] 434 | if pieces["dirty"]: 435 | rendered += "-dirty" 436 | return rendered 437 | 438 | 439 | def add_one_to_version(version_string, number_index_to_increment=-1): 440 | """ 441 | Add one to a version string at the given numeric indices. 442 | 443 | >>> add_one_to_version('v1.2.3') 444 | 'v1.2.4' 445 | 446 | """ 447 | # Break up the tag by number groups (preserving multi-digit 448 | # numbers as multidigit) 449 | parts = re.split("([0-9]+)", version_string) 450 | 451 | digit_parts = [(i, part) for i, part in enumerate(parts) 452 | if part.isdigit()] 453 | 454 | # Deal with negative indexing. 455 | increment_at_index = ((number_index_to_increment + len(digit_parts)) 456 | % len(digit_parts)) 457 | for n_seen, (i, part) in enumerate(digit_parts): 458 | if n_seen == increment_at_index: 459 | parts[i] = str(int(part) + 1) 460 | elif n_seen > increment_at_index: 461 | parts[i] = '0' 462 | return ''.join(parts) 463 | 464 | 465 | def render_pep440_branch_based(pieces): 466 | # [TAG+1 of minor number][.devDISTANCE][+gHEX]. The git short is 467 | # included for dirty. 468 | 469 | # exceptions: 470 | # 1: no tags. 0.0.0.devDISTANCE[+gHEX] 471 | 472 | master = pieces.get('branch') == 'master' 473 | maint = re.match(default_maint_branch_regexp, 474 | pieces.get('branch') or '') 475 | 476 | # If we are on a tag, just pep440-pre it. 477 | if pieces["closest-tag"] and not (pieces["distance"] or 478 | pieces["dirty"]): 479 | rendered = pieces["closest-tag"] 480 | else: 481 | # Put a default closest-tag in. 482 | if not pieces["closest-tag"]: 483 | pieces["closest-tag"] = '0.0.0' 484 | 485 | if pieces["distance"] or pieces["dirty"]: 486 | if maint: 487 | rendered = pieces["closest-tag"] 488 | if pieces["distance"]: 489 | rendered += ".post%d" % pieces["distance"] 490 | else: 491 | rendered = add_one_to_version(pieces["closest-tag"]) 492 | if pieces["distance"]: 493 | rendered += ".dev%d" % pieces["distance"] 494 | # Put the branch name in if it isn't master nor a 495 | # maintenance branch. 
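# For example, closest-tag '0.1.0' with distance 2 on a branch named
# 'feature_x' renders as '0.1.1.dev2+feature_x', and '_g<short-hash>' is
# appended when the tree is dirty.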
496 | 497 | plus = '+' 498 | if not (master or maint): 499 | rendered += "%s%s" % (plus, 500 | pieces.get('branch') or 501 | 'unknown_branch') 502 | plus = '_' 503 | 504 | if pieces["dirty"]: 505 | rendered += "%sg%s" % (plus, pieces["short"]) 506 | else: 507 | rendered = pieces["closest-tag"] 508 | return rendered 509 | 510 | 511 | STYLES = {'default': render_pep440, 512 | 'pep440': render_pep440, 513 | 'pep440-pre': render_pep440_pre, 514 | 'pep440-post': render_pep440_post, 515 | 'pep440-old': render_pep440_old, 516 | 'git-describe': render_git_describe, 517 | 'git-describe-long': render_git_describe_long, 518 | 'pep440-old': render_pep440_old, 519 | 'pep440-branch-based': render_pep440_branch_based, 520 | } 521 | 522 | 523 | def render(pieces, style): 524 | """Render the given version pieces into the requested style.""" 525 | if pieces["error"]: 526 | return {"version": "unknown", 527 | "full-revisionid": pieces.get("long"), 528 | "dirty": None, 529 | "error": pieces["error"]} 530 | 531 | if not style: 532 | style = 'default' 533 | 534 | renderer = STYLES.get(style) 535 | 536 | if not renderer: 537 | raise ValueError("unknown style '%s'" % style) 538 | 539 | rendered = renderer(pieces) 540 | 541 | return {"version": rendered, "full-revisionid": pieces["long"], 542 | "dirty": pieces["dirty"], "error": None} 543 | 544 | 545 | def get_versions(): 546 | """Get version information or return default if unable to do so.""" 547 | # I am in _version.py, which lives at ROOT/VERSIONFILE_SOURCE. If we have 548 | # __file__, we can work backwards from there to the root. Some 549 | # py2exe/bbfreeze/non-CPython implementations don't do __file__, in which 550 | # case we can only use expanded keywords. 551 | 552 | cfg = get_config() 553 | verbose = cfg.verbose 554 | 555 | try: 556 | return git_versions_from_keywords(get_keywords(), cfg.tag_prefix, 557 | verbose) 558 | except NotThisMethod: 559 | pass 560 | 561 | try: 562 | root = os.path.realpath(__file__) 563 | # versionfile_source is the relative path from the top of the source 564 | # tree (where the .git directory might live) to this file. Invert 565 | # this to find the root from __file__. 566 | for i in cfg.versionfile_source.split('/'): 567 | root = os.path.dirname(root) 568 | except NameError: 569 | return {"version": "0+unknown", "full-revisionid": None, 570 | "dirty": None, 571 | "error": "unable to find root of source tree"} 572 | 573 | try: 574 | pieces = git_pieces_from_vcs(cfg.tag_prefix, root, verbose) 575 | return render(pieces, cfg.style) 576 | except NotThisMethod: 577 | pass 578 | 579 | try: 580 | if cfg.parentdir_prefix: 581 | return versions_from_parentdir(cfg.parentdir_prefix, root, verbose) 582 | except NotThisMethod: 583 | pass 584 | 585 | return {"version": "0+unknown", "full-revisionid": None, 586 | "dirty": None, 587 | "error": "unable to compute version"} 588 | -------------------------------------------------------------------------------- /obvci/cli/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pelson/Obvious-CI/a11cee26e8578bc1146fa5f25201bdf35096fbc7/obvci/cli/__init__.py -------------------------------------------------------------------------------- /obvci/cli/conda_build_dir.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | """ 3 | A script to build and upload all of the conda recipes in 4 | the specified directory. 
5 | 6 | """ 7 | import argparse 8 | import sys 9 | 10 | 11 | from obvci.conda_tools.build_directory import Builder 12 | 13 | 14 | def main(): 15 | description = sys.modules[__name__].__doc__ 16 | parser = argparse.ArgumentParser(description=description) 17 | Builder.define_args(parser) 18 | args = parser.parse_args() 19 | return Builder.handle_args(args).main() 20 | 21 | 22 | if __name__ == '__main__': 23 | main() 24 | -------------------------------------------------------------------------------- /obvci/conda_tools/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pelson/Obvious-CI/a11cee26e8578bc1146fa5f25201bdf35096fbc7/obvci/conda_tools/__init__.py -------------------------------------------------------------------------------- /obvci/conda_tools/build.py: -------------------------------------------------------------------------------- 1 | from __future__ import print_function 2 | 3 | import os 4 | import shutil 5 | 6 | import conda_build.build as build_module 7 | from conda_build.metadata import MetaData 8 | import conda_build.config 9 | from conda.lock import Locked 10 | from conda_build.build import bldpkg_path 11 | import binstar_client 12 | from binstar_client.utils.detect import detect_package_type, get_attrs 13 | 14 | from . import inspect_binstar 15 | 16 | 17 | def build(meta, test=True): 18 | """Build (and optionally test) a recipe directory.""" 19 | with Locked(conda_build.config.croot): 20 | meta.check_fields() 21 | if os.path.exists(conda_build.config.config.info_dir): 22 | shutil.rmtree(conda_build.config.config.info_dir) 23 | build_module.build(meta, verbose=False, post=None) 24 | if test: 25 | build_module.test(meta, verbose=False) 26 | return meta 27 | 28 | 29 | def upload(cli, meta, owner, channels=['main']): 30 | """Upload a distribution, given the build metadata.""" 31 | fname = bldpkg_path(meta) 32 | package_type = detect_package_type(fname) 33 | package_attrs, release_attrs, file_attrs = get_attrs(package_type, fname) 34 | package_name = package_attrs['name'] 35 | version = release_attrs['version'] 36 | 37 | # Check the package exists, otherwise create one. 38 | try: 39 | cli.package(owner, package_name) 40 | except binstar_client.NotFound: 41 | print('Creating the {} package on {}'.format(package_name, owner)) 42 | summary = package_attrs['summary'] 43 | cli.add_package(owner, package_name, summary, package_attrs.get('license'), public=True) 44 | 45 | # Check the release exists, otherwise create one. 46 | try: 47 | cli.release(owner, package_name, version) 48 | except binstar_client.NotFound: 49 | # TODO: Add readme.md support for descriptions? 50 | cli.add_release(owner, package_name, version, requirements=[], announce=None, 51 | description='') 52 | 53 | try: 54 | cli.distribution(owner, package_name, version, file_attrs['basename']) 55 | except binstar_client.NotFound: 56 | # The file doesn't exist. 57 | pass 58 | else: 59 | print('Distribution %s already exists ... removing' % (file_attrs['basename'],)) 60 | cli.remove_dist(owner, package_name, version, file_attrs['basename']) 61 | 62 | with open(fname, 'rb') as fd: 63 | print('\nUploading file %s/%s/%s/%s to %s...' 
% (owner, package_name, version, file_attrs['basename'], channels)) 64 | upload_info = cli.upload(owner, package_name, version, file_attrs['basename'], 65 | fd, package_type, description='', 66 | dependencies=file_attrs.get('dependencies'), 67 | attrs=file_attrs['attrs'], 68 | channels=channels) 69 | return upload_info 70 | -------------------------------------------------------------------------------- /obvci/conda_tools/build_directory.py: -------------------------------------------------------------------------------- 1 | """ 2 | Build all the conda recipes in the given directory sequentially if they do not 3 | already exist on the given binstar channel. 4 | Building is done in order of dependencies (circular dependencies are not supported). 5 | Once a build is complete, the distribution will be uploaded (provided BINSTAR_TOKEN is 6 | defined), and the next package will be processed. 7 | 8 | """ 9 | from __future__ import print_function 10 | 11 | import logging 12 | import os 13 | import subprocess 14 | from argparse import Namespace 15 | 16 | from binstar_client.utils import get_binstar 17 | import binstar_client 18 | from conda.api import get_index 19 | from conda_build.metadata import MetaData 20 | from conda_build.build import bldpkg_path 21 | import conda.config 22 | 23 | from . import order_deps 24 | from . import build 25 | from . import inspect_binstar 26 | from . import from_conda_manifest_core_vn_matrix as vn_matrix 27 | 28 | 29 | def package_built_name(package, root_dir): 30 | package_dir = os.path.join(root_dir, package) 31 | meta = MetaData(package_dir) 32 | return bldpkg_path(meta) 33 | 34 | 35 | def distribution_exists(binstar_cli, owner, metadata): 36 | fname = '{}/{}.tar.bz2'.format(conda.config.subdir, metadata.dist()) 37 | try: 38 | r = binstar_cli.distribution(owner, metadata.name(), metadata.version(), 39 | fname) 40 | exists = True 41 | except binstar_client.errors.NotFound: 42 | exists = False 43 | return exists 44 | 45 | 46 | def recipes_to_build(binstar_cli, owner, channel, recipe_metas): 47 | for meta in recipe_metas: 48 | if not inspect_binstar.distribution_exists(binstar_cli, owner, meta): 49 | yield meta 50 | 51 | 52 | def fetch_metas(directory): 53 | """ 54 | Get the build metadata of all recipes in a directory. 55 | 56 | The recipes will be sorted by the order of their directory name. 57 | 58 | """ 59 | packages = [] 60 | for package_name in sorted(os.listdir(directory)): 61 | package_dir = os.path.join(directory, package_name) 62 | meta_yaml = os.path.join(package_dir, 'meta.yaml') 63 | 64 | if os.path.isdir(package_dir) and os.path.exists(meta_yaml): 65 | packages.append(MetaData(package_dir)) 66 | 67 | return packages 68 | 69 | 70 | def sort_dependency_order(metas): 71 | """Sort the metas into the order that they must be built.""" 72 | meta_named_deps = {} 73 | buildable = [meta.name() for meta in metas] 74 | for meta in metas: 75 | all_deps = ((meta.get_value('requirements/run', []) or []) + 76 | (meta.get_value('requirements/build', []) or [])) 77 | # Remove version information from the name. 78 | all_deps = [dep.split(' ', 1)[0] for dep in all_deps] 79 | meta_named_deps[meta.name()] = [dep for dep in all_deps if dep in buildable] 80 | sorted_names = list(order_deps.resolve_dependencies(meta_named_deps)) 81 | return sorted(metas, key=lambda meta: sorted_names.index(meta.name())) 82 | 83 | 84 | class BakedDistribution(object): 85 | """ 86 | Represents a conda pacakge, with the appropriate special case 87 | versions fixed (e.g. CONDA_PY, CONDA_NPY). 
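For example, BakedDistribution(meta, (('python', '3.4'),)) always evaluates
meta with CONDA_PY pinned to 34, whatever the surrounding conda_build
configuration happens to be.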
Without this, a meta 88 | changes as the conda_build.config.CONDA_NPY changes. 89 | 90 | """ 91 | def __init__(self, meta, special_versions=()): 92 | self.meta = meta 93 | self.special_versions = special_versions 94 | 95 | def __repr__(self): 96 | return 'BakedDistribution({}, {})'.format(self.meta, 97 | self.special_versions) 98 | 99 | def __str__(self): 100 | return self.dist() 101 | 102 | def vn_context(self): 103 | return vn_matrix.setup_vn_mtx_case(self.special_versions) 104 | 105 | def __getattr__(self, name): 106 | with vn_matrix.setup_vn_mtx_case(self.special_versions): 107 | self.meta.parse_again() 108 | result = getattr(self.meta, name) 109 | 110 | # Wrap any callable such that it is called within the appropriate 111 | # environment. 112 | # callable exists in python 2.* and >=3.2 113 | if callable(result): 114 | orig_result = result 115 | import functools 116 | @functools.wraps(result) 117 | def with_vn_mtx_setup(*args, **kwargs): 118 | with vn_matrix.setup_vn_mtx_case(self.special_versions): 119 | self.meta.parse_again() 120 | return orig_result(*args, **kwargs) 121 | result = with_vn_mtx_setup 122 | return result 123 | 124 | @classmethod 125 | def compute_matrix(cls, meta, index=None, extra_conditions=None): 126 | if index is None: 127 | with vn_matrix.override_conda_logging('WARN'): 128 | index = get_index() 129 | 130 | cases = vn_matrix.special_case_version_matrix(meta, index) 131 | 132 | if extra_conditions: 133 | cases = list(vn_matrix.filter_cases(cases, index, 134 | extra_conditions)) 135 | result = [] 136 | for case in cases: 137 | dist = cls(meta, case) 138 | if not dist.skip(): 139 | result.append(dist) 140 | return result 141 | 142 | 143 | class Builder(object): 144 | def __init__(self, conda_recipes_root, upload_owner, upload_channel): 145 | """ 146 | Build a directory of conda recipes sequentially, if they don't already exist on the owner's binstar account. 147 | If the build does exist on the binstar account, but isn't in the targeted channel, it will be added to upload_channel, 148 | All built distributions will be uploaded to the owner's channel. 149 | 150 | """ 151 | self.conda_recipes_root = conda_recipes_root 152 | self.upload_owner = upload_owner 153 | self.upload_channel = upload_channel 154 | 155 | self.binstar_token = os.environ.get('BINSTAR_TOKEN', None) 156 | self.can_upload = self.binstar_token is not None 157 | 158 | if not self.can_upload: 159 | print('**Build will continue, but no uploads will take place.**') 160 | print('To automatically upload from this script, define the BINSTAR_TOKEN env variable.') 161 | print('This is done automatically on the travis-ci system once the PR has been merged.') 162 | 163 | self.binstar_cli = get_binstar(Namespace(token=self.binstar_token, site=None)) 164 | 165 | @classmethod 166 | def define_args(cls, parser): 167 | parser.add_argument("recipe-dir", 168 | help="""The directory containing (multiple) conda recipes 169 | (i.e. each sub-directory must contain a meta.yaml).""") 170 | parser.add_argument("upload-user", 171 | help="""The target user on binstar where build distributions should go. 
172 | The BINSTAR_TOKEN environment variable must also be defined.""") 173 | parser.add_argument("--channel", help="""The target channel on binstar where built distributions should go.""", 174 | default='main') 175 | parser.add_argument("--build-condition", nargs='*', 176 | dest='extra_build_conditions', 177 | help="Extra conditions for computing the build matrix.", 178 | default=['python >=2'] # Thanks for the python 1.0 build Continuum... 179 | ) 180 | 181 | @classmethod 182 | def handle_args(cls, parsed_args): 183 | result = cls(getattr(parsed_args, 'recipe-dir'), 184 | getattr(parsed_args, 'upload-user'), 185 | parsed_args.channel) 186 | result.extra_build_conditions = list(filter(None, parsed_args.extra_build_conditions)) 187 | return result 188 | 189 | def fetch_all_metas(self): 190 | """ 191 | Return the conda recipe metas, in the order they should be built. 192 | 193 | """ 194 | conda_recipes_root = os.path.abspath(os.path.expanduser(self.conda_recipes_root)) 195 | recipe_metas = fetch_metas(conda_recipes_root) 196 | recipe_metas = sort_dependency_order(recipe_metas) 197 | return recipe_metas 198 | 199 | def calculate_existing_distributions(self, recipe_metas): 200 | # Figure out which distributions binstar.org already has. 201 | existing_distributions = [meta for meta in recipe_metas 202 | if inspect_binstar.distribution_exists(self.binstar_cli, self.upload_owner, meta)] 203 | 204 | print('Resolved dependencies, will be built in the following order: \n\t{}'.format( 205 | '\n\t'.join(['{} (will be built: {})'.format(meta.dist(), meta not in existing_distributions) 206 | for meta in recipe_metas]))) 207 | return existing_distributions 208 | 209 | def recipes_to_build(self, recipes): 210 | existing_distributions = self.calculate_existing_distributions(recipes) 211 | return [recipe not in existing_distributions for recipe in recipes] 212 | 213 | def build(self, meta): 214 | print('Building ', meta.dist()) 215 | if isinstance(meta, BakedDistribution): 216 | with meta.vn_context(): 217 | build.build(meta.meta) 218 | else: 219 | build.build(meta) 220 | 221 | def main(self): 222 | recipe_metas = self.fetch_all_metas() 223 | index = get_index() 224 | 225 | print('Resolving distributions from {} recipes... '.format(len(recipe_metas))) 226 | 227 | all_distros = [] 228 | for meta in recipe_metas: 229 | distros = BakedDistribution.compute_matrix(meta, index, 230 | getattr(self, 'extra_build_conditions', [])) 231 | all_distros.extend(distros) 232 | 233 | print('Computed that there are {} distributions from the {} ' 234 | 'recipes:'.format(len(all_distros), len(recipe_metas))) 235 | recipes_to_build = self.recipes_to_build(all_distros) 236 | 237 | for meta, build_dist in zip(all_distros, recipes_to_build): 238 | if build_dist: 239 | self.build(meta) 240 | self.post_build(meta, build_occured=build_dist) 241 | 242 | def post_build(self, meta, build_occured=True): 243 | if self.can_upload: 244 | already_on_channel = inspect_binstar.distribution_exists_on_channel(self.binstar_cli, 245 | self.upload_owner, 246 | meta, 247 | channel=self.upload_channel) 248 | if not build_occured and not already_on_channel: 249 | # Link a distribution. 
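# That is, the distribution already exists under upload_owner but is not yet
# on upload_channel, so it is attached to the channel (via
# inspect_binstar.add_distribution_to_channel) rather than rebuilt or
# re-uploaded.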
250 | print('Adding existing {} to the {} channel.'.format(meta.name(), self.upload_channel)) 251 | inspect_binstar.add_distribution_to_channel(self.binstar_cli, self.upload_owner, meta, channel=self.upload_channel) 252 | elif already_on_channel: 253 | print('Nothing to be done for {} - it is already on {}.'.format(meta.name(), self.upload_channel)) 254 | else: 255 | # Upload the distribution 256 | print('Uploading {} to the {} channel.'.format(meta.name(), self.upload_channel)) 257 | build.upload(self.binstar_cli, meta, self.upload_owner, channels=[self.upload_channel]) 258 | 259 | -------------------------------------------------------------------------------- /obvci/conda_tools/from_conda_manifest_core_vn_matrix.py: -------------------------------------------------------------------------------- 1 | # TODO: Pull this back together with conda_manifest. 2 | import os 3 | from contextlib import contextmanager 4 | from collections import defaultdict 5 | 6 | import conda.resolve 7 | from conda.resolve import MatchSpec 8 | import conda_build.config 9 | # import conda_manifest.config 10 | 11 | import logging 12 | from conda.resolve import stdoutlog, dotlog 13 | 14 | conda_stdoutlog = stdoutlog 15 | # TODO: Handle the amount of standard out that conda is producing. 16 | 17 | 18 | from conda.console import SysStdoutWriteHandler 19 | 20 | 21 | class StdoutNewline(SysStdoutWriteHandler): 22 | def emit(self, record): 23 | record.msg += '\n' 24 | SysStdoutWriteHandler.emit(self, record) 25 | 26 | 27 | stdout = logging.getLogger('obvci.stdoutlog') 28 | stdout.addHandler(StdoutNewline()) 29 | stdout.setLevel(logging.WARNING) 30 | 31 | 32 | @contextmanager 33 | def override_conda_logging(level): 34 | # Override the conda logging handlers. 35 | 36 | # We need to import conda.fetch and conda.resolve to trigger the 37 | # creation of the loggers in the first place. 38 | import conda.fetch 39 | import conda.resolve 40 | 41 | levels = {} 42 | handlers = {} 43 | loggers = ['progress', 'progress.start', 'progress.update', 44 | 'progress.stop', 'stdoutlog', 'stderrlog', 45 | 'conda.resolve', 'dotupdate'] 46 | 47 | for logger_name in loggers: 48 | logger = logging.getLogger(logger_name) 49 | levels[logger_name] = logger.level 50 | handlers[logger_name] = logger.handlers 51 | 52 | logger.setLevel(level) 53 | logger.handlers = [] 54 | yield 55 | for logger_name in loggers: 56 | logger = logging.getLogger(logger_name) 57 | logger.setLevel(levels[logger_name]) 58 | logger.handlers = handlers[logger_name] 59 | 60 | 61 | @contextmanager 62 | def setup_vn_mtx_case(case): 63 | orig_npy = conda_build.config.config.CONDA_NPY 64 | orig_py = conda_build.config.config.CONDA_PY 65 | 66 | for pkg, version in case: 67 | version = int(version.replace('.', '')) 68 | if pkg == 'python': 69 | conda_build.config.config.CONDA_PY = version 70 | elif pkg == 'numpy': 71 | conda_build.config.config.CONDA_NPY = version 72 | else: 73 | raise NotImplementedError('Package {} not yet implemented.' 74 | ''.format(pkg)) 75 | yield 76 | conda_build.config.config.CONDA_NPY = orig_npy 77 | conda_build.config.config.CONDA_PY = orig_py 78 | 79 | 80 | def conda_special_versions(meta, index, version_matrix=None): 81 | """ 82 | Returns a generator which configures conda build's PY and NPY versions 83 | according to the given version matrix. If no version matrix is given, it 84 | will be computed by :func:`special_case_version_matrix`. 
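Each case is yielded while the corresponding CONDA_PY/CONDA_NPY configuration
is active, so a caller can simply build inside the loop, e.g. (illustrative):

    for case in conda_special_versions(meta, index):
        build(meta)  # built against this case's python/numpy versions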
85 | 86 | """ 87 | if version_matrix is None: 88 | version_matrix = special_case_version_matrix(meta, index) 89 | 90 | for case in version_matrix: 91 | with setup_vn_mtx_case(case): 92 | yield case 93 | 94 | 95 | def special_case_version_matrix(meta, index): 96 | """ 97 | Return the non-orthogonal version matrix for special software within conda 98 | (numpy, python). 99 | 100 | For example, supposing there was a numpy 1.8 & 1.9 for python 2.7, 101 | but only a numpy 1.9 for python 3.5, the matrix should be: 102 | 103 | ([('python', '2.7.0'), ('numpy', '1.8.0')], 104 | [('python', '2.7.0'), ('numpy', '1.9.0')], 105 | [('python', '3.5.0'), ('numpy', '1.9.0')]) 106 | 107 | Packages which don't depend on any of the special cases will return an 108 | iterable with an empty list, so that code such as: 109 | 110 | for case in special_case_version_matrix(...): 111 | ... setup the case ... 112 | ... build ... 113 | 114 | can be written provided that the process which handles the cases can handle 115 | an empty list. 116 | 117 | .. note:: 118 | 119 | This algorithm does not deal with PERL and R versions at this time. 120 | 121 | """ 122 | r = conda.resolve.Resolve(index) 123 | requirements = meta.get_value('requirements/build', []) 124 | requirement_specs = {MatchSpec(spec).name: MatchSpec(spec) 125 | for spec in requirements} 126 | run_requirements = meta.get_value('requirements/run', []) 127 | run_requirement_specs = defaultdict(list) 128 | # Generate a list of requirements for each spec name to ensure that 129 | # multi-line specs are handled. 130 | for spec in run_requirements: 131 | run_requirement_specs[MatchSpec(spec).name].append(spec) 132 | 133 | # Combine multi-line specs into a single line by assuming the requirements 134 | # should be and-ed. 135 | for spec_name, spec_list in run_requirement_specs.items(): 136 | run_requirement_specs[spec_name] = ','.join(spec_list) 137 | 138 | # Turn these into MatchSpecs. 139 | run_requirement_specs = {name: MatchSpec(spec) 140 | for name, spec in run_requirement_specs.items()} 141 | 142 | # Thanks to https://github.com/conda/conda-build/pull/493 we no longer need to 143 | # compute the complex matrix for numpy versions unless a specific version has 144 | # been defined. 145 | np_spec = requirement_specs.get('numpy') 146 | np_run_spec = run_requirement_specs.get('numpy') 147 | if np_spec and np_run_spec and 'x.x' not in np_run_spec.spec: 148 | # A simple spec (just numpy) has been defined, so we can drop it from the 149 | # special cases. 150 | requirement_specs.pop('numpy') 151 | 152 | for pkg in requirement_specs: 153 | spec = requirement_specs[pkg] 154 | # We want to bake the version in, but we don't know what it is yet. 155 | if spec.spec.endswith(' x.x'): 156 | requirement_specs[pkg] = MatchSpec(spec.spec[:-4]) 157 | 158 | def minor_vn(version_str): 159 | """ 160 | Take an string of the form 1.8.2, into integer form 1.8 161 | """ 162 | return '.'.join(version_str.split('.')[:2]) 163 | 164 | cases = [] 165 | 166 | with override_conda_logging(logging.WARN): 167 | if 'numpy' in requirement_specs: 168 | np_spec = requirement_specs.pop('numpy') 169 | py_spec = requirement_specs.pop('python', None) 170 | for numpy_pkg in r.get_pkgs(np_spec): 171 | np_vn = minor_vn(index[numpy_pkg.fn]['version']) 172 | numpy_deps = index[numpy_pkg.fn]['depends'] 173 | numpy_deps = {MatchSpec(spec).name: MatchSpec(spec) 174 | for spec in numpy_deps} 175 | # This would be problematic if python wasn't a dep of numpy. 
176 | for python_pkg in r.get_pkgs(numpy_deps['python']): 177 | if py_spec and not py_spec.match(python_pkg.fn): 178 | continue 179 | py_vn = minor_vn(index[python_pkg.fn]['version']) 180 | case = (('python', py_vn), 181 | ('numpy', np_vn), 182 | ) 183 | if case not in cases: 184 | cases.append(case) 185 | elif 'python' in requirement_specs: 186 | py_spec = requirement_specs.pop('python') 187 | for python_pkg in r.get_pkgs(py_spec): 188 | py_vn = minor_vn(index[python_pkg.fn]['version']) 189 | case = (('python', py_vn), ) 190 | if case not in cases: 191 | cases.append(case) 192 | 193 | if 'perl' in requirement_specs: 194 | raise NotImplementedError('PERL version matrix not yet implemented.') 195 | if 'r' in requirement_specs: 196 | raise NotImplementedError('R version matrix not yet implemented.') 197 | 198 | # We only want the special cases. 199 | # cases = list(filter_cases(cases, index, requirement_specs.keys())) 200 | 201 | # Put an empty case in to allow simple iteration of the results. 202 | if not cases: 203 | cases.append(()) 204 | 205 | return set(cases) 206 | 207 | 208 | def filter_cases(cases, index, extra_specs): 209 | """ 210 | cases might look like: 211 | 212 | cases = ([('python', '2.7'), ('numpy', '1.8')], 213 | [('python', '2.7'), ('numpy', '1.9')], 214 | [('python', '3.5'), ('numpy', '1.8')], 215 | ) 216 | 217 | Typically extra_specs comes from the environment specification. 218 | 219 | """ 220 | """ 221 | cases might look like: 222 | 223 | cases = ([('python', '2.7'), ('numpy', '1.8')], 224 | [('python', '2.7'), ('numpy', '1.9')], 225 | [('python', '3.5'), ('numpy', '1.8')], 226 | ) 227 | 228 | Typically extra_specs comes from the environment specification. 229 | 230 | """ 231 | specs = [MatchSpec(spec) for spec in extra_specs] 232 | 233 | for case in cases: 234 | cases_by_pkg_name = {name: '{}-{}.0-0.tar.bz2'.format(name, version) 235 | for name, version in case} 236 | match = [] 237 | for spec in specs: 238 | if spec.name in cases_by_pkg_name: 239 | match.append(bool(spec.match(cases_by_pkg_name[spec.name]))) 240 | if all(match): 241 | yield case 242 | 243 | -------------------------------------------------------------------------------- /obvci/conda_tools/inspect_binstar.py: -------------------------------------------------------------------------------- 1 | import conda.config 2 | import binstar_client 3 | from conda_build.build import bldpkg_path 4 | 5 | 6 | def distribution_exists(binstar_cli, owner, metadata): 7 | """ 8 | Determine whether a distribution exists. 9 | 10 | This does not check specific channels - it is either on binstar or it is not. 11 | """ 12 | fname = '{}/{}.tar.bz2'.format(conda.config.subdir, metadata.dist()) 13 | try: 14 | r = binstar_cli.distribution(owner, metadata.name(), metadata.version(), 15 | fname) 16 | exists = True 17 | except binstar_client.NotFound: 18 | exists = False 19 | return exists 20 | 21 | 22 | def distribution_exists_on_channel(binstar_cli, owner, metadata, channel='main'): 23 | """ 24 | Determine whether a distribution exists on a specific channel. 25 | 26 | Note from @pelson: As far as I can see, there is no easy way to do this on binstar. 
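The check below therefore lists the channel's files and looks for the expected
basename, which has the form '<subdir>/<name>-<version>-<buildstring>.tar.bz2'
(for instance 'linux-64/recipe1-0.1.0-0.tar.bz2').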
27 | 28 | """ 29 | fname = '{}/{}.tar.bz2'.format(conda.config.subdir, metadata.dist()) 30 | distributions_on_channel = [dist['basename'] for dist in 31 | binstar_cli.show_channel(owner=owner, channel=channel)['files']] 32 | return fname in distributions_on_channel 33 | 34 | 35 | def add_distribution_to_channel(binstar_cli, owner, metadata, channel='main'): 36 | """ 37 | Add a(n already existing) distribution on binstar to another channel. 38 | 39 | Note - the addition is done based on name and version - no build strings etc. 40 | so if you have a foo-0.1-np18 and foo-0.1-np19 *both* will be added to the channel. 41 | 42 | """ 43 | package_fname = '{}/{}.tar.bz2'.format(conda.config.subdir, metadata.dist()) 44 | binstar_cli.add_channel(channel, owner, metadata.name(), metadata.version())#filename=package_fname) 45 | -------------------------------------------------------------------------------- /obvci/conda_tools/order_deps.py: -------------------------------------------------------------------------------- 1 | import os 2 | 3 | 4 | def resolve_dependencies(package_dependencies): 5 | """ 6 | Given a dictionary mapping a package to its dependencies, return a 7 | generator of packages to install, sorted by the required install 8 | order. 9 | 10 | >>> deps = resolve_dependencies({'a': ['b', 'c'], 'b': ['c'], 11 | 'c': ['d'], 'd': []}) 12 | >>> list(deps) 13 | ['d', 'c', 'b', 'a'] 14 | 15 | """ 16 | remaining_dependencies = package_dependencies.copy() 17 | completed_packages = [] 18 | 19 | # A maximum of 10000 iterations. Beyond that and there is probably a 20 | # problem. 21 | for failsafe in range(10000): 22 | for package, deps in sorted(remaining_dependencies.copy().items()): 23 | if all(dependency in completed_packages for dependency in deps): 24 | completed_packages.append(package) 25 | remaining_dependencies.pop(package) 26 | yield package 27 | else: 28 | # Put a check in to ensure that all the dependencies were 29 | # defined as packages, otherwise we will never succeed. 30 | for dependency in deps: 31 | if dependency not in package_dependencies: 32 | msg = ('The package {} depends on {}, but it was not ' 33 | 'part of the package_dependencies dictionary.' 34 | ''.format(package, dependency)) 35 | raise ValueError(msg) 36 | 37 | # Close off the loop if we've completed the dependencies. 38 | if not remaining_dependencies: 39 | break 40 | else: 41 | raise ValueError('Dependencies could not be resolved. 
' 42 | 'Remaining dependencies: {}' 43 | ''.format(remaining_dependencies)) 44 | -------------------------------------------------------------------------------- /obvci/tests/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pelson/Obvious-CI/a11cee26e8578bc1146fa5f25201bdf35096fbc7/obvci/tests/__init__.py -------------------------------------------------------------------------------- /obvci/tests/recipes/recipe1_dev/bld.bat: -------------------------------------------------------------------------------- 1 | echo v0.1.0.dev1 > __conda_version__.txt 2 | -------------------------------------------------------------------------------- /obvci/tests/recipes/recipe1_dev/build.sh: -------------------------------------------------------------------------------- 1 | echo "v0.1.0.dev1" > __conda_version__.txt 2 | -------------------------------------------------------------------------------- /obvci/tests/recipes/recipe1_dev/meta.yaml: -------------------------------------------------------------------------------- 1 | package: 2 | name: recipe1 3 | version: 'determined_at_buildtime' 4 | 5 | requirements: 6 | build: 7 | 8 | run: 9 | 10 | -------------------------------------------------------------------------------- /obvci/tests/recipes/recipes_directory/recipe1/build.sh: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pelson/Obvious-CI/a11cee26e8578bc1146fa5f25201bdf35096fbc7/obvci/tests/recipes/recipes_directory/recipe1/build.sh -------------------------------------------------------------------------------- /obvci/tests/recipes/recipes_directory/recipe1/meta.yaml: -------------------------------------------------------------------------------- 1 | package: 2 | name: recipe1 3 | version: '0.1.0' 4 | 5 | requirements: 6 | build: 7 | 8 | run: 9 | 10 | -------------------------------------------------------------------------------- /obvci/tests/recipes/recipes_directory/recipe2/build.sh: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pelson/Obvious-CI/a11cee26e8578bc1146fa5f25201bdf35096fbc7/obvci/tests/recipes/recipes_directory/recipe2/build.sh -------------------------------------------------------------------------------- /obvci/tests/recipes/recipes_directory/recipe2/meta.yaml: -------------------------------------------------------------------------------- 1 | package: 2 | name: recipe2 3 | version: '2.2' 4 | 5 | requirements: 6 | build: 7 | - recipe1 >= 0.1.0 8 | 9 | run: 10 | - recipe3 11 | -------------------------------------------------------------------------------- /obvci/tests/recipes/recipes_directory/recipe3/build.sh: -------------------------------------------------------------------------------- 1 | echo 'Recipe 3' 2 | -------------------------------------------------------------------------------- /obvci/tests/recipes/recipes_directory/recipe3/meta.yaml: -------------------------------------------------------------------------------- 1 | package: 2 | name: recipe3 3 | version: '3.1' 4 | 5 | requirements: 6 | build: 7 | - recipe1 8 | 9 | -------------------------------------------------------------------------------- /obvci/tests/unit/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pelson/Obvious-CI/a11cee26e8578bc1146fa5f25201bdf35096fbc7/obvci/tests/unit/__init__.py 
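The three test recipes above are arranged so that recipe2 and recipe3 each build-depend on recipe1; this is the fixture that the dependency ordering in obvci/conda_tools/order_deps.py is exercised against. A minimal sketch (illustrative only, not part of the repository) of how resolve_dependencies would order them, assuming the version constraints are stripped down to bare package names:

    # Illustrative only: mirrors the build requirements declared in the
    # recipe meta.yaml files above (version constraints dropped).
    from obvci.conda_tools.order_deps import resolve_dependencies

    deps = {
        'recipe1': [],           # no build requirements
        'recipe2': ['recipe1'],  # build: recipe1 >= 0.1.0
        'recipe3': ['recipe1'],  # build: recipe1
    }

    print(list(resolve_dependencies(deps)))
    # -> ['recipe1', 'recipe2', 'recipe3']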
-------------------------------------------------------------------------------- /obvci/tests/unit/conda/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pelson/Obvious-CI/a11cee26e8578bc1146fa5f25201bdf35096fbc7/obvci/tests/unit/conda/__init__.py -------------------------------------------------------------------------------- /obvci/tests/unit/conda/dummy_index.py: -------------------------------------------------------------------------------- 1 | import collections 2 | import conda.config 3 | 4 | 5 | _DummyPackage = collections.namedtuple('_DummyPackage', 6 | ['pkg_name', 'build_deps', 'run_deps']) 7 | 8 | 9 | class DummyPackage(_DummyPackage): 10 | def __new__(cls, name, build_deps=None, run_deps=None): 11 | return super(DummyPackage, cls).__new__(cls, name, build_deps or (), 12 | run_deps or ()) 13 | 14 | def name(self): 15 | return self.pkg_name 16 | 17 | def dist(self): 18 | return '{}-{}-{}'.format(self.name(), '0.0', '0') 19 | 20 | def get_value(self, item, default): 21 | if item == 'requirements/run': 22 | return self.run_deps 23 | elif item == 'requirements/build': 24 | return self.build_deps 25 | else: 26 | raise AttributeError(item) 27 | 28 | def __repr__(self): 29 | # For testing purposes, this is particularly convenient. 30 | return self.name() 31 | 32 | 33 | class DummyIndex(dict): 34 | def add_pkg(self, name, version, build_string='', 35 | depends=(), build_number='0', 36 | **extra_items): 37 | if build_string: 38 | build_string = '{}_{}'.format(build_string, build_number) 39 | else: 40 | build_string = build_number 41 | pkg_info = dict(name=name, version=version, build_number=build_number, 42 | build=build_string, subdir=conda.config.subdir, 43 | depends=tuple(depends), **extra_items) 44 | self['{}-{}-{}.tar.bz2'.format(name, version, build_string)] = pkg_info 45 | -------------------------------------------------------------------------------- /obvci/tests/unit/conda/test_BakedDistribution.py: -------------------------------------------------------------------------------- 1 | import os 2 | import shutil 3 | import tempfile 4 | import unittest 5 | 6 | import conda_build.config 7 | from conda_build.metadata import MetaData 8 | 9 | from obvci.conda_tools.build_directory import BakedDistribution 10 | from obvci.tests.unit.conda.dummy_index import DummyIndex, DummyPackage 11 | 12 | 13 | class Test_conditional_recipe(unittest.TestCase): 14 | # Tests cases where a recipe changes based on external 15 | # conditions, such as the definition of the PYTHON version. 
16 | def setUp(self): 17 | self.recipe_dir = tempfile.mkdtemp(prefix='tmp_obvci_recipe_') 18 | 19 | def tearDown(self): 20 | shutil.rmtree(self.recipe_dir) 21 | 22 | def test_py_version_selector(self): 23 | recipe = """ 24 | package: 25 | name: recipe_which_depends_on_py_version 26 | version: 3 # [py3k] 27 | version: 2 # [not py3k] 28 | """.replace('\n' + ' ' * 12, '\n').strip() 29 | with open(os.path.join(self.recipe_dir, 'meta.yaml'), 'w') as fh: 30 | fh.write(recipe) 31 | conda_build.config.config.CONDA_PY = 27 32 | meta = MetaData(self.recipe_dir) 33 | dist1 = BakedDistribution(meta, (('python', '27', ), )) 34 | self.assertEqual(dist1.version(), u'2') 35 | 36 | dist2 = BakedDistribution(meta, (('python', '35', ), )) 37 | self.assertEqual(dist2.version(), u'3') 38 | self.assertEqual(dist1.version(), u'2') 39 | 40 | def test_py_version_selector_skip(self): 41 | recipe = """ 42 | package: 43 | name: recipe_which_depends_on_py_version 44 | build: # [py35] 45 | skip: True # [py3k] 46 | """.replace('\n' + ' ' * 12, '\n').strip() 47 | with open(os.path.join(self.recipe_dir, 'meta.yaml'), 'w') as fh: 48 | fh.write(recipe) 49 | conda_build.config.config.CONDA_PY = 27 50 | meta = MetaData(self.recipe_dir) 51 | dist1 = BakedDistribution(meta, (('python', '35', ), )) 52 | dist2 = BakedDistribution(meta, (('python', '34', ), )) 53 | 54 | self.assertEqual(dist1.skip(), True) 55 | self.assertEqual(dist2.skip(), False) 56 | 57 | 58 | class Test_baked_version(unittest.TestCase): 59 | def setUp(self): 60 | self.index = DummyIndex() 61 | self.recipe_dir = tempfile.mkdtemp(prefix='tmp_obvci_recipe_') 62 | 63 | def tearDown(self): 64 | shutil.rmtree(self.recipe_dir) 65 | 66 | def test_py_xx_version(self): 67 | recipe = """ 68 | package: 69 | name: recipe_which_depends_on_py_version 70 | version: 2 71 | requirements: 72 | build: 73 | - python >=2.7 74 | - numpy 75 | run: 76 | - python x.x 77 | - numpy x.x 78 | """ 79 | with open(os.path.join(self.recipe_dir, 'meta.yaml'), 'w') as fh: 80 | fh.write(recipe) 81 | conda_build.config.config.CONDA_PY = 35 82 | conda_build.config.config.CONDA_NPY = 17 83 | 84 | meta = MetaData(self.recipe_dir) 85 | 86 | self.index.add_pkg('python', '2.7.2') 87 | self.index.add_pkg('python', '2.6.2') 88 | self.index.add_pkg('python', '3.5.0') 89 | self.index.add_pkg('numpy', '1.8.0', depends=['python']) 90 | r = BakedDistribution.compute_matrix(meta, self.index) 91 | self.assertEqual(len(r), 2) 92 | self.assertEqual(r[0].build_id(), 'np18py27_0') 93 | self.assertEqual(r[1].build_id(), 'np18py35_0') 94 | 95 | def test_py_xx_version(self): 96 | recipe = """ 97 | package: 98 | name: recipe_which_depends_on_py_version 99 | version: 2 100 | build: 101 | skip: True # [py3k] 102 | requirements: 103 | build: 104 | - python 105 | run: 106 | - python 107 | """ 108 | with open(os.path.join(self.recipe_dir, 'meta.yaml'), 'w') as fh: 109 | fh.write(recipe) 110 | conda_build.config.config.CONDA_PY = 35 111 | 112 | meta = MetaData(self.recipe_dir) 113 | 114 | self.index.add_pkg('python', '2.7.2') 115 | self.index.add_pkg('python', '2.6.2') 116 | self.index.add_pkg('python', '3.5.0') 117 | r = BakedDistribution.compute_matrix(meta, self.index) 118 | self.assertEqual(len(r), 2) 119 | self.assertEqual(r[0].build_id(), 'py27_0') 120 | self.assertEqual(r[1].build_id(), 'py26_0') 121 | 122 | 123 | if __name__ == '__main__': 124 | unittest.main() 125 | -------------------------------------------------------------------------------- /obvci/tests/unit/conda/test_build_directory__channels.py: 
-------------------------------------------------------------------------------- 1 | import nose 2 | from nose.tools import assert_equal, assert_false, assert_true, assert_not_equal 3 | from unittest import expectedFailure 4 | from conda_build.metadata import MetaData 5 | 6 | import os 7 | 8 | from obvci.conda_tools.build import build, upload 9 | from obvci.conda_tools.build_directory import recipes_to_build, fetch_metas, sort_dependency_order 10 | from obvci.conda_tools.inspect_binstar import distribution_exists 11 | from obvci.conda_tools.inspect_binstar import distribution_exists_on_channel, add_distribution_to_channel 12 | 13 | from binstar_client.utils import get_binstar 14 | from argparse import Namespace 15 | 16 | 17 | def clear_binstar(cli, owner): 18 | """ 19 | Empty all distributions for a user. 20 | 21 | The "rm -rf *" of the binstar world. 22 | 23 | """ 24 | for channel in cli.list_channels(owner): 25 | cli.remove_channel(owner, channel) 26 | 27 | for package in cli.user_packages(owner): 28 | cli.remove_package(owner, package['name']) 29 | 30 | 31 | OWNER = 'Obvious-ci-tests' 32 | CLIENT = get_binstar(Namespace(token=os.environ['BINSTAR_TOKEN'], site=None)) 33 | RECIPES_ROOT = os.path.join(os.path.dirname(os.path.dirname(os.path.dirname(__file__))), 'recipes') 34 | RECIPES_DIR = os.path.join(RECIPES_ROOT, 'recipes_directory') 35 | RECIPE_DEV = os.path.join(RECIPES_ROOT, 'recipe1_dev') 36 | 37 | 38 | def test_distribution_exists(): 39 | clear_binstar(CLIENT, OWNER) 40 | 41 | # Build a recipe. 42 | meta = MetaData(RECIPE_DEV) 43 | meta = build(meta) 44 | 45 | # Check distribution exists returns false when there is no distribution. 46 | assert_false(distribution_exists(CLIENT, OWNER, meta)) 47 | 48 | # upload the distribution 49 | upload(CLIENT, meta, OWNER, channels=['testing']) 50 | 51 | # Check the distribution exists. Notice there is no channel being supplied here. 52 | assert_true(distribution_exists(CLIENT, OWNER, meta)) 53 | 54 | # Check the distribution is on testing but not on main. 55 | assert_true(distribution_exists_on_channel(CLIENT, OWNER, meta, channel='testing')) 56 | assert_false(distribution_exists_on_channel(CLIENT, OWNER, meta, channel='main')) 57 | 58 | add_distribution_to_channel(CLIENT, OWNER, meta, channel='main') 59 | # Check that the distribution has been added. 60 | assert_true(distribution_exists_on_channel(CLIENT, OWNER, meta, channel='main')) 61 | 62 | 63 | def test_leaky_add_to_channel(): 64 | # A newer distribution (e.g. v0.2.0.dev) on a dev channel was getting promoted to the main channel 65 | # when an earlier version (e.g. v0.1.0) was being linked to main. 66 | clear_binstar(CLIENT, OWNER) 67 | # Build a recipe and upload the recipe to the testing channel. 68 | meta = MetaData(RECIPE_DEV) 69 | meta = build(meta) 70 | upload(CLIENT, meta, OWNER, channels=['testing']) 71 | 72 | # Build a recipe and upload the recipe to the testing channel. 
73 | meta_eariler = MetaData(os.path.join(RECIPES_DIR, 'recipe1')) 74 | meta_eariler = build(meta_eariler) 75 | upload(CLIENT, meta_eariler, OWNER, channels=['testing']) 76 | 77 | add_distribution_to_channel(CLIENT, OWNER, meta_eariler, channel='main') 78 | 79 | assert_true(distribution_exists_on_channel(CLIENT, OWNER, meta_eariler, channel='main')) 80 | assert_false(distribution_exists_on_channel(CLIENT, OWNER, meta, channel='main')) 81 | 82 | 83 | def assert_metas_equal(result_metas, expected_metas): 84 | meta_name = lambda meta: meta.name() 85 | message = 'Metas differ:\n LHS: {}\n RHS: {}'.format(map(meta_name, result_metas), 86 | map(meta_name, expected_metas)) 87 | assert_equal(result_metas, expected_metas, msg=message) 88 | 89 | 90 | def assert_metas_not_equal(result_metas, expected_metas): 91 | meta_name = lambda meta: meta.name() 92 | message = "Metas don't differ:\n LHS: {}\n RHS: {}".format(map(meta_name, result_metas), 93 | map(meta_name, expected_metas)) 94 | assert_not_equal(result_metas, expected_metas, msg=message) 95 | 96 | 97 | def test_recipes_to_build(): 98 | clear_binstar(CLIENT, OWNER) 99 | 100 | # Build a recipe. 101 | meta = build(MetaData(os.path.join(RECIPES_DIR, 'recipe1'))) 102 | upload(CLIENT, meta, OWNER, channels=['testing']) 103 | 104 | metas = fetch_metas(RECIPES_DIR) 105 | metas.sort(key=lambda meta: meta.name()) 106 | 107 | result = list(recipes_to_build(CLIENT, OWNER, channel='testing', recipe_metas=metas)) 108 | # The ones we need to build are all but the first. 109 | assert_metas_equal(result, metas[1:]) 110 | 111 | 112 | def test_meta_sorting(): 113 | metas = fetch_metas(RECIPES_DIR) 114 | unsorted_metas = sorted(metas, key=lambda meta: meta.name(), reverse=True) 115 | # The recipes have been constructed to sort in alphabetical order. 116 | assert_metas_equal(sort_dependency_order(unsorted_metas), [metas[0], metas[2], metas[1]]) 117 | # Check that that is what was going on. 
118 | assert_metas_not_equal(unsorted_metas, metas) 119 | 120 | 121 | def test_meta_sorting_version_strip(): 122 | m1 = MetaData.fromdict({'package': 123 | {'name': 'a'}, 124 | 'requirements': 125 | {'build': ['b > 1.2']}}) 126 | m2 = MetaData.fromdict({'package': 127 | {'name': 'b'}}) 128 | metas = sort_dependency_order([m1, m2]) 129 | assert_equal([meta.name() for meta in metas], ['b', 'a']) 130 | 131 | 132 | if __name__ == '__main__': 133 | nose.runmodule(argv=['-s', '--with-doctest'], exit=False) 134 | -------------------------------------------------------------------------------- /obvious-ci.conda/bld.bat: -------------------------------------------------------------------------------- 1 | %PYTHON% setup.py install --single-version-externally-managed --record=record.txt 2 | -------------------------------------------------------------------------------- /obvious-ci.conda/build.sh: -------------------------------------------------------------------------------- 1 | ${PYTHON} setup.py install --single-version-externally-managed --record=record.txt 2 | -------------------------------------------------------------------------------- /obvious-ci.conda/meta.yaml: -------------------------------------------------------------------------------- 1 | {% set data = load_setuptools() %} 2 | 3 | package: 4 | name: obvious-ci 5 | version: {{data.get('version')}} 6 | 7 | source: 8 | path: ../ 9 | 10 | requirements: 11 | build: 12 | - python 13 | - setuptools 14 | 15 | run: 16 | - python 17 | - setuptools 18 | - anaconda-client 19 | - conda 20 | - conda-build 21 | 22 | test: 23 | imports: 24 | - obvci 25 | - obvci.conda_tools 26 | - obvci.cli 27 | commands: 28 | - unset CONDA_NPY && obvci_conda_build_dir --help # [not win] 29 | - obvci_conda_build_dir --help # [win] 30 | -------------------------------------------------------------------------------- /scripts/obvci_appveyor_python_build_env.cmd: -------------------------------------------------------------------------------- 1 | :: EXPECTED ENV VARS: TARGET_ARCH (either x86 or x64) 2 | :: CONDA_PY (either 27, 33, 35 etc. - only major version is extracted) 3 | :: 4 | :: 5 | :: To build extensions for 64 bit Python 3, we need to configure environment 6 | :: variables to use the MSVC 2010 C++ compilers from GRMSDKX_EN_DVD.iso of: 7 | :: MS Windows SDK for Windows 7 and .NET Framework 4 (SDK v7.1) 8 | :: 9 | :: To build extensions for 64 bit Python 2, we need to configure environment 10 | :: variables to use the MSVC 2008 C++ compilers from GRMSDKX_EN_DVD.iso of: 11 | :: MS Windows SDK for Windows 7 and .NET Framework 3.5 (SDK v7.0) 12 | :: 13 | :: 32 bit builds, and 64-bit builds for 3.5 and beyond, do not require specific 14 | :: environment configurations. 15 | :: 16 | :: Note: this script needs to be run with the /E:ON and /V:ON flags for the 17 | :: cmd interpreter, at least for (SDK v7.0) 18 | :: 19 | :: More details at: 20 | :: https://github.com/cython/cython/wiki/64BitCythonExtensionsOnWindows 21 | :: http://stackoverflow.com/a/13751649/163740 22 | :: 23 | :: Author: Phil Elson 24 | :: Original Author: Olivier Grisel (https://github.com/ogrisel/python-appveyor-demo) 25 | :: License: CC0 1.0 Universal: http://creativecommons.org/publicdomain/zero/1.0/ 26 | :: 27 | :: Notes about batch files for Python people: 28 | :: 29 | :: Quotes in values are literally part of the values: 30 | :: SET FOO="bar" 31 | :: FOO is now five characters long: " b a r " 32 | :: If you don't want quotes, don't include them on the right-hand side. 
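:: Substring expansion such as %CONDA_PY:~0,1% (used below) works like Python's
:: CONDA_PY[0:1] - the number after "~" is the offset and the optional second
:: number is the length.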
33 | :: 34 | :: The CALL lines at the end of this file look redundant, but if you move them 35 | :: outside of the IF clauses, they do not run properly in the SET_SDK_64==Y 36 | :: case, I don't know why. 37 | @ECHO OFF 38 | 39 | SET COMMAND_TO_RUN=%* 40 | SET WIN_SDK_ROOT=C:\Program Files\Microsoft SDKs\Windows 41 | 42 | :: Extract the major and minor versions, and allow for the minor version to be 43 | :: more than 9. This requires the version number to have two dots in it. 44 | SET MAJOR_PYTHON_VERSION=%CONDA_PY:~0,1% 45 | 46 | IF "%CONDA_PY:~2,1%" == "" ( 47 | :: CONDA_PY style, such as 27, 34 etc. 48 | SET MINOR_PYTHON_VERSION=%CONDA_PY:~1,1% 49 | ) ELSE ( 50 | IF "%CONDA_PY:~3,1%" == "." ( 51 | SET MINOR_PYTHON_VERSION=%CONDA_PY:~2,1% 52 | ) ELSE ( 53 | SET MINOR_PYTHON_VERSION=%CONDA_PY:~2,2% 54 | ) 55 | ) 56 | 57 | :: Based on the Python version, determine what SDK version to use, and whether 58 | :: to set the SDK for 64-bit. 59 | IF %MAJOR_PYTHON_VERSION% == 2 ( 60 | SET WINDOWS_SDK_VERSION="v7.0" 61 | SET SET_SDK_64=Y 62 | ) ELSE ( 63 | IF %MAJOR_PYTHON_VERSION% == 3 ( 64 | SET WINDOWS_SDK_VERSION="v7.1" 65 | IF %MINOR_PYTHON_VERSION% LEQ 4 ( 66 | SET SET_SDK_64=Y 67 | ) ELSE ( 68 | SET SET_SDK_64=N 69 | ) 70 | ) ELSE ( 71 | ECHO Unsupported Python version: "%MAJOR_PYTHON_VERSION%" 72 | EXIT /B 1 73 | ) 74 | ) 75 | 76 | IF "%TARGET_ARCH%"=="x64" ( 77 | IF %SET_SDK_64% == Y ( 78 | ECHO Configuring Windows SDK %WINDOWS_SDK_VERSION% for Python %MAJOR_PYTHON_VERSION% on a 64 bit architecture 79 | SET DISTUTILS_USE_SDK=1 80 | SET MSSdk=1 81 | "%WIN_SDK_ROOT%\%WINDOWS_SDK_VERSION%\Setup\WindowsSdkVer.exe" -q -version:%WINDOWS_SDK_VERSION% 82 | "%WIN_SDK_ROOT%\%WINDOWS_SDK_VERSION%\Bin\SetEnv.cmd" /x64 /release 83 | ECHO Executing: %COMMAND_TO_RUN% 84 | call %COMMAND_TO_RUN% || EXIT /B 1 85 | ) ELSE ( 86 | ECHO Using default MSVC build environment for 64 bit architecture 87 | ECHO Executing: %COMMAND_TO_RUN% 88 | call %COMMAND_TO_RUN% || EXIT /B 1 89 | ) 90 | ) ELSE ( 91 | ECHO Using default MSVC build environment for 32 bit architecture 92 | ECHO Executing: %COMMAND_TO_RUN% 93 | call %COMMAND_TO_RUN% || EXIT /B 1 94 | ) 95 | -------------------------------------------------------------------------------- /scripts/obvci_conda_build_dir.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | """ 3 | A script to build and upload all of the conda recipes in 4 | the specified directory. 5 | 6 | This script is left around as legacy from before entry_points was used for managing executables. 7 | It will be removed at some point in the future. 8 | 9 | """ 10 | import obvci.cli.conda_build_dir as bld_dir 11 | import warnings 12 | 13 | if __name__ == '__main__': 14 | warnings.warn('obvci_conda_build_dir.py has been deprecated. Use obvci_conda_build_dir instead.') 15 | bld_dir.main() 16 | -------------------------------------------------------------------------------- /scripts/obvci_install_conda_build_tools.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | """ 3 | Install the packages necessary for building conda 4 | distributions. 5 | 6 | Requires conda to be installed and on the path. 7 | There are scripts to help with the installation of 8 | miniconda in the same directory as this script. 
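The packages that will be installed are listed in BUILD_PACKAGES below.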
9 | 10 | """ 11 | from __future__ import print_function 12 | BUILD_PACKAGES = ['conda-build', 'anaconda-client', 'jinja2', 'setuptools'] 13 | 14 | 15 | if __name__ == '__main__': 16 | import subprocess 17 | cmd = ['conda', 'install', '--yes', '-n', 'root', '--quiet'] + BUILD_PACKAGES 18 | subprocess.check_call(cmd) 19 | -------------------------------------------------------------------------------- /scripts/obvci_install_miniconda.ps1: -------------------------------------------------------------------------------- 1 | # Install miniconda under windows using powershell. 2 | # Authors: Phil Elson, Stuart Mumford 3 | # Inspired and originally written by: Olivier Grisel, Kyle Kastner 4 | # License: BSD 3 clause 5 | 6 | # The following environment variables are expected: 7 | # MINICONDA_VERSION - the version as seen in the miniconda URL. e.g. "3.5.5" 8 | # TARGET_ARCH - either x86 or x64 9 | # CONDA_INSTALL_LOCN - the directory where miniconda should be installed 10 | 11 | 12 | $MINICONDA_URL = "http://repo.continuum.io/miniconda/" 13 | 14 | function DownloadMiniconda ($version, $platform_suffix) { 15 | $webclient = New-Object System.Net.WebClient 16 | $filename = "Miniconda3-" + $version + "-Windows-" + $platform_suffix + ".exe" 17 | 18 | $url = $MINICONDA_URL + $filename 19 | 20 | $basedir = $pwd.Path + "\" 21 | $filepath = $basedir + $filename 22 | if (Test-Path $filename) { 23 | Write-Host "Reusing" $filepath 24 | return $filepath 25 | } 26 | 27 | # Download and retry up to 3 times in case of network transient errors. 28 | Write-Host "Downloading" $filename "from" $url 29 | $retry_attempts = 2 30 | for($i=0; $i -lt $retry_attempts; $i++){ 31 | try { 32 | $webclient.DownloadFile($url, $filepath) 33 | break 34 | } 35 | Catch [Exception]{ 36 | Start-Sleep 1 37 | } 38 | } 39 | if (Test-Path $filepath) { 40 | Write-Host "File saved at" $filepath 41 | } else { 42 | # Retry once to get the error message if any at the last try 43 | $webclient.DownloadFile($url, $filepath) 44 | } 45 | return $filepath 46 | } 47 | 48 | function InstallMiniconda ($python_version, $architecture, $python_home) { 49 | Write-Host "Installing miniconda" $python_version "for" $architecture "bit architecture to" $python_home 50 | if (Test-Path $python_home) { 51 | Write-Host $python_home "already exists, skipping." 52 | return $false 53 | } 54 | if ($architecture -eq "x86") { 55 | $platform_suffix = "x86" 56 | } else { 57 | $platform_suffix = "x86_64" 58 | } 59 | $filepath = DownloadMiniconda $python_version $platform_suffix 60 | Write-Host "Installing" $filepath "to" $python_home 61 | $args = "/InstallationType=AllUsers /S /AddToPath=1 /RegisterPython=1 /D=" + $python_home 62 | Write-Host $filepath $args 63 | Start-Process -FilePath $filepath -ArgumentList $args -Wait -Passthru 64 | #Start-Sleep -s 15 65 | if (Test-Path $python_home) { 66 | Write-Host "Miniconda $python_version ($architecture) installation complete" 67 | } else { 68 | Write-Host "Failed to install Python in $python_home" 69 | Exit 1 70 | } 71 | } 72 | 73 | function main () { 74 | InstallMiniconda $env:MINICONDA_VERSION $env:TARGET_ARCH $env:CONDA_INSTALL_LOCN 75 | } 76 | 77 | main 78 | -------------------------------------------------------------------------------- /scripts/obvci_install_miniconda.sh: -------------------------------------------------------------------------------- 1 | # Install miniconda under Linux and OSX. 2 | # The following environment variables are expected: 3 | # MINICONDA_VERSION - the version as seen in the miniconda URL. e.g. 
"3.5.5" 4 | # TARGET_ARCH - either x86 or x64. Note that on OSX, x86 is not readily available on binstar. 5 | # CONDA_INSTALL_LOCN - the directory where miniconda should be installed. 6 | 7 | MINICONDA_URL="http://repo.continuum.io/miniconda" 8 | 9 | if [[ "$TARGET_ARCH" == 'x86' ]]; then 10 | platform_suffix="x86" 11 | else 12 | platform_suffix="x86_64" 13 | fi 14 | 15 | if [[ "$OSTYPE" == 'linux-gnu' ]]; then 16 | os='linux' 17 | else 18 | os='MacOSX' 19 | fi 20 | 21 | URL=${MINICONDA_URL}/Miniconda3-${MINICONDA_VERSION}-${os}-${platform_suffix}.sh 22 | wget ${URL} -O miniconda.sh; 23 | bash miniconda.sh -b -p ${CONDA_INSTALL_LOCN} 24 | 25 | source ${CONDA_INSTALL_LOCN}/bin/activate root 26 | 27 | -------------------------------------------------------------------------------- /scripts/obvci_substitute_conda_recipe_version.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | import argparse 3 | import os 4 | import subprocess 5 | 6 | def load_version_file(fh): 7 | vn_context = {} 8 | exec(fh.read(), vn_context) 9 | return vn_context['__version__'] 10 | 11 | 12 | def identify_branch_name(directory): 13 | # TODO: Consider using the tag name too (git describe --tags --exact-match) 14 | return subprocess.check_output(['git', 'rev-parse', '--abbrev-ref', 'HEAD'], 15 | cwd=directory).strip() 16 | 17 | 18 | def main(conda_dir, version_file, include_git_branch_name=True): 19 | with open(version_file, 'r') as fh: 20 | version = load_version_file(fh) 21 | if include_git_branch_name: 22 | branch_name = identify_branch_name(conda_dir) 23 | if branch_name != 'HEAD': 24 | version = '{}.{}'.format(version, branch_name) 25 | meta_file = os.path.join(conda_dir, 'meta.yaml') 26 | with open(meta_file, 'r') as fh: 27 | meta_content = fh.readlines() 28 | with open(meta_file, 'w') as fh: 29 | for line in meta_content: 30 | if line.strip().startswith('version:'): 31 | line = '{pre}version: {version!r}\n'.format(pre=line[:line.find('version:')], 32 | version=version) 33 | fh.write(line) 34 | 35 | 36 | if __name__ == '__main__': 37 | parser = argparse.ArgumentParser(description="""A script to update the version specified in a conda 38 | recipe's meta.yaml.""") 39 | parser.add_argument("recipe_dir", help="""The directory of the conda recipe.""") 40 | parser.add_argument("version_file", help="""The file containing a python declaration of __version__.""") 41 | parser.add_argument('--without-branch-name', help="""Include the branch name 42 | (only if 'git rev-parse --abbrev-ref HEAD' != HEAD).""", 43 | action='store_true') 44 | args = parser.parse_args() 45 | main(args.recipe_dir, args.version_file, include_git_branch_name=not args.without_branch_name) 46 | -------------------------------------------------------------------------------- /setup.cfg: -------------------------------------------------------------------------------- 1 | 2 | # See the docstring in versioneer.py for instructions. Note that you must 3 | # re-run 'versioneer.py setup' after changing this section, and commit the 4 | # resulting files. 
5 | 6 | [versioneer] 7 | VCS = git 8 | style = pep440-branch-based 9 | versionfile_source = obvci/_version.py 10 | versionfile_build = obvci/_version.py 11 | tag_prefix = v 12 | parentdir_prefix = obvci 13 | 14 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | import os.path 2 | import sys 3 | 4 | from setuptools import setup 5 | 6 | # Add the CWD to the path so that we get an appropriate versioneer. 7 | sys.path.insert(0, './') 8 | import versioneer 9 | 10 | 11 | setup(name='Obvious-ci', 12 | version=versioneer.get_version(), 13 | cmdclass=versioneer.get_cmdclass(), 14 | description='Utilities to simplify CI with tools such as travis-ci and appveyor.', 15 | author='Phil Elson', 16 | author_email='pelson.pub@gmail.com', 17 | url='https://github.com/pelson/Obvious-ci', 18 | scripts=[os.path.join('scripts', script) for script in 19 | ['obvci_install_conda_build_tools.py', 'obvci_install_miniconda.ps1', 20 | 'obvci_install_miniconda.sh', 'obvci_appveyor_python_build_env.cmd']], 21 | packages=['obvci', 'obvci.conda_tools', 'obvci.cli'], 22 | entry_points={ 23 | 'console_scripts': [ 24 | 'obvci_conda_build_dir = obvci.cli.conda_build_dir:main' 25 | ] 26 | }, 27 | ) 28 | 29 | -------------------------------------------------------------------------------- /versioneer.py: -------------------------------------------------------------------------------- 1 | 2 | # Version: 0.15+dev 3 | 4 | """The Versioneer - like a rocketeer, but for versions. 5 | 6 | The Versioneer 7 | ============== 8 | 9 | * like a rocketeer, but for versions! 10 | * https://github.com/warner/python-versioneer 11 | * Brian Warner 12 | * License: Public Domain 13 | * Compatible With: python2.6, 2.7, 3.2, 3.3, 3.4, and pypy 14 | * [![Latest Version] 15 | (https://pypip.in/version/versioneer/badge.svg?style=flat) 16 | ](https://pypi.python.org/pypi/versioneer/) 17 | * [![Build Status] 18 | (https://travis-ci.org/warner/python-versioneer.png?branch=master) 19 | ](https://travis-ci.org/warner/python-versioneer) 20 | 21 | This is a tool for managing a recorded version number in distutils-based 22 | python projects. The goal is to remove the tedious and error-prone "update 23 | the embedded version string" step from your release process. Making a new 24 | release should be as easy as recording a new tag in your version-control 25 | system, and maybe making new tarballs. 26 | 27 | 28 | ## Quick Install 29 | 30 | * `pip install versioneer` to somewhere to your $PATH 31 | * add a `[versioneer]` section to your setup.cfg (see below) 32 | * run `versioneer install` in your source tree, commit the results 33 | 34 | ## Version Identifiers 35 | 36 | Source trees come from a variety of places: 37 | 38 | * a version-control system checkout (mostly used by developers) 39 | * a nightly tarball, produced by build automation 40 | * a snapshot tarball, produced by a web-based VCS browser, like github's 41 | "tarball from tag" feature 42 | * a release tarball, produced by "setup.py sdist", distributed through PyPI 43 | 44 | Within each source tree, the version identifier (either a string or a number, 45 | this tool is format-agnostic) can come from a variety of places: 46 | 47 | * ask the VCS tool itself, e.g. 
"git describe" (for checkouts), which knows 48 | about recent "tags" and an absolute revision-id 49 | * the name of the directory into which the tarball was unpacked 50 | * an expanded VCS keyword ($Id$, etc) 51 | * a `_version.py` created by some earlier build step 52 | 53 | For released software, the version identifier is closely related to a VCS 54 | tag. Some projects use tag names that include more than just the version 55 | string (e.g. "myproject-1.2" instead of just "1.2"), in which case the tool 56 | needs to strip the tag prefix to extract the version identifier. For 57 | unreleased software (between tags), the version identifier should provide 58 | enough information to help developers recreate the same tree, while also 59 | giving them an idea of roughly how old the tree is (after version 1.2, before 60 | version 1.3). Many VCS systems can report a description that captures this, 61 | for example `git describe --tags --dirty --always` reports things like 62 | "0.7-1-g574ab98-dirty" to indicate that the checkout is one revision past the 63 | 0.7 tag, has a unique revision id of "574ab98", and is "dirty" (it has 64 | uncommitted changes. 65 | 66 | The version identifier is used for multiple purposes: 67 | 68 | * to allow the module to self-identify its version: `myproject.__version__` 69 | * to choose a name and prefix for a 'setup.py sdist' tarball 70 | 71 | ## Theory of Operation 72 | 73 | Versioneer works by adding a special `_version.py` file into your source 74 | tree, where your `__init__.py` can import it. This `_version.py` knows how to 75 | dynamically ask the VCS tool for version information at import time. 76 | 77 | `_version.py` also contains `$Revision$` markers, and the installation 78 | process marks `_version.py` to have this marker rewritten with a tag name 79 | during the `git archive` command. As a result, generated tarballs will 80 | contain enough information to get the proper version. 81 | 82 | To allow `setup.py` to compute a version too, a `versioneer.py` is added to 83 | the top level of your source tree, next to `setup.py` and the `setup.cfg` 84 | that configures it. This overrides several distutils/setuptools commands to 85 | compute the version when invoked, and changes `setup.py build` and `setup.py 86 | sdist` to replace `_version.py` with a small static file that contains just 87 | the generated version data. 88 | 89 | ## Installation 90 | 91 | First, decide on values for the following configuration variables: 92 | 93 | * `VCS`: the version control system you use. Currently accepts "git". 94 | 95 | * `style`: the style of version string to be produced. See "Styles" below for 96 | details. Defaults to "pep440", which looks like 97 | `TAG[+DISTANCE.gSHORTHASH[.dirty]]`. 98 | 99 | * `versionfile_source`: 100 | 101 | A project-relative pathname into which the generated version strings should 102 | be written. This is usually a `_version.py` next to your project's main 103 | `__init__.py` file, so it can be imported at runtime. If your project uses 104 | `src/myproject/__init__.py`, this should be `src/myproject/_version.py`. 105 | This file should be checked in to your VCS as usual: the copy created below 106 | by `setup.py setup_versioneer` will include code that parses expanded VCS 107 | keywords in generated tarballs. The 'build' and 'sdist' commands will 108 | replace it with a copy that has just the calculated version string. 
109 | 110 | This must be set even if your project does not have any modules (and will 111 | therefore never import `_version.py`), since "setup.py sdist" -based trees 112 | still need somewhere to record the pre-calculated version strings. Anywhere 113 | in the source tree should do. If there is a `__init__.py` next to your 114 | `_version.py`, the `setup.py setup_versioneer` command (described below) 115 | will append some `__version__`-setting assignments, if they aren't already 116 | present. 117 | 118 | * `versionfile_build`: 119 | 120 | Like `versionfile_source`, but relative to the build directory instead of 121 | the source directory. These will differ when your setup.py uses 122 | 'package_dir='. If you have `package_dir={'myproject': 'src/myproject'}`, 123 | then you will probably have `versionfile_build='myproject/_version.py'` and 124 | `versionfile_source='src/myproject/_version.py'`. 125 | 126 | If this is set to None, then `setup.py build` will not attempt to rewrite 127 | any `_version.py` in the built tree. If your project does not have any 128 | libraries (e.g. if it only builds a script), then you should use 129 | `versionfile_build = None` and override `distutils.command.build_scripts` 130 | to explicitly insert a copy of `versioneer.get_version()` into your 131 | generated script. 132 | 133 | * `tag_prefix`: 134 | 135 | a string, like 'PROJECTNAME-', which appears at the start of all VCS tags. 136 | If your tags look like 'myproject-1.2.0', then you should use 137 | tag_prefix='myproject-'. If you use unprefixed tags like '1.2.0', this 138 | should be an empty string, using either `tag_prefix=` or `tag_prefix=''`. 139 | 140 | * `parentdir_prefix`: 141 | 142 | a optional string, frequently the same as tag_prefix, which appears at the 143 | start of all unpacked tarball filenames. If your tarball unpacks into 144 | 'myproject-1.2.0', this should be 'myproject-'. To disable this feature, 145 | just omit the field from your `setup.cfg`. 146 | 147 | This tool provides one script, named `versioneer`. That script has one mode, 148 | "install", which writes a copy of `versioneer.py` into the current directory 149 | and runs `versioneer.py setup` to finish the installation. 150 | 151 | To versioneer-enable your project: 152 | 153 | * 1: Modify your `setup.cfg`, adding a section named `[versioneer]` and 154 | populating it with the configuration values you decided earlier (note that 155 | the option names are not case-sensitive): 156 | 157 | ```` 158 | [versioneer] 159 | VCS = git 160 | style = pep440 161 | versionfile_source = src/myproject/_version.py 162 | versionfile_build = myproject/_version.py 163 | tag_prefix = 164 | parentdir_prefix = myproject- 165 | ```` 166 | 167 | * 2: Run `versioneer install`. This will do the following: 168 | 169 | * copy `versioneer.py` into the top of your source tree 170 | * create `_version.py` in the right place (`versionfile_source`) 171 | * modify your `__init__.py` (if one exists next to `_version.py`) to define 172 | `__version__` (by calling a function from `_version.py`) 173 | * modify your `MANIFEST.in` to include both `versioneer.py` and the 174 | generated `_version.py` in sdist tarballs 175 | 176 | `versioneer install` will complain about any problems it finds with your 177 | `setup.py` or `setup.cfg`. Run it multiple times until you have fixed all 178 | the problems. 
179 | 180 | * 3: add a `import versioneer` to your setup.py, and add the following 181 | arguments to the setup() call: 182 | 183 | version=versioneer.get_version(), 184 | cmdclass=versioneer.get_cmdclass(), 185 | 186 | * 4: commit these changes to your VCS. To make sure you won't forget, 187 | `versioneer install` will mark everything it touched for addition using 188 | `git add`. Don't forget to add `setup.py` and `setup.cfg` too. 189 | 190 | ## Post-Installation Usage 191 | 192 | Once established, all uses of your tree from a VCS checkout should get the 193 | current version string. All generated tarballs should include an embedded 194 | version string (so users who unpack them will not need a VCS tool installed). 195 | 196 | If you distribute your project through PyPI, then the release process should 197 | boil down to two steps: 198 | 199 | * 1: git tag 1.0 200 | * 2: python setup.py register sdist upload 201 | 202 | If you distribute it through github (i.e. users use github to generate 203 | tarballs with `git archive`), the process is: 204 | 205 | * 1: git tag 1.0 206 | * 2: git push; git push --tags 207 | 208 | Versioneer will report "0+untagged.NUMCOMMITS.gHASH" until your tree has at 209 | least one tag in its history. 210 | 211 | ## Version-String Flavors 212 | 213 | Code which uses Versioneer can learn about its version string at runtime by 214 | importing `_version` from your main `__init__.py` file and running the 215 | `get_versions()` function. From the "outside" (e.g. in `setup.py`), you can 216 | import the top-level `versioneer.py` and run `get_versions()`. 217 | 218 | Both functions return a dictionary with different flavors of version 219 | information: 220 | 221 | * `['version']`: A condensed version string, rendered using the selected 222 | style. This is the most commonly used value for the project's version 223 | string. The default "pep440" style yields strings like `0.11`, 224 | `0.11+2.g1076c97`, or `0.11+2.g1076c97.dirty`. See the "Styles" section 225 | below for alternative styles. 226 | 227 | * `['full-revisionid']`: detailed revision identifier. For Git, this is the 228 | full SHA1 commit id, e.g. "1076c978a8d3cfc70f408fe5974aa6c092c949ac". 229 | 230 | * `['dirty']`: a boolean, True if the tree has uncommitted changes. Note that 231 | this is only accurate if run in a VCS checkout, otherwise it is likely to 232 | be False or None 233 | 234 | * `['error']`: if the version string could not be computed, this will be set 235 | to a string describing the problem, otherwise it will be None. It may be 236 | useful to throw an exception in setup.py if this is set, to avoid e.g. 237 | creating tarballs with a version string of "unknown". 238 | 239 | Some variants are more useful than others. Including `full-revisionid` in a 240 | bug report should allow developers to reconstruct the exact code being tested 241 | (or indicate the presence of local changes that should be shared with the 242 | developers). `version` is suitable for display in an "about" box or a CLI 243 | `--version` output: it can be easily compared against release notes and lists 244 | of bugs fixed in various releases. 
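As a rough illustration (not part of the upstream Versioneer documentation), querying these flavors from `setup.py` might look like the following, with values echoing the examples above:

    import versioneer
    info = versioneer.get_versions()
    # e.g. {'version': '0.11+2.g1076c97.dirty',
    #       'full-revisionid': '1076c978a8d3cfc70f408fe5974aa6c092c949ac',
    #       'dirty': True,
    #       'error': None}
    print(info['version'])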
245 | 246 | The installer adds the following text to your `__init__.py` to place a basic 247 | version in `YOURPROJECT.__version__`: 248 | 249 | from ._version import get_versions 250 | __version__ = get_versions()['version'] 251 | del get_versions 252 | 253 | ## Styles 254 | 255 | The setup.cfg `style=` configuration controls how the VCS information is 256 | rendered into a version string. 257 | 258 | The default style, "pep440", produces a PEP440-compliant string, equal to the 259 | un-prefixed tag name for actual releases, and containing an additional "local 260 | version" section with more detail for in-between builds. For Git, this is 261 | TAG[+DISTANCE.gHEX[.dirty]] , using information from `git describe --tags 262 | --dirty --always`. For example "0.11+2.g1076c97.dirty" indicates that the 263 | tree is like the "1076c97" commit but has uncommitted changes (".dirty"), and 264 | that this commit is two revisions ("+2") beyond the "0.11" tag. For released 265 | software (exactly equal to a known tag), the identifier will only contain the 266 | stripped tag, e.g. "0.11". 267 | 268 | Other styles are available. See details.md in the Versioneer source tree for 269 | descriptions. 270 | 271 | ## Debugging 272 | 273 | Versioneer tries to avoid fatal errors: if something goes wrong, it will tend 274 | to return a version of "0+unknown". To investigate the problem, run `setup.py 275 | version`, which will run the version-lookup code in a verbose mode, and will 276 | display the full contents of `get_versions()` (including the `error` string, 277 | which may help identify what went wrong). 278 | 279 | ## Updating Versioneer 280 | 281 | To upgrade your project to a new release of Versioneer, do the following: 282 | 283 | * install the new Versioneer (`pip install -U versioneer` or equivalent) 284 | * edit `setup.cfg`, if necessary, to include any new configuration settings 285 | indicated by the release notes 286 | * re-run `versioneer install` in your source tree, to replace 287 | `SRC/_version.py` 288 | * commit any changed files 289 | 290 | ### Upgrading to 0.15 291 | 292 | Starting with this version, Versioneer is configured with a `[versioneer]` 293 | section in your `setup.cfg` file. Earlier versions required the `setup.py` to 294 | set attributes on the `versioneer` module immediately after import. The new 295 | version will refuse to run (raising an exception during import) until you 296 | have provided the necessary `setup.cfg` section. 297 | 298 | In addition, the Versioneer package provides an executable named 299 | `versioneer`, and the installation process is driven by running `versioneer 300 | install`. In 0.14 and earlier, the executable was named 301 | `versioneer-installer` and was run without an argument. 302 | 303 | ### Upgrading to 0.14 304 | 305 | 0.14 changes the format of the version string. 0.13 and earlier used 306 | hyphen-separated strings like "0.11-2-g1076c97-dirty". 0.14 and beyond use a 307 | plus-separated "local version" section strings, with dot-separated 308 | components, like "0.11+2.g1076c97". PEP440-strict tools did not like the old 309 | format, but should be ok with the new one. 310 | 311 | ### Upgrading from 0.11 to 0.12 312 | 313 | Nothing special. 314 | 315 | ### Upgrading from 0.10 to 0.11 316 | 317 | You must add a `versioneer.VCS = "git"` to your `setup.py` before re-running 318 | `setup.py setup_versioneer`. This will enable the use of additional 319 | version-control systems (SVN, etc) in the future. 
320 | 321 | ## Future Directions 322 | 323 | This tool is designed to make it easily extended to other version-control 324 | systems: all VCS-specific components are in separate directories like 325 | src/git/ . The top-level `versioneer.py` script is assembled from these 326 | components by running make-versioneer.py . In the future, make-versioneer.py 327 | will take a VCS name as an argument, and will construct a version of 328 | `versioneer.py` that is specific to the given VCS. It might also take the 329 | configuration arguments that are currently provided manually during 330 | installation by editing setup.py . Alternatively, it might go the other 331 | direction and include code from all supported VCS systems, reducing the 332 | number of intermediate scripts. 333 | 334 | 335 | ## License 336 | 337 | To make Versioneer easier to embed, all its code is dedicated to the public 338 | domain. The `_version.py` that it creates is also in the public domain. 339 | Specifically, both are released under the Creative Commons "Public Domain 340 | Dedication" license (CC0-1.0), as described in 341 | https://creativecommons.org/publicdomain/zero/1.0/ . 342 | 343 | """ 344 | 345 | from __future__ import print_function 346 | try: 347 | import configparser 348 | except ImportError: 349 | import ConfigParser as configparser 350 | import errno 351 | import json 352 | import os 353 | import re 354 | import subprocess 355 | import sys 356 | 357 | 358 | class VersioneerConfig: 359 | 360 | """Container for Versioneer configuration parameters.""" 361 | 362 | 363 | def get_root(): 364 | """Get the project root directory. 365 | 366 | We require that all commands are run from the project root, i.e. the 367 | directory that contains setup.py, setup.cfg, and versioneer.py . 368 | """ 369 | root = os.path.realpath(os.path.abspath(os.getcwd())) 370 | setup_py = os.path.join(root, "setup.py") 371 | versioneer_py = os.path.join(root, "versioneer.py") 372 | if not (os.path.exists(setup_py) or os.path.exists(versioneer_py)): 373 | # allow 'python path/to/setup.py COMMAND' 374 | root = os.path.dirname(os.path.realpath(os.path.abspath(sys.argv[0]))) 375 | setup_py = os.path.join(root, "setup.py") 376 | versioneer_py = os.path.join(root, "versioneer.py") 377 | if not (os.path.exists(setup_py) or os.path.exists(versioneer_py)): 378 | err = ("Versioneer was unable to run the project root directory. " 379 | "Versioneer requires setup.py to be executed from " 380 | "its immediate directory (like 'python setup.py COMMAND'), " 381 | "or in a way that lets it use sys.argv[0] to find the root " 382 | "(like 'python path/to/setup.py COMMAND').") 383 | raise VersioneerBadRootError(err) 384 | try: 385 | # Certain runtime workflows (setup.py install/develop in a setuptools 386 | # tree) execute all dependencies in a single python process, so 387 | # "versioneer" may be imported multiple times, and python's shared 388 | # module-import table will cache the first one. So we can't use 389 | # os.path.dirname(__file__), as that will find whichever 390 | # versioneer.py was first imported, even in later projects. 
391 | me = os.path.realpath(os.path.abspath(__file__)) 392 | if os.path.splitext(me)[0] != os.path.splitext(versioneer_py)[0]: 393 | print("Warning: build in %s is using versioneer.py from %s" 394 | % (os.path.dirname(me), versioneer_py)) 395 | except NameError: 396 | pass 397 | return root 398 | 399 | 400 | def get_config_from_root(root): 401 | """Read the project setup.cfg file to determine Versioneer config.""" 402 | # This might raise EnvironmentError (if setup.cfg is missing), or 403 | # configparser.NoSectionError (if it lacks a [versioneer] section), or 404 | # configparser.NoOptionError (if it lacks "VCS="). See the docstring at 405 | # the top of versioneer.py for instructions on writing your setup.cfg . 406 | setup_cfg = os.path.join(root, "setup.cfg") 407 | parser = configparser.SafeConfigParser() 408 | with open(setup_cfg, "r") as f: 409 | parser.readfp(f) 410 | VCS = parser.get("versioneer", "VCS") # mandatory 411 | 412 | def get(parser, name): 413 | if parser.has_option("versioneer", name): 414 | return parser.get("versioneer", name) 415 | return None 416 | cfg = VersioneerConfig() 417 | cfg.VCS = VCS 418 | cfg.style = get(parser, "style") or "" 419 | cfg.versionfile_source = get(parser, "versionfile_source") 420 | cfg.versionfile_build = get(parser, "versionfile_build") 421 | cfg.tag_prefix = get(parser, "tag_prefix") 422 | if cfg.tag_prefix in ("''", '""'): 423 | cfg.tag_prefix = "" 424 | cfg.parentdir_prefix = get(parser, "parentdir_prefix") 425 | cfg.verbose = get(parser, "verbose") 426 | return cfg 427 | 428 | 429 | class NotThisMethod(Exception): 430 | 431 | """Exception raised if a method is not valid for the current scenario.""" 432 | 433 | # these dictionaries contain VCS-specific tools 434 | LONG_VERSION_PY = {} 435 | HANDLERS = {} 436 | 437 | 438 | def register_vcs_handler(vcs, method): # decorator 439 | """Decorator to mark a method as the handler for a particular VCS.""" 440 | def decorate(f): 441 | """Store f in HANDLERS[vcs][method].""" 442 | if vcs not in HANDLERS: 443 | HANDLERS[vcs] = {} 444 | HANDLERS[vcs][method] = f 445 | return f 446 | return decorate 447 | 448 | 449 | def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False): 450 | """Call the given command(s).""" 451 | assert isinstance(commands, list) 452 | p = None 453 | for c in commands: 454 | try: 455 | dispcmd = str([c] + args) 456 | # remember shell=False, so use git.cmd on windows, not just git 457 | p = subprocess.Popen([c] + args, cwd=cwd, stdout=subprocess.PIPE, 458 | stderr=(subprocess.PIPE if hide_stderr 459 | else None)) 460 | break 461 | except EnvironmentError: 462 | e = sys.exc_info()[1] 463 | if e.errno == errno.ENOENT: 464 | continue 465 | if verbose: 466 | print("unable to run %s" % dispcmd) 467 | print(e) 468 | return None 469 | else: 470 | if verbose: 471 | print("unable to find command, tried %s" % (commands,)) 472 | return None 473 | stdout = p.communicate()[0].strip() 474 | if sys.version_info[0] >= 3: 475 | stdout = stdout.decode() 476 | if p.returncode != 0: 477 | if verbose: 478 | print("unable to run %s (error)" % dispcmd) 479 | return None 480 | return stdout 481 | LONG_VERSION_PY['git'] = r''' 482 | # This file helps to compute a version number in source trees obtained from 483 | # git-archive tarball (such as those provided by githubs download-from-tag 484 | # feature). 
Distribution tarballs (built by setup.py sdist) and build 485 | # directories (produced by setup.py build) will contain a much shorter file 486 | # that just contains the computed version number. 487 | 488 | # This file is released into the public domain. Generated by 489 | # versioneer-0.15+dev (https://github.com/warner/python-versioneer) 490 | 491 | """Git implementation of _version.py.""" 492 | 493 | import errno 494 | import os 495 | import re 496 | import subprocess 497 | import sys 498 | 499 | 500 | def get_keywords(): 501 | """Get the keywords needed to look up the version information.""" 502 | # these strings will be replaced by git during git-archive. 503 | # setup.py/versioneer.py will grep for the variable names, so they must 504 | # each be defined on a line of their own. _version.py will just call 505 | # get_keywords(). 506 | git_refnames = "%(DOLLAR)sFormat:%%d%(DOLLAR)s" 507 | git_full = "%(DOLLAR)sFormat:%%H%(DOLLAR)s" 508 | keywords = {"refnames": git_refnames, "full": git_full} 509 | return keywords 510 | 511 | 512 | class VersioneerConfig: 513 | 514 | """Container for Versioneer configuration parameters.""" 515 | 516 | 517 | def get_config(): 518 | """Create, populate and return the VersioneerConfig() object.""" 519 | # these strings are filled in when 'setup.py versioneer' creates 520 | # _version.py 521 | cfg = VersioneerConfig() 522 | cfg.VCS = "git" 523 | cfg.style = "%(STYLE)s" 524 | cfg.tag_prefix = "%(TAG_PREFIX)s" 525 | cfg.parentdir_prefix = "%(PARENTDIR_PREFIX)s" 526 | cfg.versionfile_source = "%(VERSIONFILE_SOURCE)s" 527 | cfg.verbose = False 528 | return cfg 529 | 530 | 531 | class NotThisMethod(Exception): 532 | 533 | """Exception raised if a method is not valid for the current scenario.""" 534 | 535 | 536 | LONG_VERSION_PY = {} 537 | HANDLERS = {} 538 | 539 | 540 | def register_vcs_handler(vcs, method): # decorator 541 | """Decorator to mark a method as the handler for a particular VCS.""" 542 | def decorate(f): 543 | """Store f in HANDLERS[vcs][method].""" 544 | if vcs not in HANDLERS: 545 | HANDLERS[vcs] = {} 546 | HANDLERS[vcs][method] = f 547 | return f 548 | return decorate 549 | 550 | 551 | def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False): 552 | """Call the given command(s).""" 553 | assert isinstance(commands, list) 554 | p = None 555 | for c in commands: 556 | try: 557 | dispcmd = str([c] + args) 558 | # remember shell=False, so use git.cmd on windows, not just git 559 | p = subprocess.Popen([c] + args, cwd=cwd, stdout=subprocess.PIPE, 560 | stderr=(subprocess.PIPE if hide_stderr 561 | else None)) 562 | break 563 | except EnvironmentError: 564 | e = sys.exc_info()[1] 565 | if e.errno == errno.ENOENT: 566 | continue 567 | if verbose: 568 | print("unable to run %%s" %% dispcmd) 569 | print(e) 570 | return None 571 | else: 572 | if verbose: 573 | print("unable to find command, tried %%s" %% (commands,)) 574 | return None 575 | stdout = p.communicate()[0].strip() 576 | if sys.version_info[0] >= 3: 577 | stdout = stdout.decode() 578 | if p.returncode != 0: 579 | if verbose: 580 | print("unable to run %%s (error)" %% dispcmd) 581 | return None 582 | return stdout 583 | 584 | 585 | def versions_from_parentdir(parentdir_prefix, root, verbose): 586 | """Try to determine the version from the parent directory name. 587 | 588 | Source tarballs conventionally unpack into a directory that includes 589 | both the project name and a version string. 
590 | """ 591 | dirname = os.path.basename(root) 592 | if not dirname.startswith(parentdir_prefix): 593 | if verbose: 594 | print("guessing rootdir is '%%s', but '%%s' doesn't start with " 595 | "prefix '%%s'" %% (root, dirname, parentdir_prefix)) 596 | raise NotThisMethod("rootdir doesn't start with parentdir_prefix") 597 | return {"version": dirname[len(parentdir_prefix):], 598 | "full-revisionid": None, 599 | "dirty": False, "error": None} 600 | 601 | 602 | @register_vcs_handler("git", "get_keywords") 603 | def git_get_keywords(versionfile_abs): 604 | """Extract version information from the given file.""" 605 | # the code embedded in _version.py can just fetch the value of these 606 | # keywords. When used from setup.py, we don't want to import _version.py, 607 | # so we do it with a regexp instead. This function is not used from 608 | # _version.py. 609 | keywords = {} 610 | try: 611 | f = open(versionfile_abs, "r") 612 | for line in f.readlines(): 613 | if line.strip().startswith("git_refnames ="): 614 | mo = re.search(r'=\s*"(.*)"', line) 615 | if mo: 616 | keywords["refnames"] = mo.group(1) 617 | if line.strip().startswith("git_full ="): 618 | mo = re.search(r'=\s*"(.*)"', line) 619 | if mo: 620 | keywords["full"] = mo.group(1) 621 | f.close() 622 | except EnvironmentError: 623 | pass 624 | return keywords 625 | 626 | 627 | @register_vcs_handler("git", "keywords") 628 | def git_versions_from_keywords(keywords, tag_prefix, verbose): 629 | """Get version information from git keywords.""" 630 | if not keywords: 631 | raise NotThisMethod("no keywords at all, weird") 632 | refnames = keywords["refnames"].strip() 633 | if refnames.startswith("$Format"): 634 | if verbose: 635 | print("keywords are unexpanded, not using") 636 | raise NotThisMethod("unexpanded keywords, not a git-archive tarball") 637 | refs = [r.strip() for r in refnames.strip("()").split(",")] 638 | # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of 639 | # just "foo-1.0". If we see a "tag: " prefix, prefer those. 640 | TAG = "tag: " 641 | tags = set([r[len(TAG):] for r in refs if r.startswith(TAG)]) 642 | if not tags: 643 | # Either we're using git < 1.8.3, or there really are no tags. We use 644 | # a heuristic: assume all version tags have a digit. The old git %%d 645 | # expansion behaves like git log --decorate=short and strips out the 646 | # refs/heads/ and refs/tags/ prefixes that would let us distinguish 647 | # between branches and tags. By ignoring refnames without digits, we 648 | # filter out many common branch names like "release" and 649 | # "stabilization", as well as "HEAD" and "master". 650 | tags = set([r for r in refs if re.search(r'\d', r)]) 651 | if verbose: 652 | print("discarding '%%s', no digits" %% ",".join(set(refs) - tags)) 653 | if verbose: 654 | print("likely tags: %%s" %% ",".join(sorted(tags))) 655 | for ref in sorted(tags): 656 | # sorting will prefer e.g. 
"2.0" over "2.0rc1" 657 | if ref.startswith(tag_prefix): 658 | r = ref[len(tag_prefix):] 659 | if verbose: 660 | print("picking %%s" %% r) 661 | return {"version": r, 662 | "full-revisionid": keywords["full"].strip(), 663 | "dirty": False, "error": None, "branch": None 664 | } 665 | # no suitable tags, so version is "0+unknown", but full hex is still there 666 | if verbose: 667 | print("no suitable tags, using unknown + full revision id") 668 | return {"version": "0+unknown", 669 | "full-revisionid": keywords["full"].strip(), 670 | "dirty": False, "error": "no suitable tags", 671 | "branch": None} 672 | 673 | 674 | @register_vcs_handler("git", "pieces_from_vcs") 675 | def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command): 676 | """Get version from 'git describe' in the root of the source tree. 677 | 678 | This only gets called if the git-archive 'subst' keywords were *not* 679 | expanded, and _version.py hasn't already been rewritten with a short 680 | version string, meaning we're inside a checked out source tree. 681 | """ 682 | if not os.path.exists(os.path.join(root, ".git")): 683 | if verbose: 684 | print("no .git in %%s" %% root) 685 | raise NotThisMethod("no .git directory") 686 | 687 | GITS = ["git"] 688 | if sys.platform == "win32": 689 | GITS = ["git.cmd", "git.exe"] 690 | # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty] 691 | # if there isn't one, this yields HEX[-dirty] (no NUM). Note, for git v1.7 692 | # and below, it is necessary to run "git update-index --refresh" first. 693 | describe_out = run_command(GITS, ["describe", "--tags", "--dirty", 694 | "--always", "--long", 695 | "--match", "%%s*" %% tag_prefix], 696 | cwd=root) 697 | # --long was added in git-1.5.5 698 | if describe_out is None: 699 | raise NotThisMethod("'git describe' failed") 700 | describe_out = describe_out.strip() 701 | full_out = run_command(GITS, ["rev-parse", "HEAD"], cwd=root) 702 | if full_out is None: 703 | raise NotThisMethod("'git rev-parse' failed") 704 | full_out = full_out.strip() 705 | 706 | pieces = {} 707 | pieces["long"] = full_out 708 | pieces["short"] = full_out[:7] # maybe improved later 709 | pieces["error"] = None 710 | 711 | # abbrev-ref available with git >= 1.7 712 | branch_name = run_command(GITS, ["rev-parse", "--abbrev-ref", "HEAD"], 713 | cwd=root).strip() 714 | if branch_name == 'HEAD': 715 | branches = run_command(GITS, ["branch", "--contains"], 716 | cwd=root).split('\n') 717 | branches = [branch[2:] for branch in branches if branch[4:5] != '('] 718 | if 'master' in branches: 719 | branch_name = 'master' 720 | elif not branches: 721 | branch_name = None 722 | else: 723 | # Pick the first branch that is returned. Good or bad. 724 | branch_name = branches[0] 725 | 726 | branch_name = branch_name.replace(' ', '.').replace('(', '').replace(')', '') 727 | 728 | pieces['branch'] = branch_name 729 | 730 | # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty] 731 | # TAG might have hyphens. 732 | git_describe = describe_out 733 | 734 | # look for -dirty suffix 735 | dirty = git_describe.endswith("-dirty") 736 | pieces["dirty"] = dirty 737 | if dirty: 738 | git_describe = git_describe[:git_describe.rindex("-dirty")] 739 | 740 | # now we have TAG-NUM-gHEX or HEX 741 | 742 | if "-" in git_describe: 743 | # TAG-NUM-gHEX 744 | mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe) 745 | if not mo: 746 | # unparseable. Maybe git-describe is misbehaving? 
747 | pieces["error"] = ("unable to parse git-describe output: '%%s'" 748 | %% describe_out) 749 | return pieces 750 | 751 | # tag 752 | full_tag = mo.group(1) 753 | if not full_tag.startswith(tag_prefix): 754 | if verbose: 755 | fmt = "tag '%%s' doesn't start with prefix '%%s'" 756 | print(fmt %% (full_tag, tag_prefix)) 757 | pieces["error"] = ("tag '%%s' doesn't start with prefix '%%s'" 758 | %% (full_tag, tag_prefix)) 759 | return pieces 760 | pieces["closest-tag"] = full_tag[len(tag_prefix):] 761 | 762 | # distance: number of commits since tag 763 | pieces["distance"] = int(mo.group(2)) 764 | 765 | # commit: short hex revision ID 766 | pieces["short"] = mo.group(3) 767 | 768 | else: 769 | # HEX: no tags 770 | pieces["closest-tag"] = None 771 | count_out = run_command(GITS, ["rev-list", "HEAD", "--count"], 772 | cwd=root) 773 | pieces["distance"] = int(count_out) # total number of commits 774 | 775 | return pieces 776 | 777 | 778 | # Default matches v1.2.x, maint/1.2.x, 1.2.x, 1.x etc. 779 | default_maint_branch_regexp = ".*([0-9]+\.)+x$" 780 | 781 | 782 | def plus_or_dot(pieces): 783 | """Return a + if we don't already have one, else return a .""" 784 | if "+" in pieces.get("closest-tag", ""): 785 | return "." 786 | return "+" 787 | 788 | 789 | def render_pep440(pieces): 790 | """Build up version string, with post-release "local version identifier". 791 | 792 | Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you 793 | get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty 794 | 795 | Exceptions: 796 | 1: no tags. git_describe was just HEX. 0+untagged.DISTANCE.gHEX[.dirty] 797 | """ 798 | if pieces["closest-tag"]: 799 | rendered = pieces["closest-tag"] 800 | if pieces["distance"] or pieces["dirty"]: 801 | rendered += plus_or_dot(pieces) 802 | rendered += "%%d.g%%s" %% (pieces["distance"], pieces["short"]) 803 | if pieces["dirty"]: 804 | rendered += ".dirty" 805 | else: 806 | # exception #1 807 | rendered = "0+untagged.%%d.g%%s" %% (pieces["distance"], 808 | pieces["short"]) 809 | if pieces["dirty"]: 810 | rendered += ".dirty" 811 | return rendered 812 | 813 | 814 | def render_pep440_pre(pieces): 815 | """TAG[.post.devDISTANCE] -- No -dirty. 816 | 817 | Exceptions: 818 | 1: no tags. 0.post.devDISTANCE 819 | """ 820 | if pieces["closest-tag"]: 821 | rendered = pieces["closest-tag"] 822 | if pieces["distance"]: 823 | rendered += ".post.dev%%d" %% pieces["distance"] 824 | else: 825 | # exception #1 826 | rendered = "0.post.dev%%d" %% pieces["distance"] 827 | return rendered 828 | 829 | 830 | def render_pep440_post(pieces): 831 | """TAG[.postDISTANCE[.dev0]+gHEX] . 832 | 833 | The ".dev0" means dirty. Note that .dev0 sorts backwards 834 | (a dirty tree will appear "older" than the corresponding clean one), 835 | but you shouldn't be releasing software with -dirty anyways. 836 | 837 | Exceptions: 838 | 1: no tags. 0.postDISTANCE[.dev0] 839 | """ 840 | if pieces["closest-tag"]: 841 | rendered = pieces["closest-tag"] 842 | if pieces["distance"] or pieces["dirty"]: 843 | rendered += ".post%%d" %% pieces["distance"] 844 | if pieces["dirty"]: 845 | rendered += ".dev0" 846 | rendered += plus_or_dot(pieces) 847 | rendered += "g%%s" %% pieces["short"] 848 | else: 849 | # exception #1 850 | rendered = "0.post%%d" %% pieces["distance"] 851 | if pieces["dirty"]: 852 | rendered += ".dev0" 853 | rendered += "+g%%s" %% pieces["short"] 854 | return rendered 855 | 856 | 857 | def render_pep440_old(pieces): 858 | """TAG[.postDISTANCE[.dev0]] . 859 | 860 | The ".dev0" means dirty. 
861 | 862 | Eexceptions: 863 | 1: no tags. 0.postDISTANCE[.dev0] 864 | """ 865 | if pieces["closest-tag"]: 866 | rendered = pieces["closest-tag"] 867 | if pieces["distance"] or pieces["dirty"]: 868 | rendered += ".post%%d" %% pieces["distance"] 869 | if pieces["dirty"]: 870 | rendered += ".dev0" 871 | else: 872 | # exception #1 873 | rendered = "0.post%%d" %% pieces["distance"] 874 | if pieces["dirty"]: 875 | rendered += ".dev0" 876 | return rendered 877 | 878 | 879 | def render_git_describe(pieces): 880 | """TAG[-DISTANCE-gHEX][-dirty]. 881 | 882 | Like 'git describe --tags --dirty --always'. 883 | 884 | Exceptions: 885 | 1: no tags. HEX[-dirty] (note: no 'g' prefix) 886 | """ 887 | if pieces["closest-tag"]: 888 | rendered = pieces["closest-tag"] 889 | if pieces["distance"]: 890 | rendered += "-%%d-g%%s" %% (pieces["distance"], pieces["short"]) 891 | else: 892 | # exception #1 893 | rendered = pieces["short"] 894 | if pieces["dirty"]: 895 | rendered += "-dirty" 896 | return rendered 897 | 898 | 899 | def render_git_describe_long(pieces): 900 | """TAG-DISTANCE-gHEX[-dirty]. 901 | 902 | Like 'git describe --tags --dirty --always -long'. 903 | The distance/hash is unconditional. 904 | 905 | Exceptions: 906 | 1: no tags. HEX[-dirty] (note: no 'g' prefix) 907 | """ 908 | if pieces["closest-tag"]: 909 | rendered = pieces["closest-tag"] 910 | rendered += "-%%d-g%%s" %% (pieces["distance"], pieces["short"]) 911 | else: 912 | # exception #1 913 | rendered = pieces["short"] 914 | if pieces["dirty"]: 915 | rendered += "-dirty" 916 | return rendered 917 | 918 | 919 | def add_one_to_version(version_string, number_index_to_increment=-1): 920 | """ 921 | Add one to a version string at the given numeric indices. 922 | 923 | >>> add_one_to_version('v1.2.3') 924 | 'v1.2.4' 925 | 926 | """ 927 | # Break up the tag by number groups (preserving multi-digit 928 | # numbers as multidigit) 929 | parts = re.split("([0-9]+)", version_string) 930 | 931 | digit_parts = [(i, part) for i, part in enumerate(parts) 932 | if part.isdigit()] 933 | 934 | # Deal with negative indexing. 935 | increment_at_index = ((number_index_to_increment + len(digit_parts)) 936 | %% len(digit_parts)) 937 | for n_seen, (i, part) in enumerate(digit_parts): 938 | if n_seen == increment_at_index: 939 | parts[i] = str(int(part) + 1) 940 | elif n_seen > increment_at_index: 941 | parts[i] = '0' 942 | return ''.join(parts) 943 | 944 | 945 | def render_pep440_branch_based(pieces): 946 | # [TAG+1 of minor number][.devDISTANCE][+gHEX]. The git short is 947 | # included for dirty. 948 | 949 | # exceptions: 950 | # 1: no tags. 0.0.0.devDISTANCE[+gHEX] 951 | 952 | master = pieces.get('branch') == 'master' 953 | maint = re.match(default_maint_branch_regexp, 954 | pieces.get('branch') or '') 955 | 956 | # If we are on a tag, just pep440-pre it. 957 | if pieces["closest-tag"] and not (pieces["distance"] or 958 | pieces["dirty"]): 959 | rendered = pieces["closest-tag"] 960 | else: 961 | # Put a default closest-tag in. 962 | if not pieces["closest-tag"]: 963 | pieces["closest-tag"] = '0.0.0' 964 | 965 | if pieces["distance"] or pieces["dirty"]: 966 | if maint: 967 | rendered = pieces["closest-tag"] 968 | if pieces["distance"]: 969 | rendered += ".post%%d" %% pieces["distance"] 970 | else: 971 | rendered = add_one_to_version(pieces["closest-tag"]) 972 | if pieces["distance"]: 973 | rendered += ".dev%%d" %% pieces["distance"] 974 | # Put the branch name in if it isn't master nor a 975 | # maintenance branch. 
976 | 977 | plus = '+' 978 | if not (master or maint): 979 | rendered += "%%s%%s" %% (plus, 980 | pieces.get('branch') or 981 | 'unknown_branch') 982 | plus = '_' 983 | 984 | if pieces["dirty"]: 985 | rendered += "%%sg%%s" %% (plus, pieces["short"]) 986 | else: 987 | rendered = pieces["closest-tag"] 988 | return rendered 989 | 990 | 991 | STYLES = {'default': render_pep440, 992 | 'pep440': render_pep440, 993 | 'pep440-pre': render_pep440_pre, 994 | 'pep440-post': render_pep440_post, 995 | 'pep440-old': render_pep440_old, 996 | 'git-describe': render_git_describe, 997 | 'git-describe-long': render_git_describe_long, 998 | 'pep440-old': render_pep440_old, 999 | 'pep440-branch-based': render_pep440_branch_based, 1000 | } 1001 | 1002 | 1003 | def render(pieces, style): 1004 | """Render the given version pieces into the requested style.""" 1005 | if pieces["error"]: 1006 | return {"version": "unknown", 1007 | "full-revisionid": pieces.get("long"), 1008 | "dirty": None, 1009 | "error": pieces["error"]} 1010 | 1011 | if not style: 1012 | style = 'default' 1013 | 1014 | renderer = STYLES.get(style) 1015 | 1016 | if not renderer: 1017 | raise ValueError("unknown style '%%s'" %% style) 1018 | 1019 | rendered = renderer(pieces) 1020 | 1021 | return {"version": rendered, "full-revisionid": pieces["long"], 1022 | "dirty": pieces["dirty"], "error": None} 1023 | 1024 | 1025 | def get_versions(): 1026 | """Get version information or return default if unable to do so.""" 1027 | # I am in _version.py, which lives at ROOT/VERSIONFILE_SOURCE. If we have 1028 | # __file__, we can work backwards from there to the root. Some 1029 | # py2exe/bbfreeze/non-CPython implementations don't do __file__, in which 1030 | # case we can only use expanded keywords. 1031 | 1032 | cfg = get_config() 1033 | verbose = cfg.verbose 1034 | 1035 | try: 1036 | return git_versions_from_keywords(get_keywords(), cfg.tag_prefix, 1037 | verbose) 1038 | except NotThisMethod: 1039 | pass 1040 | 1041 | try: 1042 | root = os.path.realpath(__file__) 1043 | # versionfile_source is the relative path from the top of the source 1044 | # tree (where the .git directory might live) to this file. Invert 1045 | # this to find the root from __file__. 1046 | for i in cfg.versionfile_source.split('/'): 1047 | root = os.path.dirname(root) 1048 | except NameError: 1049 | return {"version": "0+unknown", "full-revisionid": None, 1050 | "dirty": None, 1051 | "error": "unable to find root of source tree"} 1052 | 1053 | try: 1054 | pieces = git_pieces_from_vcs(cfg.tag_prefix, root, verbose) 1055 | return render(pieces, cfg.style) 1056 | except NotThisMethod: 1057 | pass 1058 | 1059 | try: 1060 | if cfg.parentdir_prefix: 1061 | return versions_from_parentdir(cfg.parentdir_prefix, root, verbose) 1062 | except NotThisMethod: 1063 | pass 1064 | 1065 | return {"version": "0+unknown", "full-revisionid": None, 1066 | "dirty": None, 1067 | "error": "unable to compute version"} 1068 | ''' 1069 | 1070 | 1071 | @register_vcs_handler("git", "get_keywords") 1072 | def git_get_keywords(versionfile_abs): 1073 | """Extract version information from the given file.""" 1074 | # the code embedded in _version.py can just fetch the value of these 1075 | # keywords. When used from setup.py, we don't want to import _version.py, 1076 | # so we do it with a regexp instead. This function is not used from 1077 | # _version.py. 
1078 | keywords = {} 1079 | try: 1080 | f = open(versionfile_abs, "r") 1081 | for line in f.readlines(): 1082 | if line.strip().startswith("git_refnames ="): 1083 | mo = re.search(r'=\s*"(.*)"', line) 1084 | if mo: 1085 | keywords["refnames"] = mo.group(1) 1086 | if line.strip().startswith("git_full ="): 1087 | mo = re.search(r'=\s*"(.*)"', line) 1088 | if mo: 1089 | keywords["full"] = mo.group(1) 1090 | f.close() 1091 | except EnvironmentError: 1092 | pass 1093 | return keywords 1094 | 1095 | 1096 | @register_vcs_handler("git", "keywords") 1097 | def git_versions_from_keywords(keywords, tag_prefix, verbose): 1098 | """Get version information from git keywords.""" 1099 | if not keywords: 1100 | raise NotThisMethod("no keywords at all, weird") 1101 | refnames = keywords["refnames"].strip() 1102 | if refnames.startswith("$Format"): 1103 | if verbose: 1104 | print("keywords are unexpanded, not using") 1105 | raise NotThisMethod("unexpanded keywords, not a git-archive tarball") 1106 | refs = [r.strip() for r in refnames.strip("()").split(",")] 1107 | # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of 1108 | # just "foo-1.0". If we see a "tag: " prefix, prefer those. 1109 | TAG = "tag: " 1110 | tags = set([r[len(TAG):] for r in refs if r.startswith(TAG)]) 1111 | if not tags: 1112 | # Either we're using git < 1.8.3, or there really are no tags. We use 1113 | # a heuristic: assume all version tags have a digit. The old git %d 1114 | # expansion behaves like git log --decorate=short and strips out the 1115 | # refs/heads/ and refs/tags/ prefixes that would let us distinguish 1116 | # between branches and tags. By ignoring refnames without digits, we 1117 | # filter out many common branch names like "release" and 1118 | # "stabilization", as well as "HEAD" and "master". 1119 | tags = set([r for r in refs if re.search(r'\d', r)]) 1120 | if verbose: 1121 | print("discarding '%s', no digits" % ",".join(set(refs) - tags)) 1122 | if verbose: 1123 | print("likely tags: %s" % ",".join(sorted(tags))) 1124 | for ref in sorted(tags): 1125 | # sorting will prefer e.g. "2.0" over "2.0rc1" 1126 | if ref.startswith(tag_prefix): 1127 | r = ref[len(tag_prefix):] 1128 | if verbose: 1129 | print("picking %s" % r) 1130 | return {"version": r, 1131 | "full-revisionid": keywords["full"].strip(), 1132 | "dirty": False, "error": None, "branch": None 1133 | } 1134 | # no suitable tags, so version is "0+unknown", but full hex is still there 1135 | if verbose: 1136 | print("no suitable tags, using unknown + full revision id") 1137 | return {"version": "0+unknown", 1138 | "full-revisionid": keywords["full"].strip(), 1139 | "dirty": False, "error": "no suitable tags", 1140 | "branch": None} 1141 | 1142 | 1143 | @register_vcs_handler("git", "pieces_from_vcs") 1144 | def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command): 1145 | """Get version from 'git describe' in the root of the source tree. 1146 | 1147 | This only gets called if the git-archive 'subst' keywords were *not* 1148 | expanded, and _version.py hasn't already been rewritten with a short 1149 | version string, meaning we're inside a checked out source tree. 
1150 | """ 1151 | if not os.path.exists(os.path.join(root, ".git")): 1152 | if verbose: 1153 | print("no .git in %s" % root) 1154 | raise NotThisMethod("no .git directory") 1155 | 1156 | GITS = ["git"] 1157 | if sys.platform == "win32": 1158 | GITS = ["git.cmd", "git.exe"] 1159 | # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty] 1160 | # if there isn't one, this yields HEX[-dirty] (no NUM). Note, for git v1.7 1161 | # and below, it is necessary to run "git update-index --refresh" first. 1162 | describe_out = run_command(GITS, ["describe", "--tags", "--dirty", 1163 | "--always", "--long", 1164 | "--match", "%s*" % tag_prefix], 1165 | cwd=root) 1166 | # --long was added in git-1.5.5 1167 | if describe_out is None: 1168 | raise NotThisMethod("'git describe' failed") 1169 | describe_out = describe_out.strip() 1170 | full_out = run_command(GITS, ["rev-parse", "HEAD"], cwd=root) 1171 | if full_out is None: 1172 | raise NotThisMethod("'git rev-parse' failed") 1173 | full_out = full_out.strip() 1174 | 1175 | pieces = {} 1176 | pieces["long"] = full_out 1177 | pieces["short"] = full_out[:7] # maybe improved later 1178 | pieces["error"] = None 1179 | 1180 | # abbrev-ref available with git >= 1.7 1181 | branch_name = run_command(GITS, ["rev-parse", "--abbrev-ref", "HEAD"], 1182 | cwd=root).strip() 1183 | if branch_name == 'HEAD': 1184 | branches = run_command(GITS, ["branch", "--contains"], 1185 | cwd=root).split('\n') 1186 | branches = [branch[2:] for branch in branches if branch[4:5] != '('] 1187 | if 'master' in branches: 1188 | branch_name = 'master' 1189 | elif not branches: 1190 | branch_name = None 1191 | else: 1192 | # Pick the first branch that is returned. Good or bad. 1193 | branch_name = branches[0] 1194 | 1195 | branch_name = branch_name.replace(' ', '.').replace('(', '').replace(')', '') 1196 | 1197 | pieces['branch'] = branch_name 1198 | 1199 | # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty] 1200 | # TAG might have hyphens. 1201 | git_describe = describe_out 1202 | 1203 | # look for -dirty suffix 1204 | dirty = git_describe.endswith("-dirty") 1205 | pieces["dirty"] = dirty 1206 | if dirty: 1207 | git_describe = git_describe[:git_describe.rindex("-dirty")] 1208 | 1209 | # now we have TAG-NUM-gHEX or HEX 1210 | 1211 | if "-" in git_describe: 1212 | # TAG-NUM-gHEX 1213 | mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe) 1214 | if not mo: 1215 | # unparseable. Maybe git-describe is misbehaving? 
1216 | pieces["error"] = ("unable to parse git-describe output: '%s'" 1217 | % describe_out) 1218 | return pieces 1219 | 1220 | # tag 1221 | full_tag = mo.group(1) 1222 | if not full_tag.startswith(tag_prefix): 1223 | if verbose: 1224 | fmt = "tag '%s' doesn't start with prefix '%s'" 1225 | print(fmt % (full_tag, tag_prefix)) 1226 | pieces["error"] = ("tag '%s' doesn't start with prefix '%s'" 1227 | % (full_tag, tag_prefix)) 1228 | return pieces 1229 | pieces["closest-tag"] = full_tag[len(tag_prefix):] 1230 | 1231 | # distance: number of commits since tag 1232 | pieces["distance"] = int(mo.group(2)) 1233 | 1234 | # commit: short hex revision ID 1235 | pieces["short"] = mo.group(3) 1236 | 1237 | else: 1238 | # HEX: no tags 1239 | pieces["closest-tag"] = None 1240 | count_out = run_command(GITS, ["rev-list", "HEAD", "--count"], 1241 | cwd=root) 1242 | pieces["distance"] = int(count_out) # total number of commits 1243 | 1244 | return pieces 1245 | 1246 | 1247 | def do_vcs_install(manifest_in, versionfile_source, ipy): 1248 | """Git-specific installation logic for Versioneer. 1249 | 1250 | For Git, this means creating/changing .gitattributes to mark _version.py 1251 | for export-time keyword substitution. 1252 | """ 1253 | GITS = ["git"] 1254 | if sys.platform == "win32": 1255 | GITS = ["git.cmd", "git.exe"] 1256 | files = [manifest_in, versionfile_source] 1257 | if ipy: 1258 | files.append(ipy) 1259 | try: 1260 | me = __file__ 1261 | if me.endswith(".pyc") or me.endswith(".pyo"): 1262 | me = os.path.splitext(me)[0] + ".py" 1263 | versioneer_file = os.path.relpath(me) 1264 | except NameError: 1265 | versioneer_file = "versioneer.py" 1266 | files.append(versioneer_file) 1267 | present = False 1268 | try: 1269 | f = open(".gitattributes", "r") 1270 | for line in f.readlines(): 1271 | if line.strip().startswith(versionfile_source): 1272 | if "export-subst" in line.strip().split()[1:]: 1273 | present = True 1274 | f.close() 1275 | except EnvironmentError: 1276 | pass 1277 | if not present: 1278 | f = open(".gitattributes", "a+") 1279 | f.write("%s export-subst\n" % versionfile_source) 1280 | f.close() 1281 | files.append(".gitattributes") 1282 | run_command(GITS, ["add", "--"] + files) 1283 | 1284 | 1285 | def versions_from_parentdir(parentdir_prefix, root, verbose): 1286 | """Try to determine the version from the parent directory name. 1287 | 1288 | Source tarballs conventionally unpack into a directory that includes 1289 | both the project name and a version string. 1290 | """ 1291 | dirname = os.path.basename(root) 1292 | if not dirname.startswith(parentdir_prefix): 1293 | if verbose: 1294 | print("guessing rootdir is '%s', but '%s' doesn't start with " 1295 | "prefix '%s'" % (root, dirname, parentdir_prefix)) 1296 | raise NotThisMethod("rootdir doesn't start with parentdir_prefix") 1297 | return {"version": dirname[len(parentdir_prefix):], 1298 | "full-revisionid": None, 1299 | "dirty": False, "error": None} 1300 | 1301 | SHORT_VERSION_PY = """ 1302 | # This file was generated by 'versioneer.py' (0.15+dev) from 1303 | # revision-control system data, or from the parent directory name of an 1304 | # unpacked source archive. Distribution tarballs contain a pre-generated copy 1305 | # of this file. 
1306 | 1307 | import json 1308 | import sys 1309 | 1310 | version_json = ''' 1311 | %s 1312 | ''' # END VERSION_JSON 1313 | 1314 | 1315 | def get_versions(): 1316 | return json.loads(version_json) 1317 | """ 1318 | 1319 | 1320 | def versions_from_file(filename): 1321 | """Try to determine the version from _version.py if present.""" 1322 | try: 1323 | with open(filename) as f: 1324 | contents = f.read() 1325 | except EnvironmentError: 1326 | raise NotThisMethod("unable to read _version.py") 1327 | mo = re.search(r"version_json = '''\n(.*)''' # END VERSION_JSON", 1328 | contents, re.M | re.S) 1329 | if not mo: 1330 | raise NotThisMethod("no version_json in _version.py") 1331 | return json.loads(mo.group(1)) 1332 | 1333 | 1334 | def write_to_version_file(filename, versions): 1335 | """Write the given version number to the given _version.py file.""" 1336 | os.unlink(filename) 1337 | contents = json.dumps(versions, sort_keys=True, 1338 | indent=1, separators=(",", ": ")) 1339 | with open(filename, "w") as f: 1340 | f.write(SHORT_VERSION_PY % contents) 1341 | 1342 | print("set %s to '%s'" % (filename, versions["version"])) 1343 | 1344 | # Default matches v1.2.x, maint/1.2.x, 1.2.x, 1.x etc. 1345 | default_maint_branch_regexp = ".*([0-9]+\.)+x$" 1346 | 1347 | 1348 | def plus_or_dot(pieces): 1349 | """Return a + if we don't already have one, else return a .""" 1350 | if "+" in pieces.get("closest-tag", ""): 1351 | return "." 1352 | return "+" 1353 | 1354 | 1355 | def render_pep440(pieces): 1356 | """Build up version string, with post-release "local version identifier". 1357 | 1358 | Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you 1359 | get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty 1360 | 1361 | Exceptions: 1362 | 1: no tags. git_describe was just HEX. 0+untagged.DISTANCE.gHEX[.dirty] 1363 | """ 1364 | if pieces["closest-tag"]: 1365 | rendered = pieces["closest-tag"] 1366 | if pieces["distance"] or pieces["dirty"]: 1367 | rendered += plus_or_dot(pieces) 1368 | rendered += "%d.g%s" % (pieces["distance"], pieces["short"]) 1369 | if pieces["dirty"]: 1370 | rendered += ".dirty" 1371 | else: 1372 | # exception #1 1373 | rendered = "0+untagged.%d.g%s" % (pieces["distance"], 1374 | pieces["short"]) 1375 | if pieces["dirty"]: 1376 | rendered += ".dirty" 1377 | return rendered 1378 | 1379 | 1380 | def render_pep440_pre(pieces): 1381 | """TAG[.post.devDISTANCE] -- No -dirty. 1382 | 1383 | Exceptions: 1384 | 1: no tags. 0.post.devDISTANCE 1385 | """ 1386 | if pieces["closest-tag"]: 1387 | rendered = pieces["closest-tag"] 1388 | if pieces["distance"]: 1389 | rendered += ".post.dev%d" % pieces["distance"] 1390 | else: 1391 | # exception #1 1392 | rendered = "0.post.dev%d" % pieces["distance"] 1393 | return rendered 1394 | 1395 | 1396 | def render_pep440_post(pieces): 1397 | """TAG[.postDISTANCE[.dev0]+gHEX] . 1398 | 1399 | The ".dev0" means dirty. Note that .dev0 sorts backwards 1400 | (a dirty tree will appear "older" than the corresponding clean one), 1401 | but you shouldn't be releasing software with -dirty anyways. 1402 | 1403 | Exceptions: 1404 | 1: no tags. 
0.postDISTANCE[.dev0] 1405 | """ 1406 | if pieces["closest-tag"]: 1407 | rendered = pieces["closest-tag"] 1408 | if pieces["distance"] or pieces["dirty"]: 1409 | rendered += ".post%d" % pieces["distance"] 1410 | if pieces["dirty"]: 1411 | rendered += ".dev0" 1412 | rendered += plus_or_dot(pieces) 1413 | rendered += "g%s" % pieces["short"] 1414 | else: 1415 | # exception #1 1416 | rendered = "0.post%d" % pieces["distance"] 1417 | if pieces["dirty"]: 1418 | rendered += ".dev0" 1419 | rendered += "+g%s" % pieces["short"] 1420 | return rendered 1421 | 1422 | 1423 | def render_pep440_old(pieces): 1424 | """TAG[.postDISTANCE[.dev0]] . 1425 | 1426 | The ".dev0" means dirty. 1427 | 1428 | Eexceptions: 1429 | 1: no tags. 0.postDISTANCE[.dev0] 1430 | """ 1431 | if pieces["closest-tag"]: 1432 | rendered = pieces["closest-tag"] 1433 | if pieces["distance"] or pieces["dirty"]: 1434 | rendered += ".post%d" % pieces["distance"] 1435 | if pieces["dirty"]: 1436 | rendered += ".dev0" 1437 | else: 1438 | # exception #1 1439 | rendered = "0.post%d" % pieces["distance"] 1440 | if pieces["dirty"]: 1441 | rendered += ".dev0" 1442 | return rendered 1443 | 1444 | 1445 | def render_git_describe(pieces): 1446 | """TAG[-DISTANCE-gHEX][-dirty]. 1447 | 1448 | Like 'git describe --tags --dirty --always'. 1449 | 1450 | Exceptions: 1451 | 1: no tags. HEX[-dirty] (note: no 'g' prefix) 1452 | """ 1453 | if pieces["closest-tag"]: 1454 | rendered = pieces["closest-tag"] 1455 | if pieces["distance"]: 1456 | rendered += "-%d-g%s" % (pieces["distance"], pieces["short"]) 1457 | else: 1458 | # exception #1 1459 | rendered = pieces["short"] 1460 | if pieces["dirty"]: 1461 | rendered += "-dirty" 1462 | return rendered 1463 | 1464 | 1465 | def render_git_describe_long(pieces): 1466 | """TAG-DISTANCE-gHEX[-dirty]. 1467 | 1468 | Like 'git describe --tags --dirty --always -long'. 1469 | The distance/hash is unconditional. 1470 | 1471 | Exceptions: 1472 | 1: no tags. HEX[-dirty] (note: no 'g' prefix) 1473 | """ 1474 | if pieces["closest-tag"]: 1475 | rendered = pieces["closest-tag"] 1476 | rendered += "-%d-g%s" % (pieces["distance"], pieces["short"]) 1477 | else: 1478 | # exception #1 1479 | rendered = pieces["short"] 1480 | if pieces["dirty"]: 1481 | rendered += "-dirty" 1482 | return rendered 1483 | 1484 | 1485 | def add_one_to_version(version_string, number_index_to_increment=-1): 1486 | """ 1487 | Add one to a version string at the given numeric indices. 1488 | 1489 | >>> add_one_to_version('v1.2.3') 1490 | 'v1.2.4' 1491 | 1492 | """ 1493 | # Break up the tag by number groups (preserving multi-digit 1494 | # numbers as multidigit) 1495 | parts = re.split("([0-9]+)", version_string) 1496 | 1497 | digit_parts = [(i, part) for i, part in enumerate(parts) 1498 | if part.isdigit()] 1499 | 1500 | # Deal with negative indexing. 1501 | increment_at_index = ((number_index_to_increment + len(digit_parts)) 1502 | % len(digit_parts)) 1503 | for n_seen, (i, part) in enumerate(digit_parts): 1504 | if n_seen == increment_at_index: 1505 | parts[i] = str(int(part) + 1) 1506 | elif n_seen > increment_at_index: 1507 | parts[i] = '0' 1508 | return ''.join(parts) 1509 | 1510 | 1511 | def render_pep440_branch_based(pieces): 1512 | # [TAG+1 of minor number][.devDISTANCE][+gHEX]. The git short is 1513 | # included for dirty. 1514 | 1515 | # exceptions: 1516 | # 1: no tags. 
0.0.0.devDISTANCE[+gHEX] 1517 | 1518 | master = pieces.get('branch') == 'master' 1519 | maint = re.match(default_maint_branch_regexp, 1520 | pieces.get('branch') or '') 1521 | 1522 | # If we are on a tag, just pep440-pre it. 1523 | if pieces["closest-tag"] and not (pieces["distance"] or 1524 | pieces["dirty"]): 1525 | rendered = pieces["closest-tag"] 1526 | else: 1527 | # Put a default closest-tag in. 1528 | if not pieces["closest-tag"]: 1529 | pieces["closest-tag"] = '0.0.0' 1530 | 1531 | if pieces["distance"] or pieces["dirty"]: 1532 | if maint: 1533 | rendered = pieces["closest-tag"] 1534 | if pieces["distance"]: 1535 | rendered += ".post%d" % pieces["distance"] 1536 | else: 1537 | rendered = add_one_to_version(pieces["closest-tag"]) 1538 | if pieces["distance"]: 1539 | rendered += ".dev%d" % pieces["distance"] 1540 | # Put the branch name in if it isn't master nor a 1541 | # maintenance branch. 1542 | 1543 | plus = '+' 1544 | if not (master or maint): 1545 | rendered += "%s%s" % (plus, 1546 | pieces.get('branch') or 1547 | 'unknown_branch') 1548 | plus = '_' 1549 | 1550 | if pieces["dirty"]: 1551 | rendered += "%sg%s" % (plus, pieces["short"]) 1552 | else: 1553 | rendered = pieces["closest-tag"] 1554 | return rendered 1555 | 1556 | 1557 | STYLES = {'default': render_pep440, 1558 | 'pep440': render_pep440, 1559 | 'pep440-pre': render_pep440_pre, 1560 | 'pep440-post': render_pep440_post, 1561 | 'pep440-old': render_pep440_old, 1562 | 'git-describe': render_git_describe, 1563 | 'git-describe-long': render_git_describe_long, 1564 | 'pep440-old': render_pep440_old, 1565 | 'pep440-branch-based': render_pep440_branch_based, 1566 | } 1567 | 1568 | 1569 | def render(pieces, style): 1570 | """Render the given version pieces into the requested style.""" 1571 | if pieces["error"]: 1572 | return {"version": "unknown", 1573 | "full-revisionid": pieces.get("long"), 1574 | "dirty": None, 1575 | "error": pieces["error"]} 1576 | 1577 | if not style: 1578 | style = 'default' 1579 | 1580 | renderer = STYLES.get(style) 1581 | 1582 | if not renderer: 1583 | raise ValueError("unknown style '%s'" % style) 1584 | 1585 | rendered = renderer(pieces) 1586 | 1587 | return {"version": rendered, "full-revisionid": pieces["long"], 1588 | "dirty": pieces["dirty"], "error": None} 1589 | 1590 | 1591 | class VersioneerBadRootError(Exception): 1592 | 1593 | """The project root directory is unknown or missing key files.""" 1594 | 1595 | 1596 | def get_versions(verbose=False): 1597 | """Get the project version from whatever source is available. 1598 | 1599 | Returns dict with two keys: 'version' and 'full'. 1600 | """ 1601 | if "versioneer" in sys.modules: 1602 | # see the discussion in cmdclass.py:get_cmdclass() 1603 | del sys.modules["versioneer"] 1604 | 1605 | root = get_root() 1606 | cfg = get_config_from_root(root) 1607 | 1608 | assert cfg.VCS is not None, "please set [versioneer]VCS= in setup.cfg" 1609 | handlers = HANDLERS.get(cfg.VCS) 1610 | assert handlers, "unrecognized VCS '%s'" % cfg.VCS 1611 | verbose = verbose or cfg.verbose 1612 | assert cfg.versionfile_source is not None, \ 1613 | "please set versioneer.versionfile_source" 1614 | assert cfg.tag_prefix is not None, "please set versioneer.tag_prefix" 1615 | 1616 | versionfile_abs = os.path.join(root, cfg.versionfile_source) 1617 | 1618 | # extract version from first of: _version.py, VCS command (e.g. 'git 1619 | # describe'), parentdir. 
This is meant to work for developers using a 1620 | # source checkout, for users of a tarball created by 'setup.py sdist', 1621 | # and for users of a tarball/zipball created by 'git archive' or github's 1622 | # download-from-tag feature or the equivalent in other VCSes. 1623 | 1624 | get_keywords_f = handlers.get("get_keywords") 1625 | from_keywords_f = handlers.get("keywords") 1626 | if get_keywords_f and from_keywords_f: 1627 | try: 1628 | keywords = get_keywords_f(versionfile_abs) 1629 | ver = from_keywords_f(keywords, cfg.tag_prefix, verbose) 1630 | if verbose: 1631 | print("got version from expanded keyword %s" % ver) 1632 | return ver 1633 | except NotThisMethod: 1634 | pass 1635 | 1636 | try: 1637 | ver = versions_from_file(versionfile_abs) 1638 | if verbose: 1639 | print("got version from file %s %s" % (versionfile_abs, ver)) 1640 | return ver 1641 | except NotThisMethod: 1642 | pass 1643 | 1644 | from_vcs_f = handlers.get("pieces_from_vcs") 1645 | if from_vcs_f: 1646 | try: 1647 | pieces = from_vcs_f(cfg.tag_prefix, root, verbose) 1648 | ver = render(pieces, cfg.style) 1649 | if verbose: 1650 | print("got version from VCS %s" % ver) 1651 | return ver 1652 | except NotThisMethod: 1653 | pass 1654 | 1655 | try: 1656 | if cfg.parentdir_prefix: 1657 | ver = versions_from_parentdir(cfg.parentdir_prefix, root, verbose) 1658 | if verbose: 1659 | print("got version from parentdir %s" % ver) 1660 | return ver 1661 | except NotThisMethod: 1662 | pass 1663 | 1664 | if verbose: 1665 | print("unable to compute version") 1666 | 1667 | return {"version": "0+unknown", "full-revisionid": None, 1668 | "dirty": None, "error": "unable to compute version"} 1669 | 1670 | 1671 | def get_version(): 1672 | """Get the short version string for this project.""" 1673 | return get_versions()["version"] 1674 | 1675 | 1676 | def get_cmdclass(): 1677 | """Get the custom setuptools/distutils subclasses used by Versioneer.""" 1678 | if "versioneer" in sys.modules: 1679 | del sys.modules["versioneer"] 1680 | # this fixes the "python setup.py develop" case (also 'install' and 1681 | # 'easy_install .'), in which subdependencies of the main project are 1682 | # built (using setup.py bdist_egg) in the same python process. Assume 1683 | # a main project A and a dependency B, which use different versions 1684 | # of Versioneer. A's setup.py imports A's Versioneer, leaving it in 1685 | # sys.modules by the time B's setup.py is executed, causing B to run 1686 | # with the wrong versioneer. Setuptools wraps the sub-dep builds in a 1687 | # sandbox that restores sys.modules to it's pre-build state, so the 1688 | # parent is protected against the child's "import versioneer". By 1689 | # removing ourselves from sys.modules here, before the child build 1690 | # happens, we protect the child from the parent's versioneer too. 
1691 | # Also see https://github.com/warner/python-versioneer/issues/52 1692 | 1693 | cmds = {} 1694 | 1695 | # we add "version" to both distutils and setuptools 1696 | from distutils.core import Command 1697 | 1698 | class cmd_version(Command): 1699 | description = "report generated version string" 1700 | user_options = [] 1701 | boolean_options = [] 1702 | 1703 | def initialize_options(self): 1704 | pass 1705 | 1706 | def finalize_options(self): 1707 | pass 1708 | 1709 | def run(self): 1710 | vers = get_versions(verbose=True) 1711 | print("Version: %s" % vers["version"]) 1712 | print(" full-revisionid: %s" % vers.get("full-revisionid")) 1713 | print(" dirty: %s" % vers.get("dirty")) 1714 | if vers["error"]: 1715 | print(" error: %s" % vers["error"]) 1716 | cmds["version"] = cmd_version 1717 | 1718 | # we override "build_py" in both distutils and setuptools 1719 | # 1720 | # most invocation pathways end up running build_py: 1721 | # distutils/build -> build_py 1722 | # distutils/install -> distutils/build ->.. 1723 | # setuptools/bdist_wheel -> distutils/install ->.. 1724 | # setuptools/bdist_egg -> distutils/install_lib -> build_py 1725 | # setuptools/install -> bdist_egg ->.. 1726 | # setuptools/develop -> ? 1727 | 1728 | # we override different "build_py" commands for both environments 1729 | if "setuptools" in sys.modules: 1730 | from setuptools.command.build_py import build_py as _build_py 1731 | else: 1732 | from distutils.command.build_py import build_py as _build_py 1733 | 1734 | class cmd_build_py(_build_py): 1735 | def run(self): 1736 | root = get_root() 1737 | cfg = get_config_from_root(root) 1738 | versions = get_versions() 1739 | _build_py.run(self) 1740 | # now locate _version.py in the new build/ directory and replace 1741 | # it with an updated value 1742 | if cfg.versionfile_build: 1743 | target_versionfile = os.path.join(self.build_lib, 1744 | cfg.versionfile_build) 1745 | print("UPDATING %s" % target_versionfile) 1746 | write_to_version_file(target_versionfile, versions) 1747 | cmds["build_py"] = cmd_build_py 1748 | 1749 | if "cx_Freeze" in sys.modules: # cx_freeze enabled? 
1750 | from cx_Freeze.dist import build_exe as _build_exe 1751 | 1752 | class cmd_build_exe(_build_exe): 1753 | def run(self): 1754 | root = get_root() 1755 | cfg = get_config_from_root(root) 1756 | versions = get_versions() 1757 | target_versionfile = cfg.versionfile_source 1758 | print("UPDATING %s" % target_versionfile) 1759 | write_to_version_file(target_versionfile, versions) 1760 | 1761 | _build_exe.run(self) 1762 | os.unlink(target_versionfile) 1763 | with open(cfg.versionfile_source, "w") as f: 1764 | LONG = LONG_VERSION_PY[cfg.VCS] 1765 | f.write(LONG % 1766 | {"DOLLAR": "$", 1767 | "STYLE": cfg.style, 1768 | "TAG_PREFIX": cfg.tag_prefix, 1769 | "PARENTDIR_PREFIX": cfg.parentdir_prefix, 1770 | "VERSIONFILE_SOURCE": cfg.versionfile_source, 1771 | }) 1772 | cmds["build_exe"] = cmd_build_exe 1773 | del cmds["build_py"] 1774 | 1775 | # we override different "sdist" commands for both environments 1776 | if "setuptools" in sys.modules: 1777 | from setuptools.command.sdist import sdist as _sdist 1778 | else: 1779 | from distutils.command.sdist import sdist as _sdist 1780 | 1781 | class cmd_sdist(_sdist): 1782 | def run(self): 1783 | versions = get_versions() 1784 | self._versioneer_generated_versions = versions 1785 | # unless we update this, the command will keep using the old 1786 | # version 1787 | self.distribution.metadata.version = versions["version"] 1788 | return _sdist.run(self) 1789 | 1790 | def make_release_tree(self, base_dir, files): 1791 | root = get_root() 1792 | cfg = get_config_from_root(root) 1793 | _sdist.make_release_tree(self, base_dir, files) 1794 | # now locate _version.py in the new base_dir directory 1795 | # (remembering that it may be a hardlink) and replace it with an 1796 | # updated value 1797 | target_versionfile = os.path.join(base_dir, cfg.versionfile_source) 1798 | print("UPDATING %s" % target_versionfile) 1799 | write_to_version_file(target_versionfile, 1800 | self._versioneer_generated_versions) 1801 | cmds["sdist"] = cmd_sdist 1802 | 1803 | return cmds 1804 | 1805 | 1806 | CONFIG_ERROR = """ 1807 | setup.cfg is missing the necessary Versioneer configuration. You need 1808 | a section like: 1809 | 1810 | [versioneer] 1811 | VCS = git 1812 | style = pep440 1813 | versionfile_source = src/myproject/_version.py 1814 | versionfile_build = myproject/_version.py 1815 | tag_prefix = 1816 | parentdir_prefix = myproject- 1817 | 1818 | You will also need to edit your setup.py to use the results: 1819 | 1820 | import versioneer 1821 | setup(version=versioneer.get_version(), 1822 | cmdclass=versioneer.get_cmdclass(), ...) 1823 | 1824 | Please read the docstring in ./versioneer.py for configuration instructions, 1825 | edit setup.cfg, and re-run the installer or 'python versioneer.py setup'. 1826 | """ 1827 | 1828 | SAMPLE_CONFIG = """ 1829 | # See the docstring in versioneer.py for instructions. Note that you must 1830 | # re-run 'versioneer.py setup' after changing this section, and commit the 1831 | # resulting files. 
1832 | 1833 | [versioneer] 1834 | #VCS = git 1835 | #style = pep440 1836 | #versionfile_source = 1837 | #versionfile_build = 1838 | #tag_prefix = 1839 | #parentdir_prefix = 1840 | 1841 | """ 1842 | 1843 | INIT_PY_SNIPPET = """ 1844 | from ._version import get_versions 1845 | __version__ = get_versions()['version'] 1846 | del get_versions 1847 | """ 1848 | 1849 | 1850 | def do_setup(): 1851 | """Main VCS-independent setup function for installing Versioneer.""" 1852 | root = get_root() 1853 | try: 1854 | cfg = get_config_from_root(root) 1855 | except (EnvironmentError, configparser.NoSectionError, 1856 | configparser.NoOptionError) as e: 1857 | if isinstance(e, (EnvironmentError, configparser.NoSectionError)): 1858 | print("Adding sample versioneer config to setup.cfg", 1859 | file=sys.stderr) 1860 | with open(os.path.join(root, "setup.cfg"), "a") as f: 1861 | f.write(SAMPLE_CONFIG) 1862 | print(CONFIG_ERROR, file=sys.stderr) 1863 | return 1 1864 | 1865 | print(" creating %s" % cfg.versionfile_source) 1866 | with open(cfg.versionfile_source, "w") as f: 1867 | LONG = LONG_VERSION_PY[cfg.VCS] 1868 | f.write(LONG % {"DOLLAR": "$", 1869 | "STYLE": cfg.style, 1870 | "TAG_PREFIX": cfg.tag_prefix, 1871 | "PARENTDIR_PREFIX": cfg.parentdir_prefix, 1872 | "VERSIONFILE_SOURCE": cfg.versionfile_source, 1873 | }) 1874 | 1875 | ipy = os.path.join(os.path.dirname(cfg.versionfile_source), 1876 | "__init__.py") 1877 | if os.path.exists(ipy): 1878 | try: 1879 | with open(ipy, "r") as f: 1880 | old = f.read() 1881 | except EnvironmentError: 1882 | old = "" 1883 | if INIT_PY_SNIPPET not in old: 1884 | print(" appending to %s" % ipy) 1885 | with open(ipy, "a") as f: 1886 | f.write(INIT_PY_SNIPPET) 1887 | else: 1888 | print(" %s unmodified" % ipy) 1889 | else: 1890 | print(" %s doesn't exist, ok" % ipy) 1891 | ipy = None 1892 | 1893 | # Make sure both the top-level "versioneer.py" and versionfile_source 1894 | # (PKG/_version.py, used by runtime code) are in MANIFEST.in, so 1895 | # they'll be copied into source distributions. Pip won't be able to 1896 | # install the package without this. 1897 | manifest_in = os.path.join(root, "MANIFEST.in") 1898 | simple_includes = set() 1899 | try: 1900 | with open(manifest_in, "r") as f: 1901 | for line in f: 1902 | if line.startswith("include "): 1903 | for include in line.split()[1:]: 1904 | simple_includes.add(include) 1905 | except EnvironmentError: 1906 | pass 1907 | # That doesn't cover everything MANIFEST.in can do 1908 | # (http://docs.python.org/2/distutils/sourcedist.html#commands), so 1909 | # it might give some false negatives. Appending redundant 'include' 1910 | # lines is safe, though. 1911 | if "versioneer.py" not in simple_includes: 1912 | print(" appending 'versioneer.py' to MANIFEST.in") 1913 | with open(manifest_in, "a") as f: 1914 | f.write("include versioneer.py\n") 1915 | else: 1916 | print(" 'versioneer.py' already in MANIFEST.in") 1917 | if cfg.versionfile_source not in simple_includes: 1918 | print(" appending versionfile_source ('%s') to MANIFEST.in" % 1919 | cfg.versionfile_source) 1920 | with open(manifest_in, "a") as f: 1921 | f.write("include %s\n" % cfg.versionfile_source) 1922 | else: 1923 | print(" versionfile_source already in MANIFEST.in") 1924 | 1925 | # Make VCS-specific changes. For git, this means creating/changing 1926 | # .gitattributes to mark _version.py for export-time keyword 1927 | # substitution. 
1928 | do_vcs_install(manifest_in, cfg.versionfile_source, ipy) 1929 | return 0 1930 | 1931 | 1932 | def scan_setup_py(): 1933 | """Validate the contents of setup.py against Versioneer's expectations.""" 1934 | found = set() 1935 | setters = False 1936 | errors = 0 1937 | with open("setup.py", "r") as f: 1938 | for line in f.readlines(): 1939 | if "import versioneer" in line: 1940 | found.add("import") 1941 | if "versioneer.get_cmdclass()" in line: 1942 | found.add("cmdclass") 1943 | if "versioneer.get_version()" in line: 1944 | found.add("get_version") 1945 | if "versioneer.VCS" in line: 1946 | setters = True 1947 | if "versioneer.versionfile_source" in line: 1948 | setters = True 1949 | if len(found) != 3: 1950 | print("") 1951 | print("Your setup.py appears to be missing some important items") 1952 | print("(but I might be wrong). Please make sure it has something") 1953 | print("roughly like the following:") 1954 | print("") 1955 | print(" import versioneer") 1956 | print(" setup( version=versioneer.get_version(),") 1957 | print(" cmdclass=versioneer.get_cmdclass(), ...)") 1958 | print("") 1959 | errors += 1 1960 | if setters: 1961 | print("You should remove lines like 'versioneer.VCS = ' and") 1962 | print("'versioneer.versionfile_source = ' . This configuration") 1963 | print("now lives in setup.cfg, and should be removed from setup.py") 1964 | print("") 1965 | errors += 1 1966 | return errors 1967 | 1968 | if __name__ == "__main__": 1969 | cmd = sys.argv[1] 1970 | if cmd == "setup": 1971 | errors = do_setup() 1972 | errors += scan_setup_py() 1973 | if errors: 1974 | sys.exit(1) 1975 | --------------------------------------------------------------------------------
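A few usage sketches may help when reading the versioneer.py listing above; none of the following code is part of the repository, and names or values flagged as hypothetical are illustrative only.

When a tarball is produced with 'git archive', the export-subst attribute that do_vcs_install() adds to .gitattributes makes git expand the git_refnames/git_full placeholders in _version.py, and git_versions_from_keywords() recovers the version from that expanded text. A minimal sketch, assuming versioneer.py is importable from the repository root and using a made-up refnames string and hash:

    import versioneer

    # Hypothetical expanded keywords for a commit carrying the tag "v1.2.0";
    # the tag_prefix argument ("v" here) is stripped from the matched tag.
    keywords = {
        "refnames": " (HEAD -> master, tag: v1.2.0)",
        "full": "1234567890abcdef1234567890abcdef12345678",
    }
    info = versioneer.git_versions_from_keywords(keywords, "v", verbose=False)
    print(info["version"])          # '1.2.0'
    print(info["full-revisionid"])  # the full hash given above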
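Inside a checkout, git_pieces_from_vcs() reduces 'git describe' output to a small "pieces" dictionary, and render() maps it through one of the STYLES entries. A sketch with hand-written pieces (three commits past a clean "1.2.0" tag on master); the expected strings follow from the render_* implementations above:

    import versioneer

    # Hypothetical pieces, as git_pieces_from_vcs() would produce them.
    pieces = {
        "error": None,
        "long": "1234567890abcdef1234567890abcdef12345678",
        "short": "1234567",
        "closest-tag": "1.2.0",
        "distance": 3,
        "dirty": False,
        "branch": "master",
    }

    for style in ("pep440", "pep440-pre", "pep440-post",
                  "git-describe", "pep440-branch-based"):
        print(style, versioneer.render(dict(pieces), style)["version"])

    # pep440              -> 1.2.0+3.g1234567
    # pep440-pre          -> 1.2.0.post.dev3
    # pep440-post         -> 1.2.0.post3+g1234567
    # git-describe        -> 1.2.0-3-g1234567
    # pep440-branch-based -> 1.2.1.dev3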
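add_one_to_version() is the helper that the pep440-branch-based style uses to step past the latest tag on non-maintenance branches: it splits the string into digit groups, increments the group at the requested index, and zeroes every later group.

    import versioneer

    # Default: bump the right-most digit group.
    print(versioneer.add_one_to_version("v1.2.3"))              # 'v1.2.4'
    # Bump an earlier group; everything after it is reset to zero.
    print(versioneer.add_one_to_version(
        "v1.2.3", number_index_to_increment=1))                 # 'v1.3.0'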
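Once a version has been computed, write_to_version_file() replaces _version.py with the SHORT_VERSION_PY form, and versions_from_file() parses it back out of the embedded JSON payload. A round-trip sketch with a throwaway path (the empty file is pre-created only because write_to_version_file() unlinks the old file first):

    import os
    import tempfile
    import versioneer

    versions = {"version": "1.2.0",
                "full-revisionid": "1234567890abcdef1234567890abcdef12345678",
                "dirty": False, "error": None}

    path = os.path.join(tempfile.mkdtemp(), "_version.py")
    open(path, "w").close()                       # placeholder for the "old" file
    versioneer.write_to_version_file(path, versions)
    print(versioneer.versions_from_file(path) == versions)   # True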
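versions_from_parentdir() is the fallback for users who unpack an sdist whose directory name carries the version. The prefix below is hypothetical; the real value comes from parentdir_prefix in setup.cfg:

    import versioneer

    print(versioneer.versions_from_parentdir("obvious-ci-",
                                              "/tmp/obvious-ci-0.3.0",
                                              verbose=False))
    # -> version '0.3.0', no revision id, not dirty, no error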
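get_versions() near the bottom of versioneer.py ties the strategies together: expanded keywords first, then a previously written _version.py, then 'git describe', then the parent-directory name, and finally the "0+unknown" placeholder. Run from a checkout root whose setup.cfg carries the [versioneer] section described in CONFIG_ERROR, it can be exercised directly; this sketch assumes that configuration is already in place:

    import versioneer

    info = versioneer.get_versions(verbose=True)
    print(info["version"], info["full-revisionid"], info["dirty"])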