├── .coveragerc ├── .gitattributes ├── .github └── workflows │ ├── ci.yaml │ ├── pre-commit.yaml │ └── release.yml ├── .gitignore ├── .pre-commit-config.yaml ├── .readthedocs.yml ├── LICENSE ├── MANIFEST.in ├── README.rst ├── dask_ctl ├── __init__.py ├── _version.py ├── cli.py ├── config.py ├── ctl-schema.yaml ├── ctl.yaml ├── discovery.py ├── exceptions.py ├── lifecycle.py ├── proxy.py ├── renderables.py ├── spec.py ├── tests │ ├── conftest.py │ ├── specs │ │ └── simple.yaml │ ├── test_cli.py │ ├── test_dask_ctl.py │ ├── test_discovery.py │ └── test_lifecycle.py ├── utils.py └── widgets │ └── templates │ └── snippet.py.j2 ├── docs ├── Makefile ├── api.rst ├── cli.rst ├── conf.py ├── configuration.rst ├── contributing.rst ├── index.rst ├── integrating.rst ├── make.bat ├── releasing.rst ├── requirements_docs.txt └── spec.rst ├── requirements-test.txt ├── requirements.txt ├── setup.cfg ├── setup.py └── versioneer.py /.coveragerc: -------------------------------------------------------------------------------- 1 | [run] 2 | omit = 3 | setup.py 4 | versioneer.py 5 | dask_ctl/tests/test* 6 | dask_ctl/_version.py -------------------------------------------------------------------------------- /.gitattributes: -------------------------------------------------------------------------------- 1 | dask_ctl/_version.py export-subst 2 | -------------------------------------------------------------------------------- /.github/workflows/ci.yaml: -------------------------------------------------------------------------------- 1 | name: Tests 2 | 3 | on: [pull_request, push, workflow_dispatch] 4 | 5 | jobs: 6 | test: 7 | runs-on: ${{ matrix.os }} 8 | env: 9 | PYTHONIOENCODING: utf-8 10 | strategy: 11 | fail-fast: false 12 | matrix: 13 | os: ["ubuntu-latest", "windows-latest", "macos-latest"] 14 | python-version: ["3.9", "3.10", "3.11"] 15 | 16 | steps: 17 | - name: Checkout source 18 | uses: actions/checkout@v2 19 | 20 | - name: Setup python 21 | uses: actions/setup-python@v2 22 | 
with: 23 | python-version: ${{ matrix.python-version }} 24 | architecture: x64 25 | 26 | - name: Install 27 | run: | 28 | pip install -e . 29 | pip install git+https://github.com/dask/dask.git@main git+https://github.com/dask/distributed.git@main 30 | pip install -r requirements-test.txt 31 | 32 | - name: Run tests 33 | run: pytest --cov=./ --reruns 5 --reruns-delay 1 34 | 35 | - name: "Upload coverage to Codecov" 36 | uses: codecov/codecov-action@v1 37 | -------------------------------------------------------------------------------- /.github/workflows/pre-commit.yaml: -------------------------------------------------------------------------------- 1 | name: Linting 2 | on: [pull_request, push, workflow_dispatch] 3 | 4 | jobs: 5 | lint: 6 | runs-on: ubuntu-latest 7 | steps: 8 | - uses: actions/checkout@v2 9 | - uses: actions/setup-python@v2 10 | - uses: pre-commit/action@v2.0.0 11 | -------------------------------------------------------------------------------- /.github/workflows/release.yml: -------------------------------------------------------------------------------- 1 | name: Build distribution 2 | 3 | on: [push, pull_request] 4 | 5 | jobs: 6 | test: 7 | runs-on: "ubuntu-latest" 8 | 9 | steps: 10 | - name: Checkout source 11 | uses: actions/checkout@v2 12 | 13 | - name: Set up Python 3.10 14 | uses: actions/setup-python@v1 15 | with: 16 | python-version: "3.10" 17 | 18 | - name: Install build dependencies 19 | run: python -m pip install build wheel 20 | 21 | - name: Build distributions 22 | shell: bash -l {0} 23 | run: python setup.py sdist bdist_wheel 24 | 25 | - name: Publish package to PyPI 26 | if: github.repository == 'dask-contrib/dask-ctl' && github.event_name == 'push' && startsWith(github.ref, 'refs/tags') 27 | uses: pypa/gh-action-pypi-publish@master 28 | with: 29 | user: __token__ 30 | password: ${{ secrets.pypi_password }} 31 | -------------------------------------------------------------------------------- /.gitignore: 
-------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | build/ 12 | develop-eggs/ 13 | dist/ 14 | downloads/ 15 | eggs/ 16 | .eggs/ 17 | lib/ 18 | lib64/ 19 | parts/ 20 | sdist/ 21 | var/ 22 | wheels/ 23 | share/python-wheels/ 24 | *.egg-info/ 25 | .installed.cfg 26 | *.egg 27 | MANIFEST 28 | 29 | # PyInstaller 30 | # Usually these files are written by a python script from a template 31 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 32 | *.manifest 33 | *.spec 34 | 35 | # Installer logs 36 | pip-log.txt 37 | pip-delete-this-directory.txt 38 | 39 | # Unit test / coverage reports 40 | htmlcov/ 41 | .tox/ 42 | .nox/ 43 | .coverage 44 | .coverage.* 45 | .cache 46 | nosetests.xml 47 | coverage.xml 48 | *.cover 49 | *.py,cover 50 | .hypothesis/ 51 | .pytest_cache/ 52 | cover/ 53 | 54 | # Translations 55 | *.mo 56 | *.pot 57 | 58 | # Django stuff: 59 | *.log 60 | local_settings.py 61 | db.sqlite3 62 | db.sqlite3-journal 63 | 64 | # Flask stuff: 65 | instance/ 66 | .webassets-cache 67 | 68 | # Scrapy stuff: 69 | .scrapy 70 | 71 | # Sphinx documentation 72 | docs/_build/ 73 | 74 | # PyBuilder 75 | .pybuilder/ 76 | target/ 77 | 78 | # Jupyter Notebook 79 | .ipynb_checkpoints 80 | 81 | # IPython 82 | profile_default/ 83 | ipython_config.py 84 | 85 | # pyenv 86 | # For a library or package, you might want to ignore these files since the code is 87 | # intended to run in multiple environments; otherwise, check them in: 88 | # .python-version 89 | 90 | # pipenv 91 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 
92 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 93 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 94 | # install all needed dependencies. 95 | #Pipfile.lock 96 | 97 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow 98 | __pypackages__/ 99 | 100 | # Celery stuff 101 | celerybeat-schedule 102 | celerybeat.pid 103 | 104 | # SageMath parsed files 105 | *.sage.py 106 | 107 | # Environments 108 | .env 109 | .venv 110 | env/ 111 | venv/ 112 | ENV/ 113 | env.bak/ 114 | venv.bak/ 115 | 116 | # Spyder project settings 117 | .spyderproject 118 | .spyproject 119 | 120 | # Rope project settings 121 | .ropeproject 122 | 123 | # mkdocs documentation 124 | /site 125 | 126 | # mypy 127 | .mypy_cache/ 128 | .dmypy.json 129 | dmypy.json 130 | 131 | # Pyre type checker 132 | .pyre/ 133 | 134 | # pytype static type analyzer 135 | .pytype/ 136 | 137 | # Cython debug symbols 138 | cython_debug/ 139 | 140 | # textual 141 | textual_debug.log 142 | -------------------------------------------------------------------------------- /.pre-commit-config.yaml: -------------------------------------------------------------------------------- 1 | repos: 2 | - repo: https://github.com/psf/black 3 | rev: 23.12.1 4 | hooks: 5 | - id: black 6 | language_version: python3 7 | exclude: versioneer.py 8 | args: 9 | - --target-version=py39 10 | - repo: https://github.com/astral-sh/ruff-pre-commit 11 | # Ruff version. 12 | rev: 'v0.1.14' 13 | hooks: 14 | - id: ruff 15 | args: [--fix, --exit-non-zero-on-fix] 16 | -------------------------------------------------------------------------------- /.readthedocs.yml: -------------------------------------------------------------------------------- 1 | version: 2 2 | 3 | sphinx: 4 | configuration: docs/conf.py 5 | 6 | build: 7 | os: "ubuntu-22.04" 8 | tools: 9 | python: "3.9" 10 | 11 | python: 12 | install: 13 | - method: pip 14 | path: . 
15 | - requirements: docs/requirements_docs.txt 16 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Copyright 2021 Dask contributors 2 | 3 | Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: 4 | 5 | 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. 6 | 7 | 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. 8 | 9 | 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. 10 | 11 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
-------------------------------------------------------------------------------- /MANIFEST.in: -------------------------------------------------------------------------------- 1 | recursive-include dask_ctl *.py 2 | recursive-include dask_ctl *.j2 3 | recursive-include dask_ctl *.yaml 4 | recursive-include docs *.rst 5 | recursive-exclude dask_ctl **/tests/* 6 | 7 | include setup.py 8 | include README.rst 9 | include LICENSE 10 | include MANIFEST.in 11 | include requirements.txt 12 | 13 | prune docs/_build 14 | include versioneer.py 15 | include dask_ctl/_version.py 16 | -------------------------------------------------------------------------------- /README.rst: -------------------------------------------------------------------------------- 1 | dask-ctl 2 | ======== 3 | 4 | 5 | .. image:: https://img.shields.io/pypi/v/dask-ctl 6 | :target: https://pypi.org/project/dask-ctl/ 7 | :alt: PyPI 8 | .. image:: https://img.shields.io/readthedocs/dask-ctl 9 | :target: https://dask-ctl.readthedocs.io/ 10 | :alt: Read the Docs 11 | .. image:: https://github.com/dask-contrib/dask-ctl/workflows/Tests/badge.svg 12 | :target: https://github.com/dask-contrib/dask-ctl/actions?query=workflow%3ATests 13 | :alt: GitHub Actions - CI 14 | .. image:: https://github.com/dask-contrib/dask-ctl/workflows/Linting/badge.svg 15 | :target: https://github.com/dask-contrib/dask-ctl/actions?query=workflow%3ALinting 16 | :alt: GitHub Actions - pre-commit 17 | .. image:: https://img.shields.io/codecov/c/gh/dask-contrib/dask-ctl 18 | :target: https://app.codecov.io/gh/dask-contrib/dask-ctl 19 | :alt: Codecov 20 | 21 | A set of tools to provide a control plane for managing the lifecycle of Dask clusters. 22 | 23 | .. 
code-block:: bash 24 | 25 | $ dask cluster list 26 | NAME ADDRESS TYPE WORKERS THREADS MEMORY CREATED STATUS 27 | proxycluster-8786 tcp://localhost:8786 ProxyCluster 4 12 17.18 GB Just now Running 28 | -------------------------------------------------------------------------------- /dask_ctl/__init__.py: -------------------------------------------------------------------------------- 1 | from ._version import get_versions 2 | from .exceptions import DaskClusterConfigNotFound # noqa 3 | import os.path 4 | 5 | from dask.widgets import TEMPLATE_PATHS 6 | 7 | __version__ = get_versions()["version"] 8 | del get_versions 9 | 10 | TEMPLATE_PATHS.append( 11 | os.path.join(os.path.dirname(os.path.abspath(__file__)), "widgets", "templates") 12 | ) 13 | -------------------------------------------------------------------------------- /dask_ctl/_version.py: -------------------------------------------------------------------------------- 1 | # This file helps to compute a version number in source trees obtained from 2 | # git-archive tarball (such as those provided by githubs download-from-tag 3 | # feature). Distribution tarballs (built by setup.py sdist) and build 4 | # directories (produced by setup.py build) will contain a much shorter file 5 | # that just contains the computed version number. 6 | 7 | # This file is released into the public domain. Generated by 8 | # versioneer-0.19 (https://github.com/python-versioneer/python-versioneer) 9 | 10 | """Git implementation of _version.py.""" 11 | 12 | import errno 13 | import os 14 | import re 15 | import subprocess 16 | import sys 17 | 18 | 19 | def get_keywords(): 20 | """Get the keywords needed to look up the version information.""" 21 | # these strings will be replaced by git during git-archive. 22 | # setup.py/versioneer.py will grep for the variable names, so they must 23 | # each be defined on a line of their own. _version.py will just call 24 | # get_keywords(). 
25 | git_refnames = " (HEAD -> main)" 26 | git_full = "1aed52ecf82c46fe8eb269972ce2084d46595092" 27 | git_date = "2024-01-25 14:14:01 +0000" 28 | keywords = {"refnames": git_refnames, "full": git_full, "date": git_date} 29 | return keywords 30 | 31 | 32 | class VersioneerConfig: 33 | """Container for Versioneer configuration parameters.""" 34 | 35 | 36 | def get_config(): 37 | """Create, populate and return the VersioneerConfig() object.""" 38 | # these strings are filled in when 'setup.py versioneer' creates 39 | # _version.py 40 | cfg = VersioneerConfig() 41 | cfg.VCS = "git" 42 | cfg.style = "pep440" 43 | cfg.tag_prefix = "" 44 | cfg.parentdir_prefix = "" 45 | cfg.versionfile_source = "dask_ctl/_version.py" 46 | cfg.verbose = False 47 | return cfg 48 | 49 | 50 | class NotThisMethod(Exception): 51 | """Exception raised if a method is not valid for the current scenario.""" 52 | 53 | 54 | LONG_VERSION_PY = {} 55 | HANDLERS = {} 56 | 57 | 58 | def register_vcs_handler(vcs, method): # decorator 59 | """Create decorator to mark a method as the handler of a VCS.""" 60 | 61 | def decorate(f): 62 | """Store f in HANDLERS[vcs][method].""" 63 | if vcs not in HANDLERS: 64 | HANDLERS[vcs] = {} 65 | HANDLERS[vcs][method] = f 66 | return f 67 | 68 | return decorate 69 | 70 | 71 | def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False, env=None): 72 | """Call the given command(s).""" 73 | assert isinstance(commands, list) 74 | p = None 75 | for c in commands: 76 | try: 77 | dispcmd = str([c] + args) 78 | # remember shell=False, so use git.cmd on windows, not just git 79 | p = subprocess.Popen( 80 | [c] + args, 81 | cwd=cwd, 82 | env=env, 83 | stdout=subprocess.PIPE, 84 | stderr=(subprocess.PIPE if hide_stderr else None), 85 | ) 86 | break 87 | except EnvironmentError: 88 | e = sys.exc_info()[1] 89 | if e.errno == errno.ENOENT: 90 | continue 91 | if verbose: 92 | print("unable to run %s" % dispcmd) 93 | print(e) 94 | return None, None 95 | else: 96 | if 
verbose: 97 | print("unable to find command, tried %s" % (commands,)) 98 | return None, None 99 | stdout = p.communicate()[0].strip().decode() 100 | if p.returncode != 0: 101 | if verbose: 102 | print("unable to run %s (error)" % dispcmd) 103 | print("stdout was %s" % stdout) 104 | return None, p.returncode 105 | return stdout, p.returncode 106 | 107 | 108 | def versions_from_parentdir(parentdir_prefix, root, verbose): 109 | """Try to determine the version from the parent directory name. 110 | 111 | Source tarballs conventionally unpack into a directory that includes both 112 | the project name and a version string. We will also support searching up 113 | two directory levels for an appropriately named parent directory 114 | """ 115 | rootdirs = [] 116 | 117 | for i in range(3): 118 | dirname = os.path.basename(root) 119 | if dirname.startswith(parentdir_prefix): 120 | return { 121 | "version": dirname[len(parentdir_prefix) :], 122 | "full-revisionid": None, 123 | "dirty": False, 124 | "error": None, 125 | "date": None, 126 | } 127 | else: 128 | rootdirs.append(root) 129 | root = os.path.dirname(root) # up a level 130 | 131 | if verbose: 132 | print( 133 | "Tried directories %s but none started with prefix %s" 134 | % (str(rootdirs), parentdir_prefix) 135 | ) 136 | raise NotThisMethod("rootdir doesn't start with parentdir_prefix") 137 | 138 | 139 | @register_vcs_handler("git", "get_keywords") 140 | def git_get_keywords(versionfile_abs): 141 | """Extract version information from the given file.""" 142 | # the code embedded in _version.py can just fetch the value of these 143 | # keywords. When used from setup.py, we don't want to import _version.py, 144 | # so we do it with a regexp instead. This function is not used from 145 | # _version.py. 
146 | keywords = {} 147 | try: 148 | f = open(versionfile_abs, "r") 149 | for line in f.readlines(): 150 | if line.strip().startswith("git_refnames ="): 151 | mo = re.search(r'=\s*"(.*)"', line) 152 | if mo: 153 | keywords["refnames"] = mo.group(1) 154 | if line.strip().startswith("git_full ="): 155 | mo = re.search(r'=\s*"(.*)"', line) 156 | if mo: 157 | keywords["full"] = mo.group(1) 158 | if line.strip().startswith("git_date ="): 159 | mo = re.search(r'=\s*"(.*)"', line) 160 | if mo: 161 | keywords["date"] = mo.group(1) 162 | f.close() 163 | except EnvironmentError: 164 | pass 165 | return keywords 166 | 167 | 168 | @register_vcs_handler("git", "keywords") 169 | def git_versions_from_keywords(keywords, tag_prefix, verbose): 170 | """Get version information from git keywords.""" 171 | if not keywords: 172 | raise NotThisMethod("no keywords at all, weird") 173 | date = keywords.get("date") 174 | if date is not None: 175 | # Use only the last line. Previous lines may contain GPG signature 176 | # information. 177 | date = date.splitlines()[-1] 178 | 179 | # git-2.2.0 added "%cI", which expands to an ISO-8601 -compliant 180 | # datestamp. However we prefer "%ci" (which expands to an "ISO-8601 181 | # -like" string, which we must then edit to make compliant), because 182 | # it's been around since git-1.5.3, and it's too difficult to 183 | # discover which version we're using, or to work around using an 184 | # older one. 185 | date = date.strip().replace(" ", "T", 1).replace(" ", "", 1) 186 | refnames = keywords["refnames"].strip() 187 | if refnames.startswith("$Format"): 188 | if verbose: 189 | print("keywords are unexpanded, not using") 190 | raise NotThisMethod("unexpanded keywords, not a git-archive tarball") 191 | refs = set([r.strip() for r in refnames.strip("()").split(",")]) 192 | # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of 193 | # just "foo-1.0". If we see a "tag: " prefix, prefer those. 
194 | TAG = "tag: " 195 | tags = set([r[len(TAG) :] for r in refs if r.startswith(TAG)]) 196 | if not tags: 197 | # Either we're using git < 1.8.3, or there really are no tags. We use 198 | # a heuristic: assume all version tags have a digit. The old git %d 199 | # expansion behaves like git log --decorate=short and strips out the 200 | # refs/heads/ and refs/tags/ prefixes that would let us distinguish 201 | # between branches and tags. By ignoring refnames without digits, we 202 | # filter out many common branch names like "release" and 203 | # "stabilization", as well as "HEAD" and "master". 204 | tags = set([r for r in refs if re.search(r"\d", r)]) 205 | if verbose: 206 | print("discarding '%s', no digits" % ",".join(refs - tags)) 207 | if verbose: 208 | print("likely tags: %s" % ",".join(sorted(tags))) 209 | for ref in sorted(tags): 210 | # sorting will prefer e.g. "2.0" over "2.0rc1" 211 | if ref.startswith(tag_prefix): 212 | r = ref[len(tag_prefix) :] 213 | if verbose: 214 | print("picking %s" % r) 215 | return { 216 | "version": r, 217 | "full-revisionid": keywords["full"].strip(), 218 | "dirty": False, 219 | "error": None, 220 | "date": date, 221 | } 222 | # no suitable tags, so version is "0+unknown", but full hex is still there 223 | if verbose: 224 | print("no suitable tags, using unknown + full revision id") 225 | return { 226 | "version": "0+unknown", 227 | "full-revisionid": keywords["full"].strip(), 228 | "dirty": False, 229 | "error": "no suitable tags", 230 | "date": None, 231 | } 232 | 233 | 234 | @register_vcs_handler("git", "pieces_from_vcs") 235 | def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command): 236 | """Get version from 'git describe' in the root of the source tree. 237 | 238 | This only gets called if the git-archive 'subst' keywords were *not* 239 | expanded, and _version.py hasn't already been rewritten with a short 240 | version string, meaning we're inside a checked out source tree. 
241 | """ 242 | GITS = ["git"] 243 | if sys.platform == "win32": 244 | GITS = ["git.cmd", "git.exe"] 245 | 246 | out, rc = run_command(GITS, ["rev-parse", "--git-dir"], cwd=root, hide_stderr=True) 247 | if rc != 0: 248 | if verbose: 249 | print("Directory %s not under git control" % root) 250 | raise NotThisMethod("'git rev-parse --git-dir' returned error") 251 | 252 | # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty] 253 | # if there isn't one, this yields HEX[-dirty] (no NUM) 254 | describe_out, rc = run_command( 255 | GITS, 256 | [ 257 | "describe", 258 | "--tags", 259 | "--dirty", 260 | "--always", 261 | "--long", 262 | "--match", 263 | "%s*" % tag_prefix, 264 | ], 265 | cwd=root, 266 | ) 267 | # --long was added in git-1.5.5 268 | if describe_out is None: 269 | raise NotThisMethod("'git describe' failed") 270 | describe_out = describe_out.strip() 271 | full_out, rc = run_command(GITS, ["rev-parse", "HEAD"], cwd=root) 272 | if full_out is None: 273 | raise NotThisMethod("'git rev-parse' failed") 274 | full_out = full_out.strip() 275 | 276 | pieces = {} 277 | pieces["long"] = full_out 278 | pieces["short"] = full_out[:7] # maybe improved later 279 | pieces["error"] = None 280 | 281 | # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty] 282 | # TAG might have hyphens. 283 | git_describe = describe_out 284 | 285 | # look for -dirty suffix 286 | dirty = git_describe.endswith("-dirty") 287 | pieces["dirty"] = dirty 288 | if dirty: 289 | git_describe = git_describe[: git_describe.rindex("-dirty")] 290 | 291 | # now we have TAG-NUM-gHEX or HEX 292 | 293 | if "-" in git_describe: 294 | # TAG-NUM-gHEX 295 | mo = re.search(r"^(.+)-(\d+)-g([0-9a-f]+)$", git_describe) 296 | if not mo: 297 | # unparseable. Maybe git-describe is misbehaving? 
298 | pieces["error"] = "unable to parse git-describe output: '%s'" % describe_out 299 | return pieces 300 | 301 | # tag 302 | full_tag = mo.group(1) 303 | if not full_tag.startswith(tag_prefix): 304 | if verbose: 305 | fmt = "tag '%s' doesn't start with prefix '%s'" 306 | print(fmt % (full_tag, tag_prefix)) 307 | pieces["error"] = "tag '%s' doesn't start with prefix '%s'" % ( 308 | full_tag, 309 | tag_prefix, 310 | ) 311 | return pieces 312 | pieces["closest-tag"] = full_tag[len(tag_prefix) :] 313 | 314 | # distance: number of commits since tag 315 | pieces["distance"] = int(mo.group(2)) 316 | 317 | # commit: short hex revision ID 318 | pieces["short"] = mo.group(3) 319 | 320 | else: 321 | # HEX: no tags 322 | pieces["closest-tag"] = None 323 | count_out, rc = run_command(GITS, ["rev-list", "HEAD", "--count"], cwd=root) 324 | pieces["distance"] = int(count_out) # total number of commits 325 | 326 | # commit date: see ISO-8601 comment in git_versions_from_keywords() 327 | date = run_command(GITS, ["show", "-s", "--format=%ci", "HEAD"], cwd=root)[ 328 | 0 329 | ].strip() 330 | # Use only the last line. Previous lines may contain GPG signature 331 | # information. 332 | date = date.splitlines()[-1] 333 | pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1) 334 | 335 | return pieces 336 | 337 | 338 | def plus_or_dot(pieces): 339 | """Return a + if we don't already have one, else return a .""" 340 | if "+" in pieces.get("closest-tag", ""): 341 | return "." 342 | return "+" 343 | 344 | 345 | def render_pep440(pieces): 346 | """Build up version string, with post-release "local version identifier". 347 | 348 | Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you 349 | get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty 350 | 351 | Exceptions: 352 | 1: no tags. git_describe was just HEX. 
0+untagged.DISTANCE.gHEX[.dirty] 353 | """ 354 | if pieces["closest-tag"]: 355 | rendered = pieces["closest-tag"] 356 | if pieces["distance"] or pieces["dirty"]: 357 | rendered += plus_or_dot(pieces) 358 | rendered += "%d.g%s" % (pieces["distance"], pieces["short"]) 359 | if pieces["dirty"]: 360 | rendered += ".dirty" 361 | else: 362 | # exception #1 363 | rendered = "0+untagged.%d.g%s" % (pieces["distance"], pieces["short"]) 364 | if pieces["dirty"]: 365 | rendered += ".dirty" 366 | return rendered 367 | 368 | 369 | def render_pep440_pre(pieces): 370 | """TAG[.post0.devDISTANCE] -- No -dirty. 371 | 372 | Exceptions: 373 | 1: no tags. 0.post0.devDISTANCE 374 | """ 375 | if pieces["closest-tag"]: 376 | rendered = pieces["closest-tag"] 377 | if pieces["distance"]: 378 | rendered += ".post0.dev%d" % pieces["distance"] 379 | else: 380 | # exception #1 381 | rendered = "0.post0.dev%d" % pieces["distance"] 382 | return rendered 383 | 384 | 385 | def render_pep440_post(pieces): 386 | """TAG[.postDISTANCE[.dev0]+gHEX] . 387 | 388 | The ".dev0" means dirty. Note that .dev0 sorts backwards 389 | (a dirty tree will appear "older" than the corresponding clean one), 390 | but you shouldn't be releasing software with -dirty anyways. 391 | 392 | Exceptions: 393 | 1: no tags. 0.postDISTANCE[.dev0] 394 | """ 395 | if pieces["closest-tag"]: 396 | rendered = pieces["closest-tag"] 397 | if pieces["distance"] or pieces["dirty"]: 398 | rendered += ".post%d" % pieces["distance"] 399 | if pieces["dirty"]: 400 | rendered += ".dev0" 401 | rendered += plus_or_dot(pieces) 402 | rendered += "g%s" % pieces["short"] 403 | else: 404 | # exception #1 405 | rendered = "0.post%d" % pieces["distance"] 406 | if pieces["dirty"]: 407 | rendered += ".dev0" 408 | rendered += "+g%s" % pieces["short"] 409 | return rendered 410 | 411 | 412 | def render_pep440_old(pieces): 413 | """TAG[.postDISTANCE[.dev0]] . 414 | 415 | The ".dev0" means dirty. 416 | 417 | Exceptions: 418 | 1: no tags. 
0.postDISTANCE[.dev0] 419 | """ 420 | if pieces["closest-tag"]: 421 | rendered = pieces["closest-tag"] 422 | if pieces["distance"] or pieces["dirty"]: 423 | rendered += ".post%d" % pieces["distance"] 424 | if pieces["dirty"]: 425 | rendered += ".dev0" 426 | else: 427 | # exception #1 428 | rendered = "0.post%d" % pieces["distance"] 429 | if pieces["dirty"]: 430 | rendered += ".dev0" 431 | return rendered 432 | 433 | 434 | def render_git_describe(pieces): 435 | """TAG[-DISTANCE-gHEX][-dirty]. 436 | 437 | Like 'git describe --tags --dirty --always'. 438 | 439 | Exceptions: 440 | 1: no tags. HEX[-dirty] (note: no 'g' prefix) 441 | """ 442 | if pieces["closest-tag"]: 443 | rendered = pieces["closest-tag"] 444 | if pieces["distance"]: 445 | rendered += "-%d-g%s" % (pieces["distance"], pieces["short"]) 446 | else: 447 | # exception #1 448 | rendered = pieces["short"] 449 | if pieces["dirty"]: 450 | rendered += "-dirty" 451 | return rendered 452 | 453 | 454 | def render_git_describe_long(pieces): 455 | """TAG-DISTANCE-gHEX[-dirty]. 456 | 457 | Like 'git describe --tags --dirty --always -long'. 458 | The distance/hash is unconditional. 459 | 460 | Exceptions: 461 | 1: no tags. 
HEX[-dirty] (note: no 'g' prefix) 462 | """ 463 | if pieces["closest-tag"]: 464 | rendered = pieces["closest-tag"] 465 | rendered += "-%d-g%s" % (pieces["distance"], pieces["short"]) 466 | else: 467 | # exception #1 468 | rendered = pieces["short"] 469 | if pieces["dirty"]: 470 | rendered += "-dirty" 471 | return rendered 472 | 473 | 474 | def render(pieces, style): 475 | """Render the given version pieces into the requested style.""" 476 | if pieces["error"]: 477 | return { 478 | "version": "unknown", 479 | "full-revisionid": pieces.get("long"), 480 | "dirty": None, 481 | "error": pieces["error"], 482 | "date": None, 483 | } 484 | 485 | if not style or style == "default": 486 | style = "pep440" # the default 487 | 488 | if style == "pep440": 489 | rendered = render_pep440(pieces) 490 | elif style == "pep440-pre": 491 | rendered = render_pep440_pre(pieces) 492 | elif style == "pep440-post": 493 | rendered = render_pep440_post(pieces) 494 | elif style == "pep440-old": 495 | rendered = render_pep440_old(pieces) 496 | elif style == "git-describe": 497 | rendered = render_git_describe(pieces) 498 | elif style == "git-describe-long": 499 | rendered = render_git_describe_long(pieces) 500 | else: 501 | raise ValueError("unknown style '%s'" % style) 502 | 503 | return { 504 | "version": rendered, 505 | "full-revisionid": pieces["long"], 506 | "dirty": pieces["dirty"], 507 | "error": None, 508 | "date": pieces.get("date"), 509 | } 510 | 511 | 512 | def get_versions(): 513 | """Get version information or return default if unable to do so.""" 514 | # I am in _version.py, which lives at ROOT/VERSIONFILE_SOURCE. If we have 515 | # __file__, we can work backwards from there to the root. Some 516 | # py2exe/bbfreeze/non-CPython implementations don't do __file__, in which 517 | # case we can only use expanded keywords. 
518 | 519 | cfg = get_config() 520 | verbose = cfg.verbose 521 | 522 | try: 523 | return git_versions_from_keywords(get_keywords(), cfg.tag_prefix, verbose) 524 | except NotThisMethod: 525 | pass 526 | 527 | try: 528 | root = os.path.realpath(__file__) 529 | # versionfile_source is the relative path from the top of the source 530 | # tree (where the .git directory might live) to this file. Invert 531 | # this to find the root from __file__. 532 | for i in cfg.versionfile_source.split("/"): 533 | root = os.path.dirname(root) 534 | except NameError: 535 | return { 536 | "version": "0+unknown", 537 | "full-revisionid": None, 538 | "dirty": None, 539 | "error": "unable to find root of source tree", 540 | "date": None, 541 | } 542 | 543 | try: 544 | pieces = git_pieces_from_vcs(cfg.tag_prefix, root, verbose) 545 | return render(pieces, cfg.style) 546 | except NotThisMethod: 547 | pass 548 | 549 | try: 550 | if cfg.parentdir_prefix: 551 | return versions_from_parentdir(cfg.parentdir_prefix, root, verbose) 552 | except NotThisMethod: 553 | pass 554 | 555 | return { 556 | "version": "0+unknown", 557 | "full-revisionid": None, 558 | "dirty": None, 559 | "error": "unable to compute version", 560 | "date": None, 561 | } 562 | -------------------------------------------------------------------------------- /dask_ctl/cli.py: -------------------------------------------------------------------------------- 1 | from time import sleep 2 | import sys 3 | import warnings 4 | 5 | import click 6 | from rich import box 7 | from rich.console import Console 8 | from rich.table import Table 9 | from rich.syntax import Syntax 10 | from rich.progress import Progress, BarColumn 11 | 12 | from . import __version__ 13 | from .utils import loop 14 | from .discovery import ( 15 | discover_cluster_names, 16 | list_discovery_methods, 17 | ) 18 | from .lifecycle import create_cluster, get_cluster, delete_cluster, get_snippet 19 | from .renderables import generate_table 20 | 21 | from . 
import config # noqa 22 | 23 | console = Console() 24 | 25 | # Only show warnings from dask_ctl 26 | warnings.filterwarnings("ignore", module="^((?!dask_ctl).)*$") 27 | # Customize warning output on the CLI 28 | warnings.showwarning = lambda msg, *_: console.print(f":warning: {msg}", style="yellow") 29 | 30 | 31 | def autocomplete_cluster_names(ctx, args, incomplete): 32 | async def _autocomplete_cluster_names(): 33 | return [ 34 | cluster 35 | async for cluster, _ in discover_cluster_names() 36 | if incomplete in cluster 37 | ] 38 | 39 | return loop.run_sync(_autocomplete_cluster_names) 40 | 41 | 42 | @click.command( 43 | context_settings=dict( 44 | ignore_unknown_options=True, 45 | allow_extra_args=True, 46 | ) 47 | ) 48 | def cli(): 49 | """[deprecated] use `dask cluster` instead of `daskctl`.""" 50 | 51 | click.echo( 52 | "The command `daskctl` has been deprecated, please use `dask cluster` instead." 53 | ) 54 | sys.exit(1) 55 | 56 | 57 | @click.group() 58 | def cluster(): 59 | """Manage dask clusters. 60 | 61 | Create, List, Scale and Delete your dask clusters with dask-ctl. 62 | 63 | See https://dask-ctl.readthedocs.io/en/latest/cli.html for more info. 64 | """ 65 | pass 66 | 67 | 68 | @cluster.command() 69 | @click.option("-f", "--spec-file-path") 70 | def create(spec_file_path): 71 | """Create a Dask cluster from a spec file.""" 72 | 73 | try: 74 | cluster = create_cluster(spec_file_path) 75 | except Exception: 76 | click.echo("Failed to create cluster.") 77 | raise click.Abort() 78 | else: 79 | click.echo(f"Created cluster {cluster.name}.") 80 | 81 | 82 | @cluster.command() 83 | @click.argument("discovery", type=str, required=False) 84 | def list(discovery=None): 85 | """List Dask clusters. 86 | 87 | DISCOVERY can be optionally set to restrict which discovery method to use. 88 | Run `dask cluster discovery list` for all available options. 
89 | """ 90 | 91 | async def _list(): 92 | with console.status("[bold green]Discovering clusters...") as status: 93 | table = await generate_table( 94 | discovery=discovery, status=status, console=console 95 | ) 96 | 97 | console.print(table) 98 | 99 | loop.run_sync(_list) 100 | 101 | 102 | @cluster.command() 103 | @click.argument("name", shell_complete=autocomplete_cluster_names) 104 | @click.argument("n-workers", type=int) 105 | def scale(name, n_workers): 106 | """Scale a Dask cluster. 107 | 108 | NAME is the name of the cluster to scale. 109 | Run `dask cluster list` for all available options. 110 | 111 | N_WORKERS is the number of workers to scale to. 112 | 113 | """ 114 | 115 | try: 116 | with Progress( 117 | "[progress.description]{task.description}", 118 | BarColumn(), 119 | "[progress.percentage]{task.fields[workers]}/{task.fields[n_workers]}", 120 | transient=True, 121 | ) as progress: 122 | scale_task = progress.add_task( 123 | "[blue]Preparing to scale...", start=False, workers="..", n_workers=".." 
124 | ) 125 | cluster = get_cluster(name) 126 | start_workers = len(cluster.scheduler_info["workers"]) 127 | diff_workers = n_workers - start_workers 128 | 129 | if diff_workers != 0: 130 | progress.update( 131 | scale_task, 132 | workers=start_workers, 133 | n_workers=n_workers, 134 | total=abs(diff_workers), 135 | ) 136 | if diff_workers > 0: 137 | progress.update(scale_task, description="[green]Adding workers...") 138 | elif diff_workers < 0: 139 | progress.update(scale_task, description="[red]Removing workers...") 140 | progress.start_task(scale_task) 141 | 142 | cluster.scale(n_workers) 143 | 144 | while len(cluster.scheduler_info["workers"]) != n_workers: 145 | sleep(0.1) 146 | progress.update( 147 | scale_task, 148 | completed=abs( 149 | len(cluster.scheduler_info["workers"]) - start_workers 150 | ), 151 | workers=len(cluster.scheduler_info["workers"]), 152 | ) 153 | 154 | progress.update(scale_task, completed=diff_workers) 155 | progress.console.print( 156 | f"Scaled cluster [blue]{name}[/blue] to {n_workers} workers." 157 | ) 158 | else: 159 | progress.console.print( 160 | f"Cluster [blue]{name}[/blue] already at {n_workers}, nothing to do." 161 | ) 162 | 163 | except Exception as e: 164 | console.print(e) 165 | raise click.Abort() 166 | 167 | 168 | @cluster.command() 169 | @click.argument("name", shell_complete=autocomplete_cluster_names) 170 | def delete( 171 | name, 172 | ): 173 | """Delete a Dask cluster. 174 | 175 | NAME is the name of the cluster to delete. 176 | Run `dask cluster list` for all available options. 177 | 178 | """ 179 | try: 180 | delete_cluster(name) 181 | except Exception as e: 182 | click.echo(e) 183 | raise click.Abort() 184 | else: 185 | click.echo(f"Deleted cluster {name}.") 186 | 187 | 188 | @cluster.command() 189 | @click.argument("name", shell_complete=autocomplete_cluster_names) 190 | def snippet( 191 | name, 192 | ): 193 | """Get code snippet for connecting to a cluster. 
194 | 195 | NAME is the name of the cluster to get a snippet for. 196 | Run `dask cluster list` for all available options. 197 | 198 | """ 199 | try: 200 | snip = get_snippet(name) 201 | snip = Syntax( 202 | snip, 203 | "python", 204 | theme="ansi_dark", # Uses existing terminal theme 205 | background_color="default", # Don't change background color 206 | ) 207 | except Exception as e: 208 | click.echo(e) 209 | else: 210 | console.print(snip) 211 | 212 | 213 | @cluster.group() 214 | def discovery(): 215 | """Cluster discovery subcommands.""" 216 | pass 217 | 218 | 219 | @discovery.command(name="list") 220 | def list_discovery(): 221 | """List installed discovery methods. 222 | 223 | Dask clusters can be created by many different packages. Each package has the option 224 | to register a method to discover clusters it creates. This command lists all discovery 225 | methods registered on your system. 226 | 227 | """ 228 | 229 | async def _list_discovery(): 230 | table = Table(box=box.SIMPLE) 231 | table.add_column("Name", style="cyan", no_wrap=True) 232 | table.add_column("Package", justify="right", style="magenta") 233 | table.add_column("Version", style="green") 234 | table.add_column("Path", style="yellow") 235 | table.add_column("Enabled", justify="right", style="green") 236 | 237 | for method_name, method in list_discovery_methods().items(): 238 | table.add_row( 239 | method_name, 240 | method["package"], 241 | method["version"], 242 | method["path"], 243 | ":heavy_check_mark:" if method["enabled"] else ":cross_mark:", 244 | ) 245 | console.print(table) 246 | 247 | loop.run_sync(_list_discovery) 248 | 249 | 250 | @discovery.command(name="enable") 251 | @click.argument("name") 252 | def enable_discovery(name): 253 | """Enable a discovery method.""" 254 | console.print( 255 | "To enable discovery methods please update your configuration.\n" 256 | "See " 257 | ) 258 | 259 | 260 | @discovery.command(name="disable") 261 | @click.argument("name") 262 | def 
disable_discovery(name): 263 | """Disable a discovery method.""" 264 | console.print( 265 | "To disable discovery methods please update your configuration.\n" 266 | "See " 267 | ) 268 | 269 | 270 | @cluster.command() 271 | def version(): 272 | """Show the dask-ctl version.""" 273 | click.echo(f"dask-ctl: {__version__}") 274 | 275 | 276 | def daskctl(): 277 | cli() 278 | 279 | 280 | if __name__ == "__main__": 281 | daskctl() 282 | -------------------------------------------------------------------------------- /dask_ctl/config.py: -------------------------------------------------------------------------------- 1 | import os.path 2 | import yaml 3 | 4 | import dask.config 5 | 6 | fn = os.path.join(os.path.dirname(__file__), "ctl.yaml") 7 | dask.config.ensure_file(source=fn) 8 | 9 | with open(fn) as f: 10 | defaults = yaml.safe_load(f) 11 | 12 | dask.config.update_defaults(defaults) 13 | -------------------------------------------------------------------------------- /dask_ctl/ctl-schema.yaml: -------------------------------------------------------------------------------- 1 | properties: 2 | 3 | ctl: 4 | type: object 5 | properties: 6 | 7 | disable_discovery: 8 | type: list 9 | items: 10 | type: string 11 | description: | 12 | Discovery methods to disable when discovering clusters. 
13 | -------------------------------------------------------------------------------- /dask_ctl/ctl.yaml: -------------------------------------------------------------------------------- 1 | ctl: 2 | disable_discovery: [] 3 | cluster-spec: null 4 | -------------------------------------------------------------------------------- /dask_ctl/discovery.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | from typing import Callable, Dict, AsyncIterator, Tuple 3 | from contextlib import suppress 4 | import pkg_resources 5 | import warnings 6 | 7 | import dask.config 8 | from distributed.deploy.spec import SpecCluster 9 | 10 | from .utils import AsyncTimedIterable 11 | 12 | 13 | def list_discovery_methods() -> Dict[str, Callable]: 14 | """Lists registered discovery methods. 15 | 16 | Dask cluster discovery methods are registered via the ``dask_cluster_discovery`` entrypoint. 17 | This function lists all methods registered via that entrypoint. 18 | 19 | Returns 20 | ------- 21 | dict 22 | A mapping of discovery methods containing the functions themselves and metadata about 23 | where they came from. 
24 | 
 25 |     Examples
 26 |     --------
 27 |     >>> list_discovery_methods()  # doctest: +SKIP
 28 |     {'proxycluster': {
 29 |         'discover': ,
 30 |         'package': 'dask-ctl',
 31 |         'version': '',
 32 |         'path': ''}
 33 |     }
 34 |     >>> list(list_discovery_methods())  # doctest: +SKIP
 35 |     ['proxycluster']
 36 | 
 37 |     """
 38 |     discovery_methods = {}
 39 |     for ep in pkg_resources.iter_entry_points(group="dask_cluster_discovery"):
 40 |         with suppress(AttributeError, ImportError):
 41 |             discovery_methods.update(
 42 |                 {
 43 |                     ep.name: {
 44 |                         "discover": ep.load(),
 45 |                         "package": ep.dist.key,
 46 |                         "version": ep.dist.version,
 47 |                         "path": ep.dist.location,
 48 |                         "enabled": (
 49 |                             not dask.config.get("ctl.disable_discovery")
 50 |                             or ep.name not in dask.config.get("ctl.disable_discovery")
 51 |                         ),
 52 |                     }
 53 |                 }
 54 |             )
 55 |     return discovery_methods
 56 | 
 57 | 
 58 | async def discover_cluster_names(
 59 |     discovery: str = None,
 60 | ) -> AsyncIterator[Tuple[str, Callable]]:
 61 |     """Generator to discover cluster names.
 62 | 
 63 |     Cluster discovery methods are asynchronous. This async generator iterates through
 64 |     each discovery method and then iterates through each cluster name discovered.
 65 | 
 66 |     Can also be restricted to a specific discovery method.
 67 | 
 68 |     Parameters
 69 |     ----------
 70 |     discovery
 71 |         Discovery method to use, as listed in :func:`list_discovery_methods`.
 72 |         Default is ``None`` which uses all discovery methods.
 73 | 
 74 |     Yields
 75 |     -------
 76 |     tuple
 77 |         Each tuple contains the name of the cluster and a class which can be used to represent it.
78 | 79 | Examples 80 | -------- 81 | >>> from dask.distributed import LocalCluster # doctest: +SKIP 82 | >>> cluster = LocalCluster(scheduler_port=8786) # doctest: +SKIP 83 | >>> [name async for name in discover_cluster_names()] # doctest: +SKIP 84 | [('proxycluster-8786', dask_ctl.proxy.ProxyCluster)] 85 | 86 | """ 87 | discovery_methods = list_discovery_methods() 88 | for discovery_method in discovery_methods: 89 | try: 90 | if discovery_methods[discovery_method]["enabled"] and ( 91 | discovery is None or discovery == discovery_method 92 | ): 93 | try: 94 | async for cluster_name, cluster_class in AsyncTimedIterable( 95 | discovery_methods[discovery_method]["discover"](), 5 96 | ): 97 | yield (cluster_name, cluster_class) 98 | if discovery is not None: 99 | return 100 | except asyncio.TimeoutError: 101 | warnings.warn( 102 | f"Cluster discovery for {discovery_method} timed out." 103 | ) 104 | except ( 105 | Exception 106 | ) as e: # We are calling code that is out of our control here, so handling broad exceptions 107 | if discovery is None: 108 | warnings.warn(f"Cluster discovery for {discovery_method} failed.") 109 | else: 110 | raise e 111 | 112 | 113 | async def discover_clusters(discovery=None) -> AsyncIterator[SpecCluster]: 114 | """Generator to discover clusters. 115 | 116 | This generator takes the names and classes output from :func:`discover_cluster_names` 117 | and constructs the cluster object using the `cls.from_name(name)` classmethod. 118 | 119 | Can also be restricted to a specific disovery method. 120 | 121 | Parameters 122 | ---------- 123 | discovery 124 | Discovery method to use, as listed in :func:`list_discovery_methods`. 125 | Default is ``None`` which uses all discovery methods. 126 | 127 | Yields 128 | ------- 129 | Cluster 130 | Cluster manager classes for each discovered cluster. 
131 | 132 | Examples 133 | -------- 134 | >>> from dask.distributed import LocalCluster # doctest: +SKIP 135 | >>> cluster = LocalCluster(scheduler_port=8786) # doctest: +SKIP 136 | >>> [name async for name in discover_clusters()] # doctest: +SKIP 137 | [ProxyCluster(proxycluster-8786, 'tcp://localhost:8786', workers=4, threads=12, memory=17.18 GB)] 138 | 139 | """ 140 | async for cluster_name, cluster_class in discover_cluster_names(discovery): 141 | with suppress(Exception): 142 | yield cluster_class.from_name(cluster_name) 143 | -------------------------------------------------------------------------------- /dask_ctl/exceptions.py: -------------------------------------------------------------------------------- 1 | class DaskClusterConfigNotFound(FileNotFoundError): 2 | """Unable to find the Dask cluster config.""" 3 | -------------------------------------------------------------------------------- /dask_ctl/lifecycle.py: -------------------------------------------------------------------------------- 1 | import importlib 2 | from typing import List 3 | 4 | import dask.config 5 | from dask.widgets import get_template 6 | from dask.utils import typename 7 | from distributed.deploy import LocalCluster 8 | from distributed.deploy.cluster import Cluster 9 | from .discovery import discover_cluster_names, discover_clusters 10 | from .spec import load_spec 11 | from .utils import loop 12 | from .exceptions import DaskClusterConfigNotFound 13 | 14 | 15 | def create_cluster( 16 | spec_path: str = None, 17 | local_fallback: bool = False, 18 | asynchronous: bool = False, 19 | ) -> Cluster: 20 | """Create a cluster from a spec file. 21 | 22 | Parameters 23 | ---------- 24 | spec_path 25 | Path to a cluster spec file. Defaults to ``dask-cluster.yaml``. 26 | local_fallback 27 | Create a LocalCluster if spec file not found. 28 | asynchronous 29 | Start the cluster in asynchronous mode 30 | 31 | Returns 32 | ------- 33 | Cluster 34 | Cluster manager representing the spec. 
35 | 36 | Examples 37 | -------- 38 | With the spec: 39 | 40 | .. code-block:: yaml 41 | 42 | # /path/to/spec.yaml 43 | version: 1 44 | module: "dask.distributed" 45 | class: "LocalCluster" 46 | 47 | >>> create_cluster("/path/to/spec.yaml") # doctest: +SKIP 48 | LocalCluster(b3973c71, 'tcp://127.0.0.1:8786', workers=4, threads=12, memory=17.18 GB) 49 | 50 | """ 51 | spec_path = ( 52 | dask.config.get("ctl.cluster-spec", None, override_with=spec_path) 53 | or "dask-cluster.yaml" 54 | ) 55 | 56 | async def _create_cluster(): 57 | try: 58 | cm_module, cm_class, args, kwargs = load_spec(spec_path) 59 | except FileNotFoundError as e: 60 | if local_fallback: 61 | return LocalCluster(asynchronous=asynchronous) 62 | else: 63 | raise DaskClusterConfigNotFound(f"Unable to find {spec_path}") from e 64 | module = importlib.import_module(cm_module) 65 | cluster_manager = getattr(module, cm_class) 66 | 67 | kwargs = {key.replace("-", "_"): entry for key, entry in kwargs.items()} 68 | 69 | cluster = cluster_manager(*args, **kwargs, asynchronous=asynchronous) 70 | cluster.shutdown_on_close = False 71 | return cluster 72 | 73 | return loop.run_sync(_create_cluster) 74 | 75 | 76 | def list_clusters() -> List[Cluster]: 77 | """List all clusters. 78 | 79 | Discover clusters and return a list of cluster managers representing each one. 80 | 81 | Returns 82 | ------- 83 | list 84 | List of cluster manager classes for each discovered cluster. 
85 | 86 | Examples 87 | -------- 88 | >>> from dask.distributed import LocalCluster # doctest: +SKIP 89 | >>> cluster = LocalCluster(scheduler_port=8786) # doctest: +SKIP 90 | >>> list_clusters() # doctest: +SKIP 91 | [ProxyCluster(proxycluster-8786, 'tcp://localhost:8786', workers=4, threads=12, memory=17.18 GB)] 92 | 93 | """ 94 | 95 | async def _list_clusters(): 96 | clusters = [] 97 | async for cluster in discover_clusters(): 98 | clusters.append(cluster) 99 | return clusters 100 | 101 | return loop.run_sync(_list_clusters) 102 | 103 | 104 | def get_cluster(name: str, asynchronous=False) -> Cluster: 105 | """Get a cluster by name. 106 | 107 | Parameters 108 | ---------- 109 | name 110 | Name of cluster to get a cluster manager for. 111 | asynchronous 112 | Return an awaitable instead of starting a loop. 113 | 114 | Returns 115 | ------- 116 | Cluster 117 | Cluster manager representing the named cluster. 118 | 119 | Examples 120 | -------- 121 | >>> from dask.distributed import LocalCluster # doctest: +SKIP 122 | >>> cluster = LocalCluster(scheduler_port=8786) # doctest: +SKIP 123 | >>> get_cluster("proxycluster-8786") # doctest: +SKIP 124 | ProxyCluster(proxycluster-8786, 'tcp://localhost:8786', workers=4, threads=12, memory=17.18 GB) 125 | 126 | """ 127 | 128 | async def _get_cluster(): 129 | async for cluster_name, cluster_class in discover_cluster_names(): 130 | if cluster_name == name: 131 | return cluster_class.from_name(name) 132 | raise RuntimeError("No such cluster %s", name) 133 | 134 | if asynchronous: 135 | return _get_cluster() 136 | else: 137 | return loop.run_sync(_get_cluster) 138 | 139 | 140 | def get_snippet(name: str) -> str: 141 | """Get a code snippet for connecting to a cluster. 142 | 143 | Parameters 144 | ---------- 145 | name 146 | Name of cluster to get a snippet for. 147 | 148 | Returns 149 | ------- 150 | str 151 | Code snippet. 
152 | 153 | Examples 154 | -------- 155 | >>> from dask.distributed import LocalCluster # doctest: +SKIP 156 | >>> cluster = LocalCluster(scheduler_port=8786) # doctest: +SKIP 157 | >>> get_snippet("proxycluster-8786") # doctest: +SKIP 158 | from dask.distributed import Client 159 | from dask_ctl.proxy import ProxyCluster 160 | 161 | cluster = ProxyCluster.from_name("proxycluster-8786") 162 | client = Client(cluster) 163 | 164 | """ 165 | 166 | cluster = get_cluster(name) 167 | try: 168 | return cluster.get_snippet() 169 | except AttributeError: 170 | *module, cm = typename(type(cluster)).split(".") 171 | module = ".".join(module) 172 | return get_template("snippet.py.j2").render( 173 | module=module, cm=cm, name=name, cluster=cluster 174 | ) 175 | 176 | 177 | def scale_cluster(name: str, n_workers: int) -> None: 178 | """Scale a cluster by name. 179 | 180 | Constructs a cluster manager for the named cluster and calls 181 | ``.scale(n_workers)`` on it. 182 | 183 | Parameters 184 | ---------- 185 | name 186 | Name of cluster to scale. 187 | n_workers 188 | Number of workers to scale to 189 | 190 | Examples 191 | -------- 192 | >>> scale_cluster("mycluster", 10) # doctest: +SKIP 193 | 194 | """ 195 | 196 | return get_cluster(name).scale(n_workers) 197 | 198 | 199 | def delete_cluster(name: str) -> None: 200 | """Close a cluster by name. 201 | 202 | Constructs a cluster manager for the named cluster and calls 203 | ``.close()`` on it. 204 | 205 | Parameters 206 | ---------- 207 | name 208 | Name of cluster to close. 
209 | 210 | Examples 211 | -------- 212 | >>> delete_cluster("mycluster") # doctest: +SKIP 213 | 214 | """ 215 | 216 | return get_cluster(name).close() 217 | -------------------------------------------------------------------------------- /dask_ctl/proxy.py: -------------------------------------------------------------------------------- 1 | from typing import Callable, AsyncIterator, Tuple 2 | import asyncio 3 | import contextlib 4 | 5 | # import psutil 6 | 7 | from distributed.deploy.cluster import Cluster 8 | from distributed.core import rpc, Status 9 | from distributed.client import Client 10 | from distributed.utils import LoopRunner 11 | 12 | 13 | def gen_name(port): 14 | return f"proxycluster-{port}" 15 | 16 | 17 | class ProxyCluster(Cluster): 18 | """A representation of a cluster with a locally running scheduler. 19 | 20 | If a Dask Scheduler is running locally it is generally assumed that the process is tightly 21 | coupled to a parent process and therefore the cluster manager type cannot be reconstructed. 22 | The ProxyCluster object allows you limited interactivity with a local cluster in the same way 23 | you would with a regular cluster, allowing you to retrieve logs, get stats, etc. 24 | 25 | """ 26 | 27 | @classmethod 28 | def from_name( 29 | cls, name: str, loop: asyncio.BaseEventLoop = None, asynchronous: bool = False 30 | ): 31 | """Get instance of ``ProxyCluster`` by name. 32 | 33 | Parameters 34 | ---------- 35 | name 36 | Name of cluster to get ``ProxyCluster`` for. Has the format ``proxycluster-{port}``. 37 | loop (optional) 38 | Existing event loop to use. 39 | asynchronous (optional) 40 | Start asynchronously. Default ``False``. 41 | 42 | Returns 43 | ------- 44 | ProxyCluster 45 | Instance of ProxyCluster. 
46 | 47 | Examples 48 | -------- 49 | >>> from dask.distributed import LocalCluster # doctest: +SKIP 50 | >>> cluster = LocalCluster(scheduler_port=8786) # doctest: +SKIP 51 | >>> ProxyCluster.from_name("proxycluster-8786") # doctest: +SKIP 52 | ProxyCluster(proxycluster-8786, 'tcp://localhost:8786', workers=4, threads=12, memory=17.18 GB) 53 | 54 | """ 55 | port = name.split("-")[-1] 56 | return cls.from_port(port, loop=loop, asynchronous=asynchronous) 57 | 58 | @classmethod 59 | def from_port( 60 | cls, port: int, loop: asyncio.BaseEventLoop = None, asynchronous: bool = False 61 | ): 62 | """Get instance of ``ProxyCluster`` by port. 63 | 64 | Parameters 65 | ---------- 66 | port 67 | Localhost port of cluster to get ``ProxyCluster`` for. 68 | loop (optional) 69 | Existing event loop to use. 70 | asynchronous (optional) 71 | Start asynchronously. Default ``False``. 72 | 73 | Returns 74 | ------- 75 | ProxyCluster 76 | Instance of ProxyCluster. 77 | 78 | Examples 79 | -------- 80 | >>> from dask.distributed import LocalCluster # doctest: +SKIP 81 | >>> cluster = LocalCluster(scheduler_port=81234) # doctest: +SKIP 82 | >>> ProxyCluster.from_port(81234) # doctest: +SKIP 83 | ProxyCluster(proxycluster-81234, 'tcp://localhost:81234', workers=4, threads=12, memory=17.18 GB) 84 | 85 | """ 86 | cluster = cls(asynchronous=asynchronous) 87 | cluster.name = gen_name(port) 88 | 89 | cluster.scheduler_comm = rpc(f"tcp://localhost:{port}") 90 | 91 | cluster._loop_runner = LoopRunner(loop=loop, asynchronous=asynchronous) 92 | cluster.loop = cluster._loop_runner.loop 93 | if not asynchronous: 94 | cluster._loop_runner.start() 95 | 96 | cluster.status = Status.starting 97 | cluster.sync(cluster._start) 98 | return cluster 99 | 100 | def scale(self, *args, **kwargs): 101 | raise TypeError("Scaling of ProxyCluster objects is not supported.") 102 | 103 | def close(self, *args, **kwargs): 104 | raise TypeError("Closing of ProxyCluster objects is not supported.") 105 | 106 | def 
__await__(self):
107 |         async def _():
108 |             return self
109 | 
110 |         return _().__await__()
111 | 
112 | 
113 | async def discover() -> AsyncIterator[Tuple[str, Callable]]:
114 |     """Discover proxy clusters.
115 | 
116 |     If a Dask Scheduler is running locally it is generally assumed that the process is tightly
117 |     coupled to a parent process and therefore the cluster manager type cannot be reconstructed.
118 |     Instead we can construct ProxyCluster objects which allow limited interactivity with a local cluster in the same way
119 |     you would with a regular cluster, allowing you to retrieve logs, get stats, etc.
120 | 
121 |     This discovery works by checking all local services listening on ports, then attempting to connect a
122 |     :class:`dask.distributed.Client` to it. If it is successful we assume it is a cluster that we can represent.
123 | 
124 |     Notes
125 |     -----
126 |     Listing open ports is not possible as a regular user on macOS. So discovery must be run as root. For regular users
127 |     we still check the default ``8786`` port for a scheduler.
128 | 
129 |     Yields
130 |     -------
131 |     tuple
132 |         Each tuple contains the name of the cluster and a class which can be used to represent it.
133 | 134 | Examples 135 | -------- 136 | >>> from dask.distributed import LocalCluster # doctest: +SKIP 137 | >>> cluster = LocalCluster(scheduler_port=8786) # doctest: +SKIP 138 | >>> [name async for name in discover()] # doctest: +SKIP 139 | [('proxycluster-8786', dask_ctl.proxy.ProxyCluster)] 140 | 141 | """ 142 | open_ports = {8786} 143 | 144 | # with contextlib.suppress( 145 | # psutil.AccessDenied 146 | # ): # On macOS this needs to be run as root 147 | # connections = psutil.net_connections() 148 | # for connection in connections: 149 | # if ( 150 | # connection.status == "LISTEN" 151 | # and connection.family.name == "AF_INET" 152 | # and connection.laddr.port not in open_ports 153 | # ): 154 | # open_ports.add(connection.laddr.port) 155 | 156 | async def try_connect(port): 157 | with contextlib.suppress(OSError, asyncio.TimeoutError): 158 | async with Client( 159 | f"tcp://localhost:{port}", 160 | asynchronous=True, 161 | timeout=1, # Minimum of 1 for Windows 162 | ): 163 | return port 164 | return 165 | 166 | for port in await asyncio.gather(*[try_connect(port) for port in open_ports]): 167 | if port: 168 | yield ( 169 | gen_name(port), 170 | ProxyCluster, 171 | ) 172 | -------------------------------------------------------------------------------- /dask_ctl/renderables.py: -------------------------------------------------------------------------------- 1 | import datetime 2 | 3 | import click 4 | from rich import box 5 | from rich.table import Table 6 | from rich.text import Text 7 | 8 | from dask.utils import format_bytes, format_time_ago, typename 9 | from distributed.core import Status 10 | 11 | from .discovery import ( 12 | discover_clusters, 13 | list_discovery_methods, 14 | ) 15 | 16 | 17 | def get_created(cluster): 18 | try: 19 | return format_time_ago( 20 | datetime.datetime.fromtimestamp(float(cluster.scheduler_info["started"])) 21 | ) 22 | except KeyError: 23 | return "Unknown" 24 | 25 | 26 | def get_status(cluster): 27 | cluster_status = 
cluster.status.name.title() 28 | if cluster.status == Status.created: 29 | cluster_status = Text(cluster_status, style="yellow") 30 | elif cluster.status == Status.running: 31 | cluster_status = Text(cluster_status, style="green") 32 | else: 33 | cluster_status = Text(cluster_status, style="red") 34 | return cluster_status 35 | 36 | 37 | def get_workers(cluster): 38 | try: 39 | return cluster.scheduler_info["workers"].values() 40 | except KeyError: 41 | return [] 42 | 43 | 44 | async def generate_table(discovery=None, status=None, console=None): 45 | table = Table(box=box.SIMPLE) 46 | table.add_column("Name", style="cyan", no_wrap=True) 47 | table.add_column("Address") 48 | table.add_column("Type") 49 | table.add_column("Discovery") 50 | table.add_column("Workers") 51 | table.add_column("Threads") 52 | table.add_column("Memory") 53 | table.add_column("Created") 54 | table.add_column("Status") 55 | 56 | discovery_methods = list_discovery_methods() 57 | for discovery_method in discovery_methods: 58 | if status: 59 | status.update(f"[bold green]Discovering {discovery_method}s...") 60 | if discovery_methods[discovery_method]["enabled"] and ( 61 | discovery is None or discovery == discovery_method 62 | ): 63 | try: 64 | async for cluster in discover_clusters(discovery=discovery_method): 65 | workers = get_workers(cluster) 66 | table.add_row( 67 | cluster.name, 68 | cluster.scheduler_address, 69 | typename(type(cluster)), 70 | discovery_method, 71 | str(len(workers)), 72 | str(sum(w["nthreads"] for w in workers)), 73 | format_bytes(sum([w["memory_limit"] for w in workers])), 74 | get_created(cluster), 75 | get_status(cluster), 76 | ) 77 | except Exception as e: 78 | if console: 79 | if discovery is None: 80 | console.print( 81 | f":warning: Discovery {discovery_method} failed. 
" 82 | f"Run `dask cluster list {discovery_method}` for more info.", 83 | style="yellow", 84 | ) 85 | else: 86 | console.print_exception(show_locals=True) 87 | raise click.Abort() 88 | else: 89 | raise e 90 | return table 91 | -------------------------------------------------------------------------------- /dask_ctl/spec.py: -------------------------------------------------------------------------------- 1 | import yaml 2 | 3 | 4 | def load_spec(path): 5 | with open(path, "r") as fh: 6 | spec = yaml.safe_load(fh.read()) 7 | 8 | version = spec["version"] 9 | if version == 1: 10 | return load_v1_spec(spec) 11 | else: 12 | raise KeyError(f"No such dask cluster spec version {version}") 13 | 14 | 15 | def load_v1_spec(spec): 16 | cm_module = spec["module"] 17 | cm_class = spec["class"] 18 | args = spec.get("args", []) 19 | kwargs = spec.get("kwargs", {}) 20 | return cm_module, cm_class, args, kwargs 21 | -------------------------------------------------------------------------------- /dask_ctl/tests/conftest.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | 3 | import pytest 4 | import os 5 | 6 | 7 | @pytest.fixture 8 | def simple_spec_path(): 9 | return os.path.join( 10 | os.path.dirname(os.path.abspath(__file__)), "specs", "simple.yaml" 11 | ) 12 | 13 | 14 | @pytest.fixture 15 | def event_loop(): 16 | yield asyncio.get_event_loop() 17 | 18 | 19 | def pytest_sessionfinish(session, exitstatus): 20 | asyncio.get_event_loop().close() 21 | -------------------------------------------------------------------------------- /dask_ctl/tests/specs/simple.yaml: -------------------------------------------------------------------------------- 1 | version: 1 2 | module: "dask.distributed" 3 | class: "LocalCluster" 4 | args: [] 5 | kwargs: {} 6 | -------------------------------------------------------------------------------- /dask_ctl/tests/test_cli.py: 
-------------------------------------------------------------------------------- 1 | from distributed import LocalCluster 2 | from subprocess import check_output 3 | from dask_ctl.cli import autocomplete_cluster_names 4 | 5 | 6 | def test_list_discovery(): 7 | assert b"proxycluster" in check_output(["dask", "cluster", "discovery", "list"]) 8 | 9 | 10 | def test_list(): 11 | with LocalCluster(name="testcluster", scheduler_port=8786) as _: 12 | output = check_output(["dask", "cluster", "list"]) 13 | 14 | assert b"Name" in output 15 | 16 | # Rich truncates output on small displays and check_output seems to set a small 17 | # terminal size so these strings are truncated. 18 | # TODO Figure out how to set the terminal size in check_output. 19 | # 20 | # assert b"dask_ctl.proxy.ProxyCluster" in output 21 | # assert b"Running" in output 22 | 23 | 24 | def test_create(simple_spec_path): 25 | output = check_output(["dask", "cluster", "create", "-f", simple_spec_path]) 26 | assert b"Created" in output 27 | 28 | 29 | def test_autocompletion(): 30 | with LocalCluster(scheduler_port=8786) as _: 31 | assert len(autocomplete_cluster_names(None, None, "")) == 1 32 | assert len(autocomplete_cluster_names(None, None, "proxy")) == 1 33 | assert len(autocomplete_cluster_names(None, None, "local")) == 0 34 | -------------------------------------------------------------------------------- /dask_ctl/tests/test_dask_ctl.py: -------------------------------------------------------------------------------- 1 | def test_import(): 2 | import dask_ctl 3 | 4 | assert dask_ctl 5 | -------------------------------------------------------------------------------- /dask_ctl/tests/test_discovery.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | from typing import AsyncIterator 4 | 5 | from dask.distributed import LocalCluster 6 | from dask_ctl.discovery import ( 7 | discover_cluster_names, 8 | discover_clusters, 9 | list_discovery_methods, 
# Port used for every LocalCluster in this module so that proxy discovery
# (which the assertions below show embeds the port in discovered names)
# finds the same cluster each test starts.
SCHEDULER_PORT = 8786


def test_discovery_methods():
    # The proxycluster discovery method is registered by this package's own
    # entrypoints, so it must always be listed.
    assert "proxycluster" in list_discovery_methods()


@pytest.mark.asyncio
async def test_discover_cluster_names():
    # Even before any cluster exists the function should hand back an async
    # iterator rather than a concrete collection.
    assert isinstance(discover_cluster_names(), AsyncIterator)
    async with LocalCluster(scheduler_port=SCHEDULER_PORT, asynchronous=True) as _:
        # With exactly one cluster running, exactly one name is discovered.
        count = 0
        async for _ in discover_cluster_names():
            count += 1
        assert count == 1


@pytest.mark.asyncio
async def test_cluster_client():
    from dask.distributed import Client

    # Sanity check: a Client can connect directly to the scheduler port used
    # throughout these tests, and reports that same port in its address.
    async with LocalCluster(scheduler_port=SCHEDULER_PORT, asynchronous=True) as _:
        async with Client(
            f"tcp://localhost:{SCHEDULER_PORT}", asynchronous=True, timeout=1
        ) as client:
            assert int(client.scheduler.address.split(":")[-1]) == SCHEDULER_PORT


@pytest.mark.asyncio
async def test_discovery_list():
    from dask_ctl.proxy import discover

    # discover() yields (name, cluster_class) pairs; each discovered name is
    # expected to contain the scheduler port (e.g. "proxycluster-8786").
    async with LocalCluster(scheduler_port=SCHEDULER_PORT, asynchronous=True) as _:
        discovered_cluster_names = [name async for name, _ in discover()]
        assert discovered_cluster_names
        for name in discovered_cluster_names:
            assert str(SCHEDULER_PORT) in name


@pytest.mark.xfail(reason="Proxy cluster discovery not working")
@pytest.mark.asyncio
async def test_discover_clusters():
    # End-to-end check that a running cluster is reconstructed (not just
    # named) by discover_clusters(). Currently expected to fail — see reason.
    async with LocalCluster(
        scheduler_port=SCHEDULER_PORT, asynchronous=True
    ) as cluster:
        discovered_clusters = [cluster async for cluster in discover_clusters()]
        assert discovered_clusters
        assert cluster.name in [c.name for c in discovered_clusters]
class _AsyncTimedIterator:
    """Async iterator applying a per-item timeout to a wrapped iterator.

    Each call to ``__anext__`` must complete within ``timeout`` seconds or
    ``asyncio.TimeoutError`` propagates to the consumer.
    """

    # NOTE: the upstream recipe this is based on also declared a "_sentinel"
    # slot, but nothing here ever assigns or reads it, so it has been removed.
    __slots__ = ("_iterator", "_timeout")

    def __init__(self, iterable, timeout):
        self._iterator = iterable.__aiter__()
        self._timeout = timeout

    async def __anext__(self):
        # wait_for both enforces the timeout and transparently re-raises
        # StopAsyncIteration when the underlying iterator is exhausted.
        return await asyncio.wait_for(self._iterator.__anext__(), self._timeout)


class AsyncTimedIterable:
    """Wrapper for an AsyncIterable that adds a timeout

    See https://stackoverflow.com/a/50245879/1003288

    """

    __slots__ = ("_factory",)

    def __init__(self, iterable, timeout=None):
        # Defer iterator construction so each ``async for`` over this
        # wrapper gets a fresh timed iterator.
        self._factory = lambda: _AsyncTimedIterator(iterable, timeout)

    def __aiter__(self):
        return self._factory()
autofunction:: dask_ctl.lifecycle.scale_cluster 19 | 20 | .. autofunction:: dask_ctl.lifecycle.delete_cluster 21 | 22 | .. autofunction:: dask_ctl.lifecycle.list_clusters 23 | 24 | .. autofunction:: dask_ctl.lifecycle.get_snippet 25 | 26 | Discovery 27 | --------- 28 | 29 | .. autosummary:: 30 | dask_ctl.discovery.discover_cluster_names 31 | dask_ctl.discovery.discover_clusters 32 | dask_ctl.discovery.list_discovery_methods 33 | 34 | .. autofunction:: dask_ctl.discovery.discover_cluster_names 35 | 36 | .. autofunction:: dask_ctl.discovery.discover_clusters 37 | 38 | .. autofunction:: dask_ctl.discovery.list_discovery_methods 39 | -------------------------------------------------------------------------------- /docs/cli.rst: -------------------------------------------------------------------------------- 1 | Command line utility 2 | ==================== 3 | 4 | .. click:: dask_ctl.cli:cluster 5 | :prog: dask cluster 6 | :nested: full 7 | -------------------------------------------------------------------------------- /docs/conf.py: -------------------------------------------------------------------------------- 1 | # Configuration file for the Sphinx documentation builder. 2 | # 3 | # This file only contains a selection of the most common options. For a full 4 | # list see the documentation: 5 | # https://www.sphinx-doc.org/en/master/usage/configuration.html 6 | 7 | from datetime import datetime 8 | from dask_ctl import __version__ 9 | 10 | # -- Path setup -------------------------------------------------------------- 11 | 12 | 13 | # -- Project information ----------------------------------------------------- 14 | 15 | project = "dask-ctl" 16 | copyright = f"{datetime.now().year}, Dask Developers" 17 | author = "Dask Developers" 18 | 19 | 20 | # -- General configuration --------------------------------------------------- 21 | 22 | # Add any Sphinx extension module names here, as strings. 
They can be 23 | # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom 24 | # ones. 25 | extensions = [ 26 | "sphinx.ext.intersphinx", 27 | "sphinx.ext.autodoc", 28 | "sphinx_click", 29 | "sphinx.ext.autosummary", 30 | "dask_sphinx_theme.ext.dask_config_sphinx_ext", 31 | ] 32 | 33 | # Add any paths that contain templates here, relative to this directory. 34 | templates_path = ["_templates"] 35 | 36 | # List of patterns, relative to source directory, that match files and 37 | # directories to ignore when looking for source files. 38 | # This pattern also affects html_static_path and html_extra_path. 39 | exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"] 40 | 41 | 42 | version = __version__ 43 | # The full version, including alpha/beta/rc tags. 44 | release = __version__ 45 | 46 | # -- Options for HTML output ------------------------------------------------- 47 | 48 | # The theme to use for HTML and HTML Help pages. See the documentation for 49 | # a list of builtin themes. 50 | # 51 | 52 | html_theme = "dask_sphinx_theme" 53 | 54 | # Add any paths that contain custom static files (such as style sheets) here, 55 | # relative to this directory. They are copied after the builtin static files, 56 | # so a file named "default.css" will overwrite the builtin "default.css". 57 | html_static_path = ["_static"] 58 | 59 | intersphinx_mapping = { 60 | "python": ("https://docs.python.org/3", None), 61 | "dask": ("https://docs.dask.org/en/latest/", None), 62 | "distributed": ("https://distributed.dask.org/en/latest/", None), 63 | } 64 | -------------------------------------------------------------------------------- /docs/configuration.rst: -------------------------------------------------------------------------------- 1 | Configuration 2 | ============= 3 | 4 | ``dask-ctl`` uses the `Dask configuration system `_ which allows you to set config items 5 | via YAML, environment variables or in code. 6 | 7 | Reference 8 | --------- 9 | 10 | 11 | .. 
To develop the textual UI first install the developer dependencies::
code-block:: bash 59 | 60 | $ pip install -r docs/requirements_docs.txt 61 | $ sphinx-autobuild docs docs/_build/html 62 | # Visit http://localhost:8000 in your browser 63 | -------------------------------------------------------------------------------- /docs/index.rst: -------------------------------------------------------------------------------- 1 | dask-ctl 2 | ======== 3 | 4 | 5 | .. image:: https://img.shields.io/pypi/v/dask-ctl 6 | :target: https://pypi.org/project/dask-ctl/ 7 | :alt: PyPI 8 | .. image:: https://github.com/dask-contrib/dask-ctl/workflows/Tests/badge.svg 9 | :target: https://github.com/dask-contrib/dask-ctl/actions?query=workflow%3ATests 10 | :alt: GitHub Actions - CI 11 | .. image:: https://github.com/dask-contrib/dask-ctl/workflows/Linting/badge.svg 12 | :target: https://github.com/dask-contrib/dask-ctl/actions?query=workflow%3ALinting 13 | :alt: GitHub Actions - pre-commit 14 | .. image:: https://img.shields.io/codecov/c/gh/dask-contrib/dask-ctl 15 | :target: https://app.codecov.io/gh/dask-contrib/dask-ctl 16 | :alt: Codecov 17 | 18 | A set of tools to provide a control plane for managing the lifecycle of Dask clusters. 19 | 20 | .. code-block:: bash 21 | 22 | $ dask cluster list 23 | NAME ADDRESS TYPE WORKERS THREADS MEMORY CREATED STATUS 24 | proxycluster-8786 tcp://localhost:8786 ProxyCluster 4 12 17.18 GB Just now Running 25 | 26 | Installation 27 | ------------ 28 | 29 | .. code-block:: bash 30 | 31 | pip install dask-ctl 32 | # or 33 | conda install -c conda-forge dask-ctl 34 | 35 | 36 | .. toctree:: 37 | :maxdepth: 2 38 | :caption: Usage 39 | 40 | cli.rst 41 | api.rst 42 | configuration.rst 43 | spec.rst 44 | 45 | .. toctree:: 46 | :maxdepth: 2 47 | :caption: Integrating 48 | 49 | integrating.rst 50 | 51 | .. 
toctree:: 52 | :maxdepth: 2 53 | :caption: Development 54 | 55 | contributing.rst 56 | releasing.rst 57 | 58 | -------------------------------------------------------------------------------- /docs/integrating.rst: -------------------------------------------------------------------------------- 1 | Adding dask-ctl support to your project 2 | ======================================= 3 | 4 | ``dask-ctl`` is an opt-in utility package for managing Dask cluster lifecycles. 5 | 6 | In order for a cluster manager to work with ``dask-ctl`` it must have the following things: 7 | 8 | - A discovery method registered as an entrypoint 9 | - A ``from_name`` class method which reconstructs the cluster manager 10 | 11 | Discovery 12 | --------- 13 | 14 | In order for cluster to be visible in ``dask-ctl`` the cluster manager which created it must implement a ``discovery`` 15 | method and register it as an ``dask_cluster_discovery`` entrypoint. 16 | 17 | .. code-block:: python 18 | 19 | import setuptools 20 | 21 | setuptools.setup( 22 | ... 23 | entry_points=""" 24 | [dask_cluster_discovery] 25 | mycluster=my_package.discovery:discover 26 | """, 27 | ) 28 | 29 | This method must be an async generator which returns tuples of the cluster name and a class which can be used to reconstruct it. 30 | 31 | .. code-block:: python 32 | 33 | from typing import Callable, AsyncIterator, Tuple 34 | 35 | from my_package.cluster import MyClusterManager # A cluster manager class which supports the ``from_name`` classmethod 36 | 37 | 38 | async def discover() -> AsyncIterator[Tuple[str, Callable]]: 39 | 40 | # Discover cluster names in whatever way is appropriate 41 | cluster_names = [...] 42 | 43 | for cluster_name in cluster_names: 44 | yield (cluster_name, MyClusterManager) 45 | 46 | From name 47 | --------- 48 | 49 | When ``dask-ctl`` discovers clusters it iterates through all the registered discovery methods and constructs a list of 50 | name/cluster manager pairs. 
Cluster managers are constructed from name during almost all ``dask-ctl`` operations. Even calling ``dask cluster list`` on the CLI
will create all cluster managers in order to query information about them such as number of workers and resources via the scheduler comm.

Implementation of this method will vary drastically depending on how the cluster manager is implemented. But the interface should take the
``name`` argument and construct a cluster manager class and return it.
        cluster_names = [
            cluster async for cluster in discover_cluster_names(discovery=discovery)
        ]
        assert len(cluster_names) == 1
        discovered_name, discovered_class = cluster_names[0]
11 | set BUILDDIR=_build 12 | 13 | if "%1" == "" goto help 14 | 15 | %SPHINXBUILD% >NUL 2>NUL 16 | if errorlevel 9009 ( 17 | echo. 18 | echo.The 'sphinx-build' command was not found. Make sure you have Sphinx 19 | echo.installed, then set the SPHINXBUILD environment variable to point 20 | echo.to the full path of the 'sphinx-build' executable. Alternatively you 21 | echo.may add the Sphinx directory to PATH. 22 | echo. 23 | echo.If you don't have Sphinx installed, grab it from 24 | echo.http://sphinx-doc.org/ 25 | exit /b 1 26 | ) 27 | 28 | %SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% 29 | goto end 30 | 31 | :help 32 | %SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% 33 | 34 | :end 35 | popd 36 | -------------------------------------------------------------------------------- /docs/releasing.rst: -------------------------------------------------------------------------------- 1 | Releasing 2 | ========= 3 | 4 | Releases are published automatically when a tag is pushed to GitHub. 5 | 6 | .. 
code-block:: bash 7 | 8 | # Set next version number 9 | export RELEASE=x.x.x 10 | 11 | # Create tags 12 | git commit --allow-empty -m "Release $RELEASE" 13 | git tag -a $RELEASE -m "Version $RELEASE" 14 | 15 | # Push 16 | git push upstream --tags -------------------------------------------------------------------------------- /docs/requirements_docs.txt: -------------------------------------------------------------------------------- 1 | sphinx 2 | sphinx-autobuild 3 | myst-parser 4 | sphinx-click 5 | dask-sphinx-theme>=3.0.0 6 | sphinxcontrib-applehelp==1.0.4 7 | sphinxcontrib-devhelp==1.0.2 8 | sphinxcontrib-htmlhelp==2.0.1 9 | sphinxcontrib-qthelp==1.0.3 10 | sphinxcontrib-serializinghtml==1.1.5 11 | -------------------------------------------------------------------------------- /docs/spec.rst: -------------------------------------------------------------------------------- 1 | Cluster specs 2 | ============= 3 | 4 | ``dask-ctl`` can create Dask clusters for you from spec files. 5 | 6 | These files describe the Python cluster manager which should be used along with any arguments. 7 | 8 | .. code-block:: yaml 9 | 10 | # /path/to/spec.yaml 11 | version: 1 12 | module: "dask.distributed" 13 | class: "LocalCluster" 14 | args: [] 15 | kwargs: 16 | n_workers: 2 17 | threads_per_worker: 1 18 | memory_limit: '1GB' 19 | 20 | You can then create the cluster from the command line. 21 | 22 | .. code-block:: bash 23 | 24 | $ dask cluster create -f /path/to/spec.yaml 25 | 26 | Or using the Python API. 27 | 28 | .. code-block:: python 29 | 30 | from dask_ctl import create_cluster 31 | 32 | cluster = create_cluster("/path/to/spec.yaml") 33 | 34 | Both of these examples are equivalent to running the following Python code. 35 | 36 | .. 
code-block:: python 37 | 38 | from dask.distributed import LocalCluster 39 | 40 | cluster = LocalCluster(n_workers=2, threads_per_worker=1, memory_limit='1GB') 41 | -------------------------------------------------------------------------------- /requirements-test.txt: -------------------------------------------------------------------------------- 1 | pytest 2 | pytest-asyncio==0.15.1 3 | pytest-cov 4 | pytest-rerunfailures 5 | -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | click>=8.1.2 2 | dask>=2022.11.1 3 | distributed 4 | numpy 5 | rich 6 | tornado 7 | pyyaml 8 | -------------------------------------------------------------------------------- /setup.cfg: -------------------------------------------------------------------------------- 1 | [versioneer] 2 | VCS = git 3 | style = pep440 4 | versionfile_source = dask_ctl/_version.py 5 | versionfile_build = dask_ctl/_version.py 6 | tag_prefix = 7 | parentdir_prefix = 8 | 9 | [tool:pytest] 10 | addopts = --doctest-modules --ignore=docs 11 | 12 | [flake8] 13 | exclude = __init__.py,versioneer.py,_version.py 14 | max-line-length = 120 15 | # Aligned with black https://github.com/psf/black/blob/main/.flake8 16 | extend-ignore = E203, E266, E501 17 | ignore = 18 | E4, # Import formatting 19 | E731, # Assigning lambda expression 20 | W503, # line break before binary operator 21 | 22 | per-file-ignores = 23 | **/tests/*: 24 | # local variable is assigned to but never used 25 | F841, 26 | # Ambiguous variable name 27 | E741, 28 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | import setuptools 2 | import versioneer 3 | 4 | with open("README.rst", "r") as fh: 5 | long_description = fh.read() 6 | with open("requirements.txt", "r") as fh: 7 | requirements = 
[line.strip() for line in fh] 8 | 9 | setuptools.setup( 10 | name="dask-ctl", 11 | version=versioneer.get_version(), 12 | cmdclass=versioneer.get_cmdclass(), 13 | author="Jacob Tomlinson", 14 | author_email="jacob@tomlinson.email", 15 | description="A set of tools to provide a control plane for managing the lifecycle of Dask clusters.", 16 | long_description=long_description, 17 | long_description_content_type="text/x-rst", 18 | include_package_data=True, 19 | packages=setuptools.find_packages(), 20 | classifiers=[ 21 | "Programming Language :: Python :: 3", 22 | "License :: OSI Approved :: BSD License", 23 | "Operating System :: OS Independent", 24 | ], 25 | python_requires=">=3.9", 26 | install_requires=requirements, 27 | extras_require={"tui": ["textual==0.5.0"]}, 28 | entry_points=""" 29 | [console_scripts] 30 | daskctl=dask_ctl.cli:daskctl 31 | [dask_cluster_discovery] 32 | proxycluster=dask_ctl.proxy:discover 33 | [dask_cli] 34 | cluster=dask_ctl.cli:cluster 35 | """, 36 | ) 37 | -------------------------------------------------------------------------------- /versioneer.py: -------------------------------------------------------------------------------- 1 | # Version: 0.19 2 | 3 | """The Versioneer - like a rocketeer, but for versions. 4 | 5 | The Versioneer 6 | ============== 7 | 8 | * like a rocketeer, but for versions! 9 | * https://github.com/python-versioneer/python-versioneer 10 | * Brian Warner 11 | * License: Public Domain 12 | * Compatible with: Python 3.6, 3.7, 3.8, 3.9 and pypy3 13 | * [![Latest Version][pypi-image]][pypi-url] 14 | * [![Build Status][travis-image]][travis-url] 15 | 16 | This is a tool for managing a recorded version number in distutils-based 17 | python projects. The goal is to remove the tedious and error-prone "update 18 | the embedded version string" step from your release process. Making a new 19 | release should be as easy as recording a new tag in your version-control 20 | system, and maybe making new tarballs. 
21 | 22 | 23 | ## Quick Install 24 | 25 | * `pip install versioneer` to somewhere in your $PATH 26 | * add a `[versioneer]` section to your setup.cfg (see [Install](INSTALL.md)) 27 | * run `versioneer install` in your source tree, commit the results 28 | * Verify version information with `python setup.py version` 29 | 30 | ## Version Identifiers 31 | 32 | Source trees come from a variety of places: 33 | 34 | * a version-control system checkout (mostly used by developers) 35 | * a nightly tarball, produced by build automation 36 | * a snapshot tarball, produced by a web-based VCS browser, like github's 37 | "tarball from tag" feature 38 | * a release tarball, produced by "setup.py sdist", distributed through PyPI 39 | 40 | Within each source tree, the version identifier (either a string or a number, 41 | this tool is format-agnostic) can come from a variety of places: 42 | 43 | * ask the VCS tool itself, e.g. "git describe" (for checkouts), which knows 44 | about recent "tags" and an absolute revision-id 45 | * the name of the directory into which the tarball was unpacked 46 | * an expanded VCS keyword ($Id$, etc) 47 | * a `_version.py` created by some earlier build step 48 | 49 | For released software, the version identifier is closely related to a VCS 50 | tag. Some projects use tag names that include more than just the version 51 | string (e.g. "myproject-1.2" instead of just "1.2"), in which case the tool 52 | needs to strip the tag prefix to extract the version identifier. For 53 | unreleased software (between tags), the version identifier should provide 54 | enough information to help developers recreate the same tree, while also 55 | giving them an idea of roughly how old the tree is (after version 1.2, before 56 | version 1.3). 
Many VCS systems can report a description that captures this, 57 | for example `git describe --tags --dirty --always` reports things like 58 | "0.7-1-g574ab98-dirty" to indicate that the checkout is one revision past the 59 | 0.7 tag, has a unique revision id of "574ab98", and is "dirty" (it has 60 | uncommitted changes). 61 | 62 | The version identifier is used for multiple purposes: 63 | 64 | * to allow the module to self-identify its version: `myproject.__version__` 65 | * to choose a name and prefix for a 'setup.py sdist' tarball 66 | 67 | ## Theory of Operation 68 | 69 | Versioneer works by adding a special `_version.py` file into your source 70 | tree, where your `__init__.py` can import it. This `_version.py` knows how to 71 | dynamically ask the VCS tool for version information at import time. 72 | 73 | `_version.py` also contains `$Revision$` markers, and the installation 74 | process marks `_version.py` to have this marker rewritten with a tag name 75 | during the `git archive` command. As a result, generated tarballs will 76 | contain enough information to get the proper version. 77 | 78 | To allow `setup.py` to compute a version too, a `versioneer.py` is added to 79 | the top level of your source tree, next to `setup.py` and the `setup.cfg` 80 | that configures it. This overrides several distutils/setuptools commands to 81 | compute the version when invoked, and changes `setup.py build` and `setup.py 82 | sdist` to replace `_version.py` with a small static file that contains just 83 | the generated version data. 84 | 85 | ## Installation 86 | 87 | See [INSTALL.md](./INSTALL.md) for detailed installation instructions. 88 | 89 | ## Version-String Flavors 90 | 91 | Code which uses Versioneer can learn about its version string at runtime by 92 | importing `_version` from your main `__init__.py` file and running the 93 | `get_versions()` function. From the "outside" (e.g. 
in `setup.py`), you can 94 | import the top-level `versioneer.py` and run `get_versions()`. 95 | 96 | Both functions return a dictionary with different flavors of version 97 | information: 98 | 99 | * `['version']`: A condensed version string, rendered using the selected 100 | style. This is the most commonly used value for the project's version 101 | string. The default "pep440" style yields strings like `0.11`, 102 | `0.11+2.g1076c97`, or `0.11+2.g1076c97.dirty`. See the "Styles" section 103 | below for alternative styles. 104 | 105 | * `['full-revisionid']`: detailed revision identifier. For Git, this is the 106 | full SHA1 commit id, e.g. "1076c978a8d3cfc70f408fe5974aa6c092c949ac". 107 | 108 | * `['date']`: Date and time of the latest `HEAD` commit. For Git, it is the 109 | commit date in ISO 8601 format. This will be None if the date is not 110 | available. 111 | 112 | * `['dirty']`: a boolean, True if the tree has uncommitted changes. Note that 113 | this is only accurate if run in a VCS checkout, otherwise it is likely to 114 | be False or None 115 | 116 | * `['error']`: if the version string could not be computed, this will be set 117 | to a string describing the problem, otherwise it will be None. It may be 118 | useful to throw an exception in setup.py if this is set, to avoid e.g. 119 | creating tarballs with a version string of "unknown". 120 | 121 | Some variants are more useful than others. Including `full-revisionid` in a 122 | bug report should allow developers to reconstruct the exact code being tested 123 | (or indicate the presence of local changes that should be shared with the 124 | developers). `version` is suitable for display in an "about" box or a CLI 125 | `--version` output: it can be easily compared against release notes and lists 126 | of bugs fixed in various releases. 
127 | 128 | The installer adds the following text to your `__init__.py` to place a basic 129 | version in `YOURPROJECT.__version__`: 130 | 131 | from ._version import get_versions 132 | __version__ = get_versions()['version'] 133 | del get_versions 134 | 135 | ## Styles 136 | 137 | The setup.cfg `style=` configuration controls how the VCS information is 138 | rendered into a version string. 139 | 140 | The default style, "pep440", produces a PEP440-compliant string, equal to the 141 | un-prefixed tag name for actual releases, and containing an additional "local 142 | version" section with more detail for in-between builds. For Git, this is 143 | TAG[+DISTANCE.gHEX[.dirty]] , using information from `git describe --tags 144 | --dirty --always`. For example "0.11+2.g1076c97.dirty" indicates that the 145 | tree is like the "1076c97" commit but has uncommitted changes (".dirty"), and 146 | that this commit is two revisions ("+2") beyond the "0.11" tag. For released 147 | software (exactly equal to a known tag), the identifier will only contain the 148 | stripped tag, e.g. "0.11". 149 | 150 | Other styles are available. See [details.md](details.md) in the Versioneer 151 | source tree for descriptions. 152 | 153 | ## Debugging 154 | 155 | Versioneer tries to avoid fatal errors: if something goes wrong, it will tend 156 | to return a version of "0+unknown". To investigate the problem, run `setup.py 157 | version`, which will run the version-lookup code in a verbose mode, and will 158 | display the full contents of `get_versions()` (including the `error` string, 159 | which may help identify what went wrong). 160 | 161 | ## Known Limitations 162 | 163 | Some situations are known to cause problems for Versioneer. This details the 164 | most significant ones. More can be found on Github 165 | [issues page](https://github.com/python-versioneer/python-versioneer/issues). 

### Subprojects

Versioneer has limited support for source trees in which `setup.py` is not in
the root directory (e.g. `setup.py` and `.git/` are *not* siblings). There are
two common reasons why `setup.py` might not be in the root:

* Source trees which contain multiple subprojects, such as
  [Buildbot](https://github.com/buildbot/buildbot), which contains both
  "master" and "slave" subprojects, each with their own `setup.py`,
  `setup.cfg`, and `tox.ini`. Projects like these produce multiple PyPI
  distributions (and upload multiple independently-installable tarballs).
* Source trees whose main purpose is to contain a C library, but which also
  provide bindings to Python (and perhaps other languages) in subdirectories.

Versioneer will look for `.git` in parent directories, and most operations
should get the right version string. However `pip` and `setuptools` have bugs
and implementation details which frequently cause `pip install .` from a
subproject directory to fail to find a correct version string (so it usually
defaults to `0+unknown`).

`pip install --editable .` should work correctly. `setup.py install` might
work too.

Pip-8.1.1 is known to have this problem, but hopefully it will get fixed in
some later version.

[Bug #38](https://github.com/python-versioneer/python-versioneer/issues/38) is tracking
this issue. The discussion in
[PR #61](https://github.com/python-versioneer/python-versioneer/pull/61) describes the
issue from the Versioneer side in more detail.
[pip PR#3176](https://github.com/pypa/pip/pull/3176) and
[pip PR#3615](https://github.com/pypa/pip/pull/3615) contain work to improve
pip to let Versioneer work correctly.
200 | 201 | Versioneer-0.16 and earlier only looked for a `.git` directory next to the 202 | `setup.cfg`, so subprojects were completely unsupported with those releases. 203 | 204 | ### Editable installs with setuptools <= 18.5 205 | 206 | `setup.py develop` and `pip install --editable .` allow you to install a 207 | project into a virtualenv once, then continue editing the source code (and 208 | test) without re-installing after every change. 209 | 210 | "Entry-point scripts" (`setup(entry_points={"console_scripts": ..})`) are a 211 | convenient way to specify executable scripts that should be installed along 212 | with the python package. 213 | 214 | These both work as expected when using modern setuptools. When using 215 | setuptools-18.5 or earlier, however, certain operations will cause 216 | `pkg_resources.DistributionNotFound` errors when running the entrypoint 217 | script, which must be resolved by re-installing the package. This happens 218 | when the install happens with one version, then the egg_info data is 219 | regenerated while a different version is checked out. Many setup.py commands 220 | cause egg_info to be rebuilt (including `sdist`, `wheel`, and installing into 221 | a different virtualenv), so this can be surprising. 222 | 223 | [Bug #83](https://github.com/python-versioneer/python-versioneer/issues/83) describes 224 | this one, but upgrading to a newer version of setuptools should probably 225 | resolve it. 226 | 227 | 228 | ## Updating Versioneer 229 | 230 | To upgrade your project to a new release of Versioneer, do the following: 231 | 232 | * install the new Versioneer (`pip install -U versioneer` or equivalent) 233 | * edit `setup.cfg`, if necessary, to include any new configuration settings 234 | indicated by the release notes. See [UPGRADING](./UPGRADING.md) for details. 
235 | * re-run `versioneer install` in your source tree, to replace 236 | `SRC/_version.py` 237 | * commit any changed files 238 | 239 | ## Future Directions 240 | 241 | This tool is designed to make it easily extended to other version-control 242 | systems: all VCS-specific components are in separate directories like 243 | src/git/ . The top-level `versioneer.py` script is assembled from these 244 | components by running make-versioneer.py . In the future, make-versioneer.py 245 | will take a VCS name as an argument, and will construct a version of 246 | `versioneer.py` that is specific to the given VCS. It might also take the 247 | configuration arguments that are currently provided manually during 248 | installation by editing setup.py . Alternatively, it might go the other 249 | direction and include code from all supported VCS systems, reducing the 250 | number of intermediate scripts. 251 | 252 | ## Similar projects 253 | 254 | * [setuptools_scm](https://github.com/pypa/setuptools_scm/) - a non-vendored build-time 255 | dependency 256 | * [minver](https://github.com/jbweston/miniver) - a lightweight reimplementation of 257 | versioneer 258 | 259 | ## License 260 | 261 | To make Versioneer easier to embed, all its code is dedicated to the public 262 | domain. The `_version.py` that it creates is also in the public domain. 263 | Specifically, both are released under the Creative Commons "Public Domain 264 | Dedication" license (CC0-1.0), as described in 265 | https://creativecommons.org/publicdomain/zero/1.0/ . 

[pypi-image]: https://img.shields.io/pypi/v/versioneer.svg
[pypi-url]: https://pypi.python.org/pypi/versioneer/
[travis-image]:
https://img.shields.io/travis/com/python-versioneer/python-versioneer.svg
[travis-url]: https://travis-ci.com/github/python-versioneer/python-versioneer

"""

import configparser
import errno
import json
import os
import re
import subprocess
import sys


class VersioneerConfig:
    """Container for Versioneer configuration parameters."""


def get_root():
    """Get the project root directory.

    We require that all commands are run from the project root, i.e. the
    directory that contains setup.py, setup.cfg, and versioneer.py .

    Returns the absolute path of the root, or raises VersioneerBadRootError
    (defined later in this file) if no setup.py/versioneer.py can be located.
    """
    # First candidate: the current working directory, the normal case for
    # 'python setup.py COMMAND' run from the project root.
    root = os.path.realpath(os.path.abspath(os.getcwd()))
    setup_py = os.path.join(root, "setup.py")
    versioneer_py = os.path.join(root, "versioneer.py")
    if not (os.path.exists(setup_py) or os.path.exists(versioneer_py)):
        # allow 'python path/to/setup.py COMMAND'
        root = os.path.dirname(os.path.realpath(os.path.abspath(sys.argv[0])))
        setup_py = os.path.join(root, "setup.py")
        versioneer_py = os.path.join(root, "versioneer.py")
    if not (os.path.exists(setup_py) or os.path.exists(versioneer_py)):
        # NOTE(review): the first sentence below is garbled upstream text —
        # it presumably means "unable to *find* the project root directory".
        # Kept byte-identical here since it is a runtime string.
        err = (
            "Versioneer was unable to run the project root directory. "
            "Versioneer requires setup.py to be executed from "
            "its immediate directory (like 'python setup.py COMMAND'), "
            "or in a way that lets it use sys.argv[0] to find the root "
            "(like 'python path/to/setup.py COMMAND')."
        )
        raise VersioneerBadRootError(err)
    try:
        # Certain runtime workflows (setup.py install/develop in a setuptools
        # tree) execute all dependencies in a single python process, so
        # "versioneer" may be imported multiple times, and python's shared
        # module-import table will cache the first one. So we can't use
        # os.path.dirname(__file__), as that will find whichever
        # versioneer.py was first imported, even in later projects.
        me = os.path.realpath(os.path.abspath(__file__))
        me_dir = os.path.normcase(os.path.splitext(me)[0])
        vsr_dir = os.path.normcase(os.path.splitext(versioneer_py)[0])
        if me_dir != vsr_dir:
            # Warn (but do not fail) when the imported versioneer module is
            # not the one sitting next to this project's setup.py.
            print(
                "Warning: build in %s is using versioneer.py from %s"
                % (os.path.dirname(me), versioneer_py)
            )
    except NameError:
        # __file__ may be undefined (e.g. frozen/embedded interpreters);
        # the sanity check above is simply skipped in that case.
        pass
    return root


def get_config_from_root(root):
    """Read the project setup.cfg file to determine Versioneer config."""
    # This might raise EnvironmentError (if setup.cfg is missing), or
    # configparser.NoSectionError (if it lacks a [versioneer] section), or
    # configparser.NoOptionError (if it lacks "VCS="). See the docstring at
    # the top of versioneer.py for instructions on writing your setup.cfg .
    setup_cfg = os.path.join(root, "setup.cfg")
    parser = configparser.ConfigParser()
    with open(setup_cfg, "r") as f:
        parser.read_file(f)
    VCS = parser.get("versioneer", "VCS")  # mandatory

    def get(parser, name):
        # Helper: return the [versioneer] option's value, or None if absent.
        if parser.has_option("versioneer", name):
            return parser.get("versioneer", name)
        return None

    cfg = VersioneerConfig()
    cfg.VCS = VCS
    cfg.style = get(parser, "style") or ""
    cfg.versionfile_source = get(parser, "versionfile_source")
    cfg.versionfile_build = get(parser, "versionfile_build")
    cfg.tag_prefix = get(parser, "tag_prefix")
    # setup.cfg may spell an empty tag prefix as a quoted empty string
    # (tag_prefix = '' or tag_prefix = ""); normalize that to "".
    if cfg.tag_prefix in ("''", '""'):
        cfg.tag_prefix = ""
    cfg.parentdir_prefix = get(parser, "parentdir_prefix")
    cfg.verbose = get(parser, "verbose")
    return cfg


class NotThisMethod(Exception):
    """Exception raised if a method is not valid for the current scenario."""


# these dictionaries contain VCS-specific tools
LONG_VERSION_PY = {}
HANDLERS = {}


def register_vcs_handler(vcs, method):  # decorator
    """Create decorator to mark a method as the handler of a VCS."""

    def decorate(f):
        """Store f in HANDLERS[vcs][method]."""
        if vcs not in HANDLERS:
            HANDLERS[vcs] = {}
        HANDLERS[vcs][method] = f
        return f

    return decorate


def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False, env=None):
    """Call the given command(s).

    Tries each name in `commands` until one can be launched; returns a
    (stdout, returncode) tuple, or (None, None)/(None, rc) on failure.
    """
    assert isinstance(commands, list)
    p = None
    for c in commands:
        try:
            dispcmd = str([c] + args)
            # remember shell=False, so use git.cmd on windows, not just git
            p = subprocess.Popen(
                [c] + args,
                cwd=cwd,
                env=env,
                stdout=subprocess.PIPE,
                stderr=(subprocess.PIPE if hide_stderr else None),
            )
            break
        except EnvironmentError:
            # py2-compatible exception capture; ENOENT means "command not
            # found" so the next candidate name is tried.
            e = sys.exc_info()[1]
            if e.errno == errno.ENOENT:
                continue
            if verbose:
                print("unable to run %s" % dispcmd)
                print(e)
            return None, None
    else:
        # for/else: the loop never hit "break" — none of the candidate
        # command names could be launched at all.
        if verbose:
            print("unable to find command, tried %s" % (commands,))
        return None, None
    stdout = p.communicate()[0].strip().decode()
    if p.returncode != 0:
        if verbose:
            print("unable to run %s (error)" % dispcmd)
            print("stdout was %s" % stdout)
        return None, p.returncode
    return stdout, p.returncode


# Template for the generated _version.py. This is a %-substitution format
# string (note the %(DOLLAR)s / %(STYLE)s placeholders and the doubled %%
# escapes that render as literal % in the output file); its contents are
# written verbatim into the project and must not be edited casually.
LONG_VERSION_PY[
    "git"
] = r'''
# This file helps to compute a version number in source trees obtained from
# git-archive tarball (such as those provided by githubs download-from-tag
# feature). Distribution tarballs (built by setup.py sdist) and build
# directories (produced by setup.py build) will contain a much shorter file
# that just contains the computed version number.

# This file is released into the public domain. Generated by
# versioneer-0.19 (https://github.com/python-versioneer/python-versioneer)

"""Git implementation of _version.py."""

import errno
import os
import re
import subprocess
import sys


def get_keywords():
    """Get the keywords needed to look up the version information."""
    # these strings will be replaced by git during git-archive.
    # setup.py/versioneer.py will grep for the variable names, so they must
    # each be defined on a line of their own. _version.py will just call
    # get_keywords().
447 | git_refnames = "%(DOLLAR)sFormat:%%d%(DOLLAR)s" 448 | git_full = "%(DOLLAR)sFormat:%%H%(DOLLAR)s" 449 | git_date = "%(DOLLAR)sFormat:%%ci%(DOLLAR)s" 450 | keywords = {"refnames": git_refnames, "full": git_full, "date": git_date} 451 | return keywords 452 | 453 | 454 | class VersioneerConfig: 455 | """Container for Versioneer configuration parameters.""" 456 | 457 | 458 | def get_config(): 459 | """Create, populate and return the VersioneerConfig() object.""" 460 | # these strings are filled in when 'setup.py versioneer' creates 461 | # _version.py 462 | cfg = VersioneerConfig() 463 | cfg.VCS = "git" 464 | cfg.style = "%(STYLE)s" 465 | cfg.tag_prefix = "%(TAG_PREFIX)s" 466 | cfg.parentdir_prefix = "%(PARENTDIR_PREFIX)s" 467 | cfg.versionfile_source = "%(VERSIONFILE_SOURCE)s" 468 | cfg.verbose = False 469 | return cfg 470 | 471 | 472 | class NotThisMethod(Exception): 473 | """Exception raised if a method is not valid for the current scenario.""" 474 | 475 | 476 | LONG_VERSION_PY = {} 477 | HANDLERS = {} 478 | 479 | 480 | def register_vcs_handler(vcs, method): # decorator 481 | """Create decorator to mark a method as the handler of a VCS.""" 482 | def decorate(f): 483 | """Store f in HANDLERS[vcs][method].""" 484 | if vcs not in HANDLERS: 485 | HANDLERS[vcs] = {} 486 | HANDLERS[vcs][method] = f 487 | return f 488 | return decorate 489 | 490 | 491 | def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False, 492 | env=None): 493 | """Call the given command(s).""" 494 | assert isinstance(commands, list) 495 | p = None 496 | for c in commands: 497 | try: 498 | dispcmd = str([c] + args) 499 | # remember shell=False, so use git.cmd on windows, not just git 500 | p = subprocess.Popen([c] + args, cwd=cwd, env=env, 501 | stdout=subprocess.PIPE, 502 | stderr=(subprocess.PIPE if hide_stderr 503 | else None)) 504 | break 505 | except EnvironmentError: 506 | e = sys.exc_info()[1] 507 | if e.errno == errno.ENOENT: 508 | continue 509 | if verbose: 510 | 
print("unable to run %%s" %% dispcmd) 511 | print(e) 512 | return None, None 513 | else: 514 | if verbose: 515 | print("unable to find command, tried %%s" %% (commands,)) 516 | return None, None 517 | stdout = p.communicate()[0].strip().decode() 518 | if p.returncode != 0: 519 | if verbose: 520 | print("unable to run %%s (error)" %% dispcmd) 521 | print("stdout was %%s" %% stdout) 522 | return None, p.returncode 523 | return stdout, p.returncode 524 | 525 | 526 | def versions_from_parentdir(parentdir_prefix, root, verbose): 527 | """Try to determine the version from the parent directory name. 528 | 529 | Source tarballs conventionally unpack into a directory that includes both 530 | the project name and a version string. We will also support searching up 531 | two directory levels for an appropriately named parent directory 532 | """ 533 | rootdirs = [] 534 | 535 | for i in range(3): 536 | dirname = os.path.basename(root) 537 | if dirname.startswith(parentdir_prefix): 538 | return {"version": dirname[len(parentdir_prefix):], 539 | "full-revisionid": None, 540 | "dirty": False, "error": None, "date": None} 541 | else: 542 | rootdirs.append(root) 543 | root = os.path.dirname(root) # up a level 544 | 545 | if verbose: 546 | print("Tried directories %%s but none started with prefix %%s" %% 547 | (str(rootdirs), parentdir_prefix)) 548 | raise NotThisMethod("rootdir doesn't start with parentdir_prefix") 549 | 550 | 551 | @register_vcs_handler("git", "get_keywords") 552 | def git_get_keywords(versionfile_abs): 553 | """Extract version information from the given file.""" 554 | # the code embedded in _version.py can just fetch the value of these 555 | # keywords. When used from setup.py, we don't want to import _version.py, 556 | # so we do it with a regexp instead. This function is not used from 557 | # _version.py. 
558 | keywords = {} 559 | try: 560 | f = open(versionfile_abs, "r") 561 | for line in f.readlines(): 562 | if line.strip().startswith("git_refnames ="): 563 | mo = re.search(r'=\s*"(.*)"', line) 564 | if mo: 565 | keywords["refnames"] = mo.group(1) 566 | if line.strip().startswith("git_full ="): 567 | mo = re.search(r'=\s*"(.*)"', line) 568 | if mo: 569 | keywords["full"] = mo.group(1) 570 | if line.strip().startswith("git_date ="): 571 | mo = re.search(r'=\s*"(.*)"', line) 572 | if mo: 573 | keywords["date"] = mo.group(1) 574 | f.close() 575 | except EnvironmentError: 576 | pass 577 | return keywords 578 | 579 | 580 | @register_vcs_handler("git", "keywords") 581 | def git_versions_from_keywords(keywords, tag_prefix, verbose): 582 | """Get version information from git keywords.""" 583 | if not keywords: 584 | raise NotThisMethod("no keywords at all, weird") 585 | date = keywords.get("date") 586 | if date is not None: 587 | # Use only the last line. Previous lines may contain GPG signature 588 | # information. 589 | date = date.splitlines()[-1] 590 | 591 | # git-2.2.0 added "%%cI", which expands to an ISO-8601 -compliant 592 | # datestamp. However we prefer "%%ci" (which expands to an "ISO-8601 593 | # -like" string, which we must then edit to make compliant), because 594 | # it's been around since git-1.5.3, and it's too difficult to 595 | # discover which version we're using, or to work around using an 596 | # older one. 597 | date = date.strip().replace(" ", "T", 1).replace(" ", "", 1) 598 | refnames = keywords["refnames"].strip() 599 | if refnames.startswith("$Format"): 600 | if verbose: 601 | print("keywords are unexpanded, not using") 602 | raise NotThisMethod("unexpanded keywords, not a git-archive tarball") 603 | refs = set([r.strip() for r in refnames.strip("()").split(",")]) 604 | # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of 605 | # just "foo-1.0". If we see a "tag: " prefix, prefer those. 
606 | TAG = "tag: " 607 | tags = set([r[len(TAG):] for r in refs if r.startswith(TAG)]) 608 | if not tags: 609 | # Either we're using git < 1.8.3, or there really are no tags. We use 610 | # a heuristic: assume all version tags have a digit. The old git %%d 611 | # expansion behaves like git log --decorate=short and strips out the 612 | # refs/heads/ and refs/tags/ prefixes that would let us distinguish 613 | # between branches and tags. By ignoring refnames without digits, we 614 | # filter out many common branch names like "release" and 615 | # "stabilization", as well as "HEAD" and "master". 616 | tags = set([r for r in refs if re.search(r'\d', r)]) 617 | if verbose: 618 | print("discarding '%%s', no digits" %% ",".join(refs - tags)) 619 | if verbose: 620 | print("likely tags: %%s" %% ",".join(sorted(tags))) 621 | for ref in sorted(tags): 622 | # sorting will prefer e.g. "2.0" over "2.0rc1" 623 | if ref.startswith(tag_prefix): 624 | r = ref[len(tag_prefix):] 625 | if verbose: 626 | print("picking %%s" %% r) 627 | return {"version": r, 628 | "full-revisionid": keywords["full"].strip(), 629 | "dirty": False, "error": None, 630 | "date": date} 631 | # no suitable tags, so version is "0+unknown", but full hex is still there 632 | if verbose: 633 | print("no suitable tags, using unknown + full revision id") 634 | return {"version": "0+unknown", 635 | "full-revisionid": keywords["full"].strip(), 636 | "dirty": False, "error": "no suitable tags", "date": None} 637 | 638 | 639 | @register_vcs_handler("git", "pieces_from_vcs") 640 | def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command): 641 | """Get version from 'git describe' in the root of the source tree. 642 | 643 | This only gets called if the git-archive 'subst' keywords were *not* 644 | expanded, and _version.py hasn't already been rewritten with a short 645 | version string, meaning we're inside a checked out source tree. 
646 | """ 647 | GITS = ["git"] 648 | if sys.platform == "win32": 649 | GITS = ["git.cmd", "git.exe"] 650 | 651 | out, rc = run_command(GITS, ["rev-parse", "--git-dir"], cwd=root, 652 | hide_stderr=True) 653 | if rc != 0: 654 | if verbose: 655 | print("Directory %%s not under git control" %% root) 656 | raise NotThisMethod("'git rev-parse --git-dir' returned error") 657 | 658 | # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty] 659 | # if there isn't one, this yields HEX[-dirty] (no NUM) 660 | describe_out, rc = run_command(GITS, ["describe", "--tags", "--dirty", 661 | "--always", "--long", 662 | "--match", "%%s*" %% tag_prefix], 663 | cwd=root) 664 | # --long was added in git-1.5.5 665 | if describe_out is None: 666 | raise NotThisMethod("'git describe' failed") 667 | describe_out = describe_out.strip() 668 | full_out, rc = run_command(GITS, ["rev-parse", "HEAD"], cwd=root) 669 | if full_out is None: 670 | raise NotThisMethod("'git rev-parse' failed") 671 | full_out = full_out.strip() 672 | 673 | pieces = {} 674 | pieces["long"] = full_out 675 | pieces["short"] = full_out[:7] # maybe improved later 676 | pieces["error"] = None 677 | 678 | # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty] 679 | # TAG might have hyphens. 680 | git_describe = describe_out 681 | 682 | # look for -dirty suffix 683 | dirty = git_describe.endswith("-dirty") 684 | pieces["dirty"] = dirty 685 | if dirty: 686 | git_describe = git_describe[:git_describe.rindex("-dirty")] 687 | 688 | # now we have TAG-NUM-gHEX or HEX 689 | 690 | if "-" in git_describe: 691 | # TAG-NUM-gHEX 692 | mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe) 693 | if not mo: 694 | # unparseable. Maybe git-describe is misbehaving? 
695 | pieces["error"] = ("unable to parse git-describe output: '%%s'" 696 | %% describe_out) 697 | return pieces 698 | 699 | # tag 700 | full_tag = mo.group(1) 701 | if not full_tag.startswith(tag_prefix): 702 | if verbose: 703 | fmt = "tag '%%s' doesn't start with prefix '%%s'" 704 | print(fmt %% (full_tag, tag_prefix)) 705 | pieces["error"] = ("tag '%%s' doesn't start with prefix '%%s'" 706 | %% (full_tag, tag_prefix)) 707 | return pieces 708 | pieces["closest-tag"] = full_tag[len(tag_prefix):] 709 | 710 | # distance: number of commits since tag 711 | pieces["distance"] = int(mo.group(2)) 712 | 713 | # commit: short hex revision ID 714 | pieces["short"] = mo.group(3) 715 | 716 | else: 717 | # HEX: no tags 718 | pieces["closest-tag"] = None 719 | count_out, rc = run_command(GITS, ["rev-list", "HEAD", "--count"], 720 | cwd=root) 721 | pieces["distance"] = int(count_out) # total number of commits 722 | 723 | # commit date: see ISO-8601 comment in git_versions_from_keywords() 724 | date = run_command(GITS, ["show", "-s", "--format=%%ci", "HEAD"], 725 | cwd=root)[0].strip() 726 | # Use only the last line. Previous lines may contain GPG signature 727 | # information. 728 | date = date.splitlines()[-1] 729 | pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1) 730 | 731 | return pieces 732 | 733 | 734 | def plus_or_dot(pieces): 735 | """Return a + if we don't already have one, else return a .""" 736 | if "+" in pieces.get("closest-tag", ""): 737 | return "." 738 | return "+" 739 | 740 | 741 | def render_pep440(pieces): 742 | """Build up version string, with post-release "local version identifier". 743 | 744 | Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you 745 | get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty 746 | 747 | Exceptions: 748 | 1: no tags. git_describe was just HEX. 
0+untagged.DISTANCE.gHEX[.dirty] 749 | """ 750 | if pieces["closest-tag"]: 751 | rendered = pieces["closest-tag"] 752 | if pieces["distance"] or pieces["dirty"]: 753 | rendered += plus_or_dot(pieces) 754 | rendered += "%%d.g%%s" %% (pieces["distance"], pieces["short"]) 755 | if pieces["dirty"]: 756 | rendered += ".dirty" 757 | else: 758 | # exception #1 759 | rendered = "0+untagged.%%d.g%%s" %% (pieces["distance"], 760 | pieces["short"]) 761 | if pieces["dirty"]: 762 | rendered += ".dirty" 763 | return rendered 764 | 765 | 766 | def render_pep440_pre(pieces): 767 | """TAG[.post0.devDISTANCE] -- No -dirty. 768 | 769 | Exceptions: 770 | 1: no tags. 0.post0.devDISTANCE 771 | """ 772 | if pieces["closest-tag"]: 773 | rendered = pieces["closest-tag"] 774 | if pieces["distance"]: 775 | rendered += ".post0.dev%%d" %% pieces["distance"] 776 | else: 777 | # exception #1 778 | rendered = "0.post0.dev%%d" %% pieces["distance"] 779 | return rendered 780 | 781 | 782 | def render_pep440_post(pieces): 783 | """TAG[.postDISTANCE[.dev0]+gHEX] . 784 | 785 | The ".dev0" means dirty. Note that .dev0 sorts backwards 786 | (a dirty tree will appear "older" than the corresponding clean one), 787 | but you shouldn't be releasing software with -dirty anyways. 788 | 789 | Exceptions: 790 | 1: no tags. 0.postDISTANCE[.dev0] 791 | """ 792 | if pieces["closest-tag"]: 793 | rendered = pieces["closest-tag"] 794 | if pieces["distance"] or pieces["dirty"]: 795 | rendered += ".post%%d" %% pieces["distance"] 796 | if pieces["dirty"]: 797 | rendered += ".dev0" 798 | rendered += plus_or_dot(pieces) 799 | rendered += "g%%s" %% pieces["short"] 800 | else: 801 | # exception #1 802 | rendered = "0.post%%d" %% pieces["distance"] 803 | if pieces["dirty"]: 804 | rendered += ".dev0" 805 | rendered += "+g%%s" %% pieces["short"] 806 | return rendered 807 | 808 | 809 | def render_pep440_old(pieces): 810 | """TAG[.postDISTANCE[.dev0]] . 811 | 812 | The ".dev0" means dirty. 
813 | 814 | Exceptions: 815 | 1: no tags. 0.postDISTANCE[.dev0] 816 | """ 817 | if pieces["closest-tag"]: 818 | rendered = pieces["closest-tag"] 819 | if pieces["distance"] or pieces["dirty"]: 820 | rendered += ".post%%d" %% pieces["distance"] 821 | if pieces["dirty"]: 822 | rendered += ".dev0" 823 | else: 824 | # exception #1 825 | rendered = "0.post%%d" %% pieces["distance"] 826 | if pieces["dirty"]: 827 | rendered += ".dev0" 828 | return rendered 829 | 830 | 831 | def render_git_describe(pieces): 832 | """TAG[-DISTANCE-gHEX][-dirty]. 833 | 834 | Like 'git describe --tags --dirty --always'. 835 | 836 | Exceptions: 837 | 1: no tags. HEX[-dirty] (note: no 'g' prefix) 838 | """ 839 | if pieces["closest-tag"]: 840 | rendered = pieces["closest-tag"] 841 | if pieces["distance"]: 842 | rendered += "-%%d-g%%s" %% (pieces["distance"], pieces["short"]) 843 | else: 844 | # exception #1 845 | rendered = pieces["short"] 846 | if pieces["dirty"]: 847 | rendered += "-dirty" 848 | return rendered 849 | 850 | 851 | def render_git_describe_long(pieces): 852 | """TAG-DISTANCE-gHEX[-dirty]. 853 | 854 | Like 'git describe --tags --dirty --always -long'. 855 | The distance/hash is unconditional. 856 | 857 | Exceptions: 858 | 1: no tags. 
HEX[-dirty] (note: no 'g' prefix) 859 | """ 860 | if pieces["closest-tag"]: 861 | rendered = pieces["closest-tag"] 862 | rendered += "-%%d-g%%s" %% (pieces["distance"], pieces["short"]) 863 | else: 864 | # exception #1 865 | rendered = pieces["short"] 866 | if pieces["dirty"]: 867 | rendered += "-dirty" 868 | return rendered 869 | 870 | 871 | def render(pieces, style): 872 | """Render the given version pieces into the requested style.""" 873 | if pieces["error"]: 874 | return {"version": "unknown", 875 | "full-revisionid": pieces.get("long"), 876 | "dirty": None, 877 | "error": pieces["error"], 878 | "date": None} 879 | 880 | if not style or style == "default": 881 | style = "pep440" # the default 882 | 883 | if style == "pep440": 884 | rendered = render_pep440(pieces) 885 | elif style == "pep440-pre": 886 | rendered = render_pep440_pre(pieces) 887 | elif style == "pep440-post": 888 | rendered = render_pep440_post(pieces) 889 | elif style == "pep440-old": 890 | rendered = render_pep440_old(pieces) 891 | elif style == "git-describe": 892 | rendered = render_git_describe(pieces) 893 | elif style == "git-describe-long": 894 | rendered = render_git_describe_long(pieces) 895 | else: 896 | raise ValueError("unknown style '%%s'" %% style) 897 | 898 | return {"version": rendered, "full-revisionid": pieces["long"], 899 | "dirty": pieces["dirty"], "error": None, 900 | "date": pieces.get("date")} 901 | 902 | 903 | def get_versions(): 904 | """Get version information or return default if unable to do so.""" 905 | # I am in _version.py, which lives at ROOT/VERSIONFILE_SOURCE. If we have 906 | # __file__, we can work backwards from there to the root. Some 907 | # py2exe/bbfreeze/non-CPython implementations don't do __file__, in which 908 | # case we can only use expanded keywords. 

    cfg = get_config()
    verbose = cfg.verbose

    try:
        return git_versions_from_keywords(get_keywords(), cfg.tag_prefix,
                                          verbose)
    except NotThisMethod:
        pass

    try:
        root = os.path.realpath(__file__)
        # versionfile_source is the relative path from the top of the source
        # tree (where the .git directory might live) to this file. Invert
        # this to find the root from __file__.
        for i in cfg.versionfile_source.split('/'):
            root = os.path.dirname(root)
    except NameError:
        return {"version": "0+unknown", "full-revisionid": None,
                "dirty": None,
                "error": "unable to find root of source tree",
                "date": None}

    try:
        pieces = git_pieces_from_vcs(cfg.tag_prefix, root, verbose)
        return render(pieces, cfg.style)
    except NotThisMethod:
        pass

    try:
        if cfg.parentdir_prefix:
            return versions_from_parentdir(cfg.parentdir_prefix, root, verbose)
    except NotThisMethod:
        pass

    return {"version": "0+unknown", "full-revisionid": None,
            "dirty": None,
            "error": "unable to compute version", "date": None}
'''


# End of the _version.py template. The functions below are near-duplicates of
# those inside the template, but these run in versioneer.py itself (at
# setup.py time) rather than in the generated _version.py.
@register_vcs_handler("git", "get_keywords")
def git_get_keywords(versionfile_abs):
    """Extract version information from the given file."""
    # the code embedded in _version.py can just fetch the value of these
    # keywords. When used from setup.py, we don't want to import _version.py,
    # so we do it with a regexp instead. This function is not used from
    # _version.py.
957 | keywords = {} 958 | try: 959 | f = open(versionfile_abs, "r") 960 | for line in f.readlines(): 961 | if line.strip().startswith("git_refnames ="): 962 | mo = re.search(r'=\s*"(.*)"', line) 963 | if mo: 964 | keywords["refnames"] = mo.group(1) 965 | if line.strip().startswith("git_full ="): 966 | mo = re.search(r'=\s*"(.*)"', line) 967 | if mo: 968 | keywords["full"] = mo.group(1) 969 | if line.strip().startswith("git_date ="): 970 | mo = re.search(r'=\s*"(.*)"', line) 971 | if mo: 972 | keywords["date"] = mo.group(1) 973 | f.close() 974 | except EnvironmentError: 975 | pass 976 | return keywords 977 | 978 | 979 | @register_vcs_handler("git", "keywords") 980 | def git_versions_from_keywords(keywords, tag_prefix, verbose): 981 | """Get version information from git keywords.""" 982 | if not keywords: 983 | raise NotThisMethod("no keywords at all, weird") 984 | date = keywords.get("date") 985 | if date is not None: 986 | # Use only the last line. Previous lines may contain GPG signature 987 | # information. 988 | date = date.splitlines()[-1] 989 | 990 | # git-2.2.0 added "%cI", which expands to an ISO-8601 -compliant 991 | # datestamp. However we prefer "%ci" (which expands to an "ISO-8601 992 | # -like" string, which we must then edit to make compliant), because 993 | # it's been around since git-1.5.3, and it's too difficult to 994 | # discover which version we're using, or to work around using an 995 | # older one. 996 | date = date.strip().replace(" ", "T", 1).replace(" ", "", 1) 997 | refnames = keywords["refnames"].strip() 998 | if refnames.startswith("$Format"): 999 | if verbose: 1000 | print("keywords are unexpanded, not using") 1001 | raise NotThisMethod("unexpanded keywords, not a git-archive tarball") 1002 | refs = set([r.strip() for r in refnames.strip("()").split(",")]) 1003 | # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of 1004 | # just "foo-1.0". If we see a "tag: " prefix, prefer those. 
    TAG = "tag: "
    tags = set([r[len(TAG) :] for r in refs if r.startswith(TAG)])
    if not tags:
        # Either we're using git < 1.8.3, or there really are no tags. We use
        # a heuristic: assume all version tags have a digit. The old git %d
        # expansion behaves like git log --decorate=short and strips out the
        # refs/heads/ and refs/tags/ prefixes that would let us distinguish
        # between branches and tags. By ignoring refnames without digits, we
        # filter out many common branch names like "release" and
        # "stabilization", as well as "HEAD" and "master".
        tags = set([r for r in refs if re.search(r"\d", r)])
        if verbose:
            print("discarding '%s', no digits" % ",".join(refs - tags))
    if verbose:
        print("likely tags: %s" % ",".join(sorted(tags)))
    for ref in sorted(tags):
        # sorting will prefer e.g. "2.0" over "2.0rc1"
        if ref.startswith(tag_prefix):
            r = ref[len(tag_prefix) :]
            if verbose:
                print("picking %s" % r)
            return {
                "version": r,
                "full-revisionid": keywords["full"].strip(),
                "dirty": False,
                "error": None,
                "date": date,
            }
    # no suitable tags, so version is "0+unknown", but full hex is still there
    if verbose:
        print("no suitable tags, using unknown + full revision id")
    return {
        "version": "0+unknown",
        "full-revisionid": keywords["full"].strip(),
        "dirty": False,
        "error": "no suitable tags",
        "date": None,
    }


@register_vcs_handler("git", "pieces_from_vcs")
def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command):
    """Get version from 'git describe' in the root of the source tree.

    This only gets called if the git-archive 'subst' keywords were *not*
    expanded, and _version.py hasn't already been rewritten with a short
    version string, meaning we're inside a checked out source tree.
    """
    GITS = ["git"]
    if sys.platform == "win32":
        GITS = ["git.cmd", "git.exe"]

    out, rc = run_command(GITS, ["rev-parse", "--git-dir"], cwd=root, hide_stderr=True)
    if rc != 0:
        if verbose:
            print("Directory %s not under git control" % root)
        raise NotThisMethod("'git rev-parse --git-dir' returned error")

    # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty]
    # if there isn't one, this yields HEX[-dirty] (no NUM)
    describe_out, rc = run_command(
        GITS,
        [
            "describe",
            "--tags",
            "--dirty",
            "--always",
            "--long",
            "--match",
            "%s*" % tag_prefix,
        ],
        cwd=root,
    )
    # --long was added in git-1.5.5
    if describe_out is None:
        raise NotThisMethod("'git describe' failed")
    describe_out = describe_out.strip()
    full_out, rc = run_command(GITS, ["rev-parse", "HEAD"], cwd=root)
    if full_out is None:
        raise NotThisMethod("'git rev-parse' failed")
    full_out = full_out.strip()

    pieces = {}
    pieces["long"] = full_out
    pieces["short"] = full_out[:7]  # maybe improved later
    pieces["error"] = None

    # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty]
    # TAG might have hyphens.
    git_describe = describe_out

    # look for -dirty suffix
    dirty = git_describe.endswith("-dirty")
    pieces["dirty"] = dirty
    if dirty:
        git_describe = git_describe[: git_describe.rindex("-dirty")]

    # now we have TAG-NUM-gHEX or HEX

    if "-" in git_describe:
        # TAG-NUM-gHEX
        mo = re.search(r"^(.+)-(\d+)-g([0-9a-f]+)$", git_describe)
        if not mo:
            # unparseable. Maybe git-describe is misbehaving?
            pieces["error"] = "unable to parse git-describe output: '%s'" % describe_out
            return pieces

        # tag
        full_tag = mo.group(1)
        if not full_tag.startswith(tag_prefix):
            if verbose:
                fmt = "tag '%s' doesn't start with prefix '%s'"
                print(fmt % (full_tag, tag_prefix))
            pieces["error"] = "tag '%s' doesn't start with prefix '%s'" % (
                full_tag,
                tag_prefix,
            )
            return pieces
        pieces["closest-tag"] = full_tag[len(tag_prefix) :]

        # distance: number of commits since tag
        pieces["distance"] = int(mo.group(2))

        # commit: short hex revision ID
        pieces["short"] = mo.group(3)

    else:
        # HEX: no tags
        pieces["closest-tag"] = None
        count_out, rc = run_command(GITS, ["rev-list", "HEAD", "--count"], cwd=root)
        pieces["distance"] = int(count_out)  # total number of commits

    # commit date: see ISO-8601 comment in git_versions_from_keywords()
    # NOTE(review): this indexes [0] of run_command's result and calls
    # .strip() on it — it assumes 'git show' succeeds (stdout is not None),
    # which is presumably safe only because 'git rev-parse' already worked
    # above. Would raise AttributeError otherwise; confirm acceptable.
    date = run_command(GITS, ["show", "-s", "--format=%ci", "HEAD"], cwd=root)[
        0
    ].strip()
    # Use only the last line. Previous lines may contain GPG signature
    # information.
    date = date.splitlines()[-1]
    pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1)

    return pieces


def do_vcs_install(manifest_in, versionfile_source, ipy):
    """Git-specific installation logic for Versioneer.
1151 | 1152 | For Git, this means creating/changing .gitattributes to mark _version.py 1153 | for export-subst keyword substitution. 1154 | """ 1155 | GITS = ["git"] 1156 | if sys.platform == "win32": 1157 | GITS = ["git.cmd", "git.exe"] 1158 | files = [manifest_in, versionfile_source] 1159 | if ipy: 1160 | files.append(ipy) 1161 | try: 1162 | me = __file__ 1163 | if me.endswith(".pyc") or me.endswith(".pyo"): 1164 | me = os.path.splitext(me)[0] + ".py" 1165 | versioneer_file = os.path.relpath(me) 1166 | except NameError: 1167 | versioneer_file = "versioneer.py" 1168 | files.append(versioneer_file) 1169 | present = False 1170 | try: 1171 | f = open(".gitattributes", "r") 1172 | for line in f.readlines(): 1173 | if line.strip().startswith(versionfile_source): 1174 | if "export-subst" in line.strip().split()[1:]: 1175 | present = True 1176 | f.close() 1177 | except EnvironmentError: 1178 | pass 1179 | if not present: 1180 | f = open(".gitattributes", "a+") 1181 | f.write("%s export-subst\n" % versionfile_source) 1182 | f.close() 1183 | files.append(".gitattributes") 1184 | run_command(GITS, ["add", "--"] + files) 1185 | 1186 | 1187 | def versions_from_parentdir(parentdir_prefix, root, verbose): 1188 | """Try to determine the version from the parent directory name. 1189 | 1190 | Source tarballs conventionally unpack into a directory that includes both 1191 | the project name and a version string. 
We will also support searching up 1192 | two directory levels for an appropriately named parent directory 1193 | """ 1194 | rootdirs = [] 1195 | 1196 | for i in range(3): 1197 | dirname = os.path.basename(root) 1198 | if dirname.startswith(parentdir_prefix): 1199 | return { 1200 | "version": dirname[len(parentdir_prefix) :], 1201 | "full-revisionid": None, 1202 | "dirty": False, 1203 | "error": None, 1204 | "date": None, 1205 | } 1206 | else: 1207 | rootdirs.append(root) 1208 | root = os.path.dirname(root) # up a level 1209 | 1210 | if verbose: 1211 | print( 1212 | "Tried directories %s but none started with prefix %s" 1213 | % (str(rootdirs), parentdir_prefix) 1214 | ) 1215 | raise NotThisMethod("rootdir doesn't start with parentdir_prefix") 1216 | 1217 | 1218 | SHORT_VERSION_PY = """ 1219 | # This file was generated by 'versioneer.py' (0.19) from 1220 | # revision-control system data, or from the parent directory name of an 1221 | # unpacked source archive. Distribution tarballs contain a pre-generated copy 1222 | # of this file. 
1223 | 1224 | import json 1225 | 1226 | version_json = ''' 1227 | %s 1228 | ''' # END VERSION_JSON 1229 | 1230 | 1231 | def get_versions(): 1232 | return json.loads(version_json) 1233 | """ 1234 | 1235 | 1236 | def versions_from_file(filename): 1237 | """Try to determine the version from _version.py if present.""" 1238 | try: 1239 | with open(filename) as f: 1240 | contents = f.read() 1241 | except EnvironmentError: 1242 | raise NotThisMethod("unable to read _version.py") 1243 | mo = re.search( 1244 | r"version_json = '''\n(.*)''' # END VERSION_JSON", contents, re.M | re.S 1245 | ) 1246 | if not mo: 1247 | mo = re.search( 1248 | r"version_json = '''\r\n(.*)''' # END VERSION_JSON", contents, re.M | re.S 1249 | ) 1250 | if not mo: 1251 | raise NotThisMethod("no version_json in _version.py") 1252 | return json.loads(mo.group(1)) 1253 | 1254 | 1255 | def write_to_version_file(filename, versions): 1256 | """Write the given version number to the given _version.py file.""" 1257 | os.unlink(filename) 1258 | contents = json.dumps(versions, sort_keys=True, indent=1, separators=(",", ": ")) 1259 | with open(filename, "w") as f: 1260 | f.write(SHORT_VERSION_PY % contents) 1261 | 1262 | print("set %s to '%s'" % (filename, versions["version"])) 1263 | 1264 | 1265 | def plus_or_dot(pieces): 1266 | """Return a + if we don't already have one, else return a .""" 1267 | if "+" in pieces.get("closest-tag", ""): 1268 | return "." 1269 | return "+" 1270 | 1271 | 1272 | def render_pep440(pieces): 1273 | """Build up version string, with post-release "local version identifier". 1274 | 1275 | Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you 1276 | get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty 1277 | 1278 | Exceptions: 1279 | 1: no tags. git_describe was just HEX. 
0+untagged.DISTANCE.gHEX[.dirty] 1280 | """ 1281 | if pieces["closest-tag"]: 1282 | rendered = pieces["closest-tag"] 1283 | if pieces["distance"] or pieces["dirty"]: 1284 | rendered += plus_or_dot(pieces) 1285 | rendered += "%d.g%s" % (pieces["distance"], pieces["short"]) 1286 | if pieces["dirty"]: 1287 | rendered += ".dirty" 1288 | else: 1289 | # exception #1 1290 | rendered = "0+untagged.%d.g%s" % (pieces["distance"], pieces["short"]) 1291 | if pieces["dirty"]: 1292 | rendered += ".dirty" 1293 | return rendered 1294 | 1295 | 1296 | def render_pep440_pre(pieces): 1297 | """TAG[.post0.devDISTANCE] -- No -dirty. 1298 | 1299 | Exceptions: 1300 | 1: no tags. 0.post0.devDISTANCE 1301 | """ 1302 | if pieces["closest-tag"]: 1303 | rendered = pieces["closest-tag"] 1304 | if pieces["distance"]: 1305 | rendered += ".post0.dev%d" % pieces["distance"] 1306 | else: 1307 | # exception #1 1308 | rendered = "0.post0.dev%d" % pieces["distance"] 1309 | return rendered 1310 | 1311 | 1312 | def render_pep440_post(pieces): 1313 | """TAG[.postDISTANCE[.dev0]+gHEX] . 1314 | 1315 | The ".dev0" means dirty. Note that .dev0 sorts backwards 1316 | (a dirty tree will appear "older" than the corresponding clean one), 1317 | but you shouldn't be releasing software with -dirty anyways. 1318 | 1319 | Exceptions: 1320 | 1: no tags. 0.postDISTANCE[.dev0] 1321 | """ 1322 | if pieces["closest-tag"]: 1323 | rendered = pieces["closest-tag"] 1324 | if pieces["distance"] or pieces["dirty"]: 1325 | rendered += ".post%d" % pieces["distance"] 1326 | if pieces["dirty"]: 1327 | rendered += ".dev0" 1328 | rendered += plus_or_dot(pieces) 1329 | rendered += "g%s" % pieces["short"] 1330 | else: 1331 | # exception #1 1332 | rendered = "0.post%d" % pieces["distance"] 1333 | if pieces["dirty"]: 1334 | rendered += ".dev0" 1335 | rendered += "+g%s" % pieces["short"] 1336 | return rendered 1337 | 1338 | 1339 | def render_pep440_old(pieces): 1340 | """TAG[.postDISTANCE[.dev0]] . 1341 | 1342 | The ".dev0" means dirty. 
1343 | 1344 | Exceptions: 1345 | 1: no tags. 0.postDISTANCE[.dev0] 1346 | """ 1347 | if pieces["closest-tag"]: 1348 | rendered = pieces["closest-tag"] 1349 | if pieces["distance"] or pieces["dirty"]: 1350 | rendered += ".post%d" % pieces["distance"] 1351 | if pieces["dirty"]: 1352 | rendered += ".dev0" 1353 | else: 1354 | # exception #1 1355 | rendered = "0.post%d" % pieces["distance"] 1356 | if pieces["dirty"]: 1357 | rendered += ".dev0" 1358 | return rendered 1359 | 1360 | 1361 | def render_git_describe(pieces): 1362 | """TAG[-DISTANCE-gHEX][-dirty]. 1363 | 1364 | Like 'git describe --tags --dirty --always'. 1365 | 1366 | Exceptions: 1367 | 1: no tags. HEX[-dirty] (note: no 'g' prefix) 1368 | """ 1369 | if pieces["closest-tag"]: 1370 | rendered = pieces["closest-tag"] 1371 | if pieces["distance"]: 1372 | rendered += "-%d-g%s" % (pieces["distance"], pieces["short"]) 1373 | else: 1374 | # exception #1 1375 | rendered = pieces["short"] 1376 | if pieces["dirty"]: 1377 | rendered += "-dirty" 1378 | return rendered 1379 | 1380 | 1381 | def render_git_describe_long(pieces): 1382 | """TAG-DISTANCE-gHEX[-dirty]. 1383 | 1384 | Like 'git describe --tags --dirty --always -long'. 1385 | The distance/hash is unconditional. 1386 | 1387 | Exceptions: 1388 | 1: no tags. 
HEX[-dirty] (note: no 'g' prefix) 1389 | """ 1390 | if pieces["closest-tag"]: 1391 | rendered = pieces["closest-tag"] 1392 | rendered += "-%d-g%s" % (pieces["distance"], pieces["short"]) 1393 | else: 1394 | # exception #1 1395 | rendered = pieces["short"] 1396 | if pieces["dirty"]: 1397 | rendered += "-dirty" 1398 | return rendered 1399 | 1400 | 1401 | def render(pieces, style): 1402 | """Render the given version pieces into the requested style.""" 1403 | if pieces["error"]: 1404 | return { 1405 | "version": "unknown", 1406 | "full-revisionid": pieces.get("long"), 1407 | "dirty": None, 1408 | "error": pieces["error"], 1409 | "date": None, 1410 | } 1411 | 1412 | if not style or style == "default": 1413 | style = "pep440" # the default 1414 | 1415 | if style == "pep440": 1416 | rendered = render_pep440(pieces) 1417 | elif style == "pep440-pre": 1418 | rendered = render_pep440_pre(pieces) 1419 | elif style == "pep440-post": 1420 | rendered = render_pep440_post(pieces) 1421 | elif style == "pep440-old": 1422 | rendered = render_pep440_old(pieces) 1423 | elif style == "git-describe": 1424 | rendered = render_git_describe(pieces) 1425 | elif style == "git-describe-long": 1426 | rendered = render_git_describe_long(pieces) 1427 | else: 1428 | raise ValueError("unknown style '%s'" % style) 1429 | 1430 | return { 1431 | "version": rendered, 1432 | "full-revisionid": pieces["long"], 1433 | "dirty": pieces["dirty"], 1434 | "error": None, 1435 | "date": pieces.get("date"), 1436 | } 1437 | 1438 | 1439 | class VersioneerBadRootError(Exception): 1440 | """The project root directory is unknown or missing key files.""" 1441 | 1442 | 1443 | def get_versions(verbose=False): 1444 | """Get the project version from whatever source is available. 1445 | 1446 | Returns dict with two keys: 'version' and 'full'. 
1447 | """ 1448 | if "versioneer" in sys.modules: 1449 | # see the discussion in cmdclass.py:get_cmdclass() 1450 | del sys.modules["versioneer"] 1451 | 1452 | root = get_root() 1453 | cfg = get_config_from_root(root) 1454 | 1455 | assert cfg.VCS is not None, "please set [versioneer]VCS= in setup.cfg" 1456 | handlers = HANDLERS.get(cfg.VCS) 1457 | assert handlers, "unrecognized VCS '%s'" % cfg.VCS 1458 | verbose = verbose or cfg.verbose 1459 | assert ( 1460 | cfg.versionfile_source is not None 1461 | ), "please set versioneer.versionfile_source" 1462 | assert cfg.tag_prefix is not None, "please set versioneer.tag_prefix" 1463 | 1464 | versionfile_abs = os.path.join(root, cfg.versionfile_source) 1465 | 1466 | # extract version from first of: _version.py, VCS command (e.g. 'git 1467 | # describe'), parentdir. This is meant to work for developers using a 1468 | # source checkout, for users of a tarball created by 'setup.py sdist', 1469 | # and for users of a tarball/zipball created by 'git archive' or github's 1470 | # download-from-tag feature or the equivalent in other VCSes. 
1471 | 1472 | get_keywords_f = handlers.get("get_keywords") 1473 | from_keywords_f = handlers.get("keywords") 1474 | if get_keywords_f and from_keywords_f: 1475 | try: 1476 | keywords = get_keywords_f(versionfile_abs) 1477 | ver = from_keywords_f(keywords, cfg.tag_prefix, verbose) 1478 | if verbose: 1479 | print("got version from expanded keyword %s" % ver) 1480 | return ver 1481 | except NotThisMethod: 1482 | pass 1483 | 1484 | try: 1485 | ver = versions_from_file(versionfile_abs) 1486 | if verbose: 1487 | print("got version from file %s %s" % (versionfile_abs, ver)) 1488 | return ver 1489 | except NotThisMethod: 1490 | pass 1491 | 1492 | from_vcs_f = handlers.get("pieces_from_vcs") 1493 | if from_vcs_f: 1494 | try: 1495 | pieces = from_vcs_f(cfg.tag_prefix, root, verbose) 1496 | ver = render(pieces, cfg.style) 1497 | if verbose: 1498 | print("got version from VCS %s" % ver) 1499 | return ver 1500 | except NotThisMethod: 1501 | pass 1502 | 1503 | try: 1504 | if cfg.parentdir_prefix: 1505 | ver = versions_from_parentdir(cfg.parentdir_prefix, root, verbose) 1506 | if verbose: 1507 | print("got version from parentdir %s" % ver) 1508 | return ver 1509 | except NotThisMethod: 1510 | pass 1511 | 1512 | if verbose: 1513 | print("unable to compute version") 1514 | 1515 | return { 1516 | "version": "0+unknown", 1517 | "full-revisionid": None, 1518 | "dirty": None, 1519 | "error": "unable to compute version", 1520 | "date": None, 1521 | } 1522 | 1523 | 1524 | def get_version(): 1525 | """Get the short version string for this project.""" 1526 | return get_versions()["version"] 1527 | 1528 | 1529 | def get_cmdclass(cmdclass=None): 1530 | """Get the custom setuptools/distutils subclasses used by Versioneer. 1531 | 1532 | If the package uses a different cmdclass (e.g. one from numpy), it 1533 | should be provide as an argument. 
1534 | """ 1535 | if "versioneer" in sys.modules: 1536 | del sys.modules["versioneer"] 1537 | # this fixes the "python setup.py develop" case (also 'install' and 1538 | # 'easy_install .'), in which subdependencies of the main project are 1539 | # built (using setup.py bdist_egg) in the same python process. Assume 1540 | # a main project A and a dependency B, which use different versions 1541 | # of Versioneer. A's setup.py imports A's Versioneer, leaving it in 1542 | # sys.modules by the time B's setup.py is executed, causing B to run 1543 | # with the wrong versioneer. Setuptools wraps the sub-dep builds in a 1544 | # sandbox that restores sys.modules to it's pre-build state, so the 1545 | # parent is protected against the child's "import versioneer". By 1546 | # removing ourselves from sys.modules here, before the child build 1547 | # happens, we protect the child from the parent's versioneer too. 1548 | # Also see https://github.com/python-versioneer/python-versioneer/issues/52 1549 | 1550 | cmds = {} if cmdclass is None else cmdclass.copy() 1551 | 1552 | # we add "version" to both distutils and setuptools 1553 | from distutils.core import Command 1554 | 1555 | class cmd_version(Command): 1556 | description = "report generated version string" 1557 | user_options = [] 1558 | boolean_options = [] 1559 | 1560 | def initialize_options(self): 1561 | pass 1562 | 1563 | def finalize_options(self): 1564 | pass 1565 | 1566 | def run(self): 1567 | vers = get_versions(verbose=True) 1568 | print("Version: %s" % vers["version"]) 1569 | print(" full-revisionid: %s" % vers.get("full-revisionid")) 1570 | print(" dirty: %s" % vers.get("dirty")) 1571 | print(" date: %s" % vers.get("date")) 1572 | if vers["error"]: 1573 | print(" error: %s" % vers["error"]) 1574 | 1575 | cmds["version"] = cmd_version 1576 | 1577 | # we override "build_py" in both distutils and setuptools 1578 | # 1579 | # most invocation pathways end up running build_py: 1580 | # distutils/build -> build_py 1581 
| # distutils/install -> distutils/build ->.. 1582 | # setuptools/bdist_wheel -> distutils/install ->.. 1583 | # setuptools/bdist_egg -> distutils/install_lib -> build_py 1584 | # setuptools/install -> bdist_egg ->.. 1585 | # setuptools/develop -> ? 1586 | # pip install: 1587 | # copies source tree to a tempdir before running egg_info/etc 1588 | # if .git isn't copied too, 'git describe' will fail 1589 | # then does setup.py bdist_wheel, or sometimes setup.py install 1590 | # setup.py egg_info -> ? 1591 | 1592 | # we override different "build_py" commands for both environments 1593 | if "build_py" in cmds: 1594 | _build_py = cmds["build_py"] 1595 | elif "setuptools" in sys.modules: 1596 | from setuptools.command.build_py import build_py as _build_py 1597 | else: 1598 | from distutils.command.build_py import build_py as _build_py 1599 | 1600 | class cmd_build_py(_build_py): 1601 | def run(self): 1602 | root = get_root() 1603 | cfg = get_config_from_root(root) 1604 | versions = get_versions() 1605 | _build_py.run(self) 1606 | # now locate _version.py in the new build/ directory and replace 1607 | # it with an updated value 1608 | if cfg.versionfile_build: 1609 | target_versionfile = os.path.join(self.build_lib, cfg.versionfile_build) 1610 | print("UPDATING %s" % target_versionfile) 1611 | write_to_version_file(target_versionfile, versions) 1612 | 1613 | cmds["build_py"] = cmd_build_py 1614 | 1615 | if "setuptools" in sys.modules: 1616 | from setuptools.command.build_ext import build_ext as _build_ext 1617 | else: 1618 | from distutils.command.build_ext import build_ext as _build_ext 1619 | 1620 | class cmd_build_ext(_build_ext): 1621 | def run(self): 1622 | root = get_root() 1623 | cfg = get_config_from_root(root) 1624 | versions = get_versions() 1625 | _build_ext.run(self) 1626 | if self.inplace: 1627 | # build_ext --inplace will only build extensions in 1628 | # build/lib<..> dir with no _version.py to write to. 
1629 | # As in place builds will already have a _version.py 1630 | # in the module dir, we do not need to write one. 1631 | return 1632 | # now locate _version.py in the new build/ directory and replace 1633 | # it with an updated value 1634 | target_versionfile = os.path.join(self.build_lib, cfg.versionfile_source) 1635 | print("UPDATING %s" % target_versionfile) 1636 | write_to_version_file(target_versionfile, versions) 1637 | 1638 | cmds["build_ext"] = cmd_build_ext 1639 | 1640 | if "cx_Freeze" in sys.modules: # cx_freeze enabled? 1641 | from cx_Freeze.dist import build_exe as _build_exe 1642 | 1643 | # nczeczulin reports that py2exe won't like the pep440-style string 1644 | # as FILEVERSION, but it can be used for PRODUCTVERSION, e.g. 1645 | # setup(console=[{ 1646 | # "version": versioneer.get_version().split("+", 1)[0], # FILEVERSION 1647 | # "product_version": versioneer.get_version(), 1648 | # ... 1649 | 1650 | class cmd_build_exe(_build_exe): 1651 | def run(self): 1652 | root = get_root() 1653 | cfg = get_config_from_root(root) 1654 | versions = get_versions() 1655 | target_versionfile = cfg.versionfile_source 1656 | print("UPDATING %s" % target_versionfile) 1657 | write_to_version_file(target_versionfile, versions) 1658 | 1659 | _build_exe.run(self) 1660 | os.unlink(target_versionfile) 1661 | with open(cfg.versionfile_source, "w") as f: 1662 | LONG = LONG_VERSION_PY[cfg.VCS] 1663 | f.write( 1664 | LONG 1665 | % { 1666 | "DOLLAR": "$", 1667 | "STYLE": cfg.style, 1668 | "TAG_PREFIX": cfg.tag_prefix, 1669 | "PARENTDIR_PREFIX": cfg.parentdir_prefix, 1670 | "VERSIONFILE_SOURCE": cfg.versionfile_source, 1671 | } 1672 | ) 1673 | 1674 | cmds["build_exe"] = cmd_build_exe 1675 | del cmds["build_py"] 1676 | 1677 | if "py2exe" in sys.modules: # py2exe enabled? 
1678 | from py2exe.distutils_buildexe import py2exe as _py2exe 1679 | 1680 | class cmd_py2exe(_py2exe): 1681 | def run(self): 1682 | root = get_root() 1683 | cfg = get_config_from_root(root) 1684 | versions = get_versions() 1685 | target_versionfile = cfg.versionfile_source 1686 | print("UPDATING %s" % target_versionfile) 1687 | write_to_version_file(target_versionfile, versions) 1688 | 1689 | _py2exe.run(self) 1690 | os.unlink(target_versionfile) 1691 | with open(cfg.versionfile_source, "w") as f: 1692 | LONG = LONG_VERSION_PY[cfg.VCS] 1693 | f.write( 1694 | LONG 1695 | % { 1696 | "DOLLAR": "$", 1697 | "STYLE": cfg.style, 1698 | "TAG_PREFIX": cfg.tag_prefix, 1699 | "PARENTDIR_PREFIX": cfg.parentdir_prefix, 1700 | "VERSIONFILE_SOURCE": cfg.versionfile_source, 1701 | } 1702 | ) 1703 | 1704 | cmds["py2exe"] = cmd_py2exe 1705 | 1706 | # we override different "sdist" commands for both environments 1707 | if "sdist" in cmds: 1708 | _sdist = cmds["sdist"] 1709 | elif "setuptools" in sys.modules: 1710 | from setuptools.command.sdist import sdist as _sdist 1711 | else: 1712 | from distutils.command.sdist import sdist as _sdist 1713 | 1714 | class cmd_sdist(_sdist): 1715 | def run(self): 1716 | versions = get_versions() 1717 | self._versioneer_generated_versions = versions 1718 | # unless we update this, the command will keep using the old 1719 | # version 1720 | self.distribution.metadata.version = versions["version"] 1721 | return _sdist.run(self) 1722 | 1723 | def make_release_tree(self, base_dir, files): 1724 | root = get_root() 1725 | cfg = get_config_from_root(root) 1726 | _sdist.make_release_tree(self, base_dir, files) 1727 | # now locate _version.py in the new base_dir directory 1728 | # (remembering that it may be a hardlink) and replace it with an 1729 | # updated value 1730 | target_versionfile = os.path.join(base_dir, cfg.versionfile_source) 1731 | print("UPDATING %s" % target_versionfile) 1732 | write_to_version_file( 1733 | target_versionfile, 
self._versioneer_generated_versions 1734 | ) 1735 | 1736 | cmds["sdist"] = cmd_sdist 1737 | 1738 | return cmds 1739 | 1740 | 1741 | CONFIG_ERROR = """ 1742 | setup.cfg is missing the necessary Versioneer configuration. You need 1743 | a section like: 1744 | 1745 | [versioneer] 1746 | VCS = git 1747 | style = pep440 1748 | versionfile_source = src/myproject/_version.py 1749 | versionfile_build = myproject/_version.py 1750 | tag_prefix = 1751 | parentdir_prefix = myproject- 1752 | 1753 | You will also need to edit your setup.py to use the results: 1754 | 1755 | import versioneer 1756 | setup(version=versioneer.get_version(), 1757 | cmdclass=versioneer.get_cmdclass(), ...) 1758 | 1759 | Please read the docstring in ./versioneer.py for configuration instructions, 1760 | edit setup.cfg, and re-run the installer or 'python versioneer.py setup'. 1761 | """ 1762 | 1763 | SAMPLE_CONFIG = """ 1764 | # See the docstring in versioneer.py for instructions. Note that you must 1765 | # re-run 'versioneer.py setup' after changing this section, and commit the 1766 | # resulting files. 
1767 | 1768 | [versioneer] 1769 | #VCS = git 1770 | #style = pep440 1771 | #versionfile_source = 1772 | #versionfile_build = 1773 | #tag_prefix = 1774 | #parentdir_prefix = 1775 | 1776 | """ 1777 | 1778 | INIT_PY_SNIPPET = """ 1779 | from ._version import get_versions 1780 | __version__ = get_versions()['version'] 1781 | del get_versions 1782 | """ 1783 | 1784 | 1785 | def do_setup(): 1786 | """Do main VCS-independent setup function for installing Versioneer.""" 1787 | root = get_root() 1788 | try: 1789 | cfg = get_config_from_root(root) 1790 | except ( 1791 | EnvironmentError, 1792 | configparser.NoSectionError, 1793 | configparser.NoOptionError, 1794 | ) as e: 1795 | if isinstance(e, (EnvironmentError, configparser.NoSectionError)): 1796 | print("Adding sample versioneer config to setup.cfg", file=sys.stderr) 1797 | with open(os.path.join(root, "setup.cfg"), "a") as f: 1798 | f.write(SAMPLE_CONFIG) 1799 | print(CONFIG_ERROR, file=sys.stderr) 1800 | return 1 1801 | 1802 | print(" creating %s" % cfg.versionfile_source) 1803 | with open(cfg.versionfile_source, "w") as f: 1804 | LONG = LONG_VERSION_PY[cfg.VCS] 1805 | f.write( 1806 | LONG 1807 | % { 1808 | "DOLLAR": "$", 1809 | "STYLE": cfg.style, 1810 | "TAG_PREFIX": cfg.tag_prefix, 1811 | "PARENTDIR_PREFIX": cfg.parentdir_prefix, 1812 | "VERSIONFILE_SOURCE": cfg.versionfile_source, 1813 | } 1814 | ) 1815 | 1816 | ipy = os.path.join(os.path.dirname(cfg.versionfile_source), "__init__.py") 1817 | if os.path.exists(ipy): 1818 | try: 1819 | with open(ipy, "r") as f: 1820 | old = f.read() 1821 | except EnvironmentError: 1822 | old = "" 1823 | if INIT_PY_SNIPPET not in old: 1824 | print(" appending to %s" % ipy) 1825 | with open(ipy, "a") as f: 1826 | f.write(INIT_PY_SNIPPET) 1827 | else: 1828 | print(" %s unmodified" % ipy) 1829 | else: 1830 | print(" %s doesn't exist, ok" % ipy) 1831 | ipy = None 1832 | 1833 | # Make sure both the top-level "versioneer.py" and versionfile_source 1834 | # (PKG/_version.py, used by runtime 
code) are in MANIFEST.in, so 1835 | # they'll be copied into source distributions. Pip won't be able to 1836 | # install the package without this. 1837 | manifest_in = os.path.join(root, "MANIFEST.in") 1838 | simple_includes = set() 1839 | try: 1840 | with open(manifest_in, "r") as f: 1841 | for line in f: 1842 | if line.startswith("include "): 1843 | for include in line.split()[1:]: 1844 | simple_includes.add(include) 1845 | except EnvironmentError: 1846 | pass 1847 | # That doesn't cover everything MANIFEST.in can do 1848 | # (http://docs.python.org/2/distutils/sourcedist.html#commands), so 1849 | # it might give some false negatives. Appending redundant 'include' 1850 | # lines is safe, though. 1851 | if "versioneer.py" not in simple_includes: 1852 | print(" appending 'versioneer.py' to MANIFEST.in") 1853 | with open(manifest_in, "a") as f: 1854 | f.write("include versioneer.py\n") 1855 | else: 1856 | print(" 'versioneer.py' already in MANIFEST.in") 1857 | if cfg.versionfile_source not in simple_includes: 1858 | print( 1859 | " appending versionfile_source ('%s') to MANIFEST.in" 1860 | % cfg.versionfile_source 1861 | ) 1862 | with open(manifest_in, "a") as f: 1863 | f.write("include %s\n" % cfg.versionfile_source) 1864 | else: 1865 | print(" versionfile_source already in MANIFEST.in") 1866 | 1867 | # Make VCS-specific changes. For git, this means creating/changing 1868 | # .gitattributes to mark _version.py for export-subst keyword 1869 | # substitution. 
1870 | do_vcs_install(manifest_in, cfg.versionfile_source, ipy) 1871 | return 0 1872 | 1873 | 1874 | def scan_setup_py(): 1875 | """Validate the contents of setup.py against Versioneer's expectations.""" 1876 | found = set() 1877 | setters = False 1878 | errors = 0 1879 | with open("setup.py", "r") as f: 1880 | for line in f.readlines(): 1881 | if "import versioneer" in line: 1882 | found.add("import") 1883 | if "versioneer.get_cmdclass()" in line: 1884 | found.add("cmdclass") 1885 | if "versioneer.get_version()" in line: 1886 | found.add("get_version") 1887 | if "versioneer.VCS" in line: 1888 | setters = True 1889 | if "versioneer.versionfile_source" in line: 1890 | setters = True 1891 | if len(found) != 3: 1892 | print("") 1893 | print("Your setup.py appears to be missing some important items") 1894 | print("(but I might be wrong). Please make sure it has something") 1895 | print("roughly like the following:") 1896 | print("") 1897 | print(" import versioneer") 1898 | print(" setup( version=versioneer.get_version(),") 1899 | print(" cmdclass=versioneer.get_cmdclass(), ...)") 1900 | print("") 1901 | errors += 1 1902 | if setters: 1903 | print("You should remove lines like 'versioneer.VCS = ' and") 1904 | print("'versioneer.versionfile_source = ' . This configuration") 1905 | print("now lives in setup.cfg, and should be removed from setup.py") 1906 | print("") 1907 | errors += 1 1908 | return errors 1909 | 1910 | 1911 | if __name__ == "__main__": 1912 | cmd = sys.argv[1] 1913 | if cmd == "setup": 1914 | errors = do_setup() 1915 | errors += scan_setup_py() 1916 | if errors: 1917 | sys.exit(1) 1918 | --------------------------------------------------------------------------------