├── dask_mpi ├── tests │ ├── __init__.py │ ├── conftest.py │ ├── core_basic.py │ ├── test_core.py │ ├── test_no_exit.py │ ├── core_no_exit.py │ └── test_cli.py ├── __init__.py ├── cli.py ├── core.py └── _version.py ├── .gitattributes ├── .isort.cfg ├── readthedocs.yml ├── .coveragerc ├── environment.yml ├── docs ├── source │ ├── generated │ │ └── dask_mpi.core.initialize.rst │ ├── cli.rst │ ├── api.rst │ ├── history.rst │ ├── develop.rst │ ├── install.rst │ ├── howitworks.rst │ ├── gpu.rst │ ├── index.rst │ ├── interactive.rst │ ├── batch.rst │ └── conf.py ├── environment.yml ├── Makefile └── make.bat ├── MANIFEST.in ├── CONTRIBUTING.md ├── ci ├── env-mpich.yml └── env-openmpi.yml ├── .github └── workflows │ ├── linting.yml │ ├── release.yml │ └── tests.yml ├── .pre-commit-config.yaml ├── LICENSE.txt ├── README.rst ├── setup.cfg ├── setup.py ├── .gitignore └── versioneer.py /dask_mpi/tests/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /.gitattributes: -------------------------------------------------------------------------------- 1 | dask_mpi/_version.py export-subst 2 | -------------------------------------------------------------------------------- /.isort.cfg: -------------------------------------------------------------------------------- 1 | [settings] 2 | known_third_party = click,dask,distributed,mpi4py,pytest,requests,setuptools,tornado,yaml 3 | -------------------------------------------------------------------------------- /readthedocs.yml: -------------------------------------------------------------------------------- 1 | conda: 2 | file: docs/environment.yml 3 | python: 4 | version: 3 5 | setup_py_install: true 6 | -------------------------------------------------------------------------------- /.coveragerc: -------------------------------------------------------------------------------- 1 | [run] 2 | omit = 3 | 
dask_mpi/tests/*.py 4 | dask_mpi/_version.py 5 | versioneer.py 6 | setup.py -------------------------------------------------------------------------------- /environment.yml: -------------------------------------------------------------------------------- 1 | name: dask-mpi 2 | dependencies: 3 | - dask>=2.19 4 | - distributed>=2.19 5 | - mpi4py>=3.0.3 6 | - jupyter-server-proxy 7 | -------------------------------------------------------------------------------- /dask_mpi/__init__.py: -------------------------------------------------------------------------------- 1 | from ._version import get_versions 2 | from .core import initialize, send_close_signal 3 | 4 | __version__ = get_versions()["version"] 5 | del get_versions 6 | -------------------------------------------------------------------------------- /docs/source/generated/dask_mpi.core.initialize.rst: -------------------------------------------------------------------------------- 1 | dask\_mpi.core.initialize 2 | ========================= 3 | 4 | .. currentmodule:: dask_mpi.core 5 | 6 | .. autofunction:: initialize -------------------------------------------------------------------------------- /MANIFEST.in: -------------------------------------------------------------------------------- 1 | recursive-include dask_mpi *.py 2 | 3 | include setup.py 4 | include LICENSE.txt 5 | include README.rst 6 | include MANIFEST.in 7 | include environment.yml 8 | include versioneer.py 9 | -------------------------------------------------------------------------------- /docs/source/cli.rst: -------------------------------------------------------------------------------- 1 | .. _cli: 2 | 3 | Command-Line Interface (CLI) 4 | ============================ 5 | 6 | .. click:: dask_mpi.cli:main 7 | :prog: dask-mpi 8 | :show-nested: 9 | 10 | 11 | -------------------------------------------------------------------------------- /docs/source/api.rst: -------------------------------------------------------------------------------- 1 | .. 
currentmodule:: dask_mpi.core 2 | 3 | .. _api: 4 | 5 | Application Program Interface (API) 6 | =================================== 7 | 8 | .. autosummary:: 9 | :toctree: generated/ 10 | 11 | initialize 12 | -------------------------------------------------------------------------------- /CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | Dask is a community maintained project. We welcome contributions in the form of bug reports, documentation, code, design proposals, and more. 2 | 3 | For general information on how to contribute see https://docs.dask.org/en/latest/develop.html. 4 | -------------------------------------------------------------------------------- /ci/env-mpich.yml: -------------------------------------------------------------------------------- 1 | name: dask-mpi-dev 2 | channels: 3 | - conda-forge 4 | - defaults 5 | dependencies: 6 | - dask>=2.19 7 | - distributed>=2.19 8 | - mpich 9 | - mpi4py>=3.0.3 10 | - pytest 11 | - pytest-icdiff 12 | - pytest-cov 13 | - coverage 14 | - requests 15 | - codecov 16 | - jupyter-server-proxy 17 | -------------------------------------------------------------------------------- /ci/env-openmpi.yml: -------------------------------------------------------------------------------- 1 | name: dask-mpi-dev 2 | channels: 3 | - conda-forge 4 | - defaults 5 | dependencies: 6 | - dask>=2.19 7 | - distributed>=2.19 8 | - openmpi 9 | - mpi4py>=3.0.3 10 | - pytest 11 | - pytest-icdiff 12 | - pytest-cov 13 | - coverage 14 | - requests 15 | - codecov 16 | - jupyter-server-proxy 17 | -------------------------------------------------------------------------------- /.github/workflows/linting.yml: -------------------------------------------------------------------------------- 1 | name: Linting 2 | 3 | on: 4 | push: 5 | branches: 6 | - '*' 7 | pull_request: 8 | branches: 9 | - '*' 10 | 11 | jobs: 12 | linting: 13 | runs-on: ubuntu-latest 14 | steps: 15 | - uses: actions/checkout@v2 16 
| - uses: actions/setup-python@v2 17 | - uses: pre-commit/action@v2.0.0 -------------------------------------------------------------------------------- /docs/environment.yml: -------------------------------------------------------------------------------- 1 | name: dask-mpi-docs 2 | channels: 3 | - conda-forge 4 | - nodefaults 5 | dependencies: 6 | - dask>=2.19 7 | - distributed>=2.19 8 | - mpich 9 | - mpi4py>=3.0.3 10 | - versioneer 11 | - sphinx 12 | - pygments 13 | - pip 14 | - pip: 15 | - dask_sphinx_theme>=2 16 | - numpydoc 17 | - sphinx-click 18 | -------------------------------------------------------------------------------- /dask_mpi/tests/conftest.py: -------------------------------------------------------------------------------- 1 | import os 2 | 3 | import pytest 4 | 5 | 6 | @pytest.fixture 7 | def allow_run_as_root(): 8 | try: 9 | ALLOW_RUN_AS_ROOT = bool(os.environ.get("ALLOW_RUN_AS_ROOT")) 10 | except Exception: 11 | ALLOW_RUN_AS_ROOT = False 12 | 13 | return ALLOW_RUN_AS_ROOT 14 | 15 | 16 | @pytest.fixture 17 | def mpirun(allow_run_as_root): 18 | if allow_run_as_root: 19 | return ["mpirun", "--allow-run-as-root"] 20 | else: 21 | return ["mpirun"] 22 | -------------------------------------------------------------------------------- /dask_mpi/tests/core_basic.py: -------------------------------------------------------------------------------- 1 | from time import sleep 2 | 3 | from distributed import Client 4 | from distributed.metrics import time 5 | 6 | from dask_mpi import initialize 7 | 8 | initialize() 9 | 10 | with Client() as c: 11 | 12 | start = time() 13 | while len(c.scheduler_info()["workers"]) != 2: 14 | assert time() < start + 10 15 | sleep(0.2) 16 | 17 | assert c.submit(lambda x: x + 1, 10).result() == 11 18 | assert c.submit(lambda x: x + 1, 20, workers=2).result() == 21 19 | -------------------------------------------------------------------------------- /dask_mpi/tests/test_core.py: 
-------------------------------------------------------------------------------- 1 | from __future__ import absolute_import, division, print_function 2 | 3 | import os 4 | import subprocess 5 | import sys 6 | 7 | import pytest 8 | 9 | pytest.importorskip("mpi4py") 10 | 11 | 12 | def test_basic(mpirun): 13 | script_file = os.path.join( 14 | os.path.dirname(os.path.realpath(__file__)), "core_basic.py" 15 | ) 16 | 17 | p = subprocess.Popen(mpirun + ["-np", "4", sys.executable, script_file]) 18 | 19 | p.communicate() 20 | assert p.returncode == 0 21 | -------------------------------------------------------------------------------- /.pre-commit-config.yaml: -------------------------------------------------------------------------------- 1 | exclude: 'docs/source/conf.py|versioneer.py' 2 | 3 | repos: 4 | - repo: https://github.com/ambv/black 5 | rev: 20.8b1 6 | hooks: 7 | - id: black 8 | - repo: https://gitlab.com/pycqa/flake8 9 | rev: 3.8.4 10 | hooks: 11 | - id: flake8 12 | - repo: https://github.com/asottile/seed-isort-config 13 | rev: v1.9.3 14 | hooks: 15 | - id: seed-isort-config 16 | - repo: https://github.com/pre-commit/mirrors-isort 17 | rev: v4.3.21 18 | hooks: 19 | - id: isort -------------------------------------------------------------------------------- /dask_mpi/tests/test_no_exit.py: -------------------------------------------------------------------------------- 1 | from __future__ import absolute_import, division, print_function 2 | 3 | import os 4 | import subprocess 5 | import sys 6 | 7 | import pytest 8 | 9 | pytest.importorskip("mpi4py") 10 | 11 | 12 | def test_no_exit(mpirun): 13 | script_file = os.path.join( 14 | os.path.dirname(os.path.realpath(__file__)), "core_no_exit.py" 15 | ) 16 | 17 | p = subprocess.Popen(mpirun + ["-np", "4", sys.executable, script_file]) 18 | 19 | p.communicate() 20 | assert p.returncode == 0 21 | -------------------------------------------------------------------------------- /docs/Makefile: 
-------------------------------------------------------------------------------- 1 | # Minimal makefile for Sphinx documentation 2 | # 3 | 4 | # You can set these variables from the command line. 5 | SPHINXOPTS = 6 | SPHINXBUILD = sphinx-build 7 | SOURCEDIR = source 8 | BUILDDIR = build 9 | 10 | # Put it first so that "make" without argument is like "make help". 11 | help: 12 | @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) 13 | 14 | .PHONY: help Makefile 15 | 16 | # Catch-all target: route all unknown targets to Sphinx using the new 17 | # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). 18 | %: Makefile 19 | @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) -------------------------------------------------------------------------------- /docs/source/history.rst: -------------------------------------------------------------------------------- 1 | History 2 | ======= 3 | 4 | This package came out of the `Dask Distributed`_ project with help from the 5 | Pangeo_ collaboration. The original code was contained in the ``distributed.cli.dask_mpi`` 6 | module and the original tests were contained in the ``distributed.cli.tests.test_dask_mpi`` 7 | module. The impetus for pulling Dask-MPI out of Dask-Distributed was provided by feedback 8 | on the Dask Distributted `Issue 2402 `_. 9 | 10 | Development history for these original files was preserved. 11 | 12 | .. _`Dask Distributed`: https://github.com/dask/distributed 13 | .. 
_Pangeo: https://pangeo.io 14 | -------------------------------------------------------------------------------- /dask_mpi/tests/core_no_exit.py: -------------------------------------------------------------------------------- 1 | from distributed import Client 2 | from mpi4py.MPI import COMM_WORLD as world 3 | 4 | from dask_mpi import initialize, send_close_signal 5 | 6 | # Split our MPI world into two pieces, one consisting just of 7 | # the old rank 3 process and the other with everything else 8 | new_comm_assignment = 1 if world.rank == 3 else 0 9 | comm = world.Split(new_comm_assignment) 10 | 11 | if world.rank != 3: 12 | # run tests with rest of comm 13 | is_client = initialize(comm=comm, exit=False) 14 | 15 | if is_client: 16 | with Client() as c: 17 | c.submit(lambda x: x + 1, 10).result() == 11 18 | c.submit(lambda x: x + 1, 20).result() == 21 19 | send_close_signal() 20 | 21 | # check that our original comm is intact 22 | world.Barrier() 23 | x = 100 if world.rank == 0 else 200 24 | x = world.bcast(x) 25 | assert x == 100 26 | -------------------------------------------------------------------------------- /.github/workflows/release.yml: -------------------------------------------------------------------------------- 1 | name: Build distribution 2 | 3 | on: [push, pull_request] 4 | 5 | jobs: 6 | release: 7 | runs-on: "ubuntu-latest" 8 | 9 | steps: 10 | - name: Checkout source 11 | uses: actions/checkout@v2 12 | 13 | - name: Set up Python 3.8 14 | uses: actions/setup-python@v1 15 | with: 16 | python-version: 3.8 17 | 18 | - name: Install pypa/build 19 | run: python -m pip install build wheel pyyaml 20 | 21 | - name: Build distributions 22 | shell: bash -l {0} 23 | run: python setup.py sdist bdist_wheel 24 | 25 | - name: Publish package to PyPI 26 | if: github.repository == 'dask/dask-mpi' && github.event_name == 'push' && startsWith(github.ref, 'refs/tags') 27 | uses: pypa/gh-action-pypi-publish@master 28 | with: 29 | user: __token__ 30 | password: ${{ 
secrets.PYPI_API_TOKEN }} 31 | -------------------------------------------------------------------------------- /docs/make.bat: -------------------------------------------------------------------------------- 1 | @ECHO OFF 2 | 3 | pushd %~dp0 4 | 5 | REM Command file for Sphinx documentation 6 | 7 | if "%SPHINXBUILD%" == "" ( 8 | set SPHINXBUILD=sphinx-build 9 | ) 10 | set SOURCEDIR=source 11 | set BUILDDIR=build 12 | 13 | if "%1" == "" goto help 14 | 15 | %SPHINXBUILD% >NUL 2>NUL 16 | if errorlevel 9009 ( 17 | echo. 18 | echo.The 'sphinx-build' command was not found. Make sure you have Sphinx 19 | echo.installed, then set the SPHINXBUILD environment variable to point 20 | echo.to the full path of the 'sphinx-build' executable. Alternatively you 21 | echo.may add the Sphinx directory to PATH. 22 | echo. 23 | echo.If you don't have Sphinx installed, grab it from 24 | echo.http://sphinx-doc.org/ 25 | exit /b 1 26 | ) 27 | 28 | %SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% 29 | goto end 30 | 31 | :help 32 | %SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% 33 | 34 | :end 35 | popd 36 | -------------------------------------------------------------------------------- /docs/source/develop.rst: -------------------------------------------------------------------------------- 1 | Development Guidelines 2 | ====================== 3 | 4 | This repository is part of the Dask_ projects. General development guidelines 5 | including where to ask for help, a layout of repositories, testing practices, 6 | and documentation and style standards are available at the `Dask developer 7 | guidelines`_ in the main documentation. 8 | 9 | .. _Dask: https://dask.org 10 | .. 
_`Dask developer guidelines`: https://docs.dask.org/en/latest/develop.html 11 | 12 | Install 13 | ------- 14 | 15 | After setting up an environment as described in the `Dask developer 16 | guidelines`_ you can clone this repository with git:: 17 | 18 | git clone git@github.com:dask/dask-mpi.git 19 | 20 | and install it from source:: 21 | 22 | cd dask-mpi 23 | python setup.py install 24 | 25 | Test 26 | ---- 27 | 28 | Test using ``pytest``:: 29 | 30 | py.test dask_mpi --verbose 31 | 32 | Build docs 33 | ---------- 34 | 35 | To build docs site after cloning and installing from sources use:: 36 | 37 | cd dask-mpi/docs 38 | make html 39 | 40 | Output will be placed in ``build`` directory. 41 | Required dependencies for building docs can be found in ``dask-mpi/docs/environment.yml``. 42 | -------------------------------------------------------------------------------- /docs/source/install.rst: -------------------------------------------------------------------------------- 1 | Installing 2 | ========== 3 | 4 | You can install Dask-MPI with ``pip``, ``conda``, or by installing from source. 5 | 6 | Pip 7 | --- 8 | 9 | Pip can be used to install both Dask-MPI and its dependencies (e.g. dask, 10 | distributed, NumPy, Pandas, etc.) that are necessary for different 11 | workloads.:: 12 | 13 | pip install dask_mpi --upgrade # Install everything from last released version 14 | 15 | Conda 16 | ----- 17 | 18 | To install the latest version of Dask-MPI from the 19 | `conda-forge `_ repository using 20 | `conda `_:: 21 | 22 | conda install dask-mpi -c conda-forge 23 | 24 | Install from Source 25 | ------------------- 26 | 27 | To install Dask-MPI from source, clone the repository from `github 28 | `_:: 29 | 30 | git clone https://github.com/dask/dask-mpi.git 31 | cd dask-mpi 32 | pip install . 
33 | 34 | You can also install directly from git main branch:: 35 | 36 | pip install git+https://github.com/dask/dask-mpi 37 | 38 | 39 | Test 40 | ---- 41 | 42 | Test Dask-MPI with ``pytest``:: 43 | 44 | git clone https://github.com/dask/dask-mpi.git 45 | cd dask-mpi 46 | pytest dask_mpi 47 | -------------------------------------------------------------------------------- /LICENSE.txt: -------------------------------------------------------------------------------- 1 | Copyright (c) 2018, Anaconda, Inc. and contributors 2 | All rights reserved. 3 | 4 | Redistribution and use in source and binary forms, with or without modification, 5 | are permitted provided that the following conditions are met: 6 | 7 | Redistributions of source code must retain the above copyright notice, 8 | this list of conditions and the following disclaimer. 9 | 10 | Redistributions in binary form must reproduce the above copyright notice, 11 | this list of conditions and the following disclaimer in the documentation 12 | and/or other materials provided with the distribution. 13 | 14 | Neither the name of Anaconda nor the names of any contributors may be used to 15 | endorse or promote products derived from this software without specific prior 16 | written permission. 17 | 18 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" 19 | AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE 20 | IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE 21 | ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE 22 | LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR 23 | CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF 24 | SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS 25 | INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN 26 | CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) 27 | ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF 28 | THE POSSIBILITY OF SUCH DAMAGE. 29 | -------------------------------------------------------------------------------- /README.rst: -------------------------------------------------------------------------------- 1 | =========================== 2 | Deploying Dask using MPI4Py 3 | =========================== 4 | 5 | |Gitter| |GHActions| |Codecov| |Docs| |PyPI| |Conda| 6 | 7 | Easily deploy Dask Distributed in an existing MPI environment, such as one 8 | created with the ``mpirun`` or ``mpiexec`` MPI launch commands. See documentation_ 9 | for more details. 10 | 11 | 12 | LICENSE 13 | ------- 14 | 15 | BSD 3-Clause (See `License File `__) 16 | 17 | .. _documentation: http://mpi.dask.org 18 | 19 | .. |Gitter| image:: https://img.shields.io/gitter/room/dask/dask.svg?style=for-the-badge 20 | :alt: Join the chat at https://gitter.im/dask/dask 21 | :target: https://gitter.im/dask/dask?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge 22 | 23 | .. |GHActions| image:: https://img.shields.io/github/workflow/status/dask/dask-mpi/Tests?style=for-the-badge 24 | :target: https://github.com/dask/dask-mpi/actions?query=workflow%3ATests 25 | 26 | .. |Codecov| image:: https://img.shields.io/codecov/c/github/dask/dask-mpi.svg?style=for-the-badge 27 | :target: https://codecov.io/gh/dask/dask-mpi 28 | 29 | .. 
|Docs| image:: https://readthedocs.org/projects/dask-mpi/badge/?version=latest&style=for-the-badge 30 | :target: https://mpi.dask.org/en/latest/?badge=latest 31 | :alt: Documentation Status 32 | 33 | .. |PyPI| image:: https://img.shields.io/pypi/v/dask-mpi.svg?style=for-the-badge 34 | :target: https://pypi.org/project/dask-mpi/ 35 | :alt: Python Package Index 36 | 37 | .. |Conda| image:: https://img.shields.io/conda/vn/conda-forge/dask-mpi.svg?style=for-the-badge 38 | :target: https://anaconda.org/conda-forge/dask-mpi 39 | :alt: Python Package Index 40 | -------------------------------------------------------------------------------- /setup.cfg: -------------------------------------------------------------------------------- 1 | [flake8] 2 | # References: 3 | # https://flake8.readthedocs.io/en/latest/user/configuration.html 4 | # https://flake8.readthedocs.io/en/latest/user/error-codes.html 5 | 6 | # Note: there cannot be spaces after comma's here 7 | exclude = __init__.py, 8 | ignore = 9 | E20, # Extra space in brackets 10 | E231,E241, # Multiple spaces around "," 11 | E26, # Comments 12 | E4, # Import formatting 13 | E721, # Comparing types instead of isinstance 14 | E731, # Assigning lambda expression 15 | E121, # continuation line under-indented for hanging indent 16 | E126, # continuation line over-indented for hanging indent 17 | E127, # continuation line over-indented for visual indent 18 | E128, # E128 continuation line under-indented for visual indent 19 | E702, # multiple statements on one line (semicolon) 20 | W503, # line break before binary operator 21 | E129, # visually indented line with same indent as next logical line 22 | E116, # unexpected indentation 23 | F811, # redefinition of unused 'loop' from line 10 24 | F841, # local variable is assigned to but never used 25 | E741 # Ambiguous variable names 26 | W504, # line break after binary operator 27 | 28 | max-line-length = 120 29 | 30 | [versioneer] 31 | VCS = git 32 | style = pep440 33 | 
versionfile_source = dask_mpi/_version.py 34 | versionfile_build = dask_mpi/_version.py 35 | tag_prefix = 36 | parentdir_prefix = dask_mpi- 37 | 38 | [tool:pytest] 39 | addopts = -rsx -v --durations=10 40 | minversion = 3.2 41 | markers = 42 | slow: marks tests as slow (deselect with '-m "not slow"') 43 | 44 | # filterwarnings = 45 | # error 46 | # ignore::UserWarning 47 | # ignore::ImportWarning 48 | # ignore::PendingDeprecationWarning 49 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | from os.path import exists 2 | 3 | import yaml 4 | from setuptools import setup 5 | 6 | import versioneer 7 | 8 | 9 | def environment_dependencies(obj, dependencies=None): 10 | if dependencies is None: 11 | dependencies = [] 12 | # if isinstance(obj, string_types): 13 | # dependencies.append(obj.replace('=', '==')) 14 | elif isinstance(obj, dict): 15 | if "dependencies" in obj: 16 | environment_dependencies(obj["dependencies"], dependencies=dependencies) 17 | elif "pip" in obj: 18 | environment_dependencies(obj["pip"], dependencies=dependencies) 19 | elif isinstance(obj, list): 20 | for d in obj: 21 | environment_dependencies(d, dependencies=dependencies) 22 | return dependencies 23 | 24 | 25 | with open("environment.yml") as f: 26 | install_requires = environment_dependencies(yaml.safe_load(f)) 27 | 28 | if exists("README.rst"): 29 | with open("README.rst") as f: 30 | long_description = f.read() 31 | else: 32 | long_description = "" 33 | 34 | setup( 35 | name="dask-mpi", 36 | version=versioneer.get_version(), 37 | cmdclass=versioneer.get_cmdclass(), 38 | description="Deploy Dask using mpi4py", 39 | url="https://github.com/dask/dask-mpi", 40 | project_urls={ 41 | "Documentation": "https://mpi.dask.org/", 42 | "Source": "https://github.com/dask/dask-mpi", 43 | "Tracker": "https://github.com/dask/dask-mpi/issues", 44 | }, 45 | maintainer="Kevin Paul", 46 | 
maintainer_email="kpaul@ucar.edu", 47 | license="BSD 3-Clause", 48 | include_package_data=True, 49 | install_requires=install_requires, 50 | python_requires=">=3.6", 51 | packages=["dask_mpi"], 52 | long_description=long_description, 53 | entry_points=""" 54 | [console_scripts] 55 | dask-mpi=dask_mpi.cli:go 56 | """, 57 | zip_safe=False, 58 | ) 59 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Created by .ignore support plugin (hsz.mobi) 2 | ### Python template 3 | # Byte-compiled / optimized / DLL files 4 | __pycache__/ 5 | *.py[cod] 6 | *$py.class 7 | 8 | # C extensions 9 | *.so 10 | 11 | # Distribution / packaging 12 | .Python 13 | build/ 14 | develop-eggs/ 15 | dist/ 16 | downloads/ 17 | eggs/ 18 | .eggs/ 19 | lib/ 20 | lib64/ 21 | parts/ 22 | sdist/ 23 | var/ 24 | wheels/ 25 | *.egg-info/ 26 | .installed.cfg 27 | *.egg 28 | MANIFEST 29 | 30 | # PyInstaller 31 | # Usually these files are written by a python script from a template 32 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 
33 | *.manifest 34 | *.spec 35 | 36 | # Installer logs 37 | pip-log.txt 38 | pip-delete-this-directory.txt 39 | 40 | # Unit test / coverage reports 41 | htmlcov/ 42 | .tox/ 43 | .coverage 44 | .coverage.* 45 | .cache 46 | nosetests.xml 47 | coverage.xml 48 | *.cover 49 | .hypothesis/ 50 | .pytest_cache/ 51 | 52 | # Translations 53 | *.mo 54 | *.pot 55 | 56 | # Django stuff: 57 | *.log 58 | local_settings.py 59 | db.sqlite3 60 | 61 | # Flask stuff: 62 | instance/ 63 | .webassets-cache 64 | 65 | # Scrapy stuff: 66 | .scrapy 67 | 68 | # Sphinx documentation 69 | docs/_build/ 70 | 71 | # PyBuilder 72 | target/ 73 | 74 | # Jupyter Notebook 75 | .ipynb_checkpoints 76 | 77 | # pyenv 78 | .python-version 79 | 80 | # celery beat schedule file 81 | celerybeat-schedule 82 | 83 | # SageMath parsed files 84 | *.sage.py 85 | 86 | # Environments 87 | .env 88 | .venv 89 | env/ 90 | venv/ 91 | ENV/ 92 | env.bak/ 93 | venv.bak/ 94 | 95 | # Spyder project settings 96 | .spyderproject 97 | .spyproject 98 | 99 | # Rope project settings 100 | .ropeproject 101 | 102 | # mkdocs documentation 103 | /site 104 | 105 | # mypy 106 | .mypy_cache/ 107 | 108 | .DS_Store 109 | 110 | # PyCharm files 111 | .idea/ 112 | 113 | # Dask files 114 | global.lock 115 | purge.lock 116 | /temp/ 117 | /dask-worker-space/ 118 | 119 | # VSCode files 120 | .vscode/ 121 | 122 | # DevContainer extension files 123 | .devcontainer/ 124 | 125 | test-reports/ 126 | -------------------------------------------------------------------------------- /.github/workflows/tests.yml: -------------------------------------------------------------------------------- 1 | name: Tests 2 | 3 | on: 4 | push: 5 | branches: 6 | - '*' 7 | pull_request: 8 | branches: 9 | - '*' 10 | workflow_dispatch: # allows you to trigger manually 11 | 12 | jobs: 13 | build: 14 | name: python-${{ matrix.python }}-${{ matrix.mpi }} 15 | runs-on: ubuntu-latest 16 | defaults: 17 | run: 18 | shell: bash -l {0} 19 | strategy: 20 | fail-fast: false 21 | matrix: 
22 | python: ['3.6', '3.7', '3.8'] 23 | mpi: ['mpich', 'openmpi'] 24 | env: 25 | PYTHON: ${{ matrix.python }} 26 | MPI: ${{ matrix.mpi }} 27 | ENV_FILE: ci/env-${{ matrix.mpi }}.yml 28 | OMPI_MCA_rmaps_base_oversubscribe: '1' 29 | OMPI_ALLOW_RUN_AS_ROOT: '1' 30 | OMPI_ALLOW_RUN_AS_ROOT_CONFIRM: '1' 31 | steps: 32 | - uses: actions/checkout@v2 33 | - name: Cache conda 34 | uses: actions/cache@v2 35 | env: 36 | # Increase this value to reset cache if ci/environment.yml has not changed 37 | CACHE_NUMBER: 0 38 | with: 39 | path: ~/conda_pkgs_dir 40 | key: ${{ runner.os }}-conda-${{ env.CACHE_NUMBER }}-${{ hashFiles( env.ENV_FILE ) }} 41 | - name: Setup miniconda 42 | uses: conda-incubator/setup-miniconda@v2 43 | with: 44 | activate-environment: dask-mpi-dev # Defined in ci/env-mpi.yml 45 | auto-update-conda: false 46 | python-version: ${{ matrix.python }} 47 | environment-file: ${{ env.ENV_FILE }} 48 | use-only-tar-bz2: true # IMPORTANT: This needs to be set for caching to work properly! 49 | - name: Install package 50 | run: python -m pip install --no-deps -e . 51 | - name: Check Installation 52 | run: | 53 | conda list 54 | pip list 55 | - name: Run Tests 56 | run: pytest --junitxml=test-reports/junit.xml --cov=./ dask_mpi/tests/ --verbose 57 | - name: Upload code coverage to Codecov 58 | uses: codecov/codecov-action@v1 59 | with: 60 | file: ./coverage.xml 61 | flags: unittests 62 | env_vars: PYTHON,MPI 63 | name: codecov-umbrella 64 | fail_ci_if_error: false 65 | -------------------------------------------------------------------------------- /docs/source/howitworks.rst: -------------------------------------------------------------------------------- 1 | How Dask-MPI Works 2 | ================== 3 | 4 | Dask-MPI works by using the ``mpi4py`` package and using MPI to selectively run 5 | different code on different MPI ranks. 
Hence, like any other application of the 6 | ``mpi4py`` package, it requires creating the appropriate MPI environment through 7 | the running of the ``mpirun`` or ``mpiexec`` commands. 8 | 9 | .. code-block:: bash 10 | 11 | mpirun -np 8 dask-mpi --no-nanny --scheduler-file ~/scheduler.json 12 | 13 | or 14 | 15 | .. code-block:: bash 16 | 17 | mpirun -np 8 python my_dask_script.py 18 | 19 | Using the Dask-MPI CLI 20 | ---------------------- 21 | 22 | By convention, Dask-MPI always launches the Scheduler on MPI rank 0. When using the CLI 23 | (``dask-mpi``), Dask-MPI launches the Workers (or Nannies and Workers) on the remaining 24 | MPI ranks (MPI ranks 1 and above). On each MPI rank, a ``tornado`` event loop is started 25 | after the Scheduler and Workers are created. These event loops continue until a kill 26 | signal is sent to one of the MPI processes, and then the entire Dask cluster (all MPI ranks) 27 | is shut down. 28 | 29 | When using the ``--no-scheduler`` option of the Dask-MPI CLI, more workers can be added to 30 | an existing Dask cluster. Since these two runs will be in separate ``mpirun`` or ``mpiexec`` 31 | executions, they will only be tied to each other through the scheduler. If a worker in the 32 | new cluster crashes and takes down the entire MPI environment, it will not have anything to 33 | do with the first (original) Dask cluster. Similarly, if the first cluster is taken down, 34 | the new workers will wait for the Scheduler to reactivate so they can re-connect. 35 | 36 | Using the Dask-MPI API 37 | ---------------------- 38 | 39 | Again, Dask-MPI always launches the Scheduler on MPI rank 0. When using the ``initialize()`` 40 | method, Dask-MPI runs the Client script on MPI rank 1 and launches the Workers on the 41 | remaining MPI ranks (MPI ranks 2 and above). 
The Dask Scheduler and Workers start their 42 | ``tornado`` event loops once they are created on their given MPI ranks, and these event 43 | loops run until the Client process (MPI rank 1) sends the termination signal to the 44 | Scheduler. Once the Scheduler receives the termination signal, it will shut down the 45 | Workers, too. 46 | -------------------------------------------------------------------------------- /docs/source/gpu.rst: -------------------------------------------------------------------------------- 1 | Dask-MPI with GPUs 2 | ================== 3 | 4 | When running `dask-mpi` on GPU enabled systems you will be provided with one or more GPUs per MPI rank. 5 | 6 | Today Dask assumes one worker process per GPU with workers tied correctly to each GPU. To help with this 7 | the `dask-cuda `_ package exists which contains 8 | cluster and worker classes which are designed to correctly configure your GPU environment. 9 | 10 | .. code-block:: bash 11 | 12 | conda install -c rapidsai -c nvidia -c conda-forge dask-cuda 13 | # or 14 | python -m pip install dask-cuda 15 | 16 | It is possible to leverage ``dask-cuda`` with ``dask-mpi`` by setting the worker class to use ``dask_cuda.CUDAWorker``. 17 | 18 | .. code-block:: bash 19 | 20 | mpirun -np 4 dask-mpi --worker-class dask_cuda.CUDAWorker 21 | 22 | .. code-block:: python 23 | 24 | from dask_mpi import initialize 25 | 26 | initialize(worker_class="dask_cuda.CUDAWorker") 27 | 28 | 29 | .. tip:: 30 | 31 | If your cluster is configured so that each rank represents one node you may have multiple GPUs 32 | per node. Workers will be created per GPU, not per rank so ``CUDAWorker`` will create one worker 33 | per GPU with names following the pattern ``{rank}-{gpu_index}``. So if you set ``-np 4`` but you 34 | have four GPUs per node you will end up with sixteen workers in your cluster. 
35 | 36 | Additional configuration 37 | ------------------------ 38 | 39 | You may also want to pass additional configuration options to ``dask_cuda.CUDAWorker`` in addition to the ones 40 | supported by ``dask-mpi``. It is common to configure things like memory management and network protocols for 41 | GPU workers. 42 | 43 | You can pass any additional options that are accepted by ``dask_cuda.CUDAWorker`` with the worker options parameter. 44 | 45 | On the CLI this is expected to be a JSON serialised dictionary of values. 46 | 47 | .. code-block:: bash 48 | 49 | mpirun -np 4 dask-mpi --worker-class dask_cuda.CUDAWorker --worker-options '{"rmm_managed_memory": true}' 50 | 51 | In Python it is just a dictionary. 52 | 53 | .. code-block:: python 54 | 55 | from dask_mpi import initialize 56 | 57 | initialize(worker_class="dask_cuda.CUDAWorker", worker_options={"rmm_managed_memory": True}) 58 | 59 | .. tip:: 60 | 61 | For more information on using GPUs with Dask check out the `dask-cuda documentation 62 | `_. -------------------------------------------------------------------------------- /docs/source/index.rst: -------------------------------------------------------------------------------- 1 | Dask-MPI 2 | ======== 3 | 4 | *Easily deploy Dask using MPI* 5 | 6 | 7 | The Dask-MPI project makes it easy to deploy Dask from within an existing MPI 8 | environment, such as one created with the common MPI command-line launchers 9 | ``mpirun`` or ``mpiexec``. Such environments are commonly found in high performance 10 | supercomputers, academic research institutions, and other clusters where MPI 11 | has already been installed. 12 | 13 | Dask-MPI provides two convenient interfaces to launch Dask, either from within 14 | a batch script or directly from the command-line. 15 | 16 | Batch Script Example 17 | -------------------- 18 | 19 | You can turn your batch Python script into an MPI executable 20 | with the ``dask_mpi.initialize`` function. 21 | 22 | ..
code-block:: python 23 | 24 | from dask_mpi import initialize 25 | initialize() 26 | 27 | from dask.distributed import Client 28 | client = Client() # Connect this local process to remote workers 29 | 30 | This makes your Python script launchable directly with ``mpirun`` or ``mpiexec``. 31 | 32 | .. code-block:: bash 33 | 34 | mpirun -np 4 python my_client_script.py 35 | 36 | This deploys the Dask scheduler and workers as well as the user's Client 37 | process within a single cohesive MPI computation. 38 | 39 | 40 | Command Line Example 41 | -------------------- 42 | 43 | Alternatively you can launch a Dask cluster directly from the command-line 44 | using the ``dask-mpi`` command and specifying a scheduler file where Dask can 45 | write connection information. 46 | 47 | .. code-block:: bash 48 | 49 | mpirun -np 4 dask-mpi --scheduler-file ~/dask-scheduler.json 50 | 51 | You can then access this cluster either from a separate batch script or from an 52 | interactive session (such as a Jupyter Notebook) by referencing the same scheduler 53 | file that ``dask-mpi`` created. 54 | 55 | .. code-block:: python 56 | 57 | from dask.distributed import Client 58 | client = Client(scheduler_file='~/dask-scheduler.json') 59 | 60 | 61 | Use Job Queuing System Directly 62 | ------------------------------- 63 | 64 | You can also use `Dask Jobqueue `_ to deploy Dask 65 | directly on a job queuing system like SLURM, SGE, PBS, LSF, Torque, or others. 66 | This can be especially nice when you want to dynamically scale your cluster 67 | during your computation, or for interactive use. 68 | 69 | 70 | .. toctree:: 71 | :maxdepth: 1 72 | :hidden: 73 | :caption: Getting Started 74 | 75 | install 76 | batch 77 | interactive 78 | gpu 79 | 80 | .. toctree:: 81 | :maxdepth: 1 82 | :hidden: 83 | :caption: Detailed use 84 | 85 | cli 86 | api 87 | 88 | .. 
toctree:: 89 | :maxdepth: 1 90 | :hidden: 91 | :caption: Help & Reference 92 | 93 | howitworks 94 | develop 95 | history 96 | -------------------------------------------------------------------------------- /docs/source/interactive.rst: -------------------------------------------------------------------------------- 1 | .. _interactive: 2 | 3 | Dask-MPI with Interactive Jobs 4 | ============================== 5 | 6 | Dask-MPI can be used to easily launch an entire Dask cluster in an existing MPI environment, 7 | and attach a client to that cluster in an interactive session. 8 | 9 | In this scenario, you would launch the Dask cluster using the Dask-MPI command-line interface 10 | (CLI) ``dask-mpi``. 11 | 12 | .. code-block:: bash 13 | 14 | mpirun -np 4 dask-mpi --scheduler-file scheduler.json 15 | 16 | In this example, the above code will use MPI to launch the Dask Scheduler on MPI rank 0 and 17 | Dask Workers (or Nannies) on all remaining MPI ranks. 18 | 19 | It is advisable, as shown in the previous example, to use the ``--scheduler-file`` option when 20 | using the ``dask-mpi`` CLI. The ``--scheduler-file`` option saves the location of the Dask 21 | Scheduler to a file that can be referenced later in your interactive session. For example, 22 | the following code would create a Dask Client and connect it to the Scheduler using the 23 | scheduler JSON file. 24 | 25 | .. code-block:: python 26 | 27 | from distributed import Client 28 | client = Client(scheduler_file='/path/to/scheduler.json') 29 | 30 | As long as your interactive session has access to the same filesystem where the scheduler JSON 31 | file is saved, this procedure will let you run your interactive session easily attach to your 32 | separate ``dask-mpi`` job. 33 | 34 | After a Dask cluster has been created, the ``dask-mpi`` CLI can be used to add more workers to 35 | the cluster by using the ``--no-scheduler`` option. 36 | 37 | .. 
code-block:: bash 38 | 39 | mpirun -n 5 dask-mpi --scheduler-file scheduler.json --no-scheduler 40 | 41 | In this example (above), 5 more workers will be created and they will be registered with the 42 | Scheduler (whose address is in the scheduler JSON file). 43 | 44 | .. tip:: **Running with a Job Scheduler** 45 | 46 | In High-Performance Computing environments, job schedulers, such as LSF, PBS, or SLURM, are 47 | commonly used to request the necessary resources needed for an MPI job, such as the number 48 | of CPU cores, the total memory needed, and/or the number of nodes over which to spread out 49 | the MPI job. In such a case, it is advisable that the user place the ``mpirun ... dask-mpi ...`` 50 | command in a job submission script, with the number of MPI ranks (e.g., ``-np 4``) matches the 51 | number of cores requested from the job scheduler. 52 | 53 | .. warning:: **MPI Jobs and Dask Nannies** 54 | 55 | It is many times useful to launch your Dask-MPI cluster (using ``dask-mpi``) with Dask Nannies 56 | (i.e., with the ``--worker-class distributed.Nanny`` option), rather than strictly with Dask Workers. 57 | This is because the Dask Nannies can relaunch a worker when a failure occurs. However, in some MPI 58 | environments, Dask Nannies will not be able to work as expected. This is because some installations 59 | of MPI may restrict the number of actual running processes from exceeding the number of MPI ranks 60 | requested. When using Dask Nannies, the Nanny process is executed and runs in the background 61 | after forking a Worker process. Hence, one Worker process will exist for each Nanny process. 62 | Some MPI installations will kill any forked process, and you will see many errors arising from 63 | the Worker processes being killed. If this happens, disable the use of Nannies with the 64 | ``--worker-class distributed.Worker`` option to ``dask-mpi``. 65 | 66 | For more details on how to use the ``dask-mpi`` command, see the :ref:`cli`. 
67 | -------------------------------------------------------------------------------- /docs/source/batch.rst: -------------------------------------------------------------------------------- 1 | Dask-MPI with Batch Jobs 2 | ======================== 3 | 4 | Dask, with Dask Distributed, is an incredibly powerful engine behind interactive sessions 5 | (see :ref:`interactive`). However, there are many scenarios where your work is pre-defined 6 | and you do not need an interactive session to execute your tasks. In these cases, running 7 | in *batch-mode* is best. 8 | 9 | Dask-MPI makes running in batch-mode in an MPI environment easy by providing an API to the 10 | same functionality created for the ``dask-mpi`` :ref:`cli`. However, in batch mode, you 11 | need the script running your Dask Client to run in the same environment in which your Dask 12 | cluster is constructed, and you want your Dask cluster to shut down after your Client script 13 | has executed. 14 | 15 | To make this functionality possible, Dask-MPI provides the ``initialize()`` method as part of 16 | its :ref:`api`. The ``initialize()`` function, when run from within an MPI environment (i.e., 17 | created by the use of ``mpirun`` or ``mpiexec``), launches the Dask Scheduler on MPI rank 0 18 | and the Dask Workers on MPI ranks 2 and above. On MPI rank 1, the ``initialize()`` function 19 | "passes through" to the Client script, running the Dask-based Client code the user wishes to 20 | execute. 21 | 22 | For example, if you have a Dask-based script named ``myscript.py``, you would be able to 23 | run this script in parallel, using Dask, with the following command. 24 | 25 | .. code-block:: bash 26 | 27 | mpirun -np 4 python myscript.py 28 | 29 | This will run the Dask Scheduler on MPI rank 0, the user's Client code on MPI rank 1, and 30 | 2 workers on MPI rank 2 and MPI rank 3. To make this work, the ``myscript.py`` script must 31 | have (presumably near the top of the script) the following code in it. 
32 | 33 | .. code-block:: python 34 | 35 | from dask_mpi import initialize 36 | initialize() 37 | 38 | from distributed import Client 39 | client = Client() 40 | 41 | The Dask Client will automatically detect the location of the Dask Scheduler running on MPI 42 | rank 0 and connect to it. 43 | 44 | When the Client code is finished executing, the Dask Scheduler and Workers (and, possibly, 45 | Nannies) will be terminated. 46 | 47 | .. tip:: **Running Batch Jobs with Job Schedulers** 48 | 49 | It is common in High-Performance Computing (HPC) environments to request the necessary 50 | computing resources with a job scheduler, such as LSF, PBS, or SLURM. In such environments, 51 | it is advised that the ``mpirun ... python myscript.py`` command be placed in a job 52 | submission script such that the resources requested from the job scheduler match the 53 | resources used by the ``mpirun`` command. 54 | 55 | For more details on the ``initialize()`` method, see the :ref:`api`. 56 | 57 | Connecting to Dashboard 58 | ----------------------- 59 | 60 | Due to the fact that Dask might be initialized on a node that isn't the login node 61 | a simple port forwarding can be insufficient to connect to a dashboard. 62 | 63 | To find out which node is the one hosting the dashboard append initialization code with location logging: 64 | 65 | ..
import asyncio
import json
import warnings

import click
from dask.distributed import Scheduler, Worker
from distributed.cli.utils import check_python_3
from distributed.utils import import_term
from mpi4py import MPI


@click.command()
@click.argument("scheduler_address", type=str, required=False)
@click.option(
    "--scheduler-file",
    type=str,
    default=None,
    help="Filename to JSON encoded scheduler information.",
)
@click.option(
    "--scheduler-port",
    default=None,
    type=int,
    help="Specify scheduler port number. Defaults to random.",
)
@click.option(
    "--interface", type=str, default=None, help="Network interface like 'eth0' or 'ib0'"
)
@click.option(
    "--protocol", type=str, default=None, help="Network protocol to use like TCP"
)
@click.option(
    "--nthreads", type=int, default=None, help="Number of threads per worker."
)
@click.option(
    "--memory-limit",
    default="auto",
    help="Number of bytes before spilling data to disk. "
    "This can be an integer (nbytes) "
    "float (fraction of total memory) "
    "or 'auto'",
)
@click.option(
    "--local-directory", default=None, type=str, help="Directory to place worker files"
)
@click.option(
    "--scheduler/--no-scheduler",
    default=True,
    help=(
        "Whether or not to include a scheduler. "
        "Use --no-scheduler to increase an existing dask cluster"
    ),
)
@click.option(
    "--nanny/--no-nanny",
    default=True,
    help="Start workers in nanny process for management (deprecated use --worker-class instead)",
)
@click.option(
    "--worker-class",
    type=str,
    default="distributed.Nanny",
    help="Class to use when creating workers",
)
@click.option(
    "--worker-options",
    type=str,
    default=None,
    help="JSON serialised dict of options to pass to workers",
)
@click.option(
    "--dashboard-address",
    type=str,
    default=None,
    help="Address for visual diagnostics dashboard",
)
@click.option(
    "--name",
    type=str,
    default="dask_mpi",
    help="Name prefix for each worker, to which dask-mpi appends ``-``.",
)
def main(
    scheduler_address,
    scheduler_file,
    interface,
    nthreads,
    local_directory,
    memory_limit,
    scheduler,
    dashboard_address,
    nanny,
    worker_class,
    worker_options,
    scheduler_port,
    protocol,
    name,
):
    """Launch a Dask cluster from within an existing MPI environment.

    MPI rank 0 runs the Dask Scheduler (unless ``--no-scheduler`` is given);
    every other rank runs one worker of the requested class.  Each rank
    blocks on its component's event loop until that component finishes.
    """
    comm = MPI.COMM_WORLD
    rank = comm.Get_rank()

    # --worker-options arrives as a JSON string or None; json.loads(None)
    # raises TypeError, which we treat as "no extra options".
    try:
        worker_options = json.loads(worker_options)
    except TypeError:
        worker_options = {}

    if rank == 0 and scheduler:

        async def run_scheduler():
            async with Scheduler(
                interface=interface,
                protocol=protocol,
                dashboard_address=dashboard_address,
                scheduler_file=scheduler_file,
                port=scheduler_port,
            ) as s:
                # Workers wait on this barrier so the scheduler file exists
                # before any worker tries to read it.
                comm.Barrier()
                await s.finished()

        asyncio.get_event_loop().run_until_complete(run_scheduler())

    else:
        comm.Barrier()

        async def run_worker():
            WorkerType = import_term(worker_class)
            if not nanny:
                # BUG FIX: this previously did ``raise DeprecationWarning(...)``,
                # which aborted the worker outright and made the fallback
                # assignment below unreachable.  Emit a real warning instead
                # so --no-nanny keeps working as documented.
                warnings.warn(
                    "Option --no-nanny is deprecated, use --worker-class instead",
                    DeprecationWarning,
                )
                WorkerType = Worker
            opts = {
                "interface": interface,
                "protocol": protocol,
                "nthreads": nthreads,
                "memory_limit": memory_limit,
                "local_directory": local_directory,
                "name": f"{name}-{rank}",
                "scheduler_file": scheduler_file,
                **worker_options,
            }
            if scheduler_address:
                opts["scheduler_ip"] = scheduler_address
            async with WorkerType(**opts) as worker:
                await worker.finished()

        asyncio.get_event_loop().run_until_complete(run_worker())


def go():
    """Console-script entry point: verify the Python version, then run the CLI."""
    check_python_3()
    main()


if __name__ == "__main__":
    go()
import asyncio
import atexit
import sys
import warnings

import dask
from dask.distributed import Client, Nanny, Scheduler
from distributed.utils import import_term
from tornado import gen
from tornado.ioloop import IOLoop


def initialize(
    interface=None,
    nthreads=1,
    local_directory="",
    memory_limit="auto",
    nanny=False,
    dashboard=True,
    dashboard_address=":8787",
    protocol=None,
    worker_class="distributed.Worker",
    worker_options=None,
    comm=None,
    exit=True,
):
    """
    Initialize a Dask cluster using mpi4py

    Using mpi4py, MPI rank 0 launches the Scheduler, MPI rank 1 passes through to the
    client script, and all other MPI ranks launch workers. All MPI ranks other than
    MPI rank 1 block while their event loops run.

    In normal operation these ranks exit once rank 1 ends. If exit=False is set they
    instead return a bool indicating whether they are the client and should execute
    more client code, or a worker/scheduler who should not. In this case the user is
    responsible for the client calling send_close_signal when work is complete, and
    checking the returned value to choose further actions.

    Parameters
    ----------
    interface : str
        Network interface like 'eth0' or 'ib0'
    nthreads : int
        Number of threads per worker
    local_directory : str
        Directory to place worker files
    memory_limit : int, float, or 'auto'
        Number of bytes before spilling data to disk. This can be an
        integer (nbytes), float (fraction of total memory), or 'auto'.
    nanny : bool
        Start workers in nanny process for management (deprecated, use worker_class instead)
    dashboard : bool
        Enable Bokeh visual diagnostics
    dashboard_address : str
        Bokeh port for visual diagnostics
    protocol : str
        Protocol like 'inproc' or 'tcp'
    worker_class : str
        Class to use when creating workers
    worker_options : dict
        Options to pass to workers
    comm: mpi4py.MPI.Intracomm
        Optional MPI communicator to use instead of COMM_WORLD
    exit: bool
        Whether to call sys.exit on the workers and schedulers when the event
        loop completes.

    Returns
    -------
    is_client: bool
        Only returned if exit=False. Indicates whether this rank should continue
        to run client code (True), or if it acts as a scheduler or worker (False).
    """
    if comm is None:
        from mpi4py import MPI

        comm = MPI.COMM_WORLD

    rank = comm.Get_rank()
    # NOTE(review): `loop` is never referenced below; presumably kept for the
    # side effect of installing the current IOLoop before use — confirm.
    loop = IOLoop.current()

    if not worker_options:
        worker_options = {}

    if rank == 0:

        async def run_scheduler():
            async with Scheduler(
                interface=interface,
                protocol=protocol,
                dashboard=dashboard,
                dashboard_address=dashboard_address,
            ) as scheduler:
                # Broadcast the scheduler address so the other ranks can
                # configure their default scheduler before the barrier.
                comm.bcast(scheduler.address, root=0)
                comm.Barrier()
                await scheduler.finished()

        asyncio.get_event_loop().run_until_complete(run_scheduler())
        if exit:
            sys.exit()
        else:
            return False

    else:
        scheduler_address = comm.bcast(None, root=0)
        dask.config.set(scheduler_address=scheduler_address)
        comm.Barrier()

        if rank == 1:
            # The client rank: hand control back to the caller's script.
            if exit:
                atexit.register(send_close_signal)
            return True
        else:

            async def run_worker():
                WorkerType = import_term(worker_class)
                if nanny:
                    # BUG FIX: this previously did ``raise DeprecationWarning(...)``,
                    # which aborted the worker and made the fallback assignment
                    # below unreachable, so nanny=True never actually worked.
                    warnings.warn(
                        "Option nanny=True is deprecated, use worker_class='distributed.Nanny' instead",
                        DeprecationWarning,
                    )
                    WorkerType = Nanny
                opts = {
                    "interface": interface,
                    "protocol": protocol,
                    "nthreads": nthreads,
                    "memory_limit": memory_limit,
                    "local_directory": local_directory,
                    "name": rank,
                    **worker_options,
                }
                async with WorkerType(**opts) as worker:
                    await worker.finished()

            asyncio.get_event_loop().run_until_complete(run_worker())
            if exit:
                sys.exit()
            else:
                return False


def send_close_signal():
    """
    The client can call this function to explicitly stop
    the event loop.

    This is not needed in normal usage, where it is run
    automatically when the client code exits python.

    You only need to call this manually when using exit=False
    in initialize.
    """

    async def stop(dask_scheduler):
        await dask_scheduler.close()
        await gen.sleep(0.1)
        local_loop = dask_scheduler.loop
        local_loop.add_callback(local_loop.stop)

    with Client() as c:
        c.run_on_scheduler(stop, wait=False)
tmpfile(extension="json") as fn: 58 | 59 | cmd = mpirun + ["-np", "2", "dask-mpi", "--scheduler-file", fn] 60 | 61 | with popen(cmd, stdin=FNULL): 62 | with Client(scheduler_file=fn) as c: 63 | 64 | start = time() 65 | while len(c.scheduler_info()["workers"]) != 1: 66 | assert time() < start + 10 67 | sleep(0.2) 68 | 69 | assert c.submit(lambda x: x + 1, 10).result() == 11 70 | 71 | cmd = mpirun + [ 72 | "-np", 73 | "1", 74 | "dask-mpi", 75 | "--scheduler-file", 76 | fn, 77 | "--no-scheduler", 78 | ] 79 | 80 | with popen(cmd): 81 | start = time() 82 | while len(c.scheduler_info()["workers"]) != 2: 83 | assert time() < start + 10 84 | sleep(0.2) 85 | 86 | 87 | @pytest.mark.parametrize("nanny", ["--nanny", "--no-nanny"]) 88 | def test_non_default_ports(loop, nanny, mpirun): 89 | with tmpfile(extension="json") as fn: 90 | 91 | cmd = mpirun + [ 92 | "-np", 93 | "2", 94 | "dask-mpi", 95 | "--scheduler-file", 96 | fn, 97 | nanny, 98 | "--scheduler-port", 99 | "56723", 100 | ] 101 | 102 | with popen(cmd): 103 | with Client(scheduler_file=fn) as c: 104 | sched_info = c.scheduler_info() 105 | _, sched_port = get_address_host_port(sched_info["address"]) 106 | assert sched_port == 56723 107 | 108 | 109 | def check_port_okay(port): 110 | start = time() 111 | while True: 112 | try: 113 | response = requests.get("http://localhost:%d/status/" % port) 114 | assert response.ok 115 | break 116 | except Exception: 117 | sleep(0.1) 118 | assert time() < start + 20 119 | 120 | 121 | def test_dashboard(loop, mpirun): 122 | with tmpfile(extension="json") as fn: 123 | 124 | cmd = mpirun + [ 125 | "-np", 126 | "2", 127 | "dask-mpi", 128 | "--scheduler-file", 129 | fn, 130 | "--dashboard-address", 131 | ":59583", 132 | ] 133 | 134 | with popen(cmd, stdin=FNULL): 135 | check_port_okay(59583) 136 | 137 | with pytest.raises(Exception): 138 | requests.get("http://localhost:59583/status/") 139 | 140 | 141 | @pytest.mark.skip(reason="Should we expose this option?") 142 | def 
test_bokeh_worker(loop, mpirun): 143 | with tmpfile(extension="json") as fn: 144 | 145 | cmd = mpirun + [ 146 | "-np", 147 | "2", 148 | "dask-mpi", 149 | "--scheduler-file", 150 | fn, 151 | "--bokeh-worker-port", 152 | "59584", 153 | ] 154 | 155 | with popen(cmd, stdin=FNULL): 156 | check_port_okay(59584) 157 | 158 | 159 | def tmpfile_static(extension="", dir=None): 160 | """ 161 | utility function for test_stale_sched test 162 | """ 163 | 164 | extension = "." + extension.lstrip(".") 165 | handle, filename = tempfile.mkstemp(extension, dir=dir) 166 | return handle, filename 167 | 168 | 169 | @pytest.mark.parametrize("nanny", ["--nanny", "--no-nanny"]) 170 | def test_stale_sched(loop, nanny, mpirun): 171 | """ 172 | the purpose of this unit test is to simulate the situation in which 173 | an old scheduler file has been left behind from a non-clean dask exit. 174 | in this situation the scheduler should wake up and overwrite the stale 175 | file before the workers start. 176 | """ 177 | 178 | fhandle, fn = tmpfile_static(extension="json") 179 | 180 | stale_json = { 181 | "type": "Scheduler", 182 | "id": "Scheduler-edb63f9c-9e83-4021-8563-44bcffc451cc", 183 | "address": "tcp://10.128.0.32:45373", 184 | "services": {"dashboard": 8787}, 185 | "workers": {}, 186 | } 187 | 188 | with open(fn, "w") as f: 189 | json.dump(stale_json, f) 190 | 191 | cmd = mpirun + [ 192 | "-np", 193 | "4", 194 | "dask-mpi", 195 | "--scheduler-file", 196 | fn, 197 | "--dashboard-address", 198 | "0", 199 | nanny, 200 | ] 201 | 202 | p = subprocess.Popen(cmd) 203 | 204 | sleep(5) 205 | 206 | p.kill() 207 | 208 | with open(fn) as f: 209 | new_json = json.load(f) 210 | 211 | os.close(fhandle) 212 | os.remove(fn) 213 | 214 | assert new_json != stale_json 215 | -------------------------------------------------------------------------------- /docs/source/conf.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | # 3 | # Configuration 
# -*- coding: utf-8 -*-
#
# Sphinx configuration for the Dask-MPI documentation.
# Full option reference: http://www.sphinx-doc.org/en/master/config

import dask_mpi

# -- Project information -----------------------------------------------------

project = "Dask-MPI"
copyright = "2018, Anaconda, Inc. and contributors"
author = "Dask-MPI Development Team"

# Short and full versions both come straight from the installed package.
version = dask_mpi.__version__
release = dask_mpi.__version__

# -- General configuration ---------------------------------------------------

extensions = [
    "sphinx.ext.autodoc",
    "sphinx.ext.intersphinx",
    "sphinx.ext.todo",
    "sphinx.ext.ifconfig",
    "sphinx.ext.viewcode",
    "sphinx.ext.extlinks",
    "sphinx_click.ext",
    "numpydoc",
]

autosummary_generate = True

numpydoc_class_members_toctree = True
numpydoc_show_class_members = False

templates_path = ["_templates"]
source_suffix = ".rst"
master_doc = "index"
language = None
exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"]
pygments_style = "default"

# -- Options for HTML output --------------------------------------------------

html_theme = "dask_sphinx_theme"
html_static_path = ["_static"]

# -- Options for HTMLHelp output ----------------------------------------------

htmlhelp_basename = "Dask-mpidoc"

# -- Options for LaTeX output -------------------------------------------------

latex_elements = {}

latex_documents = [
    (master_doc, "dask-mpi.tex", "Dask-mpi Documentation", [author], "manual"),
]

# -- Options for manual page output -------------------------------------------

man_pages = [
    (master_doc, "dask-mpi", "Dask-mpi Documentation", [author], 1),
]

# -- Options for Texinfo output -----------------------------------------------

texinfo_documents = [
    (
        master_doc,
        "dask-mpi",
        "Dask-mpi Documentation",
        author,
        "Dask-mpi",
        "One line description of project.",
        "Miscellaneous",
    ),
]

# -- Options for Epub output --------------------------------------------------

epub_title = project
epub_exclude_files = ["search.html"]

# -- Options for intersphinx extension ----------------------------------------

intersphinx_mapping = {
    "numpy": (
        "https://docs.scipy.org/doc/numpy/",
        "https://docs.scipy.org/doc/numpy/objects.inv",
    ),
    "dask": (
        "https://docs.dask.org/en/latest",
        "https://docs.dask.org/en/latest/objects.inv",
    ),
    "distributed": (
        "https://distributed.dask.org/en/stable/",
        "https://distributed.dask.org/en/stable/objects.inv",
    ),
    "mpi4py": (
        "https://mpi4py.readthedocs.io/en/stable/",
        "https://mpi4py.readthedocs.io/en/stable/objects.inv",
    ),
}
209 | todo_include_todos = True 210 | 211 | # -- Options for extlinks extension ------------------------------------------ 212 | 213 | # Link to GitHub issues and pull requests using :pr:`1234` and :issue:`1234` 214 | # syntax 215 | extlinks = { 216 | 'issue': ('https://github.com/dask/dask-mpi/issues/%s', 'GH#'), 217 | 'pr': ('https://github.com/dask/dask-mpi/pull/%s', 'GH#') 218 | } 219 | -------------------------------------------------------------------------------- /dask_mpi/_version.py: -------------------------------------------------------------------------------- 1 | # This file helps to compute a version number in source trees obtained from 2 | # git-archive tarball (such as those provided by githubs download-from-tag 3 | # feature). Distribution tarballs (built by setup.py sdist) and build 4 | # directories (produced by setup.py build) will contain a much shorter file 5 | # that just contains the computed version number. 6 | 7 | # This file is released into the public domain. Generated by 8 | # versioneer-0.18 (https://github.com/warner/python-versioneer) 9 | 10 | """Git implementation of _version.py.""" 11 | 12 | import errno 13 | import os 14 | import re 15 | import subprocess 16 | import sys 17 | 18 | 19 | def get_keywords(): 20 | """Get the keywords needed to look up the version information.""" 21 | # these strings will be replaced by git during git-archive. 22 | # setup.py/versioneer.py will grep for the variable names, so they must 23 | # each be defined on a line of their own. _version.py will just call 24 | # get_keywords(). 
25 | git_refnames = " (HEAD -> main)" 26 | git_full = "d0cba37c800a4c87956c360de31b8e4f659847f1" 27 | git_date = "2021-11-12 09:11:38 -0700" 28 | keywords = {"refnames": git_refnames, "full": git_full, "date": git_date} 29 | return keywords 30 | 31 | 32 | class VersioneerConfig: 33 | """Container for Versioneer configuration parameters.""" 34 | 35 | 36 | def get_config(): 37 | """Create, populate and return the VersioneerConfig() object.""" 38 | # these strings are filled in when 'setup.py versioneer' creates 39 | # _version.py 40 | cfg = VersioneerConfig() 41 | cfg.VCS = "git" 42 | cfg.style = "pep440" 43 | cfg.tag_prefix = "" 44 | cfg.parentdir_prefix = "dask-mpi-" 45 | cfg.versionfile_source = "dask_mpi/_version.py" 46 | cfg.verbose = False 47 | return cfg 48 | 49 | 50 | class NotThisMethod(Exception): 51 | """Exception raised if a method is not valid for the current scenario.""" 52 | 53 | 54 | LONG_VERSION_PY = {} 55 | HANDLERS = {} 56 | 57 | 58 | def register_vcs_handler(vcs, method): # decorator 59 | """Decorator to mark a method as the handler for a particular VCS.""" 60 | 61 | def decorate(f): 62 | """Store f in HANDLERS[vcs][method].""" 63 | if vcs not in HANDLERS: 64 | HANDLERS[vcs] = {} 65 | HANDLERS[vcs][method] = f 66 | return f 67 | 68 | return decorate 69 | 70 | 71 | def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False, env=None): 72 | """Call the given command(s).""" 73 | assert isinstance(commands, list) 74 | p = None 75 | for c in commands: 76 | try: 77 | dispcmd = str([c] + args) 78 | # remember shell=False, so use git.cmd on windows, not just git 79 | p = subprocess.Popen( 80 | [c] + args, 81 | cwd=cwd, 82 | env=env, 83 | stdout=subprocess.PIPE, 84 | stderr=(subprocess.PIPE if hide_stderr else None), 85 | ) 86 | break 87 | except EnvironmentError: 88 | e = sys.exc_info()[1] 89 | if e.errno == errno.ENOENT: 90 | continue 91 | if verbose: 92 | print("unable to run %s" % dispcmd) 93 | print(e) 94 | return None, None 95 | else: 
96 | if verbose: 97 | print("unable to find command, tried %s" % (commands,)) 98 | return None, None 99 | stdout = p.communicate()[0].strip() 100 | if sys.version_info[0] >= 3: 101 | stdout = stdout.decode() 102 | if p.returncode != 0: 103 | if verbose: 104 | print("unable to run %s (error)" % dispcmd) 105 | print("stdout was %s" % stdout) 106 | return None, p.returncode 107 | return stdout, p.returncode 108 | 109 | 110 | def versions_from_parentdir(parentdir_prefix, root, verbose): 111 | """Try to determine the version from the parent directory name. 112 | 113 | Source tarballs conventionally unpack into a directory that includes both 114 | the project name and a version string. We will also support searching up 115 | two directory levels for an appropriately named parent directory 116 | """ 117 | rootdirs = [] 118 | 119 | for i in range(3): 120 | dirname = os.path.basename(root) 121 | if dirname.startswith(parentdir_prefix): 122 | return { 123 | "version": dirname[len(parentdir_prefix) :], 124 | "full-revisionid": None, 125 | "dirty": False, 126 | "error": None, 127 | "date": None, 128 | } 129 | else: 130 | rootdirs.append(root) 131 | root = os.path.dirname(root) # up a level 132 | 133 | if verbose: 134 | print( 135 | "Tried directories %s but none started with prefix %s" 136 | % (str(rootdirs), parentdir_prefix) 137 | ) 138 | raise NotThisMethod("rootdir doesn't start with parentdir_prefix") 139 | 140 | 141 | @register_vcs_handler("git", "get_keywords") 142 | def git_get_keywords(versionfile_abs): 143 | """Extract version information from the given file.""" 144 | # the code embedded in _version.py can just fetch the value of these 145 | # keywords. When used from setup.py, we don't want to import _version.py, 146 | # so we do it with a regexp instead. This function is not used from 147 | # _version.py. 
148 | keywords = {} 149 | try: 150 | f = open(versionfile_abs, "r") 151 | for line in f.readlines(): 152 | if line.strip().startswith("git_refnames ="): 153 | mo = re.search(r'=\s*"(.*)"', line) 154 | if mo: 155 | keywords["refnames"] = mo.group(1) 156 | if line.strip().startswith("git_full ="): 157 | mo = re.search(r'=\s*"(.*)"', line) 158 | if mo: 159 | keywords["full"] = mo.group(1) 160 | if line.strip().startswith("git_date ="): 161 | mo = re.search(r'=\s*"(.*)"', line) 162 | if mo: 163 | keywords["date"] = mo.group(1) 164 | f.close() 165 | except EnvironmentError: 166 | pass 167 | return keywords 168 | 169 | 170 | @register_vcs_handler("git", "keywords") 171 | def git_versions_from_keywords(keywords, tag_prefix, verbose): 172 | """Get version information from git keywords.""" 173 | if not keywords: 174 | raise NotThisMethod("no keywords at all, weird") 175 | date = keywords.get("date") 176 | if date is not None: 177 | # git-2.2.0 added "%cI", which expands to an ISO-8601 -compliant 178 | # datestamp. However we prefer "%ci" (which expands to an "ISO-8601 179 | # -like" string, which we must then edit to make compliant), because 180 | # it's been around since git-1.5.3, and it's too difficult to 181 | # discover which version we're using, or to work around using an 182 | # older one. 183 | date = date.strip().replace(" ", "T", 1).replace(" ", "", 1) 184 | refnames = keywords["refnames"].strip() 185 | if refnames.startswith("$Format"): 186 | if verbose: 187 | print("keywords are unexpanded, not using") 188 | raise NotThisMethod("unexpanded keywords, not a git-archive tarball") 189 | refs = set([r.strip() for r in refnames.strip("()").split(",")]) 190 | # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of 191 | # just "foo-1.0". If we see a "tag: " prefix, prefer those. 192 | TAG = "tag: " 193 | tags = set([r[len(TAG) :] for r in refs if r.startswith(TAG)]) 194 | if not tags: 195 | # Either we're using git < 1.8.3, or there really are no tags. 
We use 196 | # a heuristic: assume all version tags have a digit. The old git %d 197 | # expansion behaves like git log --decorate=short and strips out the 198 | # refs/heads/ and refs/tags/ prefixes that would let us distinguish 199 | # between branches and tags. By ignoring refnames without digits, we 200 | # filter out many common branch names like "release" and 201 | # "stabilization", as well as "HEAD" and "main". 202 | tags = set([r for r in refs if re.search(r"\d", r)]) 203 | if verbose: 204 | print("discarding '%s', no digits" % ",".join(refs - tags)) 205 | if verbose: 206 | print("likely tags: %s" % ",".join(sorted(tags))) 207 | for ref in sorted(tags): 208 | # sorting will prefer e.g. "2.0" over "2.0rc1" 209 | if ref.startswith(tag_prefix): 210 | r = ref[len(tag_prefix) :] 211 | if verbose: 212 | print("picking %s" % r) 213 | return { 214 | "version": r, 215 | "full-revisionid": keywords["full"].strip(), 216 | "dirty": False, 217 | "error": None, 218 | "date": date, 219 | } 220 | # no suitable tags, so version is "0+unknown", but full hex is still there 221 | if verbose: 222 | print("no suitable tags, using unknown + full revision id") 223 | return { 224 | "version": "0+unknown", 225 | "full-revisionid": keywords["full"].strip(), 226 | "dirty": False, 227 | "error": "no suitable tags", 228 | "date": None, 229 | } 230 | 231 | 232 | @register_vcs_handler("git", "pieces_from_vcs") 233 | def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command): 234 | """Get version from 'git describe' in the root of the source tree. 235 | 236 | This only gets called if the git-archive 'subst' keywords were *not* 237 | expanded, and _version.py hasn't already been rewritten with a short 238 | version string, meaning we're inside a checked out source tree. 
239 | """ 240 | GITS = ["git"] 241 | if sys.platform == "win32": 242 | GITS = ["git.cmd", "git.exe"] 243 | 244 | out, rc = run_command(GITS, ["rev-parse", "--git-dir"], cwd=root, hide_stderr=True) 245 | if rc != 0: 246 | if verbose: 247 | print("Directory %s not under git control" % root) 248 | raise NotThisMethod("'git rev-parse --git-dir' returned error") 249 | 250 | # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty] 251 | # if there isn't one, this yields HEX[-dirty] (no NUM) 252 | describe_out, rc = run_command( 253 | GITS, 254 | [ 255 | "describe", 256 | "--tags", 257 | "--dirty", 258 | "--always", 259 | "--long", 260 | "--match", 261 | "%s*" % tag_prefix, 262 | ], 263 | cwd=root, 264 | ) 265 | # --long was added in git-1.5.5 266 | if describe_out is None: 267 | raise NotThisMethod("'git describe' failed") 268 | describe_out = describe_out.strip() 269 | full_out, rc = run_command(GITS, ["rev-parse", "HEAD"], cwd=root) 270 | if full_out is None: 271 | raise NotThisMethod("'git rev-parse' failed") 272 | full_out = full_out.strip() 273 | 274 | pieces = {} 275 | pieces["long"] = full_out 276 | pieces["short"] = full_out[:7] # maybe improved later 277 | pieces["error"] = None 278 | 279 | # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty] 280 | # TAG might have hyphens. 281 | git_describe = describe_out 282 | 283 | # look for -dirty suffix 284 | dirty = git_describe.endswith("-dirty") 285 | pieces["dirty"] = dirty 286 | if dirty: 287 | git_describe = git_describe[: git_describe.rindex("-dirty")] 288 | 289 | # now we have TAG-NUM-gHEX or HEX 290 | 291 | if "-" in git_describe: 292 | # TAG-NUM-gHEX 293 | mo = re.search(r"^(.+)-(\d+)-g([0-9a-f]+)$", git_describe) 294 | if not mo: 295 | # unparseable. Maybe git-describe is misbehaving? 
296 | pieces["error"] = "unable to parse git-describe output: '%s'" % describe_out 297 | return pieces 298 | 299 | # tag 300 | full_tag = mo.group(1) 301 | if not full_tag.startswith(tag_prefix): 302 | if verbose: 303 | fmt = "tag '%s' doesn't start with prefix '%s'" 304 | print(fmt % (full_tag, tag_prefix)) 305 | pieces["error"] = "tag '%s' doesn't start with prefix '%s'" % ( 306 | full_tag, 307 | tag_prefix, 308 | ) 309 | return pieces 310 | pieces["closest-tag"] = full_tag[len(tag_prefix) :] 311 | 312 | # distance: number of commits since tag 313 | pieces["distance"] = int(mo.group(2)) 314 | 315 | # commit: short hex revision ID 316 | pieces["short"] = mo.group(3) 317 | 318 | else: 319 | # HEX: no tags 320 | pieces["closest-tag"] = None 321 | count_out, rc = run_command(GITS, ["rev-list", "HEAD", "--count"], cwd=root) 322 | pieces["distance"] = int(count_out) # total number of commits 323 | 324 | # commit date: see ISO-8601 comment in git_versions_from_keywords() 325 | date = run_command(GITS, ["show", "-s", "--format=%ci", "HEAD"], cwd=root)[ 326 | 0 327 | ].strip() 328 | pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1) 329 | 330 | return pieces 331 | 332 | 333 | def plus_or_dot(pieces): 334 | """Return a + if we don't already have one, else return a .""" 335 | if "+" in pieces.get("closest-tag", ""): 336 | return "." 337 | return "+" 338 | 339 | 340 | def render_pep440(pieces): 341 | """Build up version string, with post-release "local version identifier". 342 | 343 | Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you 344 | get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty 345 | 346 | Exceptions: 347 | 1: no tags. git_describe was just HEX. 
0+untagged.DISTANCE.gHEX[.dirty] 348 | """ 349 | if pieces["closest-tag"]: 350 | rendered = pieces["closest-tag"] 351 | if pieces["distance"] or pieces["dirty"]: 352 | rendered += plus_or_dot(pieces) 353 | rendered += "%d.g%s" % (pieces["distance"], pieces["short"]) 354 | if pieces["dirty"]: 355 | rendered += ".dirty" 356 | else: 357 | # exception #1 358 | rendered = "0+untagged.%d.g%s" % (pieces["distance"], pieces["short"]) 359 | if pieces["dirty"]: 360 | rendered += ".dirty" 361 | return rendered 362 | 363 | 364 | def render_pep440_pre(pieces): 365 | """TAG[.post.devDISTANCE] -- No -dirty. 366 | 367 | Exceptions: 368 | 1: no tags. 0.post.devDISTANCE 369 | """ 370 | if pieces["closest-tag"]: 371 | rendered = pieces["closest-tag"] 372 | if pieces["distance"]: 373 | rendered += ".post.dev%d" % pieces["distance"] 374 | else: 375 | # exception #1 376 | rendered = "0.post.dev%d" % pieces["distance"] 377 | return rendered 378 | 379 | 380 | def render_pep440_post(pieces): 381 | """TAG[.postDISTANCE[.dev0]+gHEX] . 382 | 383 | The ".dev0" means dirty. Note that .dev0 sorts backwards 384 | (a dirty tree will appear "older" than the corresponding clean one), 385 | but you shouldn't be releasing software with -dirty anyways. 386 | 387 | Exceptions: 388 | 1: no tags. 0.postDISTANCE[.dev0] 389 | """ 390 | if pieces["closest-tag"]: 391 | rendered = pieces["closest-tag"] 392 | if pieces["distance"] or pieces["dirty"]: 393 | rendered += ".post%d" % pieces["distance"] 394 | if pieces["dirty"]: 395 | rendered += ".dev0" 396 | rendered += plus_or_dot(pieces) 397 | rendered += "g%s" % pieces["short"] 398 | else: 399 | # exception #1 400 | rendered = "0.post%d" % pieces["distance"] 401 | if pieces["dirty"]: 402 | rendered += ".dev0" 403 | rendered += "+g%s" % pieces["short"] 404 | return rendered 405 | 406 | 407 | def render_pep440_old(pieces): 408 | """TAG[.postDISTANCE[.dev0]] . 409 | 410 | The ".dev0" means dirty. 411 | 412 | Eexceptions: 413 | 1: no tags. 
0.postDISTANCE[.dev0] 414 | """ 415 | if pieces["closest-tag"]: 416 | rendered = pieces["closest-tag"] 417 | if pieces["distance"] or pieces["dirty"]: 418 | rendered += ".post%d" % pieces["distance"] 419 | if pieces["dirty"]: 420 | rendered += ".dev0" 421 | else: 422 | # exception #1 423 | rendered = "0.post%d" % pieces["distance"] 424 | if pieces["dirty"]: 425 | rendered += ".dev0" 426 | return rendered 427 | 428 | 429 | def render_git_describe(pieces): 430 | """TAG[-DISTANCE-gHEX][-dirty]. 431 | 432 | Like 'git describe --tags --dirty --always'. 433 | 434 | Exceptions: 435 | 1: no tags. HEX[-dirty] (note: no 'g' prefix) 436 | """ 437 | if pieces["closest-tag"]: 438 | rendered = pieces["closest-tag"] 439 | if pieces["distance"]: 440 | rendered += "-%d-g%s" % (pieces["distance"], pieces["short"]) 441 | else: 442 | # exception #1 443 | rendered = pieces["short"] 444 | if pieces["dirty"]: 445 | rendered += "-dirty" 446 | return rendered 447 | 448 | 449 | def render_git_describe_long(pieces): 450 | """TAG-DISTANCE-gHEX[-dirty]. 451 | 452 | Like 'git describe --tags --dirty --always -long'. 453 | The distance/hash is unconditional. 454 | 455 | Exceptions: 456 | 1: no tags. 
HEX[-dirty] (note: no 'g' prefix) 457 | """ 458 | if pieces["closest-tag"]: 459 | rendered = pieces["closest-tag"] 460 | rendered += "-%d-g%s" % (pieces["distance"], pieces["short"]) 461 | else: 462 | # exception #1 463 | rendered = pieces["short"] 464 | if pieces["dirty"]: 465 | rendered += "-dirty" 466 | return rendered 467 | 468 | 469 | def render(pieces, style): 470 | """Render the given version pieces into the requested style.""" 471 | if pieces["error"]: 472 | return { 473 | "version": "unknown", 474 | "full-revisionid": pieces.get("long"), 475 | "dirty": None, 476 | "error": pieces["error"], 477 | "date": None, 478 | } 479 | 480 | if not style or style == "default": 481 | style = "pep440" # the default 482 | 483 | if style == "pep440": 484 | rendered = render_pep440(pieces) 485 | elif style == "pep440-pre": 486 | rendered = render_pep440_pre(pieces) 487 | elif style == "pep440-post": 488 | rendered = render_pep440_post(pieces) 489 | elif style == "pep440-old": 490 | rendered = render_pep440_old(pieces) 491 | elif style == "git-describe": 492 | rendered = render_git_describe(pieces) 493 | elif style == "git-describe-long": 494 | rendered = render_git_describe_long(pieces) 495 | else: 496 | raise ValueError("unknown style '%s'" % style) 497 | 498 | return { 499 | "version": rendered, 500 | "full-revisionid": pieces["long"], 501 | "dirty": pieces["dirty"], 502 | "error": None, 503 | "date": pieces.get("date"), 504 | } 505 | 506 | 507 | def get_versions(): 508 | """Get version information or return default if unable to do so.""" 509 | # I am in _version.py, which lives at ROOT/VERSIONFILE_SOURCE. If we have 510 | # __file__, we can work backwards from there to the root. Some 511 | # py2exe/bbfreeze/non-CPython implementations don't do __file__, in which 512 | # case we can only use expanded keywords. 
513 | 514 | cfg = get_config() 515 | verbose = cfg.verbose 516 | 517 | try: 518 | return git_versions_from_keywords(get_keywords(), cfg.tag_prefix, verbose) 519 | except NotThisMethod: 520 | pass 521 | 522 | try: 523 | root = os.path.realpath(__file__) 524 | # versionfile_source is the relative path from the top of the source 525 | # tree (where the .git directory might live) to this file. Invert 526 | # this to find the root from __file__. 527 | for i in cfg.versionfile_source.split("/"): 528 | root = os.path.dirname(root) 529 | except NameError: 530 | return { 531 | "version": "0+unknown", 532 | "full-revisionid": None, 533 | "dirty": None, 534 | "error": "unable to find root of source tree", 535 | "date": None, 536 | } 537 | 538 | try: 539 | pieces = git_pieces_from_vcs(cfg.tag_prefix, root, verbose) 540 | return render(pieces, cfg.style) 541 | except NotThisMethod: 542 | pass 543 | 544 | try: 545 | if cfg.parentdir_prefix: 546 | return versions_from_parentdir(cfg.parentdir_prefix, root, verbose) 547 | except NotThisMethod: 548 | pass 549 | 550 | return { 551 | "version": "0+unknown", 552 | "full-revisionid": None, 553 | "dirty": None, 554 | "error": "unable to compute version", 555 | "date": None, 556 | } 557 | -------------------------------------------------------------------------------- /versioneer.py: -------------------------------------------------------------------------------- 1 | 2 | # Version: 0.18 3 | 4 | """The Versioneer - like a rocketeer, but for versions. 5 | 6 | The Versioneer 7 | ============== 8 | 9 | * like a rocketeer, but for versions! 
10 | * https://github.com/warner/python-versioneer 11 | * Brian Warner 12 | * License: Public Domain 13 | * Compatible With: python2.6, 2.7, 3.2, 3.3, 3.4, 3.5, 3.6, and pypy 14 | * [![Latest Version] 15 | (https://pypip.in/version/versioneer/badge.svg?style=flat) 16 | ](https://pypi.python.org/pypi/versioneer/) 17 | * [![Build Status] 18 | (https://travis-ci.org/warner/python-versioneer.png?branch=master) 19 | ](https://travis-ci.org/warner/python-versioneer) 20 | 21 | This is a tool for managing a recorded version number in distutils-based 22 | python projects. The goal is to remove the tedious and error-prone "update 23 | the embedded version string" step from your release process. Making a new 24 | release should be as easy as recording a new tag in your version-control 25 | system, and maybe making new tarballs. 26 | 27 | 28 | ## Quick Install 29 | 30 | * `pip install versioneer` to somewhere to your $PATH 31 | * add a `[versioneer]` section to your setup.cfg (see below) 32 | * run `versioneer install` in your source tree, commit the results 33 | 34 | ## Version Identifiers 35 | 36 | Source trees come from a variety of places: 37 | 38 | * a version-control system checkout (mostly used by developers) 39 | * a nightly tarball, produced by build automation 40 | * a snapshot tarball, produced by a web-based VCS browser, like github's 41 | "tarball from tag" feature 42 | * a release tarball, produced by "setup.py sdist", distributed through PyPI 43 | 44 | Within each source tree, the version identifier (either a string or a number, 45 | this tool is format-agnostic) can come from a variety of places: 46 | 47 | * ask the VCS tool itself, e.g. 
"git describe" (for checkouts), which knows 48 | about recent "tags" and an absolute revision-id 49 | * the name of the directory into which the tarball was unpacked 50 | * an expanded VCS keyword ($Id$, etc) 51 | * a `_version.py` created by some earlier build step 52 | 53 | For released software, the version identifier is closely related to a VCS 54 | tag. Some projects use tag names that include more than just the version 55 | string (e.g. "myproject-1.2" instead of just "1.2"), in which case the tool 56 | needs to strip the tag prefix to extract the version identifier. For 57 | unreleased software (between tags), the version identifier should provide 58 | enough information to help developers recreate the same tree, while also 59 | giving them an idea of roughly how old the tree is (after version 1.2, before 60 | version 1.3). Many VCS systems can report a description that captures this, 61 | for example `git describe --tags --dirty --always` reports things like 62 | "0.7-1-g574ab98-dirty" to indicate that the checkout is one revision past the 63 | 0.7 tag, has a unique revision id of "574ab98", and is "dirty" (it has 64 | uncommitted changes. 65 | 66 | The version identifier is used for multiple purposes: 67 | 68 | * to allow the module to self-identify its version: `myproject.__version__` 69 | * to choose a name and prefix for a 'setup.py sdist' tarball 70 | 71 | ## Theory of Operation 72 | 73 | Versioneer works by adding a special `_version.py` file into your source 74 | tree, where your `__init__.py` can import it. This `_version.py` knows how to 75 | dynamically ask the VCS tool for version information at import time. 76 | 77 | `_version.py` also contains `$Revision$` markers, and the installation 78 | process marks `_version.py` to have this marker rewritten with a tag name 79 | during the `git archive` command. As a result, generated tarballs will 80 | contain enough information to get the proper version. 
81 | 82 | To allow `setup.py` to compute a version too, a `versioneer.py` is added to 83 | the top level of your source tree, next to `setup.py` and the `setup.cfg` 84 | that configures it. This overrides several distutils/setuptools commands to 85 | compute the version when invoked, and changes `setup.py build` and `setup.py 86 | sdist` to replace `_version.py` with a small static file that contains just 87 | the generated version data. 88 | 89 | ## Installation 90 | 91 | See [INSTALL.md](./INSTALL.md) for detailed installation instructions. 92 | 93 | ## Version-String Flavors 94 | 95 | Code which uses Versioneer can learn about its version string at runtime by 96 | importing `_version` from your main `__init__.py` file and running the 97 | `get_versions()` function. From the "outside" (e.g. in `setup.py`), you can 98 | import the top-level `versioneer.py` and run `get_versions()`. 99 | 100 | Both functions return a dictionary with different flavors of version 101 | information: 102 | 103 | * `['version']`: A condensed version string, rendered using the selected 104 | style. This is the most commonly used value for the project's version 105 | string. The default "pep440" style yields strings like `0.11`, 106 | `0.11+2.g1076c97`, or `0.11+2.g1076c97.dirty`. See the "Styles" section 107 | below for alternative styles. 108 | 109 | * `['full-revisionid']`: detailed revision identifier. For Git, this is the 110 | full SHA1 commit id, e.g. "1076c978a8d3cfc70f408fe5974aa6c092c949ac". 111 | 112 | * `['date']`: Date and time of the latest `HEAD` commit. For Git, it is the 113 | commit date in ISO 8601 format. This will be None if the date is not 114 | available. 115 | 116 | * `['dirty']`: a boolean, True if the tree has uncommitted changes. 
Note that 117 | this is only accurate if run in a VCS checkout, otherwise it is likely to 118 | be False or None 119 | 120 | * `['error']`: if the version string could not be computed, this will be set 121 | to a string describing the problem, otherwise it will be None. It may be 122 | useful to throw an exception in setup.py if this is set, to avoid e.g. 123 | creating tarballs with a version string of "unknown". 124 | 125 | Some variants are more useful than others. Including `full-revisionid` in a 126 | bug report should allow developers to reconstruct the exact code being tested 127 | (or indicate the presence of local changes that should be shared with the 128 | developers). `version` is suitable for display in an "about" box or a CLI 129 | `--version` output: it can be easily compared against release notes and lists 130 | of bugs fixed in various releases. 131 | 132 | The installer adds the following text to your `__init__.py` to place a basic 133 | version in `YOURPROJECT.__version__`: 134 | 135 | from ._version import get_versions 136 | __version__ = get_versions()['version'] 137 | del get_versions 138 | 139 | ## Styles 140 | 141 | The setup.cfg `style=` configuration controls how the VCS information is 142 | rendered into a version string. 143 | 144 | The default style, "pep440", produces a PEP440-compliant string, equal to the 145 | un-prefixed tag name for actual releases, and containing an additional "local 146 | version" section with more detail for in-between builds. For Git, this is 147 | TAG[+DISTANCE.gHEX[.dirty]] , using information from `git describe --tags 148 | --dirty --always`. For example "0.11+2.g1076c97.dirty" indicates that the 149 | tree is like the "1076c97" commit but has uncommitted changes (".dirty"), and 150 | that this commit is two revisions ("+2") beyond the "0.11" tag. For released 151 | software (exactly equal to a known tag), the identifier will only contain the 152 | stripped tag, e.g. "0.11". 
153 | 154 | Other styles are available. See [details.md](details.md) in the Versioneer 155 | source tree for descriptions. 156 | 157 | ## Debugging 158 | 159 | Versioneer tries to avoid fatal errors: if something goes wrong, it will tend 160 | to return a version of "0+unknown". To investigate the problem, run `setup.py 161 | version`, which will run the version-lookup code in a verbose mode, and will 162 | display the full contents of `get_versions()` (including the `error` string, 163 | which may help identify what went wrong). 164 | 165 | ## Known Limitations 166 | 167 | Some situations are known to cause problems for Versioneer. This details the 168 | most significant ones. More can be found on Github 169 | [issues page](https://github.com/warner/python-versioneer/issues). 170 | 171 | ### Subprojects 172 | 173 | Versioneer has limited support for source trees in which `setup.py` is not in 174 | the root directory (e.g. `setup.py` and `.git/` are *not* siblings). The are 175 | two common reasons why `setup.py` might not be in the root: 176 | 177 | * Source trees which contain multiple subprojects, such as 178 | [Buildbot](https://github.com/buildbot/buildbot), which contains both 179 | "master" and "slave" subprojects, each with their own `setup.py`, 180 | `setup.cfg`, and `tox.ini`. Projects like these produce multiple PyPI 181 | distributions (and upload multiple independently-installable tarballs). 182 | * Source trees whose main purpose is to contain a C library, but which also 183 | provide bindings to Python (and perhaps other langauges) in subdirectories. 184 | 185 | Versioneer will look for `.git` in parent directories, and most operations 186 | should get the right version string. However `pip` and `setuptools` have bugs 187 | and implementation details which frequently cause `pip install .` from a 188 | subproject directory to fail to find a correct version string (so it usually 189 | defaults to `0+unknown`). 
190 | 191 | `pip install --editable .` should work correctly. `setup.py install` might 192 | work too. 193 | 194 | Pip-8.1.1 is known to have this problem, but hopefully it will get fixed in 195 | some later version. 196 | 197 | [Bug #38](https://github.com/warner/python-versioneer/issues/38) is tracking 198 | this issue. The discussion in 199 | [PR #61](https://github.com/warner/python-versioneer/pull/61) describes the 200 | issue from the Versioneer side in more detail. 201 | [pip PR#3176](https://github.com/pypa/pip/pull/3176) and 202 | [pip PR#3615](https://github.com/pypa/pip/pull/3615) contain work to improve 203 | pip to let Versioneer work correctly. 204 | 205 | Versioneer-0.16 and earlier only looked for a `.git` directory next to the 206 | `setup.cfg`, so subprojects were completely unsupported with those releases. 207 | 208 | ### Editable installs with setuptools <= 18.5 209 | 210 | `setup.py develop` and `pip install --editable .` allow you to install a 211 | project into a virtualenv once, then continue editing the source code (and 212 | test) without re-installing after every change. 213 | 214 | "Entry-point scripts" (`setup(entry_points={"console_scripts": ..})`) are a 215 | convenient way to specify executable scripts that should be installed along 216 | with the python package. 217 | 218 | These both work as expected when using modern setuptools. When using 219 | setuptools-18.5 or earlier, however, certain operations will cause 220 | `pkg_resources.DistributionNotFound` errors when running the entrypoint 221 | script, which must be resolved by re-installing the package. This happens 222 | when the install happens with one version, then the egg_info data is 223 | regenerated while a different version is checked out. Many setup.py commands 224 | cause egg_info to be rebuilt (including `sdist`, `wheel`, and installing into 225 | a different virtualenv), so this can be surprising. 
it may be surprising for applications when they write these strings to a network connection or include them in bytes-oriented APIs like cryptographic checksums.
This tool is designed to be easily extended to other version-control systems:
" 312 | "Versioneer requires setup.py to be executed from " 313 | "its immediate directory (like 'python setup.py COMMAND'), " 314 | "or in a way that lets it use sys.argv[0] to find the root " 315 | "(like 'python path/to/setup.py COMMAND').") 316 | raise VersioneerBadRootError(err) 317 | try: 318 | # Certain runtime workflows (setup.py install/develop in a setuptools 319 | # tree) execute all dependencies in a single python process, so 320 | # "versioneer" may be imported multiple times, and python's shared 321 | # module-import table will cache the first one. So we can't use 322 | # os.path.dirname(__file__), as that will find whichever 323 | # versioneer.py was first imported, even in later projects. 324 | me = os.path.realpath(os.path.abspath(__file__)) 325 | me_dir = os.path.normcase(os.path.splitext(me)[0]) 326 | vsr_dir = os.path.normcase(os.path.splitext(versioneer_py)[0]) 327 | if me_dir != vsr_dir: 328 | print("Warning: build in %s is using versioneer.py from %s" 329 | % (os.path.dirname(me), versioneer_py)) 330 | except NameError: 331 | pass 332 | return root 333 | 334 | 335 | def get_config_from_root(root): 336 | """Read the project setup.cfg file to determine Versioneer config.""" 337 | # This might raise EnvironmentError (if setup.cfg is missing), or 338 | # configparser.NoSectionError (if it lacks a [versioneer] section), or 339 | # configparser.NoOptionError (if it lacks "VCS="). See the docstring at 340 | # the top of versioneer.py for instructions on writing your setup.cfg . 
341 | setup_cfg = os.path.join(root, "setup.cfg") 342 | parser = configparser.SafeConfigParser() 343 | with open(setup_cfg, "r") as f: 344 | parser.readfp(f) 345 | VCS = parser.get("versioneer", "VCS") # mandatory 346 | 347 | def get(parser, name): 348 | if parser.has_option("versioneer", name): 349 | return parser.get("versioneer", name) 350 | return None 351 | cfg = VersioneerConfig() 352 | cfg.VCS = VCS 353 | cfg.style = get(parser, "style") or "" 354 | cfg.versionfile_source = get(parser, "versionfile_source") 355 | cfg.versionfile_build = get(parser, "versionfile_build") 356 | cfg.tag_prefix = get(parser, "tag_prefix") 357 | if cfg.tag_prefix in ("''", '""'): 358 | cfg.tag_prefix = "" 359 | cfg.parentdir_prefix = get(parser, "parentdir_prefix") 360 | cfg.verbose = get(parser, "verbose") 361 | return cfg 362 | 363 | 364 | class NotThisMethod(Exception): 365 | """Exception raised if a method is not valid for the current scenario.""" 366 | 367 | 368 | # these dictionaries contain VCS-specific tools 369 | LONG_VERSION_PY = {} 370 | HANDLERS = {} 371 | 372 | 373 | def register_vcs_handler(vcs, method): # decorator 374 | """Decorator to mark a method as the handler for a particular VCS.""" 375 | def decorate(f): 376 | """Store f in HANDLERS[vcs][method].""" 377 | if vcs not in HANDLERS: 378 | HANDLERS[vcs] = {} 379 | HANDLERS[vcs][method] = f 380 | return f 381 | return decorate 382 | 383 | 384 | def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False, 385 | env=None): 386 | """Call the given command(s).""" 387 | assert isinstance(commands, list) 388 | p = None 389 | for c in commands: 390 | try: 391 | dispcmd = str([c] + args) 392 | # remember shell=False, so use git.cmd on windows, not just git 393 | p = subprocess.Popen([c] + args, cwd=cwd, env=env, 394 | stdout=subprocess.PIPE, 395 | stderr=(subprocess.PIPE if hide_stderr 396 | else None)) 397 | break 398 | except EnvironmentError: 399 | e = sys.exc_info()[1] 400 | if e.errno == errno.ENOENT: 
401 | continue 402 | if verbose: 403 | print("unable to run %s" % dispcmd) 404 | print(e) 405 | return None, None 406 | else: 407 | if verbose: 408 | print("unable to find command, tried %s" % (commands,)) 409 | return None, None 410 | stdout = p.communicate()[0].strip() 411 | if sys.version_info[0] >= 3: 412 | stdout = stdout.decode() 413 | if p.returncode != 0: 414 | if verbose: 415 | print("unable to run %s (error)" % dispcmd) 416 | print("stdout was %s" % stdout) 417 | return None, p.returncode 418 | return stdout, p.returncode 419 | 420 | 421 | LONG_VERSION_PY['git'] = ''' 422 | # This file helps to compute a version number in source trees obtained from 423 | # git-archive tarball (such as those provided by githubs download-from-tag 424 | # feature). Distribution tarballs (built by setup.py sdist) and build 425 | # directories (produced by setup.py build) will contain a much shorter file 426 | # that just contains the computed version number. 427 | 428 | # This file is released into the public domain. Generated by 429 | # versioneer-0.18 (https://github.com/warner/python-versioneer) 430 | 431 | """Git implementation of _version.py.""" 432 | 433 | import errno 434 | import os 435 | import re 436 | import subprocess 437 | import sys 438 | 439 | 440 | def get_keywords(): 441 | """Get the keywords needed to look up the version information.""" 442 | # these strings will be replaced by git during git-archive. 443 | # setup.py/versioneer.py will grep for the variable names, so they must 444 | # each be defined on a line of their own. _version.py will just call 445 | # get_keywords(). 
446 | git_refnames = "%(DOLLAR)sFormat:%%d%(DOLLAR)s" 447 | git_full = "%(DOLLAR)sFormat:%%H%(DOLLAR)s" 448 | git_date = "%(DOLLAR)sFormat:%%ci%(DOLLAR)s" 449 | keywords = {"refnames": git_refnames, "full": git_full, "date": git_date} 450 | return keywords 451 | 452 | 453 | class VersioneerConfig: 454 | """Container for Versioneer configuration parameters.""" 455 | 456 | 457 | def get_config(): 458 | """Create, populate and return the VersioneerConfig() object.""" 459 | # these strings are filled in when 'setup.py versioneer' creates 460 | # _version.py 461 | cfg = VersioneerConfig() 462 | cfg.VCS = "git" 463 | cfg.style = "%(STYLE)s" 464 | cfg.tag_prefix = "%(TAG_PREFIX)s" 465 | cfg.parentdir_prefix = "%(PARENTDIR_PREFIX)s" 466 | cfg.versionfile_source = "%(VERSIONFILE_SOURCE)s" 467 | cfg.verbose = False 468 | return cfg 469 | 470 | 471 | class NotThisMethod(Exception): 472 | """Exception raised if a method is not valid for the current scenario.""" 473 | 474 | 475 | LONG_VERSION_PY = {} 476 | HANDLERS = {} 477 | 478 | 479 | def register_vcs_handler(vcs, method): # decorator 480 | """Decorator to mark a method as the handler for a particular VCS.""" 481 | def decorate(f): 482 | """Store f in HANDLERS[vcs][method].""" 483 | if vcs not in HANDLERS: 484 | HANDLERS[vcs] = {} 485 | HANDLERS[vcs][method] = f 486 | return f 487 | return decorate 488 | 489 | 490 | def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False, 491 | env=None): 492 | """Call the given command(s).""" 493 | assert isinstance(commands, list) 494 | p = None 495 | for c in commands: 496 | try: 497 | dispcmd = str([c] + args) 498 | # remember shell=False, so use git.cmd on windows, not just git 499 | p = subprocess.Popen([c] + args, cwd=cwd, env=env, 500 | stdout=subprocess.PIPE, 501 | stderr=(subprocess.PIPE if hide_stderr 502 | else None)) 503 | break 504 | except EnvironmentError: 505 | e = sys.exc_info()[1] 506 | if e.errno == errno.ENOENT: 507 | continue 508 | if verbose: 509 | 
print("unable to run %%s" %% dispcmd) 510 | print(e) 511 | return None, None 512 | else: 513 | if verbose: 514 | print("unable to find command, tried %%s" %% (commands,)) 515 | return None, None 516 | stdout = p.communicate()[0].strip() 517 | if sys.version_info[0] >= 3: 518 | stdout = stdout.decode() 519 | if p.returncode != 0: 520 | if verbose: 521 | print("unable to run %%s (error)" %% dispcmd) 522 | print("stdout was %%s" %% stdout) 523 | return None, p.returncode 524 | return stdout, p.returncode 525 | 526 | 527 | def versions_from_parentdir(parentdir_prefix, root, verbose): 528 | """Try to determine the version from the parent directory name. 529 | 530 | Source tarballs conventionally unpack into a directory that includes both 531 | the project name and a version string. We will also support searching up 532 | two directory levels for an appropriately named parent directory 533 | """ 534 | rootdirs = [] 535 | 536 | for i in range(3): 537 | dirname = os.path.basename(root) 538 | if dirname.startswith(parentdir_prefix): 539 | return {"version": dirname[len(parentdir_prefix):], 540 | "full-revisionid": None, 541 | "dirty": False, "error": None, "date": None} 542 | else: 543 | rootdirs.append(root) 544 | root = os.path.dirname(root) # up a level 545 | 546 | if verbose: 547 | print("Tried directories %%s but none started with prefix %%s" %% 548 | (str(rootdirs), parentdir_prefix)) 549 | raise NotThisMethod("rootdir doesn't start with parentdir_prefix") 550 | 551 | 552 | @register_vcs_handler("git", "get_keywords") 553 | def git_get_keywords(versionfile_abs): 554 | """Extract version information from the given file.""" 555 | # the code embedded in _version.py can just fetch the value of these 556 | # keywords. When used from setup.py, we don't want to import _version.py, 557 | # so we do it with a regexp instead. This function is not used from 558 | # _version.py. 
559 | keywords = {} 560 | try: 561 | f = open(versionfile_abs, "r") 562 | for line in f.readlines(): 563 | if line.strip().startswith("git_refnames ="): 564 | mo = re.search(r'=\s*"(.*)"', line) 565 | if mo: 566 | keywords["refnames"] = mo.group(1) 567 | if line.strip().startswith("git_full ="): 568 | mo = re.search(r'=\s*"(.*)"', line) 569 | if mo: 570 | keywords["full"] = mo.group(1) 571 | if line.strip().startswith("git_date ="): 572 | mo = re.search(r'=\s*"(.*)"', line) 573 | if mo: 574 | keywords["date"] = mo.group(1) 575 | f.close() 576 | except EnvironmentError: 577 | pass 578 | return keywords 579 | 580 | 581 | @register_vcs_handler("git", "keywords") 582 | def git_versions_from_keywords(keywords, tag_prefix, verbose): 583 | """Get version information from git keywords.""" 584 | if not keywords: 585 | raise NotThisMethod("no keywords at all, weird") 586 | date = keywords.get("date") 587 | if date is not None: 588 | # git-2.2.0 added "%%cI", which expands to an ISO-8601 -compliant 589 | # datestamp. However we prefer "%%ci" (which expands to an "ISO-8601 590 | # -like" string, which we must then edit to make compliant), because 591 | # it's been around since git-1.5.3, and it's too difficult to 592 | # discover which version we're using, or to work around using an 593 | # older one. 594 | date = date.strip().replace(" ", "T", 1).replace(" ", "", 1) 595 | refnames = keywords["refnames"].strip() 596 | if refnames.startswith("$Format"): 597 | if verbose: 598 | print("keywords are unexpanded, not using") 599 | raise NotThisMethod("unexpanded keywords, not a git-archive tarball") 600 | refs = set([r.strip() for r in refnames.strip("()").split(",")]) 601 | # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of 602 | # just "foo-1.0". If we see a "tag: " prefix, prefer those. 603 | TAG = "tag: " 604 | tags = set([r[len(TAG):] for r in refs if r.startswith(TAG)]) 605 | if not tags: 606 | # Either we're using git < 1.8.3, or there really are no tags. 
We use 607 | # a heuristic: assume all version tags have a digit. The old git %%d 608 | # expansion behaves like git log --decorate=short and strips out the 609 | # refs/heads/ and refs/tags/ prefixes that would let us distinguish 610 | # between branches and tags. By ignoring refnames without digits, we 611 | # filter out many common branch names like "release" and 612 | # "stabilization", as well as "HEAD" and "main". 613 | tags = set([r for r in refs if re.search(r'\d', r)]) 614 | if verbose: 615 | print("discarding '%%s', no digits" %% ",".join(refs - tags)) 616 | if verbose: 617 | print("likely tags: %%s" %% ",".join(sorted(tags))) 618 | for ref in sorted(tags): 619 | # sorting will prefer e.g. "2.0" over "2.0rc1" 620 | if ref.startswith(tag_prefix): 621 | r = ref[len(tag_prefix):] 622 | if verbose: 623 | print("picking %%s" %% r) 624 | return {"version": r, 625 | "full-revisionid": keywords["full"].strip(), 626 | "dirty": False, "error": None, 627 | "date": date} 628 | # no suitable tags, so version is "0+unknown", but full hex is still there 629 | if verbose: 630 | print("no suitable tags, using unknown + full revision id") 631 | return {"version": "0+unknown", 632 | "full-revisionid": keywords["full"].strip(), 633 | "dirty": False, "error": "no suitable tags", "date": None} 634 | 635 | 636 | @register_vcs_handler("git", "pieces_from_vcs") 637 | def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command): 638 | """Get version from 'git describe' in the root of the source tree. 639 | 640 | This only gets called if the git-archive 'subst' keywords were *not* 641 | expanded, and _version.py hasn't already been rewritten with a short 642 | version string, meaning we're inside a checked out source tree. 
643 | """ 644 | GITS = ["git"] 645 | if sys.platform == "win32": 646 | GITS = ["git.cmd", "git.exe"] 647 | 648 | out, rc = run_command(GITS, ["rev-parse", "--git-dir"], cwd=root, 649 | hide_stderr=True) 650 | if rc != 0: 651 | if verbose: 652 | print("Directory %%s not under git control" %% root) 653 | raise NotThisMethod("'git rev-parse --git-dir' returned error") 654 | 655 | # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty] 656 | # if there isn't one, this yields HEX[-dirty] (no NUM) 657 | describe_out, rc = run_command(GITS, ["describe", "--tags", "--dirty", 658 | "--always", "--long", 659 | "--match", "%%s*" %% tag_prefix], 660 | cwd=root) 661 | # --long was added in git-1.5.5 662 | if describe_out is None: 663 | raise NotThisMethod("'git describe' failed") 664 | describe_out = describe_out.strip() 665 | full_out, rc = run_command(GITS, ["rev-parse", "HEAD"], cwd=root) 666 | if full_out is None: 667 | raise NotThisMethod("'git rev-parse' failed") 668 | full_out = full_out.strip() 669 | 670 | pieces = {} 671 | pieces["long"] = full_out 672 | pieces["short"] = full_out[:7] # maybe improved later 673 | pieces["error"] = None 674 | 675 | # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty] 676 | # TAG might have hyphens. 677 | git_describe = describe_out 678 | 679 | # look for -dirty suffix 680 | dirty = git_describe.endswith("-dirty") 681 | pieces["dirty"] = dirty 682 | if dirty: 683 | git_describe = git_describe[:git_describe.rindex("-dirty")] 684 | 685 | # now we have TAG-NUM-gHEX or HEX 686 | 687 | if "-" in git_describe: 688 | # TAG-NUM-gHEX 689 | mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe) 690 | if not mo: 691 | # unparseable. Maybe git-describe is misbehaving? 
692 | pieces["error"] = ("unable to parse git-describe output: '%%s'" 693 | %% describe_out) 694 | return pieces 695 | 696 | # tag 697 | full_tag = mo.group(1) 698 | if not full_tag.startswith(tag_prefix): 699 | if verbose: 700 | fmt = "tag '%%s' doesn't start with prefix '%%s'" 701 | print(fmt %% (full_tag, tag_prefix)) 702 | pieces["error"] = ("tag '%%s' doesn't start with prefix '%%s'" 703 | %% (full_tag, tag_prefix)) 704 | return pieces 705 | pieces["closest-tag"] = full_tag[len(tag_prefix):] 706 | 707 | # distance: number of commits since tag 708 | pieces["distance"] = int(mo.group(2)) 709 | 710 | # commit: short hex revision ID 711 | pieces["short"] = mo.group(3) 712 | 713 | else: 714 | # HEX: no tags 715 | pieces["closest-tag"] = None 716 | count_out, rc = run_command(GITS, ["rev-list", "HEAD", "--count"], 717 | cwd=root) 718 | pieces["distance"] = int(count_out) # total number of commits 719 | 720 | # commit date: see ISO-8601 comment in git_versions_from_keywords() 721 | date = run_command(GITS, ["show", "-s", "--format=%%ci", "HEAD"], 722 | cwd=root)[0].strip() 723 | pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1) 724 | 725 | return pieces 726 | 727 | 728 | def plus_or_dot(pieces): 729 | """Return a + if we don't already have one, else return a .""" 730 | if "+" in pieces.get("closest-tag", ""): 731 | return "." 732 | return "+" 733 | 734 | 735 | def render_pep440(pieces): 736 | """Build up version string, with post-release "local version identifier". 737 | 738 | Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you 739 | get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty 740 | 741 | Exceptions: 742 | 1: no tags. git_describe was just HEX. 
0+untagged.DISTANCE.gHEX[.dirty] 743 | """ 744 | if pieces["closest-tag"]: 745 | rendered = pieces["closest-tag"] 746 | if pieces["distance"] or pieces["dirty"]: 747 | rendered += plus_or_dot(pieces) 748 | rendered += "%%d.g%%s" %% (pieces["distance"], pieces["short"]) 749 | if pieces["dirty"]: 750 | rendered += ".dirty" 751 | else: 752 | # exception #1 753 | rendered = "0+untagged.%%d.g%%s" %% (pieces["distance"], 754 | pieces["short"]) 755 | if pieces["dirty"]: 756 | rendered += ".dirty" 757 | return rendered 758 | 759 | 760 | def render_pep440_pre(pieces): 761 | """TAG[.post.devDISTANCE] -- No -dirty. 762 | 763 | Exceptions: 764 | 1: no tags. 0.post.devDISTANCE 765 | """ 766 | if pieces["closest-tag"]: 767 | rendered = pieces["closest-tag"] 768 | if pieces["distance"]: 769 | rendered += ".post.dev%%d" %% pieces["distance"] 770 | else: 771 | # exception #1 772 | rendered = "0.post.dev%%d" %% pieces["distance"] 773 | return rendered 774 | 775 | 776 | def render_pep440_post(pieces): 777 | """TAG[.postDISTANCE[.dev0]+gHEX] . 778 | 779 | The ".dev0" means dirty. Note that .dev0 sorts backwards 780 | (a dirty tree will appear "older" than the corresponding clean one), 781 | but you shouldn't be releasing software with -dirty anyways. 782 | 783 | Exceptions: 784 | 1: no tags. 0.postDISTANCE[.dev0] 785 | """ 786 | if pieces["closest-tag"]: 787 | rendered = pieces["closest-tag"] 788 | if pieces["distance"] or pieces["dirty"]: 789 | rendered += ".post%%d" %% pieces["distance"] 790 | if pieces["dirty"]: 791 | rendered += ".dev0" 792 | rendered += plus_or_dot(pieces) 793 | rendered += "g%%s" %% pieces["short"] 794 | else: 795 | # exception #1 796 | rendered = "0.post%%d" %% pieces["distance"] 797 | if pieces["dirty"]: 798 | rendered += ".dev0" 799 | rendered += "+g%%s" %% pieces["short"] 800 | return rendered 801 | 802 | 803 | def render_pep440_old(pieces): 804 | """TAG[.postDISTANCE[.dev0]] . 805 | 806 | The ".dev0" means dirty. 807 | 808 | Eexceptions: 809 | 1: no tags. 
0.postDISTANCE[.dev0] 810 | """ 811 | if pieces["closest-tag"]: 812 | rendered = pieces["closest-tag"] 813 | if pieces["distance"] or pieces["dirty"]: 814 | rendered += ".post%%d" %% pieces["distance"] 815 | if pieces["dirty"]: 816 | rendered += ".dev0" 817 | else: 818 | # exception #1 819 | rendered = "0.post%%d" %% pieces["distance"] 820 | if pieces["dirty"]: 821 | rendered += ".dev0" 822 | return rendered 823 | 824 | 825 | def render_git_describe(pieces): 826 | """TAG[-DISTANCE-gHEX][-dirty]. 827 | 828 | Like 'git describe --tags --dirty --always'. 829 | 830 | Exceptions: 831 | 1: no tags. HEX[-dirty] (note: no 'g' prefix) 832 | """ 833 | if pieces["closest-tag"]: 834 | rendered = pieces["closest-tag"] 835 | if pieces["distance"]: 836 | rendered += "-%%d-g%%s" %% (pieces["distance"], pieces["short"]) 837 | else: 838 | # exception #1 839 | rendered = pieces["short"] 840 | if pieces["dirty"]: 841 | rendered += "-dirty" 842 | return rendered 843 | 844 | 845 | def render_git_describe_long(pieces): 846 | """TAG-DISTANCE-gHEX[-dirty]. 847 | 848 | Like 'git describe --tags --dirty --always -long'. 849 | The distance/hash is unconditional. 850 | 851 | Exceptions: 852 | 1: no tags. 
HEX[-dirty] (note: no 'g' prefix) 853 | """ 854 | if pieces["closest-tag"]: 855 | rendered = pieces["closest-tag"] 856 | rendered += "-%%d-g%%s" %% (pieces["distance"], pieces["short"]) 857 | else: 858 | # exception #1 859 | rendered = pieces["short"] 860 | if pieces["dirty"]: 861 | rendered += "-dirty" 862 | return rendered 863 | 864 | 865 | def render(pieces, style): 866 | """Render the given version pieces into the requested style.""" 867 | if pieces["error"]: 868 | return {"version": "unknown", 869 | "full-revisionid": pieces.get("long"), 870 | "dirty": None, 871 | "error": pieces["error"], 872 | "date": None} 873 | 874 | if not style or style == "default": 875 | style = "pep440" # the default 876 | 877 | if style == "pep440": 878 | rendered = render_pep440(pieces) 879 | elif style == "pep440-pre": 880 | rendered = render_pep440_pre(pieces) 881 | elif style == "pep440-post": 882 | rendered = render_pep440_post(pieces) 883 | elif style == "pep440-old": 884 | rendered = render_pep440_old(pieces) 885 | elif style == "git-describe": 886 | rendered = render_git_describe(pieces) 887 | elif style == "git-describe-long": 888 | rendered = render_git_describe_long(pieces) 889 | else: 890 | raise ValueError("unknown style '%%s'" %% style) 891 | 892 | return {"version": rendered, "full-revisionid": pieces["long"], 893 | "dirty": pieces["dirty"], "error": None, 894 | "date": pieces.get("date")} 895 | 896 | 897 | def get_versions(): 898 | """Get version information or return default if unable to do so.""" 899 | # I am in _version.py, which lives at ROOT/VERSIONFILE_SOURCE. If we have 900 | # __file__, we can work backwards from there to the root. Some 901 | # py2exe/bbfreeze/non-CPython implementations don't do __file__, in which 902 | # case we can only use expanded keywords. 
903 | 904 | cfg = get_config() 905 | verbose = cfg.verbose 906 | 907 | try: 908 | return git_versions_from_keywords(get_keywords(), cfg.tag_prefix, 909 | verbose) 910 | except NotThisMethod: 911 | pass 912 | 913 | try: 914 | root = os.path.realpath(__file__) 915 | # versionfile_source is the relative path from the top of the source 916 | # tree (where the .git directory might live) to this file. Invert 917 | # this to find the root from __file__. 918 | for i in cfg.versionfile_source.split('/'): 919 | root = os.path.dirname(root) 920 | except NameError: 921 | return {"version": "0+unknown", "full-revisionid": None, 922 | "dirty": None, 923 | "error": "unable to find root of source tree", 924 | "date": None} 925 | 926 | try: 927 | pieces = git_pieces_from_vcs(cfg.tag_prefix, root, verbose) 928 | return render(pieces, cfg.style) 929 | except NotThisMethod: 930 | pass 931 | 932 | try: 933 | if cfg.parentdir_prefix: 934 | return versions_from_parentdir(cfg.parentdir_prefix, root, verbose) 935 | except NotThisMethod: 936 | pass 937 | 938 | return {"version": "0+unknown", "full-revisionid": None, 939 | "dirty": None, 940 | "error": "unable to compute version", "date": None} 941 | ''' 942 | 943 | 944 | @register_vcs_handler("git", "get_keywords") 945 | def git_get_keywords(versionfile_abs): 946 | """Extract version information from the given file.""" 947 | # the code embedded in _version.py can just fetch the value of these 948 | # keywords. When used from setup.py, we don't want to import _version.py, 949 | # so we do it with a regexp instead. This function is not used from 950 | # _version.py. 
951 | keywords = {} 952 | try: 953 | f = open(versionfile_abs, "r") 954 | for line in f.readlines(): 955 | if line.strip().startswith("git_refnames ="): 956 | mo = re.search(r'=\s*"(.*)"', line) 957 | if mo: 958 | keywords["refnames"] = mo.group(1) 959 | if line.strip().startswith("git_full ="): 960 | mo = re.search(r'=\s*"(.*)"', line) 961 | if mo: 962 | keywords["full"] = mo.group(1) 963 | if line.strip().startswith("git_date ="): 964 | mo = re.search(r'=\s*"(.*)"', line) 965 | if mo: 966 | keywords["date"] = mo.group(1) 967 | f.close() 968 | except EnvironmentError: 969 | pass 970 | return keywords 971 | 972 | 973 | @register_vcs_handler("git", "keywords") 974 | def git_versions_from_keywords(keywords, tag_prefix, verbose): 975 | """Get version information from git keywords.""" 976 | if not keywords: 977 | raise NotThisMethod("no keywords at all, weird") 978 | date = keywords.get("date") 979 | if date is not None: 980 | # git-2.2.0 added "%cI", which expands to an ISO-8601 -compliant 981 | # datestamp. However we prefer "%ci" (which expands to an "ISO-8601 982 | # -like" string, which we must then edit to make compliant), because 983 | # it's been around since git-1.5.3, and it's too difficult to 984 | # discover which version we're using, or to work around using an 985 | # older one. 986 | date = date.strip().replace(" ", "T", 1).replace(" ", "", 1) 987 | refnames = keywords["refnames"].strip() 988 | if refnames.startswith("$Format"): 989 | if verbose: 990 | print("keywords are unexpanded, not using") 991 | raise NotThisMethod("unexpanded keywords, not a git-archive tarball") 992 | refs = set([r.strip() for r in refnames.strip("()").split(",")]) 993 | # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of 994 | # just "foo-1.0". If we see a "tag: " prefix, prefer those. 995 | TAG = "tag: " 996 | tags = set([r[len(TAG):] for r in refs if r.startswith(TAG)]) 997 | if not tags: 998 | # Either we're using git < 1.8.3, or there really are no tags. 
We use
    # a heuristic: assume all version tags have a digit. The old git %d
    # expansion behaves like git log --decorate=short and strips out the
    # refs/heads/ and refs/tags/ prefixes that would let us distinguish
    # between branches and tags. By ignoring refnames without digits, we
    # filter out many common branch names like "release" and
    # "stabilization", as well as "HEAD" and "main".
    tags = set([r for r in refs if re.search(r'\d', r)])
    if verbose:
        print("discarding '%s', no digits" % ",".join(refs - tags))
    if verbose:
        print("likely tags: %s" % ",".join(sorted(tags)))
    for ref in sorted(tags):
        # sorting will prefer e.g. "2.0" over "2.0rc1"
        if ref.startswith(tag_prefix):
            r = ref[len(tag_prefix):]
            if verbose:
                print("picking %s" % r)
            return {"version": r,
                    "full-revisionid": keywords["full"].strip(),
                    "dirty": False, "error": None,
                    "date": date}
    # no suitable tags, so version is "0+unknown", but full hex is still there
    if verbose:
        print("no suitable tags, using unknown + full revision id")
    return {"version": "0+unknown",
            "full-revisionid": keywords["full"].strip(),
            "dirty": False, "error": "no suitable tags", "date": None}


@register_vcs_handler("git", "pieces_from_vcs")
def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command):
    """Get version from 'git describe' in the root of the source tree.

    This only gets called if the git-archive 'subst' keywords were *not*
    expanded, and _version.py hasn't already been rewritten with a short
    version string, meaning we're inside a checked out source tree.
    """
    GITS = ["git"]
    if sys.platform == "win32":
        GITS = ["git.cmd", "git.exe"]

    out, rc = run_command(GITS, ["rev-parse", "--git-dir"], cwd=root,
                          hide_stderr=True)
    if rc != 0:
        if verbose:
            print("Directory %s not under git control" % root)
        raise NotThisMethod("'git rev-parse --git-dir' returned error")

    # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty]
    # if there isn't one, this yields HEX[-dirty] (no NUM)
    describe_out, rc = run_command(GITS, ["describe", "--tags", "--dirty",
                                          "--always", "--long",
                                          "--match", "%s*" % tag_prefix],
                                   cwd=root)
    # --long was added in git-1.5.5
    if describe_out is None:
        raise NotThisMethod("'git describe' failed")
    describe_out = describe_out.strip()
    full_out, rc = run_command(GITS, ["rev-parse", "HEAD"], cwd=root)
    if full_out is None:
        raise NotThisMethod("'git rev-parse' failed")
    full_out = full_out.strip()

    # pieces: keys "long"/"short" (hex ids), "error", "dirty",
    # "closest-tag", "distance", "date" -- consumed by the render_* family.
    pieces = {}
    pieces["long"] = full_out
    pieces["short"] = full_out[:7]  # maybe improved later
    pieces["error"] = None

    # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty]
    # TAG might have hyphens.
    git_describe = describe_out

    # look for -dirty suffix
    dirty = git_describe.endswith("-dirty")
    pieces["dirty"] = dirty
    if dirty:
        git_describe = git_describe[:git_describe.rindex("-dirty")]

    # now we have TAG-NUM-gHEX or HEX

    if "-" in git_describe:
        # TAG-NUM-gHEX
        mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe)
        if not mo:
            # unparseable. Maybe git-describe is misbehaving?
            pieces["error"] = ("unable to parse git-describe output: '%s'"
                               % describe_out)
            return pieces

        # tag
        full_tag = mo.group(1)
        if not full_tag.startswith(tag_prefix):
            if verbose:
                fmt = "tag '%s' doesn't start with prefix '%s'"
                print(fmt % (full_tag, tag_prefix))
            pieces["error"] = ("tag '%s' doesn't start with prefix '%s'"
                               % (full_tag, tag_prefix))
            return pieces
        pieces["closest-tag"] = full_tag[len(tag_prefix):]

        # distance: number of commits since tag
        pieces["distance"] = int(mo.group(2))

        # commit: short hex revision ID
        pieces["short"] = mo.group(3)

    else:
        # HEX: no tags
        pieces["closest-tag"] = None
        count_out, rc = run_command(GITS, ["rev-list", "HEAD", "--count"],
                                    cwd=root)
        pieces["distance"] = int(count_out)  # total number of commits

    # commit date: see ISO-8601 comment in git_versions_from_keywords()
    # ('%ci' yields "YYYY-MM-DD HH:MM:SS +TZ"; the two replace() calls
    # squeeze it into an ISO-8601-like "YYYY-MM-DDTHH:MM:SS+TZ")
    date = run_command(GITS, ["show", "-s", "--format=%ci", "HEAD"],
                       cwd=root)[0].strip()
    pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1)

    return pieces


def do_vcs_install(manifest_in, versionfile_source, ipy):
    """Git-specific installation logic for Versioneer.

    For Git, this means creating/changing .gitattributes to mark _version.py
    for export-subst keyword substitution.
    """
    GITS = ["git"]
    if sys.platform == "win32":
        GITS = ["git.cmd", "git.exe"]
    files = [manifest_in, versionfile_source]
    if ipy:
        files.append(ipy)
    try:
        # locate ourselves so versioneer.py itself gets 'git add'ed too
        me = __file__
        if me.endswith(".pyc") or me.endswith(".pyo"):
            me = os.path.splitext(me)[0] + ".py"
        versioneer_file = os.path.relpath(me)
    except NameError:
        versioneer_file = "versioneer.py"
    files.append(versioneer_file)
    present = False
    try:
        # NOTE(review): f is not closed if readlines() raises -- harmless
        # in practice here, but not exception-safe.
        f = open(".gitattributes", "r")
        for line in f.readlines():
            if line.strip().startswith(versionfile_source):
                if "export-subst" in line.strip().split()[1:]:
                    present = True
        f.close()
    except EnvironmentError:
        pass
    if not present:
        f = open(".gitattributes", "a+")
        f.write("%s export-subst\n" % versionfile_source)
        f.close()
        files.append(".gitattributes")
    run_command(GITS, ["add", "--"] + files)


def versions_from_parentdir(parentdir_prefix, root, verbose):
    """Try to determine the version from the parent directory name.

    Source tarballs conventionally unpack into a directory that includes both
    the project name and a version string. We will also support searching up
    two directory levels for an appropriately named parent directory
    """
    rootdirs = []

    # walk up at most three levels looking for PREFIX<version> dirnames
    for i in range(3):
        dirname = os.path.basename(root)
        if dirname.startswith(parentdir_prefix):
            return {"version": dirname[len(parentdir_prefix):],
                    "full-revisionid": None,
                    "dirty": False, "error": None, "date": None}
        else:
            rootdirs.append(root)
            root = os.path.dirname(root)  # up a level

    if verbose:
        print("Tried directories %s but none started with prefix %s" %
              (str(rootdirs), parentdir_prefix))
    raise NotThisMethod("rootdir doesn't start with parentdir_prefix")


# Template written into sdist tarballs in place of the live _version.py;
# versions_from_file() below parses it back out.
SHORT_VERSION_PY = """
# This file was generated by 'versioneer.py' (0.18) from
# revision-control system data, or from the parent directory name of an
# unpacked source archive. Distribution tarballs contain a pre-generated copy
# of this file.

import json

version_json = '''
%s
'''  # END VERSION_JSON


def get_versions():
    return json.loads(version_json)
"""


def versions_from_file(filename):
    """Try to determine the version from _version.py if present."""
    try:
        with open(filename) as f:
            contents = f.read()
    except EnvironmentError:
        raise NotThisMethod("unable to read _version.py")
    # match the SHORT_VERSION_PY template above; second pattern tolerates
    # CRLF line endings introduced by checkouts on Windows
    mo = re.search(r"version_json = '''\n(.*)'''  # END VERSION_JSON",
                   contents, re.M | re.S)
    if not mo:
        mo = re.search(r"version_json = '''\r\n(.*)'''  # END VERSION_JSON",
                       contents, re.M | re.S)
    if not mo:
        raise NotThisMethod("no version_json in _version.py")
    return json.loads(mo.group(1))


def write_to_version_file(filename, versions):
    """Write the given version number to the given _version.py file."""
    os.unlink(filename)
    contents = json.dumps(versions, sort_keys=True,
                          indent=1, separators=(",", ": "))
    with open(filename, "w") as f:
        f.write(SHORT_VERSION_PY % contents)

    print("set %s to '%s'" % (filename, versions["version"]))


def plus_or_dot(pieces):
    """Return a + if we don't already have one, else return a ."""
    if "+" in pieces.get("closest-tag", ""):
        return "."
    return "+"


def render_pep440(pieces):
    """Build up version string, with post-release "local version identifier".

    Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you
    get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty

    Exceptions:
    1: no tags. git_describe was just HEX. 0+untagged.DISTANCE.gHEX[.dirty]
    """
    if pieces["closest-tag"]:
        rendered = pieces["closest-tag"]
        if pieces["distance"] or pieces["dirty"]:
            rendered += plus_or_dot(pieces)
            rendered += "%d.g%s" % (pieces["distance"], pieces["short"])
            if pieces["dirty"]:
                rendered += ".dirty"
    else:
        # exception #1
        rendered = "0+untagged.%d.g%s" % (pieces["distance"],
                                          pieces["short"])
        if pieces["dirty"]:
            rendered += ".dirty"
    return rendered


def render_pep440_pre(pieces):
    """TAG[.post.devDISTANCE] -- No -dirty.

    Exceptions:
    1: no tags. 0.post.devDISTANCE
    """
    if pieces["closest-tag"]:
        rendered = pieces["closest-tag"]
        if pieces["distance"]:
            rendered += ".post.dev%d" % pieces["distance"]
    else:
        # exception #1
        rendered = "0.post.dev%d" % pieces["distance"]
    return rendered


def render_pep440_post(pieces):
    """TAG[.postDISTANCE[.dev0]+gHEX] .

    The ".dev0" means dirty. Note that .dev0 sorts backwards
    (a dirty tree will appear "older" than the corresponding clean one),
    but you shouldn't be releasing software with -dirty anyways.

    Exceptions:
    1: no tags. 0.postDISTANCE[.dev0]
    """
    if pieces["closest-tag"]:
        rendered = pieces["closest-tag"]
        if pieces["distance"] or pieces["dirty"]:
            rendered += ".post%d" % pieces["distance"]
            if pieces["dirty"]:
                rendered += ".dev0"
            rendered += plus_or_dot(pieces)
            rendered += "g%s" % pieces["short"]
    else:
        # exception #1
        rendered = "0.post%d" % pieces["distance"]
        if pieces["dirty"]:
            rendered += ".dev0"
        rendered += "+g%s" % pieces["short"]
    return rendered


def render_pep440_old(pieces):
    """TAG[.postDISTANCE[.dev0]] .

    The ".dev0" means dirty.

    Exceptions:
    1: no tags. 0.postDISTANCE[.dev0]
    """
    if pieces["closest-tag"]:
        rendered = pieces["closest-tag"]
        if pieces["distance"] or pieces["dirty"]:
            rendered += ".post%d" % pieces["distance"]
            if pieces["dirty"]:
                rendered += ".dev0"
    else:
        # exception #1
        rendered = "0.post%d" % pieces["distance"]
        if pieces["dirty"]:
            rendered += ".dev0"
    return rendered


def render_git_describe(pieces):
    """TAG[-DISTANCE-gHEX][-dirty].

    Like 'git describe --tags --dirty --always'.

    Exceptions:
    1: no tags. HEX[-dirty] (note: no 'g' prefix)
    """
    if pieces["closest-tag"]:
        rendered = pieces["closest-tag"]
        if pieces["distance"]:
            rendered += "-%d-g%s" % (pieces["distance"], pieces["short"])
    else:
        # exception #1
        rendered = pieces["short"]
    if pieces["dirty"]:
        rendered += "-dirty"
    return rendered


def render_git_describe_long(pieces):
    """TAG-DISTANCE-gHEX[-dirty].

    Like 'git describe --tags --dirty --always -long'.
    The distance/hash is unconditional.

    Exceptions:
    1: no tags. HEX[-dirty] (note: no 'g' prefix)
    """
    if pieces["closest-tag"]:
        rendered = pieces["closest-tag"]
        rendered += "-%d-g%s" % (pieces["distance"], pieces["short"])
    else:
        # exception #1
        rendered = pieces["short"]
    if pieces["dirty"]:
        rendered += "-dirty"
    return rendered


def render(pieces, style):
    """Render the given version pieces into the requested style."""
    if pieces["error"]:
        # an upstream extraction step failed; report it instead of a version
        return {"version": "unknown",
                "full-revisionid": pieces.get("long"),
                "dirty": None,
                "error": pieces["error"],
                "date": None}

    if not style or style == "default":
        style = "pep440"  # the default

    # dispatch on the 'style' value configured in setup.cfg
    if style == "pep440":
        rendered = render_pep440(pieces)
    elif style == "pep440-pre":
        rendered = render_pep440_pre(pieces)
    elif style == "pep440-post":
        rendered = render_pep440_post(pieces)
    elif style == "pep440-old":
        rendered = render_pep440_old(pieces)
    elif style == "git-describe":
        rendered = render_git_describe(pieces)
    elif style == "git-describe-long":
        rendered = render_git_describe_long(pieces)
    else:
        raise ValueError("unknown style '%s'" % style)

    return {"version": rendered, "full-revisionid": pieces["long"],
            "dirty": pieces["dirty"], "error": None,
            "date": pieces.get("date")}


class VersioneerBadRootError(Exception):
    """The project root directory is unknown or missing key files."""


def get_versions(verbose=False):
    """Get the project version from whatever source is available.

    Returns dict with two keys: 'version' and 'full'.
    """
    if "versioneer" in sys.modules:
        # see the discussion in cmdclass.py:get_cmdclass()
        del sys.modules["versioneer"]

    root = get_root()
    cfg = get_config_from_root(root)

    assert cfg.VCS is not None, "please set [versioneer]VCS= in setup.cfg"
    handlers = HANDLERS.get(cfg.VCS)
    assert handlers, "unrecognized VCS '%s'" % cfg.VCS
    verbose = verbose or cfg.verbose
    assert cfg.versionfile_source is not None, \
        "please set versioneer.versionfile_source"
    assert cfg.tag_prefix is not None, "please set versioneer.tag_prefix"

    versionfile_abs = os.path.join(root, cfg.versionfile_source)

    # extract version from first of: _version.py, VCS command (e.g. 'git
    # describe'), parentdir. This is meant to work for developers using a
    # source checkout, for users of a tarball created by 'setup.py sdist',
    # and for users of a tarball/zipball created by 'git archive' or github's
    # download-from-tag feature or the equivalent in other VCSes.
Assume
        # a main project A and a dependency B, which use different versions
        # of Versioneer. A's setup.py imports A's Versioneer, leaving it in
        # sys.modules by the time B's setup.py is executed, causing B to run
        # with the wrong versioneer. Setuptools wraps the sub-dep builds in a
        # sandbox that restores sys.modules to its pre-build state, so the
        # parent is protected against the child's "import versioneer". By
        # removing ourselves from sys.modules here, before the child build
        # happens, we protect the child from the parent's versioneer too.
        # Also see https://github.com/warner/python-versioneer/issues/52

    cmds = {}

    # we add "version" to both distutils and setuptools
    # NOTE(review): distutils is deprecated since Python 3.10 and removed in
    # 3.12; this vendored versioneer 0.18 predates that -- confirm target
    # Python versions before relying on these commands.
    from distutils.core import Command

    class cmd_version(Command):
        description = "report generated version string"
        user_options = []
        boolean_options = []

        def initialize_options(self):
            pass

        def finalize_options(self):
            pass

        def run(self):
            vers = get_versions(verbose=True)
            print("Version: %s" % vers["version"])
            print(" full-revisionid: %s" % vers.get("full-revisionid"))
            print(" dirty: %s" % vers.get("dirty"))
            print(" date: %s" % vers.get("date"))
            if vers["error"]:
                print(" error: %s" % vers["error"])
    cmds["version"] = cmd_version

    # we override "build_py" in both distutils and setuptools
    #
    # most invocation pathways end up running build_py:
    #  distutils/build -> build_py
    #  distutils/install -> distutils/build ->..
    #  setuptools/bdist_wheel -> distutils/install ->..
    #  setuptools/bdist_egg -> distutils/install_lib -> build_py
    #  setuptools/install -> bdist_egg ->..
    #  setuptools/develop -> ?
    #  pip install:
    #   copies source tree to a tempdir before running egg_info/etc
    #   if .git isn't copied too, 'git describe' will fail
    #   then does setup.py bdist_wheel, or sometimes setup.py install
    #  setup.py egg_info -> ?

    # we override different "build_py" commands for both environments
    if "setuptools" in sys.modules:
        from setuptools.command.build_py import build_py as _build_py
    else:
        from distutils.command.build_py import build_py as _build_py

    class cmd_build_py(_build_py):
        def run(self):
            root = get_root()
            cfg = get_config_from_root(root)
            versions = get_versions()
            _build_py.run(self)
            # now locate _version.py in the new build/ directory and replace
            # it with an updated value
            if cfg.versionfile_build:
                target_versionfile = os.path.join(self.build_lib,
                                                  cfg.versionfile_build)
                print("UPDATING %s" % target_versionfile)
                write_to_version_file(target_versionfile, versions)
    cmds["build_py"] = cmd_build_py

    if "cx_Freeze" in sys.modules:  # cx_freeze enabled?
        from cx_Freeze.dist import build_exe as _build_exe
        # nczeczulin reports that py2exe won't like the pep440-style string
        # as FILEVERSION, but it can be used for PRODUCTVERSION, e.g.
        # setup(console=[{
        #   "version": versioneer.get_version().split("+", 1)[0],  # FILEVERSION
        #   "product_version": versioneer.get_version(),
        #   ...

        class cmd_build_exe(_build_exe):
            def run(self):
                root = get_root()
                cfg = get_config_from_root(root)
                versions = get_versions()
                target_versionfile = cfg.versionfile_source
                print("UPDATING %s" % target_versionfile)
                write_to_version_file(target_versionfile, versions)

                _build_exe.run(self)
                os.unlink(target_versionfile)
                # restore the full long-form _version.py after the build
                with open(cfg.versionfile_source, "w") as f:
                    LONG = LONG_VERSION_PY[cfg.VCS]
                    f.write(LONG %
                            {"DOLLAR": "$",
                             "STYLE": cfg.style,
                             "TAG_PREFIX": cfg.tag_prefix,
                             "PARENTDIR_PREFIX": cfg.parentdir_prefix,
                             "VERSIONFILE_SOURCE": cfg.versionfile_source,
                             })
        cmds["build_exe"] = cmd_build_exe
        del cmds["build_py"]

    if 'py2exe' in sys.modules:  # py2exe enabled?
        try:
            from py2exe.distutils_buildexe import py2exe as _py2exe  # py3
        except ImportError:
            from py2exe.build_exe import py2exe as _py2exe  # py2

        class cmd_py2exe(_py2exe):
            def run(self):
                root = get_root()
                cfg = get_config_from_root(root)
                versions = get_versions()
                target_versionfile = cfg.versionfile_source
                print("UPDATING %s" % target_versionfile)
                write_to_version_file(target_versionfile, versions)

                _py2exe.run(self)
                os.unlink(target_versionfile)
                # restore the full long-form _version.py after the build
                with open(cfg.versionfile_source, "w") as f:
                    LONG = LONG_VERSION_PY[cfg.VCS]
                    f.write(LONG %
                            {"DOLLAR": "$",
                             "STYLE": cfg.style,
                             "TAG_PREFIX": cfg.tag_prefix,
                             "PARENTDIR_PREFIX": cfg.parentdir_prefix,
                             "VERSIONFILE_SOURCE": cfg.versionfile_source,
                             })
        cmds["py2exe"] = cmd_py2exe

    # we override different "sdist" commands for both environments
    if "setuptools" in sys.modules:
        from setuptools.command.sdist import sdist as _sdist
    else:
        from distutils.command.sdist import sdist as _sdist

    class cmd_sdist(_sdist):
        def run(self):
            versions = get_versions()
            self._versioneer_generated_versions = versions
            # unless we update this, the command will keep using the old
            # version
            self.distribution.metadata.version = versions["version"]
            return _sdist.run(self)

        def make_release_tree(self, base_dir, files):
            root = get_root()
            cfg = get_config_from_root(root)
            _sdist.make_release_tree(self, base_dir, files)
            # now locate _version.py in the new base_dir directory
            # (remembering that it may be a hardlink) and replace it with an
            # updated value
            target_versionfile = os.path.join(base_dir, cfg.versionfile_source)
            print("UPDATING %s" % target_versionfile)
            write_to_version_file(target_versionfile,
                                  self._versioneer_generated_versions)
    cmds["sdist"] = cmd_sdist

    return cmds


CONFIG_ERROR = """
setup.cfg is missing the necessary Versioneer configuration. You need
a section like:

 [versioneer]
 VCS = git
 style = pep440
 versionfile_source = src/myproject/_version.py
 versionfile_build = myproject/_version.py
 tag_prefix =
 parentdir_prefix = myproject-

You will also need to edit your setup.py to use the results:

 import versioneer
 setup(version=versioneer.get_version(),
       cmdclass=versioneer.get_cmdclass(), ...)

Please read the docstring in ./versioneer.py for configuration instructions,
edit setup.cfg, and re-run the installer or 'python versioneer.py setup'.
"""

SAMPLE_CONFIG = """
# See the docstring in versioneer.py for instructions. Note that you must
# re-run 'versioneer.py setup' after changing this section, and commit the
# resulting files.

[versioneer]
#VCS = git
#style = pep440
#versionfile_source =
#versionfile_build =
#tag_prefix =
#parentdir_prefix =

"""

INIT_PY_SNIPPET = """
from ._version import get_versions
__version__ = get_versions()['version']
del get_versions
"""


def do_setup():
    """Main VCS-independent setup function for installing Versioneer."""
    root = get_root()
    try:
        cfg = get_config_from_root(root)
    except (EnvironmentError, configparser.NoSectionError,
            configparser.NoOptionError) as e:
        if isinstance(e, (EnvironmentError, configparser.NoSectionError)):
            print("Adding sample versioneer config to setup.cfg",
                  file=sys.stderr)
            with open(os.path.join(root, "setup.cfg"), "a") as f:
                f.write(SAMPLE_CONFIG)
        print(CONFIG_ERROR, file=sys.stderr)
        return 1

    print(" creating %s" % cfg.versionfile_source)
    with open(cfg.versionfile_source, "w") as f:
        LONG = LONG_VERSION_PY[cfg.VCS]
        f.write(LONG % {"DOLLAR": "$",
                        "STYLE": cfg.style,
                        "TAG_PREFIX": cfg.tag_prefix,
                        "PARENTDIR_PREFIX": cfg.parentdir_prefix,
                        "VERSIONFILE_SOURCE": cfg.versionfile_source,
                        })

    # add the boilerplate snippet to the package __init__.py, if present
    ipy = os.path.join(os.path.dirname(cfg.versionfile_source),
                       "__init__.py")
    if os.path.exists(ipy):
        try:
            with open(ipy, "r") as f:
                old = f.read()
        except EnvironmentError:
            old = ""
        if INIT_PY_SNIPPET not in old:
            print(" appending to %s" % ipy)
            with open(ipy, "a") as f:
                f.write(INIT_PY_SNIPPET)
        else:
            print(" %s unmodified" % ipy)
    else:
        print(" %s doesn't exist, ok" % ipy)
        ipy = None

    # Make sure both the top-level "versioneer.py" and versionfile_source
    # (PKG/_version.py, used by runtime code) are in MANIFEST.in, so
    # they'll be copied into source distributions. Pip won't be able to
    # install the package without this.
    manifest_in = os.path.join(root, "MANIFEST.in")
    simple_includes = set()
    try:
        with open(manifest_in, "r") as f:
            for line in f:
                if line.startswith("include "):
                    for include in line.split()[1:]:
                        simple_includes.add(include)
    except EnvironmentError:
        pass
    # That doesn't cover everything MANIFEST.in can do
    # (http://docs.python.org/2/distutils/sourcedist.html#commands), so
    # it might give some false negatives. Appending redundant 'include'
    # lines is safe, though.
    if "versioneer.py" not in simple_includes:
        print(" appending 'versioneer.py' to MANIFEST.in")
        with open(manifest_in, "a") as f:
            f.write("include versioneer.py\n")
    else:
        print(" 'versioneer.py' already in MANIFEST.in")
    if cfg.versionfile_source not in simple_includes:
        print(" appending versionfile_source ('%s') to MANIFEST.in" %
              cfg.versionfile_source)
        with open(manifest_in, "a") as f:
            f.write("include %s\n" % cfg.versionfile_source)
    else:
        print(" versionfile_source already in MANIFEST.in")

    # Make VCS-specific changes. For git, this means creating/changing
    # .gitattributes to mark _version.py for export-subst keyword
    # substitution.
    do_vcs_install(manifest_in, cfg.versionfile_source, ipy)
    return 0


def scan_setup_py():
    """Validate the contents of setup.py against Versioneer's expectations."""
    found = set()
    setters = False
    errors = 0
    with open("setup.py", "r") as f:
        for line in f.readlines():
            if "import versioneer" in line:
                found.add("import")
            if "versioneer.get_cmdclass()" in line:
                found.add("cmdclass")
            if "versioneer.get_version()" in line:
                found.add("get_version")
            if "versioneer.VCS" in line:
                setters = True
            if "versioneer.versionfile_source" in line:
                setters = True
    if len(found) != 3:
        print("")
        print("Your setup.py appears to be missing some important items")
        print("(but I might be wrong). Please make sure it has something")
        print("roughly like the following:")
        print("")
        print(" import versioneer")
        print(" setup( version=versioneer.get_version(),")
        print("        cmdclass=versioneer.get_cmdclass(),  ...)")
        print("")
        errors += 1
    if setters:
        print("You should remove lines like 'versioneer.VCS = ' and")
        print("'versioneer.versionfile_source = ' . This configuration")
        print("now lives in setup.cfg, and should be removed from setup.py")
        print("")
        errors += 1
    return errors


if __name__ == "__main__":
    cmd = sys.argv[1]
    if cmd == "setup":
        errors = do_setup()
        errors += scan_setup_py()
        if errors:
            sys.exit(1)