├── hosts.txt ├── requirements.txt ├── scratch ├── test.sh ├── worker.sh └── drmaa-test.py ├── env.yml ├── run-slave.sh ├── setup.cfg ├── MANIFEST.in ├── dask_drmaa ├── __init__.py ├── cli │ ├── tests │ │ └── test_dask_drmaa.py │ └── dask_drmaa.py ├── tests │ ├── test_sge.py │ ├── test_adaptive.py │ └── test_core.py ├── sge.py ├── adaptive.py ├── core.py └── _version.py ├── start-sge.sh ├── .gitattributes ├── ci └── scripts │ └── conda_setup.sh ├── run-master.sh ├── Dockerfile-slave ├── add_worker.sh ├── Dockerfile-master ├── setup.py ├── docker-compose.yml ├── setup-slave.sh ├── .travis.yml ├── .gitignore ├── scheduler.txt ├── LICENSE.txt ├── setup-master.sh ├── queue.txt ├── README.rst └── versioneer.py /hosts.txt: -------------------------------------------------------------------------------- 1 | group_name @allhosts 2 | hostlist NONE 3 | -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | dask 2 | distributed >= 1.21.3 3 | drmaa 4 | click 5 | -------------------------------------------------------------------------------- /scratch/test.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | echo "HELLO WORLD" 4 | echo `date` 5 | -------------------------------------------------------------------------------- /scratch/worker.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | /opt/anaconda/bin/dask-worker 172.21.0.1:8786 3 | -------------------------------------------------------------------------------- /env.yml: -------------------------------------------------------------------------------- 1 | name: drmaa 2 | dependencies: 3 | - dask 4 | - distributed>=1.20.0 5 | - ipython 6 | - drmaa 7 | - python=3 8 | -------------------------------------------------------------------------------- /run-slave.sh: 
-------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # start sge 4 | sudo service gridengine-exec restart 5 | 6 | sleep 4 7 | 8 | sudo service gridengine-exec restart 9 | 10 | python -m http.server 8888 11 | -------------------------------------------------------------------------------- /setup.cfg: -------------------------------------------------------------------------------- 1 | [versioneer] 2 | VCS = git 3 | style = pep440 4 | versionfile_source = dask_drmaa/_version.py 5 | versionfile_build = dask_drmaa/_version.py 6 | tag_prefix = 7 | parentdir_prefix = dask_drmaa 8 | -------------------------------------------------------------------------------- /MANIFEST.in: -------------------------------------------------------------------------------- 1 | include LICENSE.txt 2 | include README.rst 3 | 4 | include requirements.txt 5 | 6 | recursive-exclude * __pycache__ 7 | recursive-exclude * *.py[co] 8 | 9 | include versioneer.py 10 | include dask_drmaa/_version.py 11 | -------------------------------------------------------------------------------- /dask_drmaa/__init__.py: -------------------------------------------------------------------------------- 1 | from .core import DRMAACluster, get_session 2 | from .sge import SGECluster 3 | from .adaptive import Adaptive 4 | 5 | from ._version import get_versions 6 | __version__ = get_versions()['version'] 7 | del get_versions 8 | -------------------------------------------------------------------------------- /start-sge.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | docker-compose up -d 3 | while [ `docker exec -it sge_master qhost | grep lx26-amd64 | wc -l` -ne 2 ] 4 | do 5 | echo "Waiting for SGE slots to become available"; 6 | sleep 1 7 | done 8 | echo "SGE properly configured" 9 | -------------------------------------------------------------------------------- /.gitattributes: 
-------------------------------------------------------------------------------- 1 | dask_drmaa/_version.py export-subst 2 | 3 | # Windows compatibility: When setting up testing environment, 4 | # shell scripts are run in ubuntu-based docker container, so 5 | # ensure that carriage returns are not present. 6 | *.sh text eol=lf 7 | *.yml text eol=lf 8 | -------------------------------------------------------------------------------- /ci/scripts/conda_setup.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | set -e 4 | set -x 5 | 6 | # Install miniconda 7 | wget http://repo.continuum.io/miniconda/Miniconda3-latest-Linux-x86_64.sh -O ~/miniconda.sh 8 | bash ~/miniconda.sh -b -p $HOME/miniconda 9 | export PATH="$HOME/miniconda/bin:$PATH" 10 | conda update conda --yes 11 | conda clean -tipy 12 | conda config --set always_yes yes --set changeps1 no 13 | conda --version 14 | -------------------------------------------------------------------------------- /dask_drmaa/cli/tests/test_dask_drmaa.py: -------------------------------------------------------------------------------- 1 | from time import sleep, time 2 | 3 | from distributed.utils_test import popen 4 | from distributed import Client 5 | from distributed.utils_test import loop 6 | 7 | 8 | def test_dask_drmaa(loop): 9 | with popen(['dask-drmaa', '2']) as proc: 10 | with Client('127.0.0.1:8786', loop=loop) as client: 11 | start = time() 12 | while len(client.ncores()) != 2: 13 | sleep(0.1) 14 | assert time() < start + 30 15 | -------------------------------------------------------------------------------- /run-master.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | 4 | # start sge 5 | sudo service gridengine-master restart 6 | 7 | while ! 
ping -c1 slave_one &>/dev/null; do :; done 8 | 9 | qconf -Msconf /scheduler.txt 10 | qconf -Ahgrp /hosts.txt 11 | qconf -Aq /queue.txt 12 | 13 | qconf -ah slave_one 14 | qconf -ah slave_two 15 | qconf -ah slave_three 16 | 17 | qconf -as $HOSTNAME 18 | bash add_worker.sh dask.q slave_one 4 19 | bash add_worker.sh dask.q slave_two 4 20 | 21 | sudo service gridengine-master restart 22 | 23 | python -m http.server 8888 24 | -------------------------------------------------------------------------------- /Dockerfile-slave: -------------------------------------------------------------------------------- 1 | FROM ubuntu:14.04 2 | 3 | ENV LANG C.UTF-8 4 | 5 | RUN apt-get update && apt-get install curl bzip2 git gcc -y --fix-missing 6 | 7 | RUN curl -o miniconda.sh https://repo.continuum.io/miniconda/Miniconda3-latest-Linux-x86_64.sh && \ 8 | bash miniconda.sh -f -b -p /opt/anaconda && \ 9 | /opt/anaconda/bin/conda clean -tipy && \ 10 | rm -f miniconda.sh 11 | ENV PATH /opt/anaconda/bin:$PATH 12 | RUN conda install -n root conda=4.4.11 && conda clean -tipy 13 | RUN conda install -c conda-forge dask distributed blas pytest mock ipython pip psutil python-drmaa && conda clean -tipy 14 | 15 | COPY ./setup-slave.sh / 16 | COPY ./run-slave.sh / 17 | RUN bash ./setup-slave.sh 18 | 19 | CMD python -m SimpleHTTPServer 20 | -------------------------------------------------------------------------------- /scratch/drmaa-test.py: -------------------------------------------------------------------------------- 1 | import drmaa 2 | import os 3 | 4 | def main(): 5 | """ 6 | Submit a job. 7 | Note, need file called sleeper.sh in current directory. 
8 | """ 9 | with drmaa.Session() as s: 10 | print('Creating job template') 11 | jt = s.createJobTemplate() 12 | jt.remoteCommand = os.path.join('/dask-drmaa', 'scratch', 'sleeper.sh') 13 | jt.args = ['42', 'Simon says:'] 14 | jt.joinFiles=True 15 | jt.outputPath = ':/dask-drmaa/scratch/' 16 | jt.errorPath= ':/dask-drmaa/scratch/' 17 | 18 | jobid = s.runJob(jt) 19 | print('Your job has been submitted with ID %s' % jobid) 20 | 21 | print('Cleaning up') 22 | s.deleteJobTemplate(jt) 23 | 24 | if __name__=='__main__': 25 | main() 26 | -------------------------------------------------------------------------------- /add_worker.sh: -------------------------------------------------------------------------------- 1 | #`/bin/bash 2 | 3 | #!/bin/bash 4 | 5 | QUEUE=$1 6 | HOSTNAME=$2 7 | SLOTS=$3 8 | 9 | # add to the execution host list 10 | TMPFILE=/tmp/sge.hostname-$HOSTNAME 11 | echo -e "hostname $HOSTNAME\nload_scaling NONE\ncomplex_values NONE\nuser_lists NONE\nxuser_lists NONE\nprojects NONE\nxprojects NONE\nusage_scaling NONE\nreport_variables NONE" > $TMPFILE 12 | qconf -Ae $TMPFILE 13 | rm $TMPFILE 14 | 15 | # add to the all hosts list 16 | qconf -aattr hostgroup hostlist $HOSTNAME @allhosts 17 | 18 | # enable the host for the queue, in case it was disabled and not removed 19 | qmod -e $QUEUE@$HOSTNAME 20 | 21 | # Add memory resource 22 | qconf -mattr exechost complex_values h_vmem=100G $HOSTNAME 23 | 24 | if [ "$SLOTS" ]; then 25 | qconf -aattr queue slots "[$HOSTNAME=$SLOTS]" $QUEUE 26 | fi 27 | -------------------------------------------------------------------------------- /dask_drmaa/cli/dask_drmaa.py: -------------------------------------------------------------------------------- 1 | 2 | import logging 3 | import signal 4 | import sys 5 | from time import sleep 6 | 7 | import click 8 | 9 | from dask_drmaa import DRMAACluster 10 | from distributed.cli.utils import check_python_3 11 | 12 | 13 | @click.command() 14 | @click.argument('nworkers', type=int) 15 | def 
main(nworkers): 16 | cluster = DRMAACluster(silence_logs=logging.INFO, scheduler_port=8786) 17 | cluster.start_workers(nworkers) 18 | 19 | def handle_signal(sig, frame): 20 | cluster.close() 21 | sys.exit(0) 22 | 23 | signal.signal(signal.SIGINT, handle_signal) 24 | signal.signal(signal.SIGTERM, handle_signal) 25 | 26 | while True: 27 | sleep(1) 28 | 29 | 30 | def go(): 31 | check_python_3() 32 | main() 33 | 34 | if __name__ == '__main__': 35 | go() 36 | -------------------------------------------------------------------------------- /Dockerfile-master: -------------------------------------------------------------------------------- 1 | FROM ubuntu:14.04 2 | 3 | ENV LANG C.UTF-8 4 | 5 | RUN apt-get update && apt-get install curl bzip2 git gcc -y --fix-missing 6 | 7 | RUN curl -o miniconda.sh https://repo.continuum.io/miniconda/Miniconda3-latest-Linux-x86_64.sh && \ 8 | bash miniconda.sh -f -b -p /opt/anaconda && \ 9 | /opt/anaconda/bin/conda clean -tipy && \ 10 | rm -f miniconda.sh 11 | ENV PATH /opt/anaconda/bin:$PATH 12 | RUN conda install -n root conda=4.4.11 && conda clean -tipy 13 | RUN conda install -c conda-forge dask distributed blas pytest mock ipython pip psutil python-drmaa && conda clean -tipy 14 | 15 | COPY ./*.sh / 16 | COPY ./*.txt / 17 | RUN bash ./setup-master.sh 18 | 19 | # expose ports 20 | EXPOSE 8000 21 | EXPOSE 6444 22 | EXPOSE 6445 23 | EXPOSE 6446 24 | 25 | ENV DRMAA_LIBRARY_PATH /usr/lib/gridengine-drmaa/lib/libdrmaa.so 26 | ENV SGE_ROOT /var/lib/gridengine/ 27 | ENV SGE_CELL default 28 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | from os.path import exists 4 | from setuptools import setup 5 | import versioneer 6 | 7 | setup(name='dask-drmaa', 8 | version=versioneer.get_version(), 9 | description='Dask on DRMAA', 10 | url='http://github.com/dask/dask-drmaa/', 11 | 
maintainer='Matthew Rocklin', 12 | maintainer_email='mrocklin@gmail.com', 13 | license='BSD', 14 | keywords='', 15 | packages=['dask_drmaa', 16 | 'dask_drmaa.cli'], 17 | cmdclass=versioneer.get_cmdclass(), 18 | install_requires=list(open('requirements.txt').read().strip().split('\n')), 19 | long_description=(open('README.rst').read() if exists('README.rst') 20 | else ''), 21 | entry_points={ 22 | 'console_scripts': [ 23 | 'dask-drmaa=dask_drmaa.cli.dask_drmaa:go', 24 | ], 25 | }, 26 | zip_safe=False) 27 | -------------------------------------------------------------------------------- /dask_drmaa/tests/test_sge.py: -------------------------------------------------------------------------------- 1 | from time import sleep 2 | 3 | import pytest 4 | 5 | from dask_drmaa import SGECluster 6 | from distributed import Client 7 | from distributed.utils_test import loop 8 | 9 | def test_sge_memory(loop): 10 | with SGECluster(scheduler_port=0) as cluster: 11 | cluster.start_workers(2, memory=3e9, memory_fraction=0.5) 12 | with Client(cluster, loop=loop) as client: 13 | while len(cluster.scheduler.ncores) < 2: 14 | sleep(0.1) 15 | 16 | assert all(info['memory_limit'] == 1.5e9 17 | for info in cluster.scheduler.worker_info.values()) 18 | 19 | 20 | def test_sge_cpus(loop): 21 | with SGECluster(scheduler_port=0) as cluster: 22 | cluster.start_workers(1, cpus=2) 23 | with Client(cluster, loop=loop) as client: 24 | while len(cluster.scheduler.ncores) < 1: 25 | sleep(0.1) 26 | 27 | assert list(cluster.scheduler.ncores.values()) == [2] 28 | -------------------------------------------------------------------------------- /docker-compose.yml: -------------------------------------------------------------------------------- 1 | version: "2" 2 | 3 | services: 4 | 5 | master: 6 | build: 7 | context: . 
8 | dockerfile: Dockerfile-master 9 | container_name: sge_master 10 | hostname: sge_master 11 | #network_mode: host 12 | volumes: 13 | - .:/dask-drmaa 14 | command: bash /run-master.sh 15 | 16 | slave-one: 17 | build: 18 | context: . 19 | dockerfile: Dockerfile-slave 20 | container_name: slave_one 21 | hostname: slave_one 22 | #network_mode: host 23 | volumes: 24 | - .:/dask-drmaa 25 | command: bash /run-slave.sh 26 | links: 27 | - "master:sge_master" 28 | depends_on: 29 | - master 30 | 31 | slave-two: 32 | build: 33 | context: . 34 | dockerfile: Dockerfile-slave 35 | container_name: slave_two 36 | hostname: slave_two 37 | #network_mode: host 38 | volumes: 39 | - .:/dask-drmaa 40 | command: bash /run-slave.sh 41 | links: 42 | - "master:sge_master" 43 | depends_on: 44 | - master 45 | -------------------------------------------------------------------------------- /setup-slave.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | export MASTER_HOSTNAME=sge_master 3 | echo "gridengine-common shared/gridenginemaster string $MASTER_HOSTNAME" | sudo debconf-set-selections 4 | echo "gridengine-common shared/gridenginecell string default" | sudo debconf-set-selections 5 | echo "gridengine-common shared/gridengineconfig boolean false" | sudo debconf-set-selections 6 | echo "gridengine-client shared/gridenginemaster string $MASTER_HOSTNAME" | sudo debconf-set-selections 7 | echo "gridengine-client shared/gridenginecell string default" | sudo debconf-set-selections 8 | echo "gridengine-client shared/gridengineconfig boolean false" | sudo debconf-set-selections 9 | echo "postfix postfix/main_mailer_type select No configuration" | sudo debconf-set-selections 10 | 11 | sudo DEBIAN_FRONTEND=noninteractive apt-get install -y gridengine-exec gridengine-client gridengine-drmaa-dev -qq 12 | 13 | sudo service postfix stop 14 | sudo update-rc.d postfix disable 15 | echo $MASTER_HOSTNAME | sudo tee 
/var/lib/gridengine/default/common/act_qmaster 16 | -------------------------------------------------------------------------------- /.travis.yml: -------------------------------------------------------------------------------- 1 | language: python 2 | dist: trusty 3 | sudo: required 4 | 5 | notifications: 6 | email: false 7 | 8 | services: 9 | - docker 10 | 11 | matrix: 12 | include: 13 | - python: "2.7" 14 | env: OS=ubuntu-14.04 15 | - python: "3.6" 16 | env: OS=ubuntu-14.04 17 | 18 | env: 19 | global: 20 | - DOCKER_COMPOSE_VERSION=1.6.0 21 | 22 | 23 | before_install: 24 | - pwd 25 | - docker version 26 | - docker-compose version 27 | 28 | # Install miniconda and create `adam` environment 29 | - bash ci/scripts/conda_setup.sh 30 | - export PATH="$HOME/miniconda/bin:$PATH" 31 | 32 | # Start containers 33 | - ./start-sge.sh 34 | - docker ps -a 35 | - docker images 36 | 37 | install: 38 | - which python 39 | 40 | script: 41 | - docker exec -it sge_master /bin/bash -c "cd /dask-drmaa; pip install --no-cache-dir ." 
42 | - docker exec -it sge_master /bin/bash -c "cd /dask-drmaa; py.test dask_drmaa --verbose" 43 | 44 | after_success: 45 | - docker exec -it sge_master bash -c 'cat /tmp/sge*' 46 | - docker exec -it slave_one bash -c 'cat /tmp/exec*' 47 | - docker exec -it slave_two bash -c 'cat /tmp/exec*' 48 | - pip install --no-cache-dir coveralls 49 | - coveralls 50 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | env/ 12 | build/ 13 | develop-eggs/ 14 | dist/ 15 | downloads/ 16 | eggs/ 17 | .eggs/ 18 | lib/ 19 | lib64/ 20 | parts/ 21 | sdist/ 22 | var/ 23 | *.egg-info/ 24 | .installed.cfg 25 | *.egg 26 | 27 | # PyInstaller 28 | # Usually these files are written by a python script from a template 29 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 
30 | *.manifest 31 | *.spec 32 | 33 | # Installer logs 34 | pip-log.txt 35 | pip-delete-this-directory.txt 36 | 37 | # Unit test / coverage reports 38 | htmlcov/ 39 | .tox/ 40 | .coverage 41 | .coverage.* 42 | .cache 43 | .pytest_cache 44 | nosetests.xml 45 | coverage.xml 46 | *,cover 47 | .hypothesis/ 48 | 49 | # Translations 50 | *.mo 51 | *.pot 52 | 53 | # Django stuff: 54 | *.log 55 | local_settings.py 56 | 57 | # Flask stuff: 58 | instance/ 59 | .webassets-cache 60 | 61 | # Scrapy stuff: 62 | .scrapy 63 | 64 | # Sphinx documentation 65 | docs/_build/ 66 | 67 | # PyBuilder 68 | target/ 69 | 70 | # IPython Notebook 71 | .ipynb_checkpoints 72 | 73 | # pyenv 74 | .python-version 75 | 76 | # celery beat schedule file 77 | celerybeat-schedule 78 | 79 | # dotenv 80 | .env 81 | 82 | # virtualenv 83 | venv/ 84 | ENV/ 85 | 86 | # Spyder project settings 87 | .spyderproject 88 | 89 | # Rope project settings 90 | .ropeproject 91 | 92 | #SGE 93 | scratch/* 94 | -------------------------------------------------------------------------------- /scheduler.txt: -------------------------------------------------------------------------------- 1 | algorithm default 2 | schedule_interval 0:0:1 3 | maxujobs 0 4 | queue_sort_method load 5 | job_load_adjustments np_load_avg=0.50 6 | load_adjustment_decay_time 0:7:30 7 | load_formula np_load_avg 8 | schedd_job_info true 9 | flush_submit_sec 0 10 | flush_finish_sec 0 11 | params none 12 | reprioritize_interval 0:0:0 13 | halftime 168 14 | usage_weight_list cpu=1.000000,mem=0.000000,io=0.000000 15 | compensation_factor 5.000000 16 | weight_user 0.250000 17 | weight_project 0.250000 18 | weight_department 0.250000 19 | weight_job 0.250000 20 | weight_tickets_functional 0 21 | weight_tickets_share 0 22 | share_override_tickets TRUE 23 | share_functional_shares TRUE 24 | max_functional_jobs_to_schedule 200 25 | report_pjob_tickets TRUE 26 | max_pending_tasks_per_job 50 27 | halflife_decay_list none 28 | policy_hierarchy OFS 29 | 
weight_ticket 0.500000 30 | weight_waiting_time 0.278000 31 | weight_deadline 3600000.000000 32 | weight_urgency 0.500000 33 | weight_priority 0.000000 34 | max_reservation 0 35 | default_duration INFINITY 36 | -------------------------------------------------------------------------------- /LICENSE.txt: -------------------------------------------------------------------------------- 1 | Copyright (c) 2016, Continuum Analytics, Inc. and contributors 2 | All rights reserved. 3 | 4 | Redistribution and use in source and binary forms, with or without modification, 5 | are permitted provided that the following conditions are met: 6 | 7 | Redistributions of source code must retain the above copyright notice, 8 | this list of conditions and the following disclaimer. 9 | 10 | Redistributions in binary form must reproduce the above copyright notice, 11 | this list of conditions and the following disclaimer in the documentation 12 | and/or other materials provided with the distribution. 13 | 14 | Neither the name of Continuum Analytics nor the names of any contributors 15 | may be used to endorse or promote products derived from this software 16 | without specific prior written permission. 17 | 18 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" 19 | AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE 20 | IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE 21 | ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE 22 | LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR 23 | CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF 24 | SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS 25 | INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN 26 | CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) 27 | ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF 28 | THE POSSIBILITY OF SUCH DAMAGE. 29 | -------------------------------------------------------------------------------- /setup-master.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # Configure the master hostname for Grid Engine 3 | echo "gridengine-master shared/gridenginemaster string $HOSTNAME" | sudo debconf-set-selections 4 | echo "gridengine-master shared/gridenginecell string default" | sudo debconf-set-selections 5 | echo "gridengine-master shared/gridengineconfig boolean false" | sudo debconf-set-selections 6 | echo "gridengine-common shared/gridenginemaster string $HOSTNAME" | sudo debconf-set-selections 7 | echo "gridengine-common shared/gridenginecell string default" | sudo debconf-set-selections 8 | echo "gridengine-common shared/gridengineconfig boolean false" | sudo debconf-set-selections 9 | echo "gridengine-client shared/gridenginemaster string $HOSTNAME" | sudo debconf-set-selections 10 | echo "gridengine-client shared/gridenginecell string default" | sudo debconf-set-selections 11 | echo "gridengine-client shared/gridengineconfig boolean false" | sudo debconf-set-selections 12 | # Postfix mail server is also installed as a dependency 13 | echo "postfix postfix/main_mailer_type select No configuration" | sudo debconf-set-selections 14 | 15 | # Install Grid Engine 16 | sudo DEBIAN_FRONTEND=noninteractive apt-get install -y gridengine-master gridengine-client gridengine-drmaa-dev -qq 17 
| 18 | # Set up Grid Engine 19 | sudo -u sgeadmin /usr/share/gridengine/scripts/init_cluster /var/lib/gridengine default /var/spool/gridengine/spooldb sgeadmin 20 | sudo service gridengine-master restart 21 | 22 | # Disable Postfix 23 | sudo service postfix stop 24 | sudo update-rc.d postfix disable 25 | -------------------------------------------------------------------------------- /queue.txt: -------------------------------------------------------------------------------- 1 | qname dask.q 2 | hostlist @allhosts 3 | seq_no 0 4 | load_thresholds NONE 5 | suspend_thresholds NONE 6 | nsuspend 1 7 | suspend_interval 00:00:01 8 | priority 0 9 | min_cpu_interval 00:00:01 10 | processors UNDEFINED 11 | qtype BATCH INTERACTIVE 12 | ckpt_list NONE 13 | pe_list make 14 | rerun FALSE 15 | slots 2 16 | tmpdir /tmp 17 | shell /bin/csh 18 | prolog NONE 19 | epilog NONE 20 | shell_start_mode posix_compliant 21 | starter_method NONE 22 | suspend_method NONE 23 | resume_method NONE 24 | terminate_method NONE 25 | notify 00:00:01 26 | owner_list NONE 27 | user_lists NONE 28 | xuser_lists NONE 29 | subordinate_list NONE 30 | complex_values NONE 31 | projects NONE 32 | xprojects NONE 33 | calendar NONE 34 | initial_state default 35 | s_rt INFINITY 36 | h_rt INFINITY 37 | s_cpu INFINITY 38 | h_cpu INFINITY 39 | s_fsize INFINITY 40 | h_fsize INFINITY 41 | s_data INFINITY 42 | h_data INFINITY 43 | s_stack INFINITY 44 | h_stack INFINITY 45 | s_core INFINITY 46 | h_core INFINITY 47 | s_rss INFINITY 48 | h_rss INFINITY 49 | s_vmem INFINITY 50 | h_vmem INFINITY 51 | -------------------------------------------------------------------------------- /dask_drmaa/sge.py: -------------------------------------------------------------------------------- 1 | from .core import DRMAACluster, get_session 2 | 3 | 4 | class SGECluster(DRMAACluster): 5 | default_memory = None 6 | 7 | def start_workers(self, n=1, nativeSpecification='', cpus=1, memory=None, 8 | memory_fraction=0.5): 9 | ''' 10 | Start dask 
workers on an SGE cluster. 11 | 12 | Parameters 13 | ---------- 14 | n: integer 15 | Number of dask workers to start 16 | nativeSpecification: string (optional) 17 | Specify options native to the job scheduler 18 | cpus: integer 19 | Number of threads per dask worker 20 | memory: integer (optional) 21 | Number of bytes of RAM per dask worker 22 | memory_fraction: float 23 | If memory is specified, specify the fraction of memory used 24 | before dask workers begin to cache data to disk 25 | ''' 26 | return super(SGECluster, self).start_workers( 27 | n=n, nativeSpecification=nativeSpecification, cpus=cpus, 28 | memory=memory, memory_fraction=memory_fraction) 29 | 30 | def create_job_template(self, nativeSpecification='', cpus=1, memory=None, 31 | memory_fraction=0.5): 32 | memory = memory or self.default_memory 33 | template = self.template.copy() 34 | 35 | ns = template['nativeSpecification'] 36 | args = template['args'] 37 | 38 | args = [self.scheduler_address] + template['args'] 39 | 40 | if nativeSpecification: 41 | ns = ns + nativeSpecification 42 | if memory: 43 | args = args + ['--memory-limit', str(memory * (1 - memory_fraction))] 44 | args = args + ['--resources', 'memory=%f' % (memory * memory_fraction)] 45 | # h_vmem is SGE-specific 46 | ns += ' -l h_vmem=%dG' % int(memory / 1e9) # / cpus 47 | if cpus: 48 | args = args + ['--nprocs', '1', '--nthreads', str(cpus)] 49 | # ns += ' -l TODO=%d' % (cpu + 1) 50 | 51 | template['nativeSpecification'] = ns 52 | template['args'] = args 53 | 54 | jt = get_session().createJobTemplate() 55 | valid_attributes = dir(jt) 56 | 57 | for key, value in template.items(): 58 | if key not in valid_attributes: 59 | raise ValueError("Invalid job template attribute %s" % key) 60 | setattr(jt, key, value) 61 | 62 | return jt 63 | -------------------------------------------------------------------------------- /dask_drmaa/tests/test_adaptive.py: -------------------------------------------------------------------------------- 1 | from 
time import sleep, time 2 | 3 | import pytest 4 | from toolz import first 5 | 6 | from dask_drmaa import SGECluster 7 | from dask_drmaa.adaptive import Adaptive 8 | from distributed import Client 9 | from distributed.utils_test import loop, inc, slowinc 10 | 11 | def test_adaptive_memory(loop): 12 | with SGECluster(scheduler_port=0, cleanup_interval=100) as cluster: 13 | cluster.adapt() 14 | with Client(cluster, loop=loop) as client: 15 | future = client.submit(inc, 1, resources={'memory': 1e9}) 16 | assert future.result() == 2 17 | assert len(cluster.scheduler.ncores) > 0 18 | r = list(cluster.scheduler.worker_resources.values())[0] 19 | assert r['memory'] > 1e9 20 | 21 | del future 22 | 23 | start = time() 24 | while client.ncores(): 25 | sleep(0.3) 26 | assert time() < start + 10 27 | 28 | start = time() 29 | while cluster.workers: 30 | sleep(0.1) 31 | assert time() < start + 10 32 | 33 | 34 | def test_adaptive_normal_tasks(loop): 35 | with SGECluster(scheduler_port=0) as cluster: 36 | cluster.adapt() 37 | with Client(cluster, loop=loop) as client: 38 | future = client.submit(inc, 1) 39 | assert future.result() == 2 40 | 41 | 42 | @pytest.mark.parametrize('interval', [50, 1000]) 43 | def test_dont_over_request(loop, interval): 44 | with SGECluster(scheduler_port=0) as cluster: 45 | cluster.adapt() 46 | with Client(cluster, loop=loop) as client: 47 | future = client.submit(inc, 1) 48 | assert future.result() == 2 49 | assert len(cluster.scheduler.workers) == 1 50 | 51 | for i in range(5): 52 | sleep(0.2) 53 | assert len(cluster.scheduler.workers) == 1 54 | 55 | 56 | def test_request_more_than_one(loop): 57 | with SGECluster(scheduler_port=0) as cluster: 58 | cluster.adapt() 59 | with Client(cluster, loop=loop) as client: 60 | futures = client.map(slowinc, range(1000), delay=0.2) 61 | while len(cluster.scheduler.workers) < 3: 62 | sleep(0.1) 63 | 64 | 65 | def test_dont_request_if_idle(loop): 66 | with SGECluster(scheduler_port=0) as cluster: 67 | 
cluster.start_workers(1) 68 | with Client(cluster, loop=loop) as client: 69 | while not cluster.scheduler.workers: 70 | sleep(0.1) 71 | futures = client.map(slowinc, range(1000), delay=0.2, 72 | workers=first(cluster.scheduler.workers)) 73 | cluster.adapt(interval=2000) 74 | 75 | for i in range(60): 76 | sleep(0.1) 77 | assert len(cluster.workers) < 5 78 | 79 | 80 | def test_dont_request_if_not_enough_tasks(loop): 81 | with SGECluster(scheduler_port=0) as cluster: 82 | cluster.adapt() 83 | with Client(cluster, loop=loop) as client: 84 | cluster.scheduler.task_duration['slowinc'] = 1000 85 | future = client.submit(slowinc, 1, delay=1000) 86 | 87 | for i in range(50): 88 | sleep(0.1) 89 | assert len(cluster.workers) < 2 90 | 91 | 92 | @pytest.mark.xfail 93 | def test_dont_request_on_many_short_tasks(loop): 94 | with SGECluster(scheduler_port=0) as cluster: 95 | cluster.adapt(interval=50, startup_cost=10) 96 | with Client(cluster, loop=loop) as client: 97 | cluster.scheduler.task_duration['slowinc'] = 0.001 98 | futures = client.map(slowinc, range(1000), delay=0.001) 99 | 100 | while not cluster.scheduler.workers: 101 | sleep(0.01) 102 | 103 | for i in range(20): 104 | sleep(0.1) 105 | assert len(cluster.workers) < 2 106 | 107 | 108 | def test_order_warns(loop): 109 | with SGECluster(scheduler_port=0) as cluster: 110 | scheduler = cluster.scheduler 111 | with pytest.warns(FutureWarning): 112 | adapt = Adaptive(scheduler, cluster) 113 | -------------------------------------------------------------------------------- /dask_drmaa/adaptive.py: -------------------------------------------------------------------------------- 1 | from __future__ import print_function, division, absolute_import 2 | 3 | import logging 4 | import warnings 5 | 6 | from distributed import Scheduler 7 | from distributed.utils import log_errors 8 | from distributed.deploy import adaptive 9 | from tornado import gen 10 | 11 | from .core import get_session 12 | 13 | logger = 
logging.getLogger(__file__) 14 | 15 | 16 | class Adaptive(adaptive.Adaptive): 17 | ''' 18 | Adaptively allocate workers based on scheduler load. A superclass. 19 | 20 | Contains logic to dynamically resize a Dask cluster based on current use. 21 | 22 | Parameters 23 | ---------- 24 | cluster: object 25 | Must have scale_up and scale_down methods/coroutines 26 | scheduler: distributed.Scheduler 27 | 28 | Examples 29 | -------- 30 | >>> class MyCluster(object): 31 | ... def scale_up(self, n): 32 | ... """ Bring worker count up to n """ 33 | ... def scale_down(self, workers): 34 | ... """ Remove worker addresses from cluster """ 35 | ''' 36 | def __init__(self, cluster=None, scheduler=None, interval=1000, 37 | startup_cost=1, scale_factor=2, **kwargs): 38 | if cluster is None: 39 | raise TypeError("`Adaptive.__init__() missing required argument: " 40 | "`cluster`") 41 | 42 | if isinstance(cluster, Scheduler): 43 | warnings.warn("The ``cluster`` and ``scheduler`` arguments to " 44 | "Adaptive.__init__ will switch positions in a future" 45 | " release. Please use keyword arguments.", 46 | FutureWarning) 47 | cluster, scheduler = scheduler, cluster 48 | if scheduler is None: 49 | scheduler = cluster.scheduler 50 | 51 | super(Adaptive, self).__init__(scheduler, cluster, interval, 52 | startup_cost=startup_cost, 53 | scale_factor=scale_factor, 54 | **kwargs) 55 | 56 | def get_busy_workers(self): 57 | s = self.scheduler 58 | busy = {w for w in s.workers 59 | if len(s.processing[w]) > 2 * s.ncores[w] 60 | and s.occupancy[w] > self.startup_cost * 2} 61 | return busy 62 | 63 | def needs_cpu(self): 64 | # don't want to call super(), since it ignores number of tasks 65 | s = self.scheduler 66 | busy = self.get_busy_workers() 67 | if s.unrunnable or busy: 68 | if any(get_session().jobStatus(jid) == 'queued_active' for 69 | jid in self.cluster.workers): # TODO: is this slow? 
70 | return False 71 | if len(s.workers) < len(self.cluster.workers): 72 | # TODO: this depends on reliable cleanup of closed workers 73 | return False 74 | return True 75 | 76 | def get_scale_up_kwargs(self): 77 | instances = max(1, len(self.scheduler.ncores) * self.scale_factor) 78 | kwargs = {'n': max(instances, len(self.get_busy_workers()))} 79 | memory = [] 80 | if self.scheduler.unrunnable: 81 | for task in self.scheduler.unrunnable: 82 | key = task.key 83 | prefix = task.prefix 84 | duration = 0 85 | memory = [] 86 | duration += self.scheduler.task_duration.get(prefix, 0.1) 87 | 88 | if key in self.scheduler.resource_restrictions: 89 | m = self.scheduler.resource_restrictions[key].get('memory') 90 | if m: 91 | memory.append(m) 92 | if memory: 93 | kwargs['memory'] = max(memory) * 4 94 | logger.info("Starting workers due to resource constraints: %s", 95 | kwargs['n']) 96 | return kwargs 97 | 98 | @gen.coroutine 99 | def _retire_workers(self, workers=None): 100 | if workers is None: 101 | workers = self.workers_to_close() 102 | if not workers: 103 | raise gen.Return(workers) 104 | with log_errors(): 105 | result = yield self.scheduler.retire_workers(workers, 106 | remove=True, 107 | close_workers=True) 108 | if result: 109 | logger.info("Retiring workers {}".format(result)) 110 | # Diverges from distributed.Adaptive here: 111 | # ref c51a15a35a8a64c21c1182bfd9209cb6b7d95380 112 | # TODO: can this be reconciled back to base class implementation? 113 | raise gen.Return(result) 114 | -------------------------------------------------------------------------------- /README.rst: -------------------------------------------------------------------------------- 1 | Dask on DRMAA 2 | ============= 3 | 4 | *This project is unmaintained. 
We recommend that you use
dask-jobqueue instead: https://github.com/dask/dask-jobqueue*

|Build Status| |PyPI Release| |conda-forge Release|

Deploy a Dask.distributed_ cluster on top of a cluster running a
DRMAA_-compliant job scheduler.


Example
-------

Launch from Python

.. code-block:: python

   from dask_drmaa import DRMAACluster
   cluster = DRMAACluster()

   from dask.distributed import Client
   client = Client(cluster)
   cluster.start_workers(2)

   >>> future = client.submit(lambda x: x + 1, 10)
   >>> future.result()
   11

Or launch from the command line::

    $ dask-drmaa 10  # starts local scheduler and ten remote workers


Install
-------

Python packages are available from PyPI and can be installed with ``pip``::

    pip install dask-drmaa

Also ``conda`` packages are available from conda-forge::

    conda install -c conda-forge dask-drmaa

Additionally the package can be installed from GitHub with the latest changes::

    pip install git+https://github.com/dask/dask-drmaa.git --upgrade

or::

    git clone git@github.com:dask/dask-drmaa.git
    cd dask-drmaa
    pip install .

You must have the DRMAA system library installed and be able to submit jobs
from your local machine. Please make sure to set the environment variable
``DRMAA_LIBRARY_PATH`` to point to the location of ``libdrmaa.so`` for your
system.


Testing
-------

This repository contains a Docker-compose testing harness for a Son of Grid
Engine cluster with a master and two slaves. You can initialize this system
as follows:

.. code-block:: bash

   docker-compose build
   ./start-sge.sh

If you have done this previously and need to refresh your solution you can do
the following

.. 
code-block:: bash 79 | 80 | docker-compose stop 81 | docker-compose build --no-cache 82 | ./start-sge.sh 83 | 84 | And run tests with py.test in the master docker container 85 | 86 | .. code-block:: bash 87 | 88 | docker exec -it sge_master /bin/bash -c "cd /dask-drmaa; python setup.py develop" 89 | docker exec -it sge_master /bin/bash -c "cd /dask-drmaa; py.test dask_drmaa --verbose" 90 | 91 | 92 | Adaptive Load 93 | ------------- 94 | 95 | Dask-drmaa can adapt to scheduler load, deploying more workers on the grid when 96 | it has more work, and cleaning up these workers when they are no longer 97 | necessary. This can simplify setup (you can just leave a cluster running) and 98 | it can reduce load on the cluster, making IT happy. 99 | 100 | To enable this, call the ``adapt`` method of a ``DRMAACluster``. You can 101 | submit computations to the cluster without ever explicitly creating workers. 102 | 103 | .. code-block:: python 104 | 105 | from dask_drmaa import DRMAACluster 106 | from dask.distributed import Client 107 | 108 | cluster = DRMAACluster() 109 | cluster.adapt() 110 | client = Client(cluster) 111 | 112 | futures = client.map(func, seq) # workers will be created as necessary 113 | 114 | 115 | Extensible 116 | ---------- 117 | 118 | The DRMAA interface is the lowest common denominator among many different job 119 | schedulers like SGE, SLURM, LSF, Torque, and others. However, sometimes users 120 | need to specify parameters particular to their cluster, such as resource 121 | queues, wall times, memory constraints, etc.. 122 | 123 | DRMAA allows users to pass native specifications either when constructing the 124 | cluster or when starting new workers: 125 | 126 | .. 
code-block:: python 127 | 128 | cluster = DRMAACluster(template={'nativeSpecification': '-l h_rt=01:00:00'}) 129 | # or 130 | cluster.start_workers(10, nativeSpecification='-l h_rt=01:00:00') 131 | 132 | 133 | Related Work 134 | ------------ 135 | 136 | * DRMAA_: The Distributed Resource Management Application API, a high level 137 | API for general use on traditional job schedulers 138 | * drmaa-python_: The Python bindings for DRMAA 139 | * DaskSGE_: An earlier dask-drmaa implementation 140 | * `Son of Grid Engine`_: The default implementation used in testing 141 | * Dask.distributed_: The actual distributed computing library this launches 142 | 143 | .. _DRMAA: https://www.drmaa.org/ 144 | .. _drmaa-python: http://drmaa-python.readthedocs.io/en/latest/ 145 | .. _`Son of Grid Engine`: https://arc.liv.ac.uk/trac/SGE 146 | .. _dasksge: https://github.com/mfouesneau/dasksge 147 | .. _Dask.distributed: http://distributed.readthedocs.io/en/latest/ 148 | .. _DRMAA: https://www.drmaa.org/ 149 | 150 | 151 | .. |Build Status| image:: https://travis-ci.org/dask/dask-drmaa.svg?branch=master 152 | :target: https://travis-ci.org/dask/dask-drmaa 153 | 154 | .. |PyPI Release| image:: https://img.shields.io/pypi/v/dask-drmaa.svg 155 | :target: https://pypi.python.org/pypi/dask-drmaa 156 | 157 | .. 
|conda-forge Release| image:: https://img.shields.io/conda/vn/conda-forge/dask-drmaa.svg 158 | :target: https://github.com/conda-forge/dask-drmaa-feedstock 159 | -------------------------------------------------------------------------------- /dask_drmaa/tests/test_core.py: -------------------------------------------------------------------------------- 1 | from __future__ import print_function 2 | 3 | import os 4 | import shutil 5 | import sys 6 | import tempfile 7 | from time import sleep, time 8 | import socket 9 | 10 | import pytest 11 | 12 | from dask_drmaa import DRMAACluster 13 | from dask_drmaa.core import make_job_script, worker_bin_path 14 | from distributed import Client 15 | from distributed.utils_test import loop, inc 16 | from distributed.utils import tmpfile 17 | 18 | 19 | def test_no_workers(loop): 20 | with DRMAACluster(scheduler_port=0) as cluster: 21 | with Client(cluster, loop=loop) as client: 22 | cluster.start_workers(0) 23 | assert not cluster.workers 24 | cluster.stop_workers([]) 25 | 26 | assert not os.path.exists(cluster.script) 27 | 28 | 29 | def test_simple(loop): 30 | with DRMAACluster(scheduler_port=0) as cluster: 31 | with Client(cluster, loop=loop) as client: 32 | cluster.start_workers(2) 33 | future = client.submit(lambda x: x + 1, 1) 34 | assert future.result() == 2 35 | 36 | cluster.stop_workers(cluster.workers) 37 | 38 | start = time() 39 | while client.ncores(): 40 | sleep(0.2) 41 | assert time() < start + 60 42 | 43 | assert not cluster.workers 44 | 45 | assert not os.path.exists(cluster.script) 46 | 47 | 48 | def test_str(loop): 49 | with DRMAACluster(scheduler_port=0) as cluster: 50 | cluster.start_workers(2) 51 | assert 'DRMAACluster' in str(cluster) 52 | assert 'DRMAACluster' in repr(cluster) 53 | assert '2' in str(cluster) 54 | assert '2' in repr(cluster) 55 | 1 + 1 56 | 57 | 58 | def test_pythonpath(): 59 | tmpdir = tempfile.mkdtemp(prefix='test_drmaa_pythonpath_', dir='.') 60 | try: 61 | with open(os.path.join(tmpdir, 
"bzzz_unlikely_module_name.py"), "w") as f: 62 | f.write("""if 1: 63 | def f(): 64 | return 5 65 | """) 66 | 67 | def func(): 68 | import bzzz_unlikely_module_name 69 | return bzzz_unlikely_module_name.f() 70 | 71 | with DRMAACluster(scheduler_port=0, 72 | preexec_commands=['export PYTHONPATH=%s:PYTHONPATH' % tmpdir], 73 | ) as cluster: 74 | with Client(cluster) as client: 75 | cluster.start_workers(2) 76 | x = client.submit(func) 77 | assert x.result() == 5 78 | 79 | finally: 80 | shutil.rmtree(tmpdir) 81 | 82 | 83 | def test_job_name_as_name(loop): 84 | with DRMAACluster(scheduler_port=0) as cluster: 85 | cluster.start_workers(2) 86 | while len(cluster.scheduler.workers) < 2: 87 | sleep(0.1) 88 | 89 | names = {cluster.scheduler.worker_info[w]['name'] 90 | for w in cluster.scheduler.workers} 91 | 92 | assert names == set(cluster.workers) 93 | 94 | 95 | def test_multiple_overlapping_clusters(loop): 96 | with DRMAACluster(scheduler_port=0) as cluster_1: 97 | cluster_1.start_workers(1) 98 | with Client(cluster_1, loop=loop) as client_1: 99 | with DRMAACluster(scheduler_port=0) as cluster_2: 100 | cluster_2.start_workers(1) 101 | with Client(cluster_2, loop=loop) as client_2: 102 | future_1 = client_1.submit(inc, 1) 103 | future_2 = client_2.submit(inc, 2) 104 | 105 | assert future_1.result() == 2 106 | assert future_2.result() == 3 107 | 108 | 109 | @pytest.mark.skip( 110 | reason="Failing worker cleanup possibly due to upstream change.\n" 111 | "xref: https://github.com/dask/dask-drmaa/issues/93" 112 | ) 113 | def test_stop_single_worker(loop): 114 | with DRMAACluster(scheduler_port=0) as cluster: 115 | with Client(cluster, loop=loop) as client: 116 | cluster.start_workers(2) 117 | future = client.submit(lambda x: x + 1, 1) 118 | assert future.result() == 2 119 | while len(client.ncores()) < 2: 120 | sleep(0.1) 121 | 122 | a, b = cluster.workers 123 | local_dir = client.run(lambda dask_worker: dask_worker.local_dir, 124 | workers=[a])[a] 125 | assert 
os.path.exists(local_dir) 126 | 127 | cluster.stop_workers(a) 128 | start = time() 129 | while len(client.ncores()) != 1: 130 | sleep(0.2) 131 | assert time() < start + 60 132 | assert not os.path.exists(local_dir) 133 | 134 | 135 | def test_stop_workers_politely(loop): 136 | with DRMAACluster(scheduler_port=0) as cluster: 137 | with Client(cluster, loop=loop) as client: 138 | cluster.start_workers(2) 139 | 140 | while len(client.ncores()) < 2: 141 | sleep(0.1) 142 | 143 | futures = client.scatter(list(range(10))) 144 | 145 | a, b = cluster.workers 146 | cluster.stop_workers(a) 147 | 148 | while len(client.ncores()) != 1: 149 | sleep(0.1) 150 | 151 | data = client.gather(futures) 152 | assert data == list(range(10)) 153 | 154 | 155 | def test_logs(loop): 156 | with DRMAACluster(scheduler_port=0) as cluster: 157 | cluster.start_workers(2) 158 | while len(cluster.scheduler.workers) < 2: 159 | sleep(0.1) 160 | 161 | for w in cluster.workers: 162 | fn = 'worker.%s.err' % w 163 | assert os.path.exists(fn) 164 | with open(fn) as f: 165 | assert "worker" in f.read() 166 | 167 | 168 | def test_stdout_in_worker(): 169 | """ 170 | stdout and stderr should be redirected and line-buffered in workers. 
171 | """ 172 | def inc_and_print(x): 173 | print("stdout: inc_and_print(%s)" % (x,)) 174 | print("stderr: inc_and_print(%s)" % (x,), file=sys.stderr) 175 | return x + 1 176 | 177 | def get_lines(fn): 178 | with open(fn) as f: 179 | return [line.strip() for line in f] 180 | 181 | with DRMAACluster(scheduler_port=0, diagnostics_port=None) as cluster: 182 | with Client(cluster) as client: 183 | cluster.start_workers(1) 184 | future = client.submit(inc_and_print, 1) 185 | assert future.result() == 2 186 | 187 | w, = cluster.workers.values() 188 | assert "stdout: inc_and_print(1)" in get_lines(w.stdout) 189 | assert "stderr: inc_and_print(1)" in get_lines(w.stderr) 190 | 191 | 192 | def test_cleanup(): 193 | """ 194 | Not a test, just ensure that all worker logs are cleaned up at the 195 | end of the test run. 196 | """ 197 | def cleanup_logs(): 198 | from glob import glob 199 | import os 200 | for fn in glob('worker.*.out'): 201 | os.remove(fn) 202 | for fn in glob('worker.*.err'): 203 | os.remove(fn) 204 | 205 | import atexit 206 | atexit.register(cleanup_logs) 207 | 208 | 209 | def test_passed_script(loop): 210 | with tmpfile(extension='sh') as fn: 211 | with open(fn, 'w') as f: 212 | f.write(make_job_script(executable=worker_bin_path, 213 | name='foo')) 214 | os.chmod(fn, 0o777) 215 | with DRMAACluster(scheduler_port=0, script=fn) as cluster: 216 | tmp_script_location = cluster.script 217 | assert cluster.script.split(os.path.sep)[-1] == fn.split(os.path.sep)[-1] 218 | job = cluster.start_workers(1) 219 | with Client(cluster, loop=loop) as client: 220 | assert client.submit(lambda x: x + 1, 10).result() == 11 221 | assert os.path.exists(fn) # doesn't cleanup provided script 222 | assert not os.path.exists(tmp_script_location) 223 | 224 | 225 | def test_ip(): 226 | ip = socket.gethostbyname(socket.gethostname()) 227 | with DRMAACluster(ip=ip, scheduler_port=0, diagnostics_port=None) as cluster: 228 | assert cluster.local_cluster.scheduler.ip == ip 229 | 
--------------------------------------------------------------------------------
/dask_drmaa/core.py:
--------------------------------------------------------------------------------
from collections import namedtuple
import logging
import os
import shutil
import socket
import sys
import tempfile

import drmaa
from toolz import merge
from tornado import gen

from distributed import LocalCluster
from distributed.deploy import Cluster
from distributed.utils import log_errors, ignoring
from distributed.utils import PeriodicCallback

logger = logging.getLogger(__name__)


# Process-wide singleton DRMAA session, created lazily by get_session().
_global_session = [None]


def get_session():
    # Lazily create and initialize the shared drmaa.Session on first use.
    if not _global_session[0]:
        _global_session[0] = drmaa.Session()
        _global_session[0].initialize()
    return _global_session[0]


# Per-job bookkeeping: DRMAA job id, the kwargs used to start it, and the
# paths of its redirected stdout/stderr log files.
WorkerSpec = namedtuple('WorkerSpec',
                        ('job_id', 'kwargs', 'stdout', 'stderr'))


# Launch dask-worker via the current interpreter so workers share it.
worker_bin_path = (
    '%(python)s -m distributed.cli.dask_worker'
    % dict(python=sys.executable)
)

# All JOB_ID and TASK_ID environment variables
# NOTE(review): get_session() runs at import time here, so merely importing
# this module opens a DRMAA session — confirm this is intended.
_drm_info = get_session().drmsInfo
_drmaa_implementation = get_session().drmaaImplementation

if "SLURM" in _drm_info:
    JOB_PARAM = "%j"
    JOB_ID = "$SLURM_JOB_ID"
    TASK_ID = "$SLURM_ARRAY_TASK_ID"
elif "LSF" in _drm_info:
    JOB_PARAM = "%J"
    JOB_ID = "$LSB_JOBID"
    TASK_ID = "$LSB_JOBINDEX"
elif "GE" in _drm_info:
    JOB_PARAM = "$JOB_ID"
    JOB_ID = "$JOB_ID"
    TASK_ID = "$SGE_TASK_ID"
elif "Torque" == _drm_info or "PBS" in _drmaa_implementation:
    JOB_PARAM = "$PBS_JOBID"
    JOB_ID = "$PBS_JOBID"
    TASK_ID = "$PBS_TASKNUM"
else:
    JOB_PARAM = ""
    JOB_ID = ""
    TASK_ID = ""

# worker.<jobid>.<out|err> files in the current working directory.
worker_out_path_template = os.path.join(
    os.getcwd(),
    'worker.%(jid)s.%(ext)s'
)

default_template = {
    'jobName': 'dask-worker',
    'outputPath': ':' + worker_out_path_template % dict(
        jid=".".join([JOB_PARAM, '$drmaa_incr_ph$']), ext='out'
    ),
    'errorPath': ':' + worker_out_path_template % dict(
        jid=".".join([JOB_PARAM, '$drmaa_incr_ph$']), ext='err'
    ),
    'workingDirectory': os.getcwd(),
    'nativeSpecification': '',
    # stdout/stderr are redirected to files, make sure their contents don't lag
    'jobEnvironment': {'PYTHONUNBUFFERED': '1'},
    'args': []
}


def make_job_script(executable, name, preexec=()):
    # Build a small bash wrapper: optional preexec commands, then run the
    # executable with the scheduler address as $1, a --name, and the
    # remaining arguments passed through.
    shebang = '#!/bin/bash'
    execute = (
        '%(executable)s $1 --name %(name)s "${@:2}"'
        % dict(executable=executable, name=name)
    )
    preparation = list(preexec)
    script_template = '\n'.join([shebang] + preparation + [execute, ''])
    return script_template


class DRMAACluster(Cluster):
    def __init__(self, template=None, cleanup_interval=1000, hostname=None,
                 script=None, preexec_commands=(), copy_script=True,
                 ip='',
                 **kwargs):
        """
        Dask workers launched by a DRMAA-compatible cluster

        Parameters
        ----------
        template: dict
            Dictionary specifying options to pass to the DRMAA cluster
            and the worker. Relevant items are:

            jobName: string
                Name of the job as known by the DRMAA cluster.
            args: list
                Extra string arguments to pass to dask-worker
            outputPath: string
                Path to the dask-worker stdout. Must start with ':'.
                Defaults to worker.JOBID.TASKID.out in current directory.
            errorPath: string
                Path to the dask-worker stderr. Must start with ':'
                Defaults to worker.JOBID.TASKID.err in current directory.
            workingDirectory: string
                Where dask-worker runs, defaults to current directory
            nativeSpecification: string
                Options native to the job scheduler

        cleanup_interval: int
            Time interval in seconds at which closed workers are cleaned.
            Defaults to 1000
        hostname: string
            Host on which to start the local scheduler, defaults to localhost
        script: string (optional)
            Path to the dask-worker executable script.
            A temporary file will be made if none is provided (recommended)
        preexec_commands: tuple (optional)
            Commands to be executed first by temporary script. Cannot be
            specified at the same time as script.
        copy_script: bool
            Whether should copy the passed script to the current working
            directory. This is primarily to work around an issue with SGE.
        ip: string
            IP of the scheduler, default is the empty string
            which will listen on the primary ip address of the host
        **kwargs:
            Additional keyword arguments to be passed to the local scheduler

        Examples
        --------
        >>> from dask_drmaa import DRMAACluster  # doctest: +SKIP
        >>> cluster = DRMAACluster()  # doctest: +SKIP
        >>> cluster.start_workers(10)  # doctest: +SKIP

        >>> from distributed import Client  # doctest: +SKIP
        >>> client = Client(cluster)  # doctest: +SKIP

        >>> future = client.submit(lambda x: x + 1, 10)  # doctest: +SKIP
        >>> future.result()  # doctest: +SKIP
        11
        """
        self.hostname = hostname or socket.gethostname()
        logger.info("Start local scheduler at %s", self.hostname)
        self.local_cluster = LocalCluster(n_workers=0, ip=ip, **kwargs)

        if script is None:
            # No script supplied: generate a temporary wrapper script.
            # NOTE(review): tempfile.mktemp is race-prone (name-only API);
            # consider mkstemp — TODO confirm whether the bare name is needed.
            fn = os.path.abspath(tempfile.mktemp(
                suffix='.sh',
                prefix='dask-worker-script-',
                dir=os.path.curdir,
            ))
            self.script = fn
            self._should_cleanup_script = True

            script_contents = make_job_script(executable=worker_bin_path,
                                              name='%s.%s' % (JOB_ID, TASK_ID),
                                              preexec=preexec_commands)
            with open(fn, 'wt') as f:
                f.write(script_contents)

            # `atexit` is imported at the *bottom* of this module; by the
            # time __init__ runs the module is fully loaded, so the name
            # resolves — fragile but functional.
            @atexit.register
            def remove_script():
                if os.path.exists(fn):
                    os.remove(fn)

            os.chmod(self.script, 0o777)

        else:
            self._should_cleanup_script = False
            if copy_script:
                # SGE workaround: run a local copy of the user's script.
                with ignoring(EnvironmentError):  # may be in the same path
                    shutil.copy(script, os.path.curdir)  # python 2.x returns None
                    script = os.path.join(os.path.curdir, os.path.basename(script))
                    self._should_cleanup_script = True
            self.script = os.path.abspath(script)
            assert not preexec_commands, "Cannot specify both script and preexec_commands"

        # TODO: check that user-provided script is executable

        self.template = merge(default_template,
                              {'remoteCommand': self.script},
                              template or {})

        # Periodically reap job ids whose DRMAA status reports them finished.
        self._cleanup_callback = PeriodicCallback(callback=self.cleanup_closed_workers,
                                                  callback_time=cleanup_interval,
                                                  io_loop=self.scheduler.loop)
        self._cleanup_callback.start()

        self.workers = {}  # {job-id: WorkerSpec}

    def adapt(self, **kwargs):
        """ Turn on adaptivity

        For keyword arguments see dask_drmaa.adaptive.Adaptive

        Examples
        --------
        >>> cluster.adapt(minimum=0, maximum=10, interval='500ms')

        See Also
        --------
        Cluster: an interface for other clusters to inherit from
        """
        from .adaptive import Adaptive

        # Stop any previously-running adaptive loop before starting a new one.
        with ignoring(AttributeError):
            self._adaptive.stop()
        if not hasattr(self, '_adaptive_options'):
            self._adaptive_options = {}

        self._adaptive_options.update(kwargs)
        self._adaptive = Adaptive(
            self, self.scheduler, **self._adaptive_options
        )

        return self._adaptive

    @gen.coroutine
    def _start(self):
        # Nothing to do: the LocalCluster is started in __init__.
        pass

    @property
    def scheduler(self):
        # The scheduler lives inside the wrapped LocalCluster.
        return self.local_cluster.scheduler

    def create_job_template(self, **kwargs):
        # Merge per-call kwargs over the cluster-wide template, prepend the
        # scheduler address to the worker args, and build a DRMAA template.
        template = self.template.copy()
        if kwargs:
            template.update(kwargs)
        template['args'] = [self.scheduler_address] + template['args']

        jt = get_session().createJobTemplate()
        valid_attributes = dir(jt)

        for key, value in template.items():
            if key not in valid_attributes:
                raise ValueError("Invalid job template attribute %s" % key)
            setattr(jt, key, value)

        return jt

    def start_workers(self, n=1, **kwargs):
        # Submit an array job of n dask-worker tasks and record a WorkerSpec
        # (including stdout/stderr log paths) per returned job id.
        if n == 0:
            return

        with log_errors():
            with self.create_job_template(**kwargs) as jt:
                ids = get_session().runBulkJobs(jt, 1, n, 1)
                logger.info("Start %d workers. Job ID: %s", len(ids), ids[0].split('.')[0])
                self.workers.update(
                    {jid: WorkerSpec(job_id=jid, kwargs=kwargs,
                                     stdout=worker_out_path_template % dict(jid=jid, ext='out'),
                                     stderr=worker_out_path_template % dict(jid=jid, ext='err'),
                                     )
                     for jid in ids})

    @gen.coroutine
    def stop_workers(self, worker_ids, sync=False):
        # Accept a single job id or an iterable of ids; no-op on empty input.
        if isinstance(worker_ids, str):
            worker_ids = [worker_ids]
        elif worker_ids:
            worker_ids = list(worker_ids)
        else:
            return

        # Let the scheduler gracefully retire workers first
        ids_to_ips = {
            v['name']: k for k, v in self.scheduler.worker_info.items()
        }
        worker_ips = [ids_to_ips[wid]
                      for wid in worker_ids
                      if wid in ids_to_ips]
        retired = yield self.scheduler.retire_workers(workers=worker_ips,
                                                      close_workers=True)
        logger.info("Retired workers %s", retired)
        for wid in list(worker_ids):
            # Then hard-terminate the DRMAA jobs; already-gone jobs are fine.
            try:
                get_session().control(wid, drmaa.JobControlAction.TERMINATE)
            except drmaa.errors.InvalidJobException:
                pass
            try:
                self.workers.pop(wid)
            except KeyError:
                # If we have multiple callers at once, it may have already
                # been popped off
                pass

        logger.info("Stop workers %s", worker_ids)
        if sync:
            get_session().synchronize(worker_ids, dispose=True)

    @gen.coroutine
    def scale_up(self, n, **kwargs):
        # Start workers until the pool reaches n.
        yield [self.start_workers(**kwargs)
               for _ in range(n - len(self.workers))]

    @gen.coroutine
    def scale_down(self, workers):
        # Ask the scheduler to retire the given workers; the periodic
        # cleanup callback removes the finished jobs from self.workers.
        workers = set(workers)
        yield self.scheduler.retire_workers(workers=workers)

    def close(self):
        logger.info("Closing DRMAA cluster")
        # NOTE(review): stop_workers is a gen.coroutine; calling it without
        # driving it on an IOLoop means the code after its first yield
        # (job termination / synchronize) may never run — confirm.
        self.stop_workers(self.workers, sync=True)

        self.local_cluster.close()
        if self._should_cleanup_script and os.path.exists(self.script):
            os.remove(self.script)

    def __enter__(self):
        return self

    def __exit__(self, *args):
        self.close()

    def cleanup_closed_workers(self):
        # Drop bookkeeping for jobs whose DRMAA status says they finished.
        for jid in list(self.workers):
            if get_session().jobStatus(jid) in ('closed', 'done'):
                logger.info("Removing closed worker %s", jid)
                del self.workers[jid]

    def __del__(self):
        # Best-effort cleanup; never raise from a destructor.
        try:
            self.close()
        except:
            pass

    def __str__(self):
        return "<%s: %d workers>" % (self.__class__.__name__, len(self.workers))

    __repr__ = __str__



def remove_workers():
    # Terminate every job in this DRMAA session (registered below so that
    # interpreter exit never leaves orphaned grid jobs running).
    get_session().control(drmaa.Session.JOB_IDS_SESSION_ALL,
                          drmaa.JobControlAction.TERMINATE)


import atexit
atexit.register(remove_workers)
--------------------------------------------------------------------------------
/dask_drmaa/_version.py:
--------------------------------------------------------------------------------

# This file helps to compute a version number in source trees obtained from
# git-archive tarball (such as those provided by githubs download-from-tag
# feature). Distribution tarballs (built by setup.py sdist) and build
# directories (produced by setup.py build) will contain a much shorter file
# that just contains the computed version number.

# This file is released into the public domain. 
Generated by 9 | # versioneer-0.18 (https://github.com/warner/python-versioneer) 10 | 11 | """Git implementation of _version.py.""" 12 | 13 | import errno 14 | import os 15 | import re 16 | import subprocess 17 | import sys 18 | 19 | 20 | def get_keywords(): 21 | """Get the keywords needed to look up the version information.""" 22 | # these strings will be replaced by git during git-archive. 23 | # setup.py/versioneer.py will grep for the variable names, so they must 24 | # each be defined on a line of their own. _version.py will just call 25 | # get_keywords(). 26 | git_refnames = " (HEAD -> master)" 27 | git_full = "6c64a60add7665140899374ea9b3739282048868" 28 | git_date = "2021-02-08 14:30:40 -0800" 29 | keywords = {"refnames": git_refnames, "full": git_full, "date": git_date} 30 | return keywords 31 | 32 | 33 | class VersioneerConfig: 34 | """Container for Versioneer configuration parameters.""" 35 | 36 | 37 | def get_config(): 38 | """Create, populate and return the VersioneerConfig() object.""" 39 | # these strings are filled in when 'setup.py versioneer' creates 40 | # _version.py 41 | cfg = VersioneerConfig() 42 | cfg.VCS = "git" 43 | cfg.style = "pep440" 44 | cfg.tag_prefix = "" 45 | cfg.parentdir_prefix = "dask_drmaa" 46 | cfg.versionfile_source = "dask_drmaa/_version.py" 47 | cfg.verbose = False 48 | return cfg 49 | 50 | 51 | class NotThisMethod(Exception): 52 | """Exception raised if a method is not valid for the current scenario.""" 53 | 54 | 55 | LONG_VERSION_PY = {} 56 | HANDLERS = {} 57 | 58 | 59 | def register_vcs_handler(vcs, method): # decorator 60 | """Decorator to mark a method as the handler for a particular VCS.""" 61 | def decorate(f): 62 | """Store f in HANDLERS[vcs][method].""" 63 | if vcs not in HANDLERS: 64 | HANDLERS[vcs] = {} 65 | HANDLERS[vcs][method] = f 66 | return f 67 | return decorate 68 | 69 | 70 | def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False, 71 | env=None): 72 | """Call the given command(s).""" 73 
| assert isinstance(commands, list) 74 | p = None 75 | for c in commands: 76 | try: 77 | dispcmd = str([c] + args) 78 | # remember shell=False, so use git.cmd on windows, not just git 79 | p = subprocess.Popen([c] + args, cwd=cwd, env=env, 80 | stdout=subprocess.PIPE, 81 | stderr=(subprocess.PIPE if hide_stderr 82 | else None)) 83 | break 84 | except EnvironmentError: 85 | e = sys.exc_info()[1] 86 | if e.errno == errno.ENOENT: 87 | continue 88 | if verbose: 89 | print("unable to run %s" % dispcmd) 90 | print(e) 91 | return None, None 92 | else: 93 | if verbose: 94 | print("unable to find command, tried %s" % (commands,)) 95 | return None, None 96 | stdout = p.communicate()[0].strip() 97 | if sys.version_info[0] >= 3: 98 | stdout = stdout.decode() 99 | if p.returncode != 0: 100 | if verbose: 101 | print("unable to run %s (error)" % dispcmd) 102 | print("stdout was %s" % stdout) 103 | return None, p.returncode 104 | return stdout, p.returncode 105 | 106 | 107 | def versions_from_parentdir(parentdir_prefix, root, verbose): 108 | """Try to determine the version from the parent directory name. 109 | 110 | Source tarballs conventionally unpack into a directory that includes both 111 | the project name and a version string. 
We will also support searching up 112 | two directory levels for an appropriately named parent directory 113 | """ 114 | rootdirs = [] 115 | 116 | for i in range(3): 117 | dirname = os.path.basename(root) 118 | if dirname.startswith(parentdir_prefix): 119 | return {"version": dirname[len(parentdir_prefix):], 120 | "full-revisionid": None, 121 | "dirty": False, "error": None, "date": None} 122 | else: 123 | rootdirs.append(root) 124 | root = os.path.dirname(root) # up a level 125 | 126 | if verbose: 127 | print("Tried directories %s but none started with prefix %s" % 128 | (str(rootdirs), parentdir_prefix)) 129 | raise NotThisMethod("rootdir doesn't start with parentdir_prefix") 130 | 131 | 132 | @register_vcs_handler("git", "get_keywords") 133 | def git_get_keywords(versionfile_abs): 134 | """Extract version information from the given file.""" 135 | # the code embedded in _version.py can just fetch the value of these 136 | # keywords. When used from setup.py, we don't want to import _version.py, 137 | # so we do it with a regexp instead. This function is not used from 138 | # _version.py. 
139 | keywords = {} 140 | try: 141 | f = open(versionfile_abs, "r") 142 | for line in f.readlines(): 143 | if line.strip().startswith("git_refnames ="): 144 | mo = re.search(r'=\s*"(.*)"', line) 145 | if mo: 146 | keywords["refnames"] = mo.group(1) 147 | if line.strip().startswith("git_full ="): 148 | mo = re.search(r'=\s*"(.*)"', line) 149 | if mo: 150 | keywords["full"] = mo.group(1) 151 | if line.strip().startswith("git_date ="): 152 | mo = re.search(r'=\s*"(.*)"', line) 153 | if mo: 154 | keywords["date"] = mo.group(1) 155 | f.close() 156 | except EnvironmentError: 157 | pass 158 | return keywords 159 | 160 | 161 | @register_vcs_handler("git", "keywords") 162 | def git_versions_from_keywords(keywords, tag_prefix, verbose): 163 | """Get version information from git keywords.""" 164 | if not keywords: 165 | raise NotThisMethod("no keywords at all, weird") 166 | date = keywords.get("date") 167 | if date is not None: 168 | # git-2.2.0 added "%cI", which expands to an ISO-8601 -compliant 169 | # datestamp. However we prefer "%ci" (which expands to an "ISO-8601 170 | # -like" string, which we must then edit to make compliant), because 171 | # it's been around since git-1.5.3, and it's too difficult to 172 | # discover which version we're using, or to work around using an 173 | # older one. 174 | date = date.strip().replace(" ", "T", 1).replace(" ", "", 1) 175 | refnames = keywords["refnames"].strip() 176 | if refnames.startswith("$Format"): 177 | if verbose: 178 | print("keywords are unexpanded, not using") 179 | raise NotThisMethod("unexpanded keywords, not a git-archive tarball") 180 | refs = set([r.strip() for r in refnames.strip("()").split(",")]) 181 | # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of 182 | # just "foo-1.0". If we see a "tag: " prefix, prefer those. 183 | TAG = "tag: " 184 | tags = set([r[len(TAG):] for r in refs if r.startswith(TAG)]) 185 | if not tags: 186 | # Either we're using git < 1.8.3, or there really are no tags. 
We use 187 | # a heuristic: assume all version tags have a digit. The old git %d 188 | # expansion behaves like git log --decorate=short and strips out the 189 | # refs/heads/ and refs/tags/ prefixes that would let us distinguish 190 | # between branches and tags. By ignoring refnames without digits, we 191 | # filter out many common branch names like "release" and 192 | # "stabilization", as well as "HEAD" and "master". 193 | tags = set([r for r in refs if re.search(r'\d', r)]) 194 | if verbose: 195 | print("discarding '%s', no digits" % ",".join(refs - tags)) 196 | if verbose: 197 | print("likely tags: %s" % ",".join(sorted(tags))) 198 | for ref in sorted(tags): 199 | # sorting will prefer e.g. "2.0" over "2.0rc1" 200 | if ref.startswith(tag_prefix): 201 | r = ref[len(tag_prefix):] 202 | if verbose: 203 | print("picking %s" % r) 204 | return {"version": r, 205 | "full-revisionid": keywords["full"].strip(), 206 | "dirty": False, "error": None, 207 | "date": date} 208 | # no suitable tags, so version is "0+unknown", but full hex is still there 209 | if verbose: 210 | print("no suitable tags, using unknown + full revision id") 211 | return {"version": "0+unknown", 212 | "full-revisionid": keywords["full"].strip(), 213 | "dirty": False, "error": "no suitable tags", "date": None} 214 | 215 | 216 | @register_vcs_handler("git", "pieces_from_vcs") 217 | def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command): 218 | """Get version from 'git describe' in the root of the source tree. 219 | 220 | This only gets called if the git-archive 'subst' keywords were *not* 221 | expanded, and _version.py hasn't already been rewritten with a short 222 | version string, meaning we're inside a checked out source tree. 
223 | """ 224 | GITS = ["git"] 225 | if sys.platform == "win32": 226 | GITS = ["git.cmd", "git.exe"] 227 | 228 | out, rc = run_command(GITS, ["rev-parse", "--git-dir"], cwd=root, 229 | hide_stderr=True) 230 | if rc != 0: 231 | if verbose: 232 | print("Directory %s not under git control" % root) 233 | raise NotThisMethod("'git rev-parse --git-dir' returned error") 234 | 235 | # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty] 236 | # if there isn't one, this yields HEX[-dirty] (no NUM) 237 | describe_out, rc = run_command(GITS, ["describe", "--tags", "--dirty", 238 | "--always", "--long", 239 | "--match", "%s*" % tag_prefix], 240 | cwd=root) 241 | # --long was added in git-1.5.5 242 | if describe_out is None: 243 | raise NotThisMethod("'git describe' failed") 244 | describe_out = describe_out.strip() 245 | full_out, rc = run_command(GITS, ["rev-parse", "HEAD"], cwd=root) 246 | if full_out is None: 247 | raise NotThisMethod("'git rev-parse' failed") 248 | full_out = full_out.strip() 249 | 250 | pieces = {} 251 | pieces["long"] = full_out 252 | pieces["short"] = full_out[:7] # maybe improved later 253 | pieces["error"] = None 254 | 255 | # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty] 256 | # TAG might have hyphens. 257 | git_describe = describe_out 258 | 259 | # look for -dirty suffix 260 | dirty = git_describe.endswith("-dirty") 261 | pieces["dirty"] = dirty 262 | if dirty: 263 | git_describe = git_describe[:git_describe.rindex("-dirty")] 264 | 265 | # now we have TAG-NUM-gHEX or HEX 266 | 267 | if "-" in git_describe: 268 | # TAG-NUM-gHEX 269 | mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe) 270 | if not mo: 271 | # unparseable. Maybe git-describe is misbehaving? 
272 | pieces["error"] = ("unable to parse git-describe output: '%s'" 273 | % describe_out) 274 | return pieces 275 | 276 | # tag 277 | full_tag = mo.group(1) 278 | if not full_tag.startswith(tag_prefix): 279 | if verbose: 280 | fmt = "tag '%s' doesn't start with prefix '%s'" 281 | print(fmt % (full_tag, tag_prefix)) 282 | pieces["error"] = ("tag '%s' doesn't start with prefix '%s'" 283 | % (full_tag, tag_prefix)) 284 | return pieces 285 | pieces["closest-tag"] = full_tag[len(tag_prefix):] 286 | 287 | # distance: number of commits since tag 288 | pieces["distance"] = int(mo.group(2)) 289 | 290 | # commit: short hex revision ID 291 | pieces["short"] = mo.group(3) 292 | 293 | else: 294 | # HEX: no tags 295 | pieces["closest-tag"] = None 296 | count_out, rc = run_command(GITS, ["rev-list", "HEAD", "--count"], 297 | cwd=root) 298 | pieces["distance"] = int(count_out) # total number of commits 299 | 300 | # commit date: see ISO-8601 comment in git_versions_from_keywords() 301 | date = run_command(GITS, ["show", "-s", "--format=%ci", "HEAD"], 302 | cwd=root)[0].strip() 303 | pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1) 304 | 305 | return pieces 306 | 307 | 308 | def plus_or_dot(pieces): 309 | """Return a + if we don't already have one, else return a .""" 310 | if "+" in pieces.get("closest-tag", ""): 311 | return "." 312 | return "+" 313 | 314 | 315 | def render_pep440(pieces): 316 | """Build up version string, with post-release "local version identifier". 317 | 318 | Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you 319 | get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty 320 | 321 | Exceptions: 322 | 1: no tags. git_describe was just HEX. 
0+untagged.DISTANCE.gHEX[.dirty] 323 | """ 324 | if pieces["closest-tag"]: 325 | rendered = pieces["closest-tag"] 326 | if pieces["distance"] or pieces["dirty"]: 327 | rendered += plus_or_dot(pieces) 328 | rendered += "%d.g%s" % (pieces["distance"], pieces["short"]) 329 | if pieces["dirty"]: 330 | rendered += ".dirty" 331 | else: 332 | # exception #1 333 | rendered = "0+untagged.%d.g%s" % (pieces["distance"], 334 | pieces["short"]) 335 | if pieces["dirty"]: 336 | rendered += ".dirty" 337 | return rendered 338 | 339 | 340 | def render_pep440_pre(pieces): 341 | """TAG[.post.devDISTANCE] -- No -dirty. 342 | 343 | Exceptions: 344 | 1: no tags. 0.post.devDISTANCE 345 | """ 346 | if pieces["closest-tag"]: 347 | rendered = pieces["closest-tag"] 348 | if pieces["distance"]: 349 | rendered += ".post.dev%d" % pieces["distance"] 350 | else: 351 | # exception #1 352 | rendered = "0.post.dev%d" % pieces["distance"] 353 | return rendered 354 | 355 | 356 | def render_pep440_post(pieces): 357 | """TAG[.postDISTANCE[.dev0]+gHEX] . 358 | 359 | The ".dev0" means dirty. Note that .dev0 sorts backwards 360 | (a dirty tree will appear "older" than the corresponding clean one), 361 | but you shouldn't be releasing software with -dirty anyways. 362 | 363 | Exceptions: 364 | 1: no tags. 0.postDISTANCE[.dev0] 365 | """ 366 | if pieces["closest-tag"]: 367 | rendered = pieces["closest-tag"] 368 | if pieces["distance"] or pieces["dirty"]: 369 | rendered += ".post%d" % pieces["distance"] 370 | if pieces["dirty"]: 371 | rendered += ".dev0" 372 | rendered += plus_or_dot(pieces) 373 | rendered += "g%s" % pieces["short"] 374 | else: 375 | # exception #1 376 | rendered = "0.post%d" % pieces["distance"] 377 | if pieces["dirty"]: 378 | rendered += ".dev0" 379 | rendered += "+g%s" % pieces["short"] 380 | return rendered 381 | 382 | 383 | def render_pep440_old(pieces): 384 | """TAG[.postDISTANCE[.dev0]] . 385 | 386 | The ".dev0" means dirty. 387 | 388 | Exceptions: 389 | 1: no tags.
0.postDISTANCE[.dev0] 390 | """ 391 | if pieces["closest-tag"]: 392 | rendered = pieces["closest-tag"] 393 | if pieces["distance"] or pieces["dirty"]: 394 | rendered += ".post%d" % pieces["distance"] 395 | if pieces["dirty"]: 396 | rendered += ".dev0" 397 | else: 398 | # exception #1 399 | rendered = "0.post%d" % pieces["distance"] 400 | if pieces["dirty"]: 401 | rendered += ".dev0" 402 | return rendered 403 | 404 | 405 | def render_git_describe(pieces): 406 | """TAG[-DISTANCE-gHEX][-dirty]. 407 | 408 | Like 'git describe --tags --dirty --always'. 409 | 410 | Exceptions: 411 | 1: no tags. HEX[-dirty] (note: no 'g' prefix) 412 | """ 413 | if pieces["closest-tag"]: 414 | rendered = pieces["closest-tag"] 415 | if pieces["distance"]: 416 | rendered += "-%d-g%s" % (pieces["distance"], pieces["short"]) 417 | else: 418 | # exception #1 419 | rendered = pieces["short"] 420 | if pieces["dirty"]: 421 | rendered += "-dirty" 422 | return rendered 423 | 424 | 425 | def render_git_describe_long(pieces): 426 | """TAG-DISTANCE-gHEX[-dirty]. 427 | 428 | Like 'git describe --tags --dirty --always --long'. 429 | The distance/hash is unconditional. 430 | 431 | Exceptions: 432 | 1: no tags.
HEX[-dirty] (note: no 'g' prefix) 433 | """ 434 | if pieces["closest-tag"]: 435 | rendered = pieces["closest-tag"] 436 | rendered += "-%d-g%s" % (pieces["distance"], pieces["short"]) 437 | else: 438 | # exception #1 439 | rendered = pieces["short"] 440 | if pieces["dirty"]: 441 | rendered += "-dirty" 442 | return rendered 443 | 444 | 445 | def render(pieces, style): 446 | """Render the given version pieces into the requested style.""" 447 | if pieces["error"]: 448 | return {"version": "unknown", 449 | "full-revisionid": pieces.get("long"), 450 | "dirty": None, 451 | "error": pieces["error"], 452 | "date": None} 453 | 454 | if not style or style == "default": 455 | style = "pep440" # the default 456 | 457 | if style == "pep440": 458 | rendered = render_pep440(pieces) 459 | elif style == "pep440-pre": 460 | rendered = render_pep440_pre(pieces) 461 | elif style == "pep440-post": 462 | rendered = render_pep440_post(pieces) 463 | elif style == "pep440-old": 464 | rendered = render_pep440_old(pieces) 465 | elif style == "git-describe": 466 | rendered = render_git_describe(pieces) 467 | elif style == "git-describe-long": 468 | rendered = render_git_describe_long(pieces) 469 | else: 470 | raise ValueError("unknown style '%s'" % style) 471 | 472 | return {"version": rendered, "full-revisionid": pieces["long"], 473 | "dirty": pieces["dirty"], "error": None, 474 | "date": pieces.get("date")} 475 | 476 | 477 | def get_versions(): 478 | """Get version information or return default if unable to do so.""" 479 | # I am in _version.py, which lives at ROOT/VERSIONFILE_SOURCE. If we have 480 | # __file__, we can work backwards from there to the root. Some 481 | # py2exe/bbfreeze/non-CPython implementations don't do __file__, in which 482 | # case we can only use expanded keywords. 
483 | 484 | cfg = get_config() 485 | verbose = cfg.verbose 486 | 487 | try: 488 | return git_versions_from_keywords(get_keywords(), cfg.tag_prefix, 489 | verbose) 490 | except NotThisMethod: 491 | pass 492 | 493 | try: 494 | root = os.path.realpath(__file__) 495 | # versionfile_source is the relative path from the top of the source 496 | # tree (where the .git directory might live) to this file. Invert 497 | # this to find the root from __file__. 498 | for i in cfg.versionfile_source.split('/'): 499 | root = os.path.dirname(root) 500 | except NameError: 501 | return {"version": "0+unknown", "full-revisionid": None, 502 | "dirty": None, 503 | "error": "unable to find root of source tree", 504 | "date": None} 505 | 506 | try: 507 | pieces = git_pieces_from_vcs(cfg.tag_prefix, root, verbose) 508 | return render(pieces, cfg.style) 509 | except NotThisMethod: 510 | pass 511 | 512 | try: 513 | if cfg.parentdir_prefix: 514 | return versions_from_parentdir(cfg.parentdir_prefix, root, verbose) 515 | except NotThisMethod: 516 | pass 517 | 518 | return {"version": "0+unknown", "full-revisionid": None, 519 | "dirty": None, 520 | "error": "unable to compute version", "date": None} 521 | -------------------------------------------------------------------------------- /versioneer.py: -------------------------------------------------------------------------------- 1 | 2 | # Version: 0.18 3 | 4 | """The Versioneer - like a rocketeer, but for versions. 5 | 6 | The Versioneer 7 | ============== 8 | 9 | * like a rocketeer, but for versions! 
10 | * https://github.com/warner/python-versioneer 11 | * Brian Warner 12 | * License: Public Domain 13 | * Compatible With: python2.6, 2.7, 3.2, 3.3, 3.4, 3.5, 3.6, and pypy 14 | * [![Latest Version] 15 | (https://pypip.in/version/versioneer/badge.svg?style=flat) 16 | ](https://pypi.python.org/pypi/versioneer/) 17 | * [![Build Status] 18 | (https://travis-ci.org/warner/python-versioneer.png?branch=master) 19 | ](https://travis-ci.org/warner/python-versioneer) 20 | 21 | This is a tool for managing a recorded version number in distutils-based 22 | python projects. The goal is to remove the tedious and error-prone "update 23 | the embedded version string" step from your release process. Making a new 24 | release should be as easy as recording a new tag in your version-control 25 | system, and maybe making new tarballs. 26 | 27 | 28 | ## Quick Install 29 | 30 | * `pip install versioneer` to somewhere to your $PATH 31 | * add a `[versioneer]` section to your setup.cfg (see below) 32 | * run `versioneer install` in your source tree, commit the results 33 | 34 | ## Version Identifiers 35 | 36 | Source trees come from a variety of places: 37 | 38 | * a version-control system checkout (mostly used by developers) 39 | * a nightly tarball, produced by build automation 40 | * a snapshot tarball, produced by a web-based VCS browser, like github's 41 | "tarball from tag" feature 42 | * a release tarball, produced by "setup.py sdist", distributed through PyPI 43 | 44 | Within each source tree, the version identifier (either a string or a number, 45 | this tool is format-agnostic) can come from a variety of places: 46 | 47 | * ask the VCS tool itself, e.g. 
"git describe" (for checkouts), which knows 48 | about recent "tags" and an absolute revision-id 49 | * the name of the directory into which the tarball was unpacked 50 | * an expanded VCS keyword ($Id$, etc) 51 | * a `_version.py` created by some earlier build step 52 | 53 | For released software, the version identifier is closely related to a VCS 54 | tag. Some projects use tag names that include more than just the version 55 | string (e.g. "myproject-1.2" instead of just "1.2"), in which case the tool 56 | needs to strip the tag prefix to extract the version identifier. For 57 | unreleased software (between tags), the version identifier should provide 58 | enough information to help developers recreate the same tree, while also 59 | giving them an idea of roughly how old the tree is (after version 1.2, before 60 | version 1.3). Many VCS systems can report a description that captures this, 61 | for example `git describe --tags --dirty --always` reports things like 62 | "0.7-1-g574ab98-dirty" to indicate that the checkout is one revision past the 63 | 0.7 tag, has a unique revision id of "574ab98", and is "dirty" (it has 64 | uncommitted changes. 65 | 66 | The version identifier is used for multiple purposes: 67 | 68 | * to allow the module to self-identify its version: `myproject.__version__` 69 | * to choose a name and prefix for a 'setup.py sdist' tarball 70 | 71 | ## Theory of Operation 72 | 73 | Versioneer works by adding a special `_version.py` file into your source 74 | tree, where your `__init__.py` can import it. This `_version.py` knows how to 75 | dynamically ask the VCS tool for version information at import time. 76 | 77 | `_version.py` also contains `$Revision$` markers, and the installation 78 | process marks `_version.py` to have this marker rewritten with a tag name 79 | during the `git archive` command. As a result, generated tarballs will 80 | contain enough information to get the proper version. 
81 | 82 | To allow `setup.py` to compute a version too, a `versioneer.py` is added to 83 | the top level of your source tree, next to `setup.py` and the `setup.cfg` 84 | that configures it. This overrides several distutils/setuptools commands to 85 | compute the version when invoked, and changes `setup.py build` and `setup.py 86 | sdist` to replace `_version.py` with a small static file that contains just 87 | the generated version data. 88 | 89 | ## Installation 90 | 91 | See [INSTALL.md](./INSTALL.md) for detailed installation instructions. 92 | 93 | ## Version-String Flavors 94 | 95 | Code which uses Versioneer can learn about its version string at runtime by 96 | importing `_version` from your main `__init__.py` file and running the 97 | `get_versions()` function. From the "outside" (e.g. in `setup.py`), you can 98 | import the top-level `versioneer.py` and run `get_versions()`. 99 | 100 | Both functions return a dictionary with different flavors of version 101 | information: 102 | 103 | * `['version']`: A condensed version string, rendered using the selected 104 | style. This is the most commonly used value for the project's version 105 | string. The default "pep440" style yields strings like `0.11`, 106 | `0.11+2.g1076c97`, or `0.11+2.g1076c97.dirty`. See the "Styles" section 107 | below for alternative styles. 108 | 109 | * `['full-revisionid']`: detailed revision identifier. For Git, this is the 110 | full SHA1 commit id, e.g. "1076c978a8d3cfc70f408fe5974aa6c092c949ac". 111 | 112 | * `['date']`: Date and time of the latest `HEAD` commit. For Git, it is the 113 | commit date in ISO 8601 format. This will be None if the date is not 114 | available. 115 | 116 | * `['dirty']`: a boolean, True if the tree has uncommitted changes. 
Note that 117 | this is only accurate if run in a VCS checkout, otherwise it is likely to 118 | be False or None 119 | 120 | * `['error']`: if the version string could not be computed, this will be set 121 | to a string describing the problem, otherwise it will be None. It may be 122 | useful to throw an exception in setup.py if this is set, to avoid e.g. 123 | creating tarballs with a version string of "unknown". 124 | 125 | Some variants are more useful than others. Including `full-revisionid` in a 126 | bug report should allow developers to reconstruct the exact code being tested 127 | (or indicate the presence of local changes that should be shared with the 128 | developers). `version` is suitable for display in an "about" box or a CLI 129 | `--version` output: it can be easily compared against release notes and lists 130 | of bugs fixed in various releases. 131 | 132 | The installer adds the following text to your `__init__.py` to place a basic 133 | version in `YOURPROJECT.__version__`: 134 | 135 | from ._version import get_versions 136 | __version__ = get_versions()['version'] 137 | del get_versions 138 | 139 | ## Styles 140 | 141 | The setup.cfg `style=` configuration controls how the VCS information is 142 | rendered into a version string. 143 | 144 | The default style, "pep440", produces a PEP440-compliant string, equal to the 145 | un-prefixed tag name for actual releases, and containing an additional "local 146 | version" section with more detail for in-between builds. For Git, this is 147 | TAG[+DISTANCE.gHEX[.dirty]] , using information from `git describe --tags 148 | --dirty --always`. For example "0.11+2.g1076c97.dirty" indicates that the 149 | tree is like the "1076c97" commit but has uncommitted changes (".dirty"), and 150 | that this commit is two revisions ("+2") beyond the "0.11" tag. For released 151 | software (exactly equal to a known tag), the identifier will only contain the 152 | stripped tag, e.g. "0.11". 
153 | 154 | Other styles are available. See [details.md](details.md) in the Versioneer 155 | source tree for descriptions. 156 | 157 | ## Debugging 158 | 159 | Versioneer tries to avoid fatal errors: if something goes wrong, it will tend 160 | to return a version of "0+unknown". To investigate the problem, run `setup.py 161 | version`, which will run the version-lookup code in a verbose mode, and will 162 | display the full contents of `get_versions()` (including the `error` string, 163 | which may help identify what went wrong). 164 | 165 | ## Known Limitations 166 | 167 | Some situations are known to cause problems for Versioneer. This details the 168 | most significant ones. More can be found on Github 169 | [issues page](https://github.com/warner/python-versioneer/issues). 170 | 171 | ### Subprojects 172 | 173 | Versioneer has limited support for source trees in which `setup.py` is not in 174 | the root directory (e.g. `setup.py` and `.git/` are *not* siblings). There are 175 | two common reasons why `setup.py` might not be in the root: 176 | 177 | * Source trees which contain multiple subprojects, such as 178 | [Buildbot](https://github.com/buildbot/buildbot), which contains both 179 | "master" and "slave" subprojects, each with their own `setup.py`, 180 | `setup.cfg`, and `tox.ini`. Projects like these produce multiple PyPI 181 | distributions (and upload multiple independently-installable tarballs). 182 | * Source trees whose main purpose is to contain a C library, but which also 183 | provide bindings to Python (and perhaps other languages) in subdirectories. 184 | 185 | Versioneer will look for `.git` in parent directories, and most operations 186 | should get the right version string. However `pip` and `setuptools` have bugs 187 | and implementation details which frequently cause `pip install .` from a 188 | subproject directory to fail to find a correct version string (so it usually 189 | defaults to `0+unknown`).
190 | 191 | `pip install --editable .` should work correctly. `setup.py install` might 192 | work too. 193 | 194 | Pip-8.1.1 is known to have this problem, but hopefully it will get fixed in 195 | some later version. 196 | 197 | [Bug #38](https://github.com/warner/python-versioneer/issues/38) is tracking 198 | this issue. The discussion in 199 | [PR #61](https://github.com/warner/python-versioneer/pull/61) describes the 200 | issue from the Versioneer side in more detail. 201 | [pip PR#3176](https://github.com/pypa/pip/pull/3176) and 202 | [pip PR#3615](https://github.com/pypa/pip/pull/3615) contain work to improve 203 | pip to let Versioneer work correctly. 204 | 205 | Versioneer-0.16 and earlier only looked for a `.git` directory next to the 206 | `setup.cfg`, so subprojects were completely unsupported with those releases. 207 | 208 | ### Editable installs with setuptools <= 18.5 209 | 210 | `setup.py develop` and `pip install --editable .` allow you to install a 211 | project into a virtualenv once, then continue editing the source code (and 212 | test) without re-installing after every change. 213 | 214 | "Entry-point scripts" (`setup(entry_points={"console_scripts": ..})`) are a 215 | convenient way to specify executable scripts that should be installed along 216 | with the python package. 217 | 218 | These both work as expected when using modern setuptools. When using 219 | setuptools-18.5 or earlier, however, certain operations will cause 220 | `pkg_resources.DistributionNotFound` errors when running the entrypoint 221 | script, which must be resolved by re-installing the package. This happens 222 | when the install happens with one version, then the egg_info data is 223 | regenerated while a different version is checked out. Many setup.py commands 224 | cause egg_info to be rebuilt (including `sdist`, `wheel`, and installing into 225 | a different virtualenv), so this can be surprising. 
226 | 227 | [Bug #83](https://github.com/warner/python-versioneer/issues/83) describes 228 | this one, but upgrading to a newer version of setuptools should probably 229 | resolve it. 230 | 231 | ### Unicode version strings 232 | 233 | While Versioneer works (and is continually tested) with both Python 2 and 234 | Python 3, it is not entirely consistent with bytes-vs-unicode distinctions. 235 | Newer releases probably generate unicode version strings on py2. It's not 236 | clear that this is wrong, but it may be surprising for applications when then 237 | write these strings to a network connection or include them in bytes-oriented 238 | APIs like cryptographic checksums. 239 | 240 | [Bug #71](https://github.com/warner/python-versioneer/issues/71) investigates 241 | this question. 242 | 243 | 244 | ## Updating Versioneer 245 | 246 | To upgrade your project to a new release of Versioneer, do the following: 247 | 248 | * install the new Versioneer (`pip install -U versioneer` or equivalent) 249 | * edit `setup.cfg`, if necessary, to include any new configuration settings 250 | indicated by the release notes. See [UPGRADING](./UPGRADING.md) for details. 251 | * re-run `versioneer install` in your source tree, to replace 252 | `SRC/_version.py` 253 | * commit any changed files 254 | 255 | ## Future Directions 256 | 257 | This tool is designed to make it easily extended to other version-control 258 | systems: all VCS-specific components are in separate directories like 259 | src/git/ . The top-level `versioneer.py` script is assembled from these 260 | components by running make-versioneer.py . In the future, make-versioneer.py 261 | will take a VCS name as an argument, and will construct a version of 262 | `versioneer.py` that is specific to the given VCS. It might also take the 263 | configuration arguments that are currently provided manually during 264 | installation by editing setup.py . 
Alternatively, it might go the other 265 | direction and include code from all supported VCS systems, reducing the 266 | number of intermediate scripts. 267 | 268 | 269 | ## License 270 | 271 | To make Versioneer easier to embed, all its code is dedicated to the public 272 | domain. The `_version.py` that it creates is also in the public domain. 273 | Specifically, both are released under the Creative Commons "Public Domain 274 | Dedication" license (CC0-1.0), as described in 275 | https://creativecommons.org/publicdomain/zero/1.0/ . 276 | 277 | """ 278 | 279 | from __future__ import print_function 280 | try: 281 | import configparser 282 | except ImportError: 283 | import ConfigParser as configparser 284 | import errno 285 | import json 286 | import os 287 | import re 288 | import subprocess 289 | import sys 290 | 291 | 292 | class VersioneerConfig: 293 | """Container for Versioneer configuration parameters.""" 294 | 295 | 296 | def get_root(): 297 | """Get the project root directory. 298 | 299 | We require that all commands are run from the project root, i.e. the 300 | directory that contains setup.py, setup.cfg, and versioneer.py . 301 | """ 302 | root = os.path.realpath(os.path.abspath(os.getcwd())) 303 | setup_py = os.path.join(root, "setup.py") 304 | versioneer_py = os.path.join(root, "versioneer.py") 305 | if not (os.path.exists(setup_py) or os.path.exists(versioneer_py)): 306 | # allow 'python path/to/setup.py COMMAND' 307 | root = os.path.dirname(os.path.realpath(os.path.abspath(sys.argv[0]))) 308 | setup_py = os.path.join(root, "setup.py") 309 | versioneer_py = os.path.join(root, "versioneer.py") 310 | if not (os.path.exists(setup_py) or os.path.exists(versioneer_py)): 311 | err = ("Versioneer was unable to run the project root directory. 
" 312 | "Versioneer requires setup.py to be executed from " 313 | "its immediate directory (like 'python setup.py COMMAND'), " 314 | "or in a way that lets it use sys.argv[0] to find the root " 315 | "(like 'python path/to/setup.py COMMAND').") 316 | raise VersioneerBadRootError(err) 317 | try: 318 | # Certain runtime workflows (setup.py install/develop in a setuptools 319 | # tree) execute all dependencies in a single python process, so 320 | # "versioneer" may be imported multiple times, and python's shared 321 | # module-import table will cache the first one. So we can't use 322 | # os.path.dirname(__file__), as that will find whichever 323 | # versioneer.py was first imported, even in later projects. 324 | me = os.path.realpath(os.path.abspath(__file__)) 325 | me_dir = os.path.normcase(os.path.splitext(me)[0]) 326 | vsr_dir = os.path.normcase(os.path.splitext(versioneer_py)[0]) 327 | if me_dir != vsr_dir: 328 | print("Warning: build in %s is using versioneer.py from %s" 329 | % (os.path.dirname(me), versioneer_py)) 330 | except NameError: 331 | pass 332 | return root 333 | 334 | 335 | def get_config_from_root(root): 336 | """Read the project setup.cfg file to determine Versioneer config.""" 337 | # This might raise EnvironmentError (if setup.cfg is missing), or 338 | # configparser.NoSectionError (if it lacks a [versioneer] section), or 339 | # configparser.NoOptionError (if it lacks "VCS="). See the docstring at 340 | # the top of versioneer.py for instructions on writing your setup.cfg . 
341 | setup_cfg = os.path.join(root, "setup.cfg") 342 | parser = configparser.SafeConfigParser() 343 | with open(setup_cfg, "r") as f: 344 | parser.readfp(f) 345 | VCS = parser.get("versioneer", "VCS") # mandatory 346 | 347 | def get(parser, name): 348 | if parser.has_option("versioneer", name): 349 | return parser.get("versioneer", name) 350 | return None 351 | cfg = VersioneerConfig() 352 | cfg.VCS = VCS 353 | cfg.style = get(parser, "style") or "" 354 | cfg.versionfile_source = get(parser, "versionfile_source") 355 | cfg.versionfile_build = get(parser, "versionfile_build") 356 | cfg.tag_prefix = get(parser, "tag_prefix") 357 | if cfg.tag_prefix in ("''", '""'): 358 | cfg.tag_prefix = "" 359 | cfg.parentdir_prefix = get(parser, "parentdir_prefix") 360 | cfg.verbose = get(parser, "verbose") 361 | return cfg 362 | 363 | 364 | class NotThisMethod(Exception): 365 | """Exception raised if a method is not valid for the current scenario.""" 366 | 367 | 368 | # these dictionaries contain VCS-specific tools 369 | LONG_VERSION_PY = {} 370 | HANDLERS = {} 371 | 372 | 373 | def register_vcs_handler(vcs, method): # decorator 374 | """Decorator to mark a method as the handler for a particular VCS.""" 375 | def decorate(f): 376 | """Store f in HANDLERS[vcs][method].""" 377 | if vcs not in HANDLERS: 378 | HANDLERS[vcs] = {} 379 | HANDLERS[vcs][method] = f 380 | return f 381 | return decorate 382 | 383 | 384 | def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False, 385 | env=None): 386 | """Call the given command(s).""" 387 | assert isinstance(commands, list) 388 | p = None 389 | for c in commands: 390 | try: 391 | dispcmd = str([c] + args) 392 | # remember shell=False, so use git.cmd on windows, not just git 393 | p = subprocess.Popen([c] + args, cwd=cwd, env=env, 394 | stdout=subprocess.PIPE, 395 | stderr=(subprocess.PIPE if hide_stderr 396 | else None)) 397 | break 398 | except EnvironmentError: 399 | e = sys.exc_info()[1] 400 | if e.errno == errno.ENOENT: 
401 | continue 402 | if verbose: 403 | print("unable to run %s" % dispcmd) 404 | print(e) 405 | return None, None 406 | else: 407 | if verbose: 408 | print("unable to find command, tried %s" % (commands,)) 409 | return None, None 410 | stdout = p.communicate()[0].strip() 411 | if sys.version_info[0] >= 3: 412 | stdout = stdout.decode() 413 | if p.returncode != 0: 414 | if verbose: 415 | print("unable to run %s (error)" % dispcmd) 416 | print("stdout was %s" % stdout) 417 | return None, p.returncode 418 | return stdout, p.returncode 419 | 420 | 421 | LONG_VERSION_PY['git'] = ''' 422 | # This file helps to compute a version number in source trees obtained from 423 | # git-archive tarball (such as those provided by githubs download-from-tag 424 | # feature). Distribution tarballs (built by setup.py sdist) and build 425 | # directories (produced by setup.py build) will contain a much shorter file 426 | # that just contains the computed version number. 427 | 428 | # This file is released into the public domain. Generated by 429 | # versioneer-0.18 (https://github.com/warner/python-versioneer) 430 | 431 | """Git implementation of _version.py.""" 432 | 433 | import errno 434 | import os 435 | import re 436 | import subprocess 437 | import sys 438 | 439 | 440 | def get_keywords(): 441 | """Get the keywords needed to look up the version information.""" 442 | # these strings will be replaced by git during git-archive. 443 | # setup.py/versioneer.py will grep for the variable names, so they must 444 | # each be defined on a line of their own. _version.py will just call 445 | # get_keywords(). 
446 | git_refnames = "%(DOLLAR)sFormat:%%d%(DOLLAR)s" 447 | git_full = "%(DOLLAR)sFormat:%%H%(DOLLAR)s" 448 | git_date = "%(DOLLAR)sFormat:%%ci%(DOLLAR)s" 449 | keywords = {"refnames": git_refnames, "full": git_full, "date": git_date} 450 | return keywords 451 | 452 | 453 | class VersioneerConfig: 454 | """Container for Versioneer configuration parameters.""" 455 | 456 | 457 | def get_config(): 458 | """Create, populate and return the VersioneerConfig() object.""" 459 | # these strings are filled in when 'setup.py versioneer' creates 460 | # _version.py 461 | cfg = VersioneerConfig() 462 | cfg.VCS = "git" 463 | cfg.style = "%(STYLE)s" 464 | cfg.tag_prefix = "%(TAG_PREFIX)s" 465 | cfg.parentdir_prefix = "%(PARENTDIR_PREFIX)s" 466 | cfg.versionfile_source = "%(VERSIONFILE_SOURCE)s" 467 | cfg.verbose = False 468 | return cfg 469 | 470 | 471 | class NotThisMethod(Exception): 472 | """Exception raised if a method is not valid for the current scenario.""" 473 | 474 | 475 | LONG_VERSION_PY = {} 476 | HANDLERS = {} 477 | 478 | 479 | def register_vcs_handler(vcs, method): # decorator 480 | """Decorator to mark a method as the handler for a particular VCS.""" 481 | def decorate(f): 482 | """Store f in HANDLERS[vcs][method].""" 483 | if vcs not in HANDLERS: 484 | HANDLERS[vcs] = {} 485 | HANDLERS[vcs][method] = f 486 | return f 487 | return decorate 488 | 489 | 490 | def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False, 491 | env=None): 492 | """Call the given command(s).""" 493 | assert isinstance(commands, list) 494 | p = None 495 | for c in commands: 496 | try: 497 | dispcmd = str([c] + args) 498 | # remember shell=False, so use git.cmd on windows, not just git 499 | p = subprocess.Popen([c] + args, cwd=cwd, env=env, 500 | stdout=subprocess.PIPE, 501 | stderr=(subprocess.PIPE if hide_stderr 502 | else None)) 503 | break 504 | except EnvironmentError: 505 | e = sys.exc_info()[1] 506 | if e.errno == errno.ENOENT: 507 | continue 508 | if verbose: 509 | 
print("unable to run %%s" %% dispcmd) 510 | print(e) 511 | return None, None 512 | else: 513 | if verbose: 514 | print("unable to find command, tried %%s" %% (commands,)) 515 | return None, None 516 | stdout = p.communicate()[0].strip() 517 | if sys.version_info[0] >= 3: 518 | stdout = stdout.decode() 519 | if p.returncode != 0: 520 | if verbose: 521 | print("unable to run %%s (error)" %% dispcmd) 522 | print("stdout was %%s" %% stdout) 523 | return None, p.returncode 524 | return stdout, p.returncode 525 | 526 | 527 | def versions_from_parentdir(parentdir_prefix, root, verbose): 528 | """Try to determine the version from the parent directory name. 529 | 530 | Source tarballs conventionally unpack into a directory that includes both 531 | the project name and a version string. We will also support searching up 532 | two directory levels for an appropriately named parent directory 533 | """ 534 | rootdirs = [] 535 | 536 | for i in range(3): 537 | dirname = os.path.basename(root) 538 | if dirname.startswith(parentdir_prefix): 539 | return {"version": dirname[len(parentdir_prefix):], 540 | "full-revisionid": None, 541 | "dirty": False, "error": None, "date": None} 542 | else: 543 | rootdirs.append(root) 544 | root = os.path.dirname(root) # up a level 545 | 546 | if verbose: 547 | print("Tried directories %%s but none started with prefix %%s" %% 548 | (str(rootdirs), parentdir_prefix)) 549 | raise NotThisMethod("rootdir doesn't start with parentdir_prefix") 550 | 551 | 552 | @register_vcs_handler("git", "get_keywords") 553 | def git_get_keywords(versionfile_abs): 554 | """Extract version information from the given file.""" 555 | # the code embedded in _version.py can just fetch the value of these 556 | # keywords. When used from setup.py, we don't want to import _version.py, 557 | # so we do it with a regexp instead. This function is not used from 558 | # _version.py. 
559 | keywords = {} 560 | try: 561 | f = open(versionfile_abs, "r") 562 | for line in f.readlines(): 563 | if line.strip().startswith("git_refnames ="): 564 | mo = re.search(r'=\s*"(.*)"', line) 565 | if mo: 566 | keywords["refnames"] = mo.group(1) 567 | if line.strip().startswith("git_full ="): 568 | mo = re.search(r'=\s*"(.*)"', line) 569 | if mo: 570 | keywords["full"] = mo.group(1) 571 | if line.strip().startswith("git_date ="): 572 | mo = re.search(r'=\s*"(.*)"', line) 573 | if mo: 574 | keywords["date"] = mo.group(1) 575 | f.close() 576 | except EnvironmentError: 577 | pass 578 | return keywords 579 | 580 | 581 | @register_vcs_handler("git", "keywords") 582 | def git_versions_from_keywords(keywords, tag_prefix, verbose): 583 | """Get version information from git keywords.""" 584 | if not keywords: 585 | raise NotThisMethod("no keywords at all, weird") 586 | date = keywords.get("date") 587 | if date is not None: 588 | # git-2.2.0 added "%%cI", which expands to an ISO-8601 -compliant 589 | # datestamp. However we prefer "%%ci" (which expands to an "ISO-8601 590 | # -like" string, which we must then edit to make compliant), because 591 | # it's been around since git-1.5.3, and it's too difficult to 592 | # discover which version we're using, or to work around using an 593 | # older one. 594 | date = date.strip().replace(" ", "T", 1).replace(" ", "", 1) 595 | refnames = keywords["refnames"].strip() 596 | if refnames.startswith("$Format"): 597 | if verbose: 598 | print("keywords are unexpanded, not using") 599 | raise NotThisMethod("unexpanded keywords, not a git-archive tarball") 600 | refs = set([r.strip() for r in refnames.strip("()").split(",")]) 601 | # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of 602 | # just "foo-1.0". If we see a "tag: " prefix, prefer those. 603 | TAG = "tag: " 604 | tags = set([r[len(TAG):] for r in refs if r.startswith(TAG)]) 605 | if not tags: 606 | # Either we're using git < 1.8.3, or there really are no tags. 
We use 607 | # a heuristic: assume all version tags have a digit. The old git %%d 608 | # expansion behaves like git log --decorate=short and strips out the 609 | # refs/heads/ and refs/tags/ prefixes that would let us distinguish 610 | # between branches and tags. By ignoring refnames without digits, we 611 | # filter out many common branch names like "release" and 612 | # "stabilization", as well as "HEAD" and "master". 613 | tags = set([r for r in refs if re.search(r'\d', r)]) 614 | if verbose: 615 | print("discarding '%%s', no digits" %% ",".join(refs - tags)) 616 | if verbose: 617 | print("likely tags: %%s" %% ",".join(sorted(tags))) 618 | for ref in sorted(tags): 619 | # sorting will prefer e.g. "2.0" over "2.0rc1" 620 | if ref.startswith(tag_prefix): 621 | r = ref[len(tag_prefix):] 622 | if verbose: 623 | print("picking %%s" %% r) 624 | return {"version": r, 625 | "full-revisionid": keywords["full"].strip(), 626 | "dirty": False, "error": None, 627 | "date": date} 628 | # no suitable tags, so version is "0+unknown", but full hex is still there 629 | if verbose: 630 | print("no suitable tags, using unknown + full revision id") 631 | return {"version": "0+unknown", 632 | "full-revisionid": keywords["full"].strip(), 633 | "dirty": False, "error": "no suitable tags", "date": None} 634 | 635 | 636 | @register_vcs_handler("git", "pieces_from_vcs") 637 | def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command): 638 | """Get version from 'git describe' in the root of the source tree. 639 | 640 | This only gets called if the git-archive 'subst' keywords were *not* 641 | expanded, and _version.py hasn't already been rewritten with a short 642 | version string, meaning we're inside a checked out source tree. 
643 | """ 644 | GITS = ["git"] 645 | if sys.platform == "win32": 646 | GITS = ["git.cmd", "git.exe"] 647 | 648 | out, rc = run_command(GITS, ["rev-parse", "--git-dir"], cwd=root, 649 | hide_stderr=True) 650 | if rc != 0: 651 | if verbose: 652 | print("Directory %%s not under git control" %% root) 653 | raise NotThisMethod("'git rev-parse --git-dir' returned error") 654 | 655 | # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty] 656 | # if there isn't one, this yields HEX[-dirty] (no NUM) 657 | describe_out, rc = run_command(GITS, ["describe", "--tags", "--dirty", 658 | "--always", "--long", 659 | "--match", "%%s*" %% tag_prefix], 660 | cwd=root) 661 | # --long was added in git-1.5.5 662 | if describe_out is None: 663 | raise NotThisMethod("'git describe' failed") 664 | describe_out = describe_out.strip() 665 | full_out, rc = run_command(GITS, ["rev-parse", "HEAD"], cwd=root) 666 | if full_out is None: 667 | raise NotThisMethod("'git rev-parse' failed") 668 | full_out = full_out.strip() 669 | 670 | pieces = {} 671 | pieces["long"] = full_out 672 | pieces["short"] = full_out[:7] # maybe improved later 673 | pieces["error"] = None 674 | 675 | # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty] 676 | # TAG might have hyphens. 677 | git_describe = describe_out 678 | 679 | # look for -dirty suffix 680 | dirty = git_describe.endswith("-dirty") 681 | pieces["dirty"] = dirty 682 | if dirty: 683 | git_describe = git_describe[:git_describe.rindex("-dirty")] 684 | 685 | # now we have TAG-NUM-gHEX or HEX 686 | 687 | if "-" in git_describe: 688 | # TAG-NUM-gHEX 689 | mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe) 690 | if not mo: 691 | # unparseable. Maybe git-describe is misbehaving? 
692 | pieces["error"] = ("unable to parse git-describe output: '%%s'" 693 | %% describe_out) 694 | return pieces 695 | 696 | # tag 697 | full_tag = mo.group(1) 698 | if not full_tag.startswith(tag_prefix): 699 | if verbose: 700 | fmt = "tag '%%s' doesn't start with prefix '%%s'" 701 | print(fmt %% (full_tag, tag_prefix)) 702 | pieces["error"] = ("tag '%%s' doesn't start with prefix '%%s'" 703 | %% (full_tag, tag_prefix)) 704 | return pieces 705 | pieces["closest-tag"] = full_tag[len(tag_prefix):] 706 | 707 | # distance: number of commits since tag 708 | pieces["distance"] = int(mo.group(2)) 709 | 710 | # commit: short hex revision ID 711 | pieces["short"] = mo.group(3) 712 | 713 | else: 714 | # HEX: no tags 715 | pieces["closest-tag"] = None 716 | count_out, rc = run_command(GITS, ["rev-list", "HEAD", "--count"], 717 | cwd=root) 718 | pieces["distance"] = int(count_out) # total number of commits 719 | 720 | # commit date: see ISO-8601 comment in git_versions_from_keywords() 721 | date = run_command(GITS, ["show", "-s", "--format=%%ci", "HEAD"], 722 | cwd=root)[0].strip() 723 | pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1) 724 | 725 | return pieces 726 | 727 | 728 | def plus_or_dot(pieces): 729 | """Return a + if we don't already have one, else return a .""" 730 | if "+" in pieces.get("closest-tag", ""): 731 | return "." 732 | return "+" 733 | 734 | 735 | def render_pep440(pieces): 736 | """Build up version string, with post-release "local version identifier". 737 | 738 | Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you 739 | get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty 740 | 741 | Exceptions: 742 | 1: no tags. git_describe was just HEX. 
0+untagged.DISTANCE.gHEX[.dirty] 743 | """ 744 | if pieces["closest-tag"]: 745 | rendered = pieces["closest-tag"] 746 | if pieces["distance"] or pieces["dirty"]: 747 | rendered += plus_or_dot(pieces) 748 | rendered += "%%d.g%%s" %% (pieces["distance"], pieces["short"]) 749 | if pieces["dirty"]: 750 | rendered += ".dirty" 751 | else: 752 | # exception #1 753 | rendered = "0+untagged.%%d.g%%s" %% (pieces["distance"], 754 | pieces["short"]) 755 | if pieces["dirty"]: 756 | rendered += ".dirty" 757 | return rendered 758 | 759 | 760 | def render_pep440_pre(pieces): 761 | """TAG[.post.devDISTANCE] -- No -dirty. 762 | 763 | Exceptions: 764 | 1: no tags. 0.post.devDISTANCE 765 | """ 766 | if pieces["closest-tag"]: 767 | rendered = pieces["closest-tag"] 768 | if pieces["distance"]: 769 | rendered += ".post.dev%%d" %% pieces["distance"] 770 | else: 771 | # exception #1 772 | rendered = "0.post.dev%%d" %% pieces["distance"] 773 | return rendered 774 | 775 | 776 | def render_pep440_post(pieces): 777 | """TAG[.postDISTANCE[.dev0]+gHEX] . 778 | 779 | The ".dev0" means dirty. Note that .dev0 sorts backwards 780 | (a dirty tree will appear "older" than the corresponding clean one), 781 | but you shouldn't be releasing software with -dirty anyways. 782 | 783 | Exceptions: 784 | 1: no tags. 0.postDISTANCE[.dev0] 785 | """ 786 | if pieces["closest-tag"]: 787 | rendered = pieces["closest-tag"] 788 | if pieces["distance"] or pieces["dirty"]: 789 | rendered += ".post%%d" %% pieces["distance"] 790 | if pieces["dirty"]: 791 | rendered += ".dev0" 792 | rendered += plus_or_dot(pieces) 793 | rendered += "g%%s" %% pieces["short"] 794 | else: 795 | # exception #1 796 | rendered = "0.post%%d" %% pieces["distance"] 797 | if pieces["dirty"]: 798 | rendered += ".dev0" 799 | rendered += "+g%%s" %% pieces["short"] 800 | return rendered 801 | 802 | 803 | def render_pep440_old(pieces): 804 | """TAG[.postDISTANCE[.dev0]] . 805 | 806 | The ".dev0" means dirty. 807 | 808 | Eexceptions: 809 | 1: no tags. 
0.postDISTANCE[.dev0] 810 | """ 811 | if pieces["closest-tag"]: 812 | rendered = pieces["closest-tag"] 813 | if pieces["distance"] or pieces["dirty"]: 814 | rendered += ".post%%d" %% pieces["distance"] 815 | if pieces["dirty"]: 816 | rendered += ".dev0" 817 | else: 818 | # exception #1 819 | rendered = "0.post%%d" %% pieces["distance"] 820 | if pieces["dirty"]: 821 | rendered += ".dev0" 822 | return rendered 823 | 824 | 825 | def render_git_describe(pieces): 826 | """TAG[-DISTANCE-gHEX][-dirty]. 827 | 828 | Like 'git describe --tags --dirty --always'. 829 | 830 | Exceptions: 831 | 1: no tags. HEX[-dirty] (note: no 'g' prefix) 832 | """ 833 | if pieces["closest-tag"]: 834 | rendered = pieces["closest-tag"] 835 | if pieces["distance"]: 836 | rendered += "-%%d-g%%s" %% (pieces["distance"], pieces["short"]) 837 | else: 838 | # exception #1 839 | rendered = pieces["short"] 840 | if pieces["dirty"]: 841 | rendered += "-dirty" 842 | return rendered 843 | 844 | 845 | def render_git_describe_long(pieces): 846 | """TAG-DISTANCE-gHEX[-dirty]. 847 | 848 | Like 'git describe --tags --dirty --always -long'. 849 | The distance/hash is unconditional. 850 | 851 | Exceptions: 852 | 1: no tags. 
HEX[-dirty] (note: no 'g' prefix) 853 | """ 854 | if pieces["closest-tag"]: 855 | rendered = pieces["closest-tag"] 856 | rendered += "-%%d-g%%s" %% (pieces["distance"], pieces["short"]) 857 | else: 858 | # exception #1 859 | rendered = pieces["short"] 860 | if pieces["dirty"]: 861 | rendered += "-dirty" 862 | return rendered 863 | 864 | 865 | def render(pieces, style): 866 | """Render the given version pieces into the requested style.""" 867 | if pieces["error"]: 868 | return {"version": "unknown", 869 | "full-revisionid": pieces.get("long"), 870 | "dirty": None, 871 | "error": pieces["error"], 872 | "date": None} 873 | 874 | if not style or style == "default": 875 | style = "pep440" # the default 876 | 877 | if style == "pep440": 878 | rendered = render_pep440(pieces) 879 | elif style == "pep440-pre": 880 | rendered = render_pep440_pre(pieces) 881 | elif style == "pep440-post": 882 | rendered = render_pep440_post(pieces) 883 | elif style == "pep440-old": 884 | rendered = render_pep440_old(pieces) 885 | elif style == "git-describe": 886 | rendered = render_git_describe(pieces) 887 | elif style == "git-describe-long": 888 | rendered = render_git_describe_long(pieces) 889 | else: 890 | raise ValueError("unknown style '%%s'" %% style) 891 | 892 | return {"version": rendered, "full-revisionid": pieces["long"], 893 | "dirty": pieces["dirty"], "error": None, 894 | "date": pieces.get("date")} 895 | 896 | 897 | def get_versions(): 898 | """Get version information or return default if unable to do so.""" 899 | # I am in _version.py, which lives at ROOT/VERSIONFILE_SOURCE. If we have 900 | # __file__, we can work backwards from there to the root. Some 901 | # py2exe/bbfreeze/non-CPython implementations don't do __file__, in which 902 | # case we can only use expanded keywords. 
@register_vcs_handler("git", "get_keywords")
def git_get_keywords(versionfile_abs):
    """Extract version information from the given file.

    Scans *versionfile_abs* line by line for the ``git_refnames``,
    ``git_full`` and ``git_date`` assignments and returns whatever was
    found as a dict; keys that were not found are simply absent.
    """
    # the code embedded in _version.py can just fetch the value of these
    # keywords. When used from setup.py, we don't want to import _version.py,
    # so we do it with a regexp instead. This function is not used from
    # _version.py.
    keywords = {}
    try:
        # Fix: use "with" so the descriptor is closed even when a read
        # raises mid-loop; the original open()/close() pair leaked the
        # handle on that path (the except below swallowed the error but
        # never closed the file).
        with open(versionfile_abs, "r") as f:
            for line in f.readlines():
                if line.strip().startswith("git_refnames ="):
                    mo = re.search(r'=\s*"(.*)"', line)
                    if mo:
                        keywords["refnames"] = mo.group(1)
                if line.strip().startswith("git_full ="):
                    mo = re.search(r'=\s*"(.*)"', line)
                    if mo:
                        keywords["full"] = mo.group(1)
                if line.strip().startswith("git_date ="):
                    mo = re.search(r'=\s*"(.*)"', line)
                    if mo:
                        keywords["date"] = mo.group(1)
    except EnvironmentError:
        # Missing/unreadable file: deliberately best-effort — return
        # whatever (possibly nothing) was collected.
        pass
    return keywords


@register_vcs_handler("git", "keywords")
def git_versions_from_keywords(keywords, tag_prefix, verbose):
    """Get version information from git keywords.

    *keywords* is the dict produced by git_get_keywords(); raises
    NotThisMethod when the keywords are absent or unexpanded (i.e. we are
    not inside a git-archive tarball).
    """
    if not keywords:
        raise NotThisMethod("no keywords at all, weird")
    date = keywords.get("date")
    if date is not None:
        # git-2.2.0 added "%cI", which expands to an ISO-8601 -compliant
        # datestamp. However we prefer "%ci" (which expands to an "ISO-8601
        # -like" string, which we must then edit to make compliant), because
        # it's been around since git-1.5.3, and it's too difficult to
        # discover which version we're using, or to work around using an
        # older one.
        date = date.strip().replace(" ", "T", 1).replace(" ", "", 1)
    refnames = keywords["refnames"].strip()
    if refnames.startswith("$Format"):
        if verbose:
            print("keywords are unexpanded, not using")
        raise NotThisMethod("unexpanded keywords, not a git-archive tarball")
    refs = set([r.strip() for r in refnames.strip("()").split(",")])
    # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of
    # just "foo-1.0". If we see a "tag: " prefix, prefer those.
    TAG = "tag: "
    tags = set([r[len(TAG):] for r in refs if r.startswith(TAG)])
    if not tags:
        # Either we're using git < 1.8.3, or there really are no tags. We use
        # a heuristic: assume all version tags have a digit. The old git %d
        # expansion behaves like git log --decorate=short and strips out the
        # refs/heads/ and refs/tags/ prefixes that would let us distinguish
        # between branches and tags. By ignoring refnames without digits, we
        # filter out many common branch names like "release" and
        # "stabilization", as well as "HEAD" and "master".
        tags = set([r for r in refs if re.search(r'\d', r)])
        if verbose:
            print("discarding '%s', no digits" % ",".join(refs - tags))
    if verbose:
        print("likely tags: %s" % ",".join(sorted(tags)))
    for ref in sorted(tags):
        # sorting will prefer e.g. "2.0" over "2.0rc1"
        if ref.startswith(tag_prefix):
            r = ref[len(tag_prefix):]
            if verbose:
                print("picking %s" % r)
            return {"version": r,
                    "full-revisionid": keywords["full"].strip(),
                    "dirty": False, "error": None,
                    "date": date}
    # no suitable tags, so version is "0+unknown", but full hex is still there
    if verbose:
        print("no suitable tags, using unknown + full revision id")
    return {"version": "0+unknown",
            "full-revisionid": keywords["full"].strip(),
            "dirty": False, "error": "no suitable tags", "date": None}
def do_vcs_install(manifest_in, versionfile_source, ipy):
    """Git-specific installation logic for Versioneer.

    For Git, this means creating/changing .gitattributes to mark _version.py
    for export-subst keyword substitution.
    """
    GITS = ["git"]
    if sys.platform == "win32":
        GITS = ["git.cmd", "git.exe"]
    files = [manifest_in, versionfile_source]
    if ipy:
        files.append(ipy)
    try:
        me = __file__
        if me.endswith(".pyc") or me.endswith(".pyo"):
            # point git at the .py source, not a cached bytecode file
            me = os.path.splitext(me)[0] + ".py"
        versioneer_file = os.path.relpath(me)
    except NameError:
        # frozen/embedded interpreters may not define __file__
        versioneer_file = "versioneer.py"
    files.append(versioneer_file)
    present = False
    try:
        # Fix: "with" closes the handle even if a read raises mid-loop;
        # the original open()/close() pair leaked it on that path.
        with open(".gitattributes", "r") as f:
            for line in f.readlines():
                if line.strip().startswith(versionfile_source):
                    if "export-subst" in line.strip().split()[1:]:
                        present = True
    except EnvironmentError:
        # no .gitattributes yet — we'll create it below
        pass
    if not present:
        with open(".gitattributes", "a+") as f:
            f.write("%s export-subst\n" % versionfile_source)
        files.append(".gitattributes")
    run_command(GITS, ["add", "--"] + files)


def versions_from_parentdir(parentdir_prefix, root, verbose):
    """Try to determine the version from the parent directory name.

    Source tarballs conventionally unpack into a directory that includes both
    the project name and a version string. We will also support searching up
    two directory levels for an appropriately named parent directory
    """
    rootdirs = []

    for i in range(3):
        dirname = os.path.basename(root)
        if dirname.startswith(parentdir_prefix):
            return {"version": dirname[len(parentdir_prefix):],
                    "full-revisionid": None,
                    "dirty": False, "error": None, "date": None}
        else:
            rootdirs.append(root)
            root = os.path.dirname(root)  # up a level

    if verbose:
        print("Tried directories %s but none started with prefix %s" %
              (str(rootdirs), parentdir_prefix))
    raise NotThisMethod("rootdir doesn't start with parentdir_prefix")


# Template for the short _version.py written into sdists/builds; the
# "# END VERSION_JSON" sentinel is what versions_from_file() searches for,
# so the two must stay in sync.
SHORT_VERSION_PY = """
# This file was generated by 'versioneer.py' (0.18) from
# revision-control system data, or from the parent directory name of an
# unpacked source archive. Distribution tarballs contain a pre-generated copy
# of this file.

import json

version_json = '''
%s
''' # END VERSION_JSON


def get_versions():
    return json.loads(version_json)
"""


def versions_from_file(filename):
    """Try to determine the version from _version.py if present.

    Raises NotThisMethod when the file is unreadable or does not contain
    a version_json block (i.e. it is still the development-tree version).
    """
    try:
        with open(filename) as f:
            contents = f.read()
    except EnvironmentError:
        raise NotThisMethod("unable to read _version.py")
    # try the Unix-newline form first, then the Windows-newline form
    mo = re.search(r"version_json = '''\n(.*)''' # END VERSION_JSON",
                   contents, re.M | re.S)
    if not mo:
        mo = re.search(r"version_json = '''\r\n(.*)''' # END VERSION_JSON",
                       contents, re.M | re.S)
    if not mo:
        raise NotThisMethod("no version_json in _version.py")
    return json.loads(mo.group(1))


def write_to_version_file(filename, versions):
    """Write the given version number to the given _version.py file."""
    os.unlink(filename)
    contents = json.dumps(versions, sort_keys=True,
                          indent=1, separators=(",", ": "))
    with open(filename, "w") as f:
        f.write(SHORT_VERSION_PY % contents)

    print("set %s to '%s'" % (filename, versions["version"]))
def plus_or_dot(pieces):
    """Return a + if we don't already have one, else return a .

    PEP 440 local-version segments start with "+" and use "." as the
    internal separator, so the second and later additions use ".".
    """
    # Robustness fix: "closest-tag" may be present with value None (the
    # no-tag case); dict.get's default only applies when the key is
    # absent, and `"+" in None` raises TypeError.
    if "+" in (pieces.get("closest-tag") or ""):
        return "."
    return "+"


def render_pep440(pieces):
    """Build up version string, with post-release "local version identifier".

    Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you
    get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty

    Exceptions:
    1: no tags. git_describe was just HEX. 0+untagged.DISTANCE.gHEX[.dirty]
    """
    if pieces["closest-tag"]:
        rendered = pieces["closest-tag"]
        if pieces["distance"] or pieces["dirty"]:
            rendered += plus_or_dot(pieces)
            rendered += "%d.g%s" % (pieces["distance"], pieces["short"])
            if pieces["dirty"]:
                rendered += ".dirty"
    else:
        # exception #1
        rendered = "0+untagged.%d.g%s" % (pieces["distance"],
                                          pieces["short"])
        if pieces["dirty"]:
            rendered += ".dirty"
    return rendered


def render_pep440_pre(pieces):
    """TAG[.post.devDISTANCE] -- No -dirty.

    Exceptions:
    1: no tags. 0.post.devDISTANCE
    """
    if pieces["closest-tag"]:
        rendered = pieces["closest-tag"]
        if pieces["distance"]:
            rendered += ".post.dev%d" % pieces["distance"]
    else:
        # exception #1
        rendered = "0.post.dev%d" % pieces["distance"]
    return rendered


def render_pep440_post(pieces):
    """TAG[.postDISTANCE[.dev0]+gHEX] .

    The ".dev0" means dirty. Note that .dev0 sorts backwards
    (a dirty tree will appear "older" than the corresponding clean one),
    but you shouldn't be releasing software with -dirty anyways.

    Exceptions:
    1: no tags. 0.postDISTANCE[.dev0]
    """
    if pieces["closest-tag"]:
        rendered = pieces["closest-tag"]
        if pieces["distance"] or pieces["dirty"]:
            rendered += ".post%d" % pieces["distance"]
            if pieces["dirty"]:
                rendered += ".dev0"
            rendered += plus_or_dot(pieces)
            rendered += "g%s" % pieces["short"]
    else:
        # exception #1
        rendered = "0.post%d" % pieces["distance"]
        if pieces["dirty"]:
            rendered += ".dev0"
        rendered += "+g%s" % pieces["short"]
    return rendered


def render_pep440_old(pieces):
    """TAG[.postDISTANCE[.dev0]] .

    The ".dev0" means dirty.

    Exceptions:
    1: no tags. 0.postDISTANCE[.dev0]
    """
    # (docstring typo "Eexceptions" fixed)
    if pieces["closest-tag"]:
        rendered = pieces["closest-tag"]
        if pieces["distance"] or pieces["dirty"]:
            rendered += ".post%d" % pieces["distance"]
            if pieces["dirty"]:
                rendered += ".dev0"
    else:
        # exception #1
        rendered = "0.post%d" % pieces["distance"]
        if pieces["dirty"]:
            rendered += ".dev0"
    return rendered


def render_git_describe(pieces):
    """TAG[-DISTANCE-gHEX][-dirty].

    Like 'git describe --tags --dirty --always'.

    Exceptions:
    1: no tags. HEX[-dirty] (note: no 'g' prefix)
    """
    if pieces["closest-tag"]:
        rendered = pieces["closest-tag"]
        if pieces["distance"]:
            rendered += "-%d-g%s" % (pieces["distance"], pieces["short"])
    else:
        # exception #1
        rendered = pieces["short"]
    if pieces["dirty"]:
        rendered += "-dirty"
    return rendered


def render_git_describe_long(pieces):
    """TAG-DISTANCE-gHEX[-dirty].

    Like 'git describe --tags --dirty --always -long'.
    The distance/hash is unconditional.

    Exceptions:
    1: no tags. HEX[-dirty] (note: no 'g' prefix)
    """
    if pieces["closest-tag"]:
        rendered = pieces["closest-tag"]
        rendered += "-%d-g%s" % (pieces["distance"], pieces["short"])
    else:
        # exception #1
        rendered = pieces["short"]
    if pieces["dirty"]:
        rendered += "-dirty"
    return rendered


def render(pieces, style):
    """Render the given version pieces into the requested style.

    Returns a dict with "version", "full-revisionid", "dirty", "error"
    and "date" keys; raises ValueError for an unknown *style*.
    """
    if pieces["error"]:
        # upstream extraction failed — report it rather than a bogus version
        return {"version": "unknown",
                "full-revisionid": pieces.get("long"),
                "dirty": None,
                "error": pieces["error"],
                "date": None}

    if not style or style == "default":
        style = "pep440"  # the default

    if style == "pep440":
        rendered = render_pep440(pieces)
    elif style == "pep440-pre":
        rendered = render_pep440_pre(pieces)
    elif style == "pep440-post":
        rendered = render_pep440_post(pieces)
    elif style == "pep440-old":
        rendered = render_pep440_old(pieces)
    elif style == "git-describe":
        rendered = render_git_describe(pieces)
    elif style == "git-describe-long":
        rendered = render_git_describe_long(pieces)
    else:
        raise ValueError("unknown style '%s'" % style)

    return {"version": rendered, "full-revisionid": pieces["long"],
            "dirty": pieces["dirty"], "error": None,
            "date": pieces.get("date")}


class VersioneerBadRootError(Exception):
    """The project root directory is unknown or missing key files."""


def get_versions(verbose=False):
    """Get the project version from whatever source is available.

    Returns dict with two keys: 'version' and 'full'.
    """
    if "versioneer" in sys.modules:
        # see the discussion in cmdclass.py:get_cmdclass()
        del sys.modules["versioneer"]

    root = get_root()
    cfg = get_config_from_root(root)

    assert cfg.VCS is not None, "please set [versioneer]VCS= in setup.cfg"
    handlers = HANDLERS.get(cfg.VCS)
    assert handlers, "unrecognized VCS '%s'" % cfg.VCS
    verbose = verbose or cfg.verbose
    assert cfg.versionfile_source is not None, \
        "please set versioneer.versionfile_source"
    assert cfg.tag_prefix is not None, "please set versioneer.tag_prefix"

    versionfile_abs = os.path.join(root, cfg.versionfile_source)

    # extract version from first of: _version.py, VCS command (e.g. 'git
    # describe'), parentdir. This is meant to work for developers using a
    # source checkout, for users of a tarball created by 'setup.py sdist',
    # and for users of a tarball/zipball created by 'git archive' or GitHub's
    # download-from-tag feature or the equivalent in other VCSes.

    get_keywords_f = handlers.get("get_keywords")
    from_keywords_f = handlers.get("keywords")
    if get_keywords_f and from_keywords_f:
        try:
            keywords = get_keywords_f(versionfile_abs)
            ver = from_keywords_f(keywords, cfg.tag_prefix, verbose)
            if verbose:
                print("got version from expanded keyword %s" % ver)
            return ver
        except NotThisMethod:
            pass

    try:
        ver = versions_from_file(versionfile_abs)
        if verbose:
            print("got version from file %s %s" % (versionfile_abs, ver))
        return ver
    except NotThisMethod:
        pass

    from_vcs_f = handlers.get("pieces_from_vcs")
    if from_vcs_f:
        try:
            pieces = from_vcs_f(cfg.tag_prefix, root, verbose)
            ver = render(pieces, cfg.style)
            if verbose:
                print("got version from VCS %s" % ver)
            return ver
        except NotThisMethod:
            pass

    try:
        if cfg.parentdir_prefix:
            ver = versions_from_parentdir(cfg.parentdir_prefix, root, verbose)
            if verbose:
                print("got version from parentdir %s" % ver)
            return ver
    except NotThisMethod:
        pass

    if verbose:
        print("unable to compute version")

    # every strategy failed — report a safe sentinel version
    return {"version": "0+unknown", "full-revisionid": None,
            "dirty": None, "error": "unable to compute version",
            "date": None}


def get_version():
    """Get the short version string for this project."""
    return get_versions()["version"]
Assume 1490 | # a main project A and a dependency B, which use different versions 1491 | # of Versioneer. A's setup.py imports A's Versioneer, leaving it in 1492 | # sys.modules by the time B's setup.py is executed, causing B to run 1493 | # with the wrong versioneer. Setuptools wraps the sub-dep builds in a 1494 | # sandbox that restores sys.modules to it's pre-build state, so the 1495 | # parent is protected against the child's "import versioneer". By 1496 | # removing ourselves from sys.modules here, before the child build 1497 | # happens, we protect the child from the parent's versioneer too. 1498 | # Also see https://github.com/warner/python-versioneer/issues/52 1499 | 1500 | cmds = {} 1501 | 1502 | # we add "version" to both distutils and setuptools 1503 | from distutils.core import Command 1504 | 1505 | class cmd_version(Command): 1506 | description = "report generated version string" 1507 | user_options = [] 1508 | boolean_options = [] 1509 | 1510 | def initialize_options(self): 1511 | pass 1512 | 1513 | def finalize_options(self): 1514 | pass 1515 | 1516 | def run(self): 1517 | vers = get_versions(verbose=True) 1518 | print("Version: %s" % vers["version"]) 1519 | print(" full-revisionid: %s" % vers.get("full-revisionid")) 1520 | print(" dirty: %s" % vers.get("dirty")) 1521 | print(" date: %s" % vers.get("date")) 1522 | if vers["error"]: 1523 | print(" error: %s" % vers["error"]) 1524 | cmds["version"] = cmd_version 1525 | 1526 | # we override "build_py" in both distutils and setuptools 1527 | # 1528 | # most invocation pathways end up running build_py: 1529 | # distutils/build -> build_py 1530 | # distutils/install -> distutils/build ->.. 1531 | # setuptools/bdist_wheel -> distutils/install ->.. 1532 | # setuptools/bdist_egg -> distutils/install_lib -> build_py 1533 | # setuptools/install -> bdist_egg ->.. 1534 | # setuptools/develop -> ? 
1535 | # pip install: 1536 | # copies source tree to a tempdir before running egg_info/etc 1537 | # if .git isn't copied too, 'git describe' will fail 1538 | # then does setup.py bdist_wheel, or sometimes setup.py install 1539 | # setup.py egg_info -> ? 1540 | 1541 | # we override different "build_py" commands for both environments 1542 | if "setuptools" in sys.modules: 1543 | from setuptools.command.build_py import build_py as _build_py 1544 | else: 1545 | from distutils.command.build_py import build_py as _build_py 1546 | 1547 | class cmd_build_py(_build_py): 1548 | def run(self): 1549 | root = get_root() 1550 | cfg = get_config_from_root(root) 1551 | versions = get_versions() 1552 | _build_py.run(self) 1553 | # now locate _version.py in the new build/ directory and replace 1554 | # it with an updated value 1555 | if cfg.versionfile_build: 1556 | target_versionfile = os.path.join(self.build_lib, 1557 | cfg.versionfile_build) 1558 | print("UPDATING %s" % target_versionfile) 1559 | write_to_version_file(target_versionfile, versions) 1560 | cmds["build_py"] = cmd_build_py 1561 | 1562 | if "cx_Freeze" in sys.modules: # cx_freeze enabled? 1563 | from cx_Freeze.dist import build_exe as _build_exe 1564 | # nczeczulin reports that py2exe won't like the pep440-style string 1565 | # as FILEVERSION, but it can be used for PRODUCTVERSION, e.g. 1566 | # setup(console=[{ 1567 | # "version": versioneer.get_version().split("+", 1)[0], # FILEVERSION 1568 | # "product_version": versioneer.get_version(), 1569 | # ... 
1570 | 1571 | class cmd_build_exe(_build_exe): 1572 | def run(self): 1573 | root = get_root() 1574 | cfg = get_config_from_root(root) 1575 | versions = get_versions() 1576 | target_versionfile = cfg.versionfile_source 1577 | print("UPDATING %s" % target_versionfile) 1578 | write_to_version_file(target_versionfile, versions) 1579 | 1580 | _build_exe.run(self) 1581 | os.unlink(target_versionfile) 1582 | with open(cfg.versionfile_source, "w") as f: 1583 | LONG = LONG_VERSION_PY[cfg.VCS] 1584 | f.write(LONG % 1585 | {"DOLLAR": "$", 1586 | "STYLE": cfg.style, 1587 | "TAG_PREFIX": cfg.tag_prefix, 1588 | "PARENTDIR_PREFIX": cfg.parentdir_prefix, 1589 | "VERSIONFILE_SOURCE": cfg.versionfile_source, 1590 | }) 1591 | cmds["build_exe"] = cmd_build_exe 1592 | del cmds["build_py"] 1593 | 1594 | if 'py2exe' in sys.modules: # py2exe enabled? 1595 | try: 1596 | from py2exe.distutils_buildexe import py2exe as _py2exe # py3 1597 | except ImportError: 1598 | from py2exe.build_exe import py2exe as _py2exe # py2 1599 | 1600 | class cmd_py2exe(_py2exe): 1601 | def run(self): 1602 | root = get_root() 1603 | cfg = get_config_from_root(root) 1604 | versions = get_versions() 1605 | target_versionfile = cfg.versionfile_source 1606 | print("UPDATING %s" % target_versionfile) 1607 | write_to_version_file(target_versionfile, versions) 1608 | 1609 | _py2exe.run(self) 1610 | os.unlink(target_versionfile) 1611 | with open(cfg.versionfile_source, "w") as f: 1612 | LONG = LONG_VERSION_PY[cfg.VCS] 1613 | f.write(LONG % 1614 | {"DOLLAR": "$", 1615 | "STYLE": cfg.style, 1616 | "TAG_PREFIX": cfg.tag_prefix, 1617 | "PARENTDIR_PREFIX": cfg.parentdir_prefix, 1618 | "VERSIONFILE_SOURCE": cfg.versionfile_source, 1619 | }) 1620 | cmds["py2exe"] = cmd_py2exe 1621 | 1622 | # we override different "sdist" commands for both environments 1623 | if "setuptools" in sys.modules: 1624 | from setuptools.command.sdist import sdist as _sdist 1625 | else: 1626 | from distutils.command.sdist import sdist as _sdist 1627 | 
1628 | class cmd_sdist(_sdist): 1629 | def run(self): 1630 | versions = get_versions() 1631 | self._versioneer_generated_versions = versions 1632 | # unless we update this, the command will keep using the old 1633 | # version 1634 | self.distribution.metadata.version = versions["version"] 1635 | return _sdist.run(self) 1636 | 1637 | def make_release_tree(self, base_dir, files): 1638 | root = get_root() 1639 | cfg = get_config_from_root(root) 1640 | _sdist.make_release_tree(self, base_dir, files) 1641 | # now locate _version.py in the new base_dir directory 1642 | # (remembering that it may be a hardlink) and replace it with an 1643 | # updated value 1644 | target_versionfile = os.path.join(base_dir, cfg.versionfile_source) 1645 | print("UPDATING %s" % target_versionfile) 1646 | write_to_version_file(target_versionfile, 1647 | self._versioneer_generated_versions) 1648 | cmds["sdist"] = cmd_sdist 1649 | 1650 | return cmds 1651 | 1652 | 1653 | CONFIG_ERROR = """ 1654 | setup.cfg is missing the necessary Versioneer configuration. You need 1655 | a section like: 1656 | 1657 | [versioneer] 1658 | VCS = git 1659 | style = pep440 1660 | versionfile_source = src/myproject/_version.py 1661 | versionfile_build = myproject/_version.py 1662 | tag_prefix = 1663 | parentdir_prefix = myproject- 1664 | 1665 | You will also need to edit your setup.py to use the results: 1666 | 1667 | import versioneer 1668 | setup(version=versioneer.get_version(), 1669 | cmdclass=versioneer.get_cmdclass(), ...) 1670 | 1671 | Please read the docstring in ./versioneer.py for configuration instructions, 1672 | edit setup.cfg, and re-run the installer or 'python versioneer.py setup'. 1673 | """ 1674 | 1675 | SAMPLE_CONFIG = """ 1676 | # See the docstring in versioneer.py for instructions. Note that you must 1677 | # re-run 'versioneer.py setup' after changing this section, and commit the 1678 | # resulting files. 
1679 | 1680 | [versioneer] 1681 | #VCS = git 1682 | #style = pep440 1683 | #versionfile_source = 1684 | #versionfile_build = 1685 | #tag_prefix = 1686 | #parentdir_prefix = 1687 | 1688 | """ 1689 | 1690 | INIT_PY_SNIPPET = """ 1691 | from ._version import get_versions 1692 | __version__ = get_versions()['version'] 1693 | del get_versions 1694 | """ 1695 | 1696 | 1697 | def do_setup(): 1698 | """Main VCS-independent setup function for installing Versioneer.""" 1699 | root = get_root() 1700 | try: 1701 | cfg = get_config_from_root(root) 1702 | except (EnvironmentError, configparser.NoSectionError, 1703 | configparser.NoOptionError) as e: 1704 | if isinstance(e, (EnvironmentError, configparser.NoSectionError)): 1705 | print("Adding sample versioneer config to setup.cfg", 1706 | file=sys.stderr) 1707 | with open(os.path.join(root, "setup.cfg"), "a") as f: 1708 | f.write(SAMPLE_CONFIG) 1709 | print(CONFIG_ERROR, file=sys.stderr) 1710 | return 1 1711 | 1712 | print(" creating %s" % cfg.versionfile_source) 1713 | with open(cfg.versionfile_source, "w") as f: 1714 | LONG = LONG_VERSION_PY[cfg.VCS] 1715 | f.write(LONG % {"DOLLAR": "$", 1716 | "STYLE": cfg.style, 1717 | "TAG_PREFIX": cfg.tag_prefix, 1718 | "PARENTDIR_PREFIX": cfg.parentdir_prefix, 1719 | "VERSIONFILE_SOURCE": cfg.versionfile_source, 1720 | }) 1721 | 1722 | ipy = os.path.join(os.path.dirname(cfg.versionfile_source), 1723 | "__init__.py") 1724 | if os.path.exists(ipy): 1725 | try: 1726 | with open(ipy, "r") as f: 1727 | old = f.read() 1728 | except EnvironmentError: 1729 | old = "" 1730 | if INIT_PY_SNIPPET not in old: 1731 | print(" appending to %s" % ipy) 1732 | with open(ipy, "a") as f: 1733 | f.write(INIT_PY_SNIPPET) 1734 | else: 1735 | print(" %s unmodified" % ipy) 1736 | else: 1737 | print(" %s doesn't exist, ok" % ipy) 1738 | ipy = None 1739 | 1740 | # Make sure both the top-level "versioneer.py" and versionfile_source 1741 | # (PKG/_version.py, used by runtime code) are in MANIFEST.in, so 1742 | # 
they'll be copied into source distributions. Pip won't be able to 1743 | # install the package without this. 1744 | manifest_in = os.path.join(root, "MANIFEST.in") 1745 | simple_includes = set() 1746 | try: 1747 | with open(manifest_in, "r") as f: 1748 | for line in f: 1749 | if line.startswith("include "): 1750 | for include in line.split()[1:]: 1751 | simple_includes.add(include) 1752 | except EnvironmentError: 1753 | pass 1754 | # That doesn't cover everything MANIFEST.in can do 1755 | # (http://docs.python.org/2/distutils/sourcedist.html#commands), so 1756 | # it might give some false negatives. Appending redundant 'include' 1757 | # lines is safe, though. 1758 | if "versioneer.py" not in simple_includes: 1759 | print(" appending 'versioneer.py' to MANIFEST.in") 1760 | with open(manifest_in, "a") as f: 1761 | f.write("include versioneer.py\n") 1762 | else: 1763 | print(" 'versioneer.py' already in MANIFEST.in") 1764 | if cfg.versionfile_source not in simple_includes: 1765 | print(" appending versionfile_source ('%s') to MANIFEST.in" % 1766 | cfg.versionfile_source) 1767 | with open(manifest_in, "a") as f: 1768 | f.write("include %s\n" % cfg.versionfile_source) 1769 | else: 1770 | print(" versionfile_source already in MANIFEST.in") 1771 | 1772 | # Make VCS-specific changes. For git, this means creating/changing 1773 | # .gitattributes to mark _version.py for export-subst keyword 1774 | # substitution. 
1775 | do_vcs_install(manifest_in, cfg.versionfile_source, ipy) 1776 | return 0 1777 | 1778 | 1779 | def scan_setup_py(): 1780 | """Validate the contents of setup.py against Versioneer's expectations.""" 1781 | found = set() 1782 | setters = False 1783 | errors = 0 1784 | with open("setup.py", "r") as f: 1785 | for line in f.readlines(): 1786 | if "import versioneer" in line: 1787 | found.add("import") 1788 | if "versioneer.get_cmdclass()" in line: 1789 | found.add("cmdclass") 1790 | if "versioneer.get_version()" in line: 1791 | found.add("get_version") 1792 | if "versioneer.VCS" in line: 1793 | setters = True 1794 | if "versioneer.versionfile_source" in line: 1795 | setters = True 1796 | if len(found) != 3: 1797 | print("") 1798 | print("Your setup.py appears to be missing some important items") 1799 | print("(but I might be wrong). Please make sure it has something") 1800 | print("roughly like the following:") 1801 | print("") 1802 | print(" import versioneer") 1803 | print(" setup( version=versioneer.get_version(),") 1804 | print(" cmdclass=versioneer.get_cmdclass(), ...)") 1805 | print("") 1806 | errors += 1 1807 | if setters: 1808 | print("You should remove lines like 'versioneer.VCS = ' and") 1809 | print("'versioneer.versionfile_source = ' . This configuration") 1810 | print("now lives in setup.cfg, and should be removed from setup.py") 1811 | print("") 1812 | errors += 1 1813 | return errors 1814 | 1815 | 1816 | if __name__ == "__main__": 1817 | cmd = sys.argv[1] 1818 | if cmd == "setup": 1819 | errors = do_setup() 1820 | errors += scan_setup_py() 1821 | if errors: 1822 | sys.exit(1) 1823 | --------------------------------------------------------------------------------