├── .circleci └── config.yml ├── .coveragerc ├── .gitattributes ├── .gitignore ├── LICENSE ├── MANIFEST.in ├── README.md ├── ext ├── _core.c ├── _core.h └── core.pyx ├── ipython └── Example01.ipynb ├── pydpc ├── __init__.py ├── _reference.py ├── _version.py └── dpc.py ├── requirements.txt ├── setup.cfg ├── setup.py ├── test ├── test_consistency.py └── test_indices.py └── versioneer.py /.circleci/config.yml: -------------------------------------------------------------------------------- 1 | # Python CircleCI 2.0 configuration file 2 | # 3 | # Check https://circleci.com/docs/2.0/language-python/ for more details 4 | # 5 | version: 2 6 | jobs: 7 | build: 8 | docker: 9 | - image: circleci/python:3.7 10 | 11 | working_directory: ~/repo 12 | 13 | steps: 14 | - checkout 15 | 16 | # Download and cache dependencies 17 | - restore_cache: 18 | keys: 19 | - v1-dependencies-{{ checksum "requirements.txt" }} 20 | # fallback to using the latest cache if no exact match is found 21 | - v1-dependencies- 22 | 23 | - run: 24 | name: install dependencies 25 | command: | 26 | python3 -m venv venv 27 | . venv/bin/activate 28 | pip install -r requirements.txt 29 | 30 | - save_cache: 31 | paths: 32 | - ./venv 33 | key: v1-dependencies-{{ checksum "requirements.txt" }} 34 | 35 | - run: 36 | name: run tests 37 | command: | 38 | . venv/bin/activate 39 | python setup.py test 40 | 41 | - store_artifacts: 42 | path: test-reports 43 | destination: test-reports 44 | -------------------------------------------------------------------------------- /.coveragerc: -------------------------------------------------------------------------------- 1 | [run] 2 | omit = */_version.py -------------------------------------------------------------------------------- /.gitattributes: -------------------------------------------------------------------------------- 1 | pydpc/_version.py export-subst 2 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | ext/core.c 9 | 10 | # IPython checkpints 11 | .ipynb_checkpoints 12 | 13 | # Distribution / packaging 14 | .Python 15 | env/ 16 | build/ 17 | develop-eggs/ 18 | dist/ 19 | downloads/ 20 | eggs/ 21 | .eggs/ 22 | lib/ 23 | lib64/ 24 | parts/ 25 | sdist/ 26 | var/ 27 | *.egg-info/ 28 | .installed.cfg 29 | *.egg 30 | 31 | # PyInstaller 32 | # Usually these files are written by a python script from a template 33 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 34 | *.manifest 35 | *.spec 36 | 37 | # Installer logs 38 | pip-log.txt 39 | pip-delete-this-directory.txt 40 | 41 | # Unit test / coverage reports 42 | htmlcov/ 43 | .tox/ 44 | .coverage 45 | .coverage.* 46 | .cache 47 | nosetests.xml 48 | coverage.xml 49 | *,cover 50 | .hypothesis/ 51 | 52 | # Translations 53 | *.mo 54 | *.pot 55 | 56 | # Django stuff: 57 | *.log 58 | 59 | # Sphinx documentation 60 | docs/_build/ 61 | 62 | # PyBuilder 63 | target/ 64 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | GNU LESSER GENERAL PUBLIC LICENSE 2 | Version 3, 29 June 2007 3 | 4 | Copyright (C) 2007 Free Software Foundation, Inc. 
5 | Everyone is permitted to copy and distribute verbatim copies 6 | of this license document, but changing it is not allowed. 7 | 8 | 9 | This version of the GNU Lesser General Public License incorporates 10 | the terms and conditions of version 3 of the GNU General Public 11 | License, supplemented by the additional permissions listed below. 12 | 13 | 0. Additional Definitions. 14 | 15 | As used herein, "this License" refers to version 3 of the GNU Lesser 16 | General Public License, and the "GNU GPL" refers to version 3 of the GNU 17 | General Public License. 18 | 19 | "The Library" refers to a covered work governed by this License, 20 | other than an Application or a Combined Work as defined below. 21 | 22 | An "Application" is any work that makes use of an interface provided 23 | by the Library, but which is not otherwise based on the Library. 24 | Defining a subclass of a class defined by the Library is deemed a mode 25 | of using an interface provided by the Library. 26 | 27 | A "Combined Work" is a work produced by combining or linking an 28 | Application with the Library. The particular version of the Library 29 | with which the Combined Work was made is also called the "Linked 30 | Version". 31 | 32 | The "Minimal Corresponding Source" for a Combined Work means the 33 | Corresponding Source for the Combined Work, excluding any source code 34 | for portions of the Combined Work that, considered in isolation, are 35 | based on the Application, and not on the Linked Version. 36 | 37 | The "Corresponding Application Code" for a Combined Work means the 38 | object code and/or source code for the Application, including any data 39 | and utility programs needed for reproducing the Combined Work from the 40 | Application, but excluding the System Libraries of the Combined Work. 41 | 42 | 1. Exception to Section 3 of the GNU GPL. 43 | 44 | You may convey a covered work under sections 3 and 4 of this License 45 | without being bound by section 3 of the GNU GPL. 46 | 47 | 2. Conveying Modified Versions. 48 | 49 | If you modify a copy of the Library, and, in your modifications, a 50 | facility refers to a function or data to be supplied by an Application 51 | that uses the facility (other than as an argument passed when the 52 | facility is invoked), then you may convey a copy of the modified 53 | version: 54 | 55 | a) under this License, provided that you make a good faith effort to 56 | ensure that, in the event an Application does not supply the 57 | function or data, the facility still operates, and performs 58 | whatever part of its purpose remains meaningful, or 59 | 60 | b) under the GNU GPL, with none of the additional permissions of 61 | this License applicable to that copy. 62 | 63 | 3. Object Code Incorporating Material from Library Header Files. 64 | 65 | The object code form of an Application may incorporate material from 66 | a header file that is part of the Library. You may convey such object 67 | code under terms of your choice, provided that, if the incorporated 68 | material is not limited to numerical parameters, data structure 69 | layouts and accessors, or small macros, inline functions and templates 70 | (ten or fewer lines in length), you do both of the following: 71 | 72 | a) Give prominent notice with each copy of the object code that the 73 | Library is used in it and that the Library and its use are 74 | covered by this License. 75 | 76 | b) Accompany the object code with a copy of the GNU GPL and this license 77 | document. 78 | 79 | 4. Combined Works. 
80 | 81 | You may convey a Combined Work under terms of your choice that, 82 | taken together, effectively do not restrict modification of the 83 | portions of the Library contained in the Combined Work and reverse 84 | engineering for debugging such modifications, if you also do each of 85 | the following: 86 | 87 | a) Give prominent notice with each copy of the Combined Work that 88 | the Library is used in it and that the Library and its use are 89 | covered by this License. 90 | 91 | b) Accompany the Combined Work with a copy of the GNU GPL and this license 92 | document. 93 | 94 | c) For a Combined Work that displays copyright notices during 95 | execution, include the copyright notice for the Library among 96 | these notices, as well as a reference directing the user to the 97 | copies of the GNU GPL and this license document. 98 | 99 | d) Do one of the following: 100 | 101 | 0) Convey the Minimal Corresponding Source under the terms of this 102 | License, and the Corresponding Application Code in a form 103 | suitable for, and under terms that permit, the user to 104 | recombine or relink the Application with a modified version of 105 | the Linked Version to produce a modified Combined Work, in the 106 | manner specified by section 6 of the GNU GPL for conveying 107 | Corresponding Source. 108 | 109 | 1) Use a suitable shared library mechanism for linking with the 110 | Library. A suitable mechanism is one that (a) uses at run time 111 | a copy of the Library already present on the user's computer 112 | system, and (b) will operate properly with a modified version 113 | of the Library that is interface-compatible with the Linked 114 | Version. 115 | 116 | e) Provide Installation Information, but only if you would otherwise 117 | be required to provide such information under section 6 of the 118 | GNU GPL, and only to the extent that such information is 119 | necessary to install and execute a modified version of the 120 | Combined Work produced by recombining or relinking the 121 | Application with a modified version of the Linked Version. (If 122 | you use option 4d0, the Installation Information must accompany 123 | the Minimal Corresponding Source and Corresponding Application 124 | Code. If you use option 4d1, you must provide the Installation 125 | Information in the manner specified by section 6 of the GNU GPL 126 | for conveying Corresponding Source.) 127 | 128 | 5. Combined Libraries. 129 | 130 | You may place library facilities that are a work based on the 131 | Library side by side in a single library together with other library 132 | facilities that are not Applications and are not covered by this 133 | License, and convey such a combined library under terms of your 134 | choice, if you do both of the following: 135 | 136 | a) Accompany the combined library with a copy of the same work based 137 | on the Library, uncombined with any other library facilities, 138 | conveyed under the terms of this License. 139 | 140 | b) Give prominent notice with the combined library that part of it 141 | is a work based on the Library, and explaining where to find the 142 | accompanying uncombined form of the same work. 143 | 144 | 6. Revised Versions of the GNU Lesser General Public License. 145 | 146 | The Free Software Foundation may publish revised and/or new versions 147 | of the GNU Lesser General Public License from time to time. Such new 148 | versions will be similar in spirit to the present version, but may 149 | differ in detail to address new problems or concerns. 
150 | 151 | Each version is given a distinguishing version number. If the 152 | Library as you received it specifies that a certain numbered version 153 | of the GNU Lesser General Public License "or any later version" 154 | applies to it, you have the option of following the terms and 155 | conditions either of that published version or of any later version 156 | published by the Free Software Foundation. If the Library as you 157 | received it does not specify a version number of the GNU Lesser 158 | General Public License, you may choose any version of the GNU Lesser 159 | General Public License ever published by the Free Software Foundation. 160 | 161 | If the Library as you received it specifies that a proxy can decide 162 | whether future versions of the GNU Lesser General Public License shall 163 | apply, that proxy's public statement of acceptance of any version is 164 | permanent authorization for you to choose that version for the 165 | Library. 166 | -------------------------------------------------------------------------------- /MANIFEST.in: -------------------------------------------------------------------------------- 1 | include LICENSE 2 | 3 | include versioneer.py 4 | include pydpc/_version.py 5 | 6 | # include extension code 7 | recursive-include ext *.pyx *.c *.h 8 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # pydpc - a Python package for Density Peak-based Clustering 2 | 3 | [![CircleCI](https://circleci.com/gh/cwehmeyer/pydpc.svg?style=svg)](https://circleci.com/gh/cwehmeyer/pydpc) 4 | [![PyPI version](https://badge.fury.io/py/pydpc.svg)](https://pypi.python.org/pypi/pydpc) 5 | [![PyPI downloads](https://img.shields.io/pypi/dm/pydpc.svg)](https://pypi.python.org/pypi/pydpc) 6 | 7 | *Clustering by fast search and find of density peaks* was designed by Alex Rodriguez and Alessandro Laio; see their [project page](http://people.sissa.it/~laio/Research/Res_clustering.php) for more information. 8 | 9 | The pydpc package aims to make this algorithm available for Python users. 10 | 11 | ### Installation 12 | 13 | Install pydpc via pip from the Python package index 14 | 15 | ```bash 16 | pip install pydpc 17 | ``` 18 | 19 | or the latest version from github 20 | 21 | ```bash 22 | pip install git+https://github.com/cwehmeyer/pydpc.git@master 23 | ``` 24 | -------------------------------------------------------------------------------- /ext/_core.c: -------------------------------------------------------------------------------- 1 | /* 2 | * This file is part of pydpc. 3 | * 4 | * Copyright 2016 Christoph Wehmeyer 5 | * 6 | * pydpc is free software: you can redistribute it and/or modify 7 | * it under the terms of the GNU Lesser General Public License as published by 8 | * the Free Software Foundation, either version 3 of the License, or 9 | * (at your option) any later version. 10 | * 11 | * This program is distributed in the hope that it will be useful, 12 | * but WITHOUT ANY WARRANTY; without even the implied warranty of 13 | * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 14 | * GNU General Public License for more details. 15 | * 16 | * You should have received a copy of the GNU Lesser General Public License 17 | * along with this program. If not, see . 
18 | */ 19 | 20 | #include 21 | #include 22 | 23 | /*************************************************************************************************** 24 | * C99 compatibility for macros INFINITY and NAN 25 | ***************************************************************************************************/ 26 | 27 | #ifdef _MSC_VER 28 | /* handle Microsofts C99 incompatibility */ 29 | #include 30 | #define INFINITY (DBL_MAX+DBL_MAX) 31 | #define NAN (INFINITY-INFINITY) 32 | #else 33 | /* if not available otherwise, define INFINITY/NAN in the GNU style */ 34 | #ifndef INFINITY 35 | #define INFINITY (1.0/0.0) 36 | #endif 37 | #ifndef NAN 38 | #define NAN (INFINITY-INFINITY) 39 | #endif 40 | #endif 41 | 42 | /*************************************************************************************************** 43 | * static convenience functions (without cython wrappers) 44 | ***************************************************************************************************/ 45 | 46 | static double sqr(double x) 47 | { 48 | return (x == 0.0) ? 0.0 : x * x; 49 | } 50 | 51 | static double distance(double *points, int n, int m, int ndim) 52 | { 53 | int i, o = n * ndim, p = m * ndim; 54 | double sum = 0.0; 55 | for(i=0; i 25) /* use quicksort */ 66 | { 67 | l = L - 1; 68 | r = R; 69 | for(;;) 70 | { 71 | while(array[++l] < array[R]); 72 | while((array[--r] > array[R]) && (r > l)); 73 | if(l >= r) break; 74 | swap = array[l]; 75 | array[l] = array[r]; 76 | array[r] = swap; 77 | } 78 | swap = array[l]; 79 | array[l] = array[R]; 80 | array[R] = swap; 81 | mixed_sort(array, L, l - 1); 82 | mixed_sort(array, l + 1, R); 83 | } 84 | else /* use insertion sort */ 85 | { 86 | for(l=L+1; l<=R; ++l) 87 | { 88 | swap = array[l]; 89 | for(r=l-1; (r >= L) && (swap < array[r]); --r) 90 | array[r + 1] = array[r]; 91 | array[r + 1] = swap; 92 | } 93 | } 94 | } 95 | 96 | /*************************************************************************************************** 97 | * pydpc core functions (with cython wrappers) 98 | ***************************************************************************************************/ 99 | 100 | extern void _get_distances(double *points, int npoints, int ndim, double *distances) 101 | { 102 | int i, j, o; 103 | for(i=0; i. 18 | */ 19 | 20 | #ifndef PYDPC_HEADER 21 | #define PYDPC_HEADER 22 | 23 | extern void _get_distances(double *points, int npoints, int ndim, double *distances); 24 | extern double _get_kernel_size(double *distances, int npoints, double fraction); 25 | extern void _get_density(double kernel_size, double *distances, int npoints, double *density); 26 | extern void _get_delta_and_neighbour( 27 | double max_distance, double *distances, int *order, int npoints, double *delta, int *neighbour); 28 | extern void _get_membership( 29 | int *clusters, int nclusters, int *order, int *neighbour, int npoints, int *membership); 30 | extern void _get_border( 31 | double kernel_size, double *distances, double *density, int *membership, int npoints, 32 | int *border_member, double *border_density); 33 | extern void _get_halo( 34 | int border_only, double *border_density, 35 | double *density, int *membership, int *border_member, int npoints, int *halo); 36 | 37 | #endif 38 | -------------------------------------------------------------------------------- /ext/core.pyx: -------------------------------------------------------------------------------- 1 | # This file is part of pydpc. 
2 | # 3 | # Copyright 2016 Christoph Wehmeyer 4 | # 5 | # pydpc is free software: you can redistribute it and/or modify 6 | # it under the terms of the GNU Lesser General Public License as published by 7 | # the Free Software Foundation, either version 3 of the License, or 8 | # (at your option) any later version. 9 | # 10 | # This program is distributed in the hope that it will be useful, 11 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 12 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 13 | # GNU General Public License for more details. 14 | # 15 | # You should have received a copy of the GNU Lesser General Public License 16 | # along with this program. If not, see . 17 | 18 | import numpy as _np 19 | cimport numpy as _np 20 | 21 | cdef extern from "_core.h": 22 | void _get_distances(double *points, int npoints, int ndim, double *distances) 23 | double _get_kernel_size(double *distances, int npoints, double fraction) 24 | void _get_density(double kernel_size, double *distances, int npoints, double *density) 25 | void _get_delta_and_neighbour( 26 | double max_distance, double *distances, int *order, 27 | int npoints, double *delta, int *neighbour) 28 | void _get_membership( 29 | int *clusters, int nclusters, int *order, int *neighbour, int npoints, int *membership) 30 | void _get_border( 31 | double kernel_size, double *distances, double *density, int *membership, int npoints, 32 | int *border_member, double *border_density) 33 | void _get_halo( 34 | int border_only, double *border_density, 35 | double *density, int *membership, int *border_member, int npoints, int *halo) 36 | 37 | def get_distances(_np.ndarray[double, ndim=2, mode="c"] points not None): 38 | npoints = points.shape[0] 39 | ndim = points.shape[1] 40 | distances = _np.zeros(shape=(npoints, npoints), dtype=_np.float64) 41 | _get_distances( 42 | _np.PyArray_DATA(points), 43 | npoints, ndim, 44 | _np.PyArray_DATA(distances)) 45 | return distances 46 | 47 | def get_kernel_size(_np.ndarray[double, ndim=2, mode="c"] distances not None, fraction): 48 | return _get_kernel_size( _np.PyArray_DATA(distances), distances.shape[0], fraction) 49 | 50 | def get_density(_np.ndarray[double, ndim=2, mode="c"] distances not None, kernel_size): 51 | npoints = distances.shape[0] 52 | density = _np.zeros(shape=(npoints,), dtype=_np.float64) 53 | _get_density( 54 | kernel_size, 55 | _np.PyArray_DATA(distances), 56 | npoints, 57 | _np.PyArray_DATA(density)) 58 | return density 59 | 60 | def get_delta_and_neighbour( 61 | _np.ndarray[int, ndim=1, mode="c"] order not None, 62 | _np.ndarray[double, ndim=2, mode="c"] distances not None, 63 | max_distance): 64 | npoints = distances.shape[0] 65 | delta = _np.zeros(shape=(npoints,), dtype=_np.float64) 66 | neighbour = _np.zeros(shape=(npoints,), dtype=_np.intc) 67 | _get_delta_and_neighbour( 68 | max_distance, 69 | _np.PyArray_DATA(distances), 70 | _np.PyArray_DATA(order), 71 | npoints, 72 | _np.PyArray_DATA(delta), 73 | _np.PyArray_DATA(neighbour)) 74 | return delta, neighbour 75 | 76 | def get_membership( 77 | _np.ndarray[int, ndim=1, mode="c"] clusters not None, 78 | _np.ndarray[int, ndim=1, mode="c"] order not None, 79 | _np.ndarray[int, ndim=1, mode="c"] neighbour not None): 80 | npoints = order.shape[0] 81 | membership = _np.zeros(shape=(npoints,), dtype=_np.intc) 82 | _get_membership( 83 | _np.PyArray_DATA(clusters), 84 | clusters.shape[0], 85 | _np.PyArray_DATA(order), 86 | _np.PyArray_DATA(neighbour), 87 | npoints, 88 | _np.PyArray_DATA(membership)) 89 | 
return membership 90 | 91 | def get_border( 92 | kernel_size, 93 | _np.ndarray[double, ndim=2, mode="c"] distances not None, 94 | _np.ndarray[double, ndim=1, mode="c"] density not None, 95 | _np.ndarray[int, ndim=1, mode="c"] membership not None, 96 | nclusters): 97 | npoints = distances.shape[0] 98 | border_density = _np.zeros(shape=(nclusters,), dtype=_np.float64) 99 | border_member = _np.zeros(shape=(npoints,), dtype=_np.intc) 100 | _get_border( 101 | kernel_size, 102 | _np.PyArray_DATA(distances), 103 | _np.PyArray_DATA(density), 104 | _np.PyArray_DATA(membership), 105 | npoints, 106 | _np.PyArray_DATA(border_member), 107 | _np.PyArray_DATA(border_density)) 108 | return border_density, border_member.astype(_np.bool) 109 | 110 | def get_halo( 111 | _np.ndarray[double, ndim=1, mode="c"] density not None, 112 | _np.ndarray[int, ndim=1, mode="c"] membership not None, 113 | _np.ndarray[double, ndim=1, mode="c"] border_density not None, 114 | _np.ndarray[int, ndim=1, mode="c"] border_member not None, 115 | border_only=False): 116 | halo = membership.copy() 117 | flag = 0 118 | if border_only: 119 | flag = 1 120 | _get_halo( 121 | flag, 122 | _np.PyArray_DATA(border_density), 123 | _np.PyArray_DATA(density), 124 | _np.PyArray_DATA(membership), 125 | _np.PyArray_DATA(border_member), 126 | density.shape[0], 127 | _np.PyArray_DATA(halo)) 128 | halo_idx = _np.where(halo == -1)[0].astype(_np.intc) 129 | core_idx = _np.where(halo != -1)[0].astype(_np.intc) 130 | return halo_idx, core_idx 131 | -------------------------------------------------------------------------------- /ipython/Example01.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "# Example and timings\n", 8 | "\n", 9 | "This notebook gives a short introduction to using pydpc for a simple clustering problem." 10 | ] 11 | }, 12 | { 13 | "cell_type": "code", 14 | "execution_count": null, 15 | "metadata": { 16 | "collapsed": false 17 | }, 18 | "outputs": [], 19 | "source": [ 20 | "%matplotlib inline\n", 21 | "import matplotlib as mpl\n", 22 | "import matplotlib.pyplot as plt\n", 23 | "import numpy as np\n", 24 | "from pydpc import Cluster\n", 25 | "from pydpc._reference import Cluster as RefCluster" 26 | ] 27 | }, 28 | { 29 | "cell_type": "markdown", 30 | "metadata": {}, 31 | "source": [ 32 | "We start with preparing the data points for clustering. The data is two-dimensional and created by drawing random numbers from four superposed Gaussian distributions which are centered at the corners of a square (indicated by the red dashed lines)."
33 | ] 34 | }, 35 | { 36 | "cell_type": "code", 37 | "execution_count": null, 38 | "metadata": { 39 | "collapsed": false 40 | }, 41 | "outputs": [], 42 | "source": [ 43 | "# generate the data points\n", 44 | "npoints = 2000\n", 45 | "mux = 1.6\n", 46 | "muy = 1.6\n", 47 | "points = np.zeros(shape=(npoints, 2), dtype=np.float64)\n", 48 | "points[:, 0] = np.random.randn(npoints) + mux * (-1)**np.random.randint(0, high=2, size=npoints)\n", 49 | "points[:, 1] = np.random.randn(npoints) + muy * (-1)**np.random.randint(0, high=2, size=npoints)\n", 50 | "# draw the data points\n", 51 | "fig, ax = plt.subplots(figsize=(5, 5))\n", 52 | "ax.scatter(points[:, 0], points[:, 1], s=40)\n", 53 | "ax.plot([-mux, -mux], [-1.5 * muy, 1.5 * muy], '--', linewidth=2, color=\"red\")\n", 54 | "ax.plot([mux, mux], [-1.5 * muy, 1.5 * muy], '--', linewidth=2, color=\"red\")\n", 55 | "ax.plot([-1.5 * mux, 1.5 * mux], [-muy, -muy], '--', linewidth=2, color=\"red\")\n", 56 | "ax.plot([-1.5 * mux, 1.5 * mux], [muy, muy], '--', linewidth=2, color=\"red\")\n", 57 | "ax.set_xlabel(r\"x / a.u.\", fontsize=20)\n", 58 | "ax.set_ylabel(r\"y / a.u.\", fontsize=20)\n", 59 | "ax.tick_params(labelsize=15)\n", 60 | "ax.set_xlim([-7, 7])\n", 61 | "ax.set_ylim([-7, 7])\n", 62 | "ax.set_aspect('equal')\n", 63 | "fig.tight_layout()" 64 | ] 65 | }, 66 | { 67 | "cell_type": "markdown", 68 | "metadata": {}, 69 | "source": [ 70 | "Now comes the interesting part.\n", 71 | "\n", 72 | "We pass the numpy ndarray with the data points to the ``Cluster`` class which prepares the data set for clustering. In this stage, it computes the Euclidean distances between all data points and, from these, the two properties used to identify clusters within the data: each data point's ``density`` and its minimal distance ``delta`` to a point of higher density.\n", 73 | "\n", 74 | "Once these properties are computed, a decision graph is drawn, where each outlier in the upper right corner represents a different cluster. In our example, we should find four outliers. So far, however, no clustering has been done." 75 | ] 76 | }, 77 | { 78 | "cell_type": "code", 79 | "execution_count": null, 80 | "metadata": { 81 | "collapsed": false 82 | }, 83 | "outputs": [], 84 | "source": [ 85 | "clu = Cluster(points)" 86 | ] 87 | }, 88 | { 89 | "cell_type": "markdown", 90 | "metadata": {}, 91 | "source": [ 92 | "Now that we have the decision graph, we can select the outliers via the ``assign`` method by setting lower bounds for ``delta`` and ``density``. The ``assign`` method does the actual clustering; it also shows the decision graph again with the given selection." 93 | ] 94 | }, 95 | { 96 | "cell_type": "code", 97 | "execution_count": null, 98 | "metadata": { 99 | "collapsed": false 100 | }, 101 | "outputs": [], 102 | "source": [ 103 | "clu.assign(20, 1.5)" 104 | ] 105 | }, 106 | { 107 | "cell_type": "markdown", 108 | "metadata": {}, 109 | "source": [ 110 | "Let us have a look at the result.\n", 111 | "\n", 112 | "We again plot the data and red dashed lines indicating the centers of the Gaussian distributions. Indicated in the left panel by red dots are the four outliers from the decision graph; these are our four cluster centers. The center panel shows the points' densities and the right panel shows the membership in the four clusters by different coloring."
113 | ] 114 | }, 115 | { 116 | "cell_type": "code", 117 | "execution_count": null, 118 | "metadata": { 119 | "collapsed": false 120 | }, 121 | "outputs": [], 122 | "source": [ 123 | "fig, ax = plt.subplots(1, 3, figsize=(15, 5))\n", 124 | "ax[0].scatter(points[:, 0], points[:, 1], s=40)\n", 125 | "ax[0].scatter(points[clu.clusters, 0], points[clu.clusters, 1], s=50, c=\"red\")\n", 126 | "ax[1].scatter(points[:, 0], points[:, 1], s=40, c=clu.density)\n", 127 | "ax[2].scatter(points[:, 0], points[:, 1], s=40, c=clu.membership, cmap=mpl.cm.cool)\n", 128 | "for _ax in ax:\n", 129 | "    _ax.plot([-mux, -mux], [-1.5 * muy, 1.5 * muy], '--', linewidth=2, color=\"red\")\n", 130 | "    _ax.plot([mux, mux], [-1.5 * muy, 1.5 * muy], '--', linewidth=2, color=\"red\")\n", 131 | "    _ax.plot([-1.5 * mux, 1.5 * mux], [-muy, -muy], '--', linewidth=2, color=\"red\")\n", 132 | "    _ax.plot([-1.5 * mux, 1.5 * mux], [muy, muy], '--', linewidth=2, color=\"red\")\n", 133 | "    _ax.set_xlabel(r\"x / a.u.\", fontsize=20)\n", 134 | "    _ax.set_ylabel(r\"y / a.u.\", fontsize=20)\n", 135 | "    _ax.tick_params(labelsize=15)\n", 136 | "    _ax.set_xlim([-7, 7])\n", 137 | "    _ax.set_ylim([-7, 7])\n", 138 | "    _ax.set_aspect('equal')\n", 139 | "fig.tight_layout()" 140 | ] 141 | }, 142 | { 143 | "cell_type": "markdown", 144 | "metadata": {}, 145 | "source": [ 146 | "Density peak clustering can further resolve whether the membership of a data point in a certain cluster is strong or rather weak, and it separates the data points further into core and halo regions.\n", 147 | "\n", 148 | "The left panel depicts the border members in grey.\n", 149 | "The separation in the center panel uses the core/halo criterion of the original authors, while the right panel shows a less strict criterion which assumes a halo only between different clusters; here, the halo members are depicted in grey."
150 | ] 151 | }, 152 | { 153 | "cell_type": "code", 154 | "execution_count": null, 155 | "metadata": { 156 | "collapsed": false 157 | }, 158 | "outputs": [], 159 | "source": [ 160 | "fig, ax = plt.subplots(1, 3, figsize=(15, 5))\n", 161 | "ax[0].scatter(\n", 162 | " points[:, 0], points[:, 1],\n", 163 | " s=40, c=clu.membership, cmap=mpl.cm.cool)\n", 164 | "ax[0].scatter(points[clu.border_member, 0], points[clu.border_member, 1], s=40, c=\"grey\")\n", 165 | "ax[1].scatter(\n", 166 | " points[clu.core_idx, 0], points[clu.core_idx, 1],\n", 167 | " s=40, c=clu.membership[clu.core_idx], cmap=mpl.cm.cool)\n", 168 | "ax[1].scatter(points[clu.halo_idx, 0], points[clu.halo_idx, 1], s=40, c=\"grey\")\n", 169 | "clu.autoplot=False\n", 170 | "clu.assign(20, 1.5, border_only=True)\n", 171 | "ax[2].scatter(\n", 172 | " points[clu.core_idx, 0], points[clu.core_idx, 1],\n", 173 | " s=40, c=clu.membership[clu.core_idx], cmap=mpl.cm.cool)\n", 174 | "ax[2].scatter(points[clu.halo_idx, 0], points[clu.halo_idx, 1], s=40, c=\"grey\")\n", 175 | "ax[2].tick_params(labelsize=15)\n", 176 | "for _ax in ax:\n", 177 | " _ax.plot([-mux, -mux], [-1.5 * muy, 1.5 * muy], '--', linewidth=2, color=\"red\")\n", 178 | " _ax.plot([mux, mux], [-1.5 * muy, 1.5 * muy], '--', linewidth=2, color=\"red\")\n", 179 | " _ax.plot([-1.5 * mux, 1.5 * mux], [-muy, -muy], '--', linewidth=2, color=\"red\")\n", 180 | " _ax.plot([-1.5 * mux, 1.5 * mux], [muy, muy], '--', linewidth=2, color=\"red\")\n", 181 | " _ax.set_xlabel(r\"x / a.u.\", fontsize=20)\n", 182 | " _ax.set_ylabel(r\"y / a.u.\", fontsize=20)\n", 183 | " _ax.tick_params(labelsize=15)\n", 184 | " _ax.set_xlim([-7, 7])\n", 185 | " _ax.set_ylim([-7, 7])\n", 186 | " _ax.set_aspect('equal')\n", 187 | "fig.tight_layout()" 188 | ] 189 | }, 190 | { 191 | "cell_type": "markdown", 192 | "metadata": {}, 193 | "source": [ 194 | "This concludes the example.\n", 195 | "\n", 196 | "In the remaining part, we address the performance of the pydpc implementation (numpy + cython-wrapped C code) with respect to an older development version (numpy). In particular, we look at the numerically most demanding part of computing the Euclidean distances between the data points and estimating density and delta." 197 | ] 198 | }, 199 | { 200 | "cell_type": "code", 201 | "execution_count": null, 202 | "metadata": { 203 | "collapsed": false 204 | }, 205 | "outputs": [], 206 | "source": [ 207 | "npoints = 1000\n", 208 | "points = np.zeros(shape=(npoints, 2), dtype=np.float64)\n", 209 | "points[:, 0] = np.random.randn(npoints) + 1.8 * (-1)**np.random.randint(0, high=2, size=npoints)\n", 210 | "points[:, 1] = np.random.randn(npoints) + 1.8 * (-1)**np.random.randint(0, high=2, size=npoints)\n", 211 | "\n", 212 | "%timeit Cluster(points, fraction=0.02, autoplot=False)\n", 213 | "%timeit RefCluster(fraction=0.02, autoplot=False).load(points)" 214 | ] 215 | }, 216 | { 217 | "cell_type": "markdown", 218 | "metadata": {}, 219 | "source": [ 220 | "The next two cells measure the full clustering." 
221 | ] 222 | }, 223 | { 224 | "cell_type": "code", 225 | "execution_count": null, 226 | "metadata": { 227 | "collapsed": false 228 | }, 229 | "outputs": [], 230 | "source": [ 231 | "%%timeit\n", 232 | "Cluster(points, fraction=0.02, autoplot=False).assign(20, 1.5)" 233 | ] 234 | }, 235 | { 236 | "cell_type": "code", 237 | "execution_count": null, 238 | "metadata": { 239 | "collapsed": false 240 | }, 241 | "outputs": [], 242 | "source": [ 243 | "%%timeit\n", 244 | "tmp = RefCluster(fraction=0.02, autoplot=False)\n", 245 | "tmp.load(points)\n", 246 | "tmp.assign(20, 1.5)" 247 | ] 248 | } 249 | ], 250 | "metadata": { 251 | "kernelspec": { 252 | "display_name": "Python 2", 253 | "language": "python", 254 | "name": "python2" 255 | }, 256 | "language_info": { 257 | "codemirror_mode": { 258 | "name": "ipython", 259 | "version": 2 260 | }, 261 | "file_extension": ".py", 262 | "mimetype": "text/x-python", 263 | "name": "python", 264 | "nbconvert_exporter": "python", 265 | "pygments_lexer": "ipython2", 266 | "version": "2.7.11" 267 | } 268 | }, 269 | "nbformat": 4, 270 | "nbformat_minor": 0 271 | } 272 | -------------------------------------------------------------------------------- /pydpc/__init__.py: -------------------------------------------------------------------------------- 1 | # This file is part of pydpc. 2 | # 3 | # Copyright 2016 Christoph Wehmeyer 4 | # 5 | # pydpc is free software: you can redistribute it and/or modify 6 | # it under the terms of the GNU Lesser General Public License as published by 7 | # the Free Software Foundation, either version 3 of the License, or 8 | # (at your option) any later version. 9 | # 10 | # This program is distributed in the hope that it will be useful, 11 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 12 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 13 | # GNU General Public License for more details. 14 | # 15 | # You should have received a copy of the GNU Lesser General Public License 16 | # along with this program. If not, see . 17 | 18 | from .dpc import Cluster 19 | 20 | from ._version import get_versions 21 | __version__ = get_versions()['version'] 22 | del get_versions 23 | 24 | __author__ = "Christoph Wehmeyer" 25 | __copyright__ = "Copyright 2016 Christoph Wehmeyer" 26 | __license__ = "LGPLv3+" 27 | __email__ = "christoph.wehmeyer@fu-berlin.de" 28 | -------------------------------------------------------------------------------- /pydpc/_reference.py: -------------------------------------------------------------------------------- 1 | # This file is part of pydpc. 2 | # 3 | # Copyright 2016 Christoph Wehmeyer 4 | # 5 | # pydpc is free software: you can redistribute it and/or modify 6 | # it under the terms of the GNU Lesser General Public License as published by 7 | # the Free Software Foundation, either version 3 of the License, or 8 | # (at your option) any later version. 9 | # 10 | # This program is distributed in the hope that it will be useful, 11 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 12 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 13 | # GNU General Public License for more details. 14 | # 15 | # You should have received a copy of the GNU Lesser General Public License 16 | # along with this program. If not, see . 17 | 18 | r"""This module provides a previously tested development version. 
It is rather 19 | slow and only used for consistency checks""" 20 | 21 | import numpy as _np 22 | import matplotlib.pyplot as _plt 23 | 24 | class Cluster(object): 25 | def __init__(self, fraction=0.02, autoplot=True): 26 | self.fraction = fraction 27 | self.autoplot = autoplot 28 | def load(self, points): 29 | self.points = points 30 | self.npoints = self.points.shape[0] 31 | self._get_distances() 32 | self._get_kernel_size() 33 | self._get_density() 34 | self._get_delta_and_neighbour() 35 | if self.autoplot: 36 | self.draw_decision_graph() 37 | def draw_decision_graph(self, min_density=None, min_delta=None): 38 | fig, ax = _plt.subplots(figsize=(8, 4.5)) 39 | ax.scatter(self.density, self.delta, s=40) 40 | if min_density is not None and min_delta is not None: 41 | ax.plot( 42 | [min_density, self.density.max()], [min_delta, min_delta], linewidth=2, color="red") 43 | ax.plot( 44 | [min_density, min_density], [min_delta, self.delta.max()], linewidth=2, color="red") 45 | ax.set_xlabel(r"density", fontsize=20) 46 | ax.set_ylabel(r"delta / a.u.", fontsize=20) 47 | ax.tick_params(labelsize=15) 48 | def assign(self, min_density, min_delta, border_only=False): 49 | self.min_density = min_density 50 | self.min_delta = min_delta 51 | self.border_only = border_only 52 | if self.autoplot: 53 | self.draw_decision_graph(self.min_density, self.min_delta) 54 | self._get_cluster_indices() 55 | self._get_membership() 56 | self._get_halo() 57 | def _get_distances(self): 58 | self.distances = _np.zeros(shape=(self.npoints, self.npoints), dtype=_np.float64) 59 | for i in range(self.npoints - 1): 60 | for j in range(i + 1, self.npoints): 61 | self.distances[i, j] = _np.linalg.norm(self.points[i, :] - self.points[j, :]) 62 | self.distances[j, i] = self.distances[i, j] 63 | def _get_kernel_size(self): 64 | arr = self.distances[0, 1:] 65 | for i in range(1, self.npoints - 1): 66 | arr = _np.hstack((arr, self.distances[i, i + 1:])) 67 | arr = _np.sort(arr) 68 | imax = int(_np.floor(0.5 + self.fraction * arr.shape[0])) 69 | self.kernel_size = arr[imax] 70 | def _get_density(self): 71 | self.density = _np.zeros(shape=(self.npoints,), dtype=_np.float64) 72 | for i in range(self.npoints - 1): 73 | for j in range(i + 1, self.npoints): 74 | rho = _np.exp(-(self.distances[i, j] / self.kernel_size)**2) 75 | self.density[i] += rho 76 | self.density[j] += rho 77 | def _get_delta_and_neighbour(self): 78 | self.order = _np.argsort(self.density)[::-1] 79 | max_distance = self.distances.max() 80 | self.delta = _np.zeros(shape=self.order.shape, dtype=_np.float64) 81 | self.delta[self.order[0]] = -1.0 82 | self.delta[self.order[1:]] = max_distance 83 | self.neighbour = _np.empty_like(self.order) 84 | self.neighbour[:] = -1 85 | for i in range(1, self.npoints): 86 | for j in range(i): 87 | if self.distances[self.order[i], self.order[j]] < self.delta[self.order[i]]: 88 | self.delta[self.order[i]] = self.distances[self.order[i], self.order[j]] 89 | self.neighbour[self.order[i]] = self.order[j] 90 | self.delta[self.order[0]] = self.delta.max() 91 | def _get_cluster_indices(self): 92 | self.clusters = _np.intersect1d( 93 | _np.where(self.density > self.min_density)[0], 94 | _np.where(self.delta > self.min_delta)[0], assume_unique=True) 95 | self.ncl = self.clusters.shape[0] 96 | def _get_membership(self): 97 | self.membership = -1 * _np.ones(shape=self.order.shape, dtype=_np.intc) 98 | for i in range(self.ncl): 99 | self.membership[self.clusters[i]] = i 100 | for i in range(self.npoints): 101 | if self.membership[self.order[i]] == 
-1: 102 | self.membership[self.order[i]] = self.membership[self.neighbour[self.order[i]]] 103 | def _get_halo(self): 104 | self.halo = self.membership.copy() 105 | self.border_density = _np.zeros(shape=(self.ncl,), dtype=_np.float64) 106 | self.border_member = _np.zeros(shape=self.membership.shape, dtype=_np.bool) 107 | for i in range(self.npoints - 1): 108 | for j in range(i + 1, self.npoints): 109 | if (self.membership[i] != self.membership[j]) and (self.distances[i, j] < self.kernel_size): 110 | average_density = 0.5 * (self.density[i] + self.density[j]) 111 | if self.border_density[self.membership[i]] < average_density: 112 | self.border_density[self.membership[i]] = average_density 113 | if self.border_density[self.membership[j]] < average_density: 114 | self.border_density[self.membership[j]] = average_density 115 | self.border_member[i] = True 116 | self.border_member[j] = True 117 | if self.border_only: 118 | for i in range(self.npoints): 119 | if (self.density[i] < self.border_density[self.membership[i]]) and self.border_member[i]: 120 | self.halo[i] = -1 121 | else: 122 | for i in range(self.npoints): 123 | if (self.density[i] < self.border_density[self.membership[i]]): 124 | self.halo[i] = -1 125 | self.halo_idx = _np.where(self.halo == -1)[0] 126 | self.core_idx = _np.where(self.halo != -1)[0] 127 | -------------------------------------------------------------------------------- /pydpc/_version.py: -------------------------------------------------------------------------------- 1 | 2 | # This file helps to compute a version number in source trees obtained from 3 | # git-archive tarball (such as those provided by githubs download-from-tag 4 | # feature). Distribution tarballs (built by setup.py sdist) and build 5 | # directories (produced by setup.py build) will contain a much shorter file 6 | # that just contains the computed version number. 7 | 8 | # This file is released into the public domain. Generated by 9 | # versioneer-0.15 (https://github.com/warner/python-versioneer) 10 | 11 | import errno 12 | import os 13 | import re 14 | import subprocess 15 | import sys 16 | 17 | 18 | def get_keywords(): 19 | # these strings will be replaced by git during git-archive. 20 | # setup.py/versioneer.py will grep for the variable names, so they must 21 | # each be defined on a line of their own. _version.py will just call 22 | # get_keywords(). 
23 | git_refnames = " (HEAD -> master)" 24 | git_full = "ae72ff092b8e2fe3f76e3285e35c0d175fb62847" 25 | keywords = {"refnames": git_refnames, "full": git_full} 26 | return keywords 27 | 28 | 29 | class VersioneerConfig: 30 | pass 31 | 32 | 33 | def get_config(): 34 | # these strings are filled in when 'setup.py versioneer' creates 35 | # _version.py 36 | cfg = VersioneerConfig() 37 | cfg.VCS = "git" 38 | cfg.style = "pep440" 39 | cfg.tag_prefix = "" 40 | cfg.parentdir_prefix = "pydpc-" 41 | cfg.versionfile_source = "pydpc/_version.py" 42 | cfg.verbose = False 43 | return cfg 44 | 45 | 46 | class NotThisMethod(Exception): 47 | pass 48 | 49 | 50 | LONG_VERSION_PY = {} 51 | HANDLERS = {} 52 | 53 | 54 | def register_vcs_handler(vcs, method): # decorator 55 | def decorate(f): 56 | if vcs not in HANDLERS: 57 | HANDLERS[vcs] = {} 58 | HANDLERS[vcs][method] = f 59 | return f 60 | return decorate 61 | 62 | 63 | def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False): 64 | assert isinstance(commands, list) 65 | p = None 66 | for c in commands: 67 | try: 68 | dispcmd = str([c] + args) 69 | # remember shell=False, so use git.cmd on windows, not just git 70 | p = subprocess.Popen([c] + args, cwd=cwd, stdout=subprocess.PIPE, 71 | stderr=(subprocess.PIPE if hide_stderr 72 | else None)) 73 | break 74 | except EnvironmentError: 75 | e = sys.exc_info()[1] 76 | if e.errno == errno.ENOENT: 77 | continue 78 | if verbose: 79 | print("unable to run %s" % dispcmd) 80 | print(e) 81 | return None 82 | else: 83 | if verbose: 84 | print("unable to find command, tried %s" % (commands,)) 85 | return None 86 | stdout = p.communicate()[0].strip() 87 | if sys.version_info[0] >= 3: 88 | stdout = stdout.decode() 89 | if p.returncode != 0: 90 | if verbose: 91 | print("unable to run %s (error)" % dispcmd) 92 | return None 93 | return stdout 94 | 95 | 96 | def versions_from_parentdir(parentdir_prefix, root, verbose): 97 | # Source tarballs conventionally unpack into a directory that includes 98 | # both the project name and a version string. 99 | dirname = os.path.basename(root) 100 | if not dirname.startswith(parentdir_prefix): 101 | if verbose: 102 | print("guessing rootdir is '%s', but '%s' doesn't start with " 103 | "prefix '%s'" % (root, dirname, parentdir_prefix)) 104 | raise NotThisMethod("rootdir doesn't start with parentdir_prefix") 105 | return {"version": dirname[len(parentdir_prefix):], 106 | "full-revisionid": None, 107 | "dirty": False, "error": None} 108 | 109 | 110 | @register_vcs_handler("git", "get_keywords") 111 | def git_get_keywords(versionfile_abs): 112 | # the code embedded in _version.py can just fetch the value of these 113 | # keywords. When used from setup.py, we don't want to import _version.py, 114 | # so we do it with a regexp instead. This function is not used from 115 | # _version.py. 
116 | keywords = {} 117 | try: 118 | f = open(versionfile_abs, "r") 119 | for line in f.readlines(): 120 | if line.strip().startswith("git_refnames ="): 121 | mo = re.search(r'=\s*"(.*)"', line) 122 | if mo: 123 | keywords["refnames"] = mo.group(1) 124 | if line.strip().startswith("git_full ="): 125 | mo = re.search(r'=\s*"(.*)"', line) 126 | if mo: 127 | keywords["full"] = mo.group(1) 128 | f.close() 129 | except EnvironmentError: 130 | pass 131 | return keywords 132 | 133 | 134 | @register_vcs_handler("git", "keywords") 135 | def git_versions_from_keywords(keywords, tag_prefix, verbose): 136 | if not keywords: 137 | raise NotThisMethod("no keywords at all, weird") 138 | refnames = keywords["refnames"].strip() 139 | if refnames.startswith("$Format"): 140 | if verbose: 141 | print("keywords are unexpanded, not using") 142 | raise NotThisMethod("unexpanded keywords, not a git-archive tarball") 143 | refs = set([r.strip() for r in refnames.strip("()").split(",")]) 144 | # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of 145 | # just "foo-1.0". If we see a "tag: " prefix, prefer those. 146 | TAG = "tag: " 147 | tags = set([r[len(TAG):] for r in refs if r.startswith(TAG)]) 148 | if not tags: 149 | # Either we're using git < 1.8.3, or there really are no tags. We use 150 | # a heuristic: assume all version tags have a digit. The old git %d 151 | # expansion behaves like git log --decorate=short and strips out the 152 | # refs/heads/ and refs/tags/ prefixes that would let us distinguish 153 | # between branches and tags. By ignoring refnames without digits, we 154 | # filter out many common branch names like "release" and 155 | # "stabilization", as well as "HEAD" and "master". 156 | tags = set([r for r in refs if re.search(r'\d', r)]) 157 | if verbose: 158 | print("discarding '%s', no digits" % ",".join(refs-tags)) 159 | if verbose: 160 | print("likely tags: %s" % ",".join(sorted(tags))) 161 | for ref in sorted(tags): 162 | # sorting will prefer e.g. "2.0" over "2.0rc1" 163 | if ref.startswith(tag_prefix): 164 | r = ref[len(tag_prefix):] 165 | if verbose: 166 | print("picking %s" % r) 167 | return {"version": r, 168 | "full-revisionid": keywords["full"].strip(), 169 | "dirty": False, "error": None 170 | } 171 | # no suitable tags, so version is "0+unknown", but full hex is still there 172 | if verbose: 173 | print("no suitable tags, using unknown + full revision id") 174 | return {"version": "0+unknown", 175 | "full-revisionid": keywords["full"].strip(), 176 | "dirty": False, "error": "no suitable tags"} 177 | 178 | 179 | @register_vcs_handler("git", "pieces_from_vcs") 180 | def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command): 181 | # this runs 'git' from the root of the source tree. This only gets called 182 | # if the git-archive 'subst' keywords were *not* expanded, and 183 | # _version.py hasn't already been rewritten with a short version string, 184 | # meaning we're inside a checked out source tree. 
185 | 186 | if not os.path.exists(os.path.join(root, ".git")): 187 | if verbose: 188 | print("no .git in %s" % root) 189 | raise NotThisMethod("no .git directory") 190 | 191 | GITS = ["git"] 192 | if sys.platform == "win32": 193 | GITS = ["git.cmd", "git.exe"] 194 | # if there is a tag, this yields TAG-NUM-gHEX[-dirty] 195 | # if there are no tags, this yields HEX[-dirty] (no NUM) 196 | describe_out = run_command(GITS, ["describe", "--tags", "--dirty", 197 | "--always", "--long"], 198 | cwd=root) 199 | # --long was added in git-1.5.5 200 | if describe_out is None: 201 | raise NotThisMethod("'git describe' failed") 202 | describe_out = describe_out.strip() 203 | full_out = run_command(GITS, ["rev-parse", "HEAD"], cwd=root) 204 | if full_out is None: 205 | raise NotThisMethod("'git rev-parse' failed") 206 | full_out = full_out.strip() 207 | 208 | pieces = {} 209 | pieces["long"] = full_out 210 | pieces["short"] = full_out[:7] # maybe improved later 211 | pieces["error"] = None 212 | 213 | # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty] 214 | # TAG might have hyphens. 215 | git_describe = describe_out 216 | 217 | # look for -dirty suffix 218 | dirty = git_describe.endswith("-dirty") 219 | pieces["dirty"] = dirty 220 | if dirty: 221 | git_describe = git_describe[:git_describe.rindex("-dirty")] 222 | 223 | # now we have TAG-NUM-gHEX or HEX 224 | 225 | if "-" in git_describe: 226 | # TAG-NUM-gHEX 227 | mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe) 228 | if not mo: 229 | # unparseable. Maybe git-describe is misbehaving? 230 | pieces["error"] = ("unable to parse git-describe output: '%s'" 231 | % describe_out) 232 | return pieces 233 | 234 | # tag 235 | full_tag = mo.group(1) 236 | if not full_tag.startswith(tag_prefix): 237 | if verbose: 238 | fmt = "tag '%s' doesn't start with prefix '%s'" 239 | print(fmt % (full_tag, tag_prefix)) 240 | pieces["error"] = ("tag '%s' doesn't start with prefix '%s'" 241 | % (full_tag, tag_prefix)) 242 | return pieces 243 | pieces["closest-tag"] = full_tag[len(tag_prefix):] 244 | 245 | # distance: number of commits since tag 246 | pieces["distance"] = int(mo.group(2)) 247 | 248 | # commit: short hex revision ID 249 | pieces["short"] = mo.group(3) 250 | 251 | else: 252 | # HEX: no tags 253 | pieces["closest-tag"] = None 254 | count_out = run_command(GITS, ["rev-list", "HEAD", "--count"], 255 | cwd=root) 256 | pieces["distance"] = int(count_out) # total number of commits 257 | 258 | return pieces 259 | 260 | 261 | def plus_or_dot(pieces): 262 | if "+" in pieces.get("closest-tag", ""): 263 | return "." 264 | return "+" 265 | 266 | 267 | def render_pep440(pieces): 268 | # now build up version string, with post-release "local version 269 | # identifier". Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you 270 | # get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty 271 | 272 | # exceptions: 273 | # 1: no tags. git_describe was just HEX. 0+untagged.DISTANCE.gHEX[.dirty] 274 | 275 | if pieces["closest-tag"]: 276 | rendered = pieces["closest-tag"] 277 | if pieces["distance"] or pieces["dirty"]: 278 | rendered += plus_or_dot(pieces) 279 | rendered += "%d.g%s" % (pieces["distance"], pieces["short"]) 280 | if pieces["dirty"]: 281 | rendered += ".dirty" 282 | else: 283 | # exception #1 284 | rendered = "0+untagged.%d.g%s" % (pieces["distance"], 285 | pieces["short"]) 286 | if pieces["dirty"]: 287 | rendered += ".dirty" 288 | return rendered 289 | 290 | 291 | def render_pep440_pre(pieces): 292 | # TAG[.post.devDISTANCE] . 
No -dirty 293 | 294 | # exceptions: 295 | # 1: no tags. 0.post.devDISTANCE 296 | 297 | if pieces["closest-tag"]: 298 | rendered = pieces["closest-tag"] 299 | if pieces["distance"]: 300 | rendered += ".post.dev%d" % pieces["distance"] 301 | else: 302 | # exception #1 303 | rendered = "0.post.dev%d" % pieces["distance"] 304 | return rendered 305 | 306 | 307 | def render_pep440_post(pieces): 308 | # TAG[.postDISTANCE[.dev0]+gHEX] . The ".dev0" means dirty. Note that 309 | # .dev0 sorts backwards (a dirty tree will appear "older" than the 310 | # corresponding clean one), but you shouldn't be releasing software with 311 | # -dirty anyways. 312 | 313 | # exceptions: 314 | # 1: no tags. 0.postDISTANCE[.dev0] 315 | 316 | if pieces["closest-tag"]: 317 | rendered = pieces["closest-tag"] 318 | if pieces["distance"] or pieces["dirty"]: 319 | rendered += ".post%d" % pieces["distance"] 320 | if pieces["dirty"]: 321 | rendered += ".dev0" 322 | rendered += plus_or_dot(pieces) 323 | rendered += "g%s" % pieces["short"] 324 | else: 325 | # exception #1 326 | rendered = "0.post%d" % pieces["distance"] 327 | if pieces["dirty"]: 328 | rendered += ".dev0" 329 | rendered += "+g%s" % pieces["short"] 330 | return rendered 331 | 332 | 333 | def render_pep440_old(pieces): 334 | # TAG[.postDISTANCE[.dev0]] . The ".dev0" means dirty. 335 | 336 | # exceptions: 337 | # 1: no tags. 0.postDISTANCE[.dev0] 338 | 339 | if pieces["closest-tag"]: 340 | rendered = pieces["closest-tag"] 341 | if pieces["distance"] or pieces["dirty"]: 342 | rendered += ".post%d" % pieces["distance"] 343 | if pieces["dirty"]: 344 | rendered += ".dev0" 345 | else: 346 | # exception #1 347 | rendered = "0.post%d" % pieces["distance"] 348 | if pieces["dirty"]: 349 | rendered += ".dev0" 350 | return rendered 351 | 352 | 353 | def render_git_describe(pieces): 354 | # TAG[-DISTANCE-gHEX][-dirty], like 'git describe --tags --dirty 355 | # --always' 356 | 357 | # exceptions: 358 | # 1: no tags. HEX[-dirty] (note: no 'g' prefix) 359 | 360 | if pieces["closest-tag"]: 361 | rendered = pieces["closest-tag"] 362 | if pieces["distance"]: 363 | rendered += "-%d-g%s" % (pieces["distance"], pieces["short"]) 364 | else: 365 | # exception #1 366 | rendered = pieces["short"] 367 | if pieces["dirty"]: 368 | rendered += "-dirty" 369 | return rendered 370 | 371 | 372 | def render_git_describe_long(pieces): 373 | # TAG-DISTANCE-gHEX[-dirty], like 'git describe --tags --dirty 374 | # --always -long'. The distance/hash is unconditional. 375 | 376 | # exceptions: 377 | # 1: no tags. 
HEX[-dirty] (note: no 'g' prefix) 378 | 379 | if pieces["closest-tag"]: 380 | rendered = pieces["closest-tag"] 381 | rendered += "-%d-g%s" % (pieces["distance"], pieces["short"]) 382 | else: 383 | # exception #1 384 | rendered = pieces["short"] 385 | if pieces["dirty"]: 386 | rendered += "-dirty" 387 | return rendered 388 | 389 | 390 | def render(pieces, style): 391 | if pieces["error"]: 392 | return {"version": "unknown", 393 | "full-revisionid": pieces.get("long"), 394 | "dirty": None, 395 | "error": pieces["error"]} 396 | 397 | if not style or style == "default": 398 | style = "pep440" # the default 399 | 400 | if style == "pep440": 401 | rendered = render_pep440(pieces) 402 | elif style == "pep440-pre": 403 | rendered = render_pep440_pre(pieces) 404 | elif style == "pep440-post": 405 | rendered = render_pep440_post(pieces) 406 | elif style == "pep440-old": 407 | rendered = render_pep440_old(pieces) 408 | elif style == "git-describe": 409 | rendered = render_git_describe(pieces) 410 | elif style == "git-describe-long": 411 | rendered = render_git_describe_long(pieces) 412 | else: 413 | raise ValueError("unknown style '%s'" % style) 414 | 415 | return {"version": rendered, "full-revisionid": pieces["long"], 416 | "dirty": pieces["dirty"], "error": None} 417 | 418 | 419 | def get_versions(): 420 | # I am in _version.py, which lives at ROOT/VERSIONFILE_SOURCE. If we have 421 | # __file__, we can work backwards from there to the root. Some 422 | # py2exe/bbfreeze/non-CPython implementations don't do __file__, in which 423 | # case we can only use expanded keywords. 424 | 425 | cfg = get_config() 426 | verbose = cfg.verbose 427 | 428 | try: 429 | return git_versions_from_keywords(get_keywords(), cfg.tag_prefix, 430 | verbose) 431 | except NotThisMethod: 432 | pass 433 | 434 | try: 435 | root = os.path.realpath(__file__) 436 | # versionfile_source is the relative path from the top of the source 437 | # tree (where the .git directory might live) to this file. Invert 438 | # this to find the root from __file__. 439 | for i in cfg.versionfile_source.split('/'): 440 | root = os.path.dirname(root) 441 | except NameError: 442 | return {"version": "0+unknown", "full-revisionid": None, 443 | "dirty": None, 444 | "error": "unable to find root of source tree"} 445 | 446 | try: 447 | pieces = git_pieces_from_vcs(cfg.tag_prefix, root, verbose) 448 | return render(pieces, cfg.style) 449 | except NotThisMethod: 450 | pass 451 | 452 | try: 453 | if cfg.parentdir_prefix: 454 | return versions_from_parentdir(cfg.parentdir_prefix, root, verbose) 455 | except NotThisMethod: 456 | pass 457 | 458 | return {"version": "0+unknown", "full-revisionid": None, 459 | "dirty": None, 460 | "error": "unable to compute version"} 461 | -------------------------------------------------------------------------------- /pydpc/dpc.py: -------------------------------------------------------------------------------- 1 | # This file is part of pydpc. 2 | # 3 | # Copyright 2016 Christoph Wehmeyer 4 | # 5 | # pydpc is free software: you can redistribute it and/or modify 6 | # it under the terms of the GNU Lesser General Public License as published by 7 | # the Free Software Foundation, either version 3 of the License, or 8 | # (at your option) any later version. 9 | # 10 | # This program is distributed in the hope that it will be useful, 11 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 12 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 13 | # GNU General Public License for more details. 
14 | # 15 | # You should have received a copy of the GNU Lesser General Public License 16 | # along with this program. If not, see . 17 | 18 | import numpy as _np 19 | import matplotlib.pyplot as _plt 20 | from . import core as _core 21 | 22 | __all__ = ['Cluster'] 23 | 24 | class Distances(object): 25 | def __init__(self, points, distances = None): 26 | self.points = points 27 | self.npoints = self.points.shape[0] 28 | 29 | if distances is None: 30 | self.distances = _core.get_distances(self.points) 31 | else: 32 | if not distances.shape == (self.npoints, self.npoints): 33 | raise ValueError("Distance matrix must have shape (n_points, n_points)") 34 | self.distances = distances 35 | 36 | self.max_distance = self.distances.max() 37 | 38 | class Density(Distances): 39 | def __init__(self, points, fraction, kernel_size = None, **kwargs): 40 | super(Density, self).__init__(points, **kwargs) 41 | self.fraction = fraction 42 | if kernel_size is None: 43 | self.kernel_size = _core.get_kernel_size(self.distances, self.fraction) 44 | else: 45 | self.kernel_size = kernel_size 46 | if self.kernel_size <= 0: 47 | raise ValueError( 48 | ("kernel_size = %s is invalid; must be strictly positive. " 49 | "This can occur in the degenerate case where the distance matrix is all zeros, check your input.") % self.kernel_size 50 | ) 51 | self.density = _core.get_density(self.distances, self.kernel_size) 52 | 53 | class Graph(Density): 54 | def __init__(self, points, fraction, **kwargs): 55 | super(Graph, self).__init__(points, fraction, **kwargs) 56 | self.order = _np.ascontiguousarray(_np.argsort(self.density).astype(_np.intc)[::-1]) 57 | self.delta, self.neighbour = _core.get_delta_and_neighbour( 58 | self.order, self.distances, self.max_distance) 59 | 60 | class Cluster(Graph): 61 | def __init__(self, points, fraction=0.02, autoplot=True, **kwargs): 62 | super(Cluster, self).__init__(points, fraction, **kwargs) 63 | self.autoplot = autoplot 64 | if self.autoplot: 65 | self.draw_decision_graph() 66 | def draw_decision_graph(self, min_density=None, min_delta=None): 67 | fig, ax = _plt.subplots(figsize=(5, 5)) 68 | ax.scatter(self.density, self.delta, s=40) 69 | if min_density is not None and min_delta is not None: 70 | ax.plot( 71 | [min_density, self.density.max()], [min_delta, min_delta], linewidth=2, color="red") 72 | ax.plot( 73 | [min_density, min_density], [min_delta, self.delta.max()], linewidth=2, color="red") 74 | ax.set_xlabel(r"density", fontsize=20) 75 | ax.set_ylabel(r"delta / a.u.", fontsize=20) 76 | ax.tick_params(labelsize=15) 77 | return fig, ax 78 | def assign(self, min_density, min_delta, border_only=False): 79 | self.min_density = min_density 80 | self.min_delta = min_delta 81 | self.border_only = border_only 82 | if self.autoplot: 83 | self.draw_decision_graph(self.min_density, self.min_delta) 84 | self._get_cluster_indices() 85 | self.membership = _core.get_membership(self.clusters, self.order, self.neighbour) 86 | self.border_density, self.border_member = _core.get_border( 87 | self.kernel_size, self.distances, self.density, self.membership, self.nclusters) 88 | self.halo_idx, self.core_idx = _core.get_halo( 89 | self.density, self.membership, 90 | self.border_density, self.border_member.astype(_np.intc), border_only=border_only) 91 | def _get_cluster_indices(self): 92 | self.clusters = _np.intersect1d( 93 | _np.where(self.density > self.min_density)[0], 94 | _np.where(self.delta > self.min_delta)[0], assume_unique=True).astype(_np.intc) 95 | self.nclusters = self.clusters.shape[0] 96 | 
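The ``Cluster`` class defined above is the public entry point of pydpc. A minimal usage sketch, assuming only the API shown in ``pydpc/dpc.py`` and the toy data from ``ipython/Example01.ipynb`` (the thresholds 20 and 1.5 are the notebook's example values, not defaults):

```python
# Minimal usage sketch of pydpc.Cluster (toy data and thresholds follow ipython/Example01.ipynb).
import numpy as np
from pydpc import Cluster

# two-dimensional toy data: four Gaussian blobs centred at the corners of a square
npoints = 2000
points = np.zeros(shape=(npoints, 2), dtype=np.float64)
points[:, 0] = np.random.randn(npoints) + 1.6 * (-1)**np.random.randint(0, high=2, size=npoints)
points[:, 1] = np.random.randn(npoints) + 1.6 * (-1)**np.random.randint(0, high=2, size=npoints)

# the constructor computes distances, kernel size, density and delta;
# autoplot=False suppresses the decision-graph figure
clu = Cluster(points, fraction=0.02, autoplot=False)

# threshold the decision graph to pick cluster centres, then assign all points
clu.assign(min_density=20, min_delta=1.5)

print(clu.nclusters)                          # number of clusters found
print(clu.membership[:10])                    # cluster label for each point
print(clu.core_idx.size, clu.halo_idx.size)   # core/halo split
```

With ``autoplot=True`` (the default), the constructor draws the decision graph so suitable ``min_density`` and ``min_delta`` bounds can be read off before calling ``assign``.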
--------------------------------------------------------------------------------
/requirements.txt:
--------------------------------------------------------------------------------
1 | setuptools
2 | numpy
3 | cython
4 | matplotlib
5 | nose
--------------------------------------------------------------------------------
/setup.cfg:
--------------------------------------------------------------------------------
 1 | 
 2 | # See the docstring in versioneer.py for instructions. Note that you must
 3 | # re-run 'versioneer.py setup' after changing this section, and commit the
 4 | # resulting files.
 5 | 
 6 | [versioneer]
 7 | VCS = git
 8 | style = pep440
 9 | versionfile_source = pydpc/_version.py
10 | #versionfile_build =
11 | tag_prefix =
12 | parentdir_prefix = pydpc-
13 | 
14 | 
--------------------------------------------------------------------------------
/setup.py:
--------------------------------------------------------------------------------
 1 | #!/usr/bin/env python
 2 | 
 3 | # This file is part of pydpc.
 4 | #
 5 | # Copyright 2016 Christoph Wehmeyer
 6 | #
 7 | # pydpc is free software: you can redistribute it and/or modify
 8 | # it under the terms of the GNU Lesser General Public License as published by
 9 | # the Free Software Foundation, either version 3 of the License, or
10 | # (at your option) any later version.
11 | #
12 | # This program is distributed in the hope that it will be useful,
13 | # but WITHOUT ANY WARRANTY; without even the implied warranty of
14 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 | # GNU General Public License for more details.
16 | #
17 | # You should have received a copy of the GNU Lesser General Public License
18 | # along with this program. If not, see <http://www.gnu.org/licenses/>.
19 | 
20 | from setuptools import setup, Extension
21 | import versioneer
22 | 
23 | def extensions():
24 |     from numpy import get_include
25 |     from Cython.Build import cythonize
26 |     ext_core = Extension(
27 |         "pydpc.core",
28 |         sources=["ext/core.pyx", "ext/_core.c"],
29 |         include_dirs=[get_include()],
30 |         extra_compile_args=["-O3", "-std=c99"])
31 |     exts = [ext_core]
32 |     return cythonize(exts)
33 | 
34 | class lazy_cythonize(list):
35 |     """evaluates extension list lazily.
36 |     pattern taken from http://tinyurl.com/qb8478q"""
37 |     def __init__(self, callback):
38 |         self._list, self.callback = None, callback
39 |     def c_list(self):
40 |         if self._list is None: self._list = self.callback()
41 |         return self._list
42 |     def __iter__(self):
43 |         for e in self.c_list(): yield e
44 |     def __getitem__(self, ii): return self.c_list()[ii]
45 |     def __len__(self): return len(self.c_list())
46 | 
47 | def long_description():
48 |     ld = "Clustering by fast search and find of density peaks, designed by Alex Rodriguez"
49 |     ld += " and Alessandro Laio, is a density-peak-based clustering algorithm. The pydpc package"
50 |     ld += " aims to make this algorithm available for Python users."
51 |     return ld
52 | 
53 | setup(
54 |     cmdclass=versioneer.get_cmdclass(),
55 |     ext_modules=lazy_cythonize(extensions),
56 |     name='pydpc',
57 |     version=versioneer.get_version(),
58 |     description='Python package for Density Peak-based Clustering',
59 |     long_description=long_description(),
60 |     classifiers=[
61 |         'Development Status :: 3 - Alpha',
62 |         'Environment :: Console',
63 |         'Intended Audience :: Science/Research',
64 |         'License :: OSI Approved :: GNU Lesser General Public License v3 or later (LGPLv3+)',
65 |         'Natural Language :: English',
66 |         'Operating System :: MacOS :: MacOS X',
67 |         'Operating System :: POSIX',
68 |         'Operating System :: Microsoft :: Windows',
69 |         'Programming Language :: C',
70 |         'Programming Language :: Cython',
71 |         'Programming Language :: Python :: 2.7',
72 |         'Programming Language :: Python :: 3',
73 |         'Topic :: Scientific/Engineering :: Bio-Informatics',
74 |         'Topic :: Scientific/Engineering :: Chemistry',
75 |         'Topic :: Scientific/Engineering :: Mathematics',
76 |         'Topic :: Scientific/Engineering :: Physics'],
77 |     keywords=[
78 |         'cluster',
79 |         'density'],
80 |     url='https://github.com/cwehmeyer/pydpc',
81 |     author='Christoph Wehmeyer',
82 |     author_email='christoph.wehmeyer@fu-berlin.de',
83 |     license='LGPLv3+',
84 |     setup_requires=[
85 |         'numpy>=1.7',
86 |         'cython>=0.20',
87 |         'setuptools>=0.6'],
88 |     tests_require=[
89 |         'numpy>=1.7',
90 |         'nose>=1.3'],
91 |     install_requires=[
92 |         'numpy>=1.7',
93 |         'matplotlib'],
94 |     packages=['pydpc'],
95 |     test_suite='nose.collector',
96 |     scripts=[]
97 | )
98 | 
99 | 
--------------------------------------------------------------------------------
/test/test_consistency.py:
--------------------------------------------------------------------------------
 1 | # This file is part of pydpc.
 2 | #
 3 | # Copyright 2016 Christoph Wehmeyer
 4 | #
 5 | # pydpc is free software: you can redistribute it and/or modify
 6 | # it under the terms of the GNU Lesser General Public License as published by
 7 | # the Free Software Foundation, either version 3 of the License, or
 8 | # (at your option) any later version.
 9 | #
10 | # This program is distributed in the hope that it will be useful,
11 | # but WITHOUT ANY WARRANTY; without even the implied warranty of
12 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
13 | # GNU General Public License for more details.
14 | #
15 | # You should have received a copy of the GNU Lesser General Public License
16 | # along with this program. If not, see <http://www.gnu.org/licenses/>.
17 | 
18 | from pydpc._reference import Cluster as Ref
19 | from pydpc import Cluster
20 | import numpy as np
21 | from numpy.testing import assert_array_equal, assert_almost_equal
22 | 
23 | class TestFourGaussians2D(object):
24 |     @classmethod
25 |     def setup_class(cls):
26 |         # data generation
27 |         cls.npoints = 1000
28 |         cls.mux = 1.8
29 |         cls.muy = 1.8
30 |         cls.fraction = 0.02
31 |         cls.points = np.zeros(shape=(cls.npoints, 2), dtype=np.float64)
32 |         cls.points[:, 0] = np.random.randn(cls.npoints) + \
33 |             cls.mux * (-1)**np.random.randint(0, high=2, size=cls.npoints)
34 |         cls.points[:, 1] = np.random.randn(cls.npoints) + \
35 |             cls.muy * (-1)**np.random.randint(0, high=2, size=cls.npoints)
36 |         # cluster initialisation
37 |         cls.ref = Ref(cls.fraction, autoplot=False)
38 |         cls.ref.load(cls.points)
39 |         cls.ref.assign(20, 1.5)
40 |         cls.dpc = Cluster(cls.points, cls.fraction, autoplot=False)
41 |         cls.dpc.assign(20, 1.5)
42 |     @classmethod
43 |     def teardown_class(cls):
44 |         pass
45 |     def setup(self):
46 |         pass
47 |     def teardown(self):
48 |         pass
49 |     def test_distances(self):
50 |         assert_almost_equal(self.dpc.distances, self.ref.distances, decimal=10)
51 |     def test_kernel_size(self):
52 |         assert_almost_equal(self.dpc.kernel_size, self.ref.kernel_size, decimal=10)
53 |     def test_density(self):
54 |         assert_almost_equal(self.dpc.density, self.ref.density, decimal=10)
55 |     def test_order(self):
56 |         assert_array_equal(self.dpc.order, self.ref.order)
57 |     def test_delta(self):
58 |         assert_almost_equal(self.dpc.delta, self.ref.delta, decimal=10)
59 |     def test_neighbour(self):
60 |         assert_array_equal(self.dpc.neighbour, self.ref.neighbour)
61 |     def test_clusters(self):
62 |         assert_array_equal(self.dpc.clusters, self.ref.clusters)
63 |     def test_membership(self):
64 |         assert_array_equal(self.dpc.membership, self.ref.membership)
65 |     def test_border_density(self):
66 |         assert_almost_equal(self.dpc.border_density, self.ref.border_density, decimal=10)
67 |     def test_border_member(self):
68 |         assert_array_equal(self.dpc.border_member, self.ref.border_member)
69 |     def test_halo_idx(self):
70 |         assert_array_equal(self.dpc.halo_idx, self.ref.halo_idx)
71 |     def test_core_idx(self):
72 |         assert_array_equal(self.dpc.core_idx, self.ref.core_idx)
73 | 
--------------------------------------------------------------------------------
/test/test_indices.py:
--------------------------------------------------------------------------------
 1 | # This file is part of pydpc.
 2 | #
 3 | # Copyright 2016 Christoph Wehmeyer
 4 | #
 5 | # pydpc is free software: you can redistribute it and/or modify
 6 | # it under the terms of the GNU Lesser General Public License as published by
 7 | # the Free Software Foundation, either version 3 of the License, or
 8 | # (at your option) any later version.
 9 | #
10 | # This program is distributed in the hope that it will be useful,
11 | # but WITHOUT ANY WARRANTY; without even the implied warranty of
12 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
13 | # GNU General Public License for more details.
14 | #
15 | # You should have received a copy of the GNU Lesser General Public License
16 | # along with this program. If not, see <http://www.gnu.org/licenses/>.
17 | 18 | from pydpc import Cluster 19 | import numpy as np 20 | from nose.tools import assert_true 21 | from numpy.testing import assert_array_less 22 | 23 | class TestFourGaussians2D(object): 24 | @classmethod 25 | def setup_class(cls): 26 | # data generation 27 | cls.npoints = 1000 28 | cls.mux = 1.8 29 | cls.muy = 1.8 30 | cls.fraction = 0.02 31 | cls.points = np.zeros(shape=(cls.npoints, 2), dtype=np.float64) 32 | cls.points[:, 0] = np.random.randn(cls.npoints) + \ 33 | cls.mux * (-1)**np.random.randint(0, high=2, size=cls.npoints) 34 | cls.points[:, 1] = np.random.randn(cls.npoints) + \ 35 | cls.muy * (-1)**np.random.randint(0, high=2, size=cls.npoints) 36 | # cluster initialisation 37 | cls.dpc = Cluster(cls.points, cls.fraction, autoplot=False) 38 | cls.dpc.assign(20, 1.5) 39 | @classmethod 40 | def teardown_class(cls): 41 | pass 42 | def setup(self): 43 | pass 44 | def teardown(self): 45 | pass 46 | def test_order(self): 47 | assert_array_less(-1, self.dpc.order) 48 | assert_array_less(self.dpc.order, self.npoints) 49 | def test_neighbour(self): 50 | assert_array_less(-2, self.dpc.neighbour) 51 | assert_array_less(self.dpc.neighbour, self.npoints) 52 | assert_true((self.dpc.neighbour == -1).sum() == 1) 53 | def test_clusters(self): 54 | assert_array_less(-2, self.dpc.clusters) 55 | assert_array_less(self.dpc.clusters, self.npoints) 56 | def test_membership(self): 57 | assert_array_less(-1, self.dpc.membership) 58 | assert_array_less(self.dpc.membership, self.dpc.nclusters) 59 | def test_halo_idx(self): 60 | assert_array_less(-1, self.dpc.halo_idx) 61 | assert_array_less(self.dpc.halo_idx, self.npoints) 62 | def test_core_idx(self): 63 | assert_array_less(-1, self.dpc.core_idx) 64 | assert_array_less(self.dpc.core_idx, self.npoints) 65 | -------------------------------------------------------------------------------- /versioneer.py: -------------------------------------------------------------------------------- 1 | 2 | # Version: 0.15 3 | 4 | """ 5 | The Versioneer 6 | ============== 7 | 8 | * like a rocketeer, but for versions! 9 | * https://github.com/warner/python-versioneer 10 | * Brian Warner 11 | * License: Public Domain 12 | * Compatible With: python2.6, 2.7, 3.2, 3.3, 3.4, and pypy 13 | * [![Latest Version] 14 | (https://pypip.in/version/versioneer/badge.svg?style=flat) 15 | ](https://pypi.python.org/pypi/versioneer/) 16 | * [![Build Status] 17 | (https://travis-ci.org/warner/python-versioneer.png?branch=master) 18 | ](https://travis-ci.org/warner/python-versioneer) 19 | 20 | This is a tool for managing a recorded version number in distutils-based 21 | python projects. The goal is to remove the tedious and error-prone "update 22 | the embedded version string" step from your release process. Making a new 23 | release should be as easy as recording a new tag in your version-control 24 | system, and maybe making new tarballs. 
25 | 26 | 27 | ## Quick Install 28 | 29 | * `pip install versioneer` to somewhere to your $PATH 30 | * add a `[versioneer]` section to your setup.cfg (see below) 31 | * run `versioneer install` in your source tree, commit the results 32 | 33 | ## Version Identifiers 34 | 35 | Source trees come from a variety of places: 36 | 37 | * a version-control system checkout (mostly used by developers) 38 | * a nightly tarball, produced by build automation 39 | * a snapshot tarball, produced by a web-based VCS browser, like github's 40 | "tarball from tag" feature 41 | * a release tarball, produced by "setup.py sdist", distributed through PyPI 42 | 43 | Within each source tree, the version identifier (either a string or a number, 44 | this tool is format-agnostic) can come from a variety of places: 45 | 46 | * ask the VCS tool itself, e.g. "git describe" (for checkouts), which knows 47 | about recent "tags" and an absolute revision-id 48 | * the name of the directory into which the tarball was unpacked 49 | * an expanded VCS keyword ($Id$, etc) 50 | * a `_version.py` created by some earlier build step 51 | 52 | For released software, the version identifier is closely related to a VCS 53 | tag. Some projects use tag names that include more than just the version 54 | string (e.g. "myproject-1.2" instead of just "1.2"), in which case the tool 55 | needs to strip the tag prefix to extract the version identifier. For 56 | unreleased software (between tags), the version identifier should provide 57 | enough information to help developers recreate the same tree, while also 58 | giving them an idea of roughly how old the tree is (after version 1.2, before 59 | version 1.3). Many VCS systems can report a description that captures this, 60 | for example `git describe --tags --dirty --always` reports things like 61 | "0.7-1-g574ab98-dirty" to indicate that the checkout is one revision past the 62 | 0.7 tag, has a unique revision id of "574ab98", and is "dirty" (it has 63 | uncommitted changes. 64 | 65 | The version identifier is used for multiple purposes: 66 | 67 | * to allow the module to self-identify its version: `myproject.__version__` 68 | * to choose a name and prefix for a 'setup.py sdist' tarball 69 | 70 | ## Theory of Operation 71 | 72 | Versioneer works by adding a special `_version.py` file into your source 73 | tree, where your `__init__.py` can import it. This `_version.py` knows how to 74 | dynamically ask the VCS tool for version information at import time. 75 | 76 | `_version.py` also contains `$Revision$` markers, and the installation 77 | process marks `_version.py` to have this marker rewritten with a tag name 78 | during the `git archive` command. As a result, generated tarballs will 79 | contain enough information to get the proper version. 80 | 81 | To allow `setup.py` to compute a version too, a `versioneer.py` is added to 82 | the top level of your source tree, next to `setup.py` and the `setup.cfg` 83 | that configures it. This overrides several distutils/setuptools commands to 84 | compute the version when invoked, and changes `setup.py build` and `setup.py 85 | sdist` to replace `_version.py` with a small static file that contains just 86 | the generated version data. 87 | 88 | ## Installation 89 | 90 | First, decide on values for the following configuration variables: 91 | 92 | * `VCS`: the version control system you use. Currently accepts "git". 93 | 94 | * `style`: the style of version string to be produced. See "Styles" below for 95 | details. 
Defaults to "pep440", which looks like 96 | `TAG[+DISTANCE.gSHORTHASH[.dirty]]`. 97 | 98 | * `versionfile_source`: 99 | 100 | A project-relative pathname into which the generated version strings should 101 | be written. This is usually a `_version.py` next to your project's main 102 | `__init__.py` file, so it can be imported at runtime. If your project uses 103 | `src/myproject/__init__.py`, this should be `src/myproject/_version.py`. 104 | This file should be checked in to your VCS as usual: the copy created below 105 | by `setup.py setup_versioneer` will include code that parses expanded VCS 106 | keywords in generated tarballs. The 'build' and 'sdist' commands will 107 | replace it with a copy that has just the calculated version string. 108 | 109 | This must be set even if your project does not have any modules (and will 110 | therefore never import `_version.py`), since "setup.py sdist" -based trees 111 | still need somewhere to record the pre-calculated version strings. Anywhere 112 | in the source tree should do. If there is a `__init__.py` next to your 113 | `_version.py`, the `setup.py setup_versioneer` command (described below) 114 | will append some `__version__`-setting assignments, if they aren't already 115 | present. 116 | 117 | * `versionfile_build`: 118 | 119 | Like `versionfile_source`, but relative to the build directory instead of 120 | the source directory. These will differ when your setup.py uses 121 | 'package_dir='. If you have `package_dir={'myproject': 'src/myproject'}`, 122 | then you will probably have `versionfile_build='myproject/_version.py'` and 123 | `versionfile_source='src/myproject/_version.py'`. 124 | 125 | If this is set to None, then `setup.py build` will not attempt to rewrite 126 | any `_version.py` in the built tree. If your project does not have any 127 | libraries (e.g. if it only builds a script), then you should use 128 | `versionfile_build = None` and override `distutils.command.build_scripts` 129 | to explicitly insert a copy of `versioneer.get_version()` into your 130 | generated script. 131 | 132 | * `tag_prefix`: 133 | 134 | a string, like 'PROJECTNAME-', which appears at the start of all VCS tags. 135 | If your tags look like 'myproject-1.2.0', then you should use 136 | tag_prefix='myproject-'. If you use unprefixed tags like '1.2.0', this 137 | should be an empty string. 138 | 139 | * `parentdir_prefix`: 140 | 141 | a optional string, frequently the same as tag_prefix, which appears at the 142 | start of all unpacked tarball filenames. If your tarball unpacks into 143 | 'myproject-1.2.0', this should be 'myproject-'. To disable this feature, 144 | just omit the field from your `setup.cfg`. 145 | 146 | This tool provides one script, named `versioneer`. That script has one mode, 147 | "install", which writes a copy of `versioneer.py` into the current directory 148 | and runs `versioneer.py setup` to finish the installation. 149 | 150 | To versioneer-enable your project: 151 | 152 | * 1: Modify your `setup.cfg`, adding a section named `[versioneer]` and 153 | populating it with the configuration values you decided earlier (note that 154 | the option names are not case-sensitive): 155 | 156 | ```` 157 | [versioneer] 158 | VCS = git 159 | style = pep440 160 | versionfile_source = src/myproject/_version.py 161 | versionfile_build = myproject/_version.py 162 | tag_prefix = "" 163 | parentdir_prefix = myproject- 164 | ```` 165 | 166 | * 2: Run `versioneer install`. 
This will do the following: 167 | 168 | * copy `versioneer.py` into the top of your source tree 169 | * create `_version.py` in the right place (`versionfile_source`) 170 | * modify your `__init__.py` (if one exists next to `_version.py`) to define 171 | `__version__` (by calling a function from `_version.py`) 172 | * modify your `MANIFEST.in` to include both `versioneer.py` and the 173 | generated `_version.py` in sdist tarballs 174 | 175 | `versioneer install` will complain about any problems it finds with your 176 | `setup.py` or `setup.cfg`. Run it multiple times until you have fixed all 177 | the problems. 178 | 179 | * 3: add a `import versioneer` to your setup.py, and add the following 180 | arguments to the setup() call: 181 | 182 | version=versioneer.get_version(), 183 | cmdclass=versioneer.get_cmdclass(), 184 | 185 | * 4: commit these changes to your VCS. To make sure you won't forget, 186 | `versioneer install` will mark everything it touched for addition using 187 | `git add`. Don't forget to add `setup.py` and `setup.cfg` too. 188 | 189 | ## Post-Installation Usage 190 | 191 | Once established, all uses of your tree from a VCS checkout should get the 192 | current version string. All generated tarballs should include an embedded 193 | version string (so users who unpack them will not need a VCS tool installed). 194 | 195 | If you distribute your project through PyPI, then the release process should 196 | boil down to two steps: 197 | 198 | * 1: git tag 1.0 199 | * 2: python setup.py register sdist upload 200 | 201 | If you distribute it through github (i.e. users use github to generate 202 | tarballs with `git archive`), the process is: 203 | 204 | * 1: git tag 1.0 205 | * 2: git push; git push --tags 206 | 207 | Versioneer will report "0+untagged.NUMCOMMITS.gHASH" until your tree has at 208 | least one tag in its history. 209 | 210 | ## Version-String Flavors 211 | 212 | Code which uses Versioneer can learn about its version string at runtime by 213 | importing `_version` from your main `__init__.py` file and running the 214 | `get_versions()` function. From the "outside" (e.g. in `setup.py`), you can 215 | import the top-level `versioneer.py` and run `get_versions()`. 216 | 217 | Both functions return a dictionary with different flavors of version 218 | information: 219 | 220 | * `['version']`: A condensed version string, rendered using the selected 221 | style. This is the most commonly used value for the project's version 222 | string. The default "pep440" style yields strings like `0.11`, 223 | `0.11+2.g1076c97`, or `0.11+2.g1076c97.dirty`. See the "Styles" section 224 | below for alternative styles. 225 | 226 | * `['full-revisionid']`: detailed revision identifier. For Git, this is the 227 | full SHA1 commit id, e.g. "1076c978a8d3cfc70f408fe5974aa6c092c949ac". 228 | 229 | * `['dirty']`: a boolean, True if the tree has uncommitted changes. Note that 230 | this is only accurate if run in a VCS checkout, otherwise it is likely to 231 | be False or None 232 | 233 | * `['error']`: if the version string could not be computed, this will be set 234 | to a string describing the problem, otherwise it will be None. It may be 235 | useful to throw an exception in setup.py if this is set, to avoid e.g. 236 | creating tarballs with a version string of "unknown". 237 | 238 | Some variants are more useful than others. 
Including `full-revisionid` in a 239 | bug report should allow developers to reconstruct the exact code being tested 240 | (or indicate the presence of local changes that should be shared with the 241 | developers). `version` is suitable for display in an "about" box or a CLI 242 | `--version` output: it can be easily compared against release notes and lists 243 | of bugs fixed in various releases. 244 | 245 | The installer adds the following text to your `__init__.py` to place a basic 246 | version in `YOURPROJECT.__version__`: 247 | 248 | from ._version import get_versions 249 | __version__ = get_versions()['version'] 250 | del get_versions 251 | 252 | ## Styles 253 | 254 | The setup.cfg `style=` configuration controls how the VCS information is 255 | rendered into a version string. 256 | 257 | The default style, "pep440", produces a PEP440-compliant string, equal to the 258 | un-prefixed tag name for actual releases, and containing an additional "local 259 | version" section with more detail for in-between builds. For Git, this is 260 | TAG[+DISTANCE.gHEX[.dirty]] , using information from `git describe --tags 261 | --dirty --always`. For example "0.11+2.g1076c97.dirty" indicates that the 262 | tree is like the "1076c97" commit but has uncommitted changes (".dirty"), and 263 | that this commit is two revisions ("+2") beyond the "0.11" tag. For released 264 | software (exactly equal to a known tag), the identifier will only contain the 265 | stripped tag, e.g. "0.11". 266 | 267 | Other styles are available. See details.md in the Versioneer source tree for 268 | descriptions. 269 | 270 | ## Debugging 271 | 272 | Versioneer tries to avoid fatal errors: if something goes wrong, it will tend 273 | to return a version of "0+unknown". To investigate the problem, run `setup.py 274 | version`, which will run the version-lookup code in a verbose mode, and will 275 | display the full contents of `get_versions()` (including the `error` string, 276 | which may help identify what went wrong). 277 | 278 | ## Updating Versioneer 279 | 280 | To upgrade your project to a new release of Versioneer, do the following: 281 | 282 | * install the new Versioneer (`pip install -U versioneer` or equivalent) 283 | * edit `setup.cfg`, if necessary, to include any new configuration settings 284 | indicated by the release notes 285 | * re-run `versioneer install` in your source tree, to replace 286 | `SRC/_version.py` 287 | * commit any changed files 288 | 289 | ### Upgrading to 0.15 290 | 291 | Starting with this version, Versioneer is configured with a `[versioneer]` 292 | section in your `setup.cfg` file. Earlier versions required the `setup.py` to 293 | set attributes on the `versioneer` module immediately after import. The new 294 | version will refuse to run (raising an exception during import) until you 295 | have provided the necessary `setup.cfg` section. 296 | 297 | In addition, the Versioneer package provides an executable named 298 | `versioneer`, and the installation process is driven by running `versioneer 299 | install`. In 0.14 and earlier, the executable was named 300 | `versioneer-installer` and was run without an argument. 301 | 302 | ### Upgrading to 0.14 303 | 304 | 0.14 changes the format of the version string. 0.13 and earlier used 305 | hyphen-separated strings like "0.11-2-g1076c97-dirty". 0.14 and beyond use a 306 | plus-separated "local version" section strings, with dot-separated 307 | components, like "0.11+2.g1076c97". 
PEP440-strict tools did not like the old 308 | format, but should be ok with the new one. 309 | 310 | ### Upgrading from 0.11 to 0.12 311 | 312 | Nothing special. 313 | 314 | ### Upgrading from 0.10 to 0.11 315 | 316 | You must add a `versioneer.VCS = "git"` to your `setup.py` before re-running 317 | `setup.py setup_versioneer`. This will enable the use of additional 318 | version-control systems (SVN, etc) in the future. 319 | 320 | ## Future Directions 321 | 322 | This tool is designed to make it easily extended to other version-control 323 | systems: all VCS-specific components are in separate directories like 324 | src/git/ . The top-level `versioneer.py` script is assembled from these 325 | components by running make-versioneer.py . In the future, make-versioneer.py 326 | will take a VCS name as an argument, and will construct a version of 327 | `versioneer.py` that is specific to the given VCS. It might also take the 328 | configuration arguments that are currently provided manually during 329 | installation by editing setup.py . Alternatively, it might go the other 330 | direction and include code from all supported VCS systems, reducing the 331 | number of intermediate scripts. 332 | 333 | 334 | ## License 335 | 336 | To make Versioneer easier to embed, all its code is hereby released into the 337 | public domain. The `_version.py` that it creates is also in the public 338 | domain. 339 | 340 | """ 341 | 342 | from __future__ import print_function 343 | try: 344 | import configparser 345 | except ImportError: 346 | import ConfigParser as configparser 347 | import errno 348 | import json 349 | import os 350 | import re 351 | import subprocess 352 | import sys 353 | 354 | 355 | class VersioneerConfig: 356 | pass 357 | 358 | 359 | def get_root(): 360 | # we require that all commands are run from the project root, i.e. the 361 | # directory that contains setup.py, setup.cfg, and versioneer.py . 362 | root = os.path.realpath(os.path.abspath(os.getcwd())) 363 | setup_py = os.path.join(root, "setup.py") 364 | versioneer_py = os.path.join(root, "versioneer.py") 365 | if not (os.path.exists(setup_py) or os.path.exists(versioneer_py)): 366 | # allow 'python path/to/setup.py COMMAND' 367 | root = os.path.dirname(os.path.realpath(os.path.abspath(sys.argv[0]))) 368 | setup_py = os.path.join(root, "setup.py") 369 | versioneer_py = os.path.join(root, "versioneer.py") 370 | if not (os.path.exists(setup_py) or os.path.exists(versioneer_py)): 371 | err = ("Versioneer was unable to run the project root directory. " 372 | "Versioneer requires setup.py to be executed from " 373 | "its immediate directory (like 'python setup.py COMMAND'), " 374 | "or in a way that lets it use sys.argv[0] to find the root " 375 | "(like 'python path/to/setup.py COMMAND').") 376 | raise VersioneerBadRootError(err) 377 | try: 378 | # Certain runtime workflows (setup.py install/develop in a setuptools 379 | # tree) execute all dependencies in a single python process, so 380 | # "versioneer" may be imported multiple times, and python's shared 381 | # module-import table will cache the first one. So we can't use 382 | # os.path.dirname(__file__), as that will find whichever 383 | # versioneer.py was first imported, even in later projects. 
384 | me = os.path.realpath(os.path.abspath(__file__)) 385 | if os.path.splitext(me)[0] != os.path.splitext(versioneer_py)[0]: 386 | print("Warning: build in %s is using versioneer.py from %s" 387 | % (os.path.dirname(me), versioneer_py)) 388 | except NameError: 389 | pass 390 | return root 391 | 392 | 393 | def get_config_from_root(root): 394 | # This might raise EnvironmentError (if setup.cfg is missing), or 395 | # configparser.NoSectionError (if it lacks a [versioneer] section), or 396 | # configparser.NoOptionError (if it lacks "VCS="). See the docstring at 397 | # the top of versioneer.py for instructions on writing your setup.cfg . 398 | setup_cfg = os.path.join(root, "setup.cfg") 399 | parser = configparser.SafeConfigParser() 400 | with open(setup_cfg, "r") as f: 401 | parser.readfp(f) 402 | VCS = parser.get("versioneer", "VCS") # mandatory 403 | 404 | def get(parser, name): 405 | if parser.has_option("versioneer", name): 406 | return parser.get("versioneer", name) 407 | return None 408 | cfg = VersioneerConfig() 409 | cfg.VCS = VCS 410 | cfg.style = get(parser, "style") or "" 411 | cfg.versionfile_source = get(parser, "versionfile_source") 412 | cfg.versionfile_build = get(parser, "versionfile_build") 413 | cfg.tag_prefix = get(parser, "tag_prefix") 414 | cfg.parentdir_prefix = get(parser, "parentdir_prefix") 415 | cfg.verbose = get(parser, "verbose") 416 | return cfg 417 | 418 | 419 | class NotThisMethod(Exception): 420 | pass 421 | 422 | # these dictionaries contain VCS-specific tools 423 | LONG_VERSION_PY = {} 424 | HANDLERS = {} 425 | 426 | 427 | def register_vcs_handler(vcs, method): # decorator 428 | def decorate(f): 429 | if vcs not in HANDLERS: 430 | HANDLERS[vcs] = {} 431 | HANDLERS[vcs][method] = f 432 | return f 433 | return decorate 434 | 435 | 436 | def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False): 437 | assert isinstance(commands, list) 438 | p = None 439 | for c in commands: 440 | try: 441 | dispcmd = str([c] + args) 442 | # remember shell=False, so use git.cmd on windows, not just git 443 | p = subprocess.Popen([c] + args, cwd=cwd, stdout=subprocess.PIPE, 444 | stderr=(subprocess.PIPE if hide_stderr 445 | else None)) 446 | break 447 | except EnvironmentError: 448 | e = sys.exc_info()[1] 449 | if e.errno == errno.ENOENT: 450 | continue 451 | if verbose: 452 | print("unable to run %s" % dispcmd) 453 | print(e) 454 | return None 455 | else: 456 | if verbose: 457 | print("unable to find command, tried %s" % (commands,)) 458 | return None 459 | stdout = p.communicate()[0].strip() 460 | if sys.version_info[0] >= 3: 461 | stdout = stdout.decode() 462 | if p.returncode != 0: 463 | if verbose: 464 | print("unable to run %s (error)" % dispcmd) 465 | return None 466 | return stdout 467 | LONG_VERSION_PY['git'] = ''' 468 | # This file helps to compute a version number in source trees obtained from 469 | # git-archive tarball (such as those provided by githubs download-from-tag 470 | # feature). Distribution tarballs (built by setup.py sdist) and build 471 | # directories (produced by setup.py build) will contain a much shorter file 472 | # that just contains the computed version number. 473 | 474 | # This file is released into the public domain. Generated by 475 | # versioneer-0.15 (https://github.com/warner/python-versioneer) 476 | 477 | import errno 478 | import os 479 | import re 480 | import subprocess 481 | import sys 482 | 483 | 484 | def get_keywords(): 485 | # these strings will be replaced by git during git-archive. 
486 | # setup.py/versioneer.py will grep for the variable names, so they must 487 | # each be defined on a line of their own. _version.py will just call 488 | # get_keywords(). 489 | git_refnames = "%(DOLLAR)sFormat:%%d%(DOLLAR)s" 490 | git_full = "%(DOLLAR)sFormat:%%H%(DOLLAR)s" 491 | keywords = {"refnames": git_refnames, "full": git_full} 492 | return keywords 493 | 494 | 495 | class VersioneerConfig: 496 | pass 497 | 498 | 499 | def get_config(): 500 | # these strings are filled in when 'setup.py versioneer' creates 501 | # _version.py 502 | cfg = VersioneerConfig() 503 | cfg.VCS = "git" 504 | cfg.style = "%(STYLE)s" 505 | cfg.tag_prefix = "%(TAG_PREFIX)s" 506 | cfg.parentdir_prefix = "%(PARENTDIR_PREFIX)s" 507 | cfg.versionfile_source = "%(VERSIONFILE_SOURCE)s" 508 | cfg.verbose = False 509 | return cfg 510 | 511 | 512 | class NotThisMethod(Exception): 513 | pass 514 | 515 | 516 | LONG_VERSION_PY = {} 517 | HANDLERS = {} 518 | 519 | 520 | def register_vcs_handler(vcs, method): # decorator 521 | def decorate(f): 522 | if vcs not in HANDLERS: 523 | HANDLERS[vcs] = {} 524 | HANDLERS[vcs][method] = f 525 | return f 526 | return decorate 527 | 528 | 529 | def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False): 530 | assert isinstance(commands, list) 531 | p = None 532 | for c in commands: 533 | try: 534 | dispcmd = str([c] + args) 535 | # remember shell=False, so use git.cmd on windows, not just git 536 | p = subprocess.Popen([c] + args, cwd=cwd, stdout=subprocess.PIPE, 537 | stderr=(subprocess.PIPE if hide_stderr 538 | else None)) 539 | break 540 | except EnvironmentError: 541 | e = sys.exc_info()[1] 542 | if e.errno == errno.ENOENT: 543 | continue 544 | if verbose: 545 | print("unable to run %%s" %% dispcmd) 546 | print(e) 547 | return None 548 | else: 549 | if verbose: 550 | print("unable to find command, tried %%s" %% (commands,)) 551 | return None 552 | stdout = p.communicate()[0].strip() 553 | if sys.version_info[0] >= 3: 554 | stdout = stdout.decode() 555 | if p.returncode != 0: 556 | if verbose: 557 | print("unable to run %%s (error)" %% dispcmd) 558 | return None 559 | return stdout 560 | 561 | 562 | def versions_from_parentdir(parentdir_prefix, root, verbose): 563 | # Source tarballs conventionally unpack into a directory that includes 564 | # both the project name and a version string. 565 | dirname = os.path.basename(root) 566 | if not dirname.startswith(parentdir_prefix): 567 | if verbose: 568 | print("guessing rootdir is '%%s', but '%%s' doesn't start with " 569 | "prefix '%%s'" %% (root, dirname, parentdir_prefix)) 570 | raise NotThisMethod("rootdir doesn't start with parentdir_prefix") 571 | return {"version": dirname[len(parentdir_prefix):], 572 | "full-revisionid": None, 573 | "dirty": False, "error": None} 574 | 575 | 576 | @register_vcs_handler("git", "get_keywords") 577 | def git_get_keywords(versionfile_abs): 578 | # the code embedded in _version.py can just fetch the value of these 579 | # keywords. When used from setup.py, we don't want to import _version.py, 580 | # so we do it with a regexp instead. This function is not used from 581 | # _version.py. 
582 | keywords = {} 583 | try: 584 | f = open(versionfile_abs, "r") 585 | for line in f.readlines(): 586 | if line.strip().startswith("git_refnames ="): 587 | mo = re.search(r'=\s*"(.*)"', line) 588 | if mo: 589 | keywords["refnames"] = mo.group(1) 590 | if line.strip().startswith("git_full ="): 591 | mo = re.search(r'=\s*"(.*)"', line) 592 | if mo: 593 | keywords["full"] = mo.group(1) 594 | f.close() 595 | except EnvironmentError: 596 | pass 597 | return keywords 598 | 599 | 600 | @register_vcs_handler("git", "keywords") 601 | def git_versions_from_keywords(keywords, tag_prefix, verbose): 602 | if not keywords: 603 | raise NotThisMethod("no keywords at all, weird") 604 | refnames = keywords["refnames"].strip() 605 | if refnames.startswith("$Format"): 606 | if verbose: 607 | print("keywords are unexpanded, not using") 608 | raise NotThisMethod("unexpanded keywords, not a git-archive tarball") 609 | refs = set([r.strip() for r in refnames.strip("()").split(",")]) 610 | # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of 611 | # just "foo-1.0". If we see a "tag: " prefix, prefer those. 612 | TAG = "tag: " 613 | tags = set([r[len(TAG):] for r in refs if r.startswith(TAG)]) 614 | if not tags: 615 | # Either we're using git < 1.8.3, or there really are no tags. We use 616 | # a heuristic: assume all version tags have a digit. The old git %%d 617 | # expansion behaves like git log --decorate=short and strips out the 618 | # refs/heads/ and refs/tags/ prefixes that would let us distinguish 619 | # between branches and tags. By ignoring refnames without digits, we 620 | # filter out many common branch names like "release" and 621 | # "stabilization", as well as "HEAD" and "master". 622 | tags = set([r for r in refs if re.search(r'\d', r)]) 623 | if verbose: 624 | print("discarding '%%s', no digits" %% ",".join(refs-tags)) 625 | if verbose: 626 | print("likely tags: %%s" %% ",".join(sorted(tags))) 627 | for ref in sorted(tags): 628 | # sorting will prefer e.g. "2.0" over "2.0rc1" 629 | if ref.startswith(tag_prefix): 630 | r = ref[len(tag_prefix):] 631 | if verbose: 632 | print("picking %%s" %% r) 633 | return {"version": r, 634 | "full-revisionid": keywords["full"].strip(), 635 | "dirty": False, "error": None 636 | } 637 | # no suitable tags, so version is "0+unknown", but full hex is still there 638 | if verbose: 639 | print("no suitable tags, using unknown + full revision id") 640 | return {"version": "0+unknown", 641 | "full-revisionid": keywords["full"].strip(), 642 | "dirty": False, "error": "no suitable tags"} 643 | 644 | 645 | @register_vcs_handler("git", "pieces_from_vcs") 646 | def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command): 647 | # this runs 'git' from the root of the source tree. This only gets called 648 | # if the git-archive 'subst' keywords were *not* expanded, and 649 | # _version.py hasn't already been rewritten with a short version string, 650 | # meaning we're inside a checked out source tree. 
651 | 652 | if not os.path.exists(os.path.join(root, ".git")): 653 | if verbose: 654 | print("no .git in %%s" %% root) 655 | raise NotThisMethod("no .git directory") 656 | 657 | GITS = ["git"] 658 | if sys.platform == "win32": 659 | GITS = ["git.cmd", "git.exe"] 660 | # if there is a tag, this yields TAG-NUM-gHEX[-dirty] 661 | # if there are no tags, this yields HEX[-dirty] (no NUM) 662 | describe_out = run_command(GITS, ["describe", "--tags", "--dirty", 663 | "--always", "--long"], 664 | cwd=root) 665 | # --long was added in git-1.5.5 666 | if describe_out is None: 667 | raise NotThisMethod("'git describe' failed") 668 | describe_out = describe_out.strip() 669 | full_out = run_command(GITS, ["rev-parse", "HEAD"], cwd=root) 670 | if full_out is None: 671 | raise NotThisMethod("'git rev-parse' failed") 672 | full_out = full_out.strip() 673 | 674 | pieces = {} 675 | pieces["long"] = full_out 676 | pieces["short"] = full_out[:7] # maybe improved later 677 | pieces["error"] = None 678 | 679 | # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty] 680 | # TAG might have hyphens. 681 | git_describe = describe_out 682 | 683 | # look for -dirty suffix 684 | dirty = git_describe.endswith("-dirty") 685 | pieces["dirty"] = dirty 686 | if dirty: 687 | git_describe = git_describe[:git_describe.rindex("-dirty")] 688 | 689 | # now we have TAG-NUM-gHEX or HEX 690 | 691 | if "-" in git_describe: 692 | # TAG-NUM-gHEX 693 | mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe) 694 | if not mo: 695 | # unparseable. Maybe git-describe is misbehaving? 696 | pieces["error"] = ("unable to parse git-describe output: '%%s'" 697 | %% describe_out) 698 | return pieces 699 | 700 | # tag 701 | full_tag = mo.group(1) 702 | if not full_tag.startswith(tag_prefix): 703 | if verbose: 704 | fmt = "tag '%%s' doesn't start with prefix '%%s'" 705 | print(fmt %% (full_tag, tag_prefix)) 706 | pieces["error"] = ("tag '%%s' doesn't start with prefix '%%s'" 707 | %% (full_tag, tag_prefix)) 708 | return pieces 709 | pieces["closest-tag"] = full_tag[len(tag_prefix):] 710 | 711 | # distance: number of commits since tag 712 | pieces["distance"] = int(mo.group(2)) 713 | 714 | # commit: short hex revision ID 715 | pieces["short"] = mo.group(3) 716 | 717 | else: 718 | # HEX: no tags 719 | pieces["closest-tag"] = None 720 | count_out = run_command(GITS, ["rev-list", "HEAD", "--count"], 721 | cwd=root) 722 | pieces["distance"] = int(count_out) # total number of commits 723 | 724 | return pieces 725 | 726 | 727 | def plus_or_dot(pieces): 728 | if "+" in pieces.get("closest-tag", ""): 729 | return "." 730 | return "+" 731 | 732 | 733 | def render_pep440(pieces): 734 | # now build up version string, with post-release "local version 735 | # identifier". Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you 736 | # get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty 737 | 738 | # exceptions: 739 | # 1: no tags. git_describe was just HEX. 
0+untagged.DISTANCE.gHEX[.dirty] 740 | 741 | if pieces["closest-tag"]: 742 | rendered = pieces["closest-tag"] 743 | if pieces["distance"] or pieces["dirty"]: 744 | rendered += plus_or_dot(pieces) 745 | rendered += "%%d.g%%s" %% (pieces["distance"], pieces["short"]) 746 | if pieces["dirty"]: 747 | rendered += ".dirty" 748 | else: 749 | # exception #1 750 | rendered = "0+untagged.%%d.g%%s" %% (pieces["distance"], 751 | pieces["short"]) 752 | if pieces["dirty"]: 753 | rendered += ".dirty" 754 | return rendered 755 | 756 | 757 | def render_pep440_pre(pieces): 758 | # TAG[.post.devDISTANCE] . No -dirty 759 | 760 | # exceptions: 761 | # 1: no tags. 0.post.devDISTANCE 762 | 763 | if pieces["closest-tag"]: 764 | rendered = pieces["closest-tag"] 765 | if pieces["distance"]: 766 | rendered += ".post.dev%%d" %% pieces["distance"] 767 | else: 768 | # exception #1 769 | rendered = "0.post.dev%%d" %% pieces["distance"] 770 | return rendered 771 | 772 | 773 | def render_pep440_post(pieces): 774 | # TAG[.postDISTANCE[.dev0]+gHEX] . The ".dev0" means dirty. Note that 775 | # .dev0 sorts backwards (a dirty tree will appear "older" than the 776 | # corresponding clean one), but you shouldn't be releasing software with 777 | # -dirty anyways. 778 | 779 | # exceptions: 780 | # 1: no tags. 0.postDISTANCE[.dev0] 781 | 782 | if pieces["closest-tag"]: 783 | rendered = pieces["closest-tag"] 784 | if pieces["distance"] or pieces["dirty"]: 785 | rendered += ".post%%d" %% pieces["distance"] 786 | if pieces["dirty"]: 787 | rendered += ".dev0" 788 | rendered += plus_or_dot(pieces) 789 | rendered += "g%%s" %% pieces["short"] 790 | else: 791 | # exception #1 792 | rendered = "0.post%%d" %% pieces["distance"] 793 | if pieces["dirty"]: 794 | rendered += ".dev0" 795 | rendered += "+g%%s" %% pieces["short"] 796 | return rendered 797 | 798 | 799 | def render_pep440_old(pieces): 800 | # TAG[.postDISTANCE[.dev0]] . The ".dev0" means dirty. 801 | 802 | # exceptions: 803 | # 1: no tags. 0.postDISTANCE[.dev0] 804 | 805 | if pieces["closest-tag"]: 806 | rendered = pieces["closest-tag"] 807 | if pieces["distance"] or pieces["dirty"]: 808 | rendered += ".post%%d" %% pieces["distance"] 809 | if pieces["dirty"]: 810 | rendered += ".dev0" 811 | else: 812 | # exception #1 813 | rendered = "0.post%%d" %% pieces["distance"] 814 | if pieces["dirty"]: 815 | rendered += ".dev0" 816 | return rendered 817 | 818 | 819 | def render_git_describe(pieces): 820 | # TAG[-DISTANCE-gHEX][-dirty], like 'git describe --tags --dirty 821 | # --always' 822 | 823 | # exceptions: 824 | # 1: no tags. HEX[-dirty] (note: no 'g' prefix) 825 | 826 | if pieces["closest-tag"]: 827 | rendered = pieces["closest-tag"] 828 | if pieces["distance"]: 829 | rendered += "-%%d-g%%s" %% (pieces["distance"], pieces["short"]) 830 | else: 831 | # exception #1 832 | rendered = pieces["short"] 833 | if pieces["dirty"]: 834 | rendered += "-dirty" 835 | return rendered 836 | 837 | 838 | def render_git_describe_long(pieces): 839 | # TAG-DISTANCE-gHEX[-dirty], like 'git describe --tags --dirty 840 | # --always -long'. The distance/hash is unconditional. 841 | 842 | # exceptions: 843 | # 1: no tags. 
HEX[-dirty] (note: no 'g' prefix) 844 | 845 | if pieces["closest-tag"]: 846 | rendered = pieces["closest-tag"] 847 | rendered += "-%%d-g%%s" %% (pieces["distance"], pieces["short"]) 848 | else: 849 | # exception #1 850 | rendered = pieces["short"] 851 | if pieces["dirty"]: 852 | rendered += "-dirty" 853 | return rendered 854 | 855 | 856 | def render(pieces, style): 857 | if pieces["error"]: 858 | return {"version": "unknown", 859 | "full-revisionid": pieces.get("long"), 860 | "dirty": None, 861 | "error": pieces["error"]} 862 | 863 | if not style or style == "default": 864 | style = "pep440" # the default 865 | 866 | if style == "pep440": 867 | rendered = render_pep440(pieces) 868 | elif style == "pep440-pre": 869 | rendered = render_pep440_pre(pieces) 870 | elif style == "pep440-post": 871 | rendered = render_pep440_post(pieces) 872 | elif style == "pep440-old": 873 | rendered = render_pep440_old(pieces) 874 | elif style == "git-describe": 875 | rendered = render_git_describe(pieces) 876 | elif style == "git-describe-long": 877 | rendered = render_git_describe_long(pieces) 878 | else: 879 | raise ValueError("unknown style '%%s'" %% style) 880 | 881 | return {"version": rendered, "full-revisionid": pieces["long"], 882 | "dirty": pieces["dirty"], "error": None} 883 | 884 | 885 | def get_versions(): 886 | # I am in _version.py, which lives at ROOT/VERSIONFILE_SOURCE. If we have 887 | # __file__, we can work backwards from there to the root. Some 888 | # py2exe/bbfreeze/non-CPython implementations don't do __file__, in which 889 | # case we can only use expanded keywords. 890 | 891 | cfg = get_config() 892 | verbose = cfg.verbose 893 | 894 | try: 895 | return git_versions_from_keywords(get_keywords(), cfg.tag_prefix, 896 | verbose) 897 | except NotThisMethod: 898 | pass 899 | 900 | try: 901 | root = os.path.realpath(__file__) 902 | # versionfile_source is the relative path from the top of the source 903 | # tree (where the .git directory might live) to this file. Invert 904 | # this to find the root from __file__. 905 | for i in cfg.versionfile_source.split('/'): 906 | root = os.path.dirname(root) 907 | except NameError: 908 | return {"version": "0+unknown", "full-revisionid": None, 909 | "dirty": None, 910 | "error": "unable to find root of source tree"} 911 | 912 | try: 913 | pieces = git_pieces_from_vcs(cfg.tag_prefix, root, verbose) 914 | return render(pieces, cfg.style) 915 | except NotThisMethod: 916 | pass 917 | 918 | try: 919 | if cfg.parentdir_prefix: 920 | return versions_from_parentdir(cfg.parentdir_prefix, root, verbose) 921 | except NotThisMethod: 922 | pass 923 | 924 | return {"version": "0+unknown", "full-revisionid": None, 925 | "dirty": None, 926 | "error": "unable to compute version"} 927 | ''' 928 | 929 | 930 | @register_vcs_handler("git", "get_keywords") 931 | def git_get_keywords(versionfile_abs): 932 | # the code embedded in _version.py can just fetch the value of these 933 | # keywords. When used from setup.py, we don't want to import _version.py, 934 | # so we do it with a regexp instead. This function is not used from 935 | # _version.py. 
936 | keywords = {} 937 | try: 938 | f = open(versionfile_abs, "r") 939 | for line in f.readlines(): 940 | if line.strip().startswith("git_refnames ="): 941 | mo = re.search(r'=\s*"(.*)"', line) 942 | if mo: 943 | keywords["refnames"] = mo.group(1) 944 | if line.strip().startswith("git_full ="): 945 | mo = re.search(r'=\s*"(.*)"', line) 946 | if mo: 947 | keywords["full"] = mo.group(1) 948 | f.close() 949 | except EnvironmentError: 950 | pass 951 | return keywords 952 | 953 | 954 | @register_vcs_handler("git", "keywords") 955 | def git_versions_from_keywords(keywords, tag_prefix, verbose): 956 | if not keywords: 957 | raise NotThisMethod("no keywords at all, weird") 958 | refnames = keywords["refnames"].strip() 959 | if refnames.startswith("$Format"): 960 | if verbose: 961 | print("keywords are unexpanded, not using") 962 | raise NotThisMethod("unexpanded keywords, not a git-archive tarball") 963 | refs = set([r.strip() for r in refnames.strip("()").split(",")]) 964 | # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of 965 | # just "foo-1.0". If we see a "tag: " prefix, prefer those. 966 | TAG = "tag: " 967 | tags = set([r[len(TAG):] for r in refs if r.startswith(TAG)]) 968 | if not tags: 969 | # Either we're using git < 1.8.3, or there really are no tags. We use 970 | # a heuristic: assume all version tags have a digit. The old git %d 971 | # expansion behaves like git log --decorate=short and strips out the 972 | # refs/heads/ and refs/tags/ prefixes that would let us distinguish 973 | # between branches and tags. By ignoring refnames without digits, we 974 | # filter out many common branch names like "release" and 975 | # "stabilization", as well as "HEAD" and "master". 976 | tags = set([r for r in refs if re.search(r'\d', r)]) 977 | if verbose: 978 | print("discarding '%s', no digits" % ",".join(refs-tags)) 979 | if verbose: 980 | print("likely tags: %s" % ",".join(sorted(tags))) 981 | for ref in sorted(tags): 982 | # sorting will prefer e.g. "2.0" over "2.0rc1" 983 | if ref.startswith(tag_prefix): 984 | r = ref[len(tag_prefix):] 985 | if verbose: 986 | print("picking %s" % r) 987 | return {"version": r, 988 | "full-revisionid": keywords["full"].strip(), 989 | "dirty": False, "error": None 990 | } 991 | # no suitable tags, so version is "0+unknown", but full hex is still there 992 | if verbose: 993 | print("no suitable tags, using unknown + full revision id") 994 | return {"version": "0+unknown", 995 | "full-revisionid": keywords["full"].strip(), 996 | "dirty": False, "error": "no suitable tags"} 997 | 998 | 999 | @register_vcs_handler("git", "pieces_from_vcs") 1000 | def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command): 1001 | # this runs 'git' from the root of the source tree. This only gets called 1002 | # if the git-archive 'subst' keywords were *not* expanded, and 1003 | # _version.py hasn't already been rewritten with a short version string, 1004 | # meaning we're inside a checked out source tree. 
1005 | 1006 | if not os.path.exists(os.path.join(root, ".git")): 1007 | if verbose: 1008 | print("no .git in %s" % root) 1009 | raise NotThisMethod("no .git directory") 1010 | 1011 | GITS = ["git"] 1012 | if sys.platform == "win32": 1013 | GITS = ["git.cmd", "git.exe"] 1014 | # if there is a tag, this yields TAG-NUM-gHEX[-dirty] 1015 | # if there are no tags, this yields HEX[-dirty] (no NUM) 1016 | describe_out = run_command(GITS, ["describe", "--tags", "--dirty", 1017 | "--always", "--long"], 1018 | cwd=root) 1019 | # --long was added in git-1.5.5 1020 | if describe_out is None: 1021 | raise NotThisMethod("'git describe' failed") 1022 | describe_out = describe_out.strip() 1023 | full_out = run_command(GITS, ["rev-parse", "HEAD"], cwd=root) 1024 | if full_out is None: 1025 | raise NotThisMethod("'git rev-parse' failed") 1026 | full_out = full_out.strip() 1027 | 1028 | pieces = {} 1029 | pieces["long"] = full_out 1030 | pieces["short"] = full_out[:7] # maybe improved later 1031 | pieces["error"] = None 1032 | 1033 | # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty] 1034 | # TAG might have hyphens. 1035 | git_describe = describe_out 1036 | 1037 | # look for -dirty suffix 1038 | dirty = git_describe.endswith("-dirty") 1039 | pieces["dirty"] = dirty 1040 | if dirty: 1041 | git_describe = git_describe[:git_describe.rindex("-dirty")] 1042 | 1043 | # now we have TAG-NUM-gHEX or HEX 1044 | 1045 | if "-" in git_describe: 1046 | # TAG-NUM-gHEX 1047 | mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe) 1048 | if not mo: 1049 | # unparseable. Maybe git-describe is misbehaving? 1050 | pieces["error"] = ("unable to parse git-describe output: '%s'" 1051 | % describe_out) 1052 | return pieces 1053 | 1054 | # tag 1055 | full_tag = mo.group(1) 1056 | if not full_tag.startswith(tag_prefix): 1057 | if verbose: 1058 | fmt = "tag '%s' doesn't start with prefix '%s'" 1059 | print(fmt % (full_tag, tag_prefix)) 1060 | pieces["error"] = ("tag '%s' doesn't start with prefix '%s'" 1061 | % (full_tag, tag_prefix)) 1062 | return pieces 1063 | pieces["closest-tag"] = full_tag[len(tag_prefix):] 1064 | 1065 | # distance: number of commits since tag 1066 | pieces["distance"] = int(mo.group(2)) 1067 | 1068 | # commit: short hex revision ID 1069 | pieces["short"] = mo.group(3) 1070 | 1071 | else: 1072 | # HEX: no tags 1073 | pieces["closest-tag"] = None 1074 | count_out = run_command(GITS, ["rev-list", "HEAD", "--count"], 1075 | cwd=root) 1076 | pieces["distance"] = int(count_out) # total number of commits 1077 | 1078 | return pieces 1079 | 1080 | 1081 | def do_vcs_install(manifest_in, versionfile_source, ipy): 1082 | GITS = ["git"] 1083 | if sys.platform == "win32": 1084 | GITS = ["git.cmd", "git.exe"] 1085 | files = [manifest_in, versionfile_source] 1086 | if ipy: 1087 | files.append(ipy) 1088 | try: 1089 | me = __file__ 1090 | if me.endswith(".pyc") or me.endswith(".pyo"): 1091 | me = os.path.splitext(me)[0] + ".py" 1092 | versioneer_file = os.path.relpath(me) 1093 | except NameError: 1094 | versioneer_file = "versioneer.py" 1095 | files.append(versioneer_file) 1096 | present = False 1097 | try: 1098 | f = open(".gitattributes", "r") 1099 | for line in f.readlines(): 1100 | if line.strip().startswith(versionfile_source): 1101 | if "export-subst" in line.strip().split()[1:]: 1102 | present = True 1103 | f.close() 1104 | except EnvironmentError: 1105 | pass 1106 | if not present: 1107 | f = open(".gitattributes", "a+") 1108 | f.write("%s export-subst\n" % versionfile_source) 1109 | f.close() 
1110 | files.append(".gitattributes") 1111 | run_command(GITS, ["add", "--"] + files) 1112 | 1113 | 1114 | def versions_from_parentdir(parentdir_prefix, root, verbose): 1115 | # Source tarballs conventionally unpack into a directory that includes 1116 | # both the project name and a version string. 1117 | dirname = os.path.basename(root) 1118 | if not dirname.startswith(parentdir_prefix): 1119 | if verbose: 1120 | print("guessing rootdir is '%s', but '%s' doesn't start with " 1121 | "prefix '%s'" % (root, dirname, parentdir_prefix)) 1122 | raise NotThisMethod("rootdir doesn't start with parentdir_prefix") 1123 | return {"version": dirname[len(parentdir_prefix):], 1124 | "full-revisionid": None, 1125 | "dirty": False, "error": None} 1126 | 1127 | SHORT_VERSION_PY = """ 1128 | # This file was generated by 'versioneer.py' (0.15) from 1129 | # revision-control system data, or from the parent directory name of an 1130 | # unpacked source archive. Distribution tarballs contain a pre-generated copy 1131 | # of this file. 1132 | 1133 | import json 1134 | import sys 1135 | 1136 | version_json = ''' 1137 | %s 1138 | ''' # END VERSION_JSON 1139 | 1140 | 1141 | def get_versions(): 1142 | return json.loads(version_json) 1143 | """ 1144 | 1145 | 1146 | def versions_from_file(filename): 1147 | try: 1148 | with open(filename) as f: 1149 | contents = f.read() 1150 | except EnvironmentError: 1151 | raise NotThisMethod("unable to read _version.py") 1152 | mo = re.search(r"version_json = '''\n(.*)''' # END VERSION_JSON", 1153 | contents, re.M | re.S) 1154 | if not mo: 1155 | raise NotThisMethod("no version_json in _version.py") 1156 | return json.loads(mo.group(1)) 1157 | 1158 | 1159 | def write_to_version_file(filename, versions): 1160 | os.unlink(filename) 1161 | contents = json.dumps(versions, sort_keys=True, 1162 | indent=1, separators=(",", ": ")) 1163 | with open(filename, "w") as f: 1164 | f.write(SHORT_VERSION_PY % contents) 1165 | 1166 | print("set %s to '%s'" % (filename, versions["version"])) 1167 | 1168 | 1169 | def plus_or_dot(pieces): 1170 | if "+" in pieces.get("closest-tag", ""): 1171 | return "." 1172 | return "+" 1173 | 1174 | 1175 | def render_pep440(pieces): 1176 | # now build up version string, with post-release "local version 1177 | # identifier". Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you 1178 | # get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty 1179 | 1180 | # exceptions: 1181 | # 1: no tags. git_describe was just HEX. 0+untagged.DISTANCE.gHEX[.dirty] 1182 | 1183 | if pieces["closest-tag"]: 1184 | rendered = pieces["closest-tag"] 1185 | if pieces["distance"] or pieces["dirty"]: 1186 | rendered += plus_or_dot(pieces) 1187 | rendered += "%d.g%s" % (pieces["distance"], pieces["short"]) 1188 | if pieces["dirty"]: 1189 | rendered += ".dirty" 1190 | else: 1191 | # exception #1 1192 | rendered = "0+untagged.%d.g%s" % (pieces["distance"], 1193 | pieces["short"]) 1194 | if pieces["dirty"]: 1195 | rendered += ".dirty" 1196 | return rendered 1197 | 1198 | 1199 | def render_pep440_pre(pieces): 1200 | # TAG[.post.devDISTANCE] . No -dirty 1201 | 1202 | # exceptions: 1203 | # 1: no tags. 0.post.devDISTANCE 1204 | 1205 | if pieces["closest-tag"]: 1206 | rendered = pieces["closest-tag"] 1207 | if pieces["distance"]: 1208 | rendered += ".post.dev%d" % pieces["distance"] 1209 | else: 1210 | # exception #1 1211 | rendered = "0.post.dev%d" % pieces["distance"] 1212 | return rendered 1213 | 1214 | 1215 | def render_pep440_post(pieces): 1216 | # TAG[.postDISTANCE[.dev0]+gHEX] . 
The ".dev0" means dirty. Note that 1217 | # .dev0 sorts backwards (a dirty tree will appear "older" than the 1218 | # corresponding clean one), but you shouldn't be releasing software with 1219 | # -dirty anyways. 1220 | 1221 | # exceptions: 1222 | # 1: no tags. 0.postDISTANCE[.dev0] 1223 | 1224 | if pieces["closest-tag"]: 1225 | rendered = pieces["closest-tag"] 1226 | if pieces["distance"] or pieces["dirty"]: 1227 | rendered += ".post%d" % pieces["distance"] 1228 | if pieces["dirty"]: 1229 | rendered += ".dev0" 1230 | rendered += plus_or_dot(pieces) 1231 | rendered += "g%s" % pieces["short"] 1232 | else: 1233 | # exception #1 1234 | rendered = "0.post%d" % pieces["distance"] 1235 | if pieces["dirty"]: 1236 | rendered += ".dev0" 1237 | rendered += "+g%s" % pieces["short"] 1238 | return rendered 1239 | 1240 | 1241 | def render_pep440_old(pieces): 1242 | # TAG[.postDISTANCE[.dev0]] . The ".dev0" means dirty. 1243 | 1244 | # exceptions: 1245 | # 1: no tags. 0.postDISTANCE[.dev0] 1246 | 1247 | if pieces["closest-tag"]: 1248 | rendered = pieces["closest-tag"] 1249 | if pieces["distance"] or pieces["dirty"]: 1250 | rendered += ".post%d" % pieces["distance"] 1251 | if pieces["dirty"]: 1252 | rendered += ".dev0" 1253 | else: 1254 | # exception #1 1255 | rendered = "0.post%d" % pieces["distance"] 1256 | if pieces["dirty"]: 1257 | rendered += ".dev0" 1258 | return rendered 1259 | 1260 | 1261 | def render_git_describe(pieces): 1262 | # TAG[-DISTANCE-gHEX][-dirty], like 'git describe --tags --dirty 1263 | # --always' 1264 | 1265 | # exceptions: 1266 | # 1: no tags. HEX[-dirty] (note: no 'g' prefix) 1267 | 1268 | if pieces["closest-tag"]: 1269 | rendered = pieces["closest-tag"] 1270 | if pieces["distance"]: 1271 | rendered += "-%d-g%s" % (pieces["distance"], pieces["short"]) 1272 | else: 1273 | # exception #1 1274 | rendered = pieces["short"] 1275 | if pieces["dirty"]: 1276 | rendered += "-dirty" 1277 | return rendered 1278 | 1279 | 1280 | def render_git_describe_long(pieces): 1281 | # TAG-DISTANCE-gHEX[-dirty], like 'git describe --tags --dirty 1282 | # --always -long'. The distance/hash is unconditional. 1283 | 1284 | # exceptions: 1285 | # 1: no tags. 
HEX[-dirty] (note: no 'g' prefix) 1286 | 1287 | if pieces["closest-tag"]: 1288 | rendered = pieces["closest-tag"] 1289 | rendered += "-%d-g%s" % (pieces["distance"], pieces["short"]) 1290 | else: 1291 | # exception #1 1292 | rendered = pieces["short"] 1293 | if pieces["dirty"]: 1294 | rendered += "-dirty" 1295 | return rendered 1296 | 1297 | 1298 | def render(pieces, style): 1299 | if pieces["error"]: 1300 | return {"version": "unknown", 1301 | "full-revisionid": pieces.get("long"), 1302 | "dirty": None, 1303 | "error": pieces["error"]} 1304 | 1305 | if not style or style == "default": 1306 | style = "pep440" # the default 1307 | 1308 | if style == "pep440": 1309 | rendered = render_pep440(pieces) 1310 | elif style == "pep440-pre": 1311 | rendered = render_pep440_pre(pieces) 1312 | elif style == "pep440-post": 1313 | rendered = render_pep440_post(pieces) 1314 | elif style == "pep440-old": 1315 | rendered = render_pep440_old(pieces) 1316 | elif style == "git-describe": 1317 | rendered = render_git_describe(pieces) 1318 | elif style == "git-describe-long": 1319 | rendered = render_git_describe_long(pieces) 1320 | else: 1321 | raise ValueError("unknown style '%s'" % style) 1322 | 1323 | return {"version": rendered, "full-revisionid": pieces["long"], 1324 | "dirty": pieces["dirty"], "error": None} 1325 | 1326 | 1327 | class VersioneerBadRootError(Exception): 1328 | pass 1329 | 1330 | 1331 | def get_versions(verbose=False): 1332 | # returns dict with two keys: 'version' and 'full' 1333 | 1334 | if "versioneer" in sys.modules: 1335 | # see the discussion in cmdclass.py:get_cmdclass() 1336 | del sys.modules["versioneer"] 1337 | 1338 | root = get_root() 1339 | cfg = get_config_from_root(root) 1340 | 1341 | assert cfg.VCS is not None, "please set [versioneer]VCS= in setup.cfg" 1342 | handlers = HANDLERS.get(cfg.VCS) 1343 | assert handlers, "unrecognized VCS '%s'" % cfg.VCS 1344 | verbose = verbose or cfg.verbose 1345 | assert cfg.versionfile_source is not None, \ 1346 | "please set versioneer.versionfile_source" 1347 | assert cfg.tag_prefix is not None, "please set versioneer.tag_prefix" 1348 | 1349 | versionfile_abs = os.path.join(root, cfg.versionfile_source) 1350 | 1351 | # extract version from first of: _version.py, VCS command (e.g. 'git 1352 | # describe'), parentdir. This is meant to work for developers using a 1353 | # source checkout, for users of a tarball created by 'setup.py sdist', 1354 | # and for users of a tarball/zipball created by 'git archive' or github's 1355 | # download-from-tag feature or the equivalent in other VCSes. 
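    # Editor's note (illustrative addition, not part of upstream versioneer):
    # the lookup below tries four strategies in order, falling through to the
    # next one whenever NotThisMethod is raised:
    #   1. expanded VCS keywords stored in _version.py (e.g. 'git archive' builds),
    #   2. a version_json literal already written into _version.py,
    #   3. the VCS itself via the 'pieces_from_vcs' handler plus render(),
    #   4. the parent directory name (versions_from_parentdir).
    # Each strategy, when it succeeds, returns a dict shaped like
    #     {"version": "0.2+3.g1f2e3d4c.dirty", "full-revisionid": "...",
    #      "dirty": True, "error": None}
    # and only if all four fail does the "0+unknown" fallback below apply.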
1356 | 1357 | get_keywords_f = handlers.get("get_keywords") 1358 | from_keywords_f = handlers.get("keywords") 1359 | if get_keywords_f and from_keywords_f: 1360 | try: 1361 | keywords = get_keywords_f(versionfile_abs) 1362 | ver = from_keywords_f(keywords, cfg.tag_prefix, verbose) 1363 | if verbose: 1364 | print("got version from expanded keyword %s" % ver) 1365 | return ver 1366 | except NotThisMethod: 1367 | pass 1368 | 1369 | try: 1370 | ver = versions_from_file(versionfile_abs) 1371 | if verbose: 1372 | print("got version from file %s %s" % (versionfile_abs, ver)) 1373 | return ver 1374 | except NotThisMethod: 1375 | pass 1376 | 1377 | from_vcs_f = handlers.get("pieces_from_vcs") 1378 | if from_vcs_f: 1379 | try: 1380 | pieces = from_vcs_f(cfg.tag_prefix, root, verbose) 1381 | ver = render(pieces, cfg.style) 1382 | if verbose: 1383 | print("got version from VCS %s" % ver) 1384 | return ver 1385 | except NotThisMethod: 1386 | pass 1387 | 1388 | try: 1389 | if cfg.parentdir_prefix: 1390 | ver = versions_from_parentdir(cfg.parentdir_prefix, root, verbose) 1391 | if verbose: 1392 | print("got version from parentdir %s" % ver) 1393 | return ver 1394 | except NotThisMethod: 1395 | pass 1396 | 1397 | if verbose: 1398 | print("unable to compute version") 1399 | 1400 | return {"version": "0+unknown", "full-revisionid": None, 1401 | "dirty": None, "error": "unable to compute version"} 1402 | 1403 | 1404 | def get_version(): 1405 | return get_versions()["version"] 1406 | 1407 | 1408 | def get_cmdclass(): 1409 | if "versioneer" in sys.modules: 1410 | del sys.modules["versioneer"] 1411 | # this fixes the "python setup.py develop" case (also 'install' and 1412 | # 'easy_install .'), in which subdependencies of the main project are 1413 | # built (using setup.py bdist_egg) in the same python process. Assume 1414 | # a main project A and a dependency B, which use different versions 1415 | # of Versioneer. A's setup.py imports A's Versioneer, leaving it in 1416 | # sys.modules by the time B's setup.py is executed, causing B to run 1417 | # with the wrong versioneer. Setuptools wraps the sub-dep builds in a 1418 | # sandbox that restores sys.modules to it's pre-build state, so the 1419 | # parent is protected against the child's "import versioneer". By 1420 | # removing ourselves from sys.modules here, before the child build 1421 | # happens, we protect the child from the parent's versioneer too. 1422 | # Also see https://github.com/warner/python-versioneer/issues/52 1423 | 1424 | cmds = {} 1425 | 1426 | # we add "version" to both distutils and setuptools 1427 | from distutils.core import Command 1428 | 1429 | class cmd_version(Command): 1430 | description = "report generated version string" 1431 | user_options = [] 1432 | boolean_options = [] 1433 | 1434 | def initialize_options(self): 1435 | pass 1436 | 1437 | def finalize_options(self): 1438 | pass 1439 | 1440 | def run(self): 1441 | vers = get_versions(verbose=True) 1442 | print("Version: %s" % vers["version"]) 1443 | print(" full-revisionid: %s" % vers.get("full-revisionid")) 1444 | print(" dirty: %s" % vers.get("dirty")) 1445 | if vers["error"]: 1446 | print(" error: %s" % vers["error"]) 1447 | cmds["version"] = cmd_version 1448 | 1449 | # we override "build_py" in both distutils and setuptools 1450 | # 1451 | # most invocation pathways end up running build_py: 1452 | # distutils/build -> build_py 1453 | # distutils/install -> distutils/build ->.. 1454 | # setuptools/bdist_wheel -> distutils/install ->.. 
1455 | # setuptools/bdist_egg -> distutils/install_lib -> build_py 1456 | # setuptools/install -> bdist_egg ->.. 1457 | # setuptools/develop -> ? 1458 | 1459 | from distutils.command.build_py import build_py as _build_py 1460 | 1461 | class cmd_build_py(_build_py): 1462 | def run(self): 1463 | root = get_root() 1464 | cfg = get_config_from_root(root) 1465 | versions = get_versions() 1466 | _build_py.run(self) 1467 | # now locate _version.py in the new build/ directory and replace 1468 | # it with an updated value 1469 | if cfg.versionfile_build: 1470 | target_versionfile = os.path.join(self.build_lib, 1471 | cfg.versionfile_build) 1472 | print("UPDATING %s" % target_versionfile) 1473 | write_to_version_file(target_versionfile, versions) 1474 | cmds["build_py"] = cmd_build_py 1475 | 1476 | if "cx_Freeze" in sys.modules: # cx_freeze enabled? 1477 | from cx_Freeze.dist import build_exe as _build_exe 1478 | 1479 | class cmd_build_exe(_build_exe): 1480 | def run(self): 1481 | root = get_root() 1482 | cfg = get_config_from_root(root) 1483 | versions = get_versions() 1484 | target_versionfile = cfg.versionfile_source 1485 | print("UPDATING %s" % target_versionfile) 1486 | write_to_version_file(target_versionfile, versions) 1487 | 1488 | _build_exe.run(self) 1489 | os.unlink(target_versionfile) 1490 | with open(cfg.versionfile_source, "w") as f: 1491 | LONG = LONG_VERSION_PY[cfg.VCS] 1492 | f.write(LONG % 1493 | {"DOLLAR": "$", 1494 | "STYLE": cfg.style, 1495 | "TAG_PREFIX": cfg.tag_prefix, 1496 | "PARENTDIR_PREFIX": cfg.parentdir_prefix, 1497 | "VERSIONFILE_SOURCE": cfg.versionfile_source, 1498 | }) 1499 | cmds["build_exe"] = cmd_build_exe 1500 | del cmds["build_py"] 1501 | 1502 | # we override different "sdist" commands for both environments 1503 | if "setuptools" in sys.modules: 1504 | from setuptools.command.sdist import sdist as _sdist 1505 | else: 1506 | from distutils.command.sdist import sdist as _sdist 1507 | 1508 | class cmd_sdist(_sdist): 1509 | def run(self): 1510 | versions = get_versions() 1511 | self._versioneer_generated_versions = versions 1512 | # unless we update this, the command will keep using the old 1513 | # version 1514 | self.distribution.metadata.version = versions["version"] 1515 | return _sdist.run(self) 1516 | 1517 | def make_release_tree(self, base_dir, files): 1518 | root = get_root() 1519 | cfg = get_config_from_root(root) 1520 | _sdist.make_release_tree(self, base_dir, files) 1521 | # now locate _version.py in the new base_dir directory 1522 | # (remembering that it may be a hardlink) and replace it with an 1523 | # updated value 1524 | target_versionfile = os.path.join(base_dir, cfg.versionfile_source) 1525 | print("UPDATING %s" % target_versionfile) 1526 | write_to_version_file(target_versionfile, 1527 | self._versioneer_generated_versions) 1528 | cmds["sdist"] = cmd_sdist 1529 | 1530 | return cmds 1531 | 1532 | 1533 | CONFIG_ERROR = """ 1534 | setup.cfg is missing the necessary Versioneer configuration. You need 1535 | a section like: 1536 | 1537 | [versioneer] 1538 | VCS = git 1539 | style = pep440 1540 | versionfile_source = src/myproject/_version.py 1541 | versionfile_build = myproject/_version.py 1542 | tag_prefix = "" 1543 | parentdir_prefix = myproject- 1544 | 1545 | You will also need to edit your setup.py to use the results: 1546 | 1547 | import versioneer 1548 | setup(version=versioneer.get_version(), 1549 | cmdclass=versioneer.get_cmdclass(), ...) 
1550 | 1551 | Please read the docstring in ./versioneer.py for configuration instructions, 1552 | edit setup.cfg, and re-run the installer or 'python versioneer.py setup'. 1553 | """ 1554 | 1555 | SAMPLE_CONFIG = """ 1556 | # See the docstring in versioneer.py for instructions. Note that you must 1557 | # re-run 'versioneer.py setup' after changing this section, and commit the 1558 | # resulting files. 1559 | 1560 | [versioneer] 1561 | #VCS = git 1562 | #style = pep440 1563 | #versionfile_source = 1564 | #versionfile_build = 1565 | #tag_prefix = 1566 | #parentdir_prefix = 1567 | 1568 | """ 1569 | 1570 | INIT_PY_SNIPPET = """ 1571 | from ._version import get_versions 1572 | __version__ = get_versions()['version'] 1573 | del get_versions 1574 | """ 1575 | 1576 | 1577 | def do_setup(): 1578 | root = get_root() 1579 | try: 1580 | cfg = get_config_from_root(root) 1581 | except (EnvironmentError, configparser.NoSectionError, 1582 | configparser.NoOptionError) as e: 1583 | if isinstance(e, (EnvironmentError, configparser.NoSectionError)): 1584 | print("Adding sample versioneer config to setup.cfg", 1585 | file=sys.stderr) 1586 | with open(os.path.join(root, "setup.cfg"), "a") as f: 1587 | f.write(SAMPLE_CONFIG) 1588 | print(CONFIG_ERROR, file=sys.stderr) 1589 | return 1 1590 | 1591 | print(" creating %s" % cfg.versionfile_source) 1592 | with open(cfg.versionfile_source, "w") as f: 1593 | LONG = LONG_VERSION_PY[cfg.VCS] 1594 | f.write(LONG % {"DOLLAR": "$", 1595 | "STYLE": cfg.style, 1596 | "TAG_PREFIX": cfg.tag_prefix, 1597 | "PARENTDIR_PREFIX": cfg.parentdir_prefix, 1598 | "VERSIONFILE_SOURCE": cfg.versionfile_source, 1599 | }) 1600 | 1601 | ipy = os.path.join(os.path.dirname(cfg.versionfile_source), 1602 | "__init__.py") 1603 | if os.path.exists(ipy): 1604 | try: 1605 | with open(ipy, "r") as f: 1606 | old = f.read() 1607 | except EnvironmentError: 1608 | old = "" 1609 | if INIT_PY_SNIPPET not in old: 1610 | print(" appending to %s" % ipy) 1611 | with open(ipy, "a") as f: 1612 | f.write(INIT_PY_SNIPPET) 1613 | else: 1614 | print(" %s unmodified" % ipy) 1615 | else: 1616 | print(" %s doesn't exist, ok" % ipy) 1617 | ipy = None 1618 | 1619 | # Make sure both the top-level "versioneer.py" and versionfile_source 1620 | # (PKG/_version.py, used by runtime code) are in MANIFEST.in, so 1621 | # they'll be copied into source distributions. Pip won't be able to 1622 | # install the package without this. 1623 | manifest_in = os.path.join(root, "MANIFEST.in") 1624 | simple_includes = set() 1625 | try: 1626 | with open(manifest_in, "r") as f: 1627 | for line in f: 1628 | if line.startswith("include "): 1629 | for include in line.split()[1:]: 1630 | simple_includes.add(include) 1631 | except EnvironmentError: 1632 | pass 1633 | # That doesn't cover everything MANIFEST.in can do 1634 | # (http://docs.python.org/2/distutils/sourcedist.html#commands), so 1635 | # it might give some false negatives. Appending redundant 'include' 1636 | # lines is safe, though. 
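    # Editor's note (illustrative addition, not part of upstream versioneer):
    # after the two checks below, MANIFEST.in contains at least
    #     include versioneer.py
    #     include <versionfile_source>
    # (for this project the versionfile_source is presumably pydpc/_version.py),
    # so that sdists ship both files and pip can install from the tarball.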
1637 | if "versioneer.py" not in simple_includes: 1638 | print(" appending 'versioneer.py' to MANIFEST.in") 1639 | with open(manifest_in, "a") as f: 1640 | f.write("include versioneer.py\n") 1641 | else: 1642 | print(" 'versioneer.py' already in MANIFEST.in") 1643 | if cfg.versionfile_source not in simple_includes: 1644 | print(" appending versionfile_source ('%s') to MANIFEST.in" % 1645 | cfg.versionfile_source) 1646 | with open(manifest_in, "a") as f: 1647 | f.write("include %s\n" % cfg.versionfile_source) 1648 | else: 1649 | print(" versionfile_source already in MANIFEST.in") 1650 | 1651 | # Make VCS-specific changes. For git, this means creating/changing 1652 | # .gitattributes to mark _version.py for export-time keyword 1653 | # substitution. 1654 | do_vcs_install(manifest_in, cfg.versionfile_source, ipy) 1655 | return 0 1656 | 1657 | 1658 | def scan_setup_py(): 1659 | found = set() 1660 | setters = False 1661 | errors = 0 1662 | with open("setup.py", "r") as f: 1663 | for line in f.readlines(): 1664 | if "import versioneer" in line: 1665 | found.add("import") 1666 | if "versioneer.get_cmdclass()" in line: 1667 | found.add("cmdclass") 1668 | if "versioneer.get_version()" in line: 1669 | found.add("get_version") 1670 | if "versioneer.VCS" in line: 1671 | setters = True 1672 | if "versioneer.versionfile_source" in line: 1673 | setters = True 1674 | if len(found) != 3: 1675 | print("") 1676 | print("Your setup.py appears to be missing some important items") 1677 | print("(but I might be wrong). Please make sure it has something") 1678 | print("roughly like the following:") 1679 | print("") 1680 | print(" import versioneer") 1681 | print(" setup( version=versioneer.get_version(),") 1682 | print(" cmdclass=versioneer.get_cmdclass(), ...)") 1683 | print("") 1684 | errors += 1 1685 | if setters: 1686 | print("You should remove lines like 'versioneer.VCS = ' and") 1687 | print("'versioneer.versionfile_source = ' . This configuration") 1688 | print("now lives in setup.cfg, and should be removed from setup.py") 1689 | print("") 1690 | errors += 1 1691 | return errors 1692 | 1693 | if __name__ == "__main__": 1694 | cmd = sys.argv[1] 1695 | if cmd == "setup": 1696 | errors = do_setup() 1697 | errors += scan_setup_py() 1698 | if errors: 1699 | sys.exit(1) 1700 | --------------------------------------------------------------------------------
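The render_* helpers above are pure functions of the "pieces" dict, so their behaviour is easy to check in isolation. A minimal sketch, assuming it is run from the repository root (so the vendored versioneer.py is importable) and using a hypothetical pieces dict of the kind the 'pieces_from_vcs' handler would produce:

# Illustrative sketch (editor's addition, not part of the pydpc sources).
import versioneer

# Hypothetical values; a real dict comes from the 'pieces_from_vcs' handler.
pieces = {
    "error": None,
    "long": "1f2e3d4c" * 5,   # fake 40-character revision id
    "short": "1f2e3d4c",
    "closest-tag": "0.2",     # nearest tag, with tag_prefix already stripped
    "distance": 3,            # commits since that tag
    "dirty": True,            # uncommitted changes in the working tree
}

for style in ("pep440", "pep440-pre", "pep440-post", "pep440-old",
              "git-describe", "git-describe-long"):
    print("%-17s -> %s" % (style, versioneer.render(pieces, style)["version"]))

# Expected output, following the render_* functions above:
#   pep440            -> 0.2+3.g1f2e3d4c.dirty
#   pep440-pre        -> 0.2.post.dev3
#   pep440-post       -> 0.2.post3.dev0+g1f2e3d4c
#   pep440-old        -> 0.2.post3.dev0
#   git-describe      -> 0.2-3-g1f2e3d4c-dirty
#   git-describe-long -> 0.2-3-g1f2e3d4c-dirty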
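Similarly, write_to_version_file() and versions_from_file() are inverse operations around the SHORT_VERSION_PY template. A small sketch of that round trip, again assuming versioneer.py is importable and using a made-up versions dict:

# Illustrative sketch (editor's addition, not part of the pydpc sources).
import os
import tempfile

import versioneer

versions = {"version": "1.0+2.g1f2e3d4c", "full-revisionid": "1f2e3d4c" * 5,
            "dirty": False, "error": None}

target = os.path.join(tempfile.mkdtemp(), "_version.py")
open(target, "w").close()   # write_to_version_file() unlinks the old file
                            # first, so it has to exist already

versioneer.write_to_version_file(target, versions)   # prints "set ... to ..."

# versions_from_file() re-extracts the JSON blob between the
# "version_json = '''" marker and "''' # END VERSION_JSON".
assert versioneer.versions_from_file(target) == versions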
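Finally, the cmdclass machinery only takes effect once setup.py is wired up as the CONFIG_ERROR text describes. A hedged sketch of that wiring, with a hypothetical project name and assuming a setup.cfg containing the [versioneer] section shown above:

# Illustrative sketch (editor's addition): a hypothetical setup.py using the
# wiring that the CONFIG_ERROR text above asks for. With this in place,
# 'python setup.py version' runs cmd_version above and prints something like
#     Version: 0.2+3.g1f2e3d4c.dirty
#      full-revisionid: 1f2e3d4c1f2e3d4c1f2e3d4c1f2e3d4c1f2e3d4c
#      dirty: True
from setuptools import setup

import versioneer

setup(
    name="example",                      # hypothetical name, not pydpc's real setup.py
    version=versioneer.get_version(),
    cmdclass=versioneer.get_cmdclass(),  # adds version, build_py and sdist
                                         # (and build_exe when cx_Freeze is loaded)
)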