├── .gitattributes ├── .github └── workflows │ ├── publish.yml │ ├── style.yml │ └── tox.yml ├── .gitignore ├── AUTHORS.rst ├── CONTRIBUTING.rst ├── LICENSE ├── MANIFEST.in ├── README.md ├── development.md ├── environment.yml ├── environment_minimum_requirements.yml ├── flip_number_paths.py ├── mypy.ini ├── notebooks ├── 1-Basic_Plots.ipynb ├── 2-Animations.ipynb ├── 2018_CLE_2018122305_1246.tsv ├── 2018_GB_2018090912_3564.tsv ├── 3-Faceting.ipynb ├── 4-Tips_and_Tricks.ipynb └── roster_2018.csv ├── parse_envs.py ├── ptplot.gif ├── ptplot ├── __init__.py ├── _version.py ├── animation.py ├── callback.py ├── core.py ├── facet.py ├── hover.py ├── nfl.py ├── pick.py ├── pick.ts ├── plot.py ├── ptplot.py └── utils.py ├── pytest.ini ├── setup.cfg ├── setup.py ├── tests ├── conftest.py ├── test_ptplot.py └── test_utils.py ├── tox-conda.ini ├── tox-pip.ini └── versioneer.py /.gitattributes: -------------------------------------------------------------------------------- 1 | ptplot/_version.py export-subst 2 | -------------------------------------------------------------------------------- /.github/workflows/publish.yml: -------------------------------------------------------------------------------- 1 | name: Publish to PyPI 2 | 3 | on: 4 | release: 5 | types: [created] 6 | 7 | jobs: 8 | deploy: 9 | runs-on: ubuntu-latest 10 | steps: 11 | - uses: actions/checkout@5a4ac9002d0be2fb38bd78e4b4dbde5606d7042f # v2.3.4 12 | - name: Set up Python 13 | uses: actions/setup-python@3105fb18c05ddd93efea5f9e0bef7a03a6e9e7df # v2.2.1 14 | with: 15 | python-version: '3.8' 16 | - name: Install dependencies 17 | run: | 18 | python -m pip install --upgrade pip 19 | pip install setuptools wheel twine 20 | - name: Build and publish 21 | env: 22 | TWINE_USERNAME: __token__ 23 | TWINE_PASSWORD: ${{ secrets.PYPI_TOKEN }} 24 | run: | 25 | python setup.py sdist bdist_wheel 26 | twine upload dist/* -------------------------------------------------------------------------------- /.github/workflows/style.yml: -------------------------------------------------------------------------------- 1 | name: Run style checks 2 | 3 | on: [pull_request] 4 | 5 | jobs: 6 | style_checks: 7 | runs-on: ubuntu-latest 8 | strategy: 9 | max-parallel: 5 10 | defaults: 11 | run: 12 | shell: bash -l {0} 13 | 14 | steps: 15 | - uses: actions/checkout@5a4ac9002d0be2fb38bd78e4b4dbde5606d7042f # v2.3.4 16 | - name: Set up conda 17 | uses: conda-incubator/setup-miniconda@73b9a8098aade40363e43af145303c23542ccb97 # v2.0.1 18 | with: 19 | activate-environment: ptplot-dev 20 | auto-activate-base: false 21 | auto-update-conda: true 22 | environment-file: environment.yml 23 | - name: Sanity Check 24 | run: | 25 | conda list 26 | conda info -e 27 | which python 28 | 29 | - name: mypy 30 | run: | 31 | mypy -v ptplot/ 32 | 33 | - name: Black 34 | run: | 35 | python -m black -l 120 --check ptplot/ 36 | 37 | - name: flake8 38 | run: | 39 | python -m flake8 ptplot/ -------------------------------------------------------------------------------- /.github/workflows/tox.yml: -------------------------------------------------------------------------------- 1 | name: Run tests with tox 2 | 3 | on: 4 | pull_request: 5 | schedule: 6 | - cron: '0 13 * * SAT' 7 | 8 | jobs: 9 | tests: 10 | runs-on: ubuntu-latest 11 | strategy: 12 | max-parallel: 5 13 | defaults: 14 | run: 15 | shell: bash -l {0} 16 | 17 | steps: 18 | - uses: actions/checkout@5a4ac9002d0be2fb38bd78e4b4dbde5606d7042f # v2.3.4 19 | - name: Set up conda 20 | uses: 
conda-incubator/setup-miniconda@73b9a8098aade40363e43af145303c23542ccb97 # v2.0.1 21 | with: 22 | auto-activate-base: true 23 | auto-update-conda: true 24 | activate-environment: "" 25 | channels: conda-forge 26 | - name: Install tox 27 | run: | 28 | conda install tox 29 | - name: conda info 30 | run: | 31 | conda info -e 32 | - name: conda list 33 | run: | 34 | conda list 35 | - name: Test with tox - conda 36 | run: | 37 | tox -c tox-conda.ini 38 | - name: Test with tox - pip 39 | run: | 40 | tox -c tox-pip.ini 41 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | build/ 12 | develop-eggs/ 13 | dist/ 14 | downloads/ 15 | eggs/ 16 | .eggs/ 17 | lib/ 18 | lib64/ 19 | parts/ 20 | sdist/ 21 | var/ 22 | wheels/ 23 | pip-wheel-metadata/ 24 | share/python-wheels/ 25 | *.egg-info/ 26 | .installed.cfg 27 | *.egg 28 | MANIFEST 29 | 30 | # PyInstaller 31 | # Usually these files are written by a python script from a template 32 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 33 | *.manifest 34 | *.spec 35 | 36 | # Installer logs 37 | pip-log.txt 38 | pip-delete-this-directory.txt 39 | 40 | # Unit test / coverage reports 41 | htmlcov/ 42 | .tox/ 43 | .nox/ 44 | .coverage 45 | .coverage.* 46 | .cache 47 | nosetests.xml 48 | coverage.xml 49 | *.cover 50 | *.py,cover 51 | .hypothesis/ 52 | .pytest_cache/ 53 | 54 | # Translations 55 | *.mo 56 | *.pot 57 | 58 | # Django stuff: 59 | *.log 60 | local_settings.py 61 | db.sqlite3 62 | db.sqlite3-journal 63 | 64 | # Flask stuff: 65 | instance/ 66 | .webassets-cache 67 | 68 | # Scrapy stuff: 69 | .scrapy 70 | 71 | # Sphinx documentation 72 | docs/_build/ 73 | 74 | # PyBuilder 75 | target/ 76 | 77 | # Jupyter Notebook 78 | .ipynb_checkpoints 79 | 80 | # IPython 81 | profile_default/ 82 | ipython_config.py 83 | 84 | # pyenv 85 | .python-version 86 | 87 | # pipenv 88 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 89 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 90 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 91 | # install all needed dependencies. 92 | #Pipfile.lock 93 | 94 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow 95 | __pypackages__/ 96 | 97 | # Celery stuff 98 | celerybeat-schedule 99 | celerybeat.pid 100 | 101 | # SageMath parsed files 102 | *.sage.py 103 | 104 | # Environments 105 | .env 106 | .venv 107 | env/ 108 | venv/ 109 | ENV/ 110 | env.bak/ 111 | venv.bak/ 112 | 113 | # Spyder project settings 114 | .spyderproject 115 | .spyproject 116 | 117 | # Rope project settings 118 | .ropeproject 119 | 120 | # mkdocs documentation 121 | /site 122 | 123 | # mypy 124 | .mypy_cache/ 125 | .dmypy.json 126 | dmypy.json 127 | 128 | # Pyre type checker 129 | .pyre/ 130 | 131 | # pycharm 132 | .idea/ -------------------------------------------------------------------------------- /AUTHORS.rst: -------------------------------------------------------------------------------- 1 | ======= 2 | Credits 3 | ======= 4 | 5 | Development Lead 6 | ---------------- 7 | 8 | * Andrew Schechtman-Rook 9 | 10 | Contributors 11 | ------------ 12 | 13 | None yet. Why not be the first? 
14 | -------------------------------------------------------------------------------- /CONTRIBUTING.rst: -------------------------------------------------------------------------------- 1 | .. highlight:: shell 2 | 3 | ============ 4 | Contributing 5 | ============ 6 | 7 | Contributions are welcome, and they are greatly appreciated! Every little bit 8 | helps, and credit will always be given. 9 | 10 | You can contribute in many ways: 11 | 12 | Types of Contributions 13 | ---------------------- 14 | 15 | Report Bugs 16 | ~~~~~~~~~~~ 17 | 18 | Report bugs at https://github.com/AndrewRook/ptplot/issues. 19 | 20 | If you are reporting a bug, please include: 21 | 22 | * Your operating system name and version. 23 | * Any details about your local setup that might be helpful in troubleshooting. 24 | * Detailed steps to reproduce the bug. 25 | 26 | Fix Bugs 27 | ~~~~~~~~ 28 | 29 | Look through the GitHub issues for bugs. Anything tagged with "bug" and "help 30 | wanted" is open to whoever wants to implement it. 31 | 32 | Implement Features 33 | ~~~~~~~~~~~~~~~~~~ 34 | 35 | Look through the GitHub issues for features. Anything tagged with "enhancement" 36 | and "help wanted" is open to whoever wants to implement it. 37 | 38 | Write Documentation 39 | ~~~~~~~~~~~~~~~~~~~ 40 | 41 | ptplot could always use more documentation, whether as part of the 42 | official ptplot docs, in docstrings, or even on the web in blog posts, 43 | articles, and such. 44 | 45 | Submit Feedback 46 | ~~~~~~~~~~~~~~~ 47 | 48 | The best way to send feedback is to file an issue at https://github.com/AndrewRook/ptplot/issues. 49 | 50 | If you are proposing a feature: 51 | 52 | * Explain in detail how it would work. 53 | * Keep the scope as narrow as possible, to make it easier to implement. 54 | * Remember that this is a volunteer-driven project, and that contributions 55 | are welcome :) 56 | 57 | Get Started! 58 | ------------ 59 | 60 | Ready to contribute? Here's how to set up `ptplot` for local development. 61 | 62 | 1. Fork the `ptplot` repo on GitHub. 63 | 2. Clone your fork locally:: 64 | 65 | $ git clone git@github.com:your_name_here/ptplot.git 66 | 67 | 3. Install your local copy into a conda environment. Assuming you have conda installed, this is how you set up your fork for local development:: 68 | 69 | $ cd ptplot/ 70 | $ conda env create -f environment.yml 71 | $ conda activate ptplot-dev 72 | $ pip install -e .[dev] 73 | 74 | 4. Create a branch for local development:: 75 | 76 | $ git checkout -b name-of-your-bugfix-or-feature 77 | 78 | Now you can make your changes locally. 79 | 80 | 5. When you're done making changes, check that your changes pass flake8 and the 81 | tests, including testing other Python versions with tox:: 82 | 83 | $ flake8 ptplot 84 | $ python -m py.test 85 | 86 | 6. Commit your changes and push your branch to GitHub:: 87 | 88 | $ git add . 89 | $ git commit -m "Your detailed description of your changes." 90 | $ git push origin name-of-your-bugfix-or-feature 91 | 92 | 7. Submit a pull request through the GitHub website. 93 | 94 | Pull Request Guidelines 95 | ----------------------- 96 | 97 | Before you submit a pull request, check that it meets these guidelines: 98 | 99 | 1. The pull request should include tests. 100 | 2. If the pull request adds functionality, the docs should be updated. Put 101 | your new functionality into a function with a docstring. Follow examples from existing 102 | functions to see how that works. 103 | 3. The pull request should work for Python 3.7+. 
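For a concrete sketch of guideline 1, a new test might look something like this (the test name, data, and assertion here are made up for illustration, not actual `ptplot` behavior; adapt from the existing tests in `tests/`)::

    import pandas as pd

    from ptplot import PTPlot


    def test_ptplot_accepts_dataframe():
        # Tiny stand-in for real player-tracking data
        data = pd.DataFrame({"x": [0.0, 1.0], "y": [0.0, 1.0]})
        plot = PTPlot(data)
        assert plot is not None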
104 | 105 | Tips 106 | ---- 107 | 108 | To run a subset of tests:: 109 | 110 | $ pytest tests.test_ptplot 111 | 112 | 113 | Deploying 114 | --------- 115 | 116 | TBD 117 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | GNU GENERAL PUBLIC LICENSE 2 | Version 3, 29 June 2007 3 | 4 | Copyright (C) 2007 Free Software Foundation, Inc. 5 | Everyone is permitted to copy and distribute verbatim copies 6 | of this license document, but changing it is not allowed. 7 | 8 | Preamble 9 | 10 | The GNU General Public License is a free, copyleft license for 11 | software and other kinds of works. 12 | 13 | The licenses for most software and other practical works are designed 14 | to take away your freedom to share and change the works. By contrast, 15 | the GNU General Public License is intended to guarantee your freedom to 16 | share and change all versions of a program--to make sure it remains free 17 | software for all its users. We, the Free Software Foundation, use the 18 | GNU General Public License for most of our software; it applies also to 19 | any other work released this way by its authors. You can apply it to 20 | your programs, too. 21 | 22 | When we speak of free software, we are referring to freedom, not 23 | price. Our General Public Licenses are designed to make sure that you 24 | have the freedom to distribute copies of free software (and charge for 25 | them if you wish), that you receive source code or can get it if you 26 | want it, that you can change the software or use pieces of it in new 27 | free programs, and that you know you can do these things. 28 | 29 | To protect your rights, we need to prevent others from denying you 30 | these rights or asking you to surrender the rights. Therefore, you have 31 | certain responsibilities if you distribute copies of the software, or if 32 | you modify it: responsibilities to respect the freedom of others. 33 | 34 | For example, if you distribute copies of such a program, whether 35 | gratis or for a fee, you must pass on to the recipients the same 36 | freedoms that you received. You must make sure that they, too, receive 37 | or can get the source code. And you must show them these terms so they 38 | know their rights. 39 | 40 | Developers that use the GNU GPL protect your rights with two steps: 41 | (1) assert copyright on the software, and (2) offer you this License 42 | giving you legal permission to copy, distribute and/or modify it. 43 | 44 | For the developers' and authors' protection, the GPL clearly explains 45 | that there is no warranty for this free software. For both users' and 46 | authors' sake, the GPL requires that modified versions be marked as 47 | changed, so that their problems will not be attributed erroneously to 48 | authors of previous versions. 49 | 50 | Some devices are designed to deny users access to install or run 51 | modified versions of the software inside them, although the manufacturer 52 | can do so. This is fundamentally incompatible with the aim of 53 | protecting users' freedom to change the software. The systematic 54 | pattern of such abuse occurs in the area of products for individuals to 55 | use, which is precisely where it is most unacceptable. Therefore, we 56 | have designed this version of the GPL to prohibit the practice for those 57 | products. 
If such problems arise substantially in other domains, we 58 | stand ready to extend this provision to those domains in future versions 59 | of the GPL, as needed to protect the freedom of users. 60 | 61 | Finally, every program is threatened constantly by software patents. 62 | States should not allow patents to restrict development and use of 63 | software on general-purpose computers, but in those that do, we wish to 64 | avoid the special danger that patents applied to a free program could 65 | make it effectively proprietary. To prevent this, the GPL assures that 66 | patents cannot be used to render the program non-free. 67 | 68 | The precise terms and conditions for copying, distribution and 69 | modification follow. 70 | 71 | TERMS AND CONDITIONS 72 | 73 | 0. Definitions. 74 | 75 | "This License" refers to version 3 of the GNU General Public License. 76 | 77 | "Copyright" also means copyright-like laws that apply to other kinds of 78 | works, such as semiconductor masks. 79 | 80 | "The Program" refers to any copyrightable work licensed under this 81 | License. Each licensee is addressed as "you". "Licensees" and 82 | "recipients" may be individuals or organizations. 83 | 84 | To "modify" a work means to copy from or adapt all or part of the work 85 | in a fashion requiring copyright permission, other than the making of an 86 | exact copy. The resulting work is called a "modified version" of the 87 | earlier work or a work "based on" the earlier work. 88 | 89 | A "covered work" means either the unmodified Program or a work based 90 | on the Program. 91 | 92 | To "propagate" a work means to do anything with it that, without 93 | permission, would make you directly or secondarily liable for 94 | infringement under applicable copyright law, except executing it on a 95 | computer or modifying a private copy. Propagation includes copying, 96 | distribution (with or without modification), making available to the 97 | public, and in some countries other activities as well. 98 | 99 | To "convey" a work means any kind of propagation that enables other 100 | parties to make or receive copies. Mere interaction with a user through 101 | a computer network, with no transfer of a copy, is not conveying. 102 | 103 | An interactive user interface displays "Appropriate Legal Notices" 104 | to the extent that it includes a convenient and prominently visible 105 | feature that (1) displays an appropriate copyright notice, and (2) 106 | tells the user that there is no warranty for the work (except to the 107 | extent that warranties are provided), that licensees may convey the 108 | work under this License, and how to view a copy of this License. If 109 | the interface presents a list of user commands or options, such as a 110 | menu, a prominent item in the list meets this criterion. 111 | 112 | 1. Source Code. 113 | 114 | The "source code" for a work means the preferred form of the work 115 | for making modifications to it. "Object code" means any non-source 116 | form of a work. 117 | 118 | A "Standard Interface" means an interface that either is an official 119 | standard defined by a recognized standards body, or, in the case of 120 | interfaces specified for a particular programming language, one that 121 | is widely used among developers working in that language. 
122 | 123 | The "System Libraries" of an executable work include anything, other 124 | than the work as a whole, that (a) is included in the normal form of 125 | packaging a Major Component, but which is not part of that Major 126 | Component, and (b) serves only to enable use of the work with that 127 | Major Component, or to implement a Standard Interface for which an 128 | implementation is available to the public in source code form. A 129 | "Major Component", in this context, means a major essential component 130 | (kernel, window system, and so on) of the specific operating system 131 | (if any) on which the executable work runs, or a compiler used to 132 | produce the work, or an object code interpreter used to run it. 133 | 134 | The "Corresponding Source" for a work in object code form means all 135 | the source code needed to generate, install, and (for an executable 136 | work) run the object code and to modify the work, including scripts to 137 | control those activities. However, it does not include the work's 138 | System Libraries, or general-purpose tools or generally available free 139 | programs which are used unmodified in performing those activities but 140 | which are not part of the work. For example, Corresponding Source 141 | includes interface definition files associated with source files for 142 | the work, and the source code for shared libraries and dynamically 143 | linked subprograms that the work is specifically designed to require, 144 | such as by intimate data communication or control flow between those 145 | subprograms and other parts of the work. 146 | 147 | The Corresponding Source need not include anything that users 148 | can regenerate automatically from other parts of the Corresponding 149 | Source. 150 | 151 | The Corresponding Source for a work in source code form is that 152 | same work. 153 | 154 | 2. Basic Permissions. 155 | 156 | All rights granted under this License are granted for the term of 157 | copyright on the Program, and are irrevocable provided the stated 158 | conditions are met. This License explicitly affirms your unlimited 159 | permission to run the unmodified Program. The output from running a 160 | covered work is covered by this License only if the output, given its 161 | content, constitutes a covered work. This License acknowledges your 162 | rights of fair use or other equivalent, as provided by copyright law. 163 | 164 | You may make, run and propagate covered works that you do not 165 | convey, without conditions so long as your license otherwise remains 166 | in force. You may convey covered works to others for the sole purpose 167 | of having them make modifications exclusively for you, or provide you 168 | with facilities for running those works, provided that you comply with 169 | the terms of this License in conveying all material for which you do 170 | not control copyright. Those thus making or running the covered works 171 | for you must do so exclusively on your behalf, under your direction 172 | and control, on terms that prohibit them from making any copies of 173 | your copyrighted material outside their relationship with you. 174 | 175 | Conveying under any other circumstances is permitted solely under 176 | the conditions stated below. Sublicensing is not allowed; section 10 177 | makes it unnecessary. 178 | 179 | 3. Protecting Users' Legal Rights From Anti-Circumvention Law. 
180 | 181 | No covered work shall be deemed part of an effective technological 182 | measure under any applicable law fulfilling obligations under article 183 | 11 of the WIPO copyright treaty adopted on 20 December 1996, or 184 | similar laws prohibiting or restricting circumvention of such 185 | measures. 186 | 187 | When you convey a covered work, you waive any legal power to forbid 188 | circumvention of technological measures to the extent such circumvention 189 | is effected by exercising rights under this License with respect to 190 | the covered work, and you disclaim any intention to limit operation or 191 | modification of the work as a means of enforcing, against the work's 192 | users, your or third parties' legal rights to forbid circumvention of 193 | technological measures. 194 | 195 | 4. Conveying Verbatim Copies. 196 | 197 | You may convey verbatim copies of the Program's source code as you 198 | receive it, in any medium, provided that you conspicuously and 199 | appropriately publish on each copy an appropriate copyright notice; 200 | keep intact all notices stating that this License and any 201 | non-permissive terms added in accord with section 7 apply to the code; 202 | keep intact all notices of the absence of any warranty; and give all 203 | recipients a copy of this License along with the Program. 204 | 205 | You may charge any price or no price for each copy that you convey, 206 | and you may offer support or warranty protection for a fee. 207 | 208 | 5. Conveying Modified Source Versions. 209 | 210 | You may convey a work based on the Program, or the modifications to 211 | produce it from the Program, in the form of source code under the 212 | terms of section 4, provided that you also meet all of these conditions: 213 | 214 | a) The work must carry prominent notices stating that you modified 215 | it, and giving a relevant date. 216 | 217 | b) The work must carry prominent notices stating that it is 218 | released under this License and any conditions added under section 219 | 7. This requirement modifies the requirement in section 4 to 220 | "keep intact all notices". 221 | 222 | c) You must license the entire work, as a whole, under this 223 | License to anyone who comes into possession of a copy. This 224 | License will therefore apply, along with any applicable section 7 225 | additional terms, to the whole of the work, and all its parts, 226 | regardless of how they are packaged. This License gives no 227 | permission to license the work in any other way, but it does not 228 | invalidate such permission if you have separately received it. 229 | 230 | d) If the work has interactive user interfaces, each must display 231 | Appropriate Legal Notices; however, if the Program has interactive 232 | interfaces that do not display Appropriate Legal Notices, your 233 | work need not make them do so. 234 | 235 | A compilation of a covered work with other separate and independent 236 | works, which are not by their nature extensions of the covered work, 237 | and which are not combined with it such as to form a larger program, 238 | in or on a volume of a storage or distribution medium, is called an 239 | "aggregate" if the compilation and its resulting copyright are not 240 | used to limit the access or legal rights of the compilation's users 241 | beyond what the individual works permit. Inclusion of a covered work 242 | in an aggregate does not cause this License to apply to the other 243 | parts of the aggregate. 244 | 245 | 6. Conveying Non-Source Forms. 
246 | 247 | You may convey a covered work in object code form under the terms 248 | of sections 4 and 5, provided that you also convey the 249 | machine-readable Corresponding Source under the terms of this License, 250 | in one of these ways: 251 | 252 | a) Convey the object code in, or embodied in, a physical product 253 | (including a physical distribution medium), accompanied by the 254 | Corresponding Source fixed on a durable physical medium 255 | customarily used for software interchange. 256 | 257 | b) Convey the object code in, or embodied in, a physical product 258 | (including a physical distribution medium), accompanied by a 259 | written offer, valid for at least three years and valid for as 260 | long as you offer spare parts or customer support for that product 261 | model, to give anyone who possesses the object code either (1) a 262 | copy of the Corresponding Source for all the software in the 263 | product that is covered by this License, on a durable physical 264 | medium customarily used for software interchange, for a price no 265 | more than your reasonable cost of physically performing this 266 | conveying of source, or (2) access to copy the 267 | Corresponding Source from a network server at no charge. 268 | 269 | c) Convey individual copies of the object code with a copy of the 270 | written offer to provide the Corresponding Source. This 271 | alternative is allowed only occasionally and noncommercially, and 272 | only if you received the object code with such an offer, in accord 273 | with subsection 6b. 274 | 275 | d) Convey the object code by offering access from a designated 276 | place (gratis or for a charge), and offer equivalent access to the 277 | Corresponding Source in the same way through the same place at no 278 | further charge. You need not require recipients to copy the 279 | Corresponding Source along with the object code. If the place to 280 | copy the object code is a network server, the Corresponding Source 281 | may be on a different server (operated by you or a third party) 282 | that supports equivalent copying facilities, provided you maintain 283 | clear directions next to the object code saying where to find the 284 | Corresponding Source. Regardless of what server hosts the 285 | Corresponding Source, you remain obligated to ensure that it is 286 | available for as long as needed to satisfy these requirements. 287 | 288 | e) Convey the object code using peer-to-peer transmission, provided 289 | you inform other peers where the object code and Corresponding 290 | Source of the work are being offered to the general public at no 291 | charge under subsection 6d. 292 | 293 | A separable portion of the object code, whose source code is excluded 294 | from the Corresponding Source as a System Library, need not be 295 | included in conveying the object code work. 296 | 297 | A "User Product" is either (1) a "consumer product", which means any 298 | tangible personal property which is normally used for personal, family, 299 | or household purposes, or (2) anything designed or sold for incorporation 300 | into a dwelling. In determining whether a product is a consumer product, 301 | doubtful cases shall be resolved in favor of coverage. For a particular 302 | product received by a particular user, "normally used" refers to a 303 | typical or common use of that class of product, regardless of the status 304 | of the particular user or of the way in which the particular user 305 | actually uses, or expects or is expected to use, the product. 
A product 306 | is a consumer product regardless of whether the product has substantial 307 | commercial, industrial or non-consumer uses, unless such uses represent 308 | the only significant mode of use of the product. 309 | 310 | "Installation Information" for a User Product means any methods, 311 | procedures, authorization keys, or other information required to install 312 | and execute modified versions of a covered work in that User Product from 313 | a modified version of its Corresponding Source. The information must 314 | suffice to ensure that the continued functioning of the modified object 315 | code is in no case prevented or interfered with solely because 316 | modification has been made. 317 | 318 | If you convey an object code work under this section in, or with, or 319 | specifically for use in, a User Product, and the conveying occurs as 320 | part of a transaction in which the right of possession and use of the 321 | User Product is transferred to the recipient in perpetuity or for a 322 | fixed term (regardless of how the transaction is characterized), the 323 | Corresponding Source conveyed under this section must be accompanied 324 | by the Installation Information. But this requirement does not apply 325 | if neither you nor any third party retains the ability to install 326 | modified object code on the User Product (for example, the work has 327 | been installed in ROM). 328 | 329 | The requirement to provide Installation Information does not include a 330 | requirement to continue to provide support service, warranty, or updates 331 | for a work that has been modified or installed by the recipient, or for 332 | the User Product in which it has been modified or installed. Access to a 333 | network may be denied when the modification itself materially and 334 | adversely affects the operation of the network or violates the rules and 335 | protocols for communication across the network. 336 | 337 | Corresponding Source conveyed, and Installation Information provided, 338 | in accord with this section must be in a format that is publicly 339 | documented (and with an implementation available to the public in 340 | source code form), and must require no special password or key for 341 | unpacking, reading or copying. 342 | 343 | 7. Additional Terms. 344 | 345 | "Additional permissions" are terms that supplement the terms of this 346 | License by making exceptions from one or more of its conditions. 347 | Additional permissions that are applicable to the entire Program shall 348 | be treated as though they were included in this License, to the extent 349 | that they are valid under applicable law. If additional permissions 350 | apply only to part of the Program, that part may be used separately 351 | under those permissions, but the entire Program remains governed by 352 | this License without regard to the additional permissions. 353 | 354 | When you convey a copy of a covered work, you may at your option 355 | remove any additional permissions from that copy, or from any part of 356 | it. (Additional permissions may be written to require their own 357 | removal in certain cases when you modify the work.) You may place 358 | additional permissions on material, added by you to a covered work, 359 | for which you have or can give appropriate copyright permission. 
360 | 361 | Notwithstanding any other provision of this License, for material you 362 | add to a covered work, you may (if authorized by the copyright holders of 363 | that material) supplement the terms of this License with terms: 364 | 365 | a) Disclaiming warranty or limiting liability differently from the 366 | terms of sections 15 and 16 of this License; or 367 | 368 | b) Requiring preservation of specified reasonable legal notices or 369 | author attributions in that material or in the Appropriate Legal 370 | Notices displayed by works containing it; or 371 | 372 | c) Prohibiting misrepresentation of the origin of that material, or 373 | requiring that modified versions of such material be marked in 374 | reasonable ways as different from the original version; or 375 | 376 | d) Limiting the use for publicity purposes of names of licensors or 377 | authors of the material; or 378 | 379 | e) Declining to grant rights under trademark law for use of some 380 | trade names, trademarks, or service marks; or 381 | 382 | f) Requiring indemnification of licensors and authors of that 383 | material by anyone who conveys the material (or modified versions of 384 | it) with contractual assumptions of liability to the recipient, for 385 | any liability that these contractual assumptions directly impose on 386 | those licensors and authors. 387 | 388 | All other non-permissive additional terms are considered "further 389 | restrictions" within the meaning of section 10. If the Program as you 390 | received it, or any part of it, contains a notice stating that it is 391 | governed by this License along with a term that is a further 392 | restriction, you may remove that term. If a license document contains 393 | a further restriction but permits relicensing or conveying under this 394 | License, you may add to a covered work material governed by the terms 395 | of that license document, provided that the further restriction does 396 | not survive such relicensing or conveying. 397 | 398 | If you add terms to a covered work in accord with this section, you 399 | must place, in the relevant source files, a statement of the 400 | additional terms that apply to those files, or a notice indicating 401 | where to find the applicable terms. 402 | 403 | Additional terms, permissive or non-permissive, may be stated in the 404 | form of a separately written license, or stated as exceptions; 405 | the above requirements apply either way. 406 | 407 | 8. Termination. 408 | 409 | You may not propagate or modify a covered work except as expressly 410 | provided under this License. Any attempt otherwise to propagate or 411 | modify it is void, and will automatically terminate your rights under 412 | this License (including any patent licenses granted under the third 413 | paragraph of section 11). 414 | 415 | However, if you cease all violation of this License, then your 416 | license from a particular copyright holder is reinstated (a) 417 | provisionally, unless and until the copyright holder explicitly and 418 | finally terminates your license, and (b) permanently, if the copyright 419 | holder fails to notify you of the violation by some reasonable means 420 | prior to 60 days after the cessation. 
421 | 422 | Moreover, your license from a particular copyright holder is 423 | reinstated permanently if the copyright holder notifies you of the 424 | violation by some reasonable means, this is the first time you have 425 | received notice of violation of this License (for any work) from that 426 | copyright holder, and you cure the violation prior to 30 days after 427 | your receipt of the notice. 428 | 429 | Termination of your rights under this section does not terminate the 430 | licenses of parties who have received copies or rights from you under 431 | this License. If your rights have been terminated and not permanently 432 | reinstated, you do not qualify to receive new licenses for the same 433 | material under section 10. 434 | 435 | 9. Acceptance Not Required for Having Copies. 436 | 437 | You are not required to accept this License in order to receive or 438 | run a copy of the Program. Ancillary propagation of a covered work 439 | occurring solely as a consequence of using peer-to-peer transmission 440 | to receive a copy likewise does not require acceptance. However, 441 | nothing other than this License grants you permission to propagate or 442 | modify any covered work. These actions infringe copyright if you do 443 | not accept this License. Therefore, by modifying or propagating a 444 | covered work, you indicate your acceptance of this License to do so. 445 | 446 | 10. Automatic Licensing of Downstream Recipients. 447 | 448 | Each time you convey a covered work, the recipient automatically 449 | receives a license from the original licensors, to run, modify and 450 | propagate that work, subject to this License. You are not responsible 451 | for enforcing compliance by third parties with this License. 452 | 453 | An "entity transaction" is a transaction transferring control of an 454 | organization, or substantially all assets of one, or subdividing an 455 | organization, or merging organizations. If propagation of a covered 456 | work results from an entity transaction, each party to that 457 | transaction who receives a copy of the work also receives whatever 458 | licenses to the work the party's predecessor in interest had or could 459 | give under the previous paragraph, plus a right to possession of the 460 | Corresponding Source of the work from the predecessor in interest, if 461 | the predecessor has it or can get it with reasonable efforts. 462 | 463 | You may not impose any further restrictions on the exercise of the 464 | rights granted or affirmed under this License. For example, you may 465 | not impose a license fee, royalty, or other charge for exercise of 466 | rights granted under this License, and you may not initiate litigation 467 | (including a cross-claim or counterclaim in a lawsuit) alleging that 468 | any patent claim is infringed by making, using, selling, offering for 469 | sale, or importing the Program or any portion of it. 470 | 471 | 11. Patents. 472 | 473 | A "contributor" is a copyright holder who authorizes use under this 474 | License of the Program or a work on which the Program is based. The 475 | work thus licensed is called the contributor's "contributor version". 
476 | 477 | A contributor's "essential patent claims" are all patent claims 478 | owned or controlled by the contributor, whether already acquired or 479 | hereafter acquired, that would be infringed by some manner, permitted 480 | by this License, of making, using, or selling its contributor version, 481 | but do not include claims that would be infringed only as a 482 | consequence of further modification of the contributor version. For 483 | purposes of this definition, "control" includes the right to grant 484 | patent sublicenses in a manner consistent with the requirements of 485 | this License. 486 | 487 | Each contributor grants you a non-exclusive, worldwide, royalty-free 488 | patent license under the contributor's essential patent claims, to 489 | make, use, sell, offer for sale, import and otherwise run, modify and 490 | propagate the contents of its contributor version. 491 | 492 | In the following three paragraphs, a "patent license" is any express 493 | agreement or commitment, however denominated, not to enforce a patent 494 | (such as an express permission to practice a patent or covenant not to 495 | sue for patent infringement). To "grant" such a patent license to a 496 | party means to make such an agreement or commitment not to enforce a 497 | patent against the party. 498 | 499 | If you convey a covered work, knowingly relying on a patent license, 500 | and the Corresponding Source of the work is not available for anyone 501 | to copy, free of charge and under the terms of this License, through a 502 | publicly available network server or other readily accessible means, 503 | then you must either (1) cause the Corresponding Source to be so 504 | available, or (2) arrange to deprive yourself of the benefit of the 505 | patent license for this particular work, or (3) arrange, in a manner 506 | consistent with the requirements of this License, to extend the patent 507 | license to downstream recipients. "Knowingly relying" means you have 508 | actual knowledge that, but for the patent license, your conveying the 509 | covered work in a country, or your recipient's use of the covered work 510 | in a country, would infringe one or more identifiable patents in that 511 | country that you have reason to believe are valid. 512 | 513 | If, pursuant to or in connection with a single transaction or 514 | arrangement, you convey, or propagate by procuring conveyance of, a 515 | covered work, and grant a patent license to some of the parties 516 | receiving the covered work authorizing them to use, propagate, modify 517 | or convey a specific copy of the covered work, then the patent license 518 | you grant is automatically extended to all recipients of the covered 519 | work and works based on it. 520 | 521 | A patent license is "discriminatory" if it does not include within 522 | the scope of its coverage, prohibits the exercise of, or is 523 | conditioned on the non-exercise of one or more of the rights that are 524 | specifically granted under this License. 
You may not convey a covered 525 | work if you are a party to an arrangement with a third party that is 526 | in the business of distributing software, under which you make payment 527 | to the third party based on the extent of your activity of conveying 528 | the work, and under which the third party grants, to any of the 529 | parties who would receive the covered work from you, a discriminatory 530 | patent license (a) in connection with copies of the covered work 531 | conveyed by you (or copies made from those copies), or (b) primarily 532 | for and in connection with specific products or compilations that 533 | contain the covered work, unless you entered into that arrangement, 534 | or that patent license was granted, prior to 28 March 2007. 535 | 536 | Nothing in this License shall be construed as excluding or limiting 537 | any implied license or other defenses to infringement that may 538 | otherwise be available to you under applicable patent law. 539 | 540 | 12. No Surrender of Others' Freedom. 541 | 542 | If conditions are imposed on you (whether by court order, agreement or 543 | otherwise) that contradict the conditions of this License, they do not 544 | excuse you from the conditions of this License. If you cannot convey a 545 | covered work so as to satisfy simultaneously your obligations under this 546 | License and any other pertinent obligations, then as a consequence you may 547 | not convey it at all. For example, if you agree to terms that obligate you 548 | to collect a royalty for further conveying from those to whom you convey 549 | the Program, the only way you could satisfy both those terms and this 550 | License would be to refrain entirely from conveying the Program. 551 | 552 | 13. Use with the GNU Affero General Public License. 553 | 554 | Notwithstanding any other provision of this License, you have 555 | permission to link or combine any covered work with a work licensed 556 | under version 3 of the GNU Affero General Public License into a single 557 | combined work, and to convey the resulting work. The terms of this 558 | License will continue to apply to the part which is the covered work, 559 | but the special requirements of the GNU Affero General Public License, 560 | section 13, concerning interaction through a network will apply to the 561 | combination as such. 562 | 563 | 14. Revised Versions of this License. 564 | 565 | The Free Software Foundation may publish revised and/or new versions of 566 | the GNU General Public License from time to time. Such new versions will 567 | be similar in spirit to the present version, but may differ in detail to 568 | address new problems or concerns. 569 | 570 | Each version is given a distinguishing version number. If the 571 | Program specifies that a certain numbered version of the GNU General 572 | Public License "or any later version" applies to it, you have the 573 | option of following the terms and conditions either of that numbered 574 | version or of any later version published by the Free Software 575 | Foundation. If the Program does not specify a version number of the 576 | GNU General Public License, you may choose any version ever published 577 | by the Free Software Foundation. 578 | 579 | If the Program specifies that a proxy can decide which future 580 | versions of the GNU General Public License can be used, that proxy's 581 | public statement of acceptance of a version permanently authorizes you 582 | to choose that version for the Program. 
583 | 584 | Later license versions may give you additional or different 585 | permissions. However, no additional obligations are imposed on any 586 | author or copyright holder as a result of your choosing to follow a 587 | later version. 588 | 589 | 15. Disclaimer of Warranty. 590 | 591 | THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY 592 | APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT 593 | HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY 594 | OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, 595 | THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR 596 | PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM 597 | IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF 598 | ALL NECESSARY SERVICING, REPAIR OR CORRECTION. 599 | 600 | 16. Limitation of Liability. 601 | 602 | IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING 603 | WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS 604 | THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY 605 | GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE 606 | USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF 607 | DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD 608 | PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), 609 | EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF 610 | SUCH DAMAGES. 611 | 612 | 17. Interpretation of Sections 15 and 16. 613 | 614 | If the disclaimer of warranty and limitation of liability provided 615 | above cannot be given local legal effect according to their terms, 616 | reviewing courts shall apply local law that most closely approximates 617 | an absolute waiver of all civil liability in connection with the 618 | Program, unless a warranty or assumption of liability accompanies a 619 | copy of the Program in return for a fee. 620 | 621 | END OF TERMS AND CONDITIONS 622 | 623 | How to Apply These Terms to Your New Programs 624 | 625 | If you develop a new program, and you want it to be of the greatest 626 | possible use to the public, the best way to achieve this is to make it 627 | free software which everyone can redistribute and change under these terms. 628 | 629 | To do so, attach the following notices to the program. It is safest 630 | to attach them to the start of each source file to most effectively 631 | state the exclusion of warranty; and each file should have at least 632 | the "copyright" line and a pointer to where the full notice is found. 633 | 634 | 635 | Copyright (C) 636 | 637 | This program is free software: you can redistribute it and/or modify 638 | it under the terms of the GNU General Public License as published by 639 | the Free Software Foundation, either version 3 of the License, or 640 | (at your option) any later version. 641 | 642 | This program is distributed in the hope that it will be useful, 643 | but WITHOUT ANY WARRANTY; without even the implied warranty of 644 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 645 | GNU General Public License for more details. 646 | 647 | You should have received a copy of the GNU General Public License 648 | along with this program. If not, see . 649 | 650 | Also add information on how to contact you by electronic and paper mail. 
651 | 652 | If the program does terminal interaction, make it output a short 653 | notice like this when it starts in an interactive mode: 654 | 655 | Copyright (C) 656 | This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'. 657 | This is free software, and you are welcome to redistribute it 658 | under certain conditions; type `show c' for details. 659 | 660 | The hypothetical commands `show w' and `show c' should show the appropriate 661 | parts of the General Public License. Of course, your program's commands 662 | might be different; for a GUI interface, you would use an "about box". 663 | 664 | You should also get your employer (if you work as a programmer) or school, 665 | if any, to sign a "copyright disclaimer" for the program, if necessary. 666 | For more information on this, and how to apply and follow the GNU GPL, see 667 | . 668 | 669 | The GNU General Public License does not permit incorporating your program 670 | into proprietary programs. If your program is a subroutine library, you 671 | may consider it more useful to permit linking proprietary applications with 672 | the library. If this is what you want to do, use the GNU Lesser General 673 | Public License instead of this License. But first, please read 674 | . 675 | -------------------------------------------------------------------------------- /MANIFEST.in: -------------------------------------------------------------------------------- 1 | include versioneer.py 2 | include ptplot/_version.py 3 | include ptplot/pick.ts 4 | 5 | include environment.yml 6 | include environment_minimum_requirements.yml 7 | include parse_envs.py 8 | 9 | include AUTHORS.rst 10 | include CONTRIBUTING.rst 11 | include LICENSE 12 | include README.md -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # ptplot 2 | `ptplot` makes it easy to turn player-tracking data into beautiful, 3 | interactive visualizations — including animations! These visualizations can be used to guide 4 | data exploration/analysis work, or to embed in webpages to share with 5 | the world. 
6 | 7 | ![example animation](ptplot.gif) 8 | 9 | ## Installation 10 | 11 | `ptplot` can be installed via pip: 12 | 13 | ```bash 14 | $ pip install ptplot 15 | ``` 16 | 17 | It is strongly recommended that you install `ptplot` into a virtual 18 | environment, such as with [`conda`](https://docs.conda.io/en/latest/): 19 | 20 | ```bash 21 | [After installing conda] 22 | $ conda create -n player_tracking python=3 23 | $ conda activate player_tracking 24 | $ pip install ptplot 25 | ``` 26 | 27 | You may wish to install some of `ptplot`'s dependencies 28 | via conda, specifically `pandas` and `bokeh`: 29 | 30 | ```bash 31 | [After installing conda] 32 | $ conda create -n player_tracking python=3 pandas bokeh 33 | $ conda activate player_tracking 34 | $ pip install ptplot 35 | ``` 36 | 37 | ## Getting Started 38 | 39 | Making your first plot can be as simple as 40 | 41 | ```python 42 | import pandas as pd 43 | 44 | from bokeh.plotting import show 45 | 46 | from ptplot import PTPlot 47 | from ptplot.nfl import Field 48 | from ptplot.plot import Positions 49 | 50 | data = pd.read_csv("YOUR PLAYER TRACKING DATA") 51 | plot = PTPlot(data) + Field() + Positions("X_COORDINATE_COLUMN", "Y_COORDINATE_COLUMN") 52 | show(plot.draw()) 53 | ``` 54 | 55 | For additional documentation and examples, check out the 56 | notebooks in the `notebooks/` directory, which can be viewed 57 | online with all of the plots correctly rendered via nbviewer: 58 | 1. [Basic Plots](https://nbviewer.jupyter.org/github/AndrewRook/ptplot/blob/main/notebooks/1-Basic_Plots.ipynb) 59 | 2. [Animations](https://nbviewer.jupyter.org/github/AndrewRook/ptplot/blob/main/notebooks/2-Animations.ipynb) 60 | 3. [Faceting](https://nbviewer.jupyter.org/github/AndrewRook/ptplot/blob/main/notebooks/3-Faceting.ipynb) 61 | 4. [Tips and Tricks](https://nbviewer.jupyter.org/github/AndrewRook/ptplot/blob/main/notebooks/4-Tips_and_Tricks.ipynb) 62 | 63 | Additionally, layers within `ptplot` have docstrings with 64 | more usage details. Those can be accessed either by reading the 65 | source code or running `help([FUNCTION])` inside of Python. 66 | 67 | ## Development Docs 68 | 69 | See [here](development.md) -------------------------------------------------------------------------------- /development.md: -------------------------------------------------------------------------------- 1 | # Developing on `ptplot` 2 | 3 | ## Installation 4 | 5 | Install either with the conda 6 | environment file (strongly recommended) or via pip using the `[dev]` extras: 7 | ```bash 8 | --This uses ssh to clone the repo, feel free to use your protocol of choice-- 9 | $ git clone git@github.com:AndrewRook/ptplot.git 10 | $ cd ptplot 11 | $ conda env create -f environment.yml 12 | $ conda activate ptplot-dev 13 | $ pip install -e . 14 | --OR (Note that you will need to separately install nodejs)-- 15 | $ pip install -e .[dev] 16 | ``` 17 | 18 | ## Running tests and style checks 19 | 20 | `ptplot` uses `pytest`, `flake8`, `mypy`, and `black`. All of these must 21 | pass in order for a PR to be merged, so it's valuable to run them 22 | yourself locally before pushing changes: 23 | 24 | ```bash 25 | $ python -m mypy ptplot/ 26 | $ python -m pytest tests/ ptplot/ 27 | $ python -m black -l 120 ptplot/ 28 | $ python -m flake8 ptplot/ 29 | ``` 30 | 31 | ## Notebooks 32 | 33 | `ptplot`'s primary form of documentation is currently Jupyter 34 | notebooks. Unfortunately, due to how plotly renders animations, 35 | the animation notebook is quite large in size. 
Whenever you 36 | work with `ptplot` animations in the notebook, please check the 37 | size of the resulting notebook **before** committing it to the repo. 38 | 39 | _Note: When you make an update to any of the custom extensions, you may 40 | need to [force-reload the notebook pages](https://support.google.com/chrome/thread/16531954/clear-cache-for-specific-website-in-google-chrome?hl=en) in order to clear the cache and 41 | make Jupyter look for the new JS files._ 42 | 43 | 44 | ## Cutting a release 45 | 46 | 1. Make sure that all changes you want have been merged to `main`. 47 | 2. Rerun all the notebooks and make sure they still work. If the 48 | results have changed in any way, make sure that's reflected in the 49 | versions that are in `main`. If not, rerun them and PR those updates 50 | in. 51 | 3. In GitHub, create a [new release](https://github.com/AndrewRook/ptplot/releases/new) 52 | based on the `main` branch. 53 | Give it a version tag that makes sense. `ptplot` uses [semantic 54 | versioning](https://semver.org/), with no "v" preceding the 55 | version numbers. 56 | 4. Go to the Actions tab for the repo and confirm that the publish 57 | action runs successfully (takes a couple of minutes). -------------------------------------------------------------------------------- /environment.yml: -------------------------------------------------------------------------------- 1 | name: ptplot-dev 2 | channels: 3 | - conda-forge 4 | - defaults 5 | dependencies: 6 | - black 7 | - bokeh 8 | - flake8 9 | - mypy 10 | - nodejs 11 | - notebook 12 | - numpy 13 | - pandas 14 | - patsy 15 | - pip 16 | - pytest 17 | - pytest-cov 18 | - tox 19 | - pip: 20 | - svgpathtools 21 | -------------------------------------------------------------------------------- /environment_minimum_requirements.yml: -------------------------------------------------------------------------------- 1 | name: ptplot-dev-minreqs 2 | channels: 3 | - conda-forge 4 | - defaults 5 | dependencies: 6 | - black==20.8b1 7 | - bokeh==2.3.3 8 | - flake8==3.8.4 9 | - mypy==0.910 10 | - nodejs==14.17.1 11 | - notebook==6.2.0 12 | - numpy==1.19.5 13 | - pandas==1.2.0 14 | - patsy==0.5.1 15 | - pip==20.3.3 16 | - pytest==6.2.1 17 | - pytest-cov==2.11.1 18 | - tox==3.21.4 19 | - pip: 20 | - svgpathtools==1.4.1 21 | -------------------------------------------------------------------------------- /flip_number_paths.py: -------------------------------------------------------------------------------- 1 | from svgpathtools import parse_path, Path, Line, Arc, QuadraticBezier 2 | 3 | # These numbers came from a text to svg converter 4 | # (Alice font, https://danmarshall.github.io/google-font-to-svg-path/) 5 | # That was designed for SVGs where 0 is the upper left instead of the bottom left. 6 | # This script just flips them - I couldn't use Path.scaled() because some of the 7 | # paths are Arcs which are unsupported. 
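# The flip applied by flip_path() further down is a reflection of each path about the
# horizontal midline of its own bounding box:
#     y_flipped = (max_y + min_y) - y
# applied to the start/control/end point of every segment. Arc segments additionally
# get their sweep flag inverted and their rotation angle adjusted so the arc bows the
# right way after the reflection.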
8 | upside_down_one = parse_path( 9 | "M 0.713 2.01 L 0.713 0.97 A 3.257 3.257 0 0 0 0.714 0.901 A 1.126 1.126 0 0 0 0.691 0.633 " 10 | "Q 0.663 0.53 0.593 0.5 Q 0.523 0.47 0.363 0.47 L 0.118 0.47 A 0.095 0.095 0 0 1 0.028 0.35 " 11 | "Q 0.033 0.275 0.058 0.258 Q 0.083 0.24 0.121 0.24 L 0.173 0.24 Q 0.383 0.24 0.583 0.173 " 12 | "Q 0.783 0.105 1.018 0.01 Q 1.048 0 1.068 0 Q 1.148 0 1.148 0.115 L 1.148 2.01 " 13 | "Q 1.148 2.23 1.201 2.325 Q 1.253 2.42 1.366 2.445 Q 1.478 2.47 1.728 2.47 L 1.778 2.47 " 14 | "Q 1.818 2.47 1.836 2.49 Q 1.853 2.51 1.858 2.57 Q 1.858 2.69 1.768 2.69 " 15 | "Q 1.408 2.675 0.928 2.675 Q 0.453 2.675 0.093 2.69 A 0.095 0.095 0 0 1 0.003 2.57 " 16 | "Q 0.008 2.51 0.026 2.49 Q 0.043 2.47 0.083 2.47 Q 0.363 2.47 0.481 2.448 " 17 | "Q 0.598 2.425 0.656 2.33 Q 0.713 2.235 0.713 2.01 Z" 18 | ) 19 | upside_down_two = parse_path( 20 | "M 0.447 2.27 L 0.447 2.295 L 1.787 2.295 A 0.449 0.449 0 0 1 1.969 2.323 " 21 | "Q 2.027 2.35 2.027 2.47 Q 2.027 2.65 1.852 2.65 L 0.177 2.65 Q 0.002 2.65 0.002 2.47 " 22 | "Q 0.007 2.365 0.059 2.3 Q 0.112 2.235 0.232 2.155 Q 0.627 1.89 0.844 1.718 " 23 | "Q 1.062 1.545 1.237 1.28 Q 1.412 1.015 1.412 0.685 Q 1.412 0.445 1.284 0.313 " 24 | "Q 1.157 0.18 0.927 0.18 Q 0.732 0.18 0.604 0.278 Q 0.477 0.375 0.477 0.545 " 25 | "Q 0.477 0.63 0.517 0.688 Q 0.557 0.745 0.599 0.78 Q 0.642 0.815 0.642 0.82 " 26 | "Q 0.642 0.855 0.574 0.905 Q 0.507 0.955 0.412 0.955 Q 0.257 0.955 0.164 0.858 " 27 | "Q 0.072 0.76 0.072 0.61 Q 0.072 0.41 0.207 0.27 Q 0.342 0.13 0.549 0.065 " 28 | "Q 0.757 0 0.972 0 Q 1.347 0 1.619 0.185 Q 1.892 0.37 1.892 0.75 Q 1.892 1.025 1.742 1.25 " 29 | "Q 1.592 1.475 1.384 1.633 Q 1.177 1.79 0.842 2.005 Q 0.577 2.17 0.447 2.27 Z" 30 | ) 31 | upside_down_three = parse_path( 32 | "M 0.9 1.426 L 0.715 1.426 Q 0.68 1.426 0.66 1.396 Q 0.64 1.366 0.64 1.326 " 33 | "Q 0.64 1.281 0.66 1.249 Q 0.68 1.216 0.715 1.211 L 0.9 1.211 Q 1.095 1.191 1.213 1.036 " 34 | "Q 1.33 0.881 1.33 0.661 Q 1.33 0.421 1.195 0.294 Q 1.06 0.166 0.88 0.166 " 35 | "Q 0.71 0.166 0.588 0.264 Q 0.465 0.361 0.465 0.531 Q 0.465 0.641 0.508 0.679 " 36 | "Q 0.55 0.716 0.59 0.724 Q 0.63 0.731 0.63 0.736 Q 0.63 0.771 0.563 0.821 " 37 | "Q 0.495 0.871 0.4 0.871 Q 0.24 0.871 0.15 0.771 Q 0.06 0.671 0.06 0.516 " 38 | "Q 0.06 0.271 0.29 0.136 Q 0.52 0.001 0.885 0.001 Q 1.275 0.001 1.53 0.171 " 39 | "Q 1.785 0.341 1.785 0.661 Q 1.785 0.871 1.638 1.051 Q 1.49 1.231 1.215 1.306 " 40 | "L 1.215 1.326 Q 1.495 1.356 1.7 1.504 Q 1.905 1.651 1.905 1.971 Q 1.905 2.376 1.61 2.569 " 41 | "Q 1.315 2.761 0.87 2.761 Q 0.505 2.761 0.253 2.619 Q 0 2.476 0 2.201 Q 0 2.056 0.09 1.951 " 42 | "Q 0.18 1.846 0.34 1.846 Q 0.435 1.846 0.503 1.896 Q 0.57 1.946 0.57 1.981 " 43 | "Q 0.57 1.986 0.525 2.019 Q 0.48 2.051 0.443 2.106 Q 0.405 2.161 0.405 2.246 " 44 | "Q 0.405 2.401 0.538 2.489 Q 0.67 2.576 0.885 2.576 Q 1.42 2.576 1.42 1.981 " 45 | "Q 1.42 1.736 1.29 1.589 Q 1.16 1.441 0.9 1.426 Z" 46 | ) 47 | upside_down_four = parse_path( 48 | "M 1.355 2.636 L 1.355 1.871 L 0.16 1.871 Q 0.08 1.871 0.04 1.821 Q 0 1.771 0 1.696 " 49 | "Q 0 1.631 0.035 1.586 L 1.44 0.051 Q 1.49 0.001 1.595 0.001 Q 1.675 0.001 1.73 0.043 " 50 | "Q 1.785 0.086 1.79 0.176 L 1.79 1.626 L 2.09 1.626 Q 2.16 1.626 2.19 1.643 " 51 | "Q 2.22 1.661 2.225 1.746 Q 2.225 1.801 2.203 1.836 Q 2.18 1.871 2.13 1.871 " 52 | "L 1.79 1.871 L 1.79 2.636 Q 1.79 2.681 1.725 2.706 Q 1.66 2.731 1.575 2.731 " 53 | "Q 1.49 2.731 1.425 2.706 Q 1.36 2.681 1.355 2.636 Z M 1.355 0.441 L 0.29 1.626 " 54 | "L 1.355 1.626 L 1.355 0.441 Z" 55 | ) 56 | 
upside_down_five = parse_path( 57 | "M 0.135 1.25 L 0.14 1.19 L 0.295 0 L 1.67 0 Q 1.74 0 1.783 0.045 Q 1.825 0.09 1.825 0.17 " 58 | "Q 1.825 0.355 1.635 0.355 L 0.525 0.355 L 0.42 1.05 Q 0.64 0.915 0.96 0.915 " 59 | "Q 1.175 0.915 1.393 1.01 Q 1.61 1.105 1.755 1.3 Q 1.9 1.495 1.9 1.785 Q 1.9 2.075 1.758 2.283 " 60 | "Q 1.615 2.49 1.378 2.595 Q 1.14 2.7 0.86 2.7 Q 0.49 2.7 0.245 2.565 Q 0 2.43 0 2.145 " 61 | "Q 0 1.99 0.093 1.873 Q 0.185 1.755 0.34 1.755 Q 0.435 1.755 0.503 1.805 Q 0.57 1.855 0.57 1.89 " 62 | "Q 0.57 1.895 0.525 1.935 Q 0.48 1.975 0.443 2.033 Q 0.405 2.09 0.405 2.165 " 63 | "Q 0.405 2.335 0.528 2.425 Q 0.65 2.515 0.87 2.515 Q 1.145 2.515 1.283 2.32 " 64 | "Q 1.42 2.125 1.42 1.81 Q 1.42 1.52 1.28 1.328 Q 1.14 1.135 0.845 1.135 " 65 | "Q 0.71 1.135 0.613 1.178 Q 0.515 1.22 0.405 1.3 Q 0.34 1.345 0.298 1.368 " 66 | "Q 0.255 1.39 0.215 1.39 Q 0.135 1.39 0.135 1.25 Z" 67 | ) 68 | 69 | 70 | def flip_path(upside_down_path): 71 | path = [] 72 | _, _, min_y, max_y = upside_down_path.bbox() 73 | offset = max_y + min_y 74 | for segment in upside_down_path._segments: 75 | if type(segment) is Line: 76 | path.append(Line( 77 | complex(segment.start.real, -segment.start.imag + offset), 78 | complex(segment.end.real, -segment.end.imag + offset) 79 | 80 | )) 81 | elif type(segment) is Arc: 82 | path.append(Arc( 83 | complex(segment.start.real, -segment.start.imag + offset), 84 | segment.radius, 85 | abs(180 - segment.rotation), 86 | segment.large_arc, 87 | not segment.sweep, 88 | complex(segment.end.real, -segment.end.imag + offset) 89 | )) 90 | elif type(segment) is QuadraticBezier: 91 | path.append(QuadraticBezier( 92 | complex(segment.start.real, -segment.start.imag + offset), 93 | complex(segment.control.real, -segment.control.imag + offset), 94 | complex(segment.end.real, -segment.end.imag + offset) 95 | 96 | )) 97 | else: 98 | raise ValueError(f"Unknown type: {type(segment)}") 99 | 100 | return Path(*path) 101 | 102 | 103 | if __name__ == "__main__": 104 | print("one") 105 | print(flip_path(upside_down_one).d()) 106 | print("\ntwo") 107 | print(flip_path(upside_down_two).d()) 108 | print("\nthree") 109 | print(flip_path(upside_down_three).d()) 110 | print("\nfour") 111 | print(flip_path(upside_down_four).d()) 112 | print("\nfive") 113 | print(flip_path(upside_down_five).d()) 114 | -------------------------------------------------------------------------------- /mypy.ini: -------------------------------------------------------------------------------- 1 | [mypy] 2 | ignore_missing_imports = True 3 | strict_optional = True 4 | warn_redundant_casts = True 5 | warn_unused_ignores = True 6 | disallow_any_generics = True 7 | disallow_untyped_defs = True 8 | exclude = ptplot/_version.py 9 | -------------------------------------------------------------------------------- /parse_envs.py: -------------------------------------------------------------------------------- 1 | import re 2 | 3 | from dataclasses import dataclass 4 | from typing import Union 5 | 6 | 7 | @dataclass 8 | class Package: 9 | name: str 10 | dependency_kind: Union[str, None] = None 11 | dependency_version: Union[str, None] = None 12 | 13 | 14 | def parse_conda_envs(min_env_name, max_env_name, optional_packages=None, package_modifiers=None): 15 | if optional_packages is None: 16 | optional_packages = {} 17 | if package_modifiers is None: 18 | package_modifiers = {} 19 | 20 | # Parse out the min and max envs 21 | min_env_dependencies = _parse_conda_env_file(min_env_name) 22 | max_env_dependencies = 
_parse_conda_env_file(max_env_name) 23 | 24 | # Check that all the minimum env specifications are '==' 25 | num_not_equal = sum([ 26 | min_dep.dependency_kind != "==" 27 | for min_dep in min_env_dependencies 28 | ]) 29 | if num_not_equal != 0: 30 | raise ValueError("All minimum environments should be explicitly pinned with '=='") 31 | 32 | # Check that envs have same number of packages 33 | if len(min_env_dependencies) != len(max_env_dependencies): 34 | raise IndexError("Environments have different number of dependencies") 35 | 36 | # Generate the initial dependency strings for pip 37 | required_dependencies = [] 38 | optional_dependencies = { 39 | package: [] for package in optional_packages 40 | } 41 | for min_dependency, max_dependency in zip(min_env_dependencies, max_env_dependencies): 42 | # Check that envs have same packages 43 | if min_dependency.name != max_dependency.name: 44 | raise ValueError("Environments have different dependencies") 45 | # Determine if an optional dependency 46 | extra_requires_names = [ 47 | key for key, package_names in optional_packages.items() 48 | if min_dependency.name in package_names 49 | ] 50 | # Apply any modifiers (e.g. [dev]) 51 | dependency_name = min_dependency.name + ( 52 | "" if min_dependency.name not in package_modifiers 53 | else f"[{package_modifiers[min_dependency.name]}]" 54 | ) 55 | 56 | # Figure out what the allowed versions are: 57 | dependency_version = f">={min_dependency.dependency_version}" 58 | if max_dependency.dependency_kind is not None: 59 | max_dependency_kind = "<" if max_dependency.dependency_kind == "<" else "<=" 60 | dependency_version += f",{max_dependency_kind}{max_dependency.dependency_version}" 61 | full_dependency_string = dependency_name + dependency_version 62 | if len(extra_requires_names) == 0: 63 | # Required dependency 64 | required_dependencies.append(full_dependency_string) 65 | else: 66 | # Optional dependency 67 | for name in extra_requires_names: 68 | optional_dependencies[name].append(full_dependency_string) 69 | 70 | # TODO: ensure that the optional dependencies are actually in the conda files 71 | 72 | return required_dependencies, optional_dependencies 73 | 74 | 75 | def _parse_conda_env_file(env_filename): 76 | with open(env_filename) as env_file: 77 | file_lines = env_file.readlines() 78 | # strip all spaces and carriage return, assume no tabs: 79 | file_lines = [line.strip().replace(" ", "") for line in file_lines] 80 | raw_dependencies = [] 81 | for i, line in enumerate(file_lines): 82 | if line == "dependencies:": 83 | # every line from here on that starts with '-' is a valid dependency 84 | for dependency_line in file_lines[i + 1:]: 85 | if dependency_line[0] != "-": 86 | break 87 | if dependency_line == "-pip:": 88 | continue 89 | split_dependency = re.match( 90 | r"-([^=><]+)(([=><]{1,2})([.\w]+$)|$)", 91 | # (package_name, _, dependency_kind|None, dependency_version|None) 92 | dependency_line 93 | ).groups() 94 | raw_dependencies.append( 95 | Package(split_dependency[0], split_dependency[2], split_dependency[3]) 96 | ) 97 | break 98 | return sorted(raw_dependencies, key=lambda x: x.name) 99 | -------------------------------------------------------------------------------- /ptplot.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AndrewRook/ptplot/8ab6e26f54069925b21a57a7eb5d3dee9a7fe147/ptplot.gif -------------------------------------------------------------------------------- /ptplot/__init__.py:
-------------------------------------------------------------------------------- 1 | from .ptplot import PTPlot # noqa: F401 2 | -------------------------------------------------------------------------------- /ptplot/_version.py: -------------------------------------------------------------------------------- 1 | # This file helps to compute a version number in source trees obtained from 2 | # git-archive tarball (such as those provided by githubs download-from-tag 3 | # feature). Distribution tarballs (built by setup.py sdist) and build 4 | # directories (produced by setup.py build) will contain a much shorter file 5 | # that just contains the computed version number. 6 | 7 | # This file is released into the public domain. Generated by 8 | # versioneer-0.19 (https://github.com/python-versioneer/python-versioneer) 9 | 10 | """Git implementation of _version.py.""" 11 | # flake8: noqa 12 | 13 | import errno 14 | import os 15 | import re 16 | import subprocess 17 | import sys 18 | 19 | 20 | def get_keywords(): 21 | """Get the keywords needed to look up the version information.""" 22 | # these strings will be replaced by git during git-archive. 23 | # setup.py/versioneer.py will grep for the variable names, so they must 24 | # each be defined on a line of their own. _version.py will just call 25 | # get_keywords(). 26 | git_refnames = " (HEAD -> main)" 27 | git_full = "8ab6e26f54069925b21a57a7eb5d3dee9a7fe147" 28 | git_date = "2021-09-20 19:11:03 -0400" 29 | keywords = {"refnames": git_refnames, "full": git_full, "date": git_date} 30 | return keywords 31 | 32 | 33 | class VersioneerConfig: 34 | """Container for Versioneer configuration parameters.""" 35 | 36 | 37 | def get_config(): 38 | """Create, populate and return the VersioneerConfig() object.""" 39 | # these strings are filled in when 'setup.py versioneer' creates 40 | # _version.py 41 | cfg = VersioneerConfig() 42 | cfg.VCS = "git" 43 | cfg.style = "pep440" 44 | cfg.tag_prefix = "" 45 | cfg.parentdir_prefix = "" 46 | cfg.versionfile_source = "ptplot/_version.py" 47 | cfg.verbose = False 48 | return cfg 49 | 50 | 51 | class NotThisMethod(Exception): 52 | """Exception raised if a method is not valid for the current scenario.""" 53 | 54 | 55 | LONG_VERSION_PY = {} 56 | HANDLERS = {} 57 | 58 | 59 | def register_vcs_handler(vcs, method): # decorator 60 | """Create decorator to mark a method as the handler of a VCS.""" 61 | 62 | def decorate(f): 63 | """Store f in HANDLERS[vcs][method].""" 64 | if vcs not in HANDLERS: 65 | HANDLERS[vcs] = {} 66 | HANDLERS[vcs][method] = f 67 | return f 68 | 69 | return decorate 70 | 71 | 72 | def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False, env=None): 73 | """Call the given command(s).""" 74 | assert isinstance(commands, list) 75 | p = None 76 | for c in commands: 77 | try: 78 | dispcmd = str([c] + args) 79 | # remember shell=False, so use git.cmd on windows, not just git 80 | p = subprocess.Popen( 81 | [c] + args, cwd=cwd, env=env, stdout=subprocess.PIPE, stderr=(subprocess.PIPE if hide_stderr else None) 82 | ) 83 | break 84 | except EnvironmentError: 85 | e = sys.exc_info()[1] 86 | if e.errno == errno.ENOENT: 87 | continue 88 | if verbose: 89 | print("unable to run %s" % dispcmd) 90 | print(e) 91 | return None, None 92 | else: 93 | if verbose: 94 | print("unable to find command, tried %s" % (commands,)) 95 | return None, None 96 | stdout = p.communicate()[0].strip().decode() 97 | if p.returncode != 0: 98 | if verbose: 99 | print("unable to run %s (error)" % dispcmd) 100 | 
print("stdout was %s" % stdout) 101 | return None, p.returncode 102 | return stdout, p.returncode 103 | 104 | 105 | def versions_from_parentdir(parentdir_prefix, root, verbose): 106 | """Try to determine the version from the parent directory name. 107 | 108 | Source tarballs conventionally unpack into a directory that includes both 109 | the project name and a version string. We will also support searching up 110 | two directory levels for an appropriately named parent directory 111 | """ 112 | rootdirs = [] 113 | 114 | for i in range(3): 115 | dirname = os.path.basename(root) 116 | if dirname.startswith(parentdir_prefix): 117 | return { 118 | "version": dirname[len(parentdir_prefix) :], 119 | "full-revisionid": None, 120 | "dirty": False, 121 | "error": None, 122 | "date": None, 123 | } 124 | else: 125 | rootdirs.append(root) 126 | root = os.path.dirname(root) # up a level 127 | 128 | if verbose: 129 | print("Tried directories %s but none started with prefix %s" % (str(rootdirs), parentdir_prefix)) 130 | raise NotThisMethod("rootdir doesn't start with parentdir_prefix") 131 | 132 | 133 | @register_vcs_handler("git", "get_keywords") 134 | def git_get_keywords(versionfile_abs): 135 | """Extract version information from the given file.""" 136 | # the code embedded in _version.py can just fetch the value of these 137 | # keywords. When used from setup.py, we don't want to import _version.py, 138 | # so we do it with a regexp instead. This function is not used from 139 | # _version.py. 140 | keywords = {} 141 | try: 142 | f = open(versionfile_abs, "r") 143 | for line in f.readlines(): 144 | if line.strip().startswith("git_refnames ="): 145 | mo = re.search(r'=\s*"(.*)"', line) 146 | if mo: 147 | keywords["refnames"] = mo.group(1) 148 | if line.strip().startswith("git_full ="): 149 | mo = re.search(r'=\s*"(.*)"', line) 150 | if mo: 151 | keywords["full"] = mo.group(1) 152 | if line.strip().startswith("git_date ="): 153 | mo = re.search(r'=\s*"(.*)"', line) 154 | if mo: 155 | keywords["date"] = mo.group(1) 156 | f.close() 157 | except EnvironmentError: 158 | pass 159 | return keywords 160 | 161 | 162 | @register_vcs_handler("git", "keywords") 163 | def git_versions_from_keywords(keywords, tag_prefix, verbose): 164 | """Get version information from git keywords.""" 165 | if not keywords: 166 | raise NotThisMethod("no keywords at all, weird") 167 | date = keywords.get("date") 168 | if date is not None: 169 | # Use only the last line. Previous lines may contain GPG signature 170 | # information. 171 | date = date.splitlines()[-1] 172 | 173 | # git-2.2.0 added "%cI", which expands to an ISO-8601 -compliant 174 | # datestamp. However we prefer "%ci" (which expands to an "ISO-8601 175 | # -like" string, which we must then edit to make compliant), because 176 | # it's been around since git-1.5.3, and it's too difficult to 177 | # discover which version we're using, or to work around using an 178 | # older one. 179 | date = date.strip().replace(" ", "T", 1).replace(" ", "", 1) 180 | refnames = keywords["refnames"].strip() 181 | if refnames.startswith("$Format"): 182 | if verbose: 183 | print("keywords are unexpanded, not using") 184 | raise NotThisMethod("unexpanded keywords, not a git-archive tarball") 185 | refs = set([r.strip() for r in refnames.strip("()").split(",")]) 186 | # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of 187 | # just "foo-1.0". If we see a "tag: " prefix, prefer those. 
188 | TAG = "tag: " 189 | tags = set([r[len(TAG) :] for r in refs if r.startswith(TAG)]) 190 | if not tags: 191 | # Either we're using git < 1.8.3, or there really are no tags. We use 192 | # a heuristic: assume all version tags have a digit. The old git %d 193 | # expansion behaves like git log --decorate=short and strips out the 194 | # refs/heads/ and refs/tags/ prefixes that would let us distinguish 195 | # between branches and tags. By ignoring refnames without digits, we 196 | # filter out many common branch names like "release" and 197 | # "stabilization", as well as "HEAD" and "master". 198 | tags = set([r for r in refs if re.search(r"\d", r)]) 199 | if verbose: 200 | print("discarding '%s', no digits" % ",".join(refs - tags)) 201 | if verbose: 202 | print("likely tags: %s" % ",".join(sorted(tags))) 203 | for ref in sorted(tags): 204 | # sorting will prefer e.g. "2.0" over "2.0rc1" 205 | if ref.startswith(tag_prefix): 206 | r = ref[len(tag_prefix) :] 207 | if verbose: 208 | print("picking %s" % r) 209 | return { 210 | "version": r, 211 | "full-revisionid": keywords["full"].strip(), 212 | "dirty": False, 213 | "error": None, 214 | "date": date, 215 | } 216 | # no suitable tags, so version is "0+unknown", but full hex is still there 217 | if verbose: 218 | print("no suitable tags, using unknown + full revision id") 219 | return { 220 | "version": "0+unknown", 221 | "full-revisionid": keywords["full"].strip(), 222 | "dirty": False, 223 | "error": "no suitable tags", 224 | "date": None, 225 | } 226 | 227 | 228 | @register_vcs_handler("git", "pieces_from_vcs") 229 | def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command): 230 | """Get version from 'git describe' in the root of the source tree. 231 | 232 | This only gets called if the git-archive 'subst' keywords were *not* 233 | expanded, and _version.py hasn't already been rewritten with a short 234 | version string, meaning we're inside a checked out source tree. 235 | """ 236 | GITS = ["git"] 237 | if sys.platform == "win32": 238 | GITS = ["git.cmd", "git.exe"] 239 | 240 | out, rc = run_command(GITS, ["rev-parse", "--git-dir"], cwd=root, hide_stderr=True) 241 | if rc != 0: 242 | if verbose: 243 | print("Directory %s not under git control" % root) 244 | raise NotThisMethod("'git rev-parse --git-dir' returned error") 245 | 246 | # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty] 247 | # if there isn't one, this yields HEX[-dirty] (no NUM) 248 | describe_out, rc = run_command( 249 | GITS, ["describe", "--tags", "--dirty", "--always", "--long", "--match", "%s*" % tag_prefix], cwd=root 250 | ) 251 | # --long was added in git-1.5.5 252 | if describe_out is None: 253 | raise NotThisMethod("'git describe' failed") 254 | describe_out = describe_out.strip() 255 | full_out, rc = run_command(GITS, ["rev-parse", "HEAD"], cwd=root) 256 | if full_out is None: 257 | raise NotThisMethod("'git rev-parse' failed") 258 | full_out = full_out.strip() 259 | 260 | pieces = {} 261 | pieces["long"] = full_out 262 | pieces["short"] = full_out[:7] # maybe improved later 263 | pieces["error"] = None 264 | 265 | # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty] 266 | # TAG might have hyphens. 
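    # For example (hypothetical values): a clean checkout three commits past tag "1.2.0"
    # might describe as "1.2.0-3-g8ab6e26", an untagged checkout as just "8ab6e26", and a
    # locally modified checkout would have "-dirty" appended to either form.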
267 | git_describe = describe_out 268 | 269 | # look for -dirty suffix 270 | dirty = git_describe.endswith("-dirty") 271 | pieces["dirty"] = dirty 272 | if dirty: 273 | git_describe = git_describe[: git_describe.rindex("-dirty")] 274 | 275 | # now we have TAG-NUM-gHEX or HEX 276 | 277 | if "-" in git_describe: 278 | # TAG-NUM-gHEX 279 | mo = re.search(r"^(.+)-(\d+)-g([0-9a-f]+)$", git_describe) 280 | if not mo: 281 | # unparseable. Maybe git-describe is misbehaving? 282 | pieces["error"] = "unable to parse git-describe output: '%s'" % describe_out 283 | return pieces 284 | 285 | # tag 286 | full_tag = mo.group(1) 287 | if not full_tag.startswith(tag_prefix): 288 | if verbose: 289 | fmt = "tag '%s' doesn't start with prefix '%s'" 290 | print(fmt % (full_tag, tag_prefix)) 291 | pieces["error"] = "tag '%s' doesn't start with prefix '%s'" % (full_tag, tag_prefix) 292 | return pieces 293 | pieces["closest-tag"] = full_tag[len(tag_prefix) :] 294 | 295 | # distance: number of commits since tag 296 | pieces["distance"] = int(mo.group(2)) 297 | 298 | # commit: short hex revision ID 299 | pieces["short"] = mo.group(3) 300 | 301 | else: 302 | # HEX: no tags 303 | pieces["closest-tag"] = None 304 | count_out, rc = run_command(GITS, ["rev-list", "HEAD", "--count"], cwd=root) 305 | pieces["distance"] = int(count_out) # total number of commits 306 | 307 | # commit date: see ISO-8601 comment in git_versions_from_keywords() 308 | date = run_command(GITS, ["show", "-s", "--format=%ci", "HEAD"], cwd=root)[0].strip() 309 | # Use only the last line. Previous lines may contain GPG signature 310 | # information. 311 | date = date.splitlines()[-1] 312 | pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1) 313 | 314 | return pieces 315 | 316 | 317 | def plus_or_dot(pieces): 318 | """Return a + if we don't already have one, else return a .""" 319 | if "+" in pieces.get("closest-tag", ""): 320 | return "." 321 | return "+" 322 | 323 | 324 | def render_pep440(pieces): 325 | """Build up version string, with post-release "local version identifier". 326 | 327 | Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you 328 | get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty 329 | 330 | Exceptions: 331 | 1: no tags. git_describe was just HEX. 0+untagged.DISTANCE.gHEX[.dirty] 332 | """ 333 | if pieces["closest-tag"]: 334 | rendered = pieces["closest-tag"] 335 | if pieces["distance"] or pieces["dirty"]: 336 | rendered += plus_or_dot(pieces) 337 | rendered += "%d.g%s" % (pieces["distance"], pieces["short"]) 338 | if pieces["dirty"]: 339 | rendered += ".dirty" 340 | else: 341 | # exception #1 342 | rendered = "0+untagged.%d.g%s" % (pieces["distance"], pieces["short"]) 343 | if pieces["dirty"]: 344 | rendered += ".dirty" 345 | return rendered 346 | 347 | 348 | def render_pep440_pre(pieces): 349 | """TAG[.post0.devDISTANCE] -- No -dirty. 350 | 351 | Exceptions: 352 | 1: no tags. 0.post0.devDISTANCE 353 | """ 354 | if pieces["closest-tag"]: 355 | rendered = pieces["closest-tag"] 356 | if pieces["distance"]: 357 | rendered += ".post0.dev%d" % pieces["distance"] 358 | else: 359 | # exception #1 360 | rendered = "0.post0.dev%d" % pieces["distance"] 361 | return rendered 362 | 363 | 364 | def render_pep440_post(pieces): 365 | """TAG[.postDISTANCE[.dev0]+gHEX] . 366 | 367 | The ".dev0" means dirty. Note that .dev0 sorts backwards 368 | (a dirty tree will appear "older" than the corresponding clean one), 369 | but you shouldn't be releasing software with -dirty anyways. 
370 | 371 | Exceptions: 372 | 1: no tags. 0.postDISTANCE[.dev0] 373 | """ 374 | if pieces["closest-tag"]: 375 | rendered = pieces["closest-tag"] 376 | if pieces["distance"] or pieces["dirty"]: 377 | rendered += ".post%d" % pieces["distance"] 378 | if pieces["dirty"]: 379 | rendered += ".dev0" 380 | rendered += plus_or_dot(pieces) 381 | rendered += "g%s" % pieces["short"] 382 | else: 383 | # exception #1 384 | rendered = "0.post%d" % pieces["distance"] 385 | if pieces["dirty"]: 386 | rendered += ".dev0" 387 | rendered += "+g%s" % pieces["short"] 388 | return rendered 389 | 390 | 391 | def render_pep440_old(pieces): 392 | """TAG[.postDISTANCE[.dev0]] . 393 | 394 | The ".dev0" means dirty. 395 | 396 | Exceptions: 397 | 1: no tags. 0.postDISTANCE[.dev0] 398 | """ 399 | if pieces["closest-tag"]: 400 | rendered = pieces["closest-tag"] 401 | if pieces["distance"] or pieces["dirty"]: 402 | rendered += ".post%d" % pieces["distance"] 403 | if pieces["dirty"]: 404 | rendered += ".dev0" 405 | else: 406 | # exception #1 407 | rendered = "0.post%d" % pieces["distance"] 408 | if pieces["dirty"]: 409 | rendered += ".dev0" 410 | return rendered 411 | 412 | 413 | def render_git_describe(pieces): 414 | """TAG[-DISTANCE-gHEX][-dirty]. 415 | 416 | Like 'git describe --tags --dirty --always'. 417 | 418 | Exceptions: 419 | 1: no tags. HEX[-dirty] (note: no 'g' prefix) 420 | """ 421 | if pieces["closest-tag"]: 422 | rendered = pieces["closest-tag"] 423 | if pieces["distance"]: 424 | rendered += "-%d-g%s" % (pieces["distance"], pieces["short"]) 425 | else: 426 | # exception #1 427 | rendered = pieces["short"] 428 | if pieces["dirty"]: 429 | rendered += "-dirty" 430 | return rendered 431 | 432 | 433 | def render_git_describe_long(pieces): 434 | """TAG-DISTANCE-gHEX[-dirty]. 435 | 436 | Like 'git describe --tags --dirty --always -long'. 437 | The distance/hash is unconditional. 438 | 439 | Exceptions: 440 | 1: no tags. HEX[-dirty] (note: no 'g' prefix) 441 | """ 442 | if pieces["closest-tag"]: 443 | rendered = pieces["closest-tag"] 444 | rendered += "-%d-g%s" % (pieces["distance"], pieces["short"]) 445 | else: 446 | # exception #1 447 | rendered = pieces["short"] 448 | if pieces["dirty"]: 449 | rendered += "-dirty" 450 | return rendered 451 | 452 | 453 | def render(pieces, style): 454 | """Render the given version pieces into the requested style.""" 455 | if pieces["error"]: 456 | return { 457 | "version": "unknown", 458 | "full-revisionid": pieces.get("long"), 459 | "dirty": None, 460 | "error": pieces["error"], 461 | "date": None, 462 | } 463 | 464 | if not style or style == "default": 465 | style = "pep440" # the default 466 | 467 | if style == "pep440": 468 | rendered = render_pep440(pieces) 469 | elif style == "pep440-pre": 470 | rendered = render_pep440_pre(pieces) 471 | elif style == "pep440-post": 472 | rendered = render_pep440_post(pieces) 473 | elif style == "pep440-old": 474 | rendered = render_pep440_old(pieces) 475 | elif style == "git-describe": 476 | rendered = render_git_describe(pieces) 477 | elif style == "git-describe-long": 478 | rendered = render_git_describe_long(pieces) 479 | else: 480 | raise ValueError("unknown style '%s'" % style) 481 | 482 | return { 483 | "version": rendered, 484 | "full-revisionid": pieces["long"], 485 | "dirty": pieces["dirty"], 486 | "error": None, 487 | "date": pieces.get("date"), 488 | } 489 | 490 | 491 | def get_versions(): 492 | """Get version information or return default if unable to do so.""" 493 | # I am in _version.py, which lives at ROOT/VERSIONFILE_SOURCE. 
If we have 494 | # __file__, we can work backwards from there to the root. Some 495 | # py2exe/bbfreeze/non-CPython implementations don't do __file__, in which 496 | # case we can only use expanded keywords. 497 | 498 | cfg = get_config() 499 | verbose = cfg.verbose 500 | 501 | try: 502 | return git_versions_from_keywords(get_keywords(), cfg.tag_prefix, verbose) 503 | except NotThisMethod: 504 | pass 505 | 506 | try: 507 | root = os.path.realpath(__file__) 508 | # versionfile_source is the relative path from the top of the source 509 | # tree (where the .git directory might live) to this file. Invert 510 | # this to find the root from __file__. 511 | for i in cfg.versionfile_source.split("/"): 512 | root = os.path.dirname(root) 513 | except NameError: 514 | return { 515 | "version": "0+unknown", 516 | "full-revisionid": None, 517 | "dirty": None, 518 | "error": "unable to find root of source tree", 519 | "date": None, 520 | } 521 | 522 | try: 523 | pieces = git_pieces_from_vcs(cfg.tag_prefix, root, verbose) 524 | return render(pieces, cfg.style) 525 | except NotThisMethod: 526 | pass 527 | 528 | try: 529 | if cfg.parentdir_prefix: 530 | return versions_from_parentdir(cfg.parentdir_prefix, root, verbose) 531 | except NotThisMethod: 532 | pass 533 | 534 | return { 535 | "version": "0+unknown", 536 | "full-revisionid": None, 537 | "dirty": None, 538 | "error": "unable to compute version", 539 | "date": None, 540 | } 541 | -------------------------------------------------------------------------------- /ptplot/animation.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | from typing import TYPE_CHECKING, Any, Callable, Sequence 4 | from bokeh.models import CustomJS, Slider, Toggle 5 | 6 | from ptplot.core import Layer 7 | 8 | 9 | if TYPE_CHECKING: 10 | import pandas as pd 11 | from bokeh.models import Widget 12 | 13 | 14 | class Animation(Layer): 15 | """ 16 | Animate a given visualization. 17 | 18 | Adding this layer will append a play/pause button and a slider below 19 | the visualization, and then automatically connect those buttons to the 20 | plot layers used (assuming the layers support animations). 21 | 22 | Parameters 23 | ---------- 24 | frame_mapping : The mapping used to determine the frame of the animation. 25 | frame_rate : The number of frames to display per second when using the play/pause 26 | button. 
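
    Examples
    --------
    A minimal sketch, not a tested recipe; the dataframe ``tracking_data`` and the
    ``"x"``, ``"y"``, and ``"frame"`` column names are hypothetical::

        from ptplot import PTPlot
        from ptplot.animation import Animation
        from ptplot.plot import Positions

        plot = (
            PTPlot(tracking_data)
            + Positions("x", "y")
            + Animation(frame_mapping="frame", frame_rate=10)
        )
        layout = plot.draw()  # layout now includes the play/pause button and slider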
27 | """ 28 | 29 | def __init__(self, frame_mapping: str, frame_rate: int): 30 | self.frame_mapping = frame_mapping 31 | self.frame_rate = frame_rate 32 | 33 | def get_mappings(self) -> Sequence[str]: 34 | return [self.frame_mapping] 35 | 36 | def animate( 37 | self, data: pd.DataFrame, layer_animations: Sequence[Callable[[str, Any], CustomJS]] 38 | ) -> Sequence[Widget]: 39 | min_frame = data[self.frame_mapping].min() 40 | max_frame = data[self.frame_mapping].max() 41 | play_pause = Toggle(label="► Play", active=False) 42 | slider = Slider(start=min_frame, end=max_frame, value=min_frame, step=1, title="Frame") 43 | play_pause_js = CustomJS( 44 | args={"slider": slider, "min_frame": min_frame, "max_frame": max_frame, "frame_rate": self.frame_rate}, 45 | code=""" 46 | var check_and_iterate = function(){ 47 | var slider_val = slider.value; 48 | var toggle_val = cb_obj.active; 49 | if(toggle_val == false) { 50 | cb_obj.label = '► Play'; 51 | clearInterval(play_pause_loop); 52 | } 53 | else if(slider_val == max_frame) { 54 | cb_obj.label = '► Play'; 55 | slider.value = min_frame; 56 | cb_obj.active = false; 57 | clearInterval(play_pause_loop); 58 | } 59 | else if(slider_val !== max_frame){ 60 | slider.value = slider_val + 1; 61 | } 62 | else { 63 | clearInterval(play_pause_loop); 64 | } 65 | } 66 | if(cb_obj.active == false){ 67 | cb_obj.label = '► Play'; 68 | clearInterval(play_pause_loop); 69 | } 70 | else { 71 | cb_obj.label = '❚❚ Pause'; 72 | var play_pause_loop = setInterval(check_and_iterate, 1000 / frame_rate); 73 | }; 74 | """, 75 | ) 76 | play_pause.js_on_change("active", play_pause_js) 77 | for animation in layer_animations: 78 | callback = animation(self.frame_mapping, min_frame) 79 | slider.js_on_change("value", callback) 80 | return [play_pause, slider] 81 | -------------------------------------------------------------------------------- /ptplot/callback.py: -------------------------------------------------------------------------------- 1 | # This can also be done via setting an IndexFilter view on the data and then updating the indices 2 | # of that filter. This is actually how it works in 0.2.0. 
However there is a bug when trying 3 | # to animate a single object (https://github.com/bokeh/bokeh/issues/11439) so for 4 | # the time being this is the best way to do it :/ 5 | FIND_CURRENT_FRAME = """ 6 | var data = source.data; 7 | var full_data = full_source.data; 8 | for (const column in data) { 9 | data[column] = []; 10 | for (let i = 0; i < full_data[frame_column].length; i++) { 11 | if (full_data[frame_column][i] == cb_obj.value) { 12 | data[column].push(full_data[column][i]); 13 | } 14 | // Assumes data is sorted by frame_column 15 | if (full_data[frame_column][i] > cb_obj.value) { 16 | break; 17 | } 18 | } 19 | } 20 | source.change.emit(); 21 | """ 22 | 23 | FIND_ALL_FRAMES_UP_TO_CURRENT_FRAME = """ 24 | var data = source.data; 25 | var full_data = full_source.data; 26 | for (const column in data) { 27 | data[column] = []; 28 | for (let i = 0; i < full_data[frame_column].length; i++) { 29 | if (full_data[frame_column][i] <= cb_obj.value) { 30 | data[column].push(full_data[column][i]); 31 | } 32 | // Assumes data is sorted by frame_column 33 | if (full_data[frame_column][i] > cb_obj.value) { 34 | break; 35 | } 36 | } 37 | } 38 | source.change.emit(); 39 | """ 40 | -------------------------------------------------------------------------------- /ptplot/core.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | from abc import ABC 4 | from dataclasses import dataclass 5 | 6 | import pandas as pd 7 | from bokeh.plotting import figure 8 | 9 | from typing import TYPE_CHECKING, Any, Callable, Iterator, Mapping, Sequence, Optional, Tuple 10 | 11 | if TYPE_CHECKING: 12 | from bokeh.models import CustomJS, GlyphRenderer 13 | from ptplot import PTPlot 14 | 15 | 16 | @dataclass 17 | class _Metadata: 18 | label: Optional[str] = "" 19 | is_home: bool = True 20 | color_list: Sequence[str] = ("black", "gray") 21 | marker: Optional[Callable[[figure], Callable[..., GlyphRenderer]]] = None 22 | 23 | 24 | class Layer(ABC): 25 | def get_mappings(self) -> Sequence[str]: 26 | return [] 27 | 28 | def draw( 29 | self, ptplot: PTPlot, data: pd.DataFrame, bokeh_figure: figure, metadata: _Metadata 30 | ) -> Optional[Sequence[Callable[[str, Any], CustomJS]]]: 31 | pass 32 | 33 | 34 | class _Aesthetics(Layer): 35 | team_color_mapping: Mapping[str, Sequence[str]] = {} 36 | ball_colors: Sequence[str] = ("black", "black") 37 | ball_marker_generator: Optional[Callable[[figure], Callable[..., GlyphRenderer]]] = None 38 | 39 | def __init__( 40 | self, 41 | team_ball_mapping: Optional[str] = None, 42 | home_away_mapping: Optional[str] = None, 43 | ball_identifier: Optional[str] = None, 44 | ): 45 | self.team_ball_mapping = team_ball_mapping 46 | self.home_away_mapping = home_away_mapping 47 | self.ball_identifier = ball_identifier 48 | 49 | def get_mappings(self) -> Sequence[str]: 50 | mappings = [] 51 | if self.team_ball_mapping is not None: 52 | mappings.append(self.team_ball_mapping) 53 | if self.home_away_mapping is not None: 54 | mappings.append(self.home_away_mapping) 55 | return mappings 56 | 57 | def map_aesthetics(self, data: pd.DataFrame) -> Iterator[Tuple[pd.DataFrame, _Metadata]]: 58 | if self.team_ball_mapping is not None: 59 | team_ball_groups = data.groupby(self.team_ball_mapping) 60 | for team_ball_name, team_ball_data in team_ball_groups: 61 | if self.ball_identifier is not None and team_ball_name == self.ball_identifier: 62 | yield team_ball_data, _Metadata( 63 | label=team_ball_name, 64 | is_home=True, 65 | # have to 
access the __func__ method directly to avoid needing to wrap all the 66 | # methods in staticmethod decorators 67 | # Also need to ignore mypy because it doesn't like doing that. 68 | color_list=self.ball_colors, 69 | marker=self.ball_marker_generator.__func__, # type: ignore 70 | ) 71 | else: 72 | team_color_list = self.team_color_mapping[team_ball_name] 73 | if self.home_away_mapping is not None: 74 | home_away_groups = team_ball_data.groupby(self.home_away_mapping) 75 | for is_home, home_away_data in home_away_groups: 76 | yield home_away_data, _Metadata( 77 | label=team_ball_name, is_home=is_home, color_list=team_color_list 78 | ) 79 | else: 80 | yield team_ball_data, _Metadata(label=team_ball_name, is_home=True, color_list=team_color_list) 81 | else: 82 | if self.home_away_mapping is not None: 83 | home_away_groups = data.groupby(self.home_away_mapping) 84 | for is_home, home_away_data in home_away_groups: 85 | yield home_away_data, _Metadata(is_home=is_home) 86 | else: 87 | yield data, _Metadata() 88 | -------------------------------------------------------------------------------- /ptplot/facet.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import math 4 | 5 | from .core import Layer 6 | 7 | from typing import TYPE_CHECKING, Any, Iterator, Optional, Sequence, Tuple 8 | 9 | if TYPE_CHECKING: 10 | import pandas as pd 11 | 12 | from bokeh.plotting import figure 13 | from ptplot import PTPlot 14 | from ptplot.core import _Metadata 15 | 16 | 17 | class Facet(Layer): 18 | """Break a dataset into multiple subplots ("facets"). 19 | 20 | Note that you can't have more than one Facet on a visualization. 21 | 22 | Parameters 23 | ---------- 24 | facet_mapping : The mapping to use to split the dataset into facets. 25 | num_col, num_row : The number of columns/rows to split the dataset into. Only 26 | one of the two variables should be defined. 27 | """ 28 | 29 | def __init__(self, facet_mapping: str, num_col: Optional[int] = None, num_row: Optional[int] = None): 30 | self.facet_mapping = facet_mapping 31 | self.num_col = num_col 32 | self.num_row = num_row 33 | if self.num_row is not None and self.num_col is not None: 34 | raise ValueError("Can only specify one of num_col or num_row") 35 | 36 | def get_mappings(self) -> Sequence[str]: 37 | return [self.facet_mapping] 38 | 39 | def faceting(self, data: pd.DataFrame) -> Iterator[Tuple[Any, pd.DataFrame]]: 40 | groups = data.groupby(self.facet_mapping, sort=False) 41 | if self.num_col is not None: 42 | self.num_row = math.ceil(len(groups) / self.num_col) 43 | elif self.num_row is not None: 44 | self.num_col = math.ceil(len(groups) / self.num_row) 45 | else: 46 | self.num_row = len(groups) 47 | self.num_col = 1 48 | return groups 49 | 50 | def draw(self, ptplot: PTPlot, data: pd.DataFrame, bokeh_figure: figure, metadata: _Metadata) -> None: 51 | # This will get run multiple times per aesthetic, but the title ought to be the same 52 | # every time so this should be ok. 
53 | facet_value = data[self.facet_mapping].unique()[0] 54 | bokeh_figure.title.text = str(facet_value) 55 | -------------------------------------------------------------------------------- /ptplot/hover.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | from bokeh.models import HoverTool 4 | 5 | from typing import TYPE_CHECKING, List, Optional, Sequence, Tuple, Union 6 | 7 | from .core import Layer 8 | 9 | 10 | if TYPE_CHECKING: 11 | from ptplot import PTPlot 12 | import pandas as pd 13 | from bokeh.plotting import figure 14 | from .core import _Metadata 15 | 16 | 17 | class Hover(Layer): 18 | """Add a hoverlabel to the visualization. Hoverlabels will appear as mouseover 19 | events. While they are usually used for simply identifying data points, they can be almost arbitrarily 20 | complex. See https://docs.bokeh.org/en/latest/docs/user_guide/tools.html#hovertool 21 | for details (and inspiration!). 22 | 23 | Note that, conceptually, multiple Hover layers can be used in the same visualization; however this has not 24 | been thoroughly tested so unexpected behavior may occur. 25 | 26 | Parameters 27 | ---------- 28 | tooltip_specification : The definition for the tooltip display. It can be anything that can be passed 29 | as the tooltips argument to Bokeh's HoverTool. 30 | plot_name : The name you assigned to the specific plotting layer that you wish the hoverlabel to be attached to 31 | (ie what glyphs you want the label to pop up on when moused over). 32 | tooltip_mappings : The mappings for any columns that you want to use in the tooltips (unfortunately it is 33 | not yet possible to pull those mappings directly from the tooltip_specification input). 34 | """ 35 | 36 | def __init__( 37 | self, 38 | tooltip_specification: Union[str, List[Tuple[str, str]]], 39 | plot_name: str, 40 | tooltip_mappings: Optional[Sequence[str]] = None, 41 | ): 42 | self.plot_name = plot_name 43 | self.tooltip_specification = tooltip_specification 44 | self.tool = HoverTool(names=[plot_name], tooltips=self.tooltip_specification) 45 | self.tooltip_mappings = [] if tooltip_mappings is None else tooltip_mappings 46 | 47 | def get_mappings(self) -> Sequence[str]: 48 | return self.tooltip_mappings 49 | 50 | def draw(self, ptplot: PTPlot, data: pd.DataFrame, bokeh_figure: figure, metadata: _Metadata) -> None: 51 | bokeh_figure.add_tools(self.tool) 52 | return None 53 | -------------------------------------------------------------------------------- /ptplot/nfl.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import math 4 | 5 | import pandas as pd 6 | 7 | from functools import partial 8 | 9 | from ptplot.core import Layer, _Aesthetics, _Metadata 10 | from typing import TYPE_CHECKING, Callable, Sequence 11 | 12 | if TYPE_CHECKING: 13 | from bokeh.models import GlyphRenderer 14 | from bokeh.plotting import figure 15 | from .ptplot import PTPlot 16 | 17 | 18 | NFL_TEAMS = { 19 | "ARI": ("#97233f", "white"), 20 | "ATL": ("#a71930", "#a5acaf"), 21 | "BAL": ("#241773", "#9e7c0c"), 22 | "BUF": ("#00338d", "#c60c30"), 23 | "CAR": ("#0085ca", "#bfc0bf"), 24 | "CHI": ("#0b162a", "#c83803"), 25 | "CIN": ("#fb4f14", "white"), 26 | "CLE": ("#311d00", "#ff3c00"), 27 | "DAL": ("#002244", "#869397"), 28 | "DEN": ("#fb4f14", "#002244"), 29 | "DET": ("#0076b6", "#b0b7bc"), 30 | "GB": ("#203731", "#ffb612"), 31 | "HOU": ("#03202f", "#a71930"), 32 | "IND": ("#002c5f", 
"#a5acaf"), 33 | "JAX": ("#006778", "#9f792c"), 34 | "KC": ("#e31837", "#ffb612"), 35 | "LAC": ("#0073cf", "#ffb612"), 36 | "LAR": ("#002244", "#b3995d"), 37 | "LV": ("black", "#a5acaf"), 38 | "MIA": ("#008e97", "#f26a24"), 39 | "MIN": ("#4f2683", "#ffc62f"), 40 | "NE": ("#002244", "#c60c30"), 41 | "NO": ("black", "#d3bc8d"), 42 | "NYG": ("#0b2265", "#a71930"), 43 | "NYJ": ("#003f2d", "white"), 44 | "PHI": ("#004c54", "#a5acaf"), 45 | "PIT": ("black", "#ffb612"), 46 | "SF": ("#aa0000", "#b3995d"), 47 | "SEA": ("#002244", "#69be28"), 48 | "TB": ("#d50a0a", "#34302b"), 49 | "TEN": ("#002244", "#4b92db"), 50 | "WAS": ("#773141", "#ffb612"), 51 | "OAK": ("black", "#a5acaf"), 52 | "STL": ("#002244", "#b3995d"), 53 | } 54 | 55 | 56 | def _ball_marker_generator(figure: figure) -> Callable[[figure], Callable[..., GlyphRenderer]]: 57 | return partial(figure.ellipse, width=2, height=1, angle=0.0, fill_color="brown", line_color="brown") 58 | 59 | 60 | class Aesthetics(_Aesthetics): 61 | """ 62 | Team colors and ball colors/marker for the NFL. 63 | """ 64 | 65 | team_color_mapping = NFL_TEAMS 66 | ball_colors = ["brown", "brown"] 67 | ball_marker_generator = _ball_marker_generator 68 | 69 | 70 | class Field(Layer): 71 | """Generate an NFL field. 72 | 73 | Parameters 74 | ---------- 75 | min_yardline : The minimum yardline to use. Note that the default will cover the whole 76 | left/bottom endzone with a three-yard buffer. 77 | max_yardline : The maximum yardline to use. Note that the default will cover the whole 78 | right/top endzone with a three-yard buffer. 79 | relative_yardlines : If True, then the yardlines will be centered around zero rather than 80 | representing real positions on the field. This can be useful for plotting multiple 81 | plays on top of each other. 82 | sideline_buffer : How many yards of extra space to provide on each sideline. 
83 | """ 84 | 85 | def __init__( 86 | self, 87 | min_yardline: float = -13, 88 | max_yardline: float = 113, 89 | relative_yardlines: bool = False, 90 | sideline_buffer: float = 3, 91 | ): 92 | 93 | self.min_yardline = min_yardline 94 | self.max_yardline = max_yardline 95 | self.relative_yardlines = relative_yardlines 96 | self.sideline_buffer = sideline_buffer 97 | 98 | def get_mappings(self) -> Sequence[str]: 99 | return [] 100 | 101 | def draw(self, ptplot: PTPlot, data: pd.DataFrame, bokeh_figure: figure, metadata: _Metadata) -> None: 102 | 103 | field_width_yards = 53.3 104 | y_min = 0 - self.sideline_buffer 105 | y_max = field_width_yards + self.sideline_buffer 106 | x_yards = self.max_yardline - self.min_yardline 107 | y_yards = y_max - y_min 108 | 109 | # Have to manually set the width here because I can't figure out how to make bokeh scale 110 | # to it automatically :( 111 | bokeh_figure.width = int(round(bokeh_figure.height * x_yards / y_yards)) 112 | 113 | # For some reason you have to manually specify the range bounds here in order to be able 114 | # access them downstream (apparently otherwise they're only computed in the JS, see 115 | # https://stackoverflow.com/a/50735228/1373664 116 | bokeh_figure.x_range.start = self.min_yardline 117 | bokeh_figure.x_range.end = self.max_yardline 118 | bokeh_figure.y_range.start = y_min 119 | bokeh_figure.y_range.end = y_max 120 | 121 | # This is a total kludge to scale font size up and down with plot size, 122 | # based on a font size I found to work reasonably well 123 | pixels_per_data_unit = bokeh_figure.height / abs(bokeh_figure.y_range.end - bokeh_figure.y_range.start) 124 | font_size = pixels_per_data_unit * 3 125 | 126 | bokeh_figure.background_fill_color = "#34aa62" 127 | 128 | # Set up field lines 129 | yardlines = _get_vertical_line_locations( 130 | # If relative, just use max/min 131 | # If absolute, use max/min but not past the goal lines 132 | max(self.min_yardline, 0 if not self.relative_yardlines else self.min_yardline), 133 | min(self.max_yardline, 100 if not self.relative_yardlines else self.max_yardline), 134 | 5, 135 | ) 136 | bokeh_figure.rect( 137 | yardlines, 138 | [field_width_yards / 2] * len(yardlines), 139 | width=0.3, 140 | height=field_width_yards, 141 | fill_color="white", 142 | line_width=0, 143 | level="image", 144 | ) 145 | if not self.relative_yardlines: 146 | endzone_yardlines = [yard for yard in [-10, 110] if yard > self.min_yardline and yard < self.max_yardline] 147 | bokeh_figure.rect( 148 | endzone_yardlines, 149 | [field_width_yards / 2] * len(endzone_yardlines), 150 | width=0.6, 151 | height=field_width_yards, 152 | fill_color="white", 153 | line_width=0, 154 | level="image", 155 | ) 156 | if self.sideline_buffer > 0: 157 | lines_start = max(-10.2, self.min_yardline) 158 | lines_end = min(110.2, self.max_yardline) 159 | bokeh_figure.rect( 160 | [(lines_end + lines_start) / 2] * 2, 161 | [0, field_width_yards], 162 | height=0.6, 163 | width=lines_end - lines_start, 164 | fill_color="white", 165 | line_width=0, 166 | level="image", 167 | ) 168 | 169 | # Set up numbers 170 | number_yardlines = _get_vertical_line_locations( 171 | # If relative, just use max/min 172 | # If absolute, use max/min but not past the 10s 173 | max(self.min_yardline, 10 if not self.relative_yardlines else self.min_yardline), 174 | min(self.max_yardline, 90 if not self.relative_yardlines else self.max_yardline), 175 | 10, 176 | ) 177 | string_markers = [ 178 | str(yardline) if self.relative_yardlines else str(50 - abs(50 - 
yardline)) for yardline in number_yardlines 179 | ] 180 | string_markers = [ 181 | f" \u0020\u2005{string_marker}" 182 | if len(string_marker) == 1 183 | else f"{string_marker[0]}\u2005{string_marker[1]}" 184 | for string_marker in string_markers 185 | ] 186 | bokeh_figure.text( 187 | number_yardlines, 188 | 3, 189 | text=string_markers, 190 | text_align="center", 191 | text_baseline="middle", 192 | text_color="white", 193 | text_font_size=f"{font_size:.2f}px", 194 | level="image", 195 | ) 196 | bokeh_figure.text( 197 | number_yardlines, 198 | 50, 199 | text=string_markers, 200 | angle=math.pi, 201 | text_align="center", 202 | text_baseline="middle", 203 | text_color="white", 204 | text_font_size=f"{font_size:.2f}px", 205 | level="image", 206 | ) 207 | 208 | return None 209 | 210 | 211 | def _get_vertical_line_locations( 212 | min_yards: float, 213 | max_yards: float, 214 | yard_modulus: int, 215 | ) -> Sequence[int]: 216 | vlines = [yard for yard in range(math.ceil(min_yards), math.floor(max_yards) + 1) if yard % yard_modulus == 0] 217 | return vlines 218 | -------------------------------------------------------------------------------- /ptplot/pick.py: -------------------------------------------------------------------------------- 1 | from bokeh.models.glyphs import Circle 2 | from bokeh.core.property.dataspec import field 3 | from bokeh.core.properties import AngleSpec 4 | 5 | 6 | class Pick(Circle): 7 | __implementation__ = "pick.ts" 8 | _args = ("x", "y", "rot") 9 | 10 | rot = AngleSpec(default=field("rot")) 11 | -------------------------------------------------------------------------------- /ptplot/pick.ts: -------------------------------------------------------------------------------- 1 | import * as p from "core/properties" 2 | import {Circle, CircleView, CircleData} from "models/glyphs/circle" 3 | import {Context2d} from "core/util/canvas" 4 | 5 | 6 | function _convert_to_bezier(x: number, y: number, radius: number, 7 | rot: number): [number, number, number, number, 8 | number, number] { 9 | //0 degrees is pointing down (in data space), rotation is clockwise 10 | const [xy0_offset, cx_offset, cy_offset] = _generate_offsets(radius) 11 | 12 | 13 | const cosine = -Math.cos(rot * Math.PI / 180) 14 | const sine = Math.sin(rot * Math.PI / 180) 15 | 16 | const x0 = x - xy0_offset * sine 17 | const y0 = y + xy0_offset * cosine 18 | const cx0 = x - cx_offset * cosine + cy_offset * sine 19 | const cx1 = x + cx_offset * cosine + cy_offset * sine 20 | const cy0 = y - cx_offset * sine - cy_offset * cosine 21 | const cy1 = y + cx_offset * sine - cy_offset * cosine 22 | return [x0, y0, cx0, cx1, cy0, cy1] 23 | } 24 | 25 | function _generate_offsets(radius: number): [number, number, number] { 26 | //Empirically these values basically "work" to make a pick with the same 27 | //visual radius as a circle 28 | const adjusted_radius = radius * 3.5 29 | const xy0_offset = adjusted_radius / 2.5 30 | const cx_offset = adjusted_radius / 1. 31 | const cy_offset = adjusted_radius / 2.2 32 | return [xy0_offset, cx_offset, cy_offset] 33 | } 34 | 35 | 36 | export type PickData = CircleData & { 37 | rot: p.UniformVector 38 | } 39 | 40 | export interface PickView extends PickData {} 41 | 42 | export class PickView extends CircleView { 43 | model: Pick 44 | visuals: Pick.Visuals 45 | 46 | protected _render(ctx: Context2d, indices: number[], data?: PickData): void { 47 | const {sx, sy, sradius} = data ?? 
this 48 | 49 | const rot = this.rot.array 50 | 51 | for (const i of indices) { 52 | const sx_i = sx[i] 53 | const sy_i = sy[i] 54 | const rot_i = rot[i] 55 | const sradius_i = sradius[i] 56 | 57 | if (!isFinite(sx_i + sy_i + sradius_i)) 58 | continue 59 | 60 | const [x0, y0, cx0, cx1, cy0, cy1] = _convert_to_bezier( 61 | sx_i, sy_i, sradius_i, rot_i 62 | ) 63 | 64 | ctx.beginPath() 65 | ctx.moveTo(x0, y0) 66 | ctx.bezierCurveTo(cx0, cy0, cx1, cy1, x0, y0) 67 | 68 | if (this.visuals.line.doit) { 69 | this.visuals.line.set_vectorize(ctx, i) 70 | ctx.stroke() 71 | } 72 | if (this.visuals.fill.doit) { 73 | this.visuals.fill.set_vectorize(ctx, i) 74 | ctx.fill() 75 | } 76 | if (this.visuals.hatch.doit) { 77 | this.visuals.hatch.set_vectorize(ctx, i) 78 | ctx.fill() 79 | } 80 | } 81 | } 82 | } 83 | 84 | export namespace Pick { 85 | export type Attrs = p.AttrsOf 86 | 87 | export type Props = Circle.Props & { 88 | rot: p.AngleSpec 89 | } 90 | 91 | export type Mixins = Circle.Mixins 92 | 93 | export type Visuals = Circle.Visuals 94 | 95 | } 96 | 97 | export interface Pick extends Pick.Attrs {} 98 | 99 | export class Pick extends Circle { 100 | properties: Pick.Props 101 | __view_type__: PickView 102 | 103 | constructor(attrs?: Partial) { 104 | super(attrs) 105 | } 106 | 107 | static init_Pick(): void { 108 | this.prototype.default_view = PickView 109 | 110 | this.define(({}) => ({ 111 | rot: [ p.AngleSpec, {field: "rot"} ] 112 | })) 113 | 114 | } 115 | } -------------------------------------------------------------------------------- /ptplot/plot.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | from bokeh.models import ColumnDataSource, CustomJS 4 | from bokeh.plotting._decorators import glyph_method 5 | from typing import TYPE_CHECKING, Any, Callable, Sequence, Optional 6 | 7 | from ptplot.callback import FIND_CURRENT_FRAME, FIND_ALL_FRAMES_UP_TO_CURRENT_FRAME 8 | from ptplot.core import Layer, _Metadata 9 | from ptplot.pick import Pick 10 | from ptplot.utils import _union_kwargs 11 | 12 | if TYPE_CHECKING: 13 | from bokeh.plotting import figure 14 | from bokeh.models import GlyphRenderer 15 | from .ptplot import PTPlot 16 | import pandas as pd 17 | 18 | 19 | class Tracks(Layer): 20 | """ 21 | Generate tracks showing position over time for players and/or the ball. 22 | 23 | For a static image the tracks are just lines. For an animated image the lines will grow over time 24 | as the players and the ball move. 25 | 26 | Parameters 27 | ---------- 28 | x : The mapping to be used as the x (horizontal) coordinate for the tracks. 29 | y : The mapping to be used as the y (vertical) coordinate for the tracks. 30 | track_mapping : the mapping to group the data into individual tracks. Usually the column that corresponds 31 | to each player (e.g. player name, jersey number). 32 | animate : If True and an Animation layer is provided to the plot, animate the tracks. If False, show the 33 | full tracks even if an Animation layer is provided. 34 | name : If you plan on using the Hover layer, provide a name for the layer in order to assign hoverlabels 35 | to the glyphs drawn by this layer. 36 | kwargs : Any additional keyword arguments to bokeh.figure.lines. 
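
    Examples
    --------
    A minimal sketch (the dataframe ``tracking_data`` and the column names used for
    ``x``, ``y``, and ``track_mapping`` are hypothetical)::

        from ptplot import PTPlot
        from ptplot.plot import Tracks

        plot = PTPlot(tracking_data) + Tracks("x", "y", track_mapping="player_id", line_width=2)
        layout = plot.draw()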
37 | """ 38 | 39 | def __init__( 40 | self, x: str, y: str, track_mapping: str, animate: bool = True, name: Optional[str] = None, **kwargs: Any 41 | ): 42 | self.x = x 43 | self.y = y 44 | self.track_mapping = track_mapping 45 | self.animate = animate 46 | self.callback = FIND_ALL_FRAMES_UP_TO_CURRENT_FRAME 47 | self.name = name 48 | self.kwargs = kwargs 49 | 50 | def get_mappings(self) -> Sequence[str]: 51 | return [self.x, self.y, self.track_mapping] 52 | 53 | def set_up_animation(self, graphics: GlyphRenderer) -> Callable[[str, Any], CustomJS]: 54 | source = graphics.data_source 55 | full_source = ColumnDataSource(source.data) 56 | 57 | def animate(frame_column: str, initial_frame: Any) -> CustomJS: 58 | is_in_initial_frame = source.data[frame_column] <= initial_frame 59 | initial_data = {column: source.data[column][is_in_initial_frame] for column in source.data} 60 | source.data = initial_data 61 | 62 | callback = CustomJS( 63 | args={"source": source, "full_source": full_source, "frame_column": frame_column}, code=self.callback 64 | ) 65 | return callback 66 | 67 | return animate 68 | 69 | def draw( 70 | self, ptplot: PTPlot, data: pd.DataFrame, bokeh_figure: figure, metadata: _Metadata 71 | ) -> Optional[Sequence[Callable[[str, Any], CustomJS]]]: 72 | 73 | line_color = metadata.color_list[0] if metadata.is_home is True else metadata.color_list[1] 74 | groups = data.groupby(self.track_mapping) 75 | all_graphics = [] 76 | for group_name, group_data in groups: 77 | source = ColumnDataSource(group_data) 78 | kwargs = _union_kwargs( 79 | { 80 | "x": self.x, 81 | "y": self.y, 82 | "source": source, 83 | "line_color": line_color, 84 | "legend_label": metadata.label, 85 | "name": self.name, 86 | }, 87 | self.kwargs, 88 | ) 89 | graphics = bokeh_figure.line(**kwargs) 90 | all_graphics.append(graphics) 91 | 92 | if self.animate is False: 93 | return None 94 | else: 95 | return [self.set_up_animation(graphics) for graphics in all_graphics] 96 | 97 | 98 | class Positions(Layer): 99 | """ 100 | Generate markers showing the positions of players and/or the ball. 101 | 102 | If a special marker has not been specified as part of an Aesthetics, then 103 | the default circle will be used. 104 | 105 | Optionally, a mapping can be specified to plot text on top of the markers, 106 | designed for showing jersey numbers. Note that there are no guardrails in place 107 | to ensure that the text stays within the markers, although sensible defaults have 108 | been chosen where two-digit numbers look reasonable at the default zoom level. 109 | 110 | Parameters 111 | ---------- 112 | x : The mapping to be used as the x (horizontal) coordinate for the tracks. 113 | y : The mapping to be used as the y (vertical) coordinate for the tracks. 114 | orientation : If set, the player orientation in degrees, where 0 degrees is facing directly up 115 | on the field (ie looking toward the far sideline). 116 | number : If set, a mapping indicating what text should be shown on top of the markers. 117 | frame_filter : If set, a True/False mapping of the data used to determine a specific frame 118 | to display at all times, even if an Animation is set. 119 | marker_radius : The size (in data units, e.g. yards for American Football) of the radius of the 120 | marker. 121 | name : If you plan on using the Hover layer, provide a name for the layer in order to assign hoverlabels 122 | to the glyphs drawn by this layer. 123 | kwargs : Any additional keyword arguments to the glyph renderer for the markers. 
Note that these do not 124 | apply to any text on top of the markers 125 | """ 126 | 127 | def __init__( 128 | self, 129 | x: str, 130 | y: str, 131 | orientation: Optional[str] = None, 132 | number: Optional[str] = None, 133 | frame_filter: Optional[str] = None, 134 | marker_radius: float = 1, 135 | name: Optional[str] = None, 136 | **kwargs: Any, 137 | ): 138 | self.x = x 139 | self.y = y 140 | self.orientation = orientation 141 | self.number = number 142 | self.frame_filter = frame_filter 143 | self.callback = FIND_CURRENT_FRAME 144 | self.marker_radius = marker_radius 145 | self.name = name 146 | self.kwargs = kwargs 147 | 148 | def get_mappings(self) -> Sequence[str]: 149 | mappings = [self.x, self.y] 150 | 151 | if self.orientation is not None: 152 | mappings += [self.orientation] 153 | if self.frame_filter is not None: 154 | mappings += [self.frame_filter] 155 | if self.number is not None: 156 | mappings += [self.number] 157 | return mappings 158 | 159 | def set_up_animation(self, graphics: GlyphRenderer) -> Callable[[str, Any], CustomJS]: 160 | source = graphics.data_source 161 | full_source = ColumnDataSource(source.data) 162 | 163 | def animate(frame_column: str, initial_frame: Any) -> CustomJS: 164 | is_in_initial_frame = source.data[frame_column] <= initial_frame 165 | initial_data = {column: source.data[column][is_in_initial_frame] for column in source.data} 166 | source.data = initial_data 167 | 168 | callback = CustomJS( 169 | args={"source": source, "full_source": full_source, "frame_column": frame_column}, code=self.callback 170 | ) 171 | return callback 172 | 173 | return animate 174 | 175 | def draw( 176 | self, ptplot: PTPlot, data: pd.DataFrame, bokeh_figure: figure, metadata: _Metadata 177 | ) -> Optional[Sequence[Callable[[str, Any], CustomJS]]]: 178 | 179 | # If you have multiple frames but only want to show one (even in an animation): 180 | if self.frame_filter is not None: 181 | data = data[data[self.frame_filter]] 182 | source = ColumnDataSource(data) 183 | 184 | all_kwargs = _union_kwargs( 185 | {"x": self.x, "y": self.y, "source": source, "legend_label": metadata.label, "name": self.name}, self.kwargs 186 | ) 187 | 188 | if metadata.marker is not None: 189 | graphics = metadata.marker(bokeh_figure)(**all_kwargs) 190 | else: 191 | fill_color, line_color = ( 192 | metadata.color_list if metadata.is_home is True else ["white", metadata.color_list[0]] 193 | ) 194 | player_kwargs = _union_kwargs( 195 | {"fill_color": fill_color, "line_color": line_color, "radius": self.marker_radius}, all_kwargs 196 | ) 197 | if self.orientation is None: 198 | graphics = bokeh_figure.circle(**player_kwargs) 199 | else: 200 | # This is a kludge to let me take advantage of the bokeh all-in-one 201 | # figure.plot_name syntax, which handles adding the source, making the legends, 202 | # etc. 
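                # Concretely, glyph_method(Pick) turns the no-op function below into a
                # helper that behaves like bokeh's built-in figure.circle()/figure.line()
                # wrappers: it builds the Pick glyph from the keyword arguments, attaches
                # the ColumnDataSource, and creates the legend entry.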
203 | def pick(**kwargs: Any) -> None: 204 | pass 205 | 206 | decorated_pick = glyph_method(Pick)(pick) 207 | 208 | player_kwargs = _union_kwargs({"rot": self.orientation}, player_kwargs) 209 | graphics = decorated_pick(bokeh_figure, **player_kwargs) 210 | 211 | if self.number is not None: 212 | # https://github.com/bokeh/bokeh/issues/2439#issuecomment-447498732 213 | text_color = "white" if metadata.is_home is True else "black" 214 | 215 | # This is a total kludge to scale font size up and down with plot size, 216 | # based on a font size I found to work reasonably well with two-digit 217 | # numbers 218 | pixels_per_data_unit = bokeh_figure.height / abs(bokeh_figure.y_range.end - bokeh_figure.y_range.start) 219 | font_size = pixels_per_data_unit * self.marker_radius 220 | 221 | bokeh_figure.text( 222 | x=self.x, 223 | y=self.y, 224 | text=self.number, 225 | source=source, 226 | text_color=text_color, 227 | text_align="center", 228 | text_baseline="middle", 229 | text_font_size=f"{font_size:.2f}px", 230 | ) 231 | # Don't need to set up a separate animation for the numbers because the source, view, and callback are 232 | # all the same 233 | if self.frame_filter is not None: 234 | return None 235 | else: 236 | return [self.set_up_animation(graphics)] 237 | -------------------------------------------------------------------------------- /ptplot/ptplot.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import itertools 4 | import pandas as pd 5 | import patsy 6 | 7 | from bokeh.plotting import figure 8 | from bokeh.layouts import Column, gridplot, row 9 | from typing import TYPE_CHECKING, Any, Callable, Iterator, List, Optional, Tuple, TypeVar, Type 10 | 11 | from ptplot.animation import Animation 12 | from ptplot.core import _Aesthetics 13 | from ptplot.facet import Facet 14 | 15 | 16 | if TYPE_CHECKING: 17 | from bokeh.models import CustomJS 18 | from ptplot.core import Layer 19 | 20 | layer_type = TypeVar("layer_type", bound=Layer) 21 | 22 | 23 | class PTPlot: 24 | """The core plotting object, used as the base for all visualizations. 25 | 26 | Once instantiated, Layers can be added to the plot using the "+" operator. 27 | 28 | Parameters 29 | ---------- 30 | data : The dataset you want to visualize 31 | pixel_height : How tall the full visualization should be, in pixels. If facets are used this will 32 | be the total height of all the facets combined. 
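Examples
--------
A minimal, illustrative sketch only: the DataFrame columns, the layer used, and
the import paths below are placeholders inferred from this listing rather than a
tested recipe (written without ``>>>`` so that ``--doctest-modules`` does not
collect it)::

    import pandas as pd
    from bokeh.io import show

    from ptplot.plot import Positions  # module path assumed from this listing's layout
    from ptplot.ptplot import PTPlot

    # Hypothetical single-frame snapshot of two players
    tracking_data = pd.DataFrame(
        {"x": [10.0, 35.0], "y": [20.0, 30.0], "jersey": ["12", "88"]}
    )

    plot = (
        PTPlot(tracking_data, pixel_height=600)
        + Positions("x", "y", number="jersey")
    )
    show(plot.draw())  # draw() returns a Bokeh layout; see its docstring below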
33 | """ 34 | 35 | def __init__(self, data: pd.DataFrame, pixel_height: int = 400): 36 | self.data = data 37 | self.pixel_height = pixel_height 38 | 39 | self.layers: List[Layer] = [] 40 | 41 | @property 42 | def facet_layer(self) -> Facet: 43 | layer = self._get_class_instance_from_layers(Facet) 44 | if layer is None: 45 | try: 46 | layer = self._layer 47 | except AttributeError: 48 | 49 | class DummyFacet(Facet): 50 | def faceting(self, data: pd.DataFrame) -> Iterator[Tuple[Any, pd.DataFrame]]: 51 | self.num_col = 1 52 | self.num_row = 1 53 | yield (None, data) 54 | 55 | # Need to use this internal storage for the DummyFacet instance or else you re-instantiate 56 | # every time you call this property *facepalm* 57 | self._layer = DummyFacet("dummy") 58 | layer = self._layer 59 | return layer 60 | 61 | @property 62 | def aesthetics_layer(self) -> _Aesthetics: 63 | layer = self._get_class_instance_from_layers(_Aesthetics) 64 | if layer is None: 65 | layer = _Aesthetics() 66 | return layer 67 | 68 | @property 69 | def animation_layer(self) -> Optional[Animation]: 70 | return self._get_class_instance_from_layers(Animation) 71 | 72 | def _get_class_instance_from_layers(self, class_name: Type[layer_type]) -> Optional[layer_type]: 73 | layer_to_return = None 74 | for layer in self.layers: 75 | if isinstance(layer, class_name): 76 | if layer_to_return is None: 77 | layer_to_return = layer 78 | else: 79 | raise ValueError(f"Only one {class_name} layer can be used for a given visualization") 80 | return layer_to_return 81 | 82 | def __add__(self, layer: Layer) -> PTPlot: 83 | self.layers.append(layer) 84 | return self # Allows method chaining 85 | 86 | def draw(self) -> Column: 87 | """ 88 | Build the visualization specified by all the added layers. 89 | 90 | Returns 91 | ------- 92 | The final visualization, which is a Bokeh object that can be rendered 93 | via any of the common Bokeh methods (e.g. 
show()) 94 | """ 95 | 96 | # Extract all mappings set by each layer, then prune duplicates 97 | all_mappings = itertools.chain(*[layer.get_mappings() for layer in self.layers]) 98 | unique_mappings = set(all_mappings) 99 | # make a dataframe where each mapping is a new column, named based on the mapping 100 | mapping_data = pd.DataFrame({mapping: _apply_mapping(self.data, mapping) for mapping in unique_mappings}) 101 | 102 | # If animation, sort the data by the frame column 103 | if self.animation_layer is not None: 104 | mapping_data = mapping_data.sort_values(self.animation_layer.frame_mapping) 105 | 106 | facets = self.facet_layer.faceting(mapping_data) 107 | 108 | figures = [] 109 | animations: List[Callable[[str, Any], CustomJS]] = [] 110 | for (facet_name, facet_data) in facets: 111 | # self.facet_layer.num_row should always be non-null at this point, but it 112 | # appeases mypy 113 | num_rows = self.facet_layer.num_row if self.facet_layer.num_row is not None else 1 114 | 115 | figure_object = figure(sizing_mode="scale_both", height=int(self.pixel_height / num_rows)) 116 | figure_object.x_range.range_padding = figure_object.y_range.range_padding = 0 117 | figure_object.x_range.bounds = figure_object.y_range.bounds = "auto" 118 | figure_object.xgrid.visible = False 119 | figure_object.ygrid.visible = False 120 | figure_object.xaxis.visible = False 121 | figure_object.yaxis.visible = False 122 | for data_subset, metadata in self.aesthetics_layer.map_aesthetics(facet_data): 123 | for layer in self.layers: 124 | layer_animation = layer.draw(self, data_subset, figure_object, metadata) 125 | if layer_animation is not None: 126 | animations += layer_animation 127 | figure_object.legend.click_policy = "mute" 128 | figures.append(figure_object) 129 | 130 | plot_grid = gridplot(figures, ncols=self.facet_layer.num_col) 131 | # TODO: could this be handled by using bokeh's tagging functionality? 132 | # Probably could, by storing the closure with the plot 133 | if self.animation_layer is not None: 134 | widgets = self.animation_layer.animate(mapping_data, animations) 135 | plot_grid.children.append(row(widgets)) 136 | return plot_grid 137 | 138 | 139 | def _apply_mapping( 140 | data: pd.DataFrame, 141 | mapping: str, 142 | ) -> pd.Series: 143 | if mapping in data.columns: 144 | return data[mapping].copy(deep=True) 145 | 146 | processed_string_data = patsy.dmatrix( 147 | f"I({mapping}) - 1", data, NA_action=patsy.NAAction(NA_types=[]), return_type="dataframe" 148 | ) 149 | 150 | final_data = ( 151 | processed_string_data[processed_string_data.columns[0]] 152 | if len(processed_string_data.columns) == 1 # pure arithmetic 153 | else processed_string_data[processed_string_data.columns[1]].astype(bool) # conditional 154 | ) 155 | final_data.name = mapping # Have to explicitly assign the mapping as the name 156 | return final_data 157 | -------------------------------------------------------------------------------- /ptplot/utils.py: -------------------------------------------------------------------------------- 1 | from typing import Any, Dict 2 | 3 | 4 | def _union_kwargs(protected_kwargs: Dict[str, Any], *other_kwargs: Dict[str, Any]) -> Dict[str, Any]: 5 | """ 6 | 7 | Parameters 8 | ---------- 9 | protected_kwargs: Any keyword arguments that are non-overrideable 10 | other_kwargs: Other keyword argument sets. If multiple argument sets have the same 11 | argument, the argument from the last set will be used. e.g. 
if there are 5 sets 12 | in the list of other_kwargs and the same kwarg is used in the 2nd and 4th sets, 13 | the value from the 4th set will be used. 14 | 15 | Returns 16 | ------- 17 | A single dictionary of combined keyword arguments, that can be passed into a function 18 | with **. 19 | """ 20 | final_kwargs = protected_kwargs 21 | for other_kwarg_set in other_kwargs: 22 | protected_kwargs_intersections = set(protected_kwargs.keys()).intersection(set(other_kwarg_set.keys())) 23 | if len(protected_kwargs_intersections) > 0: 24 | raise KeyError( 25 | f"The following keywords are protected and cannot be overridden: {protected_kwargs_intersections}" 26 | ) 27 | final_kwargs = {**final_kwargs, **other_kwarg_set} 28 | 29 | return final_kwargs 30 | -------------------------------------------------------------------------------- /pytest.ini: -------------------------------------------------------------------------------- 1 | [pytest] 2 | addopts = --doctest-modules 3 | filterwarnings = 4 | ignore::DeprecationWarning:patsy.constraint 5 | -------------------------------------------------------------------------------- /setup.cfg: -------------------------------------------------------------------------------- 1 | [bdist_wheel] 2 | universal = 1 3 | 4 | [flake8] 5 | exclude = docs,tests 6 | max-line-length = 120 7 | 8 | [versioneer] 9 | VCS = git 10 | style = pep440 11 | versionfile_source = ptplot/_version.py 12 | versionfile_build = ptplot/_version.py 13 | tag_prefix = 14 | parentdir_prefix = -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | """The setup script.""" 4 | import versioneer 5 | import parse_envs 6 | 7 | from setuptools import setup, find_packages 8 | 9 | with open('README.md') as readme_file: 10 | readme = readme_file.read() 11 | 12 | extras = { 13 | 'dev': [ 14 | 'black','notebook', 'flake8', 'mypy', 'pytest', 'pytest-cov', 'tox' 15 | ], 16 | 'no_pip_package': ['nodejs', 'pip'] 17 | } 18 | 19 | requirements, extra_requirements = parse_envs.parse_conda_envs( 20 | "environment_minimum_requirements.yml", 21 | "environment.yml", 22 | optional_packages=extras 23 | ) 24 | extra_requirements.pop('no_pip_package', None) 25 | 26 | setup( 27 | author='Andrew Schechtman-Rook', 28 | author_email='footballastronomer@gmail.com', 29 | python_requires='>=3.7', 30 | classifiers=[ 31 | 'Development Status :: 3 - Alpha', 32 | 'Intended Audience :: Developers', 33 | 'License :: OSI Approved :: GNU General Public License v3 (GPLv3)', 34 | 'Natural Language :: English', 35 | 'Programming Language :: Python :: 3', 36 | 'Programming Language :: Python :: 3.7', 37 | 'Programming Language :: Python :: 3.8', 38 | 'Programming Language :: Python :: 3.9', 39 | ], 40 | cmdclass=versioneer.get_cmdclass(), 41 | description='Make beautiful plots of player tracking data', 42 | extras_require=extra_requirements, 43 | install_requires=requirements, 44 | license='GNU General Public License v3', 45 | long_description=readme, 46 | long_description_content_type="text/markdown", 47 | include_package_data=True, 48 | keywords='ptplot', 49 | name='ptplot', 50 | packages=find_packages(include=['ptplot', 'ptplot.*']), 51 | url='https://github.com/AndrewRook/ptplot', 52 | version=versioneer.get_version(), 53 | zip_safe=False, 54 | ) 55 | -------------------------------------------------------------------------------- /tests/conftest.py: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/AndrewRook/ptplot/8ab6e26f54069925b21a57a7eb5d3dee9a7fe147/tests/conftest.py -------------------------------------------------------------------------------- /tests/test_ptplot.py: -------------------------------------------------------------------------------- 1 | import pandas as pd 2 | import pytest 3 | 4 | import ptplot.ptplot as pt 5 | from ptplot.core import Layer 6 | 7 | 8 | class TestFacetLayer: 9 | def test_creates_single_facet_when_no_facet_specified(self): 10 | plot = pt.PTPlot(pd.DataFrame()) 11 | facet_layer = plot.facet_layer 12 | # Have to actually iterate the iterator to set num_row and num_col 13 | next(facet_layer.faceting(pd.DataFrame())) 14 | assert facet_layer.num_row == 1 15 | assert facet_layer.num_col == 1 16 | 17 | def test_does_not_recreate_dummy_facet_when_called_multiple_times(self): 18 | plot = pt.PTPlot(pd.DataFrame()) 19 | facet_layer = plot.facet_layer 20 | facet_layer.random_attribute = 100 21 | second_facet_layer = plot.facet_layer 22 | assert hasattr(second_facet_layer, "random_attribute") 23 | assert second_facet_layer.random_attribute == 100 24 | 25 | 26 | class TestAestheticsLayer: 27 | def test_creates_empty_layer_when_no_aesthetics_specified(self): 28 | plot = pt.PTPlot(pd.DataFrame()) 29 | aesthetics_layer = plot.aesthetics_layer 30 | assert aesthetics_layer.team_color_mapping == {} 31 | 32 | 33 | class TestInternalGetClassInstanceFromLayers: 34 | @pytest.fixture(scope="function") 35 | def layer(self): 36 | class TestLayer(Layer): 37 | pass 38 | 39 | return TestLayer 40 | 41 | def test_returns_none_if_no_layer(self, layer): 42 | class OtherLayer(Layer): 43 | pass 44 | plot = pt.PTPlot(pd.DataFrame()) + layer() 45 | assert plot._get_class_instance_from_layers(OtherLayer) is None 46 | 47 | def test_returns_layer_if_exists(self, layer): 48 | test_layer = layer() 49 | plot = pt.PTPlot(pd.DataFrame()) + test_layer 50 | assert plot._get_class_instance_from_layers(layer) == test_layer 51 | 52 | def test_returns_layer_if_subclass(self, layer): 53 | class TestSubclass(layer): 54 | def extra_method(self): 55 | return "extra method" 56 | plot = pt.PTPlot(pd.DataFrame()) + TestSubclass() 57 | assert plot._get_class_instance_from_layers(layer).extra_method() == "extra method" 58 | 59 | def test_errors_with_multiple_layers(self, layer): 60 | plot = pt.PTPlot(pd.DataFrame()) + layer() + layer() 61 | with pytest.raises(ValueError): 62 | plot._get_class_instance_from_layers(layer) 63 | 64 | 65 | class TestInternalApplyMapping: 66 | @pytest.fixture(scope="function") 67 | def input_data(self): 68 | df = pd.DataFrame({ 69 | "a": [1, 2, 3, 4, 5], 70 | "b": [6, 7, 8, 9, 10] 71 | }) 72 | return df 73 | 74 | def test_passes_column_name(self, input_data): 75 | expected = pd.Series([1, 2, 3, 4, 5], name="a") 76 | actual = pt._apply_mapping(input_data, "a") 77 | pd.testing.assert_series_equal(expected, actual) 78 | 79 | def test_copies_when_passed_column_name(self, input_data): 80 | mapped_data = pt._apply_mapping(input_data, "b") 81 | input_data.loc[input_data["a"] > 2, "b"] = 999 82 | expected_mapped_data = pd.Series([6, 7, 8, 9, 10], name="b") 83 | expected_input_data = pd.DataFrame({ 84 | "a": [1, 2, 3, 4, 5], 85 | "b": [6, 7, 999, 999, 999] 86 | }) 87 | pd.testing.assert_series_equal(mapped_data, expected_mapped_data) 88 | pd.testing.assert_frame_equal(input_data, expected_input_data) 89 | 90 | def test_preserves_index(self, input_data): 91 | 
input_data.index = [2, 4, 8, 16, 32] 92 | expected = pd.Series([1, 2, 3, 4, 5], name="a", index=[2, 4, 8, 16, 32]) 93 | actual = pt._apply_mapping(input_data, "a") 94 | pd.testing.assert_series_equal(expected, actual) 95 | 96 | def test_works_with_arithmetic(self, input_data): 97 | arithmetic = "a *b" 98 | expected = pd.Series([6., 14., 24., 36., 50.], name=arithmetic) 99 | actual = pt._apply_mapping(input_data, arithmetic) 100 | pd.testing.assert_series_equal(expected, actual) 101 | 102 | def test_works_with_conditional(self, input_data): 103 | conditional = "3*a > b" 104 | expected = pd.Series([False, False, True, True, True], name=conditional) 105 | actual = pt._apply_mapping(input_data, conditional) 106 | pd.testing.assert_series_equal(expected, actual) 107 | 108 | def test_handles_weird_variable_names(self): 109 | input_data = pd.DataFrame({ 110 | "one": [1, 2, 3], 111 | "two": [4, 5, 6], 112 | "one + two": [7, 8, 9] 113 | }) 114 | arithmetic = "Q('one + two') + 6" 115 | expected = pd.Series([13., 14., 15.], name=arithmetic) 116 | actual = pt._apply_mapping(input_data, arithmetic) 117 | pd.testing.assert_series_equal(expected, actual) -------------------------------------------------------------------------------- /tests/test_utils.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | from ptplot import utils 4 | 5 | 6 | class TestInternalUnionKwargs: 7 | def test_works_with_no_protected_kwargs(self): 8 | expected = {"a": 1, "b": 2, "c": 3} 9 | actual = utils._union_kwargs({}, expected) 10 | assert actual == expected 11 | 12 | def test_errors_when_overriding_protected_kwargs(self): 13 | with pytest.raises(KeyError, match="The following keywords are protected and cannot be overridden: {\'protected\'}"): 14 | utils._union_kwargs({"protected": 1}, {"protected": 2, "unprotected": 3}) 15 | 16 | def test_non_protected_overrides_work(self): 17 | expected = {"a": 1, "b": 3, "c": 9} 18 | actual = utils._union_kwargs( 19 | {"a": 1}, 20 | {"b": 1, "c": 4}, 21 | {"c": 9}, 22 | {"b": 3} 23 | ) 24 | assert actual == expected 25 | 26 | def test_doesnt_modify_dictionaries(self): 27 | protected_kwargs = {"a": 1, "b": 2} 28 | unprotected_one = {"c": 3, "d": 4} 29 | unprotected_two = {"c": 7, "e": 9} 30 | _ = utils._union_kwargs( 31 | protected_kwargs.copy(), 32 | unprotected_one.copy(), 33 | unprotected_two.copy() 34 | ) 35 | assert protected_kwargs == {"a": 1, "b": 2} 36 | assert unprotected_one == {"c": 3, "d": 4} 37 | assert unprotected_two == {"c": 7, "e": 9} 38 | -------------------------------------------------------------------------------- /tox-conda.ini: -------------------------------------------------------------------------------- 1 | [tox] 2 | requires = tox-conda 3 | envlist = py{37,38,39}-current, py{37,38,39}-minver 4 | 5 | [testenv] 6 | conda_env = 7 | current: environment.yml 8 | minver: environment_minimum_requirements.yml 9 | commands = python -m py.test -------------------------------------------------------------------------------- /tox-pip.ini: -------------------------------------------------------------------------------- 1 | [tox] 2 | # Uses tox-conda here even though we're testing a pip install since this enables 3 | # the easy creation of multiple python version installs 4 | requires = tox-conda 5 | envlist = py{37,38,39} 6 | 7 | [testenv] 8 | extras = dev 9 | commands = python -m py.test -------------------------------------------------------------------------------- /versioneer.py: 
-------------------------------------------------------------------------------- 1 | 2 | # Version: 0.19 3 | 4 | """The Versioneer - like a rocketeer, but for versions. 5 | 6 | The Versioneer 7 | ============== 8 | 9 | * like a rocketeer, but for versions! 10 | * https://github.com/python-versioneer/python-versioneer 11 | * Brian Warner 12 | * License: Public Domain 13 | * Compatible with: Python 3.6, 3.7, 3.8, 3.9 and pypy3 14 | * [![Latest Version][pypi-image]][pypi-url] 15 | * [![Build Status][travis-image]][travis-url] 16 | 17 | This is a tool for managing a recorded version number in distutils-based 18 | python projects. The goal is to remove the tedious and error-prone "update 19 | the embedded version string" step from your release process. Making a new 20 | release should be as easy as recording a new tag in your version-control 21 | system, and maybe making new tarballs. 22 | 23 | 24 | ## Quick Install 25 | 26 | * `pip install versioneer` to somewhere in your $PATH 27 | * add a `[versioneer]` section to your setup.cfg (see [Install](INSTALL.md)) 28 | * run `versioneer install` in your source tree, commit the results 29 | * Verify version information with `python setup.py version` 30 | 31 | ## Version Identifiers 32 | 33 | Source trees come from a variety of places: 34 | 35 | * a version-control system checkout (mostly used by developers) 36 | * a nightly tarball, produced by build automation 37 | * a snapshot tarball, produced by a web-based VCS browser, like github's 38 | "tarball from tag" feature 39 | * a release tarball, produced by "setup.py sdist", distributed through PyPI 40 | 41 | Within each source tree, the version identifier (either a string or a number, 42 | this tool is format-agnostic) can come from a variety of places: 43 | 44 | * ask the VCS tool itself, e.g. "git describe" (for checkouts), which knows 45 | about recent "tags" and an absolute revision-id 46 | * the name of the directory into which the tarball was unpacked 47 | * an expanded VCS keyword ($Id$, etc) 48 | * a `_version.py` created by some earlier build step 49 | 50 | For released software, the version identifier is closely related to a VCS 51 | tag. Some projects use tag names that include more than just the version 52 | string (e.g. "myproject-1.2" instead of just "1.2"), in which case the tool 53 | needs to strip the tag prefix to extract the version identifier. For 54 | unreleased software (between tags), the version identifier should provide 55 | enough information to help developers recreate the same tree, while also 56 | giving them an idea of roughly how old the tree is (after version 1.2, before 57 | version 1.3). Many VCS systems can report a description that captures this, 58 | for example `git describe --tags --dirty --always` reports things like 59 | "0.7-1-g574ab98-dirty" to indicate that the checkout is one revision past the 60 | 0.7 tag, has a unique revision id of "574ab98", and is "dirty" (it has 61 | uncommitted changes). 62 | 63 | The version identifier is used for multiple purposes: 64 | 65 | * to allow the module to self-identify its version: `myproject.__version__` 66 | * to choose a name and prefix for a 'setup.py sdist' tarball 67 | 68 | ## Theory of Operation 69 | 70 | Versioneer works by adding a special `_version.py` file into your source 71 | tree, where your `__init__.py` can import it. This `_version.py` knows how to 72 | dynamically ask the VCS tool for version information at import time. 
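As a minimal illustration of that runtime path in this repository (module paths
follow the listing above; the printed values are placeholders, and the top-level
helper must be run from the project root, as `get_root()` below requires):

    # Sketch only: the per-package module vendored at ptplot/_version.py ...
    from ptplot._version import get_versions
    info = get_versions()
    print(info["version"], info["dirty"])  # e.g. "0.1+2.g1076c97.dirty", True

    # ... and the top-level versioneer.py helper that setup.py also calls.
    import versioneer
    print(versioneer.get_version())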
73 | 74 | `_version.py` also contains `$Revision$` markers, and the installation 75 | process marks `_version.py` to have this marker rewritten with a tag name 76 | during the `git archive` command. As a result, generated tarballs will 77 | contain enough information to get the proper version. 78 | 79 | To allow `setup.py` to compute a version too, a `versioneer.py` is added to 80 | the top level of your source tree, next to `setup.py` and the `setup.cfg` 81 | that configures it. This overrides several distutils/setuptools commands to 82 | compute the version when invoked, and changes `setup.py build` and `setup.py 83 | sdist` to replace `_version.py` with a small static file that contains just 84 | the generated version data. 85 | 86 | ## Installation 87 | 88 | See [INSTALL.md](./INSTALL.md) for detailed installation instructions. 89 | 90 | ## Version-String Flavors 91 | 92 | Code which uses Versioneer can learn about its version string at runtime by 93 | importing `_version` from your main `__init__.py` file and running the 94 | `get_versions()` function. From the "outside" (e.g. in `setup.py`), you can 95 | import the top-level `versioneer.py` and run `get_versions()`. 96 | 97 | Both functions return a dictionary with different flavors of version 98 | information: 99 | 100 | * `['version']`: A condensed version string, rendered using the selected 101 | style. This is the most commonly used value for the project's version 102 | string. The default "pep440" style yields strings like `0.11`, 103 | `0.11+2.g1076c97`, or `0.11+2.g1076c97.dirty`. See the "Styles" section 104 | below for alternative styles. 105 | 106 | * `['full-revisionid']`: detailed revision identifier. For Git, this is the 107 | full SHA1 commit id, e.g. "1076c978a8d3cfc70f408fe5974aa6c092c949ac". 108 | 109 | * `['date']`: Date and time of the latest `HEAD` commit. For Git, it is the 110 | commit date in ISO 8601 format. This will be None if the date is not 111 | available. 112 | 113 | * `['dirty']`: a boolean, True if the tree has uncommitted changes. Note that 114 | this is only accurate if run in a VCS checkout, otherwise it is likely to 115 | be False or None 116 | 117 | * `['error']`: if the version string could not be computed, this will be set 118 | to a string describing the problem, otherwise it will be None. It may be 119 | useful to throw an exception in setup.py if this is set, to avoid e.g. 120 | creating tarballs with a version string of "unknown". 121 | 122 | Some variants are more useful than others. Including `full-revisionid` in a 123 | bug report should allow developers to reconstruct the exact code being tested 124 | (or indicate the presence of local changes that should be shared with the 125 | developers). `version` is suitable for display in an "about" box or a CLI 126 | `--version` output: it can be easily compared against release notes and lists 127 | of bugs fixed in various releases. 128 | 129 | The installer adds the following text to your `__init__.py` to place a basic 130 | version in `YOURPROJECT.__version__`: 131 | 132 | from ._version import get_versions 133 | __version__ = get_versions()['version'] 134 | del get_versions 135 | 136 | ## Styles 137 | 138 | The setup.cfg `style=` configuration controls how the VCS information is 139 | rendered into a version string. 
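For a concrete instance, the `[versioneer]` section of this repository's own
setup.cfg (reproduced earlier in this listing) pins the `pep440` style described
next:

    [versioneer]
    VCS = git
    style = pep440
    versionfile_source = ptplot/_version.py
    versionfile_build = ptplot/_version.py
    tag_prefix =
    parentdir_prefix =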
140 | 141 | The default style, "pep440", produces a PEP440-compliant string, equal to the 142 | un-prefixed tag name for actual releases, and containing an additional "local 143 | version" section with more detail for in-between builds. For Git, this is 144 | TAG[+DISTANCE.gHEX[.dirty]] , using information from `git describe --tags 145 | --dirty --always`. For example "0.11+2.g1076c97.dirty" indicates that the 146 | tree is like the "1076c97" commit but has uncommitted changes (".dirty"), and 147 | that this commit is two revisions ("+2") beyond the "0.11" tag. For released 148 | software (exactly equal to a known tag), the identifier will only contain the 149 | stripped tag, e.g. "0.11". 150 | 151 | Other styles are available. See [details.md](details.md) in the Versioneer 152 | source tree for descriptions. 153 | 154 | ## Debugging 155 | 156 | Versioneer tries to avoid fatal errors: if something goes wrong, it will tend 157 | to return a version of "0+unknown". To investigate the problem, run `setup.py 158 | version`, which will run the version-lookup code in a verbose mode, and will 159 | display the full contents of `get_versions()` (including the `error` string, 160 | which may help identify what went wrong). 161 | 162 | ## Known Limitations 163 | 164 | Some situations are known to cause problems for Versioneer. This details the 165 | most significant ones. More can be found on Github 166 | [issues page](https://github.com/python-versioneer/python-versioneer/issues). 167 | 168 | ### Subprojects 169 | 170 | Versioneer has limited support for source trees in which `setup.py` is not in 171 | the root directory (e.g. `setup.py` and `.git/` are *not* siblings). The are 172 | two common reasons why `setup.py` might not be in the root: 173 | 174 | * Source trees which contain multiple subprojects, such as 175 | [Buildbot](https://github.com/buildbot/buildbot), which contains both 176 | "master" and "slave" subprojects, each with their own `setup.py`, 177 | `setup.cfg`, and `tox.ini`. Projects like these produce multiple PyPI 178 | distributions (and upload multiple independently-installable tarballs). 179 | * Source trees whose main purpose is to contain a C library, but which also 180 | provide bindings to Python (and perhaps other languages) in subdirectories. 181 | 182 | Versioneer will look for `.git` in parent directories, and most operations 183 | should get the right version string. However `pip` and `setuptools` have bugs 184 | and implementation details which frequently cause `pip install .` from a 185 | subproject directory to fail to find a correct version string (so it usually 186 | defaults to `0+unknown`). 187 | 188 | `pip install --editable .` should work correctly. `setup.py install` might 189 | work too. 190 | 191 | Pip-8.1.1 is known to have this problem, but hopefully it will get fixed in 192 | some later version. 193 | 194 | [Bug #38](https://github.com/python-versioneer/python-versioneer/issues/38) is tracking 195 | this issue. The discussion in 196 | [PR #61](https://github.com/python-versioneer/python-versioneer/pull/61) describes the 197 | issue from the Versioneer side in more detail. 198 | [pip PR#3176](https://github.com/pypa/pip/pull/3176) and 199 | [pip PR#3615](https://github.com/pypa/pip/pull/3615) contain work to improve 200 | pip to let Versioneer work correctly. 201 | 202 | Versioneer-0.16 and earlier only looked for a `.git` directory next to the 203 | `setup.cfg`, so subprojects were completely unsupported with those releases. 
204 | 205 | ### Editable installs with setuptools <= 18.5 206 | 207 | `setup.py develop` and `pip install --editable .` allow you to install a 208 | project into a virtualenv once, then continue editing the source code (and 209 | test) without re-installing after every change. 210 | 211 | "Entry-point scripts" (`setup(entry_points={"console_scripts": ..})`) are a 212 | convenient way to specify executable scripts that should be installed along 213 | with the python package. 214 | 215 | These both work as expected when using modern setuptools. When using 216 | setuptools-18.5 or earlier, however, certain operations will cause 217 | `pkg_resources.DistributionNotFound` errors when running the entrypoint 218 | script, which must be resolved by re-installing the package. This happens 219 | when the install happens with one version, then the egg_info data is 220 | regenerated while a different version is checked out. Many setup.py commands 221 | cause egg_info to be rebuilt (including `sdist`, `wheel`, and installing into 222 | a different virtualenv), so this can be surprising. 223 | 224 | [Bug #83](https://github.com/python-versioneer/python-versioneer/issues/83) describes 225 | this one, but upgrading to a newer version of setuptools should probably 226 | resolve it. 227 | 228 | 229 | ## Updating Versioneer 230 | 231 | To upgrade your project to a new release of Versioneer, do the following: 232 | 233 | * install the new Versioneer (`pip install -U versioneer` or equivalent) 234 | * edit `setup.cfg`, if necessary, to include any new configuration settings 235 | indicated by the release notes. See [UPGRADING](./UPGRADING.md) for details. 236 | * re-run `versioneer install` in your source tree, to replace 237 | `SRC/_version.py` 238 | * commit any changed files 239 | 240 | ## Future Directions 241 | 242 | This tool is designed to make it easily extended to other version-control 243 | systems: all VCS-specific components are in separate directories like 244 | src/git/ . The top-level `versioneer.py` script is assembled from these 245 | components by running make-versioneer.py . In the future, make-versioneer.py 246 | will take a VCS name as an argument, and will construct a version of 247 | `versioneer.py` that is specific to the given VCS. It might also take the 248 | configuration arguments that are currently provided manually during 249 | installation by editing setup.py . Alternatively, it might go the other 250 | direction and include code from all supported VCS systems, reducing the 251 | number of intermediate scripts. 252 | 253 | ## Similar projects 254 | 255 | * [setuptools_scm](https://github.com/pypa/setuptools_scm/) - a non-vendored build-time 256 | dependency 257 | * [minver](https://github.com/jbweston/miniver) - a lightweight reimplementation of 258 | versioneer 259 | 260 | ## License 261 | 262 | To make Versioneer easier to embed, all its code is dedicated to the public 263 | domain. The `_version.py` that it creates is also in the public domain. 264 | Specifically, both are released under the Creative Commons "Public Domain 265 | Dedication" license (CC0-1.0), as described in 266 | https://creativecommons.org/publicdomain/zero/1.0/ . 
267 | 268 | [pypi-image]: https://img.shields.io/pypi/v/versioneer.svg 269 | [pypi-url]: https://pypi.python.org/pypi/versioneer/ 270 | [travis-image]: 271 | https://img.shields.io/travis/com/python-versioneer/python-versioneer.svg 272 | [travis-url]: https://travis-ci.com/github/python-versioneer/python-versioneer 273 | 274 | """ 275 | 276 | import configparser 277 | import errno 278 | import json 279 | import os 280 | import re 281 | import subprocess 282 | import sys 283 | 284 | 285 | class VersioneerConfig: 286 | """Container for Versioneer configuration parameters.""" 287 | 288 | 289 | def get_root(): 290 | """Get the project root directory. 291 | 292 | We require that all commands are run from the project root, i.e. the 293 | directory that contains setup.py, setup.cfg, and versioneer.py . 294 | """ 295 | root = os.path.realpath(os.path.abspath(os.getcwd())) 296 | setup_py = os.path.join(root, "setup.py") 297 | versioneer_py = os.path.join(root, "versioneer.py") 298 | if not (os.path.exists(setup_py) or os.path.exists(versioneer_py)): 299 | # allow 'python path/to/setup.py COMMAND' 300 | root = os.path.dirname(os.path.realpath(os.path.abspath(sys.argv[0]))) 301 | setup_py = os.path.join(root, "setup.py") 302 | versioneer_py = os.path.join(root, "versioneer.py") 303 | if not (os.path.exists(setup_py) or os.path.exists(versioneer_py)): 304 | err = ("Versioneer was unable to run the project root directory. " 305 | "Versioneer requires setup.py to be executed from " 306 | "its immediate directory (like 'python setup.py COMMAND'), " 307 | "or in a way that lets it use sys.argv[0] to find the root " 308 | "(like 'python path/to/setup.py COMMAND').") 309 | raise VersioneerBadRootError(err) 310 | try: 311 | # Certain runtime workflows (setup.py install/develop in a setuptools 312 | # tree) execute all dependencies in a single python process, so 313 | # "versioneer" may be imported multiple times, and python's shared 314 | # module-import table will cache the first one. So we can't use 315 | # os.path.dirname(__file__), as that will find whichever 316 | # versioneer.py was first imported, even in later projects. 317 | me = os.path.realpath(os.path.abspath(__file__)) 318 | me_dir = os.path.normcase(os.path.splitext(me)[0]) 319 | vsr_dir = os.path.normcase(os.path.splitext(versioneer_py)[0]) 320 | if me_dir != vsr_dir: 321 | print("Warning: build in %s is using versioneer.py from %s" 322 | % (os.path.dirname(me), versioneer_py)) 323 | except NameError: 324 | pass 325 | return root 326 | 327 | 328 | def get_config_from_root(root): 329 | """Read the project setup.cfg file to determine Versioneer config.""" 330 | # This might raise EnvironmentError (if setup.cfg is missing), or 331 | # configparser.NoSectionError (if it lacks a [versioneer] section), or 332 | # configparser.NoOptionError (if it lacks "VCS="). See the docstring at 333 | # the top of versioneer.py for instructions on writing your setup.cfg . 
334 | setup_cfg = os.path.join(root, "setup.cfg") 335 | parser = configparser.ConfigParser() 336 | with open(setup_cfg, "r") as f: 337 | parser.read_file(f) 338 | VCS = parser.get("versioneer", "VCS") # mandatory 339 | 340 | def get(parser, name): 341 | if parser.has_option("versioneer", name): 342 | return parser.get("versioneer", name) 343 | return None 344 | cfg = VersioneerConfig() 345 | cfg.VCS = VCS 346 | cfg.style = get(parser, "style") or "" 347 | cfg.versionfile_source = get(parser, "versionfile_source") 348 | cfg.versionfile_build = get(parser, "versionfile_build") 349 | cfg.tag_prefix = get(parser, "tag_prefix") 350 | if cfg.tag_prefix in ("''", '""'): 351 | cfg.tag_prefix = "" 352 | cfg.parentdir_prefix = get(parser, "parentdir_prefix") 353 | cfg.verbose = get(parser, "verbose") 354 | return cfg 355 | 356 | 357 | class NotThisMethod(Exception): 358 | """Exception raised if a method is not valid for the current scenario.""" 359 | 360 | 361 | # these dictionaries contain VCS-specific tools 362 | LONG_VERSION_PY = {} 363 | HANDLERS = {} 364 | 365 | 366 | def register_vcs_handler(vcs, method): # decorator 367 | """Create decorator to mark a method as the handler of a VCS.""" 368 | def decorate(f): 369 | """Store f in HANDLERS[vcs][method].""" 370 | if vcs not in HANDLERS: 371 | HANDLERS[vcs] = {} 372 | HANDLERS[vcs][method] = f 373 | return f 374 | return decorate 375 | 376 | 377 | def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False, 378 | env=None): 379 | """Call the given command(s).""" 380 | assert isinstance(commands, list) 381 | p = None 382 | for c in commands: 383 | try: 384 | dispcmd = str([c] + args) 385 | # remember shell=False, so use git.cmd on windows, not just git 386 | p = subprocess.Popen([c] + args, cwd=cwd, env=env, 387 | stdout=subprocess.PIPE, 388 | stderr=(subprocess.PIPE if hide_stderr 389 | else None)) 390 | break 391 | except EnvironmentError: 392 | e = sys.exc_info()[1] 393 | if e.errno == errno.ENOENT: 394 | continue 395 | if verbose: 396 | print("unable to run %s" % dispcmd) 397 | print(e) 398 | return None, None 399 | else: 400 | if verbose: 401 | print("unable to find command, tried %s" % (commands,)) 402 | return None, None 403 | stdout = p.communicate()[0].strip().decode() 404 | if p.returncode != 0: 405 | if verbose: 406 | print("unable to run %s (error)" % dispcmd) 407 | print("stdout was %s" % stdout) 408 | return None, p.returncode 409 | return stdout, p.returncode 410 | 411 | 412 | LONG_VERSION_PY['git'] = r''' 413 | # This file helps to compute a version number in source trees obtained from 414 | # git-archive tarball (such as those provided by githubs download-from-tag 415 | # feature). Distribution tarballs (built by setup.py sdist) and build 416 | # directories (produced by setup.py build) will contain a much shorter file 417 | # that just contains the computed version number. 418 | 419 | # This file is released into the public domain. Generated by 420 | # versioneer-0.19 (https://github.com/python-versioneer/python-versioneer) 421 | 422 | """Git implementation of _version.py.""" 423 | 424 | import errno 425 | import os 426 | import re 427 | import subprocess 428 | import sys 429 | 430 | 431 | def get_keywords(): 432 | """Get the keywords needed to look up the version information.""" 433 | # these strings will be replaced by git during git-archive. 434 | # setup.py/versioneer.py will grep for the variable names, so they must 435 | # each be defined on a line of their own. 
_version.py will just call 436 | # get_keywords(). 437 | git_refnames = "%(DOLLAR)sFormat:%%d%(DOLLAR)s" 438 | git_full = "%(DOLLAR)sFormat:%%H%(DOLLAR)s" 439 | git_date = "%(DOLLAR)sFormat:%%ci%(DOLLAR)s" 440 | keywords = {"refnames": git_refnames, "full": git_full, "date": git_date} 441 | return keywords 442 | 443 | 444 | class VersioneerConfig: 445 | """Container for Versioneer configuration parameters.""" 446 | 447 | 448 | def get_config(): 449 | """Create, populate and return the VersioneerConfig() object.""" 450 | # these strings are filled in when 'setup.py versioneer' creates 451 | # _version.py 452 | cfg = VersioneerConfig() 453 | cfg.VCS = "git" 454 | cfg.style = "%(STYLE)s" 455 | cfg.tag_prefix = "%(TAG_PREFIX)s" 456 | cfg.parentdir_prefix = "%(PARENTDIR_PREFIX)s" 457 | cfg.versionfile_source = "%(VERSIONFILE_SOURCE)s" 458 | cfg.verbose = False 459 | return cfg 460 | 461 | 462 | class NotThisMethod(Exception): 463 | """Exception raised if a method is not valid for the current scenario.""" 464 | 465 | 466 | LONG_VERSION_PY = {} 467 | HANDLERS = {} 468 | 469 | 470 | def register_vcs_handler(vcs, method): # decorator 471 | """Create decorator to mark a method as the handler of a VCS.""" 472 | def decorate(f): 473 | """Store f in HANDLERS[vcs][method].""" 474 | if vcs not in HANDLERS: 475 | HANDLERS[vcs] = {} 476 | HANDLERS[vcs][method] = f 477 | return f 478 | return decorate 479 | 480 | 481 | def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False, 482 | env=None): 483 | """Call the given command(s).""" 484 | assert isinstance(commands, list) 485 | p = None 486 | for c in commands: 487 | try: 488 | dispcmd = str([c] + args) 489 | # remember shell=False, so use git.cmd on windows, not just git 490 | p = subprocess.Popen([c] + args, cwd=cwd, env=env, 491 | stdout=subprocess.PIPE, 492 | stderr=(subprocess.PIPE if hide_stderr 493 | else None)) 494 | break 495 | except EnvironmentError: 496 | e = sys.exc_info()[1] 497 | if e.errno == errno.ENOENT: 498 | continue 499 | if verbose: 500 | print("unable to run %%s" %% dispcmd) 501 | print(e) 502 | return None, None 503 | else: 504 | if verbose: 505 | print("unable to find command, tried %%s" %% (commands,)) 506 | return None, None 507 | stdout = p.communicate()[0].strip().decode() 508 | if p.returncode != 0: 509 | if verbose: 510 | print("unable to run %%s (error)" %% dispcmd) 511 | print("stdout was %%s" %% stdout) 512 | return None, p.returncode 513 | return stdout, p.returncode 514 | 515 | 516 | def versions_from_parentdir(parentdir_prefix, root, verbose): 517 | """Try to determine the version from the parent directory name. 518 | 519 | Source tarballs conventionally unpack into a directory that includes both 520 | the project name and a version string. 
We will also support searching up 521 | two directory levels for an appropriately named parent directory 522 | """ 523 | rootdirs = [] 524 | 525 | for i in range(3): 526 | dirname = os.path.basename(root) 527 | if dirname.startswith(parentdir_prefix): 528 | return {"version": dirname[len(parentdir_prefix):], 529 | "full-revisionid": None, 530 | "dirty": False, "error": None, "date": None} 531 | else: 532 | rootdirs.append(root) 533 | root = os.path.dirname(root) # up a level 534 | 535 | if verbose: 536 | print("Tried directories %%s but none started with prefix %%s" %% 537 | (str(rootdirs), parentdir_prefix)) 538 | raise NotThisMethod("rootdir doesn't start with parentdir_prefix") 539 | 540 | 541 | @register_vcs_handler("git", "get_keywords") 542 | def git_get_keywords(versionfile_abs): 543 | """Extract version information from the given file.""" 544 | # the code embedded in _version.py can just fetch the value of these 545 | # keywords. When used from setup.py, we don't want to import _version.py, 546 | # so we do it with a regexp instead. This function is not used from 547 | # _version.py. 548 | keywords = {} 549 | try: 550 | f = open(versionfile_abs, "r") 551 | for line in f.readlines(): 552 | if line.strip().startswith("git_refnames ="): 553 | mo = re.search(r'=\s*"(.*)"', line) 554 | if mo: 555 | keywords["refnames"] = mo.group(1) 556 | if line.strip().startswith("git_full ="): 557 | mo = re.search(r'=\s*"(.*)"', line) 558 | if mo: 559 | keywords["full"] = mo.group(1) 560 | if line.strip().startswith("git_date ="): 561 | mo = re.search(r'=\s*"(.*)"', line) 562 | if mo: 563 | keywords["date"] = mo.group(1) 564 | f.close() 565 | except EnvironmentError: 566 | pass 567 | return keywords 568 | 569 | 570 | @register_vcs_handler("git", "keywords") 571 | def git_versions_from_keywords(keywords, tag_prefix, verbose): 572 | """Get version information from git keywords.""" 573 | if not keywords: 574 | raise NotThisMethod("no keywords at all, weird") 575 | date = keywords.get("date") 576 | if date is not None: 577 | # Use only the last line. Previous lines may contain GPG signature 578 | # information. 579 | date = date.splitlines()[-1] 580 | 581 | # git-2.2.0 added "%%cI", which expands to an ISO-8601 -compliant 582 | # datestamp. However we prefer "%%ci" (which expands to an "ISO-8601 583 | # -like" string, which we must then edit to make compliant), because 584 | # it's been around since git-1.5.3, and it's too difficult to 585 | # discover which version we're using, or to work around using an 586 | # older one. 587 | date = date.strip().replace(" ", "T", 1).replace(" ", "", 1) 588 | refnames = keywords["refnames"].strip() 589 | if refnames.startswith("$Format"): 590 | if verbose: 591 | print("keywords are unexpanded, not using") 592 | raise NotThisMethod("unexpanded keywords, not a git-archive tarball") 593 | refs = set([r.strip() for r in refnames.strip("()").split(",")]) 594 | # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of 595 | # just "foo-1.0". If we see a "tag: " prefix, prefer those. 596 | TAG = "tag: " 597 | tags = set([r[len(TAG):] for r in refs if r.startswith(TAG)]) 598 | if not tags: 599 | # Either we're using git < 1.8.3, or there really are no tags. We use 600 | # a heuristic: assume all version tags have a digit. The old git %%d 601 | # expansion behaves like git log --decorate=short and strips out the 602 | # refs/heads/ and refs/tags/ prefixes that would let us distinguish 603 | # between branches and tags. 
By ignoring refnames without digits, we 604 | # filter out many common branch names like "release" and 605 | # "stabilization", as well as "HEAD" and "master". 606 | tags = set([r for r in refs if re.search(r'\d', r)]) 607 | if verbose: 608 | print("discarding '%%s', no digits" %% ",".join(refs - tags)) 609 | if verbose: 610 | print("likely tags: %%s" %% ",".join(sorted(tags))) 611 | for ref in sorted(tags): 612 | # sorting will prefer e.g. "2.0" over "2.0rc1" 613 | if ref.startswith(tag_prefix): 614 | r = ref[len(tag_prefix):] 615 | if verbose: 616 | print("picking %%s" %% r) 617 | return {"version": r, 618 | "full-revisionid": keywords["full"].strip(), 619 | "dirty": False, "error": None, 620 | "date": date} 621 | # no suitable tags, so version is "0+unknown", but full hex is still there 622 | if verbose: 623 | print("no suitable tags, using unknown + full revision id") 624 | return {"version": "0+unknown", 625 | "full-revisionid": keywords["full"].strip(), 626 | "dirty": False, "error": "no suitable tags", "date": None} 627 | 628 | 629 | @register_vcs_handler("git", "pieces_from_vcs") 630 | def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command): 631 | """Get version from 'git describe' in the root of the source tree. 632 | 633 | This only gets called if the git-archive 'subst' keywords were *not* 634 | expanded, and _version.py hasn't already been rewritten with a short 635 | version string, meaning we're inside a checked out source tree. 636 | """ 637 | GITS = ["git"] 638 | if sys.platform == "win32": 639 | GITS = ["git.cmd", "git.exe"] 640 | 641 | out, rc = run_command(GITS, ["rev-parse", "--git-dir"], cwd=root, 642 | hide_stderr=True) 643 | if rc != 0: 644 | if verbose: 645 | print("Directory %%s not under git control" %% root) 646 | raise NotThisMethod("'git rev-parse --git-dir' returned error") 647 | 648 | # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty] 649 | # if there isn't one, this yields HEX[-dirty] (no NUM) 650 | describe_out, rc = run_command(GITS, ["describe", "--tags", "--dirty", 651 | "--always", "--long", 652 | "--match", "%%s*" %% tag_prefix], 653 | cwd=root) 654 | # --long was added in git-1.5.5 655 | if describe_out is None: 656 | raise NotThisMethod("'git describe' failed") 657 | describe_out = describe_out.strip() 658 | full_out, rc = run_command(GITS, ["rev-parse", "HEAD"], cwd=root) 659 | if full_out is None: 660 | raise NotThisMethod("'git rev-parse' failed") 661 | full_out = full_out.strip() 662 | 663 | pieces = {} 664 | pieces["long"] = full_out 665 | pieces["short"] = full_out[:7] # maybe improved later 666 | pieces["error"] = None 667 | 668 | # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty] 669 | # TAG might have hyphens. 670 | git_describe = describe_out 671 | 672 | # look for -dirty suffix 673 | dirty = git_describe.endswith("-dirty") 674 | pieces["dirty"] = dirty 675 | if dirty: 676 | git_describe = git_describe[:git_describe.rindex("-dirty")] 677 | 678 | # now we have TAG-NUM-gHEX or HEX 679 | 680 | if "-" in git_describe: 681 | # TAG-NUM-gHEX 682 | mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe) 683 | if not mo: 684 | # unparseable. Maybe git-describe is misbehaving? 
685 | pieces["error"] = ("unable to parse git-describe output: '%%s'" 686 | %% describe_out) 687 | return pieces 688 | 689 | # tag 690 | full_tag = mo.group(1) 691 | if not full_tag.startswith(tag_prefix): 692 | if verbose: 693 | fmt = "tag '%%s' doesn't start with prefix '%%s'" 694 | print(fmt %% (full_tag, tag_prefix)) 695 | pieces["error"] = ("tag '%%s' doesn't start with prefix '%%s'" 696 | %% (full_tag, tag_prefix)) 697 | return pieces 698 | pieces["closest-tag"] = full_tag[len(tag_prefix):] 699 | 700 | # distance: number of commits since tag 701 | pieces["distance"] = int(mo.group(2)) 702 | 703 | # commit: short hex revision ID 704 | pieces["short"] = mo.group(3) 705 | 706 | else: 707 | # HEX: no tags 708 | pieces["closest-tag"] = None 709 | count_out, rc = run_command(GITS, ["rev-list", "HEAD", "--count"], 710 | cwd=root) 711 | pieces["distance"] = int(count_out) # total number of commits 712 | 713 | # commit date: see ISO-8601 comment in git_versions_from_keywords() 714 | date = run_command(GITS, ["show", "-s", "--format=%%ci", "HEAD"], 715 | cwd=root)[0].strip() 716 | # Use only the last line. Previous lines may contain GPG signature 717 | # information. 718 | date = date.splitlines()[-1] 719 | pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1) 720 | 721 | return pieces 722 | 723 | 724 | def plus_or_dot(pieces): 725 | """Return a + if we don't already have one, else return a .""" 726 | if "+" in pieces.get("closest-tag", ""): 727 | return "." 728 | return "+" 729 | 730 | 731 | def render_pep440(pieces): 732 | """Build up version string, with post-release "local version identifier". 733 | 734 | Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you 735 | get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty 736 | 737 | Exceptions: 738 | 1: no tags. git_describe was just HEX. 0+untagged.DISTANCE.gHEX[.dirty] 739 | """ 740 | if pieces["closest-tag"]: 741 | rendered = pieces["closest-tag"] 742 | if pieces["distance"] or pieces["dirty"]: 743 | rendered += plus_or_dot(pieces) 744 | rendered += "%%d.g%%s" %% (pieces["distance"], pieces["short"]) 745 | if pieces["dirty"]: 746 | rendered += ".dirty" 747 | else: 748 | # exception #1 749 | rendered = "0+untagged.%%d.g%%s" %% (pieces["distance"], 750 | pieces["short"]) 751 | if pieces["dirty"]: 752 | rendered += ".dirty" 753 | return rendered 754 | 755 | 756 | def render_pep440_pre(pieces): 757 | """TAG[.post0.devDISTANCE] -- No -dirty. 758 | 759 | Exceptions: 760 | 1: no tags. 0.post0.devDISTANCE 761 | """ 762 | if pieces["closest-tag"]: 763 | rendered = pieces["closest-tag"] 764 | if pieces["distance"]: 765 | rendered += ".post0.dev%%d" %% pieces["distance"] 766 | else: 767 | # exception #1 768 | rendered = "0.post0.dev%%d" %% pieces["distance"] 769 | return rendered 770 | 771 | 772 | def render_pep440_post(pieces): 773 | """TAG[.postDISTANCE[.dev0]+gHEX] . 774 | 775 | The ".dev0" means dirty. Note that .dev0 sorts backwards 776 | (a dirty tree will appear "older" than the corresponding clean one), 777 | but you shouldn't be releasing software with -dirty anyways. 778 | 779 | Exceptions: 780 | 1: no tags. 
0.postDISTANCE[.dev0] 781 | """ 782 | if pieces["closest-tag"]: 783 | rendered = pieces["closest-tag"] 784 | if pieces["distance"] or pieces["dirty"]: 785 | rendered += ".post%%d" %% pieces["distance"] 786 | if pieces["dirty"]: 787 | rendered += ".dev0" 788 | rendered += plus_or_dot(pieces) 789 | rendered += "g%%s" %% pieces["short"] 790 | else: 791 | # exception #1 792 | rendered = "0.post%%d" %% pieces["distance"] 793 | if pieces["dirty"]: 794 | rendered += ".dev0" 795 | rendered += "+g%%s" %% pieces["short"] 796 | return rendered 797 | 798 | 799 | def render_pep440_old(pieces): 800 | """TAG[.postDISTANCE[.dev0]] . 801 | 802 | The ".dev0" means dirty. 803 | 804 | Exceptions: 805 | 1: no tags. 0.postDISTANCE[.dev0] 806 | """ 807 | if pieces["closest-tag"]: 808 | rendered = pieces["closest-tag"] 809 | if pieces["distance"] or pieces["dirty"]: 810 | rendered += ".post%%d" %% pieces["distance"] 811 | if pieces["dirty"]: 812 | rendered += ".dev0" 813 | else: 814 | # exception #1 815 | rendered = "0.post%%d" %% pieces["distance"] 816 | if pieces["dirty"]: 817 | rendered += ".dev0" 818 | return rendered 819 | 820 | 821 | def render_git_describe(pieces): 822 | """TAG[-DISTANCE-gHEX][-dirty]. 823 | 824 | Like 'git describe --tags --dirty --always'. 825 | 826 | Exceptions: 827 | 1: no tags. HEX[-dirty] (note: no 'g' prefix) 828 | """ 829 | if pieces["closest-tag"]: 830 | rendered = pieces["closest-tag"] 831 | if pieces["distance"]: 832 | rendered += "-%%d-g%%s" %% (pieces["distance"], pieces["short"]) 833 | else: 834 | # exception #1 835 | rendered = pieces["short"] 836 | if pieces["dirty"]: 837 | rendered += "-dirty" 838 | return rendered 839 | 840 | 841 | def render_git_describe_long(pieces): 842 | """TAG-DISTANCE-gHEX[-dirty]. 843 | 844 | Like 'git describe --tags --dirty --always -long'. 845 | The distance/hash is unconditional. 846 | 847 | Exceptions: 848 | 1: no tags. HEX[-dirty] (note: no 'g' prefix) 849 | """ 850 | if pieces["closest-tag"]: 851 | rendered = pieces["closest-tag"] 852 | rendered += "-%%d-g%%s" %% (pieces["distance"], pieces["short"]) 853 | else: 854 | # exception #1 855 | rendered = pieces["short"] 856 | if pieces["dirty"]: 857 | rendered += "-dirty" 858 | return rendered 859 | 860 | 861 | def render(pieces, style): 862 | """Render the given version pieces into the requested style.""" 863 | if pieces["error"]: 864 | return {"version": "unknown", 865 | "full-revisionid": pieces.get("long"), 866 | "dirty": None, 867 | "error": pieces["error"], 868 | "date": None} 869 | 870 | if not style or style == "default": 871 | style = "pep440" # the default 872 | 873 | if style == "pep440": 874 | rendered = render_pep440(pieces) 875 | elif style == "pep440-pre": 876 | rendered = render_pep440_pre(pieces) 877 | elif style == "pep440-post": 878 | rendered = render_pep440_post(pieces) 879 | elif style == "pep440-old": 880 | rendered = render_pep440_old(pieces) 881 | elif style == "git-describe": 882 | rendered = render_git_describe(pieces) 883 | elif style == "git-describe-long": 884 | rendered = render_git_describe_long(pieces) 885 | else: 886 | raise ValueError("unknown style '%%s'" %% style) 887 | 888 | return {"version": rendered, "full-revisionid": pieces["long"], 889 | "dirty": pieces["dirty"], "error": None, 890 | "date": pieces.get("date")} 891 | 892 | 893 | def get_versions(): 894 | """Get version information or return default if unable to do so.""" 895 | # I am in _version.py, which lives at ROOT/VERSIONFILE_SOURCE. 
If we have 896 | # __file__, we can work backwards from there to the root. Some 897 | # py2exe/bbfreeze/non-CPython implementations don't do __file__, in which 898 | # case we can only use expanded keywords. 899 | 900 | cfg = get_config() 901 | verbose = cfg.verbose 902 | 903 | try: 904 | return git_versions_from_keywords(get_keywords(), cfg.tag_prefix, 905 | verbose) 906 | except NotThisMethod: 907 | pass 908 | 909 | try: 910 | root = os.path.realpath(__file__) 911 | # versionfile_source is the relative path from the top of the source 912 | # tree (where the .git directory might live) to this file. Invert 913 | # this to find the root from __file__. 914 | for i in cfg.versionfile_source.split('/'): 915 | root = os.path.dirname(root) 916 | except NameError: 917 | return {"version": "0+unknown", "full-revisionid": None, 918 | "dirty": None, 919 | "error": "unable to find root of source tree", 920 | "date": None} 921 | 922 | try: 923 | pieces = git_pieces_from_vcs(cfg.tag_prefix, root, verbose) 924 | return render(pieces, cfg.style) 925 | except NotThisMethod: 926 | pass 927 | 928 | try: 929 | if cfg.parentdir_prefix: 930 | return versions_from_parentdir(cfg.parentdir_prefix, root, verbose) 931 | except NotThisMethod: 932 | pass 933 | 934 | return {"version": "0+unknown", "full-revisionid": None, 935 | "dirty": None, 936 | "error": "unable to compute version", "date": None} 937 | ''' 938 | 939 | 940 | @register_vcs_handler("git", "get_keywords") 941 | def git_get_keywords(versionfile_abs): 942 | """Extract version information from the given file.""" 943 | # the code embedded in _version.py can just fetch the value of these 944 | # keywords. When used from setup.py, we don't want to import _version.py, 945 | # so we do it with a regexp instead. This function is not used from 946 | # _version.py. 947 | keywords = {} 948 | try: 949 | f = open(versionfile_abs, "r") 950 | for line in f.readlines(): 951 | if line.strip().startswith("git_refnames ="): 952 | mo = re.search(r'=\s*"(.*)"', line) 953 | if mo: 954 | keywords["refnames"] = mo.group(1) 955 | if line.strip().startswith("git_full ="): 956 | mo = re.search(r'=\s*"(.*)"', line) 957 | if mo: 958 | keywords["full"] = mo.group(1) 959 | if line.strip().startswith("git_date ="): 960 | mo = re.search(r'=\s*"(.*)"', line) 961 | if mo: 962 | keywords["date"] = mo.group(1) 963 | f.close() 964 | except EnvironmentError: 965 | pass 966 | return keywords 967 | 968 | 969 | @register_vcs_handler("git", "keywords") 970 | def git_versions_from_keywords(keywords, tag_prefix, verbose): 971 | """Get version information from git keywords.""" 972 | if not keywords: 973 | raise NotThisMethod("no keywords at all, weird") 974 | date = keywords.get("date") 975 | if date is not None: 976 | # Use only the last line. Previous lines may contain GPG signature 977 | # information. 978 | date = date.splitlines()[-1] 979 | 980 | # git-2.2.0 added "%cI", which expands to an ISO-8601 -compliant 981 | # datestamp. However we prefer "%ci" (which expands to an "ISO-8601 982 | # -like" string, which we must then edit to make compliant), because 983 | # it's been around since git-1.5.3, and it's too difficult to 984 | # discover which version we're using, or to work around using an 985 | # older one. 
986 | date = date.strip().replace(" ", "T", 1).replace(" ", "", 1) 987 | refnames = keywords["refnames"].strip() 988 | if refnames.startswith("$Format"): 989 | if verbose: 990 | print("keywords are unexpanded, not using") 991 | raise NotThisMethod("unexpanded keywords, not a git-archive tarball") 992 | refs = set([r.strip() for r in refnames.strip("()").split(",")]) 993 | # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of 994 | # just "foo-1.0". If we see a "tag: " prefix, prefer those. 995 | TAG = "tag: " 996 | tags = set([r[len(TAG):] for r in refs if r.startswith(TAG)]) 997 | if not tags: 998 | # Either we're using git < 1.8.3, or there really are no tags. We use 999 | # a heuristic: assume all version tags have a digit. The old git %d 1000 | # expansion behaves like git log --decorate=short and strips out the 1001 | # refs/heads/ and refs/tags/ prefixes that would let us distinguish 1002 | # between branches and tags. By ignoring refnames without digits, we 1003 | # filter out many common branch names like "release" and 1004 | # "stabilization", as well as "HEAD" and "master". 1005 | tags = set([r for r in refs if re.search(r'\d', r)]) 1006 | if verbose: 1007 | print("discarding '%s', no digits" % ",".join(refs - tags)) 1008 | if verbose: 1009 | print("likely tags: %s" % ",".join(sorted(tags))) 1010 | for ref in sorted(tags): 1011 | # sorting will prefer e.g. "2.0" over "2.0rc1" 1012 | if ref.startswith(tag_prefix): 1013 | r = ref[len(tag_prefix):] 1014 | if verbose: 1015 | print("picking %s" % r) 1016 | return {"version": r, 1017 | "full-revisionid": keywords["full"].strip(), 1018 | "dirty": False, "error": None, 1019 | "date": date} 1020 | # no suitable tags, so version is "0+unknown", but full hex is still there 1021 | if verbose: 1022 | print("no suitable tags, using unknown + full revision id") 1023 | return {"version": "0+unknown", 1024 | "full-revisionid": keywords["full"].strip(), 1025 | "dirty": False, "error": "no suitable tags", "date": None} 1026 | 1027 | 1028 | @register_vcs_handler("git", "pieces_from_vcs") 1029 | def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command): 1030 | """Get version from 'git describe' in the root of the source tree. 1031 | 1032 | This only gets called if the git-archive 'subst' keywords were *not* 1033 | expanded, and _version.py hasn't already been rewritten with a short 1034 | version string, meaning we're inside a checked out source tree. 
1035 | """ 1036 | GITS = ["git"] 1037 | if sys.platform == "win32": 1038 | GITS = ["git.cmd", "git.exe"] 1039 | 1040 | out, rc = run_command(GITS, ["rev-parse", "--git-dir"], cwd=root, 1041 | hide_stderr=True) 1042 | if rc != 0: 1043 | if verbose: 1044 | print("Directory %s not under git control" % root) 1045 | raise NotThisMethod("'git rev-parse --git-dir' returned error") 1046 | 1047 | # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty] 1048 | # if there isn't one, this yields HEX[-dirty] (no NUM) 1049 | describe_out, rc = run_command(GITS, ["describe", "--tags", "--dirty", 1050 | "--always", "--long", 1051 | "--match", "%s*" % tag_prefix], 1052 | cwd=root) 1053 | # --long was added in git-1.5.5 1054 | if describe_out is None: 1055 | raise NotThisMethod("'git describe' failed") 1056 | describe_out = describe_out.strip() 1057 | full_out, rc = run_command(GITS, ["rev-parse", "HEAD"], cwd=root) 1058 | if full_out is None: 1059 | raise NotThisMethod("'git rev-parse' failed") 1060 | full_out = full_out.strip() 1061 | 1062 | pieces = {} 1063 | pieces["long"] = full_out 1064 | pieces["short"] = full_out[:7] # maybe improved later 1065 | pieces["error"] = None 1066 | 1067 | # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty] 1068 | # TAG might have hyphens. 1069 | git_describe = describe_out 1070 | 1071 | # look for -dirty suffix 1072 | dirty = git_describe.endswith("-dirty") 1073 | pieces["dirty"] = dirty 1074 | if dirty: 1075 | git_describe = git_describe[:git_describe.rindex("-dirty")] 1076 | 1077 | # now we have TAG-NUM-gHEX or HEX 1078 | 1079 | if "-" in git_describe: 1080 | # TAG-NUM-gHEX 1081 | mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe) 1082 | if not mo: 1083 | # unparseable. Maybe git-describe is misbehaving? 1084 | pieces["error"] = ("unable to parse git-describe output: '%s'" 1085 | % describe_out) 1086 | return pieces 1087 | 1088 | # tag 1089 | full_tag = mo.group(1) 1090 | if not full_tag.startswith(tag_prefix): 1091 | if verbose: 1092 | fmt = "tag '%s' doesn't start with prefix '%s'" 1093 | print(fmt % (full_tag, tag_prefix)) 1094 | pieces["error"] = ("tag '%s' doesn't start with prefix '%s'" 1095 | % (full_tag, tag_prefix)) 1096 | return pieces 1097 | pieces["closest-tag"] = full_tag[len(tag_prefix):] 1098 | 1099 | # distance: number of commits since tag 1100 | pieces["distance"] = int(mo.group(2)) 1101 | 1102 | # commit: short hex revision ID 1103 | pieces["short"] = mo.group(3) 1104 | 1105 | else: 1106 | # HEX: no tags 1107 | pieces["closest-tag"] = None 1108 | count_out, rc = run_command(GITS, ["rev-list", "HEAD", "--count"], 1109 | cwd=root) 1110 | pieces["distance"] = int(count_out) # total number of commits 1111 | 1112 | # commit date: see ISO-8601 comment in git_versions_from_keywords() 1113 | date = run_command(GITS, ["show", "-s", "--format=%ci", "HEAD"], 1114 | cwd=root)[0].strip() 1115 | # Use only the last line. Previous lines may contain GPG signature 1116 | # information. 1117 | date = date.splitlines()[-1] 1118 | pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1) 1119 | 1120 | return pieces 1121 | 1122 | 1123 | def do_vcs_install(manifest_in, versionfile_source, ipy): 1124 | """Git-specific installation logic for Versioneer. 1125 | 1126 | For Git, this means creating/changing .gitattributes to mark _version.py 1127 | for export-subst keyword substitution. 
1128 | """ 1129 | GITS = ["git"] 1130 | if sys.platform == "win32": 1131 | GITS = ["git.cmd", "git.exe"] 1132 | files = [manifest_in, versionfile_source] 1133 | if ipy: 1134 | files.append(ipy) 1135 | try: 1136 | me = __file__ 1137 | if me.endswith(".pyc") or me.endswith(".pyo"): 1138 | me = os.path.splitext(me)[0] + ".py" 1139 | versioneer_file = os.path.relpath(me) 1140 | except NameError: 1141 | versioneer_file = "versioneer.py" 1142 | files.append(versioneer_file) 1143 | present = False 1144 | try: 1145 | f = open(".gitattributes", "r") 1146 | for line in f.readlines(): 1147 | if line.strip().startswith(versionfile_source): 1148 | if "export-subst" in line.strip().split()[1:]: 1149 | present = True 1150 | f.close() 1151 | except EnvironmentError: 1152 | pass 1153 | if not present: 1154 | f = open(".gitattributes", "a+") 1155 | f.write("%s export-subst\n" % versionfile_source) 1156 | f.close() 1157 | files.append(".gitattributes") 1158 | run_command(GITS, ["add", "--"] + files) 1159 | 1160 | 1161 | def versions_from_parentdir(parentdir_prefix, root, verbose): 1162 | """Try to determine the version from the parent directory name. 1163 | 1164 | Source tarballs conventionally unpack into a directory that includes both 1165 | the project name and a version string. We will also support searching up 1166 | two directory levels for an appropriately named parent directory 1167 | """ 1168 | rootdirs = [] 1169 | 1170 | for i in range(3): 1171 | dirname = os.path.basename(root) 1172 | if dirname.startswith(parentdir_prefix): 1173 | return {"version": dirname[len(parentdir_prefix):], 1174 | "full-revisionid": None, 1175 | "dirty": False, "error": None, "date": None} 1176 | else: 1177 | rootdirs.append(root) 1178 | root = os.path.dirname(root) # up a level 1179 | 1180 | if verbose: 1181 | print("Tried directories %s but none started with prefix %s" % 1182 | (str(rootdirs), parentdir_prefix)) 1183 | raise NotThisMethod("rootdir doesn't start with parentdir_prefix") 1184 | 1185 | 1186 | SHORT_VERSION_PY = """ 1187 | # This file was generated by 'versioneer.py' (0.19) from 1188 | # revision-control system data, or from the parent directory name of an 1189 | # unpacked source archive. Distribution tarballs contain a pre-generated copy 1190 | # of this file. 
1191 | 1192 | import json 1193 | 1194 | version_json = ''' 1195 | %s 1196 | ''' # END VERSION_JSON 1197 | 1198 | 1199 | def get_versions(): 1200 | return json.loads(version_json) 1201 | """ 1202 | 1203 | 1204 | def versions_from_file(filename): 1205 | """Try to determine the version from _version.py if present.""" 1206 | try: 1207 | with open(filename) as f: 1208 | contents = f.read() 1209 | except EnvironmentError: 1210 | raise NotThisMethod("unable to read _version.py") 1211 | mo = re.search(r"version_json = '''\n(.*)''' # END VERSION_JSON", 1212 | contents, re.M | re.S) 1213 | if not mo: 1214 | mo = re.search(r"version_json = '''\r\n(.*)''' # END VERSION_JSON", 1215 | contents, re.M | re.S) 1216 | if not mo: 1217 | raise NotThisMethod("no version_json in _version.py") 1218 | return json.loads(mo.group(1)) 1219 | 1220 | 1221 | def write_to_version_file(filename, versions): 1222 | """Write the given version number to the given _version.py file.""" 1223 | os.unlink(filename) 1224 | contents = json.dumps(versions, sort_keys=True, 1225 | indent=1, separators=(",", ": ")) 1226 | with open(filename, "w") as f: 1227 | f.write(SHORT_VERSION_PY % contents) 1228 | 1229 | print("set %s to '%s'" % (filename, versions["version"])) 1230 | 1231 | 1232 | def plus_or_dot(pieces): 1233 | """Return a + if we don't already have one, else return a .""" 1234 | if "+" in pieces.get("closest-tag", ""): 1235 | return "." 1236 | return "+" 1237 | 1238 | 1239 | def render_pep440(pieces): 1240 | """Build up version string, with post-release "local version identifier". 1241 | 1242 | Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you 1243 | get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty 1244 | 1245 | Exceptions: 1246 | 1: no tags. git_describe was just HEX. 0+untagged.DISTANCE.gHEX[.dirty] 1247 | """ 1248 | if pieces["closest-tag"]: 1249 | rendered = pieces["closest-tag"] 1250 | if pieces["distance"] or pieces["dirty"]: 1251 | rendered += plus_or_dot(pieces) 1252 | rendered += "%d.g%s" % (pieces["distance"], pieces["short"]) 1253 | if pieces["dirty"]: 1254 | rendered += ".dirty" 1255 | else: 1256 | # exception #1 1257 | rendered = "0+untagged.%d.g%s" % (pieces["distance"], 1258 | pieces["short"]) 1259 | if pieces["dirty"]: 1260 | rendered += ".dirty" 1261 | return rendered 1262 | 1263 | 1264 | def render_pep440_pre(pieces): 1265 | """TAG[.post0.devDISTANCE] -- No -dirty. 1266 | 1267 | Exceptions: 1268 | 1: no tags. 0.post0.devDISTANCE 1269 | """ 1270 | if pieces["closest-tag"]: 1271 | rendered = pieces["closest-tag"] 1272 | if pieces["distance"]: 1273 | rendered += ".post0.dev%d" % pieces["distance"] 1274 | else: 1275 | # exception #1 1276 | rendered = "0.post0.dev%d" % pieces["distance"] 1277 | return rendered 1278 | 1279 | 1280 | def render_pep440_post(pieces): 1281 | """TAG[.postDISTANCE[.dev0]+gHEX] . 1282 | 1283 | The ".dev0" means dirty. Note that .dev0 sorts backwards 1284 | (a dirty tree will appear "older" than the corresponding clean one), 1285 | but you shouldn't be releasing software with -dirty anyways. 1286 | 1287 | Exceptions: 1288 | 1: no tags. 
0.postDISTANCE[.dev0]
1289 |     """
1290 |     if pieces["closest-tag"]:
1291 |         rendered = pieces["closest-tag"]
1292 |         if pieces["distance"] or pieces["dirty"]:
1293 |             rendered += ".post%d" % pieces["distance"]
1294 |             if pieces["dirty"]:
1295 |                 rendered += ".dev0"
1296 |             rendered += plus_or_dot(pieces)
1297 |             rendered += "g%s" % pieces["short"]
1298 |     else:
1299 |         # exception #1
1300 |         rendered = "0.post%d" % pieces["distance"]
1301 |         if pieces["dirty"]:
1302 |             rendered += ".dev0"
1303 |         rendered += "+g%s" % pieces["short"]
1304 |     return rendered
1305 | 
1306 | 
1307 | def render_pep440_old(pieces):
1308 |     """TAG[.postDISTANCE[.dev0]] .
1309 | 
1310 |     The ".dev0" means dirty.
1311 | 
1312 |     Exceptions:
1313 |     1: no tags. 0.postDISTANCE[.dev0]
1314 |     """
1315 |     if pieces["closest-tag"]:
1316 |         rendered = pieces["closest-tag"]
1317 |         if pieces["distance"] or pieces["dirty"]:
1318 |             rendered += ".post%d" % pieces["distance"]
1319 |             if pieces["dirty"]:
1320 |                 rendered += ".dev0"
1321 |     else:
1322 |         # exception #1
1323 |         rendered = "0.post%d" % pieces["distance"]
1324 |         if pieces["dirty"]:
1325 |             rendered += ".dev0"
1326 |     return rendered
1327 | 
1328 | 
1329 | def render_git_describe(pieces):
1330 |     """TAG[-DISTANCE-gHEX][-dirty].
1331 | 
1332 |     Like 'git describe --tags --dirty --always'.
1333 | 
1334 |     Exceptions:
1335 |     1: no tags. HEX[-dirty] (note: no 'g' prefix)
1336 |     """
1337 |     if pieces["closest-tag"]:
1338 |         rendered = pieces["closest-tag"]
1339 |         if pieces["distance"]:
1340 |             rendered += "-%d-g%s" % (pieces["distance"], pieces["short"])
1341 |     else:
1342 |         # exception #1
1343 |         rendered = pieces["short"]
1344 |     if pieces["dirty"]:
1345 |         rendered += "-dirty"
1346 |     return rendered
1347 | 
1348 | 
1349 | def render_git_describe_long(pieces):
1350 |     """TAG-DISTANCE-gHEX[-dirty].
1351 | 
1352 |     Like 'git describe --tags --dirty --always --long'.
1353 |     The distance/hash is unconditional.
1354 | 
1355 |     Exceptions:
1356 |     1: no tags.
HEX[-dirty] (note: no 'g' prefix) 1357 | """ 1358 | if pieces["closest-tag"]: 1359 | rendered = pieces["closest-tag"] 1360 | rendered += "-%d-g%s" % (pieces["distance"], pieces["short"]) 1361 | else: 1362 | # exception #1 1363 | rendered = pieces["short"] 1364 | if pieces["dirty"]: 1365 | rendered += "-dirty" 1366 | return rendered 1367 | 1368 | 1369 | def render(pieces, style): 1370 | """Render the given version pieces into the requested style.""" 1371 | if pieces["error"]: 1372 | return {"version": "unknown", 1373 | "full-revisionid": pieces.get("long"), 1374 | "dirty": None, 1375 | "error": pieces["error"], 1376 | "date": None} 1377 | 1378 | if not style or style == "default": 1379 | style = "pep440" # the default 1380 | 1381 | if style == "pep440": 1382 | rendered = render_pep440(pieces) 1383 | elif style == "pep440-pre": 1384 | rendered = render_pep440_pre(pieces) 1385 | elif style == "pep440-post": 1386 | rendered = render_pep440_post(pieces) 1387 | elif style == "pep440-old": 1388 | rendered = render_pep440_old(pieces) 1389 | elif style == "git-describe": 1390 | rendered = render_git_describe(pieces) 1391 | elif style == "git-describe-long": 1392 | rendered = render_git_describe_long(pieces) 1393 | else: 1394 | raise ValueError("unknown style '%s'" % style) 1395 | 1396 | return {"version": rendered, "full-revisionid": pieces["long"], 1397 | "dirty": pieces["dirty"], "error": None, 1398 | "date": pieces.get("date")} 1399 | 1400 | 1401 | class VersioneerBadRootError(Exception): 1402 | """The project root directory is unknown or missing key files.""" 1403 | 1404 | 1405 | def get_versions(verbose=False): 1406 | """Get the project version from whatever source is available. 1407 | 1408 | Returns dict with two keys: 'version' and 'full'. 1409 | """ 1410 | if "versioneer" in sys.modules: 1411 | # see the discussion in cmdclass.py:get_cmdclass() 1412 | del sys.modules["versioneer"] 1413 | 1414 | root = get_root() 1415 | cfg = get_config_from_root(root) 1416 | 1417 | assert cfg.VCS is not None, "please set [versioneer]VCS= in setup.cfg" 1418 | handlers = HANDLERS.get(cfg.VCS) 1419 | assert handlers, "unrecognized VCS '%s'" % cfg.VCS 1420 | verbose = verbose or cfg.verbose 1421 | assert cfg.versionfile_source is not None, \ 1422 | "please set versioneer.versionfile_source" 1423 | assert cfg.tag_prefix is not None, "please set versioneer.tag_prefix" 1424 | 1425 | versionfile_abs = os.path.join(root, cfg.versionfile_source) 1426 | 1427 | # extract version from first of: _version.py, VCS command (e.g. 'git 1428 | # describe'), parentdir. This is meant to work for developers using a 1429 | # source checkout, for users of a tarball created by 'setup.py sdist', 1430 | # and for users of a tarball/zipball created by 'git archive' or github's 1431 | # download-from-tag feature or the equivalent in other VCSes. 
1432 | 
1433 |     get_keywords_f = handlers.get("get_keywords")
1434 |     from_keywords_f = handlers.get("keywords")
1435 |     if get_keywords_f and from_keywords_f:
1436 |         try:
1437 |             keywords = get_keywords_f(versionfile_abs)
1438 |             ver = from_keywords_f(keywords, cfg.tag_prefix, verbose)
1439 |             if verbose:
1440 |                 print("got version from expanded keyword %s" % ver)
1441 |             return ver
1442 |         except NotThisMethod:
1443 |             pass
1444 | 
1445 |     try:
1446 |         ver = versions_from_file(versionfile_abs)
1447 |         if verbose:
1448 |             print("got version from file %s %s" % (versionfile_abs, ver))
1449 |         return ver
1450 |     except NotThisMethod:
1451 |         pass
1452 | 
1453 |     from_vcs_f = handlers.get("pieces_from_vcs")
1454 |     if from_vcs_f:
1455 |         try:
1456 |             pieces = from_vcs_f(cfg.tag_prefix, root, verbose)
1457 |             ver = render(pieces, cfg.style)
1458 |             if verbose:
1459 |                 print("got version from VCS %s" % ver)
1460 |             return ver
1461 |         except NotThisMethod:
1462 |             pass
1463 | 
1464 |     try:
1465 |         if cfg.parentdir_prefix:
1466 |             ver = versions_from_parentdir(cfg.parentdir_prefix, root, verbose)
1467 |             if verbose:
1468 |                 print("got version from parentdir %s" % ver)
1469 |             return ver
1470 |     except NotThisMethod:
1471 |         pass
1472 | 
1473 |     if verbose:
1474 |         print("unable to compute version")
1475 | 
1476 |     return {"version": "0+unknown", "full-revisionid": None,
1477 |             "dirty": None, "error": "unable to compute version",
1478 |             "date": None}
1479 | 
1480 | 
1481 | def get_version():
1482 |     """Get the short version string for this project."""
1483 |     return get_versions()["version"]
1484 | 
1485 | 
1486 | def get_cmdclass(cmdclass=None):
1487 |     """Get the custom setuptools/distutils subclasses used by Versioneer.
1488 | 
1489 |     If the package uses a different cmdclass (e.g. one from numpy), it
1490 |     should be provided as an argument.
1491 |     """
1492 |     if "versioneer" in sys.modules:
1493 |         del sys.modules["versioneer"]
1494 |         # this fixes the "python setup.py develop" case (also 'install' and
1495 |         # 'easy_install .'), in which subdependencies of the main project are
1496 |         # built (using setup.py bdist_egg) in the same python process. Assume
1497 |         # a main project A and a dependency B, which use different versions
1498 |         # of Versioneer. A's setup.py imports A's Versioneer, leaving it in
1499 |         # sys.modules by the time B's setup.py is executed, causing B to run
1500 |         # with the wrong versioneer. Setuptools wraps the sub-dep builds in a
1501 |         # sandbox that restores sys.modules to its pre-build state, so the
1502 |         # parent is protected against the child's "import versioneer". By
1503 |         # removing ourselves from sys.modules here, before the child build
1504 |         # happens, we protect the child from the parent's versioneer too.
1505 | # Also see https://github.com/python-versioneer/python-versioneer/issues/52 1506 | 1507 | cmds = {} if cmdclass is None else cmdclass.copy() 1508 | 1509 | # we add "version" to both distutils and setuptools 1510 | from distutils.core import Command 1511 | 1512 | class cmd_version(Command): 1513 | description = "report generated version string" 1514 | user_options = [] 1515 | boolean_options = [] 1516 | 1517 | def initialize_options(self): 1518 | pass 1519 | 1520 | def finalize_options(self): 1521 | pass 1522 | 1523 | def run(self): 1524 | vers = get_versions(verbose=True) 1525 | print("Version: %s" % vers["version"]) 1526 | print(" full-revisionid: %s" % vers.get("full-revisionid")) 1527 | print(" dirty: %s" % vers.get("dirty")) 1528 | print(" date: %s" % vers.get("date")) 1529 | if vers["error"]: 1530 | print(" error: %s" % vers["error"]) 1531 | cmds["version"] = cmd_version 1532 | 1533 | # we override "build_py" in both distutils and setuptools 1534 | # 1535 | # most invocation pathways end up running build_py: 1536 | # distutils/build -> build_py 1537 | # distutils/install -> distutils/build ->.. 1538 | # setuptools/bdist_wheel -> distutils/install ->.. 1539 | # setuptools/bdist_egg -> distutils/install_lib -> build_py 1540 | # setuptools/install -> bdist_egg ->.. 1541 | # setuptools/develop -> ? 1542 | # pip install: 1543 | # copies source tree to a tempdir before running egg_info/etc 1544 | # if .git isn't copied too, 'git describe' will fail 1545 | # then does setup.py bdist_wheel, or sometimes setup.py install 1546 | # setup.py egg_info -> ? 1547 | 1548 | # we override different "build_py" commands for both environments 1549 | if 'build_py' in cmds: 1550 | _build_py = cmds['build_py'] 1551 | elif "setuptools" in sys.modules: 1552 | from setuptools.command.build_py import build_py as _build_py 1553 | else: 1554 | from distutils.command.build_py import build_py as _build_py 1555 | 1556 | class cmd_build_py(_build_py): 1557 | def run(self): 1558 | root = get_root() 1559 | cfg = get_config_from_root(root) 1560 | versions = get_versions() 1561 | _build_py.run(self) 1562 | # now locate _version.py in the new build/ directory and replace 1563 | # it with an updated value 1564 | if cfg.versionfile_build: 1565 | target_versionfile = os.path.join(self.build_lib, 1566 | cfg.versionfile_build) 1567 | print("UPDATING %s" % target_versionfile) 1568 | write_to_version_file(target_versionfile, versions) 1569 | cmds["build_py"] = cmd_build_py 1570 | 1571 | if "setuptools" in sys.modules: 1572 | from setuptools.command.build_ext import build_ext as _build_ext 1573 | else: 1574 | from distutils.command.build_ext import build_ext as _build_ext 1575 | 1576 | class cmd_build_ext(_build_ext): 1577 | def run(self): 1578 | root = get_root() 1579 | cfg = get_config_from_root(root) 1580 | versions = get_versions() 1581 | _build_ext.run(self) 1582 | if self.inplace: 1583 | # build_ext --inplace will only build extensions in 1584 | # build/lib<..> dir with no _version.py to write to. 1585 | # As in place builds will already have a _version.py 1586 | # in the module dir, we do not need to write one. 
1587 | return 1588 | # now locate _version.py in the new build/ directory and replace 1589 | # it with an updated value 1590 | target_versionfile = os.path.join(self.build_lib, 1591 | cfg.versionfile_source) 1592 | print("UPDATING %s" % target_versionfile) 1593 | write_to_version_file(target_versionfile, versions) 1594 | cmds["build_ext"] = cmd_build_ext 1595 | 1596 | if "cx_Freeze" in sys.modules: # cx_freeze enabled? 1597 | from cx_Freeze.dist import build_exe as _build_exe 1598 | # nczeczulin reports that py2exe won't like the pep440-style string 1599 | # as FILEVERSION, but it can be used for PRODUCTVERSION, e.g. 1600 | # setup(console=[{ 1601 | # "version": versioneer.get_version().split("+", 1)[0], # FILEVERSION 1602 | # "product_version": versioneer.get_version(), 1603 | # ... 1604 | 1605 | class cmd_build_exe(_build_exe): 1606 | def run(self): 1607 | root = get_root() 1608 | cfg = get_config_from_root(root) 1609 | versions = get_versions() 1610 | target_versionfile = cfg.versionfile_source 1611 | print("UPDATING %s" % target_versionfile) 1612 | write_to_version_file(target_versionfile, versions) 1613 | 1614 | _build_exe.run(self) 1615 | os.unlink(target_versionfile) 1616 | with open(cfg.versionfile_source, "w") as f: 1617 | LONG = LONG_VERSION_PY[cfg.VCS] 1618 | f.write(LONG % 1619 | {"DOLLAR": "$", 1620 | "STYLE": cfg.style, 1621 | "TAG_PREFIX": cfg.tag_prefix, 1622 | "PARENTDIR_PREFIX": cfg.parentdir_prefix, 1623 | "VERSIONFILE_SOURCE": cfg.versionfile_source, 1624 | }) 1625 | cmds["build_exe"] = cmd_build_exe 1626 | del cmds["build_py"] 1627 | 1628 | if 'py2exe' in sys.modules: # py2exe enabled? 1629 | from py2exe.distutils_buildexe import py2exe as _py2exe 1630 | 1631 | class cmd_py2exe(_py2exe): 1632 | def run(self): 1633 | root = get_root() 1634 | cfg = get_config_from_root(root) 1635 | versions = get_versions() 1636 | target_versionfile = cfg.versionfile_source 1637 | print("UPDATING %s" % target_versionfile) 1638 | write_to_version_file(target_versionfile, versions) 1639 | 1640 | _py2exe.run(self) 1641 | os.unlink(target_versionfile) 1642 | with open(cfg.versionfile_source, "w") as f: 1643 | LONG = LONG_VERSION_PY[cfg.VCS] 1644 | f.write(LONG % 1645 | {"DOLLAR": "$", 1646 | "STYLE": cfg.style, 1647 | "TAG_PREFIX": cfg.tag_prefix, 1648 | "PARENTDIR_PREFIX": cfg.parentdir_prefix, 1649 | "VERSIONFILE_SOURCE": cfg.versionfile_source, 1650 | }) 1651 | cmds["py2exe"] = cmd_py2exe 1652 | 1653 | # we override different "sdist" commands for both environments 1654 | if 'sdist' in cmds: 1655 | _sdist = cmds['sdist'] 1656 | elif "setuptools" in sys.modules: 1657 | from setuptools.command.sdist import sdist as _sdist 1658 | else: 1659 | from distutils.command.sdist import sdist as _sdist 1660 | 1661 | class cmd_sdist(_sdist): 1662 | def run(self): 1663 | versions = get_versions() 1664 | self._versioneer_generated_versions = versions 1665 | # unless we update this, the command will keep using the old 1666 | # version 1667 | self.distribution.metadata.version = versions["version"] 1668 | return _sdist.run(self) 1669 | 1670 | def make_release_tree(self, base_dir, files): 1671 | root = get_root() 1672 | cfg = get_config_from_root(root) 1673 | _sdist.make_release_tree(self, base_dir, files) 1674 | # now locate _version.py in the new base_dir directory 1675 | # (remembering that it may be a hardlink) and replace it with an 1676 | # updated value 1677 | target_versionfile = os.path.join(base_dir, cfg.versionfile_source) 1678 | print("UPDATING %s" % target_versionfile) 1679 | 
write_to_version_file(target_versionfile, 1680 | self._versioneer_generated_versions) 1681 | cmds["sdist"] = cmd_sdist 1682 | 1683 | return cmds 1684 | 1685 | 1686 | CONFIG_ERROR = """ 1687 | setup.cfg is missing the necessary Versioneer configuration. You need 1688 | a section like: 1689 | 1690 | [versioneer] 1691 | VCS = git 1692 | style = pep440 1693 | versionfile_source = src/myproject/_version.py 1694 | versionfile_build = myproject/_version.py 1695 | tag_prefix = 1696 | parentdir_prefix = myproject- 1697 | 1698 | You will also need to edit your setup.py to use the results: 1699 | 1700 | import versioneer 1701 | setup(version=versioneer.get_version(), 1702 | cmdclass=versioneer.get_cmdclass(), ...) 1703 | 1704 | Please read the docstring in ./versioneer.py for configuration instructions, 1705 | edit setup.cfg, and re-run the installer or 'python versioneer.py setup'. 1706 | """ 1707 | 1708 | SAMPLE_CONFIG = """ 1709 | # See the docstring in versioneer.py for instructions. Note that you must 1710 | # re-run 'versioneer.py setup' after changing this section, and commit the 1711 | # resulting files. 1712 | 1713 | [versioneer] 1714 | #VCS = git 1715 | #style = pep440 1716 | #versionfile_source = 1717 | #versionfile_build = 1718 | #tag_prefix = 1719 | #parentdir_prefix = 1720 | 1721 | """ 1722 | 1723 | INIT_PY_SNIPPET = """ 1724 | from ._version import get_versions 1725 | __version__ = get_versions()['version'] 1726 | del get_versions 1727 | """ 1728 | 1729 | 1730 | def do_setup(): 1731 | """Do main VCS-independent setup function for installing Versioneer.""" 1732 | root = get_root() 1733 | try: 1734 | cfg = get_config_from_root(root) 1735 | except (EnvironmentError, configparser.NoSectionError, 1736 | configparser.NoOptionError) as e: 1737 | if isinstance(e, (EnvironmentError, configparser.NoSectionError)): 1738 | print("Adding sample versioneer config to setup.cfg", 1739 | file=sys.stderr) 1740 | with open(os.path.join(root, "setup.cfg"), "a") as f: 1741 | f.write(SAMPLE_CONFIG) 1742 | print(CONFIG_ERROR, file=sys.stderr) 1743 | return 1 1744 | 1745 | print(" creating %s" % cfg.versionfile_source) 1746 | with open(cfg.versionfile_source, "w") as f: 1747 | LONG = LONG_VERSION_PY[cfg.VCS] 1748 | f.write(LONG % {"DOLLAR": "$", 1749 | "STYLE": cfg.style, 1750 | "TAG_PREFIX": cfg.tag_prefix, 1751 | "PARENTDIR_PREFIX": cfg.parentdir_prefix, 1752 | "VERSIONFILE_SOURCE": cfg.versionfile_source, 1753 | }) 1754 | 1755 | ipy = os.path.join(os.path.dirname(cfg.versionfile_source), 1756 | "__init__.py") 1757 | if os.path.exists(ipy): 1758 | try: 1759 | with open(ipy, "r") as f: 1760 | old = f.read() 1761 | except EnvironmentError: 1762 | old = "" 1763 | if INIT_PY_SNIPPET not in old: 1764 | print(" appending to %s" % ipy) 1765 | with open(ipy, "a") as f: 1766 | f.write(INIT_PY_SNIPPET) 1767 | else: 1768 | print(" %s unmodified" % ipy) 1769 | else: 1770 | print(" %s doesn't exist, ok" % ipy) 1771 | ipy = None 1772 | 1773 | # Make sure both the top-level "versioneer.py" and versionfile_source 1774 | # (PKG/_version.py, used by runtime code) are in MANIFEST.in, so 1775 | # they'll be copied into source distributions. Pip won't be able to 1776 | # install the package without this. 
1777 | manifest_in = os.path.join(root, "MANIFEST.in") 1778 | simple_includes = set() 1779 | try: 1780 | with open(manifest_in, "r") as f: 1781 | for line in f: 1782 | if line.startswith("include "): 1783 | for include in line.split()[1:]: 1784 | simple_includes.add(include) 1785 | except EnvironmentError: 1786 | pass 1787 | # That doesn't cover everything MANIFEST.in can do 1788 | # (http://docs.python.org/2/distutils/sourcedist.html#commands), so 1789 | # it might give some false negatives. Appending redundant 'include' 1790 | # lines is safe, though. 1791 | if "versioneer.py" not in simple_includes: 1792 | print(" appending 'versioneer.py' to MANIFEST.in") 1793 | with open(manifest_in, "a") as f: 1794 | f.write("include versioneer.py\n") 1795 | else: 1796 | print(" 'versioneer.py' already in MANIFEST.in") 1797 | if cfg.versionfile_source not in simple_includes: 1798 | print(" appending versionfile_source ('%s') to MANIFEST.in" % 1799 | cfg.versionfile_source) 1800 | with open(manifest_in, "a") as f: 1801 | f.write("include %s\n" % cfg.versionfile_source) 1802 | else: 1803 | print(" versionfile_source already in MANIFEST.in") 1804 | 1805 | # Make VCS-specific changes. For git, this means creating/changing 1806 | # .gitattributes to mark _version.py for export-subst keyword 1807 | # substitution. 1808 | do_vcs_install(manifest_in, cfg.versionfile_source, ipy) 1809 | return 0 1810 | 1811 | 1812 | def scan_setup_py(): 1813 | """Validate the contents of setup.py against Versioneer's expectations.""" 1814 | found = set() 1815 | setters = False 1816 | errors = 0 1817 | with open("setup.py", "r") as f: 1818 | for line in f.readlines(): 1819 | if "import versioneer" in line: 1820 | found.add("import") 1821 | if "versioneer.get_cmdclass()" in line: 1822 | found.add("cmdclass") 1823 | if "versioneer.get_version()" in line: 1824 | found.add("get_version") 1825 | if "versioneer.VCS" in line: 1826 | setters = True 1827 | if "versioneer.versionfile_source" in line: 1828 | setters = True 1829 | if len(found) != 3: 1830 | print("") 1831 | print("Your setup.py appears to be missing some important items") 1832 | print("(but I might be wrong). Please make sure it has something") 1833 | print("roughly like the following:") 1834 | print("") 1835 | print(" import versioneer") 1836 | print(" setup( version=versioneer.get_version(),") 1837 | print(" cmdclass=versioneer.get_cmdclass(), ...)") 1838 | print("") 1839 | errors += 1 1840 | if setters: 1841 | print("You should remove lines like 'versioneer.VCS = ' and") 1842 | print("'versioneer.versionfile_source = ' . This configuration") 1843 | print("now lives in setup.cfg, and should be removed from setup.py") 1844 | print("") 1845 | errors += 1 1846 | return errors 1847 | 1848 | 1849 | if __name__ == "__main__": 1850 | cmd = sys.argv[1] 1851 | if cmd == "setup": 1852 | errors = do_setup() 1853 | errors += scan_setup_py() 1854 | if errors: 1855 | sys.exit(1) 1856 | --------------------------------------------------------------------------------
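
The render_* helpers in versioneer.py above are easiest to follow with a concrete input. The sketch below is purely illustrative and not part of the repository: the script name and every value in the pieces dict are invented, but the dict's shape matches what git_pieces_from_vcs() returns for a working tree that is three commits past a "1.2.0" tag and has uncommitted changes. It assumes it is run from the repository root, so that import versioneer picks up the file listed above.

# render_styles_demo.py -- hypothetical example script, not shipped with ptplot
import versioneer  # the versioneer.py listed above

# Hand-built stand-in for the output of git_pieces_from_vcs(); sample values only.
pieces = {
    "closest-tag": "1.2.0",           # nearest reachable tag, tag_prefix stripped
    "distance": 3,                    # commits since that tag
    "short": "abc1234",               # short hex of HEAD
    "long": "abc1234" * 5 + "abcde",  # fake 40-character full revision id
    "dirty": True,                    # uncommitted changes in the working tree
    "error": None,
    "date": "2021-01-01T00:00:00-0500",
}

for style in ("pep440", "pep440-pre", "pep440-post", "pep440-old",
              "git-describe", "git-describe-long"):
    print("%-17s -> %s" % (style, versioneer.render(pieces, style)["version"]))

# Given the sample values above, this should print:
#   pep440            -> 1.2.0+3.gabc1234.dirty
#   pep440-pre        -> 1.2.0.post0.dev3
#   pep440-post       -> 1.2.0.post3.dev0+gabc1234
#   pep440-old        -> 1.2.0.post3.dev0
#   git-describe      -> 1.2.0-3-gabc1234-dirty
#   git-describe-long -> 1.2.0-3-gabc1234-dirty

In normal use none of this is called directly: as the CONFIG_ERROR message above spells out, setup.py simply passes versioneer.get_version() and versioneer.get_cmdclass() to setup(), and the rendering style is selected by the style option in the [versioneer] section of setup.cfg.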