├── .conda ├── environment.yml ├── environment_minimal.yml └── meta.yaml ├── .coveragerc ├── .gitattributes ├── .gitignore ├── CODE_OF_CONDUCT.md ├── CONTRIBUTING.md ├── LICENSE ├── MANIFEST.in ├── README.md ├── azure-pipelines.yml ├── binder ├── environment.yml └── postBuild ├── doc8.ini ├── docs ├── _templates │ └── autosummary │ │ ├── base.rst │ │ ├── class.rst │ │ └── module.rst ├── conf.py ├── generated │ └── unumpy.rst ├── index.rst └── logo.png ├── mypy.ini ├── notebooks ├── 01_user_facing.ipynb └── fruit-puzzle.jpg ├── pytest.ini ├── readthedocs.yml ├── requirements.txt ├── requirements ├── all.txt ├── backends.txt ├── docs.txt ├── optional.txt └── tests.txt ├── setup.cfg ├── setup.py ├── unumpy ├── __init__.py ├── _multimethods.py ├── _version.py ├── cupy_backend.py ├── dask_backend.py ├── lib │ ├── __init__.py │ ├── _multimethods.py │ └── index_tricks │ │ ├── __init__.py │ │ └── _multimethods.py ├── linalg │ ├── __init__.py │ └── _multimethods.py ├── numpy_backend.py ├── random │ ├── __init__.py │ └── _multimethods.py ├── sparse_backend.py ├── tests │ ├── __init__.py │ └── test_numpy.py └── torch_backend.py └── versioneer.py /.conda/environment.yml: -------------------------------------------------------------------------------- 1 | name: uarray 2 | channels: 3 | - pytorch 4 | - conda-forge 5 | dependencies: 6 | - python <3.9 7 | - pip 8 | - sphinx 9 | - sphinx_rtd_theme 10 | - pytest 11 | - pytest-cov 12 | - mypy 13 | - pytorch-cpu 14 | - scipy 15 | - dask 16 | - sparse 17 | - doc8 18 | - black 19 | - pytest-mypy 20 | - pytest-black 21 | -------------------------------------------------------------------------------- /.conda/environment_minimal.yml: -------------------------------------------------------------------------------- 1 | name: uarray_min 2 | channels: 3 | - pytorch 4 | - conda-forge 5 | dependencies: 6 | - python <3.9 7 | - pip 8 | - sphinx 9 | - sphinx_rtd_theme 10 | - pytest 11 | - pytest-cov 12 | - mypy 13 | - pytorch-cpu 14 | - scipy 15 | 
- dask 16 | - sparse 17 | - doc8 18 | - black 19 | - pytest-mypy 20 | - pytest-black 21 | -------------------------------------------------------------------------------- /.conda/meta.yaml: -------------------------------------------------------------------------------- 1 | {% set version = '0.4' %} 2 | {% set sha256 = '06fc0874e1e27c91c862d40b24ee314b95e038275ce653a3dfd8eca00030a85f' %} 3 | 4 | package: 5 | name: unumpy 6 | version: {{ version }} 7 | 8 | source: 9 | fn: unumpy-{{ version }}.tar.gz 10 | url: https://github.com/Quansight-Labs/unumpy/archive/v{{ version }}.tar.gz 11 | sha256: {{ sha256 }} 12 | 13 | requirements: 14 | build: 15 | - python 16 | run: 17 | - python 18 | 19 | build: 20 | noarch: python 21 | 22 | test: {} 23 | 24 | about: 25 | home: https://github.com/Quansight-Labs/unumpy 26 | license: BSD 27 | license_family: BSD 28 | license_file: LICENSE 29 | summary: 'Array interface object for Python with pluggable backends and a multiple-dispatch mechanism for defining down-stream functions' 30 | dev_url: https://github.com/Quansight-Labs/unumpy 31 | 32 | extra: 33 | recipe-maintainers: 34 | - hameerabbasi 35 | - costrouc 36 | -------------------------------------------------------------------------------- /.coveragerc: -------------------------------------------------------------------------------- 1 | [run] 2 | branch = True 3 | source = 4 | uarray 5 | [report] 6 | omit = 7 | **/tests/ 8 | docs/ 9 | uarray/_version.py 10 | -------------------------------------------------------------------------------- /.gitattributes: -------------------------------------------------------------------------------- 1 | uarray/_version.py export-subst 2 | unumpy/_version.py export-subst 3 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | .vscode 2 | *.code-workspace 3 | .pytest_cache 4 | .mypy_cache 5 | .hypothesis 6 | __pycache__ 7 | 
Digraph* 8 | .ipynb_checkpoints 9 | junit 10 | test_readme.py 11 | junit 12 | coverage.xml 13 | **/.coverage 14 | dist 15 | **/*.pyc 16 | _build/ 17 | docs/_build 18 | .idea/ 19 | pytype_output 20 | htmlcov/ 21 | *.egg-info/ 22 | sandbox.py 23 | *.so 24 | build/ 25 | .eggs/ 26 | dask-worker-space/ 27 | *.ipynb 28 | default.profraw 29 | .coverage.* 30 | -------------------------------------------------------------------------------- /CODE_OF_CONDUCT.md: -------------------------------------------------------------------------------- 1 | This repository is governed by the Quansight Repository Code of Conduct. It 2 | can be found here: 3 | https://github.com/Quansight/.github/blob/master/CODE_OF_CONDUCT.md. 4 | -------------------------------------------------------------------------------- /CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | # Contributing 2 | 3 | Contributions to `unumpy` are welcome and appreciated. Contributions can take the form of bug reports, documentation, code, and more. 4 | 5 | ## Getting the code 6 | 7 | Make a fork of the main [unumpy repository](https://github.com/Quansight-Labs/unumpy) and clone the fork: 8 | 9 | ``` 10 | git clone https://github.com//unumpy 11 | ``` 12 | 13 | ## Install 14 | 15 | Note that unumpy supports Python versions >= 3.5. If you're running `conda` and would prefer to have dependencies 16 | pulled from there, use 17 | 18 | ``` 19 | conda env create -f .conda/environment.yml 20 | conda activate uarray 21 | ``` 22 | 23 | `unumpy` and all development dependencies can be installed via: 24 | 25 | ``` 26 | pip install -e ".[all]" 27 | ``` 28 | 29 | 30 | This will create an environment named `uarray` which you can use for development. 
31 | 32 | ## Testing 33 | 34 | Tests can be run from the main uarray directory as follows: 35 | 36 | ``` 37 | pytest 38 | ``` 39 | 40 | To run a subset of tests: 41 | 42 | ``` 43 | pytest unumpy/tests/test_numpy.py 44 | ``` 45 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | BSD 3-Clause License 2 | 3 | Copyright (c) 2018, Quansight-Labs 4 | All rights reserved. 5 | 6 | Redistribution and use in source and binary forms, with or without 7 | modification, are permitted provided that the following conditions are met: 8 | 9 | * Redistributions of source code must retain the above copyright notice, this 10 | list of conditions and the following disclaimer. 11 | 12 | * Redistributions in binary form must reproduce the above copyright notice, 13 | this list of conditions and the following disclaimer in the documentation 14 | and/or other materials provided with the distribution. 15 | 16 | * Neither the name of the copyright holder nor the names of its 17 | contributors may be used to endorse or promote products derived from 18 | this software without specific prior written permission. 19 | 20 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" 21 | AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE 22 | IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE 23 | DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE 24 | FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL 25 | DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR 26 | SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER 27 | CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, 28 | OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 29 | OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 30 | -------------------------------------------------------------------------------- /MANIFEST.in: -------------------------------------------------------------------------------- 1 | include versioneer.py 2 | recursive-include unumpy * 3 | include LICENSE 4 | include requirements.txt 5 | recursive-include requirements *.txt 6 | global-exclude *.pyc *.so *.dll *.dylib 7 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # `unumpy` - NumPy, but implementation-independent 2 | 3 | [![Gitter](https://badges.gitter.im/Plures/uarray.svg)](https://gitter.im/Plures/uarray?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge) [![Binder](https://mybinder.org/badge_logo.svg)](https://mybinder.org/v2/gh/Quansight-Labs/unumpy/master) [![Build Status](https://dev.azure.com/Quansight-Labs/uarray/_apis/build/status/Quansight-Labs.unumpy?branchName=master)](https://dev.azure.com/Quansight-Labs/uarray/_build/latest?definitionId=2&branchName=master) [![PyPI](https://img.shields.io/pypi/v/unumpy.svg?style=flat-square)](https://pypi.org/project/unumpy/) 4 | 5 | uarray logo 6 | 7 | - [Documentation](https://unumpy.uarray.org/en/latest/) 8 | - [Road Map](https://github.com/orgs/Quansight-Labs/projects/1) 9 | 10 | ## Contributing 11 | 12 | See [`CONTRIBUTING.md`](CONTRIBUTING.md) for more information on how to contribute to 
`unumpy`.
--no-deps 67 | displayName: Install package 68 | 69 | - script: | 70 | source activate uarray 71 | sphinx-build -W -b html docs/ _build/html 72 | displayName: Build docs 73 | 74 | - script: | 75 | source activate uarray 76 | doc8 77 | displayName: Lint docs 78 | 79 | - task: PublishPipelineArtifact@0 80 | inputs: 81 | artifactName: 'Documentation' 82 | targetPath: '$(System.DefaultWorkingDirectory)/_build/html' 83 | 84 | trigger: 85 | branches: 86 | include: 87 | - master 88 | 89 | pr: 90 | - master -------------------------------------------------------------------------------- /binder/environment.yml: -------------------------------------------------------------------------------- 1 | ../.conda/environment.yml -------------------------------------------------------------------------------- /binder/postBuild: -------------------------------------------------------------------------------- 1 | set -ex 2 | 3 | conda install jupyterlab jupyterlab_launcher 4 | pip install git+https://github.com/Quansight-Labs/uarray.git 5 | pip install --no-deps -e . 6 | jupyter lab clean 7 | -------------------------------------------------------------------------------- /doc8.ini: -------------------------------------------------------------------------------- 1 | [doc8] 2 | max-line-length=88 3 | ignore-path=docs/generated,docs/_build,_build,*.egg-info -------------------------------------------------------------------------------- /docs/_templates/autosummary/base.rst: -------------------------------------------------------------------------------- 1 | {{ objname | escape | underline}} 2 | 3 | .. currentmodule:: {{ module }} 4 | 5 | .. auto{{ objtype }}:: {{ objname }} 6 | -------------------------------------------------------------------------------- /docs/_templates/autosummary/class.rst: -------------------------------------------------------------------------------- 1 | {{ objname | escape | underline}} 2 | 3 | .. currentmodule:: {{ module }} 4 | 5 | .. 
autoclass:: {{ objname }} 6 | 7 | {% block attributes %} 8 | {% if attributes %} 9 | .. rubric:: Attributes 10 | .. autosummary:: 11 | :toctree: 12 | {% for item in attributes %} 13 | {{ name }}.{{ item }} 14 | {% endfor %} 15 | {% endif %} 16 | {% endblock %} 17 | 18 | {% block methods %} 19 | {% if methods %} 20 | .. rubric:: Methods 21 | .. autosummary:: 22 | :toctree: 23 | {% for item in methods %} 24 | {{ name }}.{{ item }} 25 | {% endfor %} 26 | {% endif %} 27 | {% endblock %} 28 | -------------------------------------------------------------------------------- /docs/_templates/autosummary/module.rst: -------------------------------------------------------------------------------- 1 | {{ fullname | escape | underline }} 2 | 3 | .. rubric:: Description 4 | .. automodule:: {{ fullname }} 5 | .. currentmodule:: {{ fullname }} 6 | 7 | {% if classes %} 8 | .. rubric:: Classes 9 | 10 | .. autosummary:: 11 | :toctree: 12 | 13 | {% for class in classes %} 14 | {{ class }} 15 | {% endfor %} 16 | 17 | {% endif %} 18 | 19 | {% if functions %} 20 | .. rubric:: Functions 21 | 22 | .. autosummary:: 23 | :toctree: 24 | 25 | {% for function in functions %} 26 | {{ function }} 27 | {% endfor %} 28 | 29 | {% endif %} 30 | -------------------------------------------------------------------------------- /docs/conf.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | # 3 | # Configuration file for the Sphinx documentation builder. 4 | # 5 | # This file does only contain a selection of the most common options. For a 6 | # full list see the documentation: 7 | # http://www.sphinx-doc.org/en/master/config 8 | 9 | # -- Path setup -------------------------------------------------------------- 10 | 11 | # If extensions (or modules to document with autodoc) are in another directory, 12 | # add these directories to sys.path here. 
If the directory is relative to the 13 | # documentation root, use os.path.abspath to make it absolute, like shown here. 14 | # 15 | import os 16 | import sys 17 | 18 | # sys.path.insert(0, os.path.abspath('.')) 19 | from typing import List, Dict 20 | 21 | sys.path.insert(0, os.path.abspath("..")) 22 | from unumpy import __version__ # noqa: E402 23 | 24 | 25 | # -- Project information ----------------------------------------------------- 26 | 27 | project = "unumpy" 28 | copyright = "2019, Quansight-Labs" 29 | author = "Quansight-Labs" 30 | 31 | # The short X.Y version 32 | version = __version__ 33 | # The full version, including alpha/beta/rc tags 34 | release = __version__ 35 | 36 | 37 | # -- General configuration --------------------------------------------------- 38 | 39 | # If your documentation needs a minimal Sphinx version, state it here. 40 | # 41 | # needs_sphinx = '1.0' 42 | 43 | # Add any Sphinx extension module names here, as strings. They can be 44 | # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom 45 | # ones. 46 | extensions: List[str] = [ 47 | "sphinx.ext.autodoc", 48 | "sphinx.ext.viewcode", 49 | "sphinx.ext.napoleon", 50 | "sphinx.ext.intersphinx", 51 | "sphinx.ext.autosummary", 52 | "sphinx.ext.doctest", 53 | ] 54 | 55 | # Add any paths that contain templates here, relative to this directory. 56 | templates_path = ["_templates"] 57 | 58 | # The suffix(es) of source filenames. 59 | # You can specify multiple suffix as a list of string: 60 | # 61 | # source_suffix = ['.rst', '.md'] 62 | source_suffix = ".rst" 63 | 64 | # The master toctree document. 65 | master_doc = "index" 66 | 67 | # The language for content autogenerated by Sphinx. Refer to documentation 68 | # for a list of supported languages. 69 | # 70 | # This is also used if you do content translation via gettext catalogs. 71 | # Usually you set "language" from the command line for these cases. 
72 | language = None 73 | 74 | # List of patterns, relative to source directory, that match files and 75 | # directories to ignore when looking for source files. 76 | # This pattern also affects html_static_path and html_extra_path. 77 | exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"] 78 | 79 | # The name of the Pygments (syntax highlighting) style to use. 80 | pygments_style = None 81 | 82 | 83 | # -- Options for HTML output ------------------------------------------------- 84 | 85 | # The theme to use for HTML and HTML Help pages. See the documentation for 86 | # a list of builtin themes. 87 | # 88 | html_theme = "sphinx_rtd_theme" 89 | html_logo = "logo.png" 90 | html_favicon = "logo.png" 91 | 92 | # Theme options are theme-specific and customize the look and feel of a theme 93 | # further. For a list of options available for each theme, see the 94 | # documentation. 95 | # 96 | # html_theme_options = {} 97 | 98 | # Add any paths that contain custom static files (such as style sheets) here, 99 | # relative to this directory. They are copied after the builtin static files, 100 | # so a file named "default.css" will overwrite the builtin "default.css". 101 | html_static_path: List[str] = [] 102 | 103 | # Custom sidebar templates, must be a dictionary that maps document names 104 | # to template names. 105 | # 106 | # The default sidebars (for documents that don't match any pattern) are 107 | # defined by theme itself. Builtin themes are using these templates by 108 | # default: ``['localtoc.html', 'relations.html', 'sourcelink.html', 109 | # 'searchbox.html']``. 110 | # 111 | # html_sidebars = {} 112 | 113 | 114 | # -- Options for HTMLHelp output --------------------------------------------- 115 | 116 | # Output file base name for HTML help builder. 
117 | htmlhelp_basename = "uarraydoc" 118 | 119 | 120 | # -- Options for LaTeX output ------------------------------------------------ 121 | 122 | latex_elements: Dict[str, str] = { 123 | # The paper size ('letterpaper' or 'a4paper'). 124 | # 125 | # 'papersize': 'letterpaper', 126 | # The font size ('10pt', '11pt' or '12pt'). 127 | # 128 | # 'pointsize': '10pt', 129 | # Additional stuff for the LaTeX preamble. 130 | # 131 | # 'preamble': '', 132 | # Latex figure (float) alignment 133 | # 134 | # 'figure_align': 'htbp', 135 | } 136 | 137 | # Grouping the document tree into LaTeX files. List of tuples 138 | # (source start file, target name, title, 139 | # author, documentclass [howto, manual, or own class]). 140 | latex_documents = [ 141 | (master_doc, "uarray.tex", "uarray Documentation", "Quansight-Labs", "manual") 142 | ] 143 | 144 | 145 | # -- Options for manual page output ------------------------------------------ 146 | 147 | # One entry per manual page. List of tuples 148 | # (source start file, name, description, authors, manual section). 149 | man_pages = [(master_doc, "uarray", "uarray Documentation", [author], 1)] 150 | 151 | 152 | # -- Options for Texinfo output ---------------------------------------------- 153 | 154 | # Grouping the document tree into Texinfo files. List of tuples 155 | # (source start file, target name, title, author, 156 | # dir menu entry, description, category) 157 | texinfo_documents = [ 158 | ( 159 | master_doc, 160 | "uarray", 161 | "uarray Documentation", 162 | author, 163 | "uarray", 164 | "One line description of project.", 165 | "Miscellaneous", 166 | ) 167 | ] 168 | 169 | 170 | # -- Options for Epub output ------------------------------------------------- 171 | 172 | # Bibliographic Dublin Core info. 173 | epub_title = project 174 | 175 | # The unique identifier of the text. This can be a ISBN number 176 | # or the project homepage. 177 | # 178 | # epub_identifier = '' 179 | 180 | # A unique identification for the text. 
181 | # 182 | # epub_uid = '' 183 | 184 | # A list of files that should not be packed into the epub file. 185 | epub_exclude_files = ["search.html"] 186 | 187 | autosummary_generate = True 188 | autoclass_content = "both" 189 | 190 | intersphinx_mapping = { 191 | "python": ("https://docs.python.org/3/", None), 192 | "numpy": ("https://docs.scipy.org/doc/numpy/", None), 193 | "scipy": ("https://docs.scipy.org/doc/scipy/reference/", None), 194 | "uarray": ("https://uarray.org/en/latest/", None), 195 | } 196 | 197 | doctest_global_setup = """ 198 | import uarray as ua 199 | """ 200 | -------------------------------------------------------------------------------- /docs/generated/unumpy.rst: -------------------------------------------------------------------------------- 1 | unumpy 2 | ====== 3 | 4 | .. automodule:: unumpy -------------------------------------------------------------------------------- /docs/index.rst: -------------------------------------------------------------------------------- 1 | ``unumpy`` 2 | ========== 3 | 4 | .. note:: 5 | This page describes the overall philosophy behind :obj:`unumpy`. If you are 6 | interested in a general dispatch mechanism, see :obj:`uarray`. 7 | 8 | :obj:`unumpy` builds on top of :obj:`uarray`. It is an effort to specify the core 9 | NumPy API, and provide backends for the API. 10 | 11 | What's new in ``unumpy``? 12 | ------------------------- 13 | 14 | :obj:`unumpy` is the first approach to leverage :obj:`uarray` in order to build a 15 | generic backend system for (what we hope will be) the core NumPy API specification. 16 | It's possible to create the backend object, and use that to perform operations. 17 | In addition, it's possible to change the used backend via a context manager. 18 | 19 | Relation to the NumPy duck-array ecosystem 20 | ------------------------------------------ 21 | 22 | There are three main NumPy enhancement proposals (NEPs) inside NumPy itself that relate 23 | to the duck-array ecosystem. 
There is `NEP-22 `_, 24 | which is a high-level overview of the duck-array ecosystem, and the direction NumPy 25 | intends to move towards. Two main protocols were introduced to fill this gap, 26 | the ``__array_function__`` protocol defined in `NEP-18 `_, 27 | and the older ``__array_ufunc__`` protocol defined in `NEP-13 `_. 28 | 29 | :obj:`unumpy` provides an an alternate framework based on :obj:`uarray`, bypassing 30 | the ``__array_function__`` and ``__array_ufunc__`` protocols entirely. It 31 | provides a clear separation of concerns. It defines callables which can be overridden, 32 | and expresses everything else in terms of these callables. See the :obj:`uarray` 33 | documentation for more details. 34 | 35 | 36 | .. toctree:: 37 | :hidden: 38 | :maxdepth: 3 39 | 40 | generated/unumpy 41 | 42 | Indices and tables 43 | ================== 44 | 45 | * :ref:`genindex` 46 | * :ref:`modindex` 47 | * :ref:`search` 48 | -------------------------------------------------------------------------------- /docs/logo.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Quansight-Labs/unumpy/97dcc82e8f3343c2a66b5ee6d8081886652de1d0/docs/logo.png -------------------------------------------------------------------------------- /mypy.ini: -------------------------------------------------------------------------------- 1 | [mypy] 2 | python_version = 3.7 3 | ignore_missing_imports = True 4 | warn_redundant_casts = True 5 | -------------------------------------------------------------------------------- /notebooks/01_user_facing.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "# What is `uarray`?\n", 8 | "At its core, `uarray` is a dispatch and back-end mechanism specifically geared towards array computing. 
Combined with its sister packages `unumpy` (and others currently in development), it allows NumPy functions to be overridden by their counterparts in other libraries (such as Dask, Xnd, and so on) while using the exact same code everywhere. Backends can be changed using just a context manager.\n", 9 | "\n", 10 | "Please note that only a small subset of the NumPy API is implemented, and not every backend implements every API method." 11 | ] 12 | }, 13 | { 14 | "cell_type": "code", 15 | "execution_count": 1, 16 | "metadata": {}, 17 | "outputs": [], 18 | "source": [ 19 | "import uarray as ua\n", 20 | "import unumpy as np # Note the changed import statement" 21 | ] 22 | }, 23 | { 24 | "cell_type": "code", 25 | "execution_count": 2, 26 | "metadata": {}, 27 | "outputs": [], 28 | "source": [ 29 | "import unumpy.xnd_backend as XndBackend\n", 30 | "import unumpy.numpy_backend as NumpyBackend\n", 31 | "from unumpy.dask_backend import DaskBackend\n", 32 | "import unumpy.sparse_backend as SparseBackend\n", 33 | "\n", 34 | "ua.set_global_backend(NumpyBackend)\n", 35 | "ua.register_backend(DaskBackend(inner=SparseBackend))\n", 36 | "ua.register_backend(XndBackend)\n" 37 | ] 38 | }, 39 | { 40 | "cell_type": "markdown", 41 | "metadata": {}, 42 | "source": [ 43 | "## Computing on different back-ends\n", 44 | "`unumpy` allows you to compute with different back-ends. Here are examples of creating arrays via `unumpy` (something not currently possible with NEP-18, the `__array_function__` protocol)." 
45 | ] 46 | }, 47 | { 48 | "cell_type": "code", 49 | "execution_count": 3, 50 | "metadata": {}, 51 | "outputs": [ 52 | { 53 | "name": "stdout", 54 | "output_type": "stream", 55 | "text": [ 56 | "With the XND backend set, the type of the array is: \n", 57 | "With NumPy backend set, the type of the array is: \n", 58 | "With Dask Backend set, the type of the array is: \n", 59 | "With Dask Backend set, the type of the computed array is: \n" 60 | ] 61 | } 62 | ], 63 | "source": [ 64 | "my_list = [0, 1, 2, 3, 4]\n", 65 | "\n", 66 | "with ua.set_backend(XndBackend):\n", 67 | " x = np.asarray(my_list)\n", 68 | "print('With the XND backend set, the type of the array is: {}'.format(type(x)))\n", 69 | "\n", 70 | "with ua.set_backend(NumpyBackend):\n", 71 | " y = np.asarray(my_list)\n", 72 | "print('With NumPy backend set, the type of the array is: {}'.format(type(y)))\n", 73 | "\n", 74 | "with ua.set_backend(DaskBackend(inner=SparseBackend)):\n", 75 | " z = np.asarray(my_list)\n", 76 | "print('With Dask Backend set, the type of the array is: {}'.format(type(z)))\n", 77 | "print('With Dask Backend set, the type of the computed array is: {}'.format(type(z.compute())))" 78 | ] 79 | }, 80 | { 81 | "cell_type": "markdown", 82 | "metadata": {}, 83 | "source": [ 84 | "## Computing based on the type of array passed in\n", 85 | "`unumpy` allows you to compute on arrays based on the type, in a fashion similar to NEP-18." 
86 | ] 87 | }, 88 | { 89 | "cell_type": "code", 90 | "execution_count": 5, 91 | "metadata": {}, 92 | "outputs": [ 93 | { 94 | "name": "stdout", 95 | "output_type": "stream", 96 | "text": [ 97 | "With np.sum(xnd_array) we get: \n", 98 | "With np.sum(numpy_array) we get: \n", 99 | "With np.sum(dask_array) we get: \n" 100 | ] 101 | } 102 | ], 103 | "source": [ 104 | "print('With np.sum(xnd_array) we get: {}'.format(type(np.sum(x))))\n", 105 | "print('With np.sum(numpy_array) we get: {}'.format(type(np.sum(y))))\n", 106 | "print('With np.sum(dask_array) we get: {}'.format(type(np.sum(z))))" 107 | ] 108 | }, 109 | { 110 | "cell_type": "markdown", 111 | "metadata": {}, 112 | "source": [ 113 | "## Forcing a particular backend\n", 114 | "You can even force a particular back-end, if you want to pipe all possible computations through that back-end." 115 | ] 116 | }, 117 | { 118 | "cell_type": "code", 119 | "execution_count": 8, 120 | "metadata": {}, 121 | "outputs": [ 122 | { 123 | "name": "stdout", 124 | "output_type": "stream", 125 | "text": [ 126 | "Using the Dask backend with coerce=True on a NumPy array: \n", 127 | "Using the Dask backend with coerce=True on a NumPy array: \n" 128 | ] 129 | } 130 | ], 131 | "source": [ 132 | "with ua.set_backend(DaskBackend(inner=SparseBackend), coerce=True):\n", 133 | " print('Using the Dask backend with coerce=True on a NumPy array: {}'.format(type(np.sum(z))))\n", 134 | " print('Using the Dask backend with coerce=True on a NumPy array: {}'.format(type(np.sum(z).compute())))" 135 | ] 136 | }, 137 | { 138 | "cell_type": "code", 139 | "execution_count": null, 140 | "metadata": {}, 141 | "outputs": [], 142 | "source": [] 143 | } 144 | ], 145 | "metadata": { 146 | "kernelspec": { 147 | "display_name": "Python 3", 148 | "language": "python", 149 | "name": "python3" 150 | }, 151 | "language_info": { 152 | "codemirror_mode": { 153 | "name": "ipython", 154 | "version": 3 155 | }, 156 | "file_extension": ".py", 157 | "mimetype": 
"text/x-python", 158 | "name": "python", 159 | "nbconvert_exporter": "python", 160 | "pygments_lexer": "ipython3", 161 | "version": "3.7.6" 162 | } 163 | }, 164 | "nbformat": 4, 165 | "nbformat_minor": 4 166 | } 167 | -------------------------------------------------------------------------------- /notebooks/fruit-puzzle.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Quansight-Labs/unumpy/97dcc82e8f3343c2a66b5ee6d8081886652de1d0/notebooks/fruit-puzzle.jpg -------------------------------------------------------------------------------- /pytest.ini: -------------------------------------------------------------------------------- 1 | [pytest] 2 | junit_family = xunit2 3 | addopts = --mypy --black --doctest-modules --junitxml=junit/test-results.xml --cov-report=xml --cov-report=term --cov --cov-report html --cov . --cov-config .coveragerc 4 | testpaths = 5 | unumpy 6 | doctest_optionflags= IGNORE_EXCEPTION_DETAIL 7 | -------------------------------------------------------------------------------- /readthedocs.yml: -------------------------------------------------------------------------------- 1 | version: 2 2 | 3 | sphinx: 4 | configuration: docs/conf.py 5 | fail_on_warning: true 6 | 7 | formats: all 8 | 9 | build: 10 | image: latest 11 | 12 | python: 13 | version: 3.7 14 | install: 15 | - method: pip 16 | path: . 17 | extra_requirements: 18 | - docs 19 | -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | . 
2 | -------------------------------------------------------------------------------- /requirements/all.txt: -------------------------------------------------------------------------------- 1 | -r backends.txt 2 | -r optional.txt 3 | -r tests.txt 4 | -r docs.txt 5 | -------------------------------------------------------------------------------- /requirements/backends.txt: -------------------------------------------------------------------------------- 1 | torch 2 | numpy 3 | scipy 4 | gumath 5 | dask 6 | sparse 7 | -------------------------------------------------------------------------------- /requirements/docs.txt: -------------------------------------------------------------------------------- 1 | sphinx 2 | sphinx_rtd_theme 3 | doc8 4 | -------------------------------------------------------------------------------- /requirements/optional.txt: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Quansight-Labs/unumpy/97dcc82e8f3343c2a66b5ee6d8081886652de1d0/requirements/optional.txt -------------------------------------------------------------------------------- /requirements/tests.txt: -------------------------------------------------------------------------------- 1 | pytest>=3.5 2 | pytest-flake8 3 | pytest-cov 4 | pytest-mypy 5 | -------------------------------------------------------------------------------- /setup.cfg: -------------------------------------------------------------------------------- 1 | [versioneer] 2 | VCS = git 3 | style = pep440 4 | versionfile_source = unumpy/_version.py 5 | versionfile_build = unumpy/_version.py 6 | tag_prefix = 7 | parentdir_prefix = unumpy- 8 | 9 | [bdist_wheel] 10 | universal=1 11 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | from setuptools import setup, find_packages 4 | import versioneer 5 
cwd = Path(os.path.dirname(os.path.abspath(__file__)))


def open_reqs_file(file, reqs_path=Path(cwd)):
    """Read a requirements file and return its entries as a list of strings.

    ``-r other.txt`` include lines are expanded recursively, splicing the
    included file's entries in place of the ``-r`` line.

    :param file: file name, relative to *reqs_path*.
    :param reqs_path: directory that contains the requirements files.
    :return: list of requirement strings with all includes expanded.
    """
    # Bug fix: the file was never opened, so ``reqs`` was an unbound name and
    # every call raised NameError.  Read the file into a list of lines first.
    with (reqs_path / file).open() as f:
        reqs = f.read().strip().split("\n")

    i = 0
    while i < len(reqs):
        if reqs[i].startswith("-r"):
            # Replace the "-r <file>" line with the included file's entries.
            # The index is deliberately not advanced so nested includes in the
            # spliced-in lines are expanded too.
            reqs[i : i + 1] = open_reqs_file(reqs[i][2:].strip(), reqs_path=reqs_path)
        else:
            i += 1

    return reqs
Python :: 3.6", 70 | "Programming Language :: Python :: 3.7", 71 | "Programming Language :: Python :: 3 :: Only", 72 | "Intended Audience :: Developers", 73 | "Intended Audience :: Science/Research", 74 | ], 75 | project_urls={ 76 | "Documentation": "https://unumpy.uarray.org/", 77 | "Source": "https://github.com/Quansight-Labs/uarray/", 78 | "Tracker": "https://github.com/Quansight-Labs/uarray/issues", 79 | }, 80 | python_requires=">=3.5, <4", 81 | ) 82 | -------------------------------------------------------------------------------- /unumpy/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | .. note:: 3 | If you are interested in writing backends or multimethods for ``unumpy``, 4 | please look at the documentation for :obj:`uarray`, which explains how to 5 | do this. 6 | 7 | ``unumpy`` is meant for three groups of individuals: 8 | 9 | * Those who write array-like objects, like developers of Dask, Xnd, PyData/Sparse, 10 | CuPy, and others. 11 | * Library authors or programmers that hope to target multiple array backends, listed 12 | above. 13 | * Users who wish to target their code to other backends. 14 | 15 | For example, the following is currently possible: 16 | 17 | >>> import uarray as ua 18 | >>> import unumpy as np 19 | >>> from unumpy.dask_backend import DaskBackend 20 | >>> import unumpy.sparse_backend as SparseBackend 21 | >>> import sparse, dask.array as da 22 | >>> def main(): 23 | ... x = np.zeros(5) 24 | ... return np.exp(x) 25 | >>> with ua.set_backend(DaskBackend()): 26 | ... isinstance(main(), da.core.Array) 27 | True 28 | >>> with ua.set_backend(SparseBackend): 29 | ... isinstance(main(), sparse.SparseArray) 30 | True 31 | 32 | Now imagine some arbitrarily nested code, all for which the implementations can be 33 | switched out using a simple context manager. 
34 | 35 | ``unumpy`` is an in-progress mirror of the NumPy API which allows the user 36 | to dynamically switch out the backend that is used. It also allows 37 | auto-selection of the backend based on the arguments passed into a function. It does this by 38 | defining a collection of :obj:`uarray` multimethods that support dispatch. 39 | Although it currently provides a number of backends, the aspiration is that, 40 | with time, these back-ends will move into the respective libraries and it will be possible 41 | to use the library modules directly as backends. 42 | 43 | Note that currently, our coverage is very incomplete. However, we have attempted 44 | to provide at least one of each kind of object in ``unumpy`` for 45 | reference. There are :obj:`ufunc` s and :obj:`ndarray` s, which are classes, 46 | methods on :obj:`ufunc` such as :obj:`__call__ `, and 47 | :obj:`reduce ` and also functions such as :obj:`sum`. 48 | 49 | Where possible, we attempt to provide default implementations so that the whole API 50 | does not have to be reimplemented, however, it might be useful to gain speed or to 51 | re-implement it in terms of other functions which already exist in your library. 52 | 53 | The idea is that once things are more mature, it will be possible to switch 54 | out your backend with a simple import statement switch: 55 | 56 | .. code:: python 57 | 58 | import numpy as np # Old method 59 | import unumpy as np # Once this project is mature 60 | 61 | Currently, the following functions are supported: 62 | 63 | * All NumPy `universal functions `_. 64 | 65 | * :obj:`ufunc reductions ` 66 | 67 | For the full range of functions, use ``dir(unumpy)``. 68 | 69 | You can use the :obj:`uarray.set_backend` decorator to set a backend and use the 70 | desired backend. Note that not every backend supports every method. For example, 71 | PyTorch does not have an exact :obj:`ufunc` equivalent, so we dispatch to actual 72 | methods using a dictionary lookup. 
The following 73 | backends are supported: 74 | 75 | * :obj:`numpy_backend` 76 | * :obj:`torch_backend` 77 | * :obj:`xnd_backend` 78 | * :obj:`dask_backend` 79 | * :obj:`cupy_backend` 80 | * :obj:`sparse_backend` 81 | 82 | Writing Backends 83 | ---------------- 84 | 85 | Since :obj:`unumpy` is based on :obj:`uarray`, all overrides are done via the ``__ua_*__`` 86 | protocols. We strongly recommend you read the 87 | `uarray documentation `_ for context. 88 | 89 | All functions/methods in :obj:`unumpy` are :obj:`uarray` multimethods. This means 90 | you can override them using the ``__ua_function__`` protocol. 91 | 92 | In addition, :obj:`unumpy` allows dispatch on :obj:`numpy.ndarray`, 93 | :obj:`numpy.ufunc` and :obj:`numpy.dtype` via the ``__ua_convert__`` protocol. 94 | 95 | Dispatching on objects means one can intercept these, convert to an equivalent 96 | native format, or dispatch on their methods, including ``__call__``. 97 | 98 | We suggest you browse the source for example backends. 99 | 100 | Differences between overriding :obj:`numpy.ufunc` objects and other multimethods 101 | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ 102 | 103 | Of note here is that there are certain callable objects within NumPy, most 104 | prominently :obj:`numpy.ufunc` objects, that are not typical functions/methods, 105 | and so cannot be directly overridden, the key word here being *directly*. 106 | 107 | In Python, when a method is called, i.e. ``x.method(*a, **kw)`` it is the same 108 | as writing ``type(x).method(x, *a, **kw)`` assuming that ``method`` was a regular 109 | method defined on the type. This allows some very interesting things to happen. 110 | 111 | For instance, if we make ``method`` a multimethod, it allows us to override 112 | methods, provided we know that the first argument passed in will be ``x``. 
113 | 114 | One other thing that is possible (and done in :obj:`unumpy`) is to override the 115 | ``__call__`` method on a callable object. This is, in fact, exactly how to override 116 | a :obj:`ufunc`. 117 | 118 | Other interesting things that can be done (but as of now, are not) are to replace 119 | :obj:`ufunc` objects entirely by native equivalents overriding the ``__get__`` method. 120 | This technique can also be applied to ``dtype`` objects. 121 | 122 | Meta-array support 123 | ^^^^^^^^^^^^^^^^^^ 124 | 125 | Meta-arrays are arrays that can hold other arrays, such as Dask arrays and XArray 126 | datasets. 127 | 128 | If meta-arrays and libraries depend on :obj:`unumpy` instead of NumPy, they can benefit 129 | from containerization and hold arbitrary arrays; not just :obj:`numpy.ndarray` objects. 130 | 131 | Inside their ``__ua_function__`` implementation, they might need to do something like the 132 | following: 133 | 134 | >>> class Backend: pass 135 | >>> meta_backend = Backend() 136 | >>> meta_backend.__ua_domain__ = "numpy" 137 | >>> def ua_func(f, a, kw): 138 | ... # We do this to avoid infinite recursion 139 | ... with ua.skip_backend(meta_backend): 140 | ... # Actual implementation here 141 | ... pass 142 | >>> meta_backend.__ua_function__ = ua_func 143 | 144 | In this form, one could do something like the following to use the meta-backend: 145 | 146 | >>> with ua.set_backend(DaskBackend(inner=SparseBackend)): 147 | ... x = np.zeros((2000, 2000)) 148 | ... isinstance(x, da.Array) 149 | ... isinstance(x.compute(), sparse.SparseArray) 150 | True 151 | True 152 | """ 153 | from ._multimethods import * 154 | from .lib import c_, r_, s_ 155 | from . import linalg 156 | from . import lib 157 | from . 
import random 158 | 159 | from ._version import get_versions 160 | 161 | __version__ = get_versions()["version"] 162 | del get_versions 163 | -------------------------------------------------------------------------------- /unumpy/_version.py: -------------------------------------------------------------------------------- 1 | # This file helps to compute a version number in source trees obtained from 2 | # git-archive tarball (such as those provided by githubs download-from-tag 3 | # feature). Distribution tarballs (built by setup.py sdist) and build 4 | # directories (produced by setup.py build) will contain a much shorter file 5 | # that just contains the computed version number. 6 | 7 | # This file is released into the public domain. Generated by 8 | # versioneer-0.18 (https://github.com/warner/python-versioneer) 9 | 10 | """Git implementation of _version.py.""" 11 | 12 | import errno 13 | import os 14 | import re 15 | import subprocess 16 | import sys 17 | from typing import Dict 18 | 19 | 20 | def get_keywords(): 21 | """Get the keywords needed to look up the version information.""" 22 | # these strings will be replaced by git during git-archive. 23 | # setup.py/versioneer.py will grep for the variable names, so they must 24 | # each be defined on a line of their own. _version.py will just call 25 | # get_keywords(). 
26 | git_refnames = " (HEAD -> master)" 27 | git_full = "97dcc82e8f3343c2a66b5ee6d8081886652de1d0" 28 | git_date = "2021-02-27 07:06:22 +0100" 29 | keywords = {"refnames": git_refnames, "full": git_full, "date": git_date} 30 | return keywords 31 | 32 | 33 | class VersioneerConfig: 34 | """Container for Versioneer configuration parameters.""" 35 | 36 | 37 | def get_config(): 38 | """Create, populate and return the VersioneerConfig() object.""" 39 | # these strings are filled in when 'setup.py versioneer' creates 40 | # _version.py 41 | cfg = VersioneerConfig() 42 | cfg.VCS = "git" 43 | cfg.style = "pep440" 44 | cfg.tag_prefix = "" 45 | cfg.parentdir_prefix = "unumpy-" 46 | cfg.versionfile_source = "unumpy/_version.py" 47 | cfg.verbose = False 48 | return cfg 49 | 50 | 51 | class NotThisMethod(Exception): 52 | """Exception raised if a method is not valid for the current scenario.""" 53 | 54 | 55 | LONG_VERSION_PY = {} # type: Dict 56 | HANDLERS = {} # type: Dict 57 | 58 | 59 | def register_vcs_handler(vcs, method): # decorator 60 | """Decorator to mark a method as the handler for a particular VCS.""" 61 | 62 | def decorate(f): 63 | """Store f in HANDLERS[vcs][method].""" 64 | if vcs not in HANDLERS: 65 | HANDLERS[vcs] = {} 66 | HANDLERS[vcs][method] = f 67 | return f 68 | 69 | return decorate 70 | 71 | 72 | def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False, env=None): 73 | """Call the given command(s).""" 74 | assert isinstance(commands, list) 75 | p = None 76 | for c in commands: 77 | try: 78 | dispcmd = str([c] + args) 79 | # remember shell=False, so use git.cmd on windows, not just git 80 | p = subprocess.Popen( 81 | [c] + args, 82 | cwd=cwd, 83 | env=env, 84 | stdout=subprocess.PIPE, 85 | stderr=(subprocess.PIPE if hide_stderr else None), 86 | ) 87 | break 88 | except EnvironmentError: 89 | e = sys.exc_info()[1] 90 | if e.errno == errno.ENOENT: 91 | continue 92 | if verbose: 93 | print("unable to run %s" % dispcmd) 94 | print(e) 95 | 
def versions_from_parentdir(parentdir_prefix, root, verbose):
    """Try to determine the version from the parent directory name.

    Source tarballs conventionally unpack into a directory named
    ``<project>-<version>``; walk up to two directory levels from *root*
    looking for a directory whose name starts with *parentdir_prefix* and
    take everything after the prefix as the version.
    """
    searched = []
    current = root
    for _ in range(3):
        leaf = os.path.basename(current)
        if leaf.startswith(parentdir_prefix):
            return {
                "version": leaf[len(parentdir_prefix) :],
                "full-revisionid": None,
                "dirty": False,
                "error": None,
                "date": None,
            }
        searched.append(current)
        current = os.path.dirname(current)  # up a level

    if verbose:
        print(
            "Tried directories %s but none started with prefix %s"
            % (str(searched), parentdir_prefix)
        )
    raise NotThisMethod("rootdir doesn't start with parentdir_prefix")
@register_vcs_handler("git", "keywords")
def git_versions_from_keywords(keywords, tag_prefix, verbose):
    """Get version information from git keywords."""
    if not keywords:
        raise NotThisMethod("no keywords at all, weird")

    date = keywords.get("date")
    if date is not None:
        # git's "%ci" output is only ISO-8601-*like*; turn the first space
        # into "T" and drop the space before the UTC offset to make it
        # compliant.  ("%cI" would be compliant directly but needs git 2.2.0.)
        date = date.strip().replace(" ", "T", 1).replace(" ", "", 1)

    refnames = keywords["refnames"].strip()
    if refnames.startswith("$Format"):
        if verbose:
            print("keywords are unexpanded, not using")
        raise NotThisMethod("unexpanded keywords, not a git-archive tarball")

    refs = {r.strip() for r in refnames.strip("()").split(",")}
    # git >= 1.8.3 lists tags as "tag: foo-1.0"; prefer those when present.
    TAG = "tag: "
    tags = {r[len(TAG) :] for r in refs if r.startswith(TAG)}
    if not tags:
        # Either old git or genuinely no tags.  Heuristic: version tags have
        # a digit, which filters out branch names like "master" or "HEAD".
        tags = {r for r in refs if re.search(r"\d", r)}
        if verbose:
            print("discarding '%s', no digits" % ",".join(refs - tags))
    if verbose:
        print("likely tags: %s" % ",".join(sorted(tags)))

    # Sorting prefers e.g. "2.0" over "2.0rc1"; take the first matching tag.
    for ref in sorted(tags):
        if not ref.startswith(tag_prefix):
            continue
        r = ref[len(tag_prefix) :]
        if verbose:
            print("picking %s" % r)
        return {
            "version": r,
            "full-revisionid": keywords["full"].strip(),
            "dirty": False,
            "error": None,
            "date": date,
        }

    # No suitable tags; report unknown version but keep the full revision id.
    if verbose:
        print("no suitable tags, using unknown + full revision id")
    return {
        "version": "0+unknown",
        "full-revisionid": keywords["full"].strip(),
        "dirty": False,
        "error": "no suitable tags",
        "date": None,
    }
240 | """ 241 | GITS = ["git"] 242 | if sys.platform == "win32": 243 | GITS = ["git.cmd", "git.exe"] 244 | 245 | out, rc = run_command(GITS, ["rev-parse", "--git-dir"], cwd=root, hide_stderr=True) 246 | if rc != 0: 247 | if verbose: 248 | print("Directory %s not under git control" % root) 249 | raise NotThisMethod("'git rev-parse --git-dir' returned error") 250 | 251 | # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty] 252 | # if there isn't one, this yields HEX[-dirty] (no NUM) 253 | describe_out, rc = run_command( 254 | GITS, 255 | [ 256 | "describe", 257 | "--tags", 258 | "--dirty", 259 | "--always", 260 | "--long", 261 | "--match", 262 | "%s*" % tag_prefix, 263 | ], 264 | cwd=root, 265 | ) 266 | # --long was added in git-1.5.5 267 | if describe_out is None: 268 | raise NotThisMethod("'git describe' failed") 269 | describe_out = describe_out.strip() 270 | full_out, rc = run_command(GITS, ["rev-parse", "HEAD"], cwd=root) 271 | if full_out is None: 272 | raise NotThisMethod("'git rev-parse' failed") 273 | full_out = full_out.strip() 274 | 275 | pieces = {} 276 | pieces["long"] = full_out 277 | pieces["short"] = full_out[:7] # maybe improved later 278 | pieces["error"] = None 279 | 280 | # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty] 281 | # TAG might have hyphens. 282 | git_describe = describe_out 283 | 284 | # look for -dirty suffix 285 | dirty = git_describe.endswith("-dirty") 286 | pieces["dirty"] = dirty 287 | if dirty: 288 | git_describe = git_describe[: git_describe.rindex("-dirty")] 289 | 290 | # now we have TAG-NUM-gHEX or HEX 291 | 292 | if "-" in git_describe: 293 | # TAG-NUM-gHEX 294 | mo = re.search(r"^(.+)-(\d+)-g([0-9a-f]+)$", git_describe) 295 | if not mo: 296 | # unparseable. Maybe git-describe is misbehaving? 
297 | pieces["error"] = "unable to parse git-describe output: '%s'" % describe_out 298 | return pieces 299 | 300 | # tag 301 | full_tag = mo.group(1) 302 | if not full_tag.startswith(tag_prefix): 303 | if verbose: 304 | fmt = "tag '%s' doesn't start with prefix '%s'" 305 | print(fmt % (full_tag, tag_prefix)) 306 | pieces["error"] = "tag '%s' doesn't start with prefix '%s'" % ( 307 | full_tag, 308 | tag_prefix, 309 | ) 310 | return pieces 311 | pieces["closest-tag"] = full_tag[len(tag_prefix) :] 312 | 313 | # distance: number of commits since tag 314 | pieces["distance"] = int(mo.group(2)) 315 | 316 | # commit: short hex revision ID 317 | pieces["short"] = mo.group(3) 318 | 319 | else: 320 | # HEX: no tags 321 | pieces["closest-tag"] = None 322 | count_out, rc = run_command(GITS, ["rev-list", "HEAD", "--count"], cwd=root) 323 | pieces["distance"] = int(count_out) # total number of commits 324 | 325 | # commit date: see ISO-8601 comment in git_versions_from_keywords() 326 | date = run_command(GITS, ["show", "-s", "--format=%ci", "HEAD"], cwd=root)[ 327 | 0 328 | ].strip() 329 | pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1) 330 | 331 | return pieces 332 | 333 | 334 | def plus_or_dot(pieces): 335 | """Return a + if we don't already have one, else return a .""" 336 | if "+" in pieces.get("closest-tag", ""): 337 | return "." 338 | return "+" 339 | 340 | 341 | def render_pep440(pieces): 342 | """Build up version string, with post-release "local version identifier". 343 | 344 | Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you 345 | get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty 346 | 347 | Exceptions: 348 | 1: no tags. git_describe was just HEX. 
0+untagged.DISTANCE.gHEX[.dirty] 349 | """ 350 | if pieces["closest-tag"]: 351 | rendered = pieces["closest-tag"] 352 | if pieces["distance"] or pieces["dirty"]: 353 | rendered += plus_or_dot(pieces) 354 | rendered += "%d.g%s" % (pieces["distance"], pieces["short"]) 355 | if pieces["dirty"]: 356 | rendered += ".dirty" 357 | else: 358 | # exception #1 359 | rendered = "0+untagged.%d.g%s" % (pieces["distance"], pieces["short"]) 360 | if pieces["dirty"]: 361 | rendered += ".dirty" 362 | return rendered 363 | 364 | 365 | def render_pep440_pre(pieces): 366 | """TAG[.post.devDISTANCE] -- No -dirty. 367 | 368 | Exceptions: 369 | 1: no tags. 0.post.devDISTANCE 370 | """ 371 | if pieces["closest-tag"]: 372 | rendered = pieces["closest-tag"] 373 | if pieces["distance"]: 374 | rendered += ".post.dev%d" % pieces["distance"] 375 | else: 376 | # exception #1 377 | rendered = "0.post.dev%d" % pieces["distance"] 378 | return rendered 379 | 380 | 381 | def render_pep440_post(pieces): 382 | """TAG[.postDISTANCE[.dev0]+gHEX] . 383 | 384 | The ".dev0" means dirty. Note that .dev0 sorts backwards 385 | (a dirty tree will appear "older" than the corresponding clean one), 386 | but you shouldn't be releasing software with -dirty anyways. 387 | 388 | Exceptions: 389 | 1: no tags. 0.postDISTANCE[.dev0] 390 | """ 391 | if pieces["closest-tag"]: 392 | rendered = pieces["closest-tag"] 393 | if pieces["distance"] or pieces["dirty"]: 394 | rendered += ".post%d" % pieces["distance"] 395 | if pieces["dirty"]: 396 | rendered += ".dev0" 397 | rendered += plus_or_dot(pieces) 398 | rendered += "g%s" % pieces["short"] 399 | else: 400 | # exception #1 401 | rendered = "0.post%d" % pieces["distance"] 402 | if pieces["dirty"]: 403 | rendered += ".dev0" 404 | rendered += "+g%s" % pieces["short"] 405 | return rendered 406 | 407 | 408 | def render_pep440_old(pieces): 409 | """TAG[.postDISTANCE[.dev0]] . 410 | 411 | The ".dev0" means dirty. 412 | 413 | Eexceptions: 414 | 1: no tags. 
0.postDISTANCE[.dev0] 415 | """ 416 | if pieces["closest-tag"]: 417 | rendered = pieces["closest-tag"] 418 | if pieces["distance"] or pieces["dirty"]: 419 | rendered += ".post%d" % pieces["distance"] 420 | if pieces["dirty"]: 421 | rendered += ".dev0" 422 | else: 423 | # exception #1 424 | rendered = "0.post%d" % pieces["distance"] 425 | if pieces["dirty"]: 426 | rendered += ".dev0" 427 | return rendered 428 | 429 | 430 | def render_git_describe(pieces): 431 | """TAG[-DISTANCE-gHEX][-dirty]. 432 | 433 | Like 'git describe --tags --dirty --always'. 434 | 435 | Exceptions: 436 | 1: no tags. HEX[-dirty] (note: no 'g' prefix) 437 | """ 438 | if pieces["closest-tag"]: 439 | rendered = pieces["closest-tag"] 440 | if pieces["distance"]: 441 | rendered += "-%d-g%s" % (pieces["distance"], pieces["short"]) 442 | else: 443 | # exception #1 444 | rendered = pieces["short"] 445 | if pieces["dirty"]: 446 | rendered += "-dirty" 447 | return rendered 448 | 449 | 450 | def render_git_describe_long(pieces): 451 | """TAG-DISTANCE-gHEX[-dirty]. 452 | 453 | Like 'git describe --tags --dirty --always -long'. 454 | The distance/hash is unconditional. 455 | 456 | Exceptions: 457 | 1: no tags. 
HEX[-dirty] (note: no 'g' prefix) 458 | """ 459 | if pieces["closest-tag"]: 460 | rendered = pieces["closest-tag"] 461 | rendered += "-%d-g%s" % (pieces["distance"], pieces["short"]) 462 | else: 463 | # exception #1 464 | rendered = pieces["short"] 465 | if pieces["dirty"]: 466 | rendered += "-dirty" 467 | return rendered 468 | 469 | 470 | def render(pieces, style): 471 | """Render the given version pieces into the requested style.""" 472 | if pieces["error"]: 473 | return { 474 | "version": "unknown", 475 | "full-revisionid": pieces.get("long"), 476 | "dirty": None, 477 | "error": pieces["error"], 478 | "date": None, 479 | } 480 | 481 | if not style or style == "default": 482 | style = "pep440" # the default 483 | 484 | if style == "pep440": 485 | rendered = render_pep440(pieces) 486 | elif style == "pep440-pre": 487 | rendered = render_pep440_pre(pieces) 488 | elif style == "pep440-post": 489 | rendered = render_pep440_post(pieces) 490 | elif style == "pep440-old": 491 | rendered = render_pep440_old(pieces) 492 | elif style == "git-describe": 493 | rendered = render_git_describe(pieces) 494 | elif style == "git-describe-long": 495 | rendered = render_git_describe_long(pieces) 496 | else: 497 | raise ValueError("unknown style '%s'" % style) 498 | 499 | return { 500 | "version": rendered, 501 | "full-revisionid": pieces["long"], 502 | "dirty": pieces["dirty"], 503 | "error": None, 504 | "date": pieces.get("date"), 505 | } 506 | 507 | 508 | def get_versions(): 509 | """Get version information or return default if unable to do so.""" 510 | # I am in _version.py, which lives at ROOT/VERSIONFILE_SOURCE. If we have 511 | # __file__, we can work backwards from there to the root. Some 512 | # py2exe/bbfreeze/non-CPython implementations don't do __file__, in which 513 | # case we can only use expanded keywords. 
514 | 515 | cfg = get_config() 516 | verbose = cfg.verbose 517 | 518 | try: 519 | return git_versions_from_keywords(get_keywords(), cfg.tag_prefix, verbose) 520 | except NotThisMethod: 521 | pass 522 | 523 | try: 524 | root = os.path.realpath(__file__) 525 | # versionfile_source is the relative path from the top of the source 526 | # tree (where the .git directory might live) to this file. Invert 527 | # this to find the root from __file__. 528 | for i in cfg.versionfile_source.split("/"): 529 | root = os.path.dirname(root) 530 | except NameError: 531 | return { 532 | "version": "0+unknown", 533 | "full-revisionid": None, 534 | "dirty": None, 535 | "error": "unable to find root of source tree", 536 | "date": None, 537 | } 538 | 539 | try: 540 | pieces = git_pieces_from_vcs(cfg.tag_prefix, root, verbose) 541 | return render(pieces, cfg.style) 542 | except NotThisMethod: 543 | pass 544 | 545 | try: 546 | if cfg.parentdir_prefix: 547 | return versions_from_parentdir(cfg.parentdir_prefix, root, verbose) 548 | except NotThisMethod: 549 | pass 550 | 551 | return { 552 | "version": "0+unknown", 553 | "full-revisionid": None, 554 | "dirty": None, 555 | "error": "unable to compute version", 556 | "date": None, 557 | } 558 | -------------------------------------------------------------------------------- /unumpy/cupy_backend.py: -------------------------------------------------------------------------------- 1 | try: 2 | import numpy as np 3 | import cupy as cp 4 | from uarray import Dispatchable, wrap_single_convertor 5 | from unumpy import ufunc, ufunc_list, ndarray 6 | import unumpy 7 | import functools 8 | 9 | from typing import Dict 10 | 11 | _ufunc_mapping: Dict[ufunc, np.ufunc] = {} 12 | 13 | __ua_domain__ = "numpy" 14 | 15 | def overridden_class(self): 16 | module = self.__module__.split(".") 17 | module = ".".join(m for m in module if m != "_multimethods") 18 | return _get_from_name_domain(self.__name__, module) 19 | 20 | _implementations: Dict = { 21 | 
unumpy.ClassOverrideMeta.overridden_class.fget: overridden_class
    }

    def _get_from_name_domain(name, domain):
        """Resolve a dotted multimethod name to the matching ``cupy`` object.

        Walks attribute-by-attribute from the ``cp`` module root through the
        domain hierarchy (the leading "numpy" element is skipped), then looks
        up the final name component.  Returns ``NotImplemented`` when any
        component is missing so the caller can decline to handle the method.
        """
        module = cp
        name_hierarchy = name.split(".")
        domain_hierarchy = domain.split(".") + name_hierarchy[0:-1]
        for d in domain_hierarchy[1:]:
            if hasattr(module, d):
                module = getattr(module, d)
            else:
                return NotImplemented
        if hasattr(module, name_hierarchy[-1]):
            return getattr(module, name_hierarchy[-1])
        else:
            return NotImplemented

    def _implements(np_func):
        """Decorator: register the wrapped function as the cupy
        implementation of the *np_func* multimethod."""

        def inner(func):
            _implementations[np_func] = func
            return func

        return inner

    def __ua_function__(method, args, kwargs):
        """uarray function protocol: dispatch *method* to cupy.

        Explicit registrations in ``_implementations`` take priority;
        otherwise the method is resolved by qualified name inside ``cupy``.
        Returns ``NotImplemented`` so uarray can try the next backend.
        """
        if method in _implementations:
            return _implementations[method](*args, **kwargs)

        # Class-override multimethods are served via ``overridden_class``
        # above, not by name lookup.
        if len(args) != 0 and isinstance(args[0], unumpy.ClassOverrideMeta):
            return NotImplemented

        cupy_method = _get_from_name_domain(method.__qualname__, method.domain)
        if cupy_method is NotImplemented:
            return NotImplemented

        return cupy_method(*args, **kwargs)

    @wrap_single_convertor
    def __ua_convert__(value, dispatch_type, coerce):
        """uarray convert protocol: map dispatchables to cupy equivalents.

        ufuncs map to the same-named cupy ufunc; arrays are converted with
        ``cp.asarray``, but only when ``coerce`` is set — otherwise
        non-cupy arrays are refused with ``NotImplemented``.
        """
        if dispatch_type is ufunc and hasattr(cp, value.name):
            return getattr(cp, value.name)

        if value is None:
            return None

        if dispatch_type is ndarray:
            if not coerce and not isinstance(value, cp.ndarray):
                return NotImplemented

            return cp.asarray(value)

        return value

    def replace_self(func):
        """Wrap *func* so its ``self`` is replaced by the mapped cupy ufunc.

        NOTE(review): ``_ufunc_mapping`` is never populated in this module —
        presumably filled elsewhere or vestigial; confirm before relying on it.
        """

        @functools.wraps(func)
        def inner(self, *args, **kwargs):
            if self not in _ufunc_mapping:
                return NotImplemented

            return func(_ufunc_mapping[self], *args, **kwargs)

        return inner

    @_implements(unumpy.ascontiguousarray)
    def _ascontiguousarray(arr, dtype=None):
        # Force a C-contiguous (row-major) cupy array.
        return cp.asarray(arr, dtype=dtype, order="C")

    @_implements(unumpy.asfortranarray)
    def _asfortranarray(arr, dtype=None):
        # Force a Fortran-contiguous (column-major) cupy array.
        return
cp.asarray(arr, dtype=dtype, order="F") 91 | 92 | @_implements(unumpy.ufunc.__call__) 93 | def _ufunc_call(self, *args, **kwargs): 94 | fname = self.name 95 | f = getattr(cp, fname, lambda *a, **kw: NotImplemented) 96 | return f(*args, **kwargs) 97 | 98 | 99 | except ImportError: 100 | pass 101 | -------------------------------------------------------------------------------- /unumpy/dask_backend.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | import dask.array as da 3 | from uarray import ( 4 | Dispatchable, 5 | wrap_single_convertor_instance, 6 | set_backend, 7 | get_state, 8 | set_state, 9 | set_backend, 10 | ) 11 | from unumpy import ufunc, ufunc_list, ndarray, dtype 12 | import unumpy 13 | import functools 14 | import sys 15 | import collections 16 | import itertools 17 | import random 18 | 19 | from typing import Dict 20 | 21 | _class_mapping = {ndarray: da.Array, dtype: np.dtype, ufunc: da.ufunc.ufunc} 22 | 23 | 24 | def overridden_class(self): 25 | if self in _class_mapping: 26 | return _class_mapping[self] 27 | module = self.__module__.split(".") 28 | module = ".".join(m for m in module if m != "_multimethods") 29 | return _get_from_name_domain(self.__name__, module) 30 | 31 | 32 | def _get_from_name_domain(name, domain): 33 | module = da 34 | name_hierarchy = name.split(".") 35 | domain_hierarchy = domain.split(".") + name_hierarchy[0:-1] 36 | for d in domain_hierarchy[1:]: 37 | if hasattr(module, d): 38 | module = getattr(module, d) 39 | else: 40 | return NotImplemented 41 | if hasattr(module, name_hierarchy[-1]): 42 | return getattr(module, name_hierarchy[-1]) 43 | else: 44 | return NotImplemented 45 | 46 | 47 | class DaskBackend: 48 | _ufunc_mapping: Dict[ufunc, np.ufunc] = {} 49 | __ua_domain__ = "numpy" 50 | 51 | def __init__(self, inner=None): 52 | from unumpy import numpy_backend as NumpyBackend 53 | 54 | _implementations: Dict = { 55 | unumpy.asarray: self.asarray, 56 | 
unumpy.ufunc.__call__: self.wrap_map_blocks(unumpy.ufunc.__call__), 57 | unumpy.ones: self.wrap_uniform_create(unumpy.ones), 58 | unumpy.zeros: self.wrap_uniform_create(unumpy.zeros), 59 | unumpy.full: self.wrap_uniform_create(unumpy.full), 60 | unumpy.trim_zeros: self.trim_zeros, 61 | unumpy.ClassOverrideMeta.overridden_class.fget: overridden_class, 62 | } 63 | 64 | self._implementations = _implementations 65 | self._inner = NumpyBackend if inner is None else inner 66 | 67 | @staticmethod 68 | def _wrap_current_state(func): 69 | state = get_state() 70 | 71 | @functools.wraps(func) 72 | def wrapped(*a, **kw): 73 | with set_state(state): 74 | return func(*a, **kw) 75 | 76 | return wrapped 77 | 78 | def asarray(self, a, dtype=None, order=None): 79 | if isinstance(a, da.Array): 80 | return self.wrap_map_blocks(unumpy.asarray)(a) 81 | with set_backend(self._inner, coerce=True): 82 | a = np.asarray(a, dtype=dtype, order=order) 83 | return da.from_array(a, asarray=False) 84 | 85 | def trim_zeros(self, filt, trim="fb"): 86 | nonzero_idxs = unumpy.nonzero(filt)[0].compute_chunk_sizes() 87 | 88 | if len(nonzero_idxs.compute()) == 0: 89 | return unumpy.asarray([], dtype=filt.dtype) 90 | 91 | start, end = None, None 92 | if "f" in trim: 93 | start = nonzero_idxs[0].compute() 94 | if "b" in trim: 95 | end = nonzero_idxs[-1].compute() + 1 96 | return filt[start:end] 97 | 98 | def wrap_map_blocks(self, func): 99 | @functools.wraps(func) 100 | def wrapped(*args, **kwargs): 101 | with set_backend(self._inner): 102 | return da.map_blocks(self._wrap_current_state(func), *args, **kwargs) 103 | 104 | return wrapped 105 | 106 | def wrap_uniform_create(self, func): 107 | @functools.wraps(func) 108 | def wrapped(shape, *args, **kwargs): 109 | if isinstance(shape, collections.abc.Iterable): 110 | shape = tuple(int(s) for s in shape) 111 | else: 112 | shape = (int(shape),) 113 | 114 | # Estimate 100 Mi elements per block 115 | blocksize = int((100 * (2 ** 20)) ** (1 / len(shape))) 116 | 
117 | chunks = [] 118 | for l in shape: 119 | chunks.append([]) 120 | while l > 0: 121 | s = max(min(blocksize, l), 0) 122 | chunks[-1].append(s) 123 | l -= s 124 | 125 | name = func.__name__ + "-" + hex(random.randrange(2 ** 64)) 126 | dsk = {} 127 | with set_backend(self._inner): 128 | for chunk_id in itertools.product( 129 | *map(lambda x: range(len(x)), chunks) 130 | ): 131 | shape = tuple(chunks[i][j] for i, j in enumerate(chunk_id)) 132 | dsk[(name,) + chunk_id] = func(shape, *args, **kwargs) 133 | 134 | meta = func(tuple(0 for _ in shape), *args, **kwargs) 135 | dtype = str(meta.dtype) 136 | 137 | return da.Array(dsk, name, chunks, dtype=dtype, meta=meta) 138 | 139 | return wrapped 140 | 141 | def __ua_function__(self, method, args, kwargs): 142 | if method in self._implementations: 143 | return self._implementations[method](*args, **kwargs) 144 | 145 | if len(args) != 0 and isinstance(args[0], unumpy.ClassOverrideMeta): 146 | return NotImplemented 147 | 148 | dask_method = _get_from_name_domain(method.__qualname__, method.domain) 149 | if dask_method is NotImplemented: 150 | return NotImplemented 151 | 152 | return dask_method(*args, **kwargs) 153 | 154 | @wrap_single_convertor_instance 155 | def __ua_convert__(self, value, dispatch_type, coerce): 156 | if dispatch_type is not ufunc and value is None: 157 | return None 158 | 159 | if dispatch_type is ndarray: 160 | if not coerce and not isinstance(value, da.Array): 161 | return NotImplemented 162 | ret = da.asarray(value) 163 | with set_backend(self._inner): 164 | ret = ret.map_blocks(self._wrap_current_state(unumpy.asarray)) 165 | 166 | return ret 167 | 168 | return value 169 | -------------------------------------------------------------------------------- /unumpy/lib/__init__.py: -------------------------------------------------------------------------------- 1 | from ._multimethods import * 2 | from . 
import index_tricks 3 | from .index_tricks import c_, r_, s_ 4 | -------------------------------------------------------------------------------- /unumpy/lib/_multimethods.py: -------------------------------------------------------------------------------- 1 | import functools 2 | import operator 3 | from uarray import create_multimethod, mark_as, all_of_type, Dispatchable 4 | import builtins 5 | 6 | create_numpy = functools.partial(create_multimethod, domain="numpy.lib") 7 | 8 | from .._multimethods import ( 9 | ClassOverrideMetaWithGetAttr, 10 | _call_first_argreplacer, 11 | ndarray, 12 | ) 13 | 14 | 15 | class ClassOverrideMetaForArrayterator(ClassOverrideMetaWithGetAttr): 16 | @create_numpy( 17 | _call_first_argreplacer, 18 | default=lambda self, var, buf_size=None: self.overridden_class(var, buf_size), 19 | ) 20 | @all_of_type(ndarray) 21 | def __call__(self, var, buf_size=None): 22 | self._unwrapped = NotImplemented 23 | return (var,) 24 | 25 | 26 | class Arrayterator(metaclass=ClassOverrideMetaForArrayterator): 27 | pass 28 | -------------------------------------------------------------------------------- /unumpy/lib/index_tricks/__init__.py: -------------------------------------------------------------------------------- 1 | from ._multimethods import * 2 | -------------------------------------------------------------------------------- /unumpy/lib/index_tricks/_multimethods.py: -------------------------------------------------------------------------------- 1 | import functools 2 | import operator 3 | from uarray import create_multimethod, mark_as, all_of_type, Dispatchable 4 | import builtins 5 | 6 | create_numpy = functools.partial(create_multimethod, domain="numpy.lib.index_tricks") 7 | 8 | from ..._multimethods import ClassOverrideMetaWithConstructorAndGetAttr 9 | 10 | 11 | class CClass(metaclass=ClassOverrideMetaWithConstructorAndGetAttr): 12 | def __getitem__(self, key): 13 | return CClass().__getitem__(key) 14 | 15 | 16 | c_ = CClass() 17 | 18 | 19 
| class RClass(metaclass=ClassOverrideMetaWithConstructorAndGetAttr): 20 | def __getitem__(self, key): 21 | return RClass().__getitem__(key) 22 | 23 | 24 | r_ = RClass() 25 | 26 | 27 | class IndexExpression(metaclass=ClassOverrideMetaWithConstructorAndGetAttr): 28 | def __init__(self, maketuple): 29 | self.maketuple = maketuple 30 | 31 | def __getitem__(self, key): 32 | return IndexExpression(self.maketuple).__getitem__(key) 33 | 34 | 35 | s_ = IndexExpression(maketuple=True) 36 | -------------------------------------------------------------------------------- /unumpy/linalg/__init__.py: -------------------------------------------------------------------------------- 1 | from ._multimethods import * 2 | -------------------------------------------------------------------------------- /unumpy/linalg/_multimethods.py: -------------------------------------------------------------------------------- 1 | import functools 2 | import operator 3 | from uarray import create_multimethod, mark_as, all_of_type, Dispatchable 4 | import builtins 5 | 6 | create_numpy = functools.partial(create_multimethod, domain="numpy.linalg") 7 | 8 | from .._multimethods import ( 9 | ndarray, 10 | _identity_argreplacer, 11 | _self_argreplacer, 12 | _dtype_argreplacer, 13 | mark_dtype, 14 | _first_argreplacer, 15 | _first2argreplacer, 16 | ndim, 17 | ) 18 | 19 | __all__ = [ 20 | "multi_dot", 21 | "matrix_power", 22 | "cholesky", 23 | "qr", 24 | "svd", 25 | "eig", 26 | "eigh", 27 | "eigvals", 28 | "eigvalsh", 29 | "norm", 30 | "cond", 31 | "det", 32 | "matrix_rank", 33 | "slogdet", 34 | "solve", 35 | "tensorsolve", 36 | "lstsq", 37 | "inv", 38 | "pinv", 39 | "tensorinv", 40 | ] 41 | 42 | 43 | def multi_dot_default(arrays): 44 | res = arrays[0] 45 | for a in arrays[1:]: 46 | res = res @ a 47 | 48 | return res 49 | 50 | 51 | @create_numpy(_first_argreplacer, default=multi_dot_default) 52 | @all_of_type(ndarray) 53 | def multi_dot(arrays): 54 | return arrays 55 | 56 | 57 | def 
matrix_power_default(a, n): 58 | eigenvalues, eigenvectors = eig(a) 59 | diagonal = diag(eigenvalues) 60 | return multi_dot([eigenvectors, power(diagonal, n), inv(eigenvectors)]) 61 | 62 | 63 | @create_numpy(_self_argreplacer, default=matrix_power_default) 64 | @all_of_type(ndarray) 65 | def matrix_power(a, n): 66 | return (a,) 67 | 68 | 69 | @create_numpy(_self_argreplacer) 70 | @all_of_type(ndarray) 71 | def cholesky(a): 72 | return (a,) 73 | 74 | 75 | @create_numpy(_self_argreplacer) 76 | @all_of_type(ndarray) 77 | def qr(a, mode="reduced"): 78 | return (a,) 79 | 80 | 81 | @create_numpy(_self_argreplacer) 82 | @all_of_type(ndarray) 83 | def svd(a, full_matrices=True, compute_uv=True, hermitian=False): 84 | return (a,) 85 | 86 | 87 | @create_numpy(_self_argreplacer) 88 | @all_of_type(ndarray) 89 | def eig(a): 90 | return (a,) 91 | 92 | 93 | @create_numpy(_self_argreplacer) 94 | @all_of_type(ndarray) 95 | def eigh(a, UPLO="L"): 96 | return (a,) 97 | 98 | 99 | @create_numpy(_self_argreplacer) 100 | @all_of_type(ndarray) 101 | def eigvals(a): 102 | return (a,) 103 | 104 | 105 | @create_numpy(_self_argreplacer) 106 | @all_of_type(ndarray) 107 | def eigvalsh(a, UPLO="L"): 108 | return (a,) 109 | 110 | 111 | @create_numpy(_self_argreplacer) 112 | @all_of_type(ndarray) 113 | def norm(x, ord=None, axis=None, keepdims=False): 114 | return (x,) 115 | 116 | 117 | def cond_default(x, p=None): 118 | if ndim(x) > 1: 119 | return norm_default(x, ord=p) 120 | else: 121 | raise ValueError("Array must be at least two-dimensional.") 122 | 123 | 124 | @create_numpy(_self_argreplacer) 125 | @all_of_type(ndarray) 126 | def cond(x, p=None): 127 | return (x,) 128 | 129 | 130 | @create_numpy(_self_argreplacer) 131 | @all_of_type(ndarray) 132 | def det(a): 133 | return (a,) 134 | 135 | 136 | @create_numpy(_self_argreplacer) 137 | @all_of_type(ndarray) 138 | def matrix_rank(M, tol=None, hermitian=False): 139 | return (M,) 140 | 141 | 142 | @create_numpy(_self_argreplacer) 143 | 
@all_of_type(ndarray) 144 | def slogdet(a): 145 | return (a,) 146 | 147 | 148 | @create_numpy(_first2argreplacer) 149 | @all_of_type(ndarray) 150 | def solve(a, b): 151 | return (a, b) 152 | 153 | 154 | @create_numpy(_first2argreplacer) 155 | @all_of_type(ndarray) 156 | def tensorsolve(a, b, axes=None): 157 | return (a, b) 158 | 159 | 160 | @create_numpy(_first2argreplacer) 161 | @all_of_type(ndarray) 162 | def lstsq(a, b, rcond="warn"): 163 | return (a, b) 164 | 165 | 166 | @create_numpy(_self_argreplacer) 167 | @all_of_type(ndarray) 168 | def inv(a): 169 | return (a,) 170 | 171 | 172 | @create_numpy(_self_argreplacer) 173 | @all_of_type(ndarray) 174 | def pinv(a, rcond=1e-15, hermitian=False): 175 | return (a,) 176 | 177 | 178 | @create_numpy(_self_argreplacer) 179 | @all_of_type(ndarray) 180 | def tensorinv(a, ind=2): 181 | return (a,) 182 | -------------------------------------------------------------------------------- /unumpy/numpy_backend.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | from uarray import Dispatchable, wrap_single_convertor 3 | from unumpy import ufunc, ufunc_list, ndarray, dtype 4 | import unumpy 5 | import functools 6 | 7 | from typing import Dict 8 | 9 | _ufunc_mapping: Dict[ufunc, np.ufunc] = {} 10 | 11 | __ua_domain__ = "numpy" 12 | 13 | 14 | def overridden_class(self): 15 | module = self.__module__.split(".") 16 | module = ".".join(m for m in module if m != "_multimethods") 17 | return _get_from_name_domain(self.__name__, module) 18 | 19 | 20 | _implementations: Dict = { 21 | unumpy.ufunc.__call__: np.ufunc.__call__, 22 | unumpy.ufunc.reduce: np.ufunc.reduce, 23 | unumpy.count_nonzero: lambda a, axis=None: np.asarray(np.count_nonzero(a, axis))[ 24 | () 25 | ], 26 | unumpy.ClassOverrideMeta.overridden_class.fget: overridden_class, 27 | } 28 | 29 | 30 | def _get_from_name_domain(name, domain): 31 | module = np 32 | name_hierarchy = name.split(".") 33 | domain_hierarchy = 
domain.split(".") + name_hierarchy[0:-1] 34 | for d in domain_hierarchy[1:]: 35 | module = getattr(module, d) 36 | if hasattr(module, name_hierarchy[-1]): 37 | return getattr(module, name_hierarchy[-1]) 38 | else: 39 | return NotImplemented 40 | 41 | 42 | def __ua_function__(method, args, kwargs): 43 | if method in _implementations: 44 | return _implementations[method](*args, **kwargs) 45 | 46 | if len(args) != 0 and isinstance(args[0], unumpy.ClassOverrideMeta): 47 | return NotImplemented 48 | 49 | method_numpy = _get_from_name_domain(method.__qualname__, method.domain) 50 | if method_numpy is NotImplemented: 51 | return NotImplemented 52 | 53 | return method_numpy(*args, **kwargs) 54 | 55 | 56 | @wrap_single_convertor 57 | def __ua_convert__(value, dispatch_type, coerce): 58 | if dispatch_type is ufunc: 59 | return getattr(np, value.name) 60 | 61 | if value is None: 62 | return None 63 | 64 | if dispatch_type is ndarray: 65 | if not coerce and not isinstance(value, np.ndarray): 66 | return NotImplemented 67 | 68 | return np.asarray(value) 69 | 70 | if dispatch_type is dtype: 71 | try: 72 | return np.dtype(str(value)) 73 | except TypeError: 74 | return np.dtype(value) 75 | 76 | return value 77 | 78 | 79 | def replace_self(func): 80 | @functools.wraps(func) 81 | def inner(self, *args, **kwargs): 82 | if self not in _ufunc_mapping: 83 | return NotImplemented 84 | 85 | return func(_ufunc_mapping[self], *args, **kwargs) 86 | 87 | return inner 88 | -------------------------------------------------------------------------------- /unumpy/random/__init__.py: -------------------------------------------------------------------------------- 1 | from ._multimethods import * 2 | -------------------------------------------------------------------------------- /unumpy/random/_multimethods.py: -------------------------------------------------------------------------------- 1 | import functools 2 | import operator 3 | from uarray import create_multimethod, mark_as, all_of_type, 
Dispatchable 4 | import builtins 5 | 6 | create_numpy = functools.partial(create_multimethod, domain="numpy.random") 7 | 8 | from .._multimethods import ( 9 | ClassOverrideMetaWithConstructor, 10 | ClassOverrideMetaWithGetAttr, 11 | ClassOverrideMetaWithConstructorAndGetAttr, 12 | ndarray, 13 | _identity_argreplacer, 14 | _dtype_argreplacer, 15 | _self_argreplacer, 16 | _skip_self_argreplacer, 17 | _first2argreplacer, 18 | _first3argreplacer, 19 | mark_dtype, 20 | mark_non_coercible, 21 | ) 22 | 23 | 24 | @create_numpy(_identity_argreplacer) 25 | def default_rng(seed=None): 26 | return () 27 | 28 | 29 | class RandomState(metaclass=ClassOverrideMetaWithConstructor): 30 | pass 31 | 32 | 33 | class BitGenerator(metaclass=ClassOverrideMetaWithGetAttr): 34 | pass 35 | 36 | 37 | class SeedSequence(metaclass=ClassOverrideMetaWithConstructorAndGetAttr): 38 | pass 39 | 40 | 41 | @create_numpy(_identity_argreplacer) 42 | def rand(*tup): 43 | return () 44 | 45 | 46 | @create_numpy(_identity_argreplacer) 47 | def randn(*tup): 48 | return () 49 | 50 | 51 | def _randint_argreplacer(args, kwargs, dispatchables): 52 | def replacer(low, high=None, size=None, dtype=int): 53 | return ( 54 | (dispatchables[0],), 55 | dict(high=dispatchables[1], size=size, dtype=dispatchables[2]), 56 | ) 57 | 58 | return replacer(*args, **kwargs) 59 | 60 | 61 | @create_numpy(_randint_argreplacer) 62 | @all_of_type(ndarray) 63 | def randint(low, high=None, size=None, dtype=int): 64 | return (low, high, mark_dtype(dtype)) 65 | 66 | 67 | @create_numpy(_identity_argreplacer) 68 | def random_integers(low, high=None, size=None): 69 | return () 70 | 71 | 72 | @create_numpy(_identity_argreplacer) 73 | def random_sample(size=None): 74 | return () 75 | 76 | 77 | @create_numpy(_identity_argreplacer) 78 | def random(size=None): 79 | return () 80 | 81 | 82 | @create_numpy(_identity_argreplacer) 83 | def ranf(size=None): 84 | return () 85 | 86 | 87 | @create_numpy(_identity_argreplacer) 88 | def sample(size=None): 
89 | return () 90 | 91 | 92 | def _choice_argreplacer(args, kwargs, dispatchables): 93 | def replacer(a, size=None, replace=True, p=None): 94 | return (dispatchables[0],), dict(size=size, replace=replace, p=dispatchables[1]) 95 | 96 | return replacer(*args, **kwargs) 97 | 98 | 99 | @create_numpy(_choice_argreplacer) 100 | @all_of_type(ndarray) 101 | def choice(a, size=None, replace=True, p=None): 102 | return (a, p) 103 | 104 | 105 | @create_numpy(_identity_argreplacer) 106 | def bytes(length): 107 | return () 108 | 109 | 110 | @create_numpy(_self_argreplacer) 111 | @all_of_type(ndarray) 112 | def shuffle(x): 113 | return (x,) 114 | 115 | 116 | @create_numpy(_self_argreplacer) 117 | @all_of_type(ndarray) 118 | def permutation(x): 119 | return (x,) 120 | 121 | 122 | @create_numpy(_first2argreplacer) 123 | @all_of_type(ndarray) 124 | def beta(a, b, size=None): 125 | return (a, b) 126 | 127 | 128 | @create_numpy(_first2argreplacer) 129 | @all_of_type(ndarray) 130 | def binomial(n, p, size=None): 131 | return (n, p) 132 | 133 | 134 | @create_numpy(_self_argreplacer) 135 | @all_of_type(ndarray) 136 | def chisquare(df, size=None): 137 | return (df,) 138 | 139 | 140 | @create_numpy(_identity_argreplacer) 141 | def dirichlet(alpha, size=None): 142 | return () 143 | 144 | 145 | def _exponential_argreplacer(args, kwargs, dispatchables): 146 | def replacer(scale=1.0, size=None): 147 | return (), dict(scale=dispatchables[0], size=size) 148 | 149 | return replacer(*args, **kwargs) 150 | 151 | 152 | @create_numpy(_exponential_argreplacer) 153 | @all_of_type(ndarray) 154 | def exponential(scale=1.0, size=None): 155 | return (scale,) 156 | 157 | 158 | @create_numpy(_first2argreplacer) 159 | @all_of_type(ndarray) 160 | def f(dfnum, dfden, size=None): 161 | return (dfnum, dfden) 162 | 163 | 164 | def _gamma_argreplacer(args, kwargs, dispatchables): 165 | def replacer(shape, scale=1.0, size=None): 166 | return (dispatchables[0],), dict(scale=dispatchables[1], size=size) 167 | 168 | 
return replacer(*args, **kwargs) 169 | 170 | 171 | @create_numpy(_gamma_argreplacer) 172 | @all_of_type(ndarray) 173 | def gamma(shape, scale=1.0, size=None): 174 | return (shape, scale) 175 | 176 | 177 | @create_numpy(_self_argreplacer) 178 | @all_of_type(ndarray) 179 | def geometric(p, size=None): 180 | return (p,) 181 | 182 | 183 | def _loc_scale_argreplacer(args, kwargs, dispatchables): 184 | def replacer(loc=0.0, scale=1.0, size=None): 185 | return (), dict(loc=dispatchables[0], scale=dispatchables[1], size=size) 186 | 187 | return replacer(*args, **kwargs) 188 | 189 | 190 | @create_numpy(_loc_scale_argreplacer) 191 | @all_of_type(ndarray) 192 | def gumbel(loc=0.0, scale=1.0, size=None): 193 | return (loc, scale) 194 | 195 | 196 | @create_numpy(_first3argreplacer) 197 | @all_of_type(ndarray) 198 | def hypergeometric(ngood, nbad, nsample, size=None): 199 | return (ngood, nbad, nsample) 200 | 201 | 202 | @create_numpy(_loc_scale_argreplacer) 203 | @all_of_type(ndarray) 204 | def laplace(loc=0.0, scale=1.0, size=None): 205 | return (loc, scale) 206 | 207 | 208 | @create_numpy(_loc_scale_argreplacer) 209 | @all_of_type(ndarray) 210 | def logistic(loc=0.0, scale=1.0, size=None): 211 | return (loc, scale) 212 | 213 | 214 | def _lognormal_argreplacer(args, kwargs, dispatchables): 215 | def replacer(mean=0.0, sigma=1.0, size=None): 216 | return (), dict(mean=dispatchables[0], sigma=dispatchables[1], size=size) 217 | 218 | return replacer(*args, **kwargs) 219 | 220 | 221 | @create_numpy(_lognormal_argreplacer) 222 | @all_of_type(ndarray) 223 | def lognormal(mean=0.0, sigma=1.0, size=None): 224 | return (mean, sigma) 225 | 226 | 227 | @create_numpy(_self_argreplacer) 228 | @all_of_type(ndarray) 229 | def logseries(p, size=None): 230 | return (p,) 231 | 232 | 233 | @create_numpy(_self_argreplacer) 234 | @all_of_type(ndarray) 235 | def multinomial(n, pvals, size=None): 236 | return (n,) 237 | 238 | 239 | @create_numpy(_first2argreplacer) 240 | @all_of_type(ndarray) 241 | 
def multivariate_normal(mean, cov, size=None, check_valid="warn", tol=1e-8): 242 | return (mean, cov) 243 | 244 | 245 | @create_numpy(_first2argreplacer) 246 | @all_of_type(ndarray) 247 | def negative_binomial(n, p, size=None): 248 | return (n, p) 249 | 250 | 251 | @create_numpy(_first2argreplacer) 252 | @all_of_type(ndarray) 253 | def noncentral_chisquare(df, nonc, size=None): 254 | return (df, nonc) 255 | 256 | 257 | @create_numpy(_first3argreplacer) 258 | @all_of_type(ndarray) 259 | def noncentral_f(dfnum, dfden, nonc, size=None): 260 | return (dfnum, dfden, nonc) 261 | 262 | 263 | @create_numpy(_loc_scale_argreplacer) 264 | @all_of_type(ndarray) 265 | def normal(loc=0.0, scale=1.0, size=None): 266 | return (loc, scale) 267 | 268 | 269 | @create_numpy(_self_argreplacer) 270 | @all_of_type(ndarray) 271 | def pareto(a, size=None): 272 | return (a,) 273 | 274 | 275 | def _poisson_argreplacer(args, kwargs, dispatchables): 276 | def replacer(lam=1.0, size=None): 277 | return (), dict(lam=dispatchables[0], size=size) 278 | 279 | return replacer(*args, **kwargs) 280 | 281 | 282 | @create_numpy(_poisson_argreplacer) 283 | @all_of_type(ndarray) 284 | def poisson(lam=1.0, size=None): 285 | return (lam,) 286 | 287 | 288 | @create_numpy(_self_argreplacer) 289 | @all_of_type(ndarray) 290 | def power(a, size=None): 291 | return (a,) 292 | 293 | 294 | def _rayleigh_argreplacer(args, kwargs, dispatchables): 295 | def replacer(scale=1.0, size=None): 296 | return (), dict(scale=dispatchables[0], size=size) 297 | 298 | return replacer(*args, **kwargs) 299 | 300 | 301 | @create_numpy(_rayleigh_argreplacer) 302 | @all_of_type(ndarray) 303 | def rayleigh(scale=1.0, size=None): 304 | return (scale,) 305 | 306 | 307 | @create_numpy(_identity_argreplacer) 308 | def standard_cauchy(size=None): 309 | return () 310 | 311 | 312 | @create_numpy(_identity_argreplacer) 313 | def standard_exponential(size=None): 314 | return () 315 | 316 | 317 | @create_numpy(_identity_argreplacer) 318 | def 
standard_gamma(shape, size=None): 319 | return () 320 | 321 | 322 | @create_numpy(_identity_argreplacer) 323 | def standard_normal(size=None): 324 | return () 325 | 326 | 327 | @create_numpy(_self_argreplacer) 328 | @all_of_type(ndarray) 329 | def standard_t(df, size=None): 330 | return (df,) 331 | 332 | 333 | @create_numpy(_first3argreplacer) 334 | @all_of_type(ndarray) 335 | def triangular(left, mode, right, size=None): 336 | return (left, mode, right) 337 | 338 | 339 | def _uniform_argreplacer(args, kwargs, dispatchables): 340 | def replacer(low=0.0, high=1.0, size=None): 341 | return (), dict(low=dispatchables[0], high=dispatchables[1], size=size) 342 | 343 | return replacer(*args, **kwargs) 344 | 345 | 346 | @create_numpy(_uniform_argreplacer) 347 | @all_of_type(ndarray) 348 | def uniform(low=0.0, high=1.0, size=None): 349 | return (low, high) 350 | 351 | 352 | @create_numpy(_first2argreplacer) 353 | @all_of_type(ndarray) 354 | def vonmises(mu, kappa, size=None): 355 | return (mu, kappa) 356 | 357 | 358 | @create_numpy(_first2argreplacer) 359 | @all_of_type(ndarray) 360 | def wald(mean, scale, size=None): 361 | return (mean, scale) 362 | 363 | 364 | @create_numpy(_self_argreplacer) 365 | @all_of_type(ndarray) 366 | def weibull(a, size=None): 367 | return (a,) 368 | 369 | 370 | @create_numpy(_self_argreplacer) 371 | @all_of_type(ndarray) 372 | def zipf(a, size=None): 373 | return (a,) 374 | 375 | 376 | @create_numpy(_identity_argreplacer) 377 | def seed(seed=None): 378 | return () 379 | 380 | 381 | @create_numpy(_identity_argreplacer) 382 | def get_state(): 383 | return () 384 | 385 | 386 | @create_numpy(_identity_argreplacer) 387 | def set_state(state): 388 | return () 389 | 390 | 391 | def _integers_argreplacer(args, kwargs, dispatchables): 392 | def replacer(self, low, high=None, size=None, dtype=int, endpoint=False): 393 | return ( 394 | ( 395 | self, 396 | dispatchables[0], 397 | ), 398 | dict( 399 | high=dispatchables[1], 400 | size=size, 401 | 
dtype=dispatchables[2], 402 | endpoint=endpoint, 403 | ), 404 | ) 405 | 406 | return replacer(*args, **kwargs) 407 | 408 | 409 | def _Generator_choice_argreplacer(args, kwargs, dispatchables): 410 | def replacer(self, a, size=None, replace=True, p=None, axis=0, shuffle=True): 411 | return ( 412 | ( 413 | self, 414 | dispatchables[0], 415 | ), 416 | dict( 417 | size=size, 418 | replace=replace, 419 | p=dispatchables[1], 420 | axis=axis, 421 | shuffle=shuffle, 422 | ), 423 | ) 424 | 425 | return replacer(*args, **kwargs) 426 | 427 | 428 | def _Generator_exponential_argreplacer(args, kwargs, dispatchables): 429 | def replacer(self, scale=1.0, size=None): 430 | return (self,), dict(scale=dispatchables[0], size=size) 431 | 432 | return replacer(*args, **kwargs) 433 | 434 | 435 | def _Generator_gamma_argreplacer(args, kwargs, dispatchables): 436 | def replacer(self, shape, scale=1.0, size=None): 437 | return ( 438 | self, 439 | dispatchables[0], 440 | ), dict(scale=dispatchables[1], size=size) 441 | 442 | return replacer(*args, **kwargs) 443 | 444 | 445 | def _Generator_loc_scale_argreplacer(args, kwargs, dispatchables): 446 | def replacer(self, loc=0.0, scale=1.0, size=None): 447 | return (self,), dict(loc=dispatchables[0], scale=dispatchables[1], size=size) 448 | 449 | return replacer(*args, **kwargs) 450 | 451 | 452 | def _Generator_lognormal_argreplacer(args, kwargs, dispatchables): 453 | def replacer(self, mean=0.0, sigma=1.0, size=None): 454 | return (self,), dict(mean=dispatchables[0], sigma=dispatchables[1], size=size) 455 | 456 | return replacer(*args, **kwargs) 457 | 458 | 459 | def _Generator_multinomial_argreplacer(args, kwargs, dispatchables): 460 | def replacer(self, n, pvals, size=None): 461 | return (self, dispatchables[0], pvals), dict(size=size) 462 | 463 | return replacer(*args, **kwargs) 464 | 465 | 466 | def _Generator_poisson_argreplacer(args, kwargs, dispatchables): 467 | def replacer(self, lam=1.0, size=None): 468 | return (self,), 
dict(lam=dispatchables[0], size=size) 469 | 470 | return replacer(*args, **kwargs) 471 | 472 | 473 | def _Generator_rayleigh_argreplacer(args, kwargs, dispatchables): 474 | def replacer(self, scale=1.0, size=None): 475 | return (self,), dict(scale=dispatchables[0], size=size) 476 | 477 | return replacer(*args, **kwargs) 478 | 479 | 480 | def _Generator_uniform_argreplacer(args, kwargs, dispatchables): 481 | def replacer(self, low=0.0, high=1.0, size=None): 482 | return (self,), dict(low=dispatchables[0], high=dispatchables[1], size=size) 483 | 484 | return replacer(*args, **kwargs) 485 | 486 | 487 | class Generator(metaclass=ClassOverrideMetaWithConstructorAndGetAttr): 488 | @create_numpy(_integers_argreplacer) 489 | @all_of_type(ndarray) 490 | def integers(self, low, high=None, size=None, dtype=int, endpoint=False): 491 | return (low, high, mark_dtype(dtype)) 492 | 493 | @create_numpy(_dtype_argreplacer) 494 | def random(self, size=None, dtype=float, out=None): 495 | return (mark_dtype(dtype), mark_non_coercible(out)) 496 | 497 | @create_numpy(_Generator_choice_argreplacer) 498 | @all_of_type(ndarray) 499 | def choice(self, a, size=None, replace=True, p=None, axis=0, shuffle=True): 500 | return (a, p) 501 | 502 | @create_numpy(_identity_argreplacer) 503 | def bytes(self, length): 504 | return () 505 | 506 | @create_numpy(_skip_self_argreplacer) 507 | @all_of_type(ndarray) 508 | def shuffle(self, x, axis=0): 509 | return (x,) 510 | 511 | @create_numpy(_skip_self_argreplacer) 512 | @all_of_type(ndarray) 513 | def permutation(self, x, axis=0): 514 | return (x,) 515 | 516 | @create_numpy(_skip_self_argreplacer) 517 | @all_of_type(ndarray) 518 | def beta(self, a, b, size=None): 519 | return (a, b) 520 | 521 | @create_numpy(_skip_self_argreplacer) 522 | @all_of_type(ndarray) 523 | def binomial(self, n, p, size=None): 524 | return (n, p) 525 | 526 | @create_numpy(_skip_self_argreplacer) 527 | @all_of_type(ndarray) 528 | def chisquare(self, df, size=None): 529 | return 
(df,) 530 | 531 | @create_numpy(_identity_argreplacer) 532 | def dirichlet(self, alpha, size=None): 533 | return () 534 | 535 | @create_numpy(_Generator_exponential_argreplacer) 536 | @all_of_type(ndarray) 537 | def exponential(self, scale=1.0, size=None): 538 | return (scale,) 539 | 540 | @create_numpy(_skip_self_argreplacer) 541 | @all_of_type(ndarray) 542 | def f(self, dfnum, dfden, size=None): 543 | return (dfnum, dfden) 544 | 545 | @create_numpy(_Generator_gamma_argreplacer) 546 | @all_of_type(ndarray) 547 | def gamma(self, shape, scale=1.0, size=None): 548 | return (shape, scale) 549 | 550 | @create_numpy(_skip_self_argreplacer) 551 | @all_of_type(ndarray) 552 | def geometric(self, p, size=None): 553 | return (p,) 554 | 555 | @create_numpy(_Generator_loc_scale_argreplacer) 556 | @all_of_type(ndarray) 557 | def gumbel(self, loc=0.0, scale=1.0, size=None): 558 | return (loc, scale) 559 | 560 | @create_numpy(_skip_self_argreplacer) 561 | @all_of_type(ndarray) 562 | def hypergeometric(self, ngood, nbad, nsample, size=None): 563 | return (ngood, nbad, nsample) 564 | 565 | @create_numpy(_Generator_loc_scale_argreplacer) 566 | @all_of_type(ndarray) 567 | def laplace(self, loc=0.0, scale=1.0, size=None): 568 | return (loc, scale) 569 | 570 | @create_numpy(_Generator_loc_scale_argreplacer) 571 | @all_of_type(ndarray) 572 | def logistic(self, loc=0.0, scale=1.0, size=None): 573 | return (loc, scale) 574 | 575 | @create_numpy(_Generator_lognormal_argreplacer) 576 | @all_of_type(ndarray) 577 | def lognormal(self, mean=0.0, sigma=1.0, size=None): 578 | return (mean, sigma) 579 | 580 | @create_numpy(_skip_self_argreplacer) 581 | @all_of_type(ndarray) 582 | def logseries(self, p, size=None): 583 | return (p,) 584 | 585 | @create_numpy(_Generator_multinomial_argreplacer) 586 | @all_of_type(ndarray) 587 | def multinomial(self, n, pvals, size=None): 588 | return (n,) 589 | 590 | @create_numpy(_identity_argreplacer) 591 | def multivariate_hypergeometric( 592 | self, colors, 
nsample, size=None, method="marginals" 593 | ): 594 | return () 595 | 596 | @create_numpy(_skip_self_argreplacer) 597 | @all_of_type(ndarray) 598 | def multivariate_normal(self, mean, cov, size=None, check_valid="warn", tol=1e-8): 599 | return (mean, cov) 600 | 601 | @create_numpy(_skip_self_argreplacer) 602 | @all_of_type(ndarray) 603 | def negative_binomial(self, n, p, size=None): 604 | return (n, p) 605 | 606 | @create_numpy(_skip_self_argreplacer) 607 | @all_of_type(ndarray) 608 | def noncentral_chisquare(self, df, nonc, size=None): 609 | return (df, nonc) 610 | 611 | @create_numpy(_skip_self_argreplacer) 612 | @all_of_type(ndarray) 613 | def noncentral_f(self, dfnum, dfden, nonc, size=None): 614 | return (dfnum, dfden, nonc) 615 | 616 | @create_numpy(_Generator_loc_scale_argreplacer) 617 | @all_of_type(ndarray) 618 | def normal(self, loc=0.0, scale=1.0, size=None): 619 | return (loc, scale) 620 | 621 | @create_numpy(_skip_self_argreplacer) 622 | @all_of_type(ndarray) 623 | def pareto(self, a, size=None): 624 | return (a,) 625 | 626 | @create_numpy(_Generator_poisson_argreplacer) 627 | @all_of_type(ndarray) 628 | def poisson(self, lam=1.0, size=None): 629 | return (lam,) 630 | 631 | @create_numpy(_skip_self_argreplacer) 632 | @all_of_type(ndarray) 633 | def power(self, a, size=None): 634 | return (a,) 635 | 636 | @create_numpy(_Generator_rayleigh_argreplacer) 637 | @all_of_type(ndarray) 638 | def rayleigh(self, scale=1.0, size=None): 639 | return (scale,) 640 | 641 | @create_numpy(_identity_argreplacer) 642 | def standard_cauchy(self, size=None): 643 | return () 644 | 645 | @create_numpy(_dtype_argreplacer) 646 | def standard_exponential(self, size=None, dtype=float, method="zig", out=None): 647 | return (mark_dtype(dtype), mark_non_coercible(out)) 648 | 649 | @create_numpy(_dtype_argreplacer) 650 | def standard_gamma(self, shape, size=None, dtype=float, out=None): 651 | return (mark_dtype(dtype), mark_non_coercible(out)) 652 | 653 | 
    @create_numpy(_dtype_argreplacer)
    def standard_normal(self, size=None, dtype=float, out=None):
        return (mark_dtype(dtype), mark_non_coercible(out))

    @create_numpy(_skip_self_argreplacer)
    @all_of_type(ndarray)
    def standard_t(self, df, size=None):
        return (df,)

    @create_numpy(_skip_self_argreplacer)
    @all_of_type(ndarray)
    def triangular(self, left, mode, right, size=None):
        return (left, mode, right)

    @create_numpy(_Generator_uniform_argreplacer)
    @all_of_type(ndarray)
    def uniform(self, low=0.0, high=1.0, size=None):
        return (low, high)

    @create_numpy(_skip_self_argreplacer)
    @all_of_type(ndarray)
    def vonmises(self, mu, kappa, size=None):
        return (mu, kappa)

    @create_numpy(_skip_self_argreplacer)
    @all_of_type(ndarray)
    def wald(self, mean, scale, size=None):
        return (mean, scale)

    @create_numpy(_skip_self_argreplacer)
    @all_of_type(ndarray)
    def weibull(self, a, size=None):
        return (a,)

    @create_numpy(_skip_self_argreplacer)
    @all_of_type(ndarray)
    def zipf(self, a, size=None):
        return (a,)
-------------------------------------------------------------------------------- /unumpy/sparse_backend.py: --------------------------------------------------------------------------------
import numpy as np
import sparse
from uarray import Dispatchable, wrap_single_convertor
from unumpy import ufunc, ufunc_list, ndarray, dtype
from unumpy.random import RandomState, Generator
import unumpy
import functools
from collections.abc import Iterable

from typing import Dict

# Mapping from unumpy ufunc multimethods to their concrete np.ufunc
# counterparts; populated elsewhere and consulted by replace_self().
_ufunc_mapping: Dict[ufunc, np.ufunc] = {}

# uarray domain this backend serves.
__ua_domain__ = "numpy"


def array(x, *args, **kwargs):
    """Coerce ``x`` to a sparse array.

    Already-sparse inputs are returned unchanged; everything else goes
    through a dense numpy array and into COO format.  Extra positional
    args are accepted for signature compatibility but ignored.
    """
    if isinstance(x, sparse.SparseArray):
        return x

    if "dtype" in kwargs:
        # Shadows the module-level `dtype` import deliberately-scoped here.
        dtype = kwargs["dtype"]
        return sparse.COO.from_numpy(np.asarray(x, dtype=dtype))

    return sparse.COO.from_numpy(np.asarray(x))


# unumpy abstract classes -> concrete classes exposed by this backend.
_class_mapping = {
    ndarray: sparse.SparseArray,
    dtype: np.dtype,
    ufunc: np.ufunc,
    RandomState: np.random.RandomState,
    Generator: np.random.Generator,
}


def overridden_class(self):
    """Resolve the concrete class this backend substitutes for ``self``.

    Falls back to a name/domain lookup in the ``sparse`` package when the
    class is not in the explicit mapping above.
    """
    if self in _class_mapping:
        return _class_mapping[self]
    module = self.__module__.split(".")
    # Strip the private "_multimethods" segment to get the public module path.
    module = ".".join(m for m in module if m != "_multimethods")
    return _get_from_name_domain(self.__name__, module)


# Multimethods with hand-written implementations (everything else is
# resolved by name in __ua_function__).
_implementations: Dict = {
    unumpy.ufunc.__call__: np.ufunc.__call__,
    unumpy.ufunc.reduce: np.ufunc.reduce,
    unumpy.array: array,
    unumpy.asarray: array,
    unumpy.ClassOverrideMeta.overridden_class.fget: overridden_class,
}


def _get_from_name_domain(name, domain):
    """Walk the ``sparse`` package by the method's domain/qualname.

    Returns the attribute found, or NotImplemented when any path segment
    or the final name is missing.
    """
    module = sparse
    name_hierarchy = name.split(".")
    # Intermediate qualname parts (e.g. a class name) extend the domain path.
    domain_hierarchy = domain.split(".") + name_hierarchy[0:-1]
    # Skip the first segment ("numpy"): `sparse` itself stands in for it.
    for d in domain_hierarchy[1:]:
        if hasattr(module, d):
            module = getattr(module, d)
        else:
            return NotImplemented
    if hasattr(module, name_hierarchy[-1]):
        return getattr(module, name_hierarchy[-1])
    else:
        return NotImplemented


def __ua_function__(method, args, kwargs):
    """uarray protocol hook: dispatch ``method`` to this backend."""
    if method in _implementations:
        return _implementations[method](*args, **kwargs)

    # Class-override lookups are handled via overridden_class, not here.
    if len(args) != 0 and isinstance(args[0], unumpy.ClassOverrideMeta):
        return NotImplemented

    sparse_method = _get_from_name_domain(method.__qualname__, method.domain)
    if sparse_method is NotImplemented:
        return NotImplemented

    return sparse_method(*args, **kwargs)


@wrap_single_convertor
def __ua_convert__(value, dispatch_type, coerce):
    """uarray protocol hook: convert one dispatchable ``value``."""
    if dispatch_type is ufunc:
        # ufuncs are looked up by name on numpy itself.
        return getattr(np, value.name)

    if value is None:
        return None

    if dispatch_type is ndarray:
        if not
coerce:
            # Without permission to coerce, only already-sparse input passes.
            if not isinstance(value, sparse.SparseArray):
                return NotImplemented

        return convert_ndarray(value)

    return value


def convert_ndarray(value):
    """Best-effort conversion of ``value`` to a sparse.COO array.

    Ragged sequences that np.asarray rejects with RuntimeError are
    converted element-wise and stacked.
    """
    if isinstance(value, sparse.SparseArray):
        return value

    if isinstance(value, np.ndarray):
        return sparse.COO(value)

    try:
        return sparse.COO(np.asarray(value))
    except RuntimeError:
        return sparse.stack([convert_ndarray(v) for v in value])


def replace_self(func):
    """Decorator: substitute the mapped np.ufunc for ``self`` before calling.

    Returns NotImplemented when ``self`` has no registered counterpart.
    """

    @functools.wraps(func)
    def inner(self, *args, **kwargs):
        if self not in _ufunc_mapping:
            return NotImplemented

        return func(_ufunc_mapping[self], *args, **kwargs)

    return inner
-------------------------------------------------------------------------------- /unumpy/tests/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Quansight-Labs/unumpy/97dcc82e8f3343c2a66b5ee6d8081886652de1d0/unumpy/tests/__init__.py -------------------------------------------------------------------------------- /unumpy/tests/test_numpy.py: --------------------------------------------------------------------------------
import collections
import pytest
import uarray as ua
import unumpy as np
import numpy as onp
import torch
import dask.array as da
import sparse
import unumpy.numpy_backend as NumpyBackend

import unumpy.torch_backend as TorchBackend
from unumpy.dask_backend import DaskBackend
import unumpy.sparse_backend as SparseBackend

ua.set_global_backend(NumpyBackend)

dtypes = ["int8", "int16", "int32", "float32", "float64"]
# Each entry: (backend, tuple of types its results are allowed to be).
LIST_BACKENDS = [
    (
        NumpyBackend,
        (
            onp.ndarray,
            onp.generic,
            onp.ufunc,
            onp.random.RandomState,
            onp.random.Generator,
            onp.random.SeedSequence,
        ),
    ),
    (DaskBackend(), (da.Array, onp.generic, da.ufunc.ufunc, da.random.RandomState)),
    (
        SparseBackend,
        (sparse.SparseArray, onp.ndarray, onp.generic, onp.random.RandomState),
    ),
    pytest.param(
        (TorchBackend, (torch.Tensor,)),
        marks=pytest.mark.xfail(reason="PyTorch not fully NumPy compatible."),
    ),
]


# Backends where a missing implementation is a test failure, not an xfail.
FULLY_TESTED_BACKENDS = [NumpyBackend, DaskBackend]

try:
    import unumpy.cupy_backend as CupyBackend
    import cupy as cp

    LIST_BACKENDS.append(pytest.param((CupyBackend, (cp.ndarray, cp.generic))))
except ImportError:
    LIST_BACKENDS.append(
        pytest.param(
            (None, None), marks=pytest.mark.skip(reason="cupy is not importable")
        )
    )
    CupyBackend = object()  # type: ignore


# (backend, multimethod) pairs known to be unimplemented even on fully
# tested backends; these are xfailed instead of raised.
EXCEPTIONS = {
    (DaskBackend, np.in1d),
    (DaskBackend, np.intersect1d),
    (DaskBackend, np.setdiff1d),
    (DaskBackend, np.setxor1d),
    (DaskBackend, np.union1d),
    (DaskBackend, np.sort),
    (DaskBackend, np.argsort),
    (DaskBackend, np.lexsort),
    (DaskBackend, np.partition),
    (DaskBackend, np.argpartition),
    (DaskBackend, np.sort_complex),
    (DaskBackend, np.msort),
    (DaskBackend, np.searchsorted),
}


@pytest.fixture(scope="session", params=LIST_BACKENDS)
def backend(request):
    # Yields (backend, expected result types) pairs.
    backend = request.param
    return backend


@pytest.mark.parametrize(
    "method, args, kwargs",
    [
        (np.add, ([1], [2]), {}),  # type: ignore
        (np.sin, ([1.0],), {}),  # type: ignore
        (np.arange, (5, 20, 5), {}),
        (np.arange, (5, 20), {}),
        (np.arange, (5,), {}),
        (np.isinf, ([np.inf, np.NINF, 1.0, np.nan],), {}),
    ],
)
def test_ufuncs_coerce(backend, method, args, kwargs):
    """Check ufunc calls coerce their inputs and return backend types."""
    backend, types = backend
    try:
        with ua.set_backend(backend, coerce=True):
            ret = method(*args, **kwargs)
    except ua.BackendNotImplementedError:
        if backend in FULLY_TESTED_BACKENDS and (backend, method) not in
EXCEPTIONS: 99 | raise 100 | pytest.xfail(reason="The backend has no implementation for this ufunc.") 101 | 102 | assert isinstance(ret, types) 103 | if isinstance(ret, da.Array): 104 | ret.compute() 105 | 106 | 107 | def replace_args_kwargs(method, backend, args, kwargs): 108 | instance = () 109 | while not hasattr(method, "_coerce_args"): 110 | instance += (method,) 111 | method = method.__call__ 112 | 113 | if method is method.__call__: 114 | raise ValueError("Nowhere up the chain is there a multimethod.") 115 | 116 | args, kwargs, *_ = method._coerce_args( 117 | backend, instance + args, kwargs, coerce=True 118 | ) 119 | return args[len(instance) :], kwargs 120 | 121 | 122 | @pytest.mark.parametrize( 123 | "method, args, kwargs", 124 | [ 125 | (np.ndim, ([1, 2],), {}), 126 | (np.shape, ([1, 2],), {}), 127 | (np.size, ([1, 2],), {}), 128 | (np.any, ([True, False],), {}), 129 | (np.all, ([True, False],), {}), 130 | (np.min, ([1, 3, 2],), {}), 131 | (np.max, ([1, 3, 2],), {}), 132 | (np.argmin, ([1, 3, 2],), {}), 133 | (np.argmax, ([1, 3, 2],), {}), 134 | (np.nanargmin, ([1, 3, 2],), {}), 135 | (np.nanargmax, ([1, 3, 2],), {}), 136 | (np.amin, ([1, 3, 2],), {}), 137 | (np.amax, ([1, 3, 2],), {}), 138 | (np.nanmin, ([1, 3, 2],), {}), 139 | (np.nanmax, ([1, 3, 2],), {}), 140 | (np.ptp, ([1, 3, 2],), {}), 141 | (np.unique, ([1, 2, 2],), {}), 142 | (np.in1d, ([1], [1, 2, 2]), {}), 143 | (np.isin, ([1], [1, 2, 2]), {}), 144 | (np.intersect1d, ([1, 3, 4, 3], [3, 1, 2, 1]), {}), 145 | (np.setdiff1d, ([1, 3, 4, 3], [3, 1, 2, 1]), {}), 146 | (np.setxor1d, ([1, 3, 4, 3], [3, 1, 2, 1]), {}), 147 | (np.sort, ([3, 1, 2, 4],), {}), 148 | (np.lexsort, (([1, 2, 2, 3], [3, 1, 2, 1]),), {}), 149 | (np.stack, (([1, 2], [3, 4]),), {}), 150 | (np.concatenate, (([1, 2, 3], [3, 4]),), {}), 151 | (np.broadcast_to, ([1, 2], (2, 2)), {}), 152 | (np.expand_dims, ([1, 2], 1), {}), 153 | (np.squeeze, ([[[0], [1], [2]]],), {}), 154 | (np.argsort, ([3, 1, 2, 4],), {}), 155 | (np.msort, ([3, 1, 
2, 4],), {}),
        (np.sort_complex, ([3.0 + 1.0j, 1.0 - 1.0j, 2.0 - 3.0j, 4 - 3.0j],), {}),
        (np.partition, ([3, 1, 2, 4], 2), {}),
        (np.argpartition, ([3, 1, 2, 4], 2), {}),
        (np.transpose, ([[3, 1, 2, 4]],), {}),
        (np.swapaxes, ([[1, 2, 3]], 0, 1), {}),
        (np.rollaxis, ([[1, 2, 3], [1, 2, 3]], 0, 1), {}),
        (np.moveaxis, ([[1, 2, 3], [1, 2, 3]], 0, 1), {}),
        (np.column_stack, ((((1, 2, 3)), ((1, 2, 3))),), {}),
        (np.dstack, (([1, 2, 3], [2, 3, 4]),), {}),
        (np.hstack, ((((1, 2, 3)), ((1, 2, 3))),), {}),
        (np.vstack, ((((1, 2, 3)), ((1, 2, 3))),), {}),
        (np.block, ([([1, 2, 3]), ([1, 2, 3])],), {}),
        (np.reshape, ([[1, 2, 3], [1, 2, 3]], (6,)), {}),
        (np.argwhere, ([[3, 1, 2, 4]],), {}),
        (np.ravel, ([[3, 1, 2, 4]],), {}),
        (np.flatnonzero, ([[3, 1, 2, 4]],), {}),
        (np.where, ([[True, False, True, False]], [[1]], [[2]]), {}),
        (np.pad, ([1, 2, 3, 4, 5], (2, 3), "constant"), dict(constant_values=(4, 6))),
        (np.searchsorted, ([1, 2, 3, 4, 5], 2), {}),
        (np.compress, ([True, False, True, False], [0, 1, 2, 3]), {}),
        # the following case tests the fix in Quansight-Labs/unumpy#36
        (np.compress, ([False, True], [[1, 2], [3, 4], [5, 6]], 1), {}),
        (np.extract, ([True, False, True, False], [0, 1, 2, 3]), {}),
        (np.count_nonzero, ([True, False, True, False],), {}),
        (np.linspace, (0, 100, 200), {}),
        (np.logspace, (0, 4, 200), {}),
        (np.unwrap, ([0.0, 0.78539816, 1.57079633, 5.49778714, 6.28318531],), {}),
        (np.around, ([0.5, 1.5, 2.5, 3.5, 4.5],), {}),
        (np.round_, ([0.5, 1.5, 2.5, 3.5, 4.5],), {}),
        (np.fix, ([2.1, 2.9, -2.1, -2.9],), {}),
        (np.cumprod, ([1, 2, 3],), {}),
        (np.cumsum, ([1, 2, 3],), {}),
        (np.nancumprod, ([1, np.nan],), {"axis": 0}),
        (np.nancumsum, ([1, np.nan],), {"axis": 0}),
        (np.diff, ([1, 3, 2],), {}),
        (np.ediff1d, ([1, 2, 4, 7, 0],), {}),
        (np.cross, ([1, 2, 3], [4, 5, 6]), {}),
        (np.trapz, ([1, 2, 3],), {}),
        (np.i0, ([0.0, 1.0],), {}),
        (np.sinc, ([0, 1, 2],), {}),
        (np.isclose, ([1, 3, 2], [3, 2, 1]), {}),
        (np.allclose, ([1, 3, 2], [3, 2, 1]), {}),
        (np.isposinf, ([np.NINF, 0.0, np.inf],), {}),
        (np.isneginf, ([np.NINF, 0.0, np.inf],), {}),
        (np.iscomplex, ([1 + 1j, 1 + 0j, 4.5, 3, 2, 2j],), {}),
        (np.iscomplexobj, ([3, 1 + 0j],), {}),
        (np.isreal, ([1 + 1j, 1 + 0j, 4.5, 3, 2, 2j],), {}),
        (np.isrealobj, ([3, 1 + 0j],), {}),
        (np.isscalar, ([3.1],), {}),
        (np.array_equal, ([1, 2, 3], [1, 2, 3]), {}),
        (np.array_equiv, ([1, 2], [[1, 2], [1, 2]]), {}),
        (np.diag, ([1, 2, 3],), {}),
        (np.diagonal, ([[0, 1], [2, 3]],), {}),
        (np.diagflat, ([[1, 2], [3, 4]],), {}),
        (np.copy, ([1, 2, 3],), {}),
        (np.tril, ([[1, 2], [3, 4]],), {}),
        (np.triu, ([[1, 2], [3, 4]],), {}),
        (np.vander, ([1, 2, 3, 5],), {}),
        (np.tile, ([[1, 2], [3, 4]], 2), {}),
        (np.repeat, ([[1, 2], [3, 4]], 2), {"axis": 0}),
        (np.delete, ([1, 2, 3], 1), {}),
        (np.insert, ([1, 2, 3], 2, 0), {"axis": 0}),
        (np.append, ([1, 2, 3], [4, 5, 6]), {}),
        (np.resize, ([[1, 2], [3, 4]], (2, 3)), {}),
        (np.trim_zeros, ([0, 1, 2, 0],), {}),
        (np.flip, ([[1, 2], [3, 4]],), {"axis": 0}),
        (np.fliplr, ([[1, 2], [3, 4]],), {}),
        (np.flipud, ([[1, 2], [3, 4]],), {}),
        (np.roll, ([1, 2, 3], 1), {}),
        (np.rot90, ([[1, 2], [3, 4]],), {}),
        (np.angle, ([1.0, 1.0j, 1 + 1j],), {}),
        (np.real, ([1 + 2j, 3 + 4j, 5 + 6j],), {}),
        (np.imag, ([1 + 2j, 3 + 4j, 5 + 6j],), {}),
        (np.convolve, ([1, 2, 3], [0, 1, 0.5]), {}),
        (np.nan_to_num, ([np.inf, np.NINF, np.nan],), {}),
        (np.real_if_close, ([2.1 + 4e-14j, 5.2 + 3e-15j],), {}),
        (np.interp, (2.5, [1, 2, 3], [3, 2, 0]), {}),
        (np.indices, ((2, 3),), {}),
        (np.ravel_multi_index, ([[3, 6, 6], [4, 5, 1]], (7, 6)), {}),
        (np.take, ([4, 3, 5, 7, 6, 8], [0, 1, 4]), {}),
        (np.take_along_axis, ([[1, 2, 3], [4, 5, 6]], [[0, -1]], 1), {}),
        (np.choose, ([1, 2, 0], [[0, 1, 2], [10, 11, 12], [20, 21, 22]]), {}),
        (np.select, ([[True, False], [False, True]], [[0, 0], [1, 1]]), {}),
        (np.place, ([[1, 2], [3, 4]], [[False, False], [True, True]], [-99, 99]), {}),
        (np.put, ([0, 1, 2, 3, 4, 5], [0, 2], [-44, -55]), {}),
        (np.put_along_axis, ([[10, 30, 20], [60, 40, 50]], [[0], [1]], 99, 1), {}),
        (np.putmask, ([[1, 2], [3, 4]], [[False, False], [True, True]], [-99, 99]), {}),
        (np.fill_diagonal, ([[0, 0], [0, 0]], 1), {}),
        (np.nditer, ([[1, 2, 3]],), {}),
        (np.ndenumerate, ([[1, 2], [3, 4]],), {}),
        (np.ndindex, (3, 2, 1), {}),
        (np.lib.Arrayterator, ([[1, 2], [3, 4]],), {}),
        (np.median, ([[10, 7, 4], [3, 2, 1]],), {"axis": 0}),
        (np.average, ([1, 2, 3, 4],), {}),
        (np.nanmedian, ([[10.0, np.nan, 4], [3, 2, 1]],), {"axis": 0}),
        (np.corrcoef, ([1, 2, 3, 4],), {}),
        (np.correlate, ([1, 2, 3], [0, 1, 0.5]), {}),
        (np.cov, ([[0, 1, 2], [2, 1, 0]],), {}),
        (np.bincount, ([1, 2, 3, 4, 5],), {}),
        (np.histogram_bin_edges, ([1, 2, 3, 4, 5],), {"bins": "auto"}),
        (np.digitize, ([0.2, 6.4, 3.0, 1.6], [0.0, 1.0, 2.5, 4.0, 10.0]), {}),
    ],
)
def test_functions_coerce(backend, method, args, kwargs):
    """Smoke-test each single-output multimethod on each backend.

    Known backend quirks are xfailed per exception type; everything else
    re-raises.  Return-type expectations vary per method family below.
    """
    backend, types = backend
    try:
        with ua.set_backend(backend, coerce=True):
            ret = method(*args, **kwargs)
    except ua.BackendNotImplementedError:
        if backend in FULLY_TESTED_BACKENDS and (backend, method) not in EXCEPTIONS:
            raise
        pytest.xfail(reason="The backend has no implementation for this ufunc.")
    except TypeError:
        if backend is CupyBackend:
            if method is np.flip:
                pytest.xfail(reason="CuPy requires axis argument")
            elif method in {np.repeat, np.tile}:
                pytest.xfail(reason="CuPy does not accept array repeats")
        raise
    except ValueError:
        if isinstance(backend, DaskBackend) and method is np.place:
            pytest.xfail(reason="Default relies on delete and copyto")
        if backend is CupyBackend and method in {np.argwhere, np.block}:
            pytest.xfail(reason="Default relies on array_like coercion")
        raise
    except NotImplementedError:
        if backend is CupyBackend and method is np.sort_complex:
            pytest.xfail(reason="CuPy cannot sort complex data")
        raise
    except AttributeError:
        if backend is CupyBackend and method is np.lexsort:
            pytest.xfail(reason="CuPy doesn't accept tuples of arrays")
        raise

    if method is np.shape:
        assert isinstance(ret, tuple) and all(isinstance(s, int) for s in ret)
    elif method in (np.ndim, np.size):
        assert isinstance(ret, int)
    elif method in (
        np.allclose,
        np.iscomplex,
        np.iscomplexobj,
        np.isreal,
        np.isrealobj,
        np.isscalar,
        np.array_equal,
        np.array_equiv,
    ):
        assert isinstance(ret, (bool,) + types)
    elif method in {np.place, np.put, np.put_along_axis, np.putmask, np.fill_diagonal}:
        # In-place mutators return None.
        assert ret is None
    elif method in {np.nditer, np.ndenumerate, np.ndindex}:
        assert isinstance(ret, collections.abc.Iterator)
    elif method is np.lib.Arrayterator:
        assert isinstance(ret, collections.abc.Iterable)
    else:
        assert isinstance(ret, types)

    if isinstance(ret, da.Array):
        ret.compute()


def test_copyto(backend):
    """np.copyto should write src into dst under every backend."""
    backend, types = backend
    try:
        with ua.set_backend(backend, coerce=True):
            dst = np.asarray([1, 2])
            src = np.asarray([3, 4])
            np.copyto(dst, src)
            assert np.array_equal(dst, src)
    except ua.BackendNotImplementedError:
        if backend in FULLY_TESTED_BACKENDS and (backend, np.copyto) not in EXCEPTIONS:
            raise pytest.xfail(
                reason="The backend has no implementation for this ufunc."
330 | ) 331 | 332 | assert isinstance(dst, types) 333 | 334 | 335 | @pytest.mark.parametrize( 336 | "method, args, kwargs", 337 | [ 338 | (np.prod, ([1],), {}), 339 | (np.sum, ([1],), {}), 340 | (np.mean, ([[1, 2], [3, 4]],), {}), 341 | (np.std, ([1, 3, 2],), {}), 342 | (np.var, ([1, 3, 2],), {}), 343 | (np.nanmean, ([[1, np.nan], [3, 4]],), {}), 344 | (np.nanstd, ([[1, np.nan], [3, 4]],), {}), 345 | (np.nanvar, ([[1, np.nan], [3, 4]],), {}), 346 | ], 347 | ) 348 | def test_functions_coerce_with_dtype(backend, method, args, kwargs): 349 | backend, types = backend 350 | for dtype in dtypes: 351 | try: 352 | with ua.set_backend(backend, coerce=True): 353 | kwargs["dtype"] = dtype 354 | ret = method(*args, **kwargs) 355 | except ua.BackendNotImplementedError: 356 | if backend in FULLY_TESTED_BACKENDS and (backend, method) not in EXCEPTIONS: 357 | raise 358 | pytest.xfail(reason="The backend has no implementation for this ufunc.") 359 | except TypeError: 360 | if backend is CupyBackend: 361 | if method in {np.std, np.var} and not dtype.startswith("float"): 362 | pytest.xfail(reason="CuPy doesn't allow mean to cast to int") 363 | 364 | assert isinstance(ret, types) 365 | 366 | assert ret.dtype == dtype 367 | 368 | 369 | @pytest.mark.parametrize( 370 | "method, args, kwargs", 371 | [ 372 | (np.broadcast_arrays, ([1, 2], [[3, 4]]), {}), 373 | (np.meshgrid, ([1, 2, 3], [4, 5], [0, 1]), {}), 374 | (np.nonzero, ([3, 1, 2, 4],), {}), 375 | (np.where, ([[3, 1, 2, 4]],), {}), 376 | (np.gradient, ([[0, 1, 2], [3, 4, 5], [6, 7, 8]],), {}), 377 | (np.split, ([1, 2, 3, 4], 2), {}), 378 | (np.array_split, ([1, 2, 3, 4, 5, 6, 7, 8], 3), {}), 379 | (np.dsplit, ([[[1, 2], [3, 4]], [[5, 6], [7, 8]]], 2), {}), 380 | (np.hsplit, ([[1, 2], [3, 4]], 2), {}), 381 | (np.vsplit, ([[1, 2], [3, 4]], 2), {}), 382 | (np.ix_, ([0, 1], [2, 4]), {}), 383 | (np.unravel_index, ([22, 41, 37], (7, 6)), {}), 384 | (np.diag_indices, (4,), {}), 385 | (np.diag_indices_from, ([[1, 2], [3, 4]],), {}), 386 | 
(np.mask_indices, (3, np.triu), {}), 387 | (np.tril_indices, (4,), {}), 388 | (np.tril_indices_from, ([[1, 2], [3, 4]],), {}), 389 | (np.triu_indices, (4,), {}), 390 | (np.triu_indices_from, ([[1, 2], [3, 4]],), {}), 391 | (np.nested_iters, ([[0, 1], [2, 3]], [[0], [1]]), {}), 392 | (np.histogram, ([1, 2, 1],), {"bins": [0, 1, 2, 3]}), 393 | (np.histogram2d, ([1, 2], [1, 2]), {"bins": 2}), 394 | (np.histogramdd, ([[1, 2], [1, 2]],), {"bins": 2}), 395 | ], 396 | ) 397 | def test_multiple_output(backend, method, args, kwargs): 398 | backend, types = backend 399 | try: 400 | with ua.set_backend(backend, coerce=True): 401 | ret = method(*args, **kwargs) 402 | except ua.BackendNotImplementedError: 403 | if backend in FULLY_TESTED_BACKENDS and (backend, method) not in EXCEPTIONS: 404 | raise 405 | pytest.xfail(reason="The backend has no implementation for this ufunc.") 406 | except ValueError: 407 | if backend is SparseBackend: 408 | if method in { 409 | np.mask_indices, 410 | np.tril_indices, 411 | np.tril_indices_from, 412 | np.triu_indices, 413 | np.triu_indices_from, 414 | }: 415 | raise pytest.xfail( 416 | reason="Sparse's methods for triangular matrices require an array with zero fill-values as argument." 
417 | ) 418 | except TypeError: 419 | if backend is CupyBackend and method is np.unravel_index: 420 | pytest.xfail(reason="cupy.unravel_index is broken in version 6.0") 421 | raise 422 | 423 | if method is np.nested_iters: 424 | assert all(isinstance(ite, collections.abc.Iterator) for ite in ret) 425 | elif (isinstance(backend, DaskBackend) and method is np.histogram) or ( 426 | backend is NumpyBackend and method is np.histogramdd 427 | ): 428 | assert all(isinstance(arr, types + (list,)) for arr in ret) 429 | else: 430 | assert all(isinstance(arr, types) for arr in ret) 431 | 432 | for arr in ret: 433 | if isinstance(arr, da.Array): 434 | arr.compute() 435 | 436 | 437 | @pytest.mark.parametrize( 438 | "method, args, kwargs", 439 | [ 440 | (np.empty, (2,), {}), 441 | (np.empty_like, ([1, 2, 3],), {}), 442 | (np.eye, (2,), {}), 443 | (np.identity, (2,), {}), 444 | (np.full, ((1, 2, 3), 1.3), {}), 445 | (np.full_like, ([1, 2, 3], 2), {}), 446 | (np.ones, ((1, 2, 3),), {}), 447 | (np.ones_like, ([1, 2, 3],), {}), 448 | (np.zeros, ((1, 2, 3),), {}), 449 | (np.zeros_like, ([1, 2, 3],), {}), 450 | (np.asanyarray, ([1, 2, 3],), {}), 451 | (np.asfarray, ([1, 2, 3],), {}), 452 | (np.asfortranarray, ([[1, 2], [3, 4]],), {}), 453 | (np.asarray_chkfinite, ([1, 2, 3],), {}), 454 | (np.require, ([[1, 2], [3, 4]],), {}), 455 | (np.ascontiguousarray, ([1, 2, 3],), {}), 456 | (np.frombuffer, (), {}), 457 | (np.fromfunction, (lambda i: i + 1,), {"shape": (3,)}), 458 | (np.fromiter, (range(1, 4),), {}), 459 | (np.fromstring, ("1 2 3",), {"sep": " "}), 460 | (np.geomspace, (1, 1000), {"num": 4}), 461 | (np.tri, (3, 5, -1), {}), 462 | ], 463 | ) 464 | def test_array_creation(backend, method, args, kwargs): 465 | backend, types = backend 466 | if method is np.frombuffer: 467 | buffer = onp.array([1, 2, 3]).tobytes() 468 | args = args + (buffer,) 469 | for dtype in dtypes: 470 | try: 471 | with ua.set_backend(backend, coerce=True): 472 | kwargs["dtype"] = dtype 473 | ret = method(*args, 
**kwargs)
        except ua.BackendNotImplementedError:
            if backend in FULLY_TESTED_BACKENDS and (backend, method) not in EXCEPTIONS:
                raise
            pytest.xfail(reason="The backend has no implementation for this ufunc.")
        except TypeError:
            if method is np.asanyarray:
                raise pytest.xfail(
                    reason="The ufunc for this backend got an unexpected keyword."
                )
            else:
                raise

        assert isinstance(ret, types)

        if isinstance(ret, da.Array):
            ret.compute()

        assert ret.dtype == dtype


@pytest.mark.parametrize(
    "method, args, kwargs, res",
    [
        (np.add, ([1, 2], [3, 4]), {}, [4, 6]),
        (np.subtract, ([3, 4], [1, 2]), {}, [2, 2]),
        (np.multiply, ([1, 2], [4, 3]), {}, [4, 6]),
        (np.divide, ([6, 1], [3, 2]), {}, [2.0, 0.5]),
        (np.true_divide, ([6, 1], [3, 2]), {}, [2.0, 0.5]),
        (np.power, ([2, 3], [3, 2]), {}, [8, 9]),
        (np.float_power, ([2, 3], [3, 2]), {}, [8, 9]),
        (np.positive, ([1, -2],), {}, [1, -2]),
        (np.negative, ([-2, 3],), {}, [2, -3]),
        (np.conjugate, ([1.0 + 2.0j, -1.0 - 1j],), {}, [1.0 - 2.0j, -1.0 + 1j]),
        (np.conj, ([1.0 + 2.0j, -1.0 - 1j],), {}, [1.0 - 2.0j, -1.0 + 1j]),
        (np.exp, ([0, 1, 2],), {}, [1.0, 2.718281828459045, 7.38905609893065]),
        (np.exp2, ([3, 4],), {}, [8, 16]),
        (np.log, ([1.0, np.e, np.e ** 2],), {}, [0.0, 1.0, 2.0]),
        (np.log2, ([1, 2, 2 ** 4],), {}, [0.0, 1.0, 4.0]),
        (np.log10, ([1e-5, -3.0],), {}, [-5.0, np.NaN]),
        (np.sqrt, ([1, 4, 9],), {}, [1, 2, 3]),
        (np.square, ([2, 3, 4],), {}, [4, 9, 16]),
        (np.cbrt, ([1, 8, 27],), {}, [1.0, 2.0, 3.0]),
        (np.reciprocal, ([1.0, 2.0, 4.0],), {}, [1.0, 0.5, 0.25]),
        (
            np.broadcast_to,
            ([1, 2, 3], (3, 3)),
            {},
            np.array([[1, 2, 3], [1, 2, 3], [1, 2, 3]]),
        ),
        (
            np.degrees,
            ([0.0, 0.52359878, 1.04719755, 1.57079633],),
            {},
            [0.0, 30.0, 60.0, 90.0],
        ),
        (
            np.radians,
            ([0.0, 30.0, 60.0, 90.0],),
            {},
            [0.0, 0.52359878, 1.04719755, 1.57079633],
        ),
    ],
)
def test_ufuncs_results(backend, method, args, kwargs, res):
    """Check ufuncs return numerically correct values (NaN-tolerant)."""
    backend, types = backend
    try:
        with ua.set_backend(backend, coerce=True):
            ret = method(*args, **kwargs)

            res = np.asarray(res)
            assert np.allclose(ret, res, equal_nan=True)
    except ua.BackendNotImplementedError:
        if backend in FULLY_TESTED_BACKENDS:
            raise
        pytest.xfail(reason="The backend has no implementation for this ufunc.")


@pytest.mark.parametrize(
    "method, args, kwargs",
    [
        (np.linalg.multi_dot, ([[0, 1], [[1, 2], [3, 4]], [1, 0]],), {}),
        (np.linalg.matrix_power, ([[1, 2], [3, 4]], 2), {}),
        (np.linalg.cholesky, ([[1, -2j], [2j, 5]],), {}),
        (np.linalg.qr, ([[1, 2], [3, 4]],), {}),
        (np.linalg.svd, ([[1, 2], [3, 4]],), {}),
        (np.linalg.eig, ([[1, 1j], [-1j, 1]],), {}),
        (np.linalg.eigh, ([[1, -2j], [2j, 5]],), {}),
        (np.linalg.eigvals, ([[1, 2], [3, 4]],), {}),
        (np.linalg.eigvalsh, ([[1, -2j], [2j, 5]],), {}),
        (np.linalg.norm, ([[1, 2], [3, 4]],), {}),
        (np.linalg.cond, ([[1, 0, -1], [0, 1, 0], [1, 0, 1]],), {}),
        (np.linalg.det, ([[1, 2], [3, 4]],), {}),
        (np.linalg.matrix_rank, (np.eye(4),), {}),
        (np.linalg.slogdet, ([[1, 2], [3, 4]],), {}),
        (np.linalg.solve, ([[3, 1], [1, 2]], [9, 8]), {}),
        (
            np.linalg.tensorsolve,
            (
                np.eye((2 * 3 * 4)).reshape(2 * 3, 4, 2, 3, 4),
                np.empty(shape=(2 * 3, 4)),
            ),
            {},
        ),
        (np.linalg.lstsq, ([[3, 1], [1, 2]], [9, 8]), {}),
        (np.linalg.inv, ([[1.0, 2.0], [3.0, 4.0]],), {}),
        (np.linalg.pinv, ([[1.0, 2.0], [3.0, 4.0]],), {}),
        (np.linalg.tensorinv, (np.eye(4 * 6).reshape((4, 6, 8, 3)),), {}),
    ],
)
def test_linalg(backend, method, args, kwargs):
    """Smoke-test np.linalg multimethods; tuple-returning ones are unpacked."""
    backend, types = backend
    try:
        with ua.set_backend(backend, coerce=True):
            ret = method(*args, **kwargs)
    except ua.BackendNotImplementedError:
        if backend in FULLY_TESTED_BACKENDS and (backend, method) not in EXCEPTIONS:
            raise
        pytest.xfail(reason="The backend has no implementation for this ufunc.")

    # These factorizations return multiple arrays; check each element.
    if method in {
        np.linalg.qr,
        np.linalg.svd,
        np.linalg.eig,
        np.linalg.eigh,
        np.linalg.slogdet,
        np.linalg.lstsq,
    }:
        assert all(isinstance(arr, types) for arr in ret)

        for arr in ret:
            if isinstance(arr, da.Array):
                arr.compute()
    else:
        assert isinstance(ret, types)

        if isinstance(ret, da.Array):
            ret.compute()


@pytest.mark.parametrize(
    "method, args, kwargs",
    [
        (np.random.default_rng, (42,), {}),
        (np.random.RandomState, (42,), {}),
        # (np.random.Generator, (), {}),
        # (np.random.BitGenerator, (), {}),
        (np.random.SeedSequence, (42,), {}),
        (np.random.rand, (1, 2), {}),
        (np.random.randn, (1, 2), {}),
        (np.random.randint, ([1, 2],), {}),
        (np.random.random_integers, (2,), {}),
        (np.random.random_sample, (), {"size": 2}),
        (np.random.random, (), {"size": 2}),
        (np.random.ranf, (), {"size": 2}),
        (np.random.sample, (), {"size": 2}),
        (np.random.choice, ([1, 2],), {}),
        (np.random.bytes, (10,), {}),
        (np.random.shuffle, ([1, 2, 3, 4],), {}),
        (np.random.permutation, ([1, 2, 3, 4],), {}),
        (np.random.beta, (1, 2), {"size": 2}),
        (np.random.binomial, (10, 0.5), {"size": 2}),
        (np.random.chisquare, (2,), {"size": 2}),
        (np.random.dirichlet, ((10, 5, 3),), {}),
        (np.random.exponential, (), {"size": 2}),
        (np.random.f, (1.0, 48.0), {"size": 2}),
        (np.random.gamma, (2.0, 2.0), {"size": 2}),
        (np.random.geometric, (0.35,), {"size": 2}),
        (np.random.gumbel, (0.0, 0.1), {"size":
2}),
        (np.random.hypergeometric, (100, 2, 10), {"size": 2}),
        (np.random.laplace, (0.0, 1.0), {"size": 2}),
        (np.random.logistic, (10, 1), {"size": 2}),
        (np.random.lognormal, (3.0, 1.0), {"size": 2}),
        (np.random.logseries, (0.6,), {"size": 2}),
        (np.random.multinomial, (20, [1 / 6.0] * 6), {}),
        (np.random.multivariate_normal, ([0, 0], [[1, 0], [0, 100]]), {}),
        (np.random.negative_binomial, (1, 0.1), {"size": 2}),
        (np.random.noncentral_chisquare, (3, 20), {"size": 2}),
        (np.random.noncentral_f, (3, 20, 3.0), {"size": 2}),
        (np.random.normal, (0, 0.1), {"size": 2}),
        (np.random.pareto, (3.0,), {"size": 2}),
        (np.random.poisson, (5,), {"size": 2}),
        (np.random.power, (5.0,), {"size": 2}),
        (np.random.rayleigh, (3,), {"size": 2}),
        (np.random.standard_cauchy, (), {"size": 2}),
        (np.random.standard_exponential, (), {"size": 2}),
        (np.random.standard_gamma, (2.0,), {"size": 2}),
        (np.random.standard_normal, (), {"size": 2}),
        (np.random.standard_t, (10,), {"size": 2}),
        (np.random.triangular, (-3, 0, 8), {"size": 2}),
        (np.random.uniform, (-1, 0), {"size": 2}),
        (np.random.vonmises, (0.0, 4.0), {"size": 2}),
        (np.random.wald, (3, 2), {"size": 2}),
        (np.random.weibull, (5.0,), {"size": 2}),
        (np.random.zipf, (2.0,), {"size": 2}),
        (np.random.seed, (), {}),
        (np.random.get_state, (), {}),
        # (np.random.set_state, (), {}),
    ],
)
def test_random(backend, method, args, kwargs):
    """Smoke-test module-level np.random multimethods on each backend."""
    backend, types = backend
    try:
        with ua.set_backend(backend, coerce=True):
            ret = method(*args, **kwargs)
    except ua.BackendNotImplementedError:
        if backend in FULLY_TESTED_BACKENDS and (backend, method) not in EXCEPTIONS:
            raise
        pytest.xfail(reason="The backend has no implementation for this ufunc.")

    if method is np.random.bytes:
        assert isinstance(ret, bytes)
    elif method in {np.random.shuffle, np.random.seed}:
        # In-place / stateful operations return None.
        assert ret is None
    elif method is np.random.get_state:
        assert isinstance(ret, tuple)
    else:
        assert isinstance(ret, types)

    if isinstance(ret, da.Array):
        ret.compute()


@pytest.mark.parametrize(
    "method, args, kwargs",
    [
        (np.random.Generator.random, (), {"size": 2}),
        (np.random.Generator.choice, ([1, 2],), {}),
        (np.random.Generator.bytes, (10,), {}),
        (np.random.Generator.shuffle, ([1, 2, 3, 4],), {}),
        (np.random.Generator.permutation, ([1, 2, 3, 4],), {}),
        (np.random.Generator.beta, (1, 2), {"size": 2}),
        (np.random.Generator.binomial, (10, 0.5), {"size": 2}),
        (np.random.Generator.chisquare, (2,), {"size": 2}),
        (np.random.Generator.dirichlet, ((10, 5, 3),), {}),
        (np.random.Generator.exponential, (), {"size": 2}),
        (np.random.Generator.f, (1.0, 48.0), {"size": 2}),
        (np.random.Generator.gamma, (2.0, 2.0), {"size": 2}),
        (np.random.Generator.geometric, (0.35,), {"size": 2}),
        (np.random.Generator.gumbel, (0.0, 0.1), {"size": 2}),
        (np.random.Generator.hypergeometric, (100, 2, 10), {"size": 2}),
        (np.random.Generator.laplace, (0.0, 1.0), {"size": 2}),
        (np.random.Generator.logistic, (10, 1), {"size": 2}),
        (np.random.Generator.lognormal, (3.0, 1.0), {"size": 2}),
        (np.random.Generator.logseries, (0.6,), {"size": 2}),
        (np.random.Generator.multinomial, (20, [1 / 6.0] * 6), {}),
        (np.random.Generator.multivariate_normal, ([0, 0], [[1, 0], [0, 100]]), {}),
        (np.random.Generator.negative_binomial, (1, 0.1), {"size": 2}),
        (np.random.Generator.noncentral_chisquare, (3, 20), {"size": 2}),
        (np.random.Generator.noncentral_f, (3, 20, 3.0), {"size": 2}),
        (np.random.Generator.normal, (0, 0.1), {"size": 2}),
        (np.random.Generator.pareto, (3.0,), {"size": 2}),
        (np.random.Generator.poisson, (5,), {"size": 2}),
        (np.random.Generator.power, (5.0,), {"size": 2}),
        (np.random.Generator.rayleigh, (3,), {"size": 2}),
        (np.random.Generator.standard_cauchy, (), {"size": 2}),
        (np.random.Generator.standard_exponential, (), {"size": 2}),
        (np.random.Generator.standard_gamma, (2.0,), {"size": 2}),
        (np.random.Generator.standard_normal, (), {"size": 2}),
        (np.random.Generator.standard_t, (10,), {"size": 2}),
        (np.random.Generator.triangular, (-3, 0, 8), {"size": 2}),
        (np.random.Generator.uniform, (-1, 0), {"size": 2}),
        (np.random.Generator.vonmises, (0.0, 4.0), {"size": 2}),
        (np.random.Generator.wald, (3, 2), {"size": 2}),
        (np.random.Generator.weibull, (5.0,), {"size": 2}),
        (np.random.Generator.zipf, (2.0,), {"size": 2}),
    ],
)
def test_Generator(backend, method, args, kwargs):
    """Smoke-test np.random.Generator methods through a default_rng instance.

    Methods are unbound here, so the rng is passed explicitly as ``self``.
    """
    backend, types = backend
    try:
        with ua.set_backend(backend, coerce=True):
            rng = np.random.default_rng()
            ret = method(rng, *args, **kwargs)
    except ua.BackendNotImplementedError:
        if backend in FULLY_TESTED_BACKENDS and (backend, method) not in EXCEPTIONS:
            raise
        pytest.xfail(reason="The backend has no implementation for this ufunc.")

    if method is np.random.Generator.bytes:
        assert isinstance(ret, bytes)
    elif method is np.random.Generator.shuffle:
        assert ret is None
    else:
        assert isinstance(ret, types)

    if isinstance(ret, da.Array):
        ret.compute()


# NOTE(review): the parametrize list below continues past this chunk
# boundary — kept verbatim and truncated.
@pytest.mark.parametrize(
    "method, args, kwargs",
    [
        (
            np.apply_along_axis,
            (lambda a: (a[0] + a[-1]) * 0.5, 1, [[1, 2, 3], [4, 5, 6], [7, 8, 9]]),
            {},
        ),
        (np.apply_over_axes, (np.sum, [[1, 2, 3], [4, 5, 6], [7, 8, 9]], [0, 1]), {}),
        (np.frompyfunc, (bin, 1, 1), {}),
        (
            np.piecewise,
            (
                [0, 1, 2, 3],
                [[True, False, True, False], [False, True, False, True]],
                [0, 1],
            ),
{}, 781 | ), 782 | ], 783 | ) 784 | def test_functional(backend, method, args, kwargs): 785 | backend, types = backend 786 | try: 787 | with ua.set_backend(backend, coerce=True): 788 | ret = method(*args, **kwargs) 789 | except ua.BackendNotImplementedError: 790 | if backend in FULLY_TESTED_BACKENDS and (backend, method) not in EXCEPTIONS: 791 | raise 792 | pytest.xfail(reason="The backend has no implementation for this ufunc.") 793 | 794 | assert isinstance(ret, types) 795 | 796 | if isinstance(ret, da.Array): 797 | ret.compute() 798 | 799 | 800 | @pytest.mark.parametrize( 801 | "method, args", 802 | [ 803 | (np.c_, ([1, 2, 3], [4, 5, 6])), 804 | (np.r_, ([1, 2, 3], [4, 5, 6])), 805 | (np.s_, (slice(2, None, 2),)), 806 | ], 807 | ) 808 | def test_class_getitem(backend, method, args): 809 | ( 810 | backend, 811 | types, 812 | ) = backend 813 | try: 814 | with ua.set_backend(backend, coerce=True): 815 | ret = method[args] 816 | except ua.BackendNotImplementedError: 817 | if backend in FULLY_TESTED_BACKENDS and (backend, method) not in EXCEPTIONS: 818 | raise 819 | pytest.xfail(reason="The backend has no implementation for this class.") 820 | 821 | if method is np.s_: 822 | all(isinstance(s, slice) for s in ret) 823 | else: 824 | assert isinstance(ret, types) 825 | 826 | 827 | def test_class_overriding(): 828 | with ua.set_backend(NumpyBackend, coerce=True): 829 | assert isinstance(onp.add, np.ufunc) 830 | assert isinstance(onp.dtype("float64"), np.dtype) 831 | assert np.dtype("float64") == onp.float64 832 | assert isinstance(np.dtype("float64"), onp.dtype) 833 | assert isinstance(onp.random.RandomState(), np.random.RandomState) 834 | assert isinstance(onp.random.Generator(onp.random.PCG64()), np.random.Generator) 835 | assert issubclass(onp.ufunc, np.ufunc) 836 | assert issubclass(onp.random.RandomState, np.random.RandomState) 837 | assert issubclass(onp.random.Generator, np.random.Generator) 838 | 839 | with ua.set_backend(DaskBackend(), coerce=True): 840 | assert 
isinstance(da.add, np.ufunc) 841 | assert isinstance(onp.dtype("float64"), np.dtype) 842 | assert np.dtype("float64") == onp.float64 843 | assert isinstance(np.dtype("float64"), onp.dtype) 844 | assert isinstance(da.random.RandomState(), np.random.RandomState) 845 | assert issubclass(da.ufunc.ufunc, np.ufunc) 846 | assert issubclass(da.random.RandomState, np.random.RandomState) 847 | 848 | with ua.set_backend(SparseBackend, coerce=True): 849 | assert isinstance(onp.add, np.ufunc) 850 | assert isinstance(onp.dtype("float64"), np.dtype) 851 | assert np.dtype("float64") == onp.float64 852 | assert isinstance(np.dtype("float64"), onp.dtype) 853 | assert isinstance(onp.random.RandomState(), np.random.RandomState) 854 | assert isinstance(onp.random.Generator(onp.random.PCG64()), np.random.Generator) 855 | assert issubclass(onp.ufunc, np.ufunc) 856 | assert issubclass(onp.random.RandomState, np.random.RandomState) 857 | assert issubclass(onp.random.Generator, np.random.Generator) 858 | 859 | if hasattr(CupyBackend, "__ua_function__"): 860 | with ua.set_backend(CupyBackend, coerce=True): 861 | assert isinstance(cp.add, np.ufunc) 862 | assert isinstance(cp.dtype("float64"), np.dtype) 863 | assert np.dtype("float64") == cp.float64 864 | assert isinstance(np.dtype("float64"), cp.dtype) 865 | assert isinstance(cp.random.RandomState(), np.random.RandomState) 866 | assert issubclass(cp.ufunc, np.ufunc) 867 | assert issubclass(cp.random.RandomState, np.random.RandomState) 868 | -------------------------------------------------------------------------------- /unumpy/torch_backend.py: -------------------------------------------------------------------------------- 1 | import unumpy 2 | import torch 3 | from uarray import Dispatchable, wrap_single_convertor 4 | import unumpy 5 | from unumpy import ufunc, ufunc_list, ndarray 6 | 7 | __ua_domain__ = "numpy" 8 | 9 | 10 | def asarray(a, dtype=None, order=None): 11 | if torch.is_tensor(a): 12 | if dtype is not None and a.dtype != dtype: 
13 | ret = a.clone() 14 | if a.requires_grad: 15 | ret = ret.requires_grad_() 16 | return ret 17 | 18 | return a 19 | try: 20 | import numpy as np 21 | 22 | if isinstance(a, np.ndarray): 23 | return torch.from_numpy(a) 24 | except ImportError: 25 | pass 26 | 27 | return torch.tensor(a, dtype=dtype) 28 | 29 | 30 | _implementations = { 31 | unumpy.ufunc.__call__: lambda x, *a, **kw: x(*a, **kw), 32 | unumpy.asarray: asarray, 33 | unumpy.array: torch.Tensor, 34 | unumpy.arange: lambda start, stop, step, **kwargs: torch.arange( 35 | start, stop, step, **kwargs 36 | ), 37 | } 38 | 39 | 40 | def __ua_function__(method, args, kwargs): 41 | if method in _implementations: 42 | return _implementations[method](*args, **kwargs) 43 | 44 | if not hasattr(torch, method.__name__): 45 | return NotImplemented 46 | 47 | return getattr(torch, method.__name__)(*args, **kwargs) 48 | 49 | 50 | @wrap_single_convertor 51 | def __ua_convert__(value, dispatch_type, coerce): 52 | if dispatch_type is ufunc and value in _ufunc_mapping: 53 | return _ufunc_mapping[value] 54 | 55 | if value is None: 56 | return None 57 | 58 | if dispatch_type is ndarray: 59 | if not coerce and not torch.is_tensor(value): 60 | return NotImplemented 61 | 62 | return asarray(value) if value is not None else None 63 | 64 | return value 65 | 66 | 67 | _ufunc_mapping = {} 68 | 69 | 70 | for ufunc_name in ufunc_list: 71 | if ufunc_name.startswith("arc"): 72 | torch_name = ufunc_name.replace("arc", "a") 73 | else: 74 | torch_name = ufunc_name 75 | 76 | if hasattr(torch, torch_name): 77 | _ufunc_mapping[getattr(unumpy, ufunc_name)] = getattr(torch, torch_name) 78 | -------------------------------------------------------------------------------- /versioneer.py: -------------------------------------------------------------------------------- 1 | # Version: 0.18 2 | 3 | """The Versioneer - like a rocketeer, but for versions. 4 | 5 | The Versioneer 6 | ============== 7 | 8 | * like a rocketeer, but for versions! 
9 | * https://github.com/warner/python-versioneer 10 | * Brian Warner 11 | * License: Public Domain 12 | * Compatible With: python2.6, 2.7, 3.2, 3.3, 3.4, 3.5, 3.6, and pypy 13 | * [![Latest Version] 14 | (https://pypip.in/version/versioneer/badge.svg?style=flat) 15 | ](https://pypi.python.org/pypi/versioneer/) 16 | * [![Build Status] 17 | (https://travis-ci.org/warner/python-versioneer.png?branch=master) 18 | ](https://travis-ci.org/warner/python-versioneer) 19 | 20 | This is a tool for managing a recorded version number in distutils-based 21 | python projects. The goal is to remove the tedious and error-prone "update 22 | the embedded version string" step from your release process. Making a new 23 | release should be as easy as recording a new tag in your version-control 24 | system, and maybe making new tarballs. 25 | 26 | 27 | ## Quick Install 28 | 29 | * `pip install versioneer` to somewhere to your $PATH 30 | * add a `[versioneer]` section to your setup.cfg (see below) 31 | * run `versioneer install` in your source tree, commit the results 32 | 33 | ## Version Identifiers 34 | 35 | Source trees come from a variety of places: 36 | 37 | * a version-control system checkout (mostly used by developers) 38 | * a nightly tarball, produced by build automation 39 | * a snapshot tarball, produced by a web-based VCS browser, like github's 40 | "tarball from tag" feature 41 | * a release tarball, produced by "setup.py sdist", distributed through PyPI 42 | 43 | Within each source tree, the version identifier (either a string or a number, 44 | this tool is format-agnostic) can come from a variety of places: 45 | 46 | * ask the VCS tool itself, e.g. 
"git describe" (for checkouts), which knows 47 | about recent "tags" and an absolute revision-id 48 | * the name of the directory into which the tarball was unpacked 49 | * an expanded VCS keyword ($Id$, etc) 50 | * a `_version.py` created by some earlier build step 51 | 52 | For released software, the version identifier is closely related to a VCS 53 | tag. Some projects use tag names that include more than just the version 54 | string (e.g. "myproject-1.2" instead of just "1.2"), in which case the tool 55 | needs to strip the tag prefix to extract the version identifier. For 56 | unreleased software (between tags), the version identifier should provide 57 | enough information to help developers recreate the same tree, while also 58 | giving them an idea of roughly how old the tree is (after version 1.2, before 59 | version 1.3). Many VCS systems can report a description that captures this, 60 | for example `git describe --tags --dirty --always` reports things like 61 | "0.7-1-g574ab98-dirty" to indicate that the checkout is one revision past the 62 | 0.7 tag, has a unique revision id of "574ab98", and is "dirty" (it has 63 | uncommitted changes. 64 | 65 | The version identifier is used for multiple purposes: 66 | 67 | * to allow the module to self-identify its version: `myproject.__version__` 68 | * to choose a name and prefix for a 'setup.py sdist' tarball 69 | 70 | ## Theory of Operation 71 | 72 | Versioneer works by adding a special `_version.py` file into your source 73 | tree, where your `__init__.py` can import it. This `_version.py` knows how to 74 | dynamically ask the VCS tool for version information at import time. 75 | 76 | `_version.py` also contains `$Revision$` markers, and the installation 77 | process marks `_version.py` to have this marker rewritten with a tag name 78 | during the `git archive` command. As a result, generated tarballs will 79 | contain enough information to get the proper version. 
80 | 81 | To allow `setup.py` to compute a version too, a `versioneer.py` is added to 82 | the top level of your source tree, next to `setup.py` and the `setup.cfg` 83 | that configures it. This overrides several distutils/setuptools commands to 84 | compute the version when invoked, and changes `setup.py build` and `setup.py 85 | sdist` to replace `_version.py` with a small static file that contains just 86 | the generated version data. 87 | 88 | ## Installation 89 | 90 | See [INSTALL.md](./INSTALL.md) for detailed installation instructions. 91 | 92 | ## Version-String Flavors 93 | 94 | Code which uses Versioneer can learn about its version string at runtime by 95 | importing `_version` from your main `__init__.py` file and running the 96 | `get_versions()` function. From the "outside" (e.g. in `setup.py`), you can 97 | import the top-level `versioneer.py` and run `get_versions()`. 98 | 99 | Both functions return a dictionary with different flavors of version 100 | information: 101 | 102 | * `['version']`: A condensed version string, rendered using the selected 103 | style. This is the most commonly used value for the project's version 104 | string. The default "pep440" style yields strings like `0.11`, 105 | `0.11+2.g1076c97`, or `0.11+2.g1076c97.dirty`. See the "Styles" section 106 | below for alternative styles. 107 | 108 | * `['full-revisionid']`: detailed revision identifier. For Git, this is the 109 | full SHA1 commit id, e.g. "1076c978a8d3cfc70f408fe5974aa6c092c949ac". 110 | 111 | * `['date']`: Date and time of the latest `HEAD` commit. For Git, it is the 112 | commit date in ISO 8601 format. This will be None if the date is not 113 | available. 114 | 115 | * `['dirty']`: a boolean, True if the tree has uncommitted changes. 
Note that 116 | this is only accurate if run in a VCS checkout, otherwise it is likely to 117 | be False or None 118 | 119 | * `['error']`: if the version string could not be computed, this will be set 120 | to a string describing the problem, otherwise it will be None. It may be 121 | useful to throw an exception in setup.py if this is set, to avoid e.g. 122 | creating tarballs with a version string of "unknown". 123 | 124 | Some variants are more useful than others. Including `full-revisionid` in a 125 | bug report should allow developers to reconstruct the exact code being tested 126 | (or indicate the presence of local changes that should be shared with the 127 | developers). `version` is suitable for display in an "about" box or a CLI 128 | `--version` output: it can be easily compared against release notes and lists 129 | of bugs fixed in various releases. 130 | 131 | The installer adds the following text to your `__init__.py` to place a basic 132 | version in `YOURPROJECT.__version__`: 133 | 134 | from ._version import get_versions 135 | __version__ = get_versions()['version'] 136 | del get_versions 137 | 138 | ## Styles 139 | 140 | The setup.cfg `style=` configuration controls how the VCS information is 141 | rendered into a version string. 142 | 143 | The default style, "pep440", produces a PEP440-compliant string, equal to the 144 | un-prefixed tag name for actual releases, and containing an additional "local 145 | version" section with more detail for in-between builds. For Git, this is 146 | TAG[+DISTANCE.gHEX[.dirty]] , using information from `git describe --tags 147 | --dirty --always`. For example "0.11+2.g1076c97.dirty" indicates that the 148 | tree is like the "1076c97" commit but has uncommitted changes (".dirty"), and 149 | that this commit is two revisions ("+2") beyond the "0.11" tag. For released 150 | software (exactly equal to a known tag), the identifier will only contain the 151 | stripped tag, e.g. "0.11". 
Other styles are available. See [details.md](details.md) in the Versioneer
source tree for descriptions.

## Debugging

Versioneer tries to avoid fatal errors: if something goes wrong, it will tend
to return a version of "0+unknown". To investigate the problem, run `setup.py
version`, which will run the version-lookup code in a verbose mode, and will
display the full contents of `get_versions()` (including the `error` string,
which may help identify what went wrong).

## Known Limitations

Some situations are known to cause problems for Versioneer. This details the
most significant ones. More can be found on Github
[issues page](https://github.com/warner/python-versioneer/issues).

### Subprojects

Versioneer has limited support for source trees in which `setup.py` is not in
the root directory (e.g. `setup.py` and `.git/` are *not* siblings). There are
two common reasons why `setup.py` might not be in the root:

* Source trees which contain multiple subprojects, such as
  [Buildbot](https://github.com/buildbot/buildbot), which contains both
  "master" and "slave" subprojects, each with their own `setup.py`,
  `setup.cfg`, and `tox.ini`. Projects like these produce multiple PyPI
  distributions (and upload multiple independently-installable tarballs).
* Source trees whose main purpose is to contain a C library, but which also
  provide bindings to Python (and perhaps other languages) in subdirectories.

Versioneer will look for `.git` in parent directories, and most operations
should get the right version string. However `pip` and `setuptools` have bugs
and implementation details which frequently cause `pip install .` from a
subproject directory to fail to find a correct version string (so it usually
defaults to `0+unknown`).
189 | 190 | `pip install --editable .` should work correctly. `setup.py install` might 191 | work too. 192 | 193 | Pip-8.1.1 is known to have this problem, but hopefully it will get fixed in 194 | some later version. 195 | 196 | [Bug #38](https://github.com/warner/python-versioneer/issues/38) is tracking 197 | this issue. The discussion in 198 | [PR #61](https://github.com/warner/python-versioneer/pull/61) describes the 199 | issue from the Versioneer side in more detail. 200 | [pip PR#3176](https://github.com/pypa/pip/pull/3176) and 201 | [pip PR#3615](https://github.com/pypa/pip/pull/3615) contain work to improve 202 | pip to let Versioneer work correctly. 203 | 204 | Versioneer-0.16 and earlier only looked for a `.git` directory next to the 205 | `setup.cfg`, so subprojects were completely unsupported with those releases. 206 | 207 | ### Editable installs with setuptools <= 18.5 208 | 209 | `setup.py develop` and `pip install --editable .` allow you to install a 210 | project into a virtualenv once, then continue editing the source code (and 211 | test) without re-installing after every change. 212 | 213 | "Entry-point scripts" (`setup(entry_points={"console_scripts": ..})`) are a 214 | convenient way to specify executable scripts that should be installed along 215 | with the python package. 216 | 217 | These both work as expected when using modern setuptools. When using 218 | setuptools-18.5 or earlier, however, certain operations will cause 219 | `pkg_resources.DistributionNotFound` errors when running the entrypoint 220 | script, which must be resolved by re-installing the package. This happens 221 | when the install happens with one version, then the egg_info data is 222 | regenerated while a different version is checked out. Many setup.py commands 223 | cause egg_info to be rebuilt (including `sdist`, `wheel`, and installing into 224 | a different virtualenv), so this can be surprising. 
[Bug #83](https://github.com/warner/python-versioneer/issues/83) describes
this one, but upgrading to a newer version of setuptools should probably
resolve it.

### Unicode version strings

While Versioneer works (and is continually tested) with both Python 2 and
Python 3, it is not entirely consistent with bytes-vs-unicode distinctions.
Newer releases probably generate unicode version strings on py2. It's not
clear that this is wrong, but it may be surprising for applications which then
write these strings to a network connection or include them in bytes-oriented
APIs like cryptographic checksums.

[Bug #71](https://github.com/warner/python-versioneer/issues/71) investigates
this question.


## Updating Versioneer

To upgrade your project to a new release of Versioneer, do the following:

* install the new Versioneer (`pip install -U versioneer` or equivalent)
* edit `setup.cfg`, if necessary, to include any new configuration settings
  indicated by the release notes. See [UPGRADING](./UPGRADING.md) for details.
* re-run `versioneer install` in your source tree, to replace
  `SRC/_version.py`
* commit any changed files

## Future Directions

This tool is designed to make it easily extended to other version-control
systems: all VCS-specific components are in separate directories like
src/git/ . The top-level `versioneer.py` script is assembled from these
components by running make-versioneer.py . In the future, make-versioneer.py
will take a VCS name as an argument, and will construct a version of
`versioneer.py` that is specific to the given VCS. It might also take the
configuration arguments that are currently provided manually during
installation by editing setup.py .
Alternatively, it might go the other 264 | direction and include code from all supported VCS systems, reducing the 265 | number of intermediate scripts. 266 | 267 | 268 | ## License 269 | 270 | To make Versioneer easier to embed, all its code is dedicated to the public 271 | domain. The `_version.py` that it creates is also in the public domain. 272 | Specifically, both are released under the Creative Commons "Public Domain 273 | Dedication" license (CC0-1.0), as described in 274 | https://creativecommons.org/publicdomain/zero/1.0/ . 275 | 276 | """ 277 | 278 | from __future__ import print_function 279 | 280 | try: 281 | import configparser 282 | except ImportError: 283 | import ConfigParser as configparser # type: ignore 284 | import errno 285 | import json 286 | import os 287 | import re 288 | import subprocess 289 | import sys 290 | 291 | 292 | class VersioneerConfig: 293 | """Container for Versioneer configuration parameters.""" 294 | 295 | 296 | def get_root(): 297 | """Get the project root directory. 298 | 299 | We require that all commands are run from the project root, i.e. the 300 | directory that contains setup.py, setup.cfg, and versioneer.py . 301 | """ 302 | root = os.path.realpath(os.path.abspath(os.getcwd())) 303 | setup_py = os.path.join(root, "setup.py") 304 | versioneer_py = os.path.join(root, "versioneer.py") 305 | if not (os.path.exists(setup_py) or os.path.exists(versioneer_py)): 306 | # allow 'python path/to/setup.py COMMAND' 307 | root = os.path.dirname(os.path.realpath(os.path.abspath(sys.argv[0]))) 308 | setup_py = os.path.join(root, "setup.py") 309 | versioneer_py = os.path.join(root, "versioneer.py") 310 | if not (os.path.exists(setup_py) or os.path.exists(versioneer_py)): 311 | err = ( 312 | "Versioneer was unable to run the project root directory. 
" 313 | "Versioneer requires setup.py to be executed from " 314 | "its immediate directory (like 'python setup.py COMMAND'), " 315 | "or in a way that lets it use sys.argv[0] to find the root " 316 | "(like 'python path/to/setup.py COMMAND')." 317 | ) 318 | raise VersioneerBadRootError(err) 319 | try: 320 | # Certain runtime workflows (setup.py install/develop in a setuptools 321 | # tree) execute all dependencies in a single python process, so 322 | # "versioneer" may be imported multiple times, and python's shared 323 | # module-import table will cache the first one. So we can't use 324 | # os.path.dirname(__file__), as that will find whichever 325 | # versioneer.py was first imported, even in later projects. 326 | me = os.path.realpath(os.path.abspath(__file__)) 327 | me_dir = os.path.normcase(os.path.splitext(me)[0]) 328 | vsr_dir = os.path.normcase(os.path.splitext(versioneer_py)[0]) 329 | if me_dir != vsr_dir: 330 | print( 331 | "Warning: build in %s is using versioneer.py from %s" 332 | % (os.path.dirname(me), versioneer_py) 333 | ) 334 | except NameError: 335 | pass 336 | return root 337 | 338 | 339 | def get_config_from_root(root): 340 | """Read the project setup.cfg file to determine Versioneer config.""" 341 | # This might raise EnvironmentError (if setup.cfg is missing), or 342 | # configparser.NoSectionError (if it lacks a [versioneer] section), or 343 | # configparser.NoOptionError (if it lacks "VCS="). See the docstring at 344 | # the top of versioneer.py for instructions on writing your setup.cfg . 
345 | setup_cfg = os.path.join(root, "setup.cfg") 346 | parser = configparser.SafeConfigParser() 347 | with open(setup_cfg, "r") as f: 348 | parser.readfp(f) 349 | VCS = parser.get("versioneer", "VCS") # mandatory 350 | 351 | def get(parser, name): 352 | if parser.has_option("versioneer", name): 353 | return parser.get("versioneer", name) 354 | return None 355 | 356 | cfg = VersioneerConfig() 357 | cfg.VCS = VCS 358 | cfg.style = get(parser, "style") or "" 359 | cfg.versionfile_source = get(parser, "versionfile_source") 360 | cfg.versionfile_build = get(parser, "versionfile_build") 361 | cfg.tag_prefix = get(parser, "tag_prefix") 362 | if cfg.tag_prefix in ("''", '""'): 363 | cfg.tag_prefix = "" 364 | cfg.parentdir_prefix = get(parser, "parentdir_prefix") 365 | cfg.verbose = get(parser, "verbose") 366 | return cfg 367 | 368 | 369 | class NotThisMethod(Exception): 370 | """Exception raised if a method is not valid for the current scenario.""" 371 | 372 | 373 | # these dictionaries contain VCS-specific tools 374 | LONG_VERSION_PY = {} 375 | HANDLERS = {} # type: ignore 376 | 377 | 378 | def register_vcs_handler(vcs, method): # decorator 379 | """Decorator to mark a method as the handler for a particular VCS.""" 380 | 381 | def decorate(f): 382 | """Store f in HANDLERS[vcs][method].""" 383 | if vcs not in HANDLERS: 384 | HANDLERS[vcs] = {} 385 | HANDLERS[vcs][method] = f 386 | return f 387 | 388 | return decorate 389 | 390 | 391 | def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False, env=None): 392 | """Call the given command(s).""" 393 | assert isinstance(commands, list) 394 | p = None 395 | for c in commands: 396 | try: 397 | dispcmd = str([c] + args) 398 | # remember shell=False, so use git.cmd on windows, not just git 399 | p = subprocess.Popen( 400 | [c] + args, 401 | cwd=cwd, 402 | env=env, 403 | stdout=subprocess.PIPE, 404 | stderr=(subprocess.PIPE if hide_stderr else None), 405 | ) 406 | break 407 | except EnvironmentError: 408 | e = 
sys.exc_info()[1] 409 | if e.errno == errno.ENOENT: 410 | continue 411 | if verbose: 412 | print("unable to run %s" % dispcmd) 413 | print(e) 414 | return None, None 415 | else: 416 | if verbose: 417 | print("unable to find command, tried %s" % (commands,)) 418 | return None, None 419 | stdout = p.communicate()[0].strip() 420 | if sys.version_info[0] >= 3: 421 | stdout = stdout.decode() 422 | if p.returncode != 0: 423 | if verbose: 424 | print("unable to run %s (error)" % dispcmd) 425 | print("stdout was %s" % stdout) 426 | return None, p.returncode 427 | return stdout, p.returncode 428 | 429 | 430 | LONG_VERSION_PY[ 431 | "git" 432 | ] = ''' 433 | # This file helps to compute a version number in source trees obtained from 434 | # git-archive tarball (such as those provided by githubs download-from-tag 435 | # feature). Distribution tarballs (built by setup.py sdist) and build 436 | # directories (produced by setup.py build) will contain a much shorter file 437 | # that just contains the computed version number. 438 | 439 | # This file is released into the public domain. Generated by 440 | # versioneer-0.18 (https://github.com/warner/python-versioneer) 441 | 442 | """Git implementation of _version.py.""" 443 | 444 | import errno 445 | import os 446 | import re 447 | import subprocess 448 | import sys 449 | 450 | 451 | def get_keywords(): 452 | """Get the keywords needed to look up the version information.""" 453 | # these strings will be replaced by git during git-archive. 454 | # setup.py/versioneer.py will grep for the variable names, so they must 455 | # each be defined on a line of their own. _version.py will just call 456 | # get_keywords(). 
457 | git_refnames = "%(DOLLAR)sFormat:%%d%(DOLLAR)s" 458 | git_full = "%(DOLLAR)sFormat:%%H%(DOLLAR)s" 459 | git_date = "%(DOLLAR)sFormat:%%ci%(DOLLAR)s" 460 | keywords = {"refnames": git_refnames, "full": git_full, "date": git_date} 461 | return keywords 462 | 463 | 464 | class VersioneerConfig: 465 | """Container for Versioneer configuration parameters.""" 466 | 467 | 468 | def get_config(): 469 | """Create, populate and return the VersioneerConfig() object.""" 470 | # these strings are filled in when 'setup.py versioneer' creates 471 | # _version.py 472 | cfg = VersioneerConfig() 473 | cfg.VCS = "git" 474 | cfg.style = "%(STYLE)s" 475 | cfg.tag_prefix = "%(TAG_PREFIX)s" 476 | cfg.parentdir_prefix = "%(PARENTDIR_PREFIX)s" 477 | cfg.versionfile_source = "%(VERSIONFILE_SOURCE)s" 478 | cfg.verbose = False 479 | return cfg 480 | 481 | 482 | class NotThisMethod(Exception): 483 | """Exception raised if a method is not valid for the current scenario.""" 484 | 485 | 486 | LONG_VERSION_PY = {} 487 | HANDLERS = {} 488 | 489 | 490 | def register_vcs_handler(vcs, method): # decorator 491 | """Decorator to mark a method as the handler for a particular VCS.""" 492 | def decorate(f): 493 | """Store f in HANDLERS[vcs][method].""" 494 | if vcs not in HANDLERS: 495 | HANDLERS[vcs] = {} 496 | HANDLERS[vcs][method] = f 497 | return f 498 | return decorate 499 | 500 | 501 | def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False, 502 | env=None): 503 | """Call the given command(s).""" 504 | assert isinstance(commands, list) 505 | p = None 506 | for c in commands: 507 | try: 508 | dispcmd = str([c] + args) 509 | # remember shell=False, so use git.cmd on windows, not just git 510 | p = subprocess.Popen([c] + args, cwd=cwd, env=env, 511 | stdout=subprocess.PIPE, 512 | stderr=(subprocess.PIPE if hide_stderr 513 | else None)) 514 | break 515 | except EnvironmentError: 516 | e = sys.exc_info()[1] 517 | if e.errno == errno.ENOENT: 518 | continue 519 | if verbose: 520 | 
print("unable to run %%s" %% dispcmd) 521 | print(e) 522 | return None, None 523 | else: 524 | if verbose: 525 | print("unable to find command, tried %%s" %% (commands,)) 526 | return None, None 527 | stdout = p.communicate()[0].strip() 528 | if sys.version_info[0] >= 3: 529 | stdout = stdout.decode() 530 | if p.returncode != 0: 531 | if verbose: 532 | print("unable to run %%s (error)" %% dispcmd) 533 | print("stdout was %%s" %% stdout) 534 | return None, p.returncode 535 | return stdout, p.returncode 536 | 537 | 538 | def versions_from_parentdir(parentdir_prefix, root, verbose): 539 | """Try to determine the version from the parent directory name. 540 | 541 | Source tarballs conventionally unpack into a directory that includes both 542 | the project name and a version string. We will also support searching up 543 | two directory levels for an appropriately named parent directory 544 | """ 545 | rootdirs = [] 546 | 547 | for i in range(3): 548 | dirname = os.path.basename(root) 549 | if dirname.startswith(parentdir_prefix): 550 | return {"version": dirname[len(parentdir_prefix):], 551 | "full-revisionid": None, 552 | "dirty": False, "error": None, "date": None} 553 | else: 554 | rootdirs.append(root) 555 | root = os.path.dirname(root) # up a level 556 | 557 | if verbose: 558 | print("Tried directories %%s but none started with prefix %%s" %% 559 | (str(rootdirs), parentdir_prefix)) 560 | raise NotThisMethod("rootdir doesn't start with parentdir_prefix") 561 | 562 | 563 | @register_vcs_handler("git", "get_keywords") 564 | def git_get_keywords(versionfile_abs): 565 | """Extract version information from the given file.""" 566 | # the code embedded in _version.py can just fetch the value of these 567 | # keywords. When used from setup.py, we don't want to import _version.py, 568 | # so we do it with a regexp instead. This function is not used from 569 | # _version.py. 
570 | keywords = {} 571 | try: 572 | f = open(versionfile_abs, "r") 573 | for line in f.readlines(): 574 | if line.strip().startswith("git_refnames ="): 575 | mo = re.search(r'=\s*"(.*)"', line) 576 | if mo: 577 | keywords["refnames"] = mo.group(1) 578 | if line.strip().startswith("git_full ="): 579 | mo = re.search(r'=\s*"(.*)"', line) 580 | if mo: 581 | keywords["full"] = mo.group(1) 582 | if line.strip().startswith("git_date ="): 583 | mo = re.search(r'=\s*"(.*)"', line) 584 | if mo: 585 | keywords["date"] = mo.group(1) 586 | f.close() 587 | except EnvironmentError: 588 | pass 589 | return keywords 590 | 591 | 592 | @register_vcs_handler("git", "keywords") 593 | def git_versions_from_keywords(keywords, tag_prefix, verbose): 594 | """Get version information from git keywords.""" 595 | if not keywords: 596 | raise NotThisMethod("no keywords at all, weird") 597 | date = keywords.get("date") 598 | if date is not None: 599 | # git-2.2.0 added "%%cI", which expands to an ISO-8601 -compliant 600 | # datestamp. However we prefer "%%ci" (which expands to an "ISO-8601 601 | # -like" string, which we must then edit to make compliant), because 602 | # it's been around since git-1.5.3, and it's too difficult to 603 | # discover which version we're using, or to work around using an 604 | # older one. 605 | date = date.strip().replace(" ", "T", 1).replace(" ", "", 1) 606 | refnames = keywords["refnames"].strip() 607 | if refnames.startswith("$Format"): 608 | if verbose: 609 | print("keywords are unexpanded, not using") 610 | raise NotThisMethod("unexpanded keywords, not a git-archive tarball") 611 | refs = set([r.strip() for r in refnames.strip("()").split(",")]) 612 | # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of 613 | # just "foo-1.0". If we see a "tag: " prefix, prefer those. 614 | TAG = "tag: " 615 | tags = set([r[len(TAG):] for r in refs if r.startswith(TAG)]) 616 | if not tags: 617 | # Either we're using git < 1.8.3, or there really are no tags. 
We use 618 | # a heuristic: assume all version tags have a digit. The old git %%d 619 | # expansion behaves like git log --decorate=short and strips out the 620 | # refs/heads/ and refs/tags/ prefixes that would let us distinguish 621 | # between branches and tags. By ignoring refnames without digits, we 622 | # filter out many common branch names like "release" and 623 | # "stabilization", as well as "HEAD" and "master". 624 | tags = set([r for r in refs if re.search(r'\d', r)]) 625 | if verbose: 626 | print("discarding '%%s', no digits" %% ",".join(refs - tags)) 627 | if verbose: 628 | print("likely tags: %%s" %% ",".join(sorted(tags))) 629 | for ref in sorted(tags): 630 | # sorting will prefer e.g. "2.0" over "2.0rc1" 631 | if ref.startswith(tag_prefix): 632 | r = ref[len(tag_prefix):] 633 | if verbose: 634 | print("picking %%s" %% r) 635 | return {"version": r, 636 | "full-revisionid": keywords["full"].strip(), 637 | "dirty": False, "error": None, 638 | "date": date} 639 | # no suitable tags, so version is "0+unknown", but full hex is still there 640 | if verbose: 641 | print("no suitable tags, using unknown + full revision id") 642 | return {"version": "0+unknown", 643 | "full-revisionid": keywords["full"].strip(), 644 | "dirty": False, "error": "no suitable tags", "date": None} 645 | 646 | 647 | @register_vcs_handler("git", "pieces_from_vcs") 648 | def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command): 649 | """Get version from 'git describe' in the root of the source tree. 650 | 651 | This only gets called if the git-archive 'subst' keywords were *not* 652 | expanded, and _version.py hasn't already been rewritten with a short 653 | version string, meaning we're inside a checked out source tree. 
654 | """ 655 | GITS = ["git"] 656 | if sys.platform == "win32": 657 | GITS = ["git.cmd", "git.exe"] 658 | 659 | out, rc = run_command(GITS, ["rev-parse", "--git-dir"], cwd=root, 660 | hide_stderr=True) 661 | if rc != 0: 662 | if verbose: 663 | print("Directory %%s not under git control" %% root) 664 | raise NotThisMethod("'git rev-parse --git-dir' returned error") 665 | 666 | # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty] 667 | # if there isn't one, this yields HEX[-dirty] (no NUM) 668 | describe_out, rc = run_command(GITS, ["describe", "--tags", "--dirty", 669 | "--always", "--long", 670 | "--match", "%%s*" %% tag_prefix], 671 | cwd=root) 672 | # --long was added in git-1.5.5 673 | if describe_out is None: 674 | raise NotThisMethod("'git describe' failed") 675 | describe_out = describe_out.strip() 676 | full_out, rc = run_command(GITS, ["rev-parse", "HEAD"], cwd=root) 677 | if full_out is None: 678 | raise NotThisMethod("'git rev-parse' failed") 679 | full_out = full_out.strip() 680 | 681 | pieces = {} 682 | pieces["long"] = full_out 683 | pieces["short"] = full_out[:7] # maybe improved later 684 | pieces["error"] = None 685 | 686 | # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty] 687 | # TAG might have hyphens. 688 | git_describe = describe_out 689 | 690 | # look for -dirty suffix 691 | dirty = git_describe.endswith("-dirty") 692 | pieces["dirty"] = dirty 693 | if dirty: 694 | git_describe = git_describe[:git_describe.rindex("-dirty")] 695 | 696 | # now we have TAG-NUM-gHEX or HEX 697 | 698 | if "-" in git_describe: 699 | # TAG-NUM-gHEX 700 | mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe) 701 | if not mo: 702 | # unparseable. Maybe git-describe is misbehaving? 
703 | pieces["error"] = ("unable to parse git-describe output: '%%s'" 704 | %% describe_out) 705 | return pieces 706 | 707 | # tag 708 | full_tag = mo.group(1) 709 | if not full_tag.startswith(tag_prefix): 710 | if verbose: 711 | fmt = "tag '%%s' doesn't start with prefix '%%s'" 712 | print(fmt %% (full_tag, tag_prefix)) 713 | pieces["error"] = ("tag '%%s' doesn't start with prefix '%%s'" 714 | %% (full_tag, tag_prefix)) 715 | return pieces 716 | pieces["closest-tag"] = full_tag[len(tag_prefix):] 717 | 718 | # distance: number of commits since tag 719 | pieces["distance"] = int(mo.group(2)) 720 | 721 | # commit: short hex revision ID 722 | pieces["short"] = mo.group(3) 723 | 724 | else: 725 | # HEX: no tags 726 | pieces["closest-tag"] = None 727 | count_out, rc = run_command(GITS, ["rev-list", "HEAD", "--count"], 728 | cwd=root) 729 | pieces["distance"] = int(count_out) # total number of commits 730 | 731 | # commit date: see ISO-8601 comment in git_versions_from_keywords() 732 | date = run_command(GITS, ["show", "-s", "--format=%%ci", "HEAD"], 733 | cwd=root)[0].strip() 734 | pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1) 735 | 736 | return pieces 737 | 738 | 739 | def plus_or_dot(pieces): 740 | """Return a + if we don't already have one, else return a .""" 741 | if "+" in pieces.get("closest-tag", ""): 742 | return "." 743 | return "+" 744 | 745 | 746 | def render_pep440(pieces): 747 | """Build up version string, with post-release "local version identifier". 748 | 749 | Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you 750 | get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty 751 | 752 | Exceptions: 753 | 1: no tags. git_describe was just HEX. 
0+untagged.DISTANCE.gHEX[.dirty] 754 | """ 755 | if pieces["closest-tag"]: 756 | rendered = pieces["closest-tag"] 757 | if pieces["distance"] or pieces["dirty"]: 758 | rendered += plus_or_dot(pieces) 759 | rendered += "%%d.g%%s" %% (pieces["distance"], pieces["short"]) 760 | if pieces["dirty"]: 761 | rendered += ".dirty" 762 | else: 763 | # exception #1 764 | rendered = "0+untagged.%%d.g%%s" %% (pieces["distance"], 765 | pieces["short"]) 766 | if pieces["dirty"]: 767 | rendered += ".dirty" 768 | return rendered 769 | 770 | 771 | def render_pep440_pre(pieces): 772 | """TAG[.post.devDISTANCE] -- No -dirty. 773 | 774 | Exceptions: 775 | 1: no tags. 0.post.devDISTANCE 776 | """ 777 | if pieces["closest-tag"]: 778 | rendered = pieces["closest-tag"] 779 | if pieces["distance"]: 780 | rendered += ".post.dev%%d" %% pieces["distance"] 781 | else: 782 | # exception #1 783 | rendered = "0.post.dev%%d" %% pieces["distance"] 784 | return rendered 785 | 786 | 787 | def render_pep440_post(pieces): 788 | """TAG[.postDISTANCE[.dev0]+gHEX] . 789 | 790 | The ".dev0" means dirty. Note that .dev0 sorts backwards 791 | (a dirty tree will appear "older" than the corresponding clean one), 792 | but you shouldn't be releasing software with -dirty anyways. 793 | 794 | Exceptions: 795 | 1: no tags. 0.postDISTANCE[.dev0] 796 | """ 797 | if pieces["closest-tag"]: 798 | rendered = pieces["closest-tag"] 799 | if pieces["distance"] or pieces["dirty"]: 800 | rendered += ".post%%d" %% pieces["distance"] 801 | if pieces["dirty"]: 802 | rendered += ".dev0" 803 | rendered += plus_or_dot(pieces) 804 | rendered += "g%%s" %% pieces["short"] 805 | else: 806 | # exception #1 807 | rendered = "0.post%%d" %% pieces["distance"] 808 | if pieces["dirty"]: 809 | rendered += ".dev0" 810 | rendered += "+g%%s" %% pieces["short"] 811 | return rendered 812 | 813 | 814 | def render_pep440_old(pieces): 815 | """TAG[.postDISTANCE[.dev0]] . 816 | 817 | The ".dev0" means dirty. 818 | 819 | Eexceptions: 820 | 1: no tags. 
0.postDISTANCE[.dev0] 821 | """ 822 | if pieces["closest-tag"]: 823 | rendered = pieces["closest-tag"] 824 | if pieces["distance"] or pieces["dirty"]: 825 | rendered += ".post%%d" %% pieces["distance"] 826 | if pieces["dirty"]: 827 | rendered += ".dev0" 828 | else: 829 | # exception #1 830 | rendered = "0.post%%d" %% pieces["distance"] 831 | if pieces["dirty"]: 832 | rendered += ".dev0" 833 | return rendered 834 | 835 | 836 | def render_git_describe(pieces): 837 | """TAG[-DISTANCE-gHEX][-dirty]. 838 | 839 | Like 'git describe --tags --dirty --always'. 840 | 841 | Exceptions: 842 | 1: no tags. HEX[-dirty] (note: no 'g' prefix) 843 | """ 844 | if pieces["closest-tag"]: 845 | rendered = pieces["closest-tag"] 846 | if pieces["distance"]: 847 | rendered += "-%%d-g%%s" %% (pieces["distance"], pieces["short"]) 848 | else: 849 | # exception #1 850 | rendered = pieces["short"] 851 | if pieces["dirty"]: 852 | rendered += "-dirty" 853 | return rendered 854 | 855 | 856 | def render_git_describe_long(pieces): 857 | """TAG-DISTANCE-gHEX[-dirty]. 858 | 859 | Like 'git describe --tags --dirty --always -long'. 860 | The distance/hash is unconditional. 861 | 862 | Exceptions: 863 | 1: no tags. 
HEX[-dirty] (note: no 'g' prefix) 864 | """ 865 | if pieces["closest-tag"]: 866 | rendered = pieces["closest-tag"] 867 | rendered += "-%%d-g%%s" %% (pieces["distance"], pieces["short"]) 868 | else: 869 | # exception #1 870 | rendered = pieces["short"] 871 | if pieces["dirty"]: 872 | rendered += "-dirty" 873 | return rendered 874 | 875 | 876 | def render(pieces, style): 877 | """Render the given version pieces into the requested style.""" 878 | if pieces["error"]: 879 | return {"version": "unknown", 880 | "full-revisionid": pieces.get("long"), 881 | "dirty": None, 882 | "error": pieces["error"], 883 | "date": None} 884 | 885 | if not style or style == "default": 886 | style = "pep440" # the default 887 | 888 | if style == "pep440": 889 | rendered = render_pep440(pieces) 890 | elif style == "pep440-pre": 891 | rendered = render_pep440_pre(pieces) 892 | elif style == "pep440-post": 893 | rendered = render_pep440_post(pieces) 894 | elif style == "pep440-old": 895 | rendered = render_pep440_old(pieces) 896 | elif style == "git-describe": 897 | rendered = render_git_describe(pieces) 898 | elif style == "git-describe-long": 899 | rendered = render_git_describe_long(pieces) 900 | else: 901 | raise ValueError("unknown style '%%s'" %% style) 902 | 903 | return {"version": rendered, "full-revisionid": pieces["long"], 904 | "dirty": pieces["dirty"], "error": None, 905 | "date": pieces.get("date")} 906 | 907 | 908 | def get_versions(): 909 | """Get version information or return default if unable to do so.""" 910 | # I am in _version.py, which lives at ROOT/VERSIONFILE_SOURCE. If we have 911 | # __file__, we can work backwards from there to the root. Some 912 | # py2exe/bbfreeze/non-CPython implementations don't do __file__, in which 913 | # case we can only use expanded keywords. 
914 | 915 | cfg = get_config() 916 | verbose = cfg.verbose 917 | 918 | try: 919 | return git_versions_from_keywords(get_keywords(), cfg.tag_prefix, 920 | verbose) 921 | except NotThisMethod: 922 | pass 923 | 924 | try: 925 | root = os.path.realpath(__file__) 926 | # versionfile_source is the relative path from the top of the source 927 | # tree (where the .git directory might live) to this file. Invert 928 | # this to find the root from __file__. 929 | for i in cfg.versionfile_source.split('/'): 930 | root = os.path.dirname(root) 931 | except NameError: 932 | return {"version": "0+unknown", "full-revisionid": None, 933 | "dirty": None, 934 | "error": "unable to find root of source tree", 935 | "date": None} 936 | 937 | try: 938 | pieces = git_pieces_from_vcs(cfg.tag_prefix, root, verbose) 939 | return render(pieces, cfg.style) 940 | except NotThisMethod: 941 | pass 942 | 943 | try: 944 | if cfg.parentdir_prefix: 945 | return versions_from_parentdir(cfg.parentdir_prefix, root, verbose) 946 | except NotThisMethod: 947 | pass 948 | 949 | return {"version": "0+unknown", "full-revisionid": None, 950 | "dirty": None, 951 | "error": "unable to compute version", "date": None} 952 | ''' 953 | 954 | 955 | @register_vcs_handler("git", "get_keywords") 956 | def git_get_keywords(versionfile_abs): 957 | """Extract version information from the given file.""" 958 | # the code embedded in _version.py can just fetch the value of these 959 | # keywords. When used from setup.py, we don't want to import _version.py, 960 | # so we do it with a regexp instead. This function is not used from 961 | # _version.py. 
962 | keywords = {} 963 | try: 964 | f = open(versionfile_abs, "r") 965 | for line in f.readlines(): 966 | if line.strip().startswith("git_refnames ="): 967 | mo = re.search(r'=\s*"(.*)"', line) 968 | if mo: 969 | keywords["refnames"] = mo.group(1) 970 | if line.strip().startswith("git_full ="): 971 | mo = re.search(r'=\s*"(.*)"', line) 972 | if mo: 973 | keywords["full"] = mo.group(1) 974 | if line.strip().startswith("git_date ="): 975 | mo = re.search(r'=\s*"(.*)"', line) 976 | if mo: 977 | keywords["date"] = mo.group(1) 978 | f.close() 979 | except EnvironmentError: 980 | pass 981 | return keywords 982 | 983 | 984 | @register_vcs_handler("git", "keywords") 985 | def git_versions_from_keywords(keywords, tag_prefix, verbose): 986 | """Get version information from git keywords.""" 987 | if not keywords: 988 | raise NotThisMethod("no keywords at all, weird") 989 | date = keywords.get("date") 990 | if date is not None: 991 | # git-2.2.0 added "%cI", which expands to an ISO-8601 -compliant 992 | # datestamp. However we prefer "%ci" (which expands to an "ISO-8601 993 | # -like" string, which we must then edit to make compliant), because 994 | # it's been around since git-1.5.3, and it's too difficult to 995 | # discover which version we're using, or to work around using an 996 | # older one. 997 | date = date.strip().replace(" ", "T", 1).replace(" ", "", 1) 998 | refnames = keywords["refnames"].strip() 999 | if refnames.startswith("$Format"): 1000 | if verbose: 1001 | print("keywords are unexpanded, not using") 1002 | raise NotThisMethod("unexpanded keywords, not a git-archive tarball") 1003 | refs = set([r.strip() for r in refnames.strip("()").split(",")]) 1004 | # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of 1005 | # just "foo-1.0". If we see a "tag: " prefix, prefer those. 
1006 | TAG = "tag: " 1007 | tags = set([r[len(TAG) :] for r in refs if r.startswith(TAG)]) 1008 | if not tags: 1009 | # Either we're using git < 1.8.3, or there really are no tags. We use 1010 | # a heuristic: assume all version tags have a digit. The old git %d 1011 | # expansion behaves like git log --decorate=short and strips out the 1012 | # refs/heads/ and refs/tags/ prefixes that would let us distinguish 1013 | # between branches and tags. By ignoring refnames without digits, we 1014 | # filter out many common branch names like "release" and 1015 | # "stabilization", as well as "HEAD" and "master". 1016 | tags = set([r for r in refs if re.search(r"\d", r)]) 1017 | if verbose: 1018 | print("discarding '%s', no digits" % ",".join(refs - tags)) 1019 | if verbose: 1020 | print("likely tags: %s" % ",".join(sorted(tags))) 1021 | for ref in sorted(tags): 1022 | # sorting will prefer e.g. "2.0" over "2.0rc1" 1023 | if ref.startswith(tag_prefix): 1024 | r = ref[len(tag_prefix) :] 1025 | if verbose: 1026 | print("picking %s" % r) 1027 | return { 1028 | "version": r, 1029 | "full-revisionid": keywords["full"].strip(), 1030 | "dirty": False, 1031 | "error": None, 1032 | "date": date, 1033 | } 1034 | # no suitable tags, so version is "0+unknown", but full hex is still there 1035 | if verbose: 1036 | print("no suitable tags, using unknown + full revision id") 1037 | return { 1038 | "version": "0+unknown", 1039 | "full-revisionid": keywords["full"].strip(), 1040 | "dirty": False, 1041 | "error": "no suitable tags", 1042 | "date": None, 1043 | } 1044 | 1045 | 1046 | @register_vcs_handler("git", "pieces_from_vcs") 1047 | def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command): 1048 | """Get version from 'git describe' in the root of the source tree. 
1049 | 1050 | This only gets called if the git-archive 'subst' keywords were *not* 1051 | expanded, and _version.py hasn't already been rewritten with a short 1052 | version string, meaning we're inside a checked out source tree. 1053 | """ 1054 | GITS = ["git"] 1055 | if sys.platform == "win32": 1056 | GITS = ["git.cmd", "git.exe"] 1057 | 1058 | out, rc = run_command(GITS, ["rev-parse", "--git-dir"], cwd=root, hide_stderr=True) 1059 | if rc != 0: 1060 | if verbose: 1061 | print("Directory %s not under git control" % root) 1062 | raise NotThisMethod("'git rev-parse --git-dir' returned error") 1063 | 1064 | # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty] 1065 | # if there isn't one, this yields HEX[-dirty] (no NUM) 1066 | describe_out, rc = run_command( 1067 | GITS, 1068 | [ 1069 | "describe", 1070 | "--tags", 1071 | "--dirty", 1072 | "--always", 1073 | "--long", 1074 | "--match", 1075 | "%s*" % tag_prefix, 1076 | ], 1077 | cwd=root, 1078 | ) 1079 | # --long was added in git-1.5.5 1080 | if describe_out is None: 1081 | raise NotThisMethod("'git describe' failed") 1082 | describe_out = describe_out.strip() 1083 | full_out, rc = run_command(GITS, ["rev-parse", "HEAD"], cwd=root) 1084 | if full_out is None: 1085 | raise NotThisMethod("'git rev-parse' failed") 1086 | full_out = full_out.strip() 1087 | 1088 | pieces = {} 1089 | pieces["long"] = full_out 1090 | pieces["short"] = full_out[:7] # maybe improved later 1091 | pieces["error"] = None 1092 | 1093 | # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty] 1094 | # TAG might have hyphens. 
1095 | git_describe = describe_out 1096 | 1097 | # look for -dirty suffix 1098 | dirty = git_describe.endswith("-dirty") 1099 | pieces["dirty"] = dirty 1100 | if dirty: 1101 | git_describe = git_describe[: git_describe.rindex("-dirty")] 1102 | 1103 | # now we have TAG-NUM-gHEX or HEX 1104 | 1105 | if "-" in git_describe: 1106 | # TAG-NUM-gHEX 1107 | mo = re.search(r"^(.+)-(\d+)-g([0-9a-f]+)$", git_describe) 1108 | if not mo: 1109 | # unparseable. Maybe git-describe is misbehaving? 1110 | pieces["error"] = "unable to parse git-describe output: '%s'" % describe_out 1111 | return pieces 1112 | 1113 | # tag 1114 | full_tag = mo.group(1) 1115 | if not full_tag.startswith(tag_prefix): 1116 | if verbose: 1117 | fmt = "tag '%s' doesn't start with prefix '%s'" 1118 | print(fmt % (full_tag, tag_prefix)) 1119 | pieces["error"] = "tag '%s' doesn't start with prefix '%s'" % ( 1120 | full_tag, 1121 | tag_prefix, 1122 | ) 1123 | return pieces 1124 | pieces["closest-tag"] = full_tag[len(tag_prefix) :] 1125 | 1126 | # distance: number of commits since tag 1127 | pieces["distance"] = int(mo.group(2)) 1128 | 1129 | # commit: short hex revision ID 1130 | pieces["short"] = mo.group(3) 1131 | 1132 | else: 1133 | # HEX: no tags 1134 | pieces["closest-tag"] = None 1135 | count_out, rc = run_command(GITS, ["rev-list", "HEAD", "--count"], cwd=root) 1136 | pieces["distance"] = int(count_out) # total number of commits 1137 | 1138 | # commit date: see ISO-8601 comment in git_versions_from_keywords() 1139 | date = run_command(GITS, ["show", "-s", "--format=%ci", "HEAD"], cwd=root)[ 1140 | 0 1141 | ].strip() 1142 | pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1) 1143 | 1144 | return pieces 1145 | 1146 | 1147 | def do_vcs_install(manifest_in, versionfile_source, ipy): 1148 | """Git-specific installation logic for Versioneer. 1149 | 1150 | For Git, this means creating/changing .gitattributes to mark _version.py 1151 | for export-subst keyword substitution. 
1152 | """ 1153 | GITS = ["git"] 1154 | if sys.platform == "win32": 1155 | GITS = ["git.cmd", "git.exe"] 1156 | files = [manifest_in, versionfile_source] 1157 | if ipy: 1158 | files.append(ipy) 1159 | try: 1160 | me = __file__ 1161 | if me.endswith(".pyc") or me.endswith(".pyo"): 1162 | me = os.path.splitext(me)[0] + ".py" 1163 | versioneer_file = os.path.relpath(me) 1164 | except NameError: 1165 | versioneer_file = "versioneer.py" 1166 | files.append(versioneer_file) 1167 | present = False 1168 | try: 1169 | f = open(".gitattributes", "r") 1170 | for line in f.readlines(): 1171 | if line.strip().startswith(versionfile_source): 1172 | if "export-subst" in line.strip().split()[1:]: 1173 | present = True 1174 | f.close() 1175 | except EnvironmentError: 1176 | pass 1177 | if not present: 1178 | f = open(".gitattributes", "a+") 1179 | f.write("%s export-subst\n" % versionfile_source) 1180 | f.close() 1181 | files.append(".gitattributes") 1182 | run_command(GITS, ["add", "--"] + files) 1183 | 1184 | 1185 | def versions_from_parentdir(parentdir_prefix, root, verbose): 1186 | """Try to determine the version from the parent directory name. 1187 | 1188 | Source tarballs conventionally unpack into a directory that includes both 1189 | the project name and a version string. 
We will also support searching up 1190 | two directory levels for an appropriately named parent directory 1191 | """ 1192 | rootdirs = [] 1193 | 1194 | for i in range(3): 1195 | dirname = os.path.basename(root) 1196 | if dirname.startswith(parentdir_prefix): 1197 | return { 1198 | "version": dirname[len(parentdir_prefix) :], 1199 | "full-revisionid": None, 1200 | "dirty": False, 1201 | "error": None, 1202 | "date": None, 1203 | } 1204 | else: 1205 | rootdirs.append(root) 1206 | root = os.path.dirname(root) # up a level 1207 | 1208 | if verbose: 1209 | print( 1210 | "Tried directories %s but none started with prefix %s" 1211 | % (str(rootdirs), parentdir_prefix) 1212 | ) 1213 | raise NotThisMethod("rootdir doesn't start with parentdir_prefix") 1214 | 1215 | 1216 | SHORT_VERSION_PY = """ 1217 | # This file was generated by 'versioneer.py' (0.18) from 1218 | # revision-control system data, or from the parent directory name of an 1219 | # unpacked source archive. Distribution tarballs contain a pre-generated copy 1220 | # of this file. 
1221 | 1222 | import json 1223 | 1224 | version_json = ''' 1225 | %s 1226 | ''' # END VERSION_JSON 1227 | 1228 | 1229 | def get_versions(): 1230 | return json.loads(version_json) 1231 | """ 1232 | 1233 | 1234 | def versions_from_file(filename): 1235 | """Try to determine the version from _version.py if present.""" 1236 | try: 1237 | with open(filename) as f: 1238 | contents = f.read() 1239 | except EnvironmentError: 1240 | raise NotThisMethod("unable to read _version.py") 1241 | mo = re.search( 1242 | r"version_json = '''\n(.*)''' # END VERSION_JSON", contents, re.M | re.S 1243 | ) 1244 | if not mo: 1245 | mo = re.search( 1246 | r"version_json = '''\r\n(.*)''' # END VERSION_JSON", contents, re.M | re.S 1247 | ) 1248 | if not mo: 1249 | raise NotThisMethod("no version_json in _version.py") 1250 | return json.loads(mo.group(1)) 1251 | 1252 | 1253 | def write_to_version_file(filename, versions): 1254 | """Write the given version number to the given _version.py file.""" 1255 | os.unlink(filename) 1256 | contents = json.dumps(versions, sort_keys=True, indent=1, separators=(",", ": ")) 1257 | with open(filename, "w") as f: 1258 | f.write(SHORT_VERSION_PY % contents) 1259 | 1260 | print("set %s to '%s'" % (filename, versions["version"])) 1261 | 1262 | 1263 | def plus_or_dot(pieces): 1264 | """Return a + if we don't already have one, else return a .""" 1265 | if "+" in pieces.get("closest-tag", ""): 1266 | return "." 1267 | return "+" 1268 | 1269 | 1270 | def render_pep440(pieces): 1271 | """Build up version string, with post-release "local version identifier". 1272 | 1273 | Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you 1274 | get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty 1275 | 1276 | Exceptions: 1277 | 1: no tags. git_describe was just HEX. 
0+untagged.DISTANCE.gHEX[.dirty] 1278 | """ 1279 | if pieces["closest-tag"]: 1280 | rendered = pieces["closest-tag"] 1281 | if pieces["distance"] or pieces["dirty"]: 1282 | rendered += plus_or_dot(pieces) 1283 | rendered += "%d.g%s" % (pieces["distance"], pieces["short"]) 1284 | if pieces["dirty"]: 1285 | rendered += ".dirty" 1286 | else: 1287 | # exception #1 1288 | rendered = "0+untagged.%d.g%s" % (pieces["distance"], pieces["short"]) 1289 | if pieces["dirty"]: 1290 | rendered += ".dirty" 1291 | return rendered 1292 | 1293 | 1294 | def render_pep440_pre(pieces): 1295 | """TAG[.post.devDISTANCE] -- No -dirty. 1296 | 1297 | Exceptions: 1298 | 1: no tags. 0.post.devDISTANCE 1299 | """ 1300 | if pieces["closest-tag"]: 1301 | rendered = pieces["closest-tag"] 1302 | if pieces["distance"]: 1303 | rendered += ".post.dev%d" % pieces["distance"] 1304 | else: 1305 | # exception #1 1306 | rendered = "0.post.dev%d" % pieces["distance"] 1307 | return rendered 1308 | 1309 | 1310 | def render_pep440_post(pieces): 1311 | """TAG[.postDISTANCE[.dev0]+gHEX] . 1312 | 1313 | The ".dev0" means dirty. Note that .dev0 sorts backwards 1314 | (a dirty tree will appear "older" than the corresponding clean one), 1315 | but you shouldn't be releasing software with -dirty anyways. 1316 | 1317 | Exceptions: 1318 | 1: no tags. 0.postDISTANCE[.dev0] 1319 | """ 1320 | if pieces["closest-tag"]: 1321 | rendered = pieces["closest-tag"] 1322 | if pieces["distance"] or pieces["dirty"]: 1323 | rendered += ".post%d" % pieces["distance"] 1324 | if pieces["dirty"]: 1325 | rendered += ".dev0" 1326 | rendered += plus_or_dot(pieces) 1327 | rendered += "g%s" % pieces["short"] 1328 | else: 1329 | # exception #1 1330 | rendered = "0.post%d" % pieces["distance"] 1331 | if pieces["dirty"]: 1332 | rendered += ".dev0" 1333 | rendered += "+g%s" % pieces["short"] 1334 | return rendered 1335 | 1336 | 1337 | def render_pep440_old(pieces): 1338 | """TAG[.postDISTANCE[.dev0]] . 1339 | 1340 | The ".dev0" means dirty. 
1341 | 1342 | Exceptions: 1343 | 1: no tags. 0.postDISTANCE[.dev0] 1344 | """ 1345 | if pieces["closest-tag"]: 1346 | rendered = pieces["closest-tag"] 1347 | if pieces["distance"] or pieces["dirty"]: 1348 | rendered += ".post%d" % pieces["distance"] 1349 | if pieces["dirty"]: 1350 | rendered += ".dev0" 1351 | else: 1352 | # exception #1 1353 | rendered = "0.post%d" % pieces["distance"] 1354 | if pieces["dirty"]: 1355 | rendered += ".dev0" 1356 | return rendered 1357 | 1358 | 1359 | def render_git_describe(pieces): 1360 | """TAG[-DISTANCE-gHEX][-dirty]. 1361 | 1362 | Like 'git describe --tags --dirty --always'. 1363 | 1364 | Exceptions: 1365 | 1: no tags. HEX[-dirty] (note: no 'g' prefix) 1366 | """ 1367 | if pieces["closest-tag"]: 1368 | rendered = pieces["closest-tag"] 1369 | if pieces["distance"]: 1370 | rendered += "-%d-g%s" % (pieces["distance"], pieces["short"]) 1371 | else: 1372 | # exception #1 1373 | rendered = pieces["short"] 1374 | if pieces["dirty"]: 1375 | rendered += "-dirty" 1376 | return rendered 1377 | 1378 | 1379 | def render_git_describe_long(pieces): 1380 | """TAG-DISTANCE-gHEX[-dirty]. 1381 | 1382 | Like 'git describe --tags --dirty --always --long'. 1383 | The distance/hash is unconditional. 1384 | 1385 | Exceptions: 1386 | 1: no tags.
HEX[-dirty] (note: no 'g' prefix) 1387 | """ 1388 | if pieces["closest-tag"]: 1389 | rendered = pieces["closest-tag"] 1390 | rendered += "-%d-g%s" % (pieces["distance"], pieces["short"]) 1391 | else: 1392 | # exception #1 1393 | rendered = pieces["short"] 1394 | if pieces["dirty"]: 1395 | rendered += "-dirty" 1396 | return rendered 1397 | 1398 | 1399 | def render(pieces, style): 1400 | """Render the given version pieces into the requested style.""" 1401 | if pieces["error"]: 1402 | return { 1403 | "version": "unknown", 1404 | "full-revisionid": pieces.get("long"), 1405 | "dirty": None, 1406 | "error": pieces["error"], 1407 | "date": None, 1408 | } 1409 | 1410 | if not style or style == "default": 1411 | style = "pep440" # the default 1412 | 1413 | if style == "pep440": 1414 | rendered = render_pep440(pieces) 1415 | elif style == "pep440-pre": 1416 | rendered = render_pep440_pre(pieces) 1417 | elif style == "pep440-post": 1418 | rendered = render_pep440_post(pieces) 1419 | elif style == "pep440-old": 1420 | rendered = render_pep440_old(pieces) 1421 | elif style == "git-describe": 1422 | rendered = render_git_describe(pieces) 1423 | elif style == "git-describe-long": 1424 | rendered = render_git_describe_long(pieces) 1425 | else: 1426 | raise ValueError("unknown style '%s'" % style) 1427 | 1428 | return { 1429 | "version": rendered, 1430 | "full-revisionid": pieces["long"], 1431 | "dirty": pieces["dirty"], 1432 | "error": None, 1433 | "date": pieces.get("date"), 1434 | } 1435 | 1436 | 1437 | class VersioneerBadRootError(Exception): 1438 | """The project root directory is unknown or missing key files.""" 1439 | 1440 | 1441 | def get_versions(verbose=False): 1442 | """Get the project version from whatever source is available. 1443 | 1444 | Returns dict with two keys: 'version' and 'full'. 
    """
    if "versioneer" in sys.modules:
        # see the discussion in cmdclass.py:get_cmdclass()
        del sys.modules["versioneer"]

    root = get_root()
    cfg = get_config_from_root(root)

    assert cfg.VCS is not None, "please set [versioneer]VCS= in setup.cfg"
    handlers = HANDLERS.get(cfg.VCS)
    assert handlers, "unrecognized VCS '%s'" % cfg.VCS
    verbose = verbose or cfg.verbose
    assert (
        cfg.versionfile_source is not None
    ), "please set versioneer.versionfile_source"
    assert cfg.tag_prefix is not None, "please set versioneer.tag_prefix"

    versionfile_abs = os.path.join(root, cfg.versionfile_source)

    # extract version from first of: _version.py, VCS command (e.g. 'git
    # describe'), parentdir. This is meant to work for developers using a
    # source checkout, for users of a tarball created by 'setup.py sdist',
    # and for users of a tarball/zipball created by 'git archive' or github's
    # download-from-tag feature or the equivalent in other VCSes.

    # Stage 1: expanded VCS keywords in the checked-in version file.
    # Each stage signals "not applicable" by raising NotThisMethod, which
    # falls through to the next stage.
    get_keywords_f = handlers.get("get_keywords")
    from_keywords_f = handlers.get("keywords")
    if get_keywords_f and from_keywords_f:
        try:
            keywords = get_keywords_f(versionfile_abs)
            ver = from_keywords_f(keywords, cfg.tag_prefix, verbose)
            if verbose:
                print("got version from expanded keyword %s" % ver)
            return ver
        except NotThisMethod:
            pass

    # Stage 2: a previously generated _version.py (e.g. inside an sdist).
    try:
        ver = versions_from_file(versionfile_abs)
        if verbose:
            print("got version from file %s %s" % (versionfile_abs, ver))
        return ver
    except NotThisMethod:
        pass

    # Stage 3: ask the VCS directly (e.g. 'git describe') and render the
    # resulting pieces with the configured style.
    from_vcs_f = handlers.get("pieces_from_vcs")
    if from_vcs_f:
        try:
            pieces = from_vcs_f(cfg.tag_prefix, root, verbose)
            ver = render(pieces, cfg.style)
            if verbose:
                print("got version from VCS %s" % ver)
            return ver
        except NotThisMethod:
            pass

    # Stage 4: parse the version out of the parent directory name
    # (tarballs unpacked as 'myproject-1.2.3/').
    try:
        if cfg.parentdir_prefix:
            ver = versions_from_parentdir(cfg.parentdir_prefix, root, verbose)
            if verbose:
                print("got version from parentdir %s" % ver)
            return ver
    except NotThisMethod:
        pass

    if verbose:
        print("unable to compute version")

    # All strategies failed: return a well-formed "unknown" result rather
    # than raising, so callers always get the same dict shape.
    return {
        "version": "0+unknown",
        "full-revisionid": None,
        "dirty": None,
        "error": "unable to compute version",
        "date": None,
    }


def get_version():
    """Get the short version string for this project."""
    # Convenience wrapper: callers that only need the PEP 440 string
    # (e.g. setup(version=...)) use this instead of the full dict.
    return get_versions()["version"]


def get_cmdclass():
    """Get the custom setuptools/distutils subclasses used by Versioneer.

    Returns a dict mapping command names ("version", "build_py", "sdist",
    and optionally "build_exe"/"py2exe") to Command subclasses, suitable
    for passing as setup(cmdclass=...).
    """
    if "versioneer" in sys.modules:
        del sys.modules["versioneer"]
        # this fixes the "python setup.py develop" case (also 'install' and
        # 'easy_install .'), in which subdependencies of the main project are
        # built (using setup.py bdist_egg) in the same python process. Assume
        # a main project A and a dependency B, which use different versions
        # of Versioneer. A's setup.py imports A's Versioneer, leaving it in
        # sys.modules by the time B's setup.py is executed, causing B to run
        # with the wrong versioneer. Setuptools wraps the sub-dep builds in a
        # sandbox that restores sys.modules to it's pre-build state, so the
        # parent is protected against the child's "import versioneer". By
        # removing ourselves from sys.modules here, before the child build
        # happens, we protect the child from the parent's versioneer too.
        # Also see https://github.com/warner/python-versioneer/issues/52

    cmds = {}

    # we add "version" to both distutils and setuptools
    from distutils.core import Command

    class cmd_version(Command):
        # 'python setup.py version': print the computed version and any error.
        description = "report generated version string"
        user_options = []
        boolean_options = []

        def initialize_options(self):
            pass

        def finalize_options(self):
            pass

        def run(self):
            vers = get_versions(verbose=True)
            print("Version: %s" % vers["version"])
            print(" full-revisionid: %s" % vers.get("full-revisionid"))
            print(" dirty: %s" % vers.get("dirty"))
            print(" date: %s" % vers.get("date"))
            if vers["error"]:
                print(" error: %s" % vers["error"])

    cmds["version"] = cmd_version

    # we override "build_py" in both distutils and setuptools
    #
    # most invocation pathways end up running build_py:
    #  distutils/build -> build_py
    #  distutils/install -> distutils/build ->..
    #  setuptools/bdist_wheel -> distutils/install ->..
    #  setuptools/bdist_egg -> distutils/install_lib -> build_py
    #  setuptools/install -> bdist_egg ->..
    #  setuptools/develop -> ?
    #  pip install:
    #   copies source tree to a tempdir before running egg_info/etc
    #   if .git isn't copied too, 'git describe' will fail
    #   then does setup.py bdist_wheel, or sometimes setup.py install
    #  setup.py egg_info -> ?

    # we override different "build_py" commands for both environments
    if "setuptools" in sys.modules:
        from setuptools.command.build_py import build_py as _build_py
    else:
        from distutils.command.build_py import build_py as _build_py

    class cmd_build_py(_build_py):
        # After the normal build, overwrite the built _version.py with a
        # static file recording the version computed at build time.
        def run(self):
            root = get_root()
            cfg = get_config_from_root(root)
            versions = get_versions()
            _build_py.run(self)
            # now locate _version.py in the new build/ directory and replace
            # it with an updated value
            if cfg.versionfile_build:
                target_versionfile = os.path.join(self.build_lib, cfg.versionfile_build)
                print("UPDATING %s" % target_versionfile)
                write_to_version_file(target_versionfile, versions)

    cmds["build_py"] = cmd_build_py

    if "cx_Freeze" in sys.modules:  # cx_freeze enabled?
        from cx_Freeze.dist import build_exe as _build_exe

        # nczeczulin reports that py2exe won't like the pep440-style string
        # as FILEVERSION, but it can be used for PRODUCTVERSION, e.g.
        # setup(console=[{
        #   "version": versioneer.get_version().split("+", 1)[0],  # FILEVERSION
        #   "product_version": versioneer.get_version(),
        #   ...

        class cmd_build_exe(_build_exe):
            # Freeze a static _version.py into the exe, then restore the
            # dynamic (template-based) version file afterwards.
            def run(self):
                root = get_root()
                cfg = get_config_from_root(root)
                versions = get_versions()
                target_versionfile = cfg.versionfile_source
                print("UPDATING %s" % target_versionfile)
                write_to_version_file(target_versionfile, versions)

                _build_exe.run(self)
                os.unlink(target_versionfile)
                # Rewrite the source version file from the LONG_VERSION_PY
                # template so the working tree keeps its dynamic version.
                with open(cfg.versionfile_source, "w") as f:
                    LONG = LONG_VERSION_PY[cfg.VCS]
                    f.write(
                        LONG
                        % {
                            "DOLLAR": "$",
                            "STYLE": cfg.style,
                            "TAG_PREFIX": cfg.tag_prefix,
                            "PARENTDIR_PREFIX": cfg.parentdir_prefix,
                            "VERSIONFILE_SOURCE": cfg.versionfile_source,
                        }
                    )

        cmds["build_exe"] = cmd_build_exe
        # cx_Freeze drives the build itself; the build_py override is not
        # used in that pathway.
        del cmds["build_py"]

    if "py2exe" in sys.modules:  # py2exe enabled?
        try:
            from py2exe.distutils_buildexe import py2exe as _py2exe  # py3
        except ImportError:
            from py2exe.build_exe import py2exe as _py2exe  # py2

        class cmd_py2exe(_py2exe):
            # Same freeze-then-restore dance as cmd_build_exe, for py2exe.
            def run(self):
                root = get_root()
                cfg = get_config_from_root(root)
                versions = get_versions()
                target_versionfile = cfg.versionfile_source
                print("UPDATING %s" % target_versionfile)
                write_to_version_file(target_versionfile, versions)

                _py2exe.run(self)
                os.unlink(target_versionfile)
                with open(cfg.versionfile_source, "w") as f:
                    LONG = LONG_VERSION_PY[cfg.VCS]
                    f.write(
                        LONG
                        % {
                            "DOLLAR": "$",
                            "STYLE": cfg.style,
                            "TAG_PREFIX": cfg.tag_prefix,
                            "PARENTDIR_PREFIX": cfg.parentdir_prefix,
                            "VERSIONFILE_SOURCE": cfg.versionfile_source,
                        }
                    )

        cmds["py2exe"] = cmd_py2exe

    # we override different "sdist" commands for both environments
    if "setuptools" in sys.modules:
        from setuptools.command.sdist import sdist as _sdist
    else:
        from distutils.command.sdist import sdist as _sdist

    class cmd_sdist(_sdist):
        def run(self):
            versions = get_versions()
            # Stash the computed versions for make_release_tree() below,
            # so both use the same snapshot.
            self._versioneer_generated_versions = versions
            # unless we update this, the command will keep using the old
            # version
            self.distribution.metadata.version = versions["version"]
            return _sdist.run(self)

        def make_release_tree(self, base_dir, files):
            root = get_root()
            cfg = get_config_from_root(root)
            _sdist.make_release_tree(self, base_dir, files)
            # now locate _version.py in the new base_dir directory
            # (remembering that it may be a hardlink) and replace it with an
            # updated value
            target_versionfile = os.path.join(base_dir, cfg.versionfile_source)
            print("UPDATING %s" % target_versionfile)
            write_to_version_file(
                target_versionfile, self._versioneer_generated_versions
            )

    cmds["sdist"] = cmd_sdist

    return cmds


# User-facing message printed when setup.cfg lacks a [versioneer] section.
# This is runtime output: do not reformat.
CONFIG_ERROR = """
setup.cfg is missing the necessary Versioneer configuration. You need
a section like:

 [versioneer]
 VCS = git
 style = pep440
 versionfile_source = src/myproject/_version.py
 versionfile_build = myproject/_version.py
 tag_prefix =
 parentdir_prefix = myproject-

You will also need to edit your setup.py to use the results:

 import versioneer
 setup(version=versioneer.get_version(),
       cmdclass=versioneer.get_cmdclass(),  ...)

Please read the docstring in ./versioneer.py for configuration instructions,
edit setup.cfg, and re-run the installer or 'python versioneer.py setup'.
"""

# Template appended to setup.cfg by do_setup() when no config exists.
SAMPLE_CONFIG = """
# See the docstring in versioneer.py for instructions. Note that you must
# re-run 'versioneer.py setup' after changing this section, and commit the
# resulting files.

[versioneer]
#VCS = git
#style = pep440
#versionfile_source =
#versionfile_build =
#tag_prefix =
#parentdir_prefix =

"""

# Snippet appended to the package __init__.py; also used as a membership
# test in do_setup() to avoid appending it twice.
INIT_PY_SNIPPET = """
from ._version import get_versions
__version__ = get_versions()['version']
del get_versions
"""


def do_setup():
    """Main VCS-independent setup function for installing Versioneer.

    Writes the generated _version.py, wires up the package __init__.py
    and MANIFEST.in, then applies VCS-specific changes. Returns 0 on
    success, 1 when setup.cfg lacks a usable [versioneer] section.
    """
    root = get_root()
    try:
        cfg = get_config_from_root(root)
    except (
        EnvironmentError,
        configparser.NoSectionError,
        configparser.NoOptionError,
    ) as e:
        # Missing file or missing [versioneer] section: append a sample
        # config so the user has something to edit. A NoOptionError means
        # the section exists but is incomplete, so no sample is appended.
        if isinstance(e, (EnvironmentError, configparser.NoSectionError)):
            print("Adding sample versioneer config to setup.cfg", file=sys.stderr)
            with open(os.path.join(root, "setup.cfg"), "a") as f:
                f.write(SAMPLE_CONFIG)
        print(CONFIG_ERROR, file=sys.stderr)
        return 1

    print(" creating %s" % cfg.versionfile_source)
    with open(cfg.versionfile_source, "w") as f:
        LONG = LONG_VERSION_PY[cfg.VCS]
        f.write(
            LONG
            % {
                "DOLLAR": "$",
                "STYLE": cfg.style,
                "TAG_PREFIX": cfg.tag_prefix,
                "PARENTDIR_PREFIX": cfg.parentdir_prefix,
                "VERSIONFILE_SOURCE": cfg.versionfile_source,
            }
        )

    # Ensure the package's __init__.py exposes __version__ via the snippet.
    ipy = os.path.join(os.path.dirname(cfg.versionfile_source), "__init__.py")
    if os.path.exists(ipy):
        try:
            with open(ipy, "r") as f:
                old = f.read()
        except EnvironmentError:
            old = ""
        if INIT_PY_SNIPPET not in old:
            print(" appending to %s" % ipy)
            with open(ipy, "a") as f:
                f.write(INIT_PY_SNIPPET)
        else:
            print(" %s unmodified" % ipy)
    else:
        print(" %s doesn't exist, ok" % ipy)
        ipy = None  # signals do_vcs_install() below that there is no __init__.py

    # Make sure both the top-level "versioneer.py" and versionfile_source
    # (PKG/_version.py, used by runtime code) are in MANIFEST.in, so
    # they'll be copied into source distributions. Pip won't be able to
    # install the package without this.
    manifest_in = os.path.join(root, "MANIFEST.in")
    simple_includes = set()
    try:
        with open(manifest_in, "r") as f:
            for line in f:
                if line.startswith("include "):
                    for include in line.split()[1:]:
                        simple_includes.add(include)
    except EnvironmentError:
        # No MANIFEST.in yet; the appends below will create it.
        pass
    # That doesn't cover everything MANIFEST.in can do
    # (http://docs.python.org/2/distutils/sourcedist.html#commands), so
    # it might give some false negatives. Appending redundant 'include'
    # lines is safe, though.
    if "versioneer.py" not in simple_includes:
        print(" appending 'versioneer.py' to MANIFEST.in")
        with open(manifest_in, "a") as f:
            f.write("include versioneer.py\n")
    else:
        print(" 'versioneer.py' already in MANIFEST.in")
    if cfg.versionfile_source not in simple_includes:
        print(
            " appending versionfile_source ('%s') to MANIFEST.in"
            % cfg.versionfile_source
        )
        with open(manifest_in, "a") as f:
            f.write("include %s\n" % cfg.versionfile_source)
    else:
        print(" versionfile_source already in MANIFEST.in")

    # Make VCS-specific changes. For git, this means creating/changing
    # .gitattributes to mark _version.py for export-subst keyword
    # substitution.
    do_vcs_install(manifest_in, cfg.versionfile_source, ipy)
    return 0


def scan_setup_py():
    """Validate the contents of setup.py against Versioneer's expectations.

    Returns the number of problems found (0 when setup.py looks correct).
    Detection is textual (substring matching), so it can misjudge unusual
    setup.py files — hence the "but I might be wrong" hedge in the output.
    """
    found = set()
    setters = False
    errors = 0
    with open("setup.py", "r") as f:
        for line in f.readlines():
            if "import versioneer" in line:
                found.add("import")
            if "versioneer.get_cmdclass()" in line:
                found.add("cmdclass")
            if "versioneer.get_version()" in line:
                found.add("get_version")
            # Old-style (pre-setup.cfg) configuration assignments.
            if "versioneer.VCS" in line:
                setters = True
            if "versioneer.versionfile_source" in line:
                setters = True
    if len(found) != 3:
        print("")
        print("Your setup.py appears to be missing some important items")
        print("(but I might be wrong). Please make sure it has something")
        print("roughly like the following:")
        print("")
        print(" import versioneer")
        print(" setup( version=versioneer.get_version(),")
        print(" cmdclass=versioneer.get_cmdclass(), ...)")
        print("")
        errors += 1
    if setters:
        print("You should remove lines like 'versioneer.VCS = ' and")
        print("'versioneer.versionfile_source = ' . This configuration")
        print("now lives in setup.cfg, and should be removed from setup.py")
        print("")
        errors += 1
    return errors


if __name__ == "__main__":
    # CLI entry point: 'python versioneer.py setup' installs Versioneer
    # into the current project and validates setup.py.
    cmd = sys.argv[1]
    if cmd == "setup":
        errors = do_setup()
        errors += scan_setup_py()
        if errors:
            sys.exit(1)