├── .gitattributes ├── .github ├── FUNDING.yml └── workflows │ ├── pypi.yml │ └── tests.yml ├── .gitignore ├── .pre-commit-config.yaml ├── .pylintrc ├── LICENSE.txt ├── MANIFEST.in ├── Makefile ├── README.md ├── examples └── account.py ├── requirements.txt ├── setup.cfg ├── setup.py ├── tests ├── __init__.py ├── test_benchmarks.py ├── test_core.py ├── test_match.py ├── test_more.py ├── test_utils.py ├── test_variable.py └── utils.py ├── tox.ini ├── unification ├── __init__.py ├── _version.py ├── core.py ├── dispatch.py ├── match.py ├── more.py ├── utils.py └── variable.py └── versioneer.py /.gitattributes: -------------------------------------------------------------------------------- 1 | unification/_version.py export-subst 2 | -------------------------------------------------------------------------------- /.github/FUNDING.yml: -------------------------------------------------------------------------------- 1 | github: [brandonwillard] 2 | -------------------------------------------------------------------------------- /.github/workflows/pypi.yml: -------------------------------------------------------------------------------- 1 | name: PyPI 2 | on: 3 | push: 4 | branches: 5 | - main 6 | - auto-release 7 | pull_request: 8 | branches: [main] 9 | release: 10 | types: [published] 11 | 12 | jobs: 13 | build: 14 | name: Build source distribution 15 | runs-on: ubuntu-latest 16 | steps: 17 | - uses: actions/checkout@v2 18 | with: 19 | fetch-depth: 0 20 | - uses: actions/setup-python@v2 21 | with: 22 | python-version: "3.8" 23 | - name: Build the sdist 24 | run: | 25 | python setup.py sdist 26 | - name: Check the sdist installs and imports 27 | run: | 28 | mkdir -p test-sdist 29 | cd test-sdist 30 | python -m venv venv-sdist 31 | venv-sdist/bin/python -m pip install ../dist/logical-unification-*.tar.gz 32 | - uses: actions/upload-artifact@v2 33 | with: 34 | name: artifact 35 | path: dist/* 36 | 37 | upload_pypi: 38 | name: Upload to PyPI on release 39 | needs: [build] 
40 | runs-on: ubuntu-latest 41 | if: github.event_name == 'release' && github.event.action == 'published' 42 | steps: 43 | - uses: actions/download-artifact@v4.1.7 44 | with: 45 | name: artifact 46 | path: dist 47 | - uses: pypa/gh-action-pypi-publish@master 48 | with: 49 | user: __token__ 50 | password: ${{ secrets.pypi_secret }} 51 | -------------------------------------------------------------------------------- /.github/workflows/tests.yml: -------------------------------------------------------------------------------- 1 | name: Tests 2 | 3 | on: 4 | push: 5 | branches: 6 | - main 7 | pull_request: 8 | branches: 9 | - main 10 | 11 | jobs: 12 | changes: 13 | name: "Check for changes" 14 | runs-on: ubuntu-latest 15 | outputs: 16 | changes: ${{ steps.changes.outputs.src }} 17 | steps: 18 | - uses: actions/checkout@v2 19 | with: 20 | fetch-depth: 0 21 | - uses: dorny/paths-filter@v2 22 | id: changes 23 | with: 24 | filters: | 25 | python: &python 26 | - 'unification/**/*.py' 27 | - 'tests/**/*.py' 28 | - '*.py' 29 | src: 30 | - *python 31 | - '.github/**/*.yml' 32 | - 'setup.cfg' 33 | - 'requirements.txt' 34 | - '.coveragerc' 35 | - '.pre-commit-config.yaml' 36 | 37 | style: 38 | name: Check code style 39 | needs: changes 40 | runs-on: ubuntu-latest 41 | if: ${{ needs.changes.outputs.changes == 'true' }} 42 | steps: 43 | - uses: actions/checkout@v2 44 | - uses: actions/setup-python@v2 45 | with: 46 | python-version: 3.8 47 | - uses: pre-commit/action@v2.0.0 48 | 49 | test: 50 | needs: 51 | - changes 52 | - style 53 | runs-on: ubuntu-latest 54 | if: ${{ needs.changes.outputs.changes == 'true' && needs.style.result == 'success' }} 55 | strategy: 56 | matrix: 57 | python-version: 58 | - 3.7 59 | - 3.8 60 | - 3.9 61 | - '3.10' 62 | - pypy3 63 | steps: 64 | - uses: actions/checkout@v2 65 | - uses: actions/setup-python@v2 66 | with: 67 | python-version: ${{ matrix.python-version }} 68 | - name: Install dependencies 69 | run: | 70 | python -m pip install --upgrade pip 71 
| if [ -f requirements.txt ]; then pip install -r requirements.txt; fi 72 | - name: Test with pytest 73 | run: | 74 | pytest -v tests/ --benchmark-skip --cov=unification --cov-report=xml:./coverage.xml 75 | pytest -v tests/ --benchmark-only --benchmark-autosave --benchmark-group-by=group,param:size --benchmark-max-time=3 76 | - name: Coveralls 77 | uses: AndreMiras/coveralls-python-action@develop 78 | with: 79 | parallel: true 80 | flag-name: run-${{ matrix.python-version }} 81 | 82 | all-checks: 83 | if: ${{ always() }} 84 | runs-on: ubuntu-latest 85 | name: "All tests" 86 | needs: [changes, style, test] 87 | steps: 88 | - name: Check build matrix status 89 | if: ${{ needs.changes.outputs.changes == 'true' && (needs.style.result != 'success' || needs.test.result != 'success') }} 90 | run: exit 1 91 | 92 | upload-coverage: 93 | name: "Upload coverage" 94 | needs: [changes, all-checks] 95 | if: ${{ needs.changes.outputs.changes == 'true' && needs.all-checks.result == 'success' }} 96 | runs-on: ubuntu-latest 97 | steps: 98 | - name: Coveralls Finished 99 | uses: AndreMiras/coveralls-python-action@develop 100 | with: 101 | parallel-finished: true 102 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Created by https://www.gitignore.io/api/vim,emacs,python 2 | # Edit at https://www.gitignore.io/?templates=vim,emacs,python 3 | 4 | ### Emacs ### 5 | # -*- mode: gitignore; -*- 6 | *~ 7 | \#*\# 8 | /.emacs.desktop 9 | /.emacs.desktop.lock 10 | *.elc 11 | auto-save-list 12 | tramp 13 | .\#* 14 | 15 | # Org-mode 16 | .org-id-locations 17 | *_archive 18 | 19 | # flymake-mode 20 | *_flymake.* 21 | 22 | # eshell files 23 | /eshell/history 24 | /eshell/lastdir 25 | 26 | # elpa packages 27 | /elpa/ 28 | 29 | # reftex files 30 | *.rel 31 | 32 | # AUCTeX auto folder 33 | /auto/ 34 | 35 | # cask packages 36 | .cask/ 37 | dist/ 38 | 39 | # Flycheck 40 | 
flycheck_*.el 41 | 42 | # server auth directory 43 | /server/ 44 | 45 | # projectiles files 46 | .projectile 47 | 48 | # directory configuration 49 | .dir-locals.el 50 | 51 | # network security 52 | /network-security.data 53 | 54 | 55 | ### Python ### 56 | # Byte-compiled / optimized / DLL files 57 | __pycache__/ 58 | *.py[cod] 59 | *$py.class 60 | 61 | # C extensions 62 | *.so 63 | 64 | # Distribution / packaging 65 | .Python 66 | build/ 67 | develop-eggs/ 68 | downloads/ 69 | eggs/ 70 | .eggs/ 71 | lib/ 72 | lib64/ 73 | parts/ 74 | sdist/ 75 | var/ 76 | wheels/ 77 | pip-wheel-metadata/ 78 | share/python-wheels/ 79 | *.egg-info/ 80 | .installed.cfg 81 | *.egg 82 | MANIFEST 83 | 84 | # PyInstaller 85 | # Usually these files are written by a python script from a template 86 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 87 | *.manifest 88 | *.spec 89 | 90 | # Installer logs 91 | pip-log.txt 92 | pip-delete-this-directory.txt 93 | 94 | # Unit test / coverage reports 95 | htmlcov/ 96 | .tox/ 97 | .nox/ 98 | .coverage 99 | .coverage.* 100 | .cache 101 | nosetests.xml 102 | coverage.xml 103 | *.cover 104 | .hypothesis/ 105 | .pytest_cache/ 106 | testing-report.html 107 | 108 | # Translations 109 | *.mo 110 | *.pot 111 | 112 | # Django stuff: 113 | *.log 114 | local_settings.py 115 | db.sqlite3 116 | db.sqlite3-journal 117 | 118 | # Flask stuff: 119 | instance/ 120 | .webassets-cache 121 | 122 | # Scrapy stuff: 123 | .scrapy 124 | 125 | # Sphinx documentation 126 | docs/_build/ 127 | 128 | # PyBuilder 129 | target/ 130 | 131 | # Jupyter Notebook 132 | .ipynb_checkpoints 133 | 134 | # IPython 135 | profile_default/ 136 | ipython_config.py 137 | 138 | # pyenv 139 | .python-version 140 | 141 | # pipenv 142 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 
143 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 144 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 145 | # install all needed dependencies. 146 | #Pipfile.lock 147 | 148 | # celery beat schedule file 149 | celerybeat-schedule 150 | 151 | # SageMath parsed files 152 | *.sage.py 153 | 154 | # Environments 155 | .env 156 | .venv 157 | env/ 158 | venv/ 159 | ENV/ 160 | env.bak/ 161 | venv.bak/ 162 | 163 | # Spyder project settings 164 | .spyderproject 165 | .spyproject 166 | 167 | # Rope project settings 168 | .ropeproject 169 | 170 | # mkdocs documentation 171 | /site 172 | 173 | # mypy 174 | .mypy_cache/ 175 | .dmypy.json 176 | dmypy.json 177 | 178 | # Pyre type checker 179 | .pyre/ 180 | 181 | ### Vim ### 182 | # Swap 183 | [._]*.s[a-v][a-z] 184 | [._]*.sw[a-p] 185 | [._]s[a-rt-v][a-z] 186 | [._]ss[a-gi-z] 187 | [._]sw[a-p] 188 | 189 | # Session 190 | Session.vim 191 | Sessionx.vim 192 | 193 | # Temporary 194 | .netrwhist 195 | # Auto-generated tag files 196 | tags 197 | # Persistent undo 198 | [._]*.un~ 199 | 200 | # End of https://www.gitignore.io/api/vim,emacs,python 201 | 202 | .benchmarks/ -------------------------------------------------------------------------------- /.pre-commit-config.yaml: -------------------------------------------------------------------------------- 1 | exclude: | 2 | (?x)^( 3 | versioneer\.py| 4 | unification/_version\.py| 5 | doc/.*| 6 | bin/.* 7 | )$ 8 | repos: 9 | - repo: https://github.com/psf/black 10 | rev: 22.3.0 11 | hooks: 12 | - id: black 13 | language_version: python3 14 | - repo: https://gitlab.com/pycqa/flake8 15 | rev: 3.8.4 16 | hooks: 17 | - id: flake8 18 | - repo: https://github.com/pycqa/isort 19 | rev: 5.5.2 20 | hooks: 21 | - id: isort 22 | -------------------------------------------------------------------------------- /.pylintrc: -------------------------------------------------------------------------------- 1 | 
[MASTER] 2 | # Use multiple processes to speed up Pylint. 3 | jobs=0 4 | 5 | # Allow loading of arbitrary C extensions. Extensions are imported into the 6 | # active Python interpreter and may run arbitrary code. 7 | unsafe-load-any-extension=no 8 | 9 | # Allow optimization of some AST trees. This will activate a peephole AST 10 | # optimizer, which will apply various small optimizations. For instance, it can 11 | # be used to obtain the result of joining multiple strings with the addition 12 | # operator. Joining a lot of strings can lead to a maximum recursion error in 13 | # Pylint and this flag can prevent that. It has one side effect, the resulting 14 | # AST will be different than the one from reality. 15 | optimize-ast=no 16 | 17 | [MESSAGES CONTROL] 18 | 19 | # Only show warnings with the listed confidence levels. Leave empty to show 20 | # all. Valid levels: HIGH, INFERENCE, INFERENCE_FAILURE, UNDEFINED 21 | confidence= 22 | 23 | # Disable the message, report, category or checker with the given id(s). You 24 | # can either give multiple identifiers separated by comma (,) or put this 25 | # option multiple times (only on the command line, not in the configuration 26 | # file where it should appear only once).You can also use "--disable=all" to 27 | # disable everything first and then reenable specific checks. For example, if 28 | # you want to run only the similarities checker, you can use "--disable=all 29 | # --enable=similarities". If you want to run only the classes checker, but have 30 | # no Warning level messages displayed, use"--disable=all --enable=classes 31 | # --disable=W" 32 | disable=all 33 | 34 | # Enable the message, report, category or checker with the given id(s). You can 35 | # either give multiple identifier separated by comma (,) or put this option 36 | # multiple time. See also the "--disable" option for examples. 
37 | enable=import-error, 38 | import-self, 39 | reimported, 40 | wildcard-import, 41 | misplaced-future, 42 | relative-import, 43 | deprecated-module, 44 | unpacking-non-sequence, 45 | invalid-all-object, 46 | undefined-all-variable, 47 | used-before-assignment, 48 | cell-var-from-loop, 49 | global-variable-undefined, 50 | dangerous-default-value, 51 | # redefined-builtin, 52 | redefine-in-handler, 53 | unused-import, 54 | unused-wildcard-import, 55 | global-variable-not-assigned, 56 | undefined-loop-variable, 57 | global-at-module-level, 58 | bad-open-mode, 59 | redundant-unittest-assert, 60 | boolean-datetime, 61 | # unused-variable 62 | 63 | 64 | [REPORTS] 65 | 66 | # Set the output format. Available formats are text, parseable, colorized, msvs 67 | # (visual studio) and html. You can also give a reporter class, eg 68 | # mypackage.mymodule.MyReporterClass. 69 | output-format=parseable 70 | 71 | # Put messages in a separate file for each module / package specified on the 72 | # command line instead of printing them on stdout. Reports (if any) will be 73 | # written in a file name "pylint_global.[txt|html]". 74 | files-output=no 75 | 76 | # Tells whether to display a full report or only the messages 77 | reports=no 78 | 79 | # Python expression which should return a note less than 10 (10 is the highest 80 | # note). You have access to the variables errors warning, statement which 81 | # respectively contain the number of errors / warnings messages and the total 82 | # number of statements analyzed. This is used by the global evaluation report 83 | # (RP0004). 
84 | evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10) 85 | 86 | [BASIC] 87 | 88 | # List of builtins function names that should not be used, separated by a comma 89 | bad-functions=map,filter,input 90 | 91 | # Good variable names which should always be accepted, separated by a comma 92 | good-names=i,j,k,ex,Run,_ 93 | 94 | # Bad variable names which should always be refused, separated by a comma 95 | bad-names=foo,bar,baz,toto,tutu,tata 96 | 97 | # Colon-delimited sets of names that determine each other's naming style when 98 | # the name regexes allow several styles. 99 | name-group= 100 | 101 | # Include a hint for the correct naming format with invalid-name 102 | include-naming-hint=yes 103 | 104 | # Regular expression matching correct method names 105 | method-rgx=[a-z_][a-z0-9_]{2,30}$ 106 | 107 | # Naming hint for method names 108 | method-name-hint=[a-z_][a-z0-9_]{2,30}$ 109 | 110 | # Regular expression matching correct function names 111 | function-rgx=[a-z_][a-z0-9_]{2,30}$ 112 | 113 | # Naming hint for function names 114 | function-name-hint=[a-z_][a-z0-9_]{2,30}$ 115 | 116 | # Regular expression matching correct module names 117 | module-rgx=(([a-z_][a-z0-9_]*)|([A-Z][a-zA-Z0-9]+))$ 118 | 119 | # Naming hint for module names 120 | module-name-hint=(([a-z_][a-z0-9_]*)|([A-Z][a-zA-Z0-9]+))$ 121 | 122 | # Regular expression matching correct attribute names 123 | attr-rgx=[a-z_][a-z0-9_]{2,30}$ 124 | 125 | # Naming hint for attribute names 126 | attr-name-hint=[a-z_][a-z0-9_]{2,30}$ 127 | 128 | # Regular expression matching correct class attribute names 129 | class-attribute-rgx=([A-Za-z_][A-Za-z0-9_]{2,30}|(__.*__))$ 130 | 131 | # Naming hint for class attribute names 132 | class-attribute-name-hint=([A-Za-z_][A-Za-z0-9_]{2,30}|(__.*__))$ 133 | 134 | # Regular expression matching correct constant names 135 | const-rgx=(([A-Z_][A-Z0-9_]*)|(__.*__))$ 136 | 137 | # Naming hint for constant names 138 | 
const-name-hint=(([A-Z_][A-Z0-9_]*)|(__.*__))$ 139 | 140 | # Regular expression matching correct class names 141 | class-rgx=[A-Z_][a-zA-Z0-9]+$ 142 | 143 | # Naming hint for class names 144 | class-name-hint=[A-Z_][a-zA-Z0-9]+$ 145 | 146 | # Regular expression matching correct argument names 147 | argument-rgx=[a-z_][a-z0-9_]{2,30}$ 148 | 149 | # Naming hint for argument names 150 | argument-name-hint=[a-z_][a-z0-9_]{2,30}$ 151 | 152 | # Regular expression matching correct inline iteration names 153 | inlinevar-rgx=[A-Za-z_][A-Za-z0-9_]*$ 154 | 155 | # Naming hint for inline iteration names 156 | inlinevar-name-hint=[A-Za-z_][A-Za-z0-9_]*$ 157 | 158 | # Regular expression matching correct variable names 159 | variable-rgx=[a-z_][a-z0-9_]{2,30}$ 160 | 161 | # Naming hint for variable names 162 | variable-name-hint=[a-z_][a-z0-9_]{2,30}$ 163 | 164 | # Regular expression which should only match function or class names that do 165 | # not require a docstring. 166 | no-docstring-rgx=^_ 167 | 168 | # Minimum line length for functions/classes that require docstrings, shorter 169 | # ones are exempt. 170 | docstring-min-length=-1 171 | 172 | 173 | [ELIF] 174 | 175 | # Maximum number of nested blocks for function / method body 176 | max-nested-blocks=5 177 | 178 | 179 | [FORMAT] 180 | 181 | # Maximum number of characters on a single line. 182 | max-line-length=100 183 | 184 | # Regexp for a line that is allowed to be longer than the limit. 185 | ignore-long-lines=^\s*(# )??$ 186 | 187 | # Allow the body of an if to be on the same line as the test if there is no 188 | # else. 189 | single-line-if-stmt=no 190 | 191 | # List of optional constructs for which whitespace checking is disabled. `dict- 192 | # separator` is used to allow tabulation in dicts, etc.: {1 : 1,\n222: 2}. 193 | # `trailing-comma` allows a space between comma and closing bracket: (a, ). 194 | # `empty-line` allows space-only lines. 
195 | no-space-check=trailing-comma,dict-separator 196 | 197 | # Maximum number of lines in a module 198 | max-module-lines=1000 199 | 200 | # String used as indentation unit. This is usually " " (4 spaces) or "\t" (1 201 | # tab). 202 | indent-string=' ' 203 | 204 | # Number of spaces of indent required inside a hanging or continued line. 205 | indent-after-paren=4 206 | 207 | # Expected format of line ending, e.g. empty (any line ending), LF or CRLF. 208 | expected-line-ending-format= 209 | 210 | 211 | [LOGGING] 212 | 213 | # Logging modules to check that the string format arguments are in logging 214 | # function parameter format 215 | logging-modules=logging 216 | 217 | 218 | [MISCELLANEOUS] 219 | 220 | # List of note tags to take in consideration, separated by a comma. 221 | notes=FIXME,XXX,TODO 222 | 223 | 224 | [SIMILARITIES] 225 | 226 | # Minimum lines number of a similarity. 227 | min-similarity-lines=4 228 | 229 | # Ignore comments when computing similarities. 230 | ignore-comments=yes 231 | 232 | # Ignore docstrings when computing similarities. 233 | ignore-docstrings=yes 234 | 235 | # Ignore imports when computing similarities. 236 | ignore-imports=no 237 | 238 | 239 | [SPELLING] 240 | 241 | # Spelling dictionary name. Available dictionaries: none. To make it working 242 | # install python-enchant package. 243 | spelling-dict= 244 | 245 | # List of comma separated words that should not be checked. 246 | spelling-ignore-words= 247 | 248 | # A path to a file that contains private dictionary; one word per line. 249 | spelling-private-dict-file= 250 | 251 | # Tells whether to store unknown words to indicated private dictionary in 252 | # --spelling-private-dict-file option instead of raising a message. 253 | spelling-store-unknown-words=no 254 | 255 | 256 | [TYPECHECK] 257 | 258 | # Tells whether missing members accessed in mixin class should be ignored. A 259 | # mixin class is detected if its name ends with "mixin" (case insensitive). 
260 | ignore-mixin-members=yes 261 | 262 | # List of module names for which member attributes should not be checked 263 | # (useful for modules/projects where namespaces are manipulated during runtime 264 | # and thus existing member attributes cannot be deduced by static analysis. It 265 | # supports qualified module names, as well as Unix pattern matching. 266 | ignored-modules=tensorflow.core.framework,tensorflow.python.framework,tensorflow.python.ops.gen_linalg_ops 267 | 268 | # List of classes names for which member attributes should not be checked 269 | # (useful for classes with attributes dynamically set). This supports can work 270 | # with qualified names. 271 | ignored-classes= 272 | 273 | # List of members which are set dynamically and missed by pylint inference 274 | # system, and so shouldn't trigger E1101 when accessed. Python regular 275 | # expressions are accepted. 276 | generated-members= 277 | 278 | 279 | [VARIABLES] 280 | 281 | # Tells whether we should check for unused import in __init__ files. 282 | init-import=no 283 | 284 | # A regular expression matching the name of dummy variables (i.e. expectedly 285 | # not used). 286 | dummy-variables-rgx=_$|dummy 287 | 288 | # List of additional names supposed to be defined in builtins. Remember that 289 | # you should avoid to define new builtins when possible. 290 | additional-builtins= 291 | 292 | # List of strings which can identify a callback function by name. A callback 293 | # name must start or end with one of those strings. 294 | callbacks=cb_,_cb 295 | 296 | 297 | [CLASSES] 298 | 299 | # List of method names used to declare (i.e. assign) instance attributes. 300 | defining-attr-methods=__init__,__new__,setUp 301 | 302 | # List of valid names for the first argument in a class method. 303 | valid-classmethod-first-arg=cls 304 | 305 | # List of valid names for the first argument in a metaclass class method. 
306 | valid-metaclass-classmethod-first-arg=mcs 307 | 308 | # List of member names, which should be excluded from the protected access 309 | # warning. 310 | exclude-protected=_asdict,_fields,_replace,_source,_make 311 | 312 | 313 | [DESIGN] 314 | 315 | # Maximum number of arguments for function / method 316 | max-args=5 317 | 318 | # Argument names that match this expression will be ignored. Default to name 319 | # with leading underscore 320 | ignored-argument-names=_.* 321 | 322 | # Maximum number of locals for function / method body 323 | max-locals=15 324 | 325 | # Maximum number of return / yield for function / method body 326 | max-returns=6 327 | 328 | # Maximum number of branch for function / method body 329 | max-branches=12 330 | 331 | # Maximum number of statements in function / method body 332 | max-statements=50 333 | 334 | # Maximum number of parents for a class (see R0901). 335 | max-parents=7 336 | 337 | # Maximum number of attributes for a class (see R0902). 338 | max-attributes=7 339 | 340 | # Minimum number of public methods for a class (see R0903). 341 | min-public-methods=2 342 | 343 | # Maximum number of public methods for a class (see R0904). 344 | max-public-methods=20 345 | 346 | # Maximum number of boolean expressions in a if statement 347 | max-bool-expr=5 348 | 349 | 350 | [IMPORTS] 351 | 352 | # Deprecated modules which should not be used, separated by a comma 353 | deprecated-modules=optparse 354 | 355 | # Create a graph of every (i.e. internal and external) dependencies in the 356 | # given file (report RP0402 must not be disabled) 357 | import-graph= 358 | 359 | # Create a graph of external dependencies in the given file (report RP0402 must 360 | # not be disabled) 361 | ext-import-graph= 362 | 363 | # Create a graph of internal dependencies in the given file (report RP0402 must 364 | # not be disabled) 365 | int-import-graph= 366 | 367 | 368 | [EXCEPTIONS] 369 | 370 | # Exceptions that will emit a warning when being caught. 
Defaults to 371 | # "Exception" 372 | overgeneral-exceptions=Exception 373 | -------------------------------------------------------------------------------- /LICENSE.txt: -------------------------------------------------------------------------------- 1 | Copyright (c) 2019 Brandon T. Willard 2 | Copyright (c) 2014 Matthew Rocklin 3 | 4 | All rights reserved. 5 | 6 | Redistribution and use in source and binary forms, with or without 7 | modification, are permitted provided that the following conditions are met: 8 | 9 | a. Redistributions of source code must retain the above copyright notice, 10 | this list of conditions and the following disclaimer. 11 | b. Redistributions in binary form must reproduce the above copyright 12 | notice, this list of conditions and the following disclaimer in the 13 | documentation and/or other materials provided with the distribution. 14 | c. Neither the name of Unification nor the names of its contributors 15 | may be used to endorse or promote products derived from this software 16 | without specific prior written permission. 17 | 18 | 19 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" 20 | AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE 21 | IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE 22 | ARE DISCLAIMED. IN NO EVENT SHALL THE REGENTS OR CONTRIBUTORS BE LIABLE FOR 23 | ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL 24 | DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR 25 | SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER 26 | CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT 27 | LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY 28 | OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH 29 | DAMAGE. 
30 | -------------------------------------------------------------------------------- /MANIFEST.in: -------------------------------------------------------------------------------- 1 | include versioneer.py 2 | include unification/_version.py 3 | -------------------------------------------------------------------------------- /Makefile: -------------------------------------------------------------------------------- 1 | .PHONY: help venv conda docker docstyle format style black test lint check coverage pypi 2 | .DEFAULT_GOAL = help 3 | 4 | PYTHON = python 5 | PIP = pip 6 | CONDA = conda 7 | SHELL = bash 8 | 9 | help: 10 | @printf "Usage:\n" 11 | @grep -E '^[a-zA-Z_-]+:.*?# .*$$' $(MAKEFILE_LIST) | awk 'BEGIN {FS = ":.*?# "}; {printf "\033[1;34mmake %-10s\033[0m%s\n", $$1, $$2}' 12 | 13 | conda: # Set up a conda environment for development. 14 | @printf "Creating conda environment...\n" 15 | ${CONDA} create --yes --name unification-env python=3.6 16 | ( \ 17 | ${CONDA} activate unification-env; \ 18 | ${PIP} install -U pip; \ 19 | ${PIP} install -r requirements.txt; \ 20 | ${PIP} install -r requirements-dev.txt; \ 21 | ${CONDA} deactivate; \ 22 | ) 23 | @printf "\n\nConda environment created! \033[1;34mRun \`conda activate unification-env\` to activate it.\033[0m\n\n\n" 24 | 25 | venv: # Set up a Python virtual environment for development. 26 | @printf "Creating Python virtual environment...\n" 27 | rm -rf unification-venv 28 | ${PYTHON} -m venv unification-venv 29 | ( \ 30 | source unification-venv/bin/activate; \ 31 | ${PIP} install -U pip; \ 32 | ${PIP} install -r requirements.txt; \ 33 | ${PIP} install -r requirements-dev.txt; \ 34 | deactivate; \ 35 | ) 36 | @printf "\n\nVirtual environment created! \033[1;34mRun \`source unification-venv/bin/activate\` to activate it.\033[0m\n\n\n" 37 | 38 | docker: # Set up a Docker image for development. 
39 | @printf "Creating Docker image...\n" 40 | ${SHELL} ./scripts/container.sh --build 41 | 42 | docstyle: 43 | @printf "Checking documentation with pydocstyle...\n" 44 | pydocstyle unification/ 45 | @printf "\033[1;34mPydocstyle passes!\033[0m\n\n" 46 | 47 | format: 48 | @printf "Checking code style with black...\n" 49 | black --check unification/ tests/ 50 | @printf "\033[1;34mBlack passes!\033[0m\n\n" 51 | 52 | style: 53 | @printf "Checking code style with pylint...\n" 54 | pylint unification/ tests/ 55 | @printf "\033[1;34mPylint passes!\033[0m\n\n" 56 | 57 | black: # Format code in-place using black. 58 | black unification/ tests/ 59 | 60 | test: # Test code using pytest. 61 | pytest -v tests/ --benchmark-skip --cov=unification/ --cov-report=xml --html=testing-report.html --self-contained-html 62 | 63 | benchmark: 64 | pytest -v tests/ --benchmark-only --benchmark-autosave --benchmark-group-by=group,param:size --benchmark-max-time=3 65 | 66 | coverage: test 67 | diff-cover coverage.xml --compare-branch=main --fail-under=100 68 | 69 | pypi: 70 | ${PYTHON} setup.py clean --all; \ 71 | ${PYTHON} setup.py rotate --match=.tar.gz,.whl,.egg,.zip --keep=0; \ 72 | ${PYTHON} setup.py sdist bdist_wheel; \ 73 | twine upload --skip-existing dist/*; 74 | 75 | lint: docstyle format style # Lint code using pydocstyle, black and pylint. 76 | 77 | check: lint test coverage benchmark # Both lint and test code. Runs `make lint` followed by `make test`. 
78 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Logical Unification 2 | 3 | [![Build Status](https://travis-ci.org/pythological/unification.svg?branch=main)](https://travis-ci.org/pythological/unification) [![Coverage Status](https://coveralls.io/repos/github/pythological/unification/badge.svg?branch=main)](https://coveralls.io/github/pythological/unification?branch=main) [![PyPI](https://img.shields.io/pypi/v/logical-unification)](https://pypi.org/project/logical-unification/) 4 | 5 | [Logical unification](https://en.wikipedia.org/wiki/Unification_(computer_science)) in Python, extensible via dispatch. 6 | 7 | ## Installation 8 | 9 | Using `pip`: 10 | ```bash 11 | pip install logical-unification 12 | ``` 13 | 14 | To install from source: 15 | ```bash 16 | git clone git@github.com:pythological/unification.git 17 | cd unification 18 | pip install -r requirements.txt 19 | ``` 20 | 21 | Tests can be run with the provided `Makefile`: 22 | ```bash 23 | make check 24 | ``` 25 | 26 | ## Examples 27 | 28 | `unification` has built-in support for unifying most Python data types via the function `unify`: 29 | 30 | ```python 31 | >>> from unification import * 32 | >>> unify(1, 1) 33 | {} 34 | >>> unify(1, 2) 35 | False 36 | >>> x = var() 37 | >>> unify((1, x), (1, 2)) 38 | {~x: 2} 39 | >>> unify((x, x), (1, 2)) 40 | False 41 | ``` 42 | 43 | Unifiable objects containing logic variables can also be reified using `reify`: 44 | 45 | ```python 46 | >>> reify((1, x), {x: 2}) 47 | (1, 2) 48 | ``` 49 | 50 | And most Python data structures: 51 | 52 | ``` python 53 | >>> unify({"a": 1, "b": 2}, {"a": x, "b": 2}) 54 | {~x: 1} 55 | >>> unify({"a": 1, "b": 2}, {"a": x, "b": 2, "c": 3}) 56 | False 57 | >>> from collections import namedtuple 58 | >>> ntuple = namedtuple("ntuple", ("a", "b")) 59 | >>> unify(ntuple(1, 2), ntuple(x, 2)) 60 | {~x: 1} 61 | ```
62 | 63 | Custom classes can be made "unifiable" with the `unifiable` decorator: 64 | 65 | ```python 66 | @unifiable 67 | class Account(object): 68 | def __init__(self, id, name, balance): 69 | self.id = id 70 | self.name = name 71 | self.balance = balance 72 | 73 | >>> data = [Account(1, 'Alice', 100), 74 | Account(2, 'Bob', 0), 75 | Account(2, 'Charlie', 0), 76 | Account(2, 'Denis', 400), 77 | Account(2, 'Edith', 500)] 78 | >>> id, name, balance = var('id'), var('name'), var('balance') 79 | >>> [unify(Account(id, name, balance), acct) for acct in data] 80 | [{~name: 'Alice', ~balance: 100, ~id: 1}, 81 | {~name: 'Bob', ~balance: 0, ~id: 2}, 82 | {~name: 'Charlie', ~balance: 0, ~id: 2}, 83 | {~name: 'Denis', ~balance: 400, ~id: 2}, 84 | {~name: 'Edith', ~balance: 500, ~id: 2}] 85 | >>> [unify(Account(id, name, 0), acct) for acct in data] 86 | [False, 87 | {~name: 'Bob', ~id: 2}, 88 | {~name: 'Charlie', ~id: 2}, 89 | False, 90 | False] 91 | ``` 92 | 93 | `unification` also supports function dispatch through pattern matching: 94 | 95 | ```python 96 | >>> from unification.match import * 97 | >>> n = var('n') 98 | 99 | @match(0) 100 | def fib(n): 101 | return 0 102 | 103 | 104 | @match(1) 105 | def fib(n): 106 | return 1 107 | 108 | 109 | @match(n) 110 | def fib(n): 111 | return fib(n - 1) + fib(n - 2) 112 | 113 | >>> map(fib, [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]) 114 | [0, 1, 1, 2, 3, 5, 8, 13, 21, 34] 115 | ``` 116 | 117 | The pattern matching can be fairly complex: 118 | 119 | ```python 120 | >>> name, amount = var('name'), var('amount') 121 | 122 | @match({'status': 200, 'data': {'name': name, 'credit': amount}}) 123 | def respond(name, amount): 124 | balance[name] += amount 125 | 126 | 127 | @match({'status': 200, 'data': {'name': name, 'debit': amount}}) 128 | def respond(name, amount): 129 | balance[name] -= amount 130 | 131 | 132 | @match({'status': 404}) 133 | def respond(): 134 | print("Bad Request") 135 | 136 | ``` 137 | 138 | See the full example in the [examples
directory](https://github.com/pythological/unification/tree/main/examples)
Stack vs. stream benchmarks 148 |

149 | 150 | ```python 151 | -------------------------------------------------------------------------------- benchmark 'reify_chain size=10': 2 tests ------------------------------------------------------------------------------- 152 | Name (time in us) Min Max Mean StdDev Median IQR Outliers OPS (Kops/s) Rounds Iterations 153 | --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- 154 | test_reify_chain_stack[10] 41.0790 (1.0) 545.1940 (3.20) 52.9087 (1.07) 9.7964 (1.04) 50.8650 (1.08) 6.4301 (8.37) 11815;10849 18.9005 (0.93) 260164 1 155 | test_reify_chain_stream[10] 42.4410 (1.03) 170.5540 (1.0) 49.3080 (1.0) 9.3993 (1.0) 47.2400 (1.0) 0.7680 (1.0) 14962;102731 20.2807 (1.0) 278113 1 156 | --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- 157 | 158 | ------------------------------------------ benchmark 'reify_chain size=1000': 1 tests ----------------------------------------- 159 | Name (time in ms) Min Max Mean StdDev Median IQR Outliers OPS Rounds Iterations 160 | ------------------------------------------------------------------------------------------------------------------------------- 161 | test_reify_chain_stream_large[1000] 7.7722 28.2579 10.0723 2.5087 9.4899 0.3106 70;155 99.2820 1528 1 162 | ------------------------------------------------------------------------------------------------------------------------------- 163 | 164 | ------------------------------------------------------------------------- benchmark 'reify_chain size=300': 2 tests -------------------------------------------------------------------------- 165 | Name (time in ms) Min Max Mean StdDev Median IQR Outliers OPS Rounds Iterations 166 | 
---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- 167 | test_reify_chain_stack[300] 1.5183 (1.0) 22.1821 (1.19) 1.9826 (1.0) 1.5511 (1.16) 1.7410 (1.0) 0.0801 (1.0) 144;684 504.3878 (1.0) 7201 1 168 | test_reify_chain_stream[300] 1.7059 (1.12) 18.6020 (1.0) 2.1237 (1.07) 1.3389 (1.0) 1.9260 (1.11) 0.1020 (1.27) 118;585 470.8745 (0.93) 6416 1 169 | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- 170 | 171 | --------------------------------------------------------------------------------- benchmark 'reify_chain size=35': 2 tests -------------------------------------------------------------------------------- 172 | Name (time in us) Min Max Mean StdDev Median IQR Outliers OPS (Kops/s) Rounds Iterations 173 | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- 174 | test_reify_chain_stream[35] 129.2780 (1.0) 868.1510 (1.02) 190.0433 (1.11) 36.2784 (1.41) 179.5690 (1.08) 21.5360 (2.30) 1535;1455 5.2620 (0.90) 26072 1 175 | test_reify_chain_stack[35] 150.7850 (1.17) 853.7920 (1.0) 170.5166 (1.0) 25.7944 (1.0) 165.8500 (1.0) 9.3530 (1.0) 3724;5480 5.8645 (1.0) 81286 1 176 | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- 177 | 178 | ------------------------------------------- benchmark 'reify_chain size=5000': 1 tests ------------------------------------------ 179 | Name (time in ms) Min Max Mean StdDev Median IQR Outliers OPS Rounds Iterations 180 | 
--------------------------------------------------------------------------------------------------------------------------------- 181 | test_reify_chain_stream_large[5000] 46.9073 86.9737 52.9724 6.6919 49.6787 3.9609 68;68 18.8778 292 1 182 | --------------------------------------------------------------------------------------------------------------------------------- 183 | 184 | ------------------------------------------------------------------------------- benchmark 'unify_chain size=10': 2 tests ------------------------------------------------------------------------------- 185 | Name (time in us) Min Max Mean StdDev Median IQR Outliers OPS (Kops/s) Rounds Iterations 186 | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- 187 | test_unify_chain_stream[10] 77.6280 (1.0) 307.9130 (1.0) 86.7625 (1.0) 17.5355 (1.20) 82.7525 (1.0) 1.7290 (1.0) 809;1736 11.5257 (1.0) 15524 1 188 | test_unify_chain_stack[10] 92.9890 (1.20) 309.8770 (1.01) 104.2017 (1.20) 14.6694 (1.0) 101.0160 (1.22) 4.2368 (2.45) 3657;6651 9.5968 (0.83) 73379 1 189 | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- 190 | 191 | ------------------------------------------- benchmark 'unify_chain size=1000': 1 tests ------------------------------------------ 192 | Name (time in ms) Min Max Mean StdDev Median IQR Outliers OPS Rounds Iterations 193 | --------------------------------------------------------------------------------------------------------------------------------- 194 | test_unify_chain_stream_large[1000] 27.3518 65.5924 31.1374 4.2563 29.5148 3.5286 38;35 32.1158 496 1 195 | 
--------------------------------------------------------------------------------------------------------------------------------- 196 | 197 | ------------------------------------------------------------------------- benchmark 'unify_chain size=300': 2 tests -------------------------------------------------------------------------- 198 | Name (time in ms) Min Max Mean StdDev Median IQR Outliers OPS Rounds Iterations 199 | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- 200 | test_unify_chain_stream[300] 3.6957 (1.0) 13.1876 (1.0) 4.4439 (1.0) 1.0719 (1.42) 4.2080 (1.0) 0.2410 (1.67) 51;95 225.0298 (1.0) 1114 1 201 | test_unify_chain_stack[300] 4.2952 (1.16) 13.4294 (1.02) 4.7732 (1.07) 0.7555 (1.0) 4.6623 (1.11) 0.1446 (1.0) 36;136 209.5024 (0.93) 2911 1 202 | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- 203 | 204 | --------------------------------------------------------------------------------- benchmark 'unify_chain size=35': 2 tests --------------------------------------------------------------------------------- 205 | Name (time in us) Min Max Mean StdDev Median IQR Outliers OPS (Kops/s) Rounds Iterations 206 | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ 207 | test_unify_chain_stream[35] 285.6880 (1.0) 934.9690 (1.0) 324.5402 (1.0) 40.8338 (1.0) 319.8520 (1.0) 20.4375 (1.0) 962;1159 3.0813 (1.0) 24331 1 208 | test_unify_chain_stack[35] 345.2770 (1.21) 1,088.3650 (1.16) 407.9067 (1.26) 52.2263 (1.28) 396.6640 (1.24) 20.6560 (1.01) 2054;3027 2.4515 (0.80) 37594 1 209 | 
------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ 210 | 211 | --------------------------------------------- benchmark 'unify_chain size=5000': 1 tests --------------------------------------------- 212 | Name (time in ms) Min Max Mean StdDev Median IQR Outliers OPS Rounds Iterations 213 | -------------------------------------------------------------------------------------------------------------------------------------- 214 | test_unify_chain_stream_large[5000] 555.2733 754.9897 605.4949 50.6124 591.1251 61.4030 2;2 1.6515 26 1 215 | -------------------------------------------------------------------------------------------------------------------------------------- 216 | 217 | Legend: 218 | Outliers: 1 Standard Deviation from Mean; 1.5 IQR (InterQuartile Range) from 1st Quartile and 3rd Quartile. 219 | OPS: Operations Per Second, computed as 1 / Mean 220 | ``` 221 | 222 |

223 |
224 | 225 | ## About 226 | 227 | This project is a fork of [`unification`](https://github.com/mrocklin/unification/). 228 | 229 | ## Development 230 | 231 | Install the development dependencies: 232 | 233 | ```bash 234 | $ pip install -r requirements.txt 235 | ``` 236 | 237 | Set up `pre-commit` hooks: 238 | 239 | ```bash 240 | $ pre-commit install --install-hooks 241 | ``` 242 | -------------------------------------------------------------------------------- /examples/account.py: -------------------------------------------------------------------------------- 1 | from collections import defaultdict 2 | from functools import partial 3 | 4 | from unification import var 5 | from unification.match import VarDispatcher, match 6 | 7 | match = partial(match, Dispatcher=VarDispatcher) 8 | 9 | balance = defaultdict(lambda: 0) 10 | 11 | name, amount = var("name"), var("amount") 12 | 13 | 14 | @match({"status": 200, "data": {"name": name, "credit": amount}}) 15 | def respond(name, amount): 16 | balance[name] += amount 17 | 18 | 19 | @match({"status": 200, "data": {"name": name, "debit": amount}}) 20 | def respond(name, amount): 21 | balance[name] -= amount 22 | 23 | 24 | @match({"status": 404}) 25 | def respond(): 26 | print("Bad Request") 27 | 28 | 29 | if __name__ == "__main__": 30 | respond({"status": 200, "data": {"name": "Alice", "credit": 100}}) 31 | respond({"status": 200, "data": {"name": "Bob", "debit": 100}}) 32 | respond({"status": 404}) 33 | print(dict(balance)) 34 | -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | -e ./ 2 | coveralls 3 | pydocstyle>=3.0.0 4 | pytest>=5.0.0 5 | pytest-cov>=2.6.1 6 | isort 7 | pytest-html>=1.20.0 8 | pytest-benchmark 9 | pylint>=2.3.1 10 | black>=19.3b0; platform.python_implementation!='PyPy' 11 | diff-cover 12 | versioneer 13 | coverage>=5.1 14 | pre-commit 15 | 
-------------------------------------------------------------------------------- /setup.cfg: -------------------------------------------------------------------------------- 1 | [versioneer] 2 | VCS = git 3 | style = pep440 4 | versionfile_source = unification/_version.py 5 | versionfile_build = unification/_version.py 6 | tag_prefix = v 7 | parentdir_prefix = unification- 8 | 9 | [pydocstyle] 10 | # Ignore errors for missing docstrings. 11 | # Ignore D202 (No blank lines allowed after function docstring) 12 | # due to bug in black: https://github.com/ambv/black/issues/355 13 | add-ignore = D100,D101,D102,D103,D104,D105,D106,D107,D202 14 | convention = numpy 15 | 16 | [tool:pytest] 17 | python_functions=test_* 18 | python_files=test*.py 19 | testpaths=tests 20 | 21 | [coverage:run] 22 | relative_files = True 23 | omit = 24 | unification/_version.py 25 | tests/* 26 | 27 | [coverage:report] 28 | exclude_lines = 29 | pragma: no cover 30 | 31 | raise NotImplementedError 32 | 33 | [isort] 34 | multi_line_output = 3 35 | include_trailing_comma = True 36 | force_grid_wrap = 0 37 | use_parentheses = True 38 | ensure_newline_before_comments = True 39 | line_length = 88 40 | 41 | [flake8] 42 | max-line-length = 88 43 | extend-ignore = E203, W503 44 | per-file-ignores = 45 | **/__init__.py:F401,E402,F403 46 | examples/account.py:F811 47 | tests/test_match.py:F811 48 | 49 | [pylint] 50 | max-line-length = 88 51 | 52 | [pylint.messages_control] 53 | disable = C0330, C0326 -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | from os.path import exists 3 | 4 | from setuptools import setup 5 | 6 | import versioneer 7 | 8 | setup( 9 | name="logical-unification", 10 | version=versioneer.get_version(), 11 | cmdclass=versioneer.get_cmdclass(), 12 | description="Logical unification in Python", 13 | 
url="http://github.com/pythological/unification/", 14 | maintainer="Brandon T. Willard", 15 | maintainer_email="brandonwillard+unification@gmail.com", 16 | license="BSD", 17 | keywords="unification logic-programming dispatch", 18 | packages=["unification"], 19 | install_requires=[ 20 | "toolz", 21 | "multipledispatch", 22 | ], 23 | long_description=(open("README.md").read() if exists("README.md") else ""), 24 | long_description_content_type="text/markdown", 25 | zip_safe=False, 26 | python_requires=">=3.6", 27 | classifiers=[ 28 | "Development Status :: 5 - Production/Stable", 29 | "Intended Audience :: Science/Research", 30 | "Intended Audience :: Developers", 31 | "License :: OSI Approved :: BSD License", 32 | "Operating System :: OS Independent", 33 | "Programming Language :: Python", 34 | "Programming Language :: Python :: 3", 35 | "Programming Language :: Python :: 3.7", 36 | "Programming Language :: Python :: 3.8", 37 | "Programming Language :: Python :: 3.9", 38 | "Programming Language :: Python :: 3.10", 39 | "Programming Language :: Python :: Implementation :: CPython", 40 | "Programming Language :: Python :: Implementation :: PyPy", 41 | "Topic :: Software Development :: Libraries", 42 | ], 43 | ) 44 | -------------------------------------------------------------------------------- /tests/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pythological/unification/5e3e4aba1a4f63c6a4abe7d8a5c6fbbce36fa488/tests/__init__.py -------------------------------------------------------------------------------- /tests/test_benchmarks.py: -------------------------------------------------------------------------------- 1 | import platform 2 | import sys 3 | 4 | import pytest 5 | 6 | from tests.utils import gen_long_chain 7 | from unification import assoc, isvar, reify, unify, var 8 | from unification.utils import transitive_get as walk 9 | 10 | nesting_sizes = [10, 35, 300] 11 | 12 | 13 | def 
unify_stack(u, v, s): 14 | 15 | u = walk(u, s) 16 | v = walk(v, s) 17 | 18 | if u == v: 19 | return s 20 | if isvar(u): 21 | return assoc(s, u, v) 22 | if isvar(v): 23 | return assoc(s, v, u) 24 | 25 | if isinstance(u, (tuple, list)) and type(u) == type(v): 26 | for i_u, i_v in zip(u, v): 27 | s = unify_stack(i_u, i_v, s) 28 | if s is False: 29 | return s 30 | 31 | return s 32 | 33 | return False 34 | 35 | 36 | def reify_stack(u, s): 37 | 38 | u_ = walk(u, s) 39 | 40 | if u_ is not u: 41 | return reify_stack(u_, s) 42 | 43 | if isinstance(u_, (tuple, list)): 44 | return type(u_)(reify_stack(i_u, s) for i_u in u_) 45 | 46 | return u_ 47 | 48 | 49 | @pytest.mark.benchmark(group="unify_chain") 50 | @pytest.mark.parametrize("size", nesting_sizes) 51 | def test_unify_chain_stream(size, benchmark): 52 | a_lv = var() 53 | form, lvars = gen_long_chain(a_lv, size, use_lvars=True) 54 | term, _ = gen_long_chain("a", size) 55 | 56 | res = benchmark(unify, form, term, {}) 57 | assert res[a_lv] == "a" 58 | 59 | 60 | @pytest.mark.benchmark(group="unify_chain") 61 | @pytest.mark.parametrize("size", nesting_sizes) 62 | def test_unify_chain_stack(size, benchmark): 63 | a_lv = var() 64 | form, lvars = gen_long_chain(a_lv, size, use_lvars=True) 65 | term, _ = gen_long_chain("a", size) 66 | 67 | res = benchmark(unify_stack, form, term, {}) 68 | assert res[a_lv] == "a" 69 | 70 | 71 | @pytest.mark.benchmark(group="reify_chain") 72 | @pytest.mark.parametrize("size", nesting_sizes) 73 | def test_reify_chain_stream(size, benchmark): 74 | a_lv = var() 75 | form, lvars = gen_long_chain(a_lv, size, use_lvars=True) 76 | term, _ = gen_long_chain("a", size) 77 | 78 | lvars.update({a_lv: "a"}) 79 | res = benchmark(reify_stack, form, lvars) 80 | assert res == term 81 | 82 | 83 | @pytest.mark.benchmark(group="reify_chain") 84 | @pytest.mark.parametrize("size", nesting_sizes) 85 | def test_reify_chain_stack(size, benchmark): 86 | a_lv = var() 87 | form, lvars = gen_long_chain(a_lv, size, 
use_lvars=True) 88 | term, _ = gen_long_chain("a", size) 89 | 90 | lvars.update({a_lv: "a"}) 91 | res = benchmark(reify_stack, form, lvars) 92 | assert res == term 93 | 94 | 95 | @pytest.mark.benchmark(group="unify_chain") 96 | @pytest.mark.parametrize("size", [1000, 5000]) 97 | def test_unify_chain_stream_large(size, benchmark): 98 | a_lv = var() 99 | form, lvars = gen_long_chain(a_lv, size, use_lvars=True) 100 | term, _ = gen_long_chain("a", size) 101 | 102 | res = benchmark(unify, form, term, {}) 103 | assert res[a_lv] == "a" 104 | 105 | 106 | @pytest.mark.skipif( 107 | platform.python_implementation() == "PyPy", 108 | reason="PyPy's sys.getrecursionlimit changes", 109 | ) 110 | @pytest.mark.benchmark(group="reify_chain") 111 | @pytest.mark.parametrize("size", [sys.getrecursionlimit(), sys.getrecursionlimit() * 5]) 112 | def test_reify_chain_stream_large(size, benchmark): 113 | a_lv = var() 114 | form, lvars = gen_long_chain(a_lv, size, use_lvars=True) 115 | term, _ = gen_long_chain("a", size) 116 | 117 | lvars.update({a_lv: "a"}) 118 | 119 | res = benchmark(reify, form, lvars) 120 | 121 | if size < sys.getrecursionlimit(): 122 | assert res == term 123 | else: 124 | with pytest.raises(RecursionError): 125 | assert res == term 126 | -------------------------------------------------------------------------------- /tests/test_core.py: -------------------------------------------------------------------------------- 1 | import sys 2 | from collections import OrderedDict 3 | from types import MappingProxyType 4 | 5 | import pytest 6 | 7 | from tests.utils import gen_long_chain 8 | from unification import var 9 | from unification.core import assoc, isground, reify, unground_lvars, unify 10 | from unification.utils import freeze 11 | 12 | 13 | def test_assoc(): 14 | d = {"a": 1, 2: 2} 15 | assert assoc(d, "c", 3) is not d 16 | assert assoc(d, "c", 3) == {"a": 1, 2: 2, "c": 3} 17 | assert assoc(d, 2, 3) == {"a": 1, 2: 3} 18 | assert assoc(d, "a", 0) == {"a": 0, 2: 2} 19 
| assert d == {"a": 1, 2: 2} 20 | 21 | def assoc_OrderedDict(s, u, v): 22 | s[u] = v 23 | return s 24 | 25 | assoc.add((OrderedDict, object, object), assoc_OrderedDict) 26 | 27 | x = var() 28 | d2 = OrderedDict(d) 29 | assert assoc(d2, x, 3) is d2 30 | assert assoc(d2, x, 3) == {"a": 1, 2: 2, x: 3} 31 | assert assoc(d, x, 3) is not d 32 | 33 | 34 | def test_reify(): 35 | x, y, z = var(), var(), var() 36 | s = {x: 1, y: 2, z: (x, y)} 37 | assert reify(x, s) == 1 38 | assert reify(10, s) == 10 39 | assert reify((1, y), s) == (1, 2) 40 | assert reify((1, (x, (y, 2))), s) == (1, (1, (2, 2))) 41 | assert reify(z, s) == (1, 2) 42 | assert reify(z, MappingProxyType(s)) == (1, 2) 43 | 44 | 45 | def test_reify_Mapping(): 46 | x, y = var(), var() 47 | s = {x: 2, y: 4} 48 | e = [(1, x), (3, {5: y})] 49 | expected_res = [(1, 2), (3, {5: 4})] 50 | assert reify(dict(e), s) == dict(expected_res) 51 | assert reify(OrderedDict(e), s) == OrderedDict(expected_res) 52 | 53 | 54 | def test_reify_Set(): 55 | x, y = var(), var() 56 | assert reify({1, 2, x, y}, {x: 3}) == {1, 2, 3, y} 57 | assert reify(frozenset({1, 2, x, y}), {x: 3}) == frozenset({1, 2, 3, y}) 58 | 59 | 60 | def test_reify_list(): 61 | x, y = var(), var() 62 | s = {x: 2, y: 4} 63 | e = [1, [x, 3], y] 64 | assert reify(e, s) == [1, [2, 3], 4] 65 | 66 | 67 | def test_reify_complex(): 68 | x, y = var(), var() 69 | s = {x: 2, y: 4} 70 | e = {1: [x], 3: (y, 5)} 71 | 72 | assert reify(e, s) == {1: [2], 3: (4, 5)} 73 | assert reify((1, {2: x}), {x: slice(0, y), y: 3}) == (1, {2: slice(0, 3)}) 74 | 75 | 76 | def test_reify_slice(): 77 | x = var() 78 | assert reify(slice(1, x, 3), {x: 10}) == slice(1, 10, 3) 79 | 80 | 81 | def test_unify(): 82 | x, y, z = var(), var(), var() 83 | assert unify(x, x, {}) == {} 84 | assert unify(1, 1, {}) == {} 85 | assert unify(1, 2, {}) is False 86 | assert unify(x, 2, {}) == {x: 2} 87 | assert unify(2, x, {}) == {x: 2} 88 | assert unify(2, x, MappingProxyType({})) == {x: 2} 89 | assert unify(x, 
y, {}) == {x: y} 90 | assert unify(y, x, {}) == {y: x} 91 | assert unify(y, x, {y: x}) == {y: x} 92 | assert unify(x, y, {y: x}) == {y: x} 93 | assert unify(y, x, {x: y}) == {x: y} 94 | assert unify(x, y, {x: y}) == {x: y} 95 | assert unify(y, x, {y: z}) == {y: z, z: x} 96 | assert unify(x, y, {y: z}) == {y: z, x: z} 97 | 98 | 99 | def test_unify_slice(): 100 | x, y = var(), var() 101 | assert unify(slice(1), slice(1), {}) == {} 102 | assert unify(slice(1, 2, 1), slice(2, 2, 1), {}) is False 103 | assert unify(slice(1, 2, 1), slice(x, 2, 1), {x: 2}) is False 104 | assert unify(slice(1, 2, 1), slice(1, 3, 1), {}) is False 105 | assert unify(slice(1, 4, 2), slice(1, 4, 1), {}) is False 106 | assert unify(slice(x), slice(x), {}) == {} 107 | assert unify(slice(1, 2, 3), x, {}) == {x: slice(1, 2, 3)} 108 | assert unify(slice(1, 2, None), slice(x, y), {}) == {x: 1, y: 2} 109 | 110 | 111 | def test_unify_iter(): 112 | x = var() 113 | assert unify([1], (1,)) is False 114 | assert unify((i for i in [1, 2]), [1, 2]) is False 115 | assert unify(iter([1, x]), iter([1, 2])) == {x: 2} 116 | 117 | 118 | def test_unify_seq(): 119 | x = var() 120 | assert unify([], [], {}) == {} 121 | assert unify([x], [x], {}) == {} 122 | assert unify((1, 2), (1, 2), {}) == {} 123 | assert unify([1, 2], [1, 2], {}) == {} 124 | assert unify((1, 2), (1, 2, 3), {}) is False 125 | assert unify((1, x), (1, 2), {}) == {x: 2} 126 | assert unify((1, x), (1, 2), {x: 3}) is False 127 | 128 | a, b, z = var(), var(), var() 129 | assert unify([a, b], x, {x: [z, 1]}) == {x: [z, 1], a: z, b: 1} 130 | 131 | 132 | def test_unify_set(): 133 | x, y = var(), var() 134 | assert unify(set(), set(), {}) == {} 135 | assert unify({x}, {x}, {}) == {} 136 | assert unify({1, 2}, {1, 2}, {}) == {} 137 | assert unify({1, x}, {1, 2}, {}) == {x: 2} 138 | assert unify({x, 2}, {1, 2}, {}) == {x: 1} 139 | assert unify({1, y, x}, {2, 1}, {x: 2}) is False 140 | 141 | 142 | def test_unify_dict(): 143 | x = var() 144 | assert unify({1: 
2}, {1: 2}, {}) == {} 145 | assert unify({1: x}, {1: x}, {}) == {} 146 | assert unify({1: 2}, {1: 3}, {}) is False 147 | assert unify({2: 2}, {1: 2}, {}) is False 148 | assert unify({2: 2, 3: 3}, {1: 2}, {}) is False 149 | assert unify({1: x}, {1: 2}, {}) == {x: 2} 150 | 151 | 152 | def test_unify_complex(): 153 | x, y = var(), var() 154 | assert unify((1, {2: 3}), (1, {2: 3}), {}) == {} 155 | assert unify((1, {2: 3}), (1, {2: 4}), {}) is False 156 | assert unify((1, {2: x}), (1, {2: 4}), {}) == {x: 4} 157 | assert unify((1, {2: x}), (1, {2: slice(1, y)}), {y: 2}) == {x: slice(1, y), y: 2} 158 | assert unify({1: (2, 3)}, {1: (2, x)}, {}) == {x: 3} 159 | assert unify({1: [2, 3]}, {1: [2, x]}, {}) == {x: 3} 160 | 161 | 162 | def test_unground_lvars(): 163 | a_lv, b_lv = var(), var() 164 | 165 | for ctor in (tuple, list, iter, set, frozenset): 166 | 167 | if ctor not in (set, frozenset): 168 | sub_ctor = list 169 | else: 170 | sub_ctor = tuple 171 | 172 | assert unground_lvars(ctor((1, 2)), {}) == set() 173 | assert unground_lvars( 174 | ctor((1, sub_ctor((a_lv, sub_ctor((b_lv, 2)), 3)))), {} 175 | ) == {a_lv, b_lv} 176 | assert unground_lvars( 177 | ctor((1, sub_ctor((a_lv, sub_ctor((b_lv, 2)), 3)))), {a_lv: 4} 178 | ) == {b_lv} 179 | assert ( 180 | unground_lvars( 181 | ctor((1, sub_ctor((a_lv, sub_ctor((b_lv, 2)), 3)))), {a_lv: 4, b_lv: 5} 182 | ) 183 | == set() 184 | ) 185 | 186 | assert isground(ctor((1, 2)), {}) 187 | assert isground(ctor((1, a_lv)), {a_lv: 2}) 188 | assert isground(ctor((a_lv, sub_ctor((b_lv, 2)), 3)), {a_lv: b_lv, b_lv: 1}) 189 | 190 | assert not isground(ctor((1, a_lv)), {a_lv: b_lv}) 191 | assert not isground(ctor((1, var())), {}) 192 | assert not isground(ctor((1, sub_ctor((a_lv, sub_ctor((b_lv, 2)), 3)))), {}) 193 | assert not isground( 194 | ctor((a_lv, sub_ctor((b_lv, 2)), 3)), {a_lv: b_lv, b_lv: var("c")} 195 | ) 196 | 197 | # Make sure that no composite elements are constructed within the 198 | # groundedness checks. 
199 | class CounterList(list): 200 | constructions = 0 201 | 202 | def __new__(cls, *args, **kwargs): 203 | cls.constructions += 1 204 | return super().__new__(cls, *args, **kwargs) 205 | 206 | test_l = CounterList([1, 2, CounterList([a_lv, CounterList([4])])]) 207 | 208 | assert CounterList.constructions == 3 209 | 210 | assert not isground(test_l, {}) 211 | assert CounterList.constructions == 3 212 | 213 | assert unground_lvars(test_l, {}) == {a_lv} 214 | 215 | 216 | def test_reify_recursion_limit(): 217 | import platform 218 | 219 | a_lv = var() 220 | 221 | b, _ = gen_long_chain(a_lv, 10) 222 | res = reify(b, {a_lv: "a"}) 223 | assert res == gen_long_chain("a", 10)[0] 224 | 225 | r_limit = sys.getrecursionlimit() 226 | 227 | try: 228 | sys.setrecursionlimit(100) 229 | 230 | b, _ = gen_long_chain(a_lv, 200) 231 | res = reify(b, {a_lv: "a"}) 232 | exp_res, _ = gen_long_chain("a", 200) 233 | 234 | if platform.python_implementation().lower() != "pypy": 235 | # CPython has stack limit issues when comparing nested lists, but 236 | # PyPy doesn't. 237 | with pytest.raises(RecursionError): 238 | assert res == exp_res 239 | 240 | sys.setrecursionlimit(300) 241 | 242 | assert res == exp_res 243 | 244 | finally: 245 | sys.setrecursionlimit(r_limit) 246 | 247 | 248 | def test_unify_recursion_limit(): 249 | a_lv = var() 250 | 251 | b, _ = gen_long_chain("a") 252 | b_var, _ = gen_long_chain(a_lv) 253 | 254 | s = unify(b, b_var, {}) 255 | 256 | assert s[a_lv] == "a" 257 | 258 | 259 | def test_unify_freeze(): 260 | 261 | # These will sometimes be in different orders after conversion to 262 | # `iter`/`list`/`tuple`! 
263 | # u = frozenset({("name", a), ("debit", b)}) 264 | # v = frozenset({("name", "Bob"), ("debit", 100)}) 265 | 266 | a, b = var("name"), var("amount") 267 | u = freeze({"name": a, "debit": b}) 268 | v = freeze({"name": "Bob", "debit": 100}) 269 | 270 | assert unify(u, v, {}) == {a: "Bob", b: 100} 271 | -------------------------------------------------------------------------------- /tests/test_match.py: -------------------------------------------------------------------------------- 1 | from pytest import mark, raises 2 | 3 | from unification.match import Dispatcher, VarDispatcher, match, ordering, supercedes 4 | from unification.variable import var 5 | 6 | 7 | def identity(x): 8 | return x 9 | 10 | 11 | def inc(x): 12 | return x + 1 13 | 14 | 15 | def dec(x): 16 | return x - 1 17 | 18 | 19 | def add(x, y): 20 | return x + y 21 | 22 | 23 | def mul(x, y): 24 | return x * y 25 | 26 | 27 | def foo(*args): 28 | return args 29 | 30 | 31 | def test_simple(): 32 | d = Dispatcher("d") 33 | 34 | d.add((1,), inc) 35 | d.add((10,), dec) 36 | 37 | assert d(1) == 2 38 | assert d(10) == 9 39 | 40 | 41 | def test_complex(): 42 | d = Dispatcher("d") 43 | x = var("x") 44 | y = var("y") 45 | 46 | d.add((1,), inc) 47 | d.add((x,), inc) 48 | d.add((x, 1), add) 49 | d.add((y, y), mul) 50 | d.add((x, (x, x)), foo) 51 | 52 | assert d(1) == 2 53 | assert d(2) == 3 54 | assert d(2, 1) == 3 55 | assert d(10, 10) == 100 56 | assert d(10, (10, 10)) == (10, (10, 10)) 57 | with raises(NotImplementedError): 58 | d(1, 2) 59 | 60 | 61 | def test_dict(): 62 | d = Dispatcher("d") 63 | x = var("x") 64 | 65 | d.add(({"x": x, "key": 1},), identity) 66 | 67 | d({"x": 1, "key": 1}) == {"x": 1, "key": 1} 68 | 69 | 70 | def test_ordering(): 71 | x = var("x") 72 | y = var("y") 73 | o = ordering([(1,), (x,), (2,), (y,), (x, x), (1, x), (x, 1), (1, 2)]) 74 | 75 | for a, b in zip(o, o[1:]): 76 | assert supercedes(a, b) or not supercedes(b, a) 77 | 78 | 79 | def test_raises_error(): 80 | d = Dispatcher("d") 
81 | 82 | with raises(NotImplementedError): 83 | d(1, 2, 3) 84 | 85 | 86 | def test_register(): 87 | d = Dispatcher("d") 88 | 89 | @d.register(1) 90 | def f(x): 91 | return 10 92 | 93 | @d.register(2) 94 | def f(x): 95 | return 20 96 | 97 | assert d(1) == 10 98 | assert d(2) == 20 99 | 100 | 101 | def test_dispatcher(): 102 | x = var("x") 103 | 104 | @match(1) 105 | def fib(x): 106 | return 1 107 | 108 | @match(0) 109 | def fib(x): 110 | return 0 111 | 112 | @match(x) 113 | def fib(n): 114 | return fib(n - 1) + fib(n - 2) 115 | 116 | assert [fib(i) for i in range(10)] == [0, 1, 1, 2, 3, 5, 8, 13, 21, 34] 117 | 118 | 119 | def test_supercedes(): 120 | x, y, z = var("x"), var("y"), var("z") 121 | assert not supercedes(1, 2) 122 | assert supercedes(1, x) 123 | assert not supercedes(x, 1) 124 | assert supercedes((1, 2), (1, x)) 125 | assert not supercedes((1, x), (1, 2)) 126 | assert supercedes((1, x), (y, z)) 127 | assert supercedes(x, y) 128 | assert supercedes((1, (x, 3)), (1, y)) 129 | assert not supercedes((1, y), (1, (x, 3))) 130 | 131 | 132 | @mark.xfail() 133 | def test_supercedes_more(): 134 | x, y = var("x"), var("y") 135 | assert supercedes((1, x), (y, y)) 136 | assert supercedes((1, x), (x, x)) 137 | 138 | 139 | def test_VarDispatcher(): 140 | d = VarDispatcher("d") 141 | x, y, z = var("x"), var("y"), var("z") 142 | 143 | @d.register(x, y) 144 | def swap(y, x): 145 | return y, x 146 | 147 | assert d(1, 2) == (2, 1) 148 | 149 | @d.register((1, z), 2) 150 | def foo(z): 151 | return z 152 | 153 | assert d((1, 3), 2) == 3 154 | -------------------------------------------------------------------------------- /tests/test_more.py: -------------------------------------------------------------------------------- 1 | import ast 2 | from collections.abc import Mapping 3 | 4 | from unification import var 5 | from unification.core import _reify, _unify, reify, stream_eval, unify 6 | from unification.more import _reify_object, _unify_object, unifiable 7 | 8 | 9 | class 
Foo(object): 10 | def __init__(self, a, b): 11 | self.a = a 12 | self.b = b 13 | 14 | def __eq__(self, other): 15 | return type(self) == type(other) and (self.a, self.b) == (other.a, other.b) 16 | 17 | 18 | class Bar(object): 19 | def __init__(self, c): 20 | self.c = c 21 | 22 | def __eq__(self, other): 23 | return type(self) == type(other) and self.c == other.c 24 | 25 | 26 | def test_unify_object(): 27 | x = var() 28 | assert stream_eval(_unify_object(Foo(1, 2), Foo(1, 2), {})) == {} 29 | assert stream_eval(_unify_object(Foo(1, 2), Foo(1, 3), {})) is False 30 | assert stream_eval(_unify_object(Foo(1, 2), Foo(1, x), {})) == {x: 2} 31 | 32 | 33 | def test_unify_nonstandard_object(): 34 | _unify.add((ast.AST, ast.AST, Mapping), _unify_object) 35 | x = var() 36 | assert unify(ast.Num(n=1), ast.Num(n=1), {}) == {} 37 | assert unify(ast.Num(n=1), ast.Num(n=2), {}) is False 38 | assert unify(ast.Num(n=1), ast.Num(n=x), {}) == {x: 1} 39 | 40 | 41 | def test_reify_object(): 42 | x = var() 43 | obj = stream_eval(_reify_object(Foo(1, x), {x: 4})) 44 | assert obj.a == 1 45 | assert obj.b == 4 46 | 47 | f = Foo(1, 2) 48 | assert stream_eval(_reify_object(f, {})) is f 49 | 50 | 51 | def test_reify_nonstandard_object(): 52 | _reify.add((ast.AST, Mapping), _reify_object) 53 | x = var() 54 | assert reify(ast.Num(n=1), {}).n == 1 55 | assert reify(ast.Num(n=x), {}).n == x 56 | assert reify(ast.Num(n=x), {x: 2}).n == 2 57 | 58 | 59 | def test_reify_slots(): 60 | class SlotsObject(object): 61 | __slots__ = ["myattr"] 62 | 63 | def __init__(self, myattr): 64 | self.myattr = myattr 65 | 66 | def __eq__(self, other): 67 | return type(self) == type(other) and self.myattr == other.myattr 68 | 69 | x = var() 70 | s = {x: 1} 71 | e = SlotsObject(x) 72 | assert stream_eval(_reify_object(e, s)) == SlotsObject(1) 73 | assert stream_eval(_reify_object(SlotsObject(1), s)) == SlotsObject(1) 74 | 75 | 76 | def test_objects_full(): 77 | _unify.add((Foo, Foo, Mapping), _unify_object) 78 | 
_unify.add((Bar, Bar, Mapping), _unify_object) 79 | _reify.add((Foo, Mapping), _reify_object) 80 | _reify.add((Bar, Mapping), _reify_object) 81 | 82 | x, y = var(), var() 83 | assert unify(Foo(1, 2), Bar(1), {}) is False 84 | assert unify(Foo(1, Bar(2)), Foo(1, Bar(x)), {}) == {x: 2} 85 | assert reify(Foo(x, Bar(Foo(y, 3))), {x: 1, y: 2}) == Foo(1, Bar(Foo(2, 3))) 86 | 87 | class SubFoo(Foo): 88 | pass 89 | 90 | assert unify(Foo(1, 2), SubFoo(1, 2), {}) is False 91 | 92 | 93 | @unifiable 94 | class A(object): 95 | def __init__(self, a, b): 96 | self.a = a 97 | self.b = b 98 | 99 | def __eq__(self, other): 100 | return type(self) == type(other) and self.__dict__ == other.__dict__ 101 | 102 | 103 | def test_unifiable_dict(): 104 | x = var() 105 | f = A(1, 2) 106 | g = A(1, x) 107 | assert unify(f, g, {}) == {x: 2} 108 | assert reify(g, {x: 2}) == f 109 | 110 | 111 | @unifiable 112 | class Aslot(object): 113 | __slots__ = ("a", "b") 114 | 115 | def __init__(self, a, b): 116 | self.a = a 117 | self.b = b 118 | 119 | def __eq__(self, other): 120 | return type(self) == type(other) and all( 121 | a == b for a, b in zip(self.__slots__, other.__slots__) 122 | ) 123 | 124 | 125 | def test_unifiable_slots(): 126 | x = var() 127 | f = Aslot(1, 2) 128 | g = Aslot(1, x) 129 | assert unify(f, g, {}) == {x: 2} 130 | assert reify(g, {x: 2}) == f 131 | -------------------------------------------------------------------------------- /tests/test_utils.py: -------------------------------------------------------------------------------- 1 | from unification.utils import freeze, transitive_get 2 | from unification.variable import var 3 | 4 | 5 | def test_transitive_get(): 6 | x, y = var(), var() 7 | assert transitive_get(x, {x: y, y: 1}) == 1 8 | assert transitive_get({1: 2}, {x: y, y: 1}) == {1: 2} 9 | # Cycles are not handled 10 | # assert transitive_get(x, {x: x}) == x 11 | # assert transitive_get(x, {x: y, y: x}) == x 12 | 13 | 14 | def test_freeze(): 15 | assert freeze({1: [2, 3]}) 
Generate a nested list of length `N` with the last element set to `last_elem`.
25 | 26 | """ 27 | b_struct = None 28 | if N is None: 29 | N = sys.getrecursionlimit() 30 | lvars = {} 31 | for i in range(N - 1, 0, -1): 32 | i_el = var(i) if use_lvars else i 33 | if use_lvars: 34 | lvars[i_el] = i 35 | b_struct = [i_el, last_elem if i == N - 1 else b_struct] 36 | return b_struct, lvars 37 | -------------------------------------------------------------------------------- /tox.ini: -------------------------------------------------------------------------------- 1 | [tox] 2 | install_command = pip install {opts} {packages} 3 | envlist = py36,py37,pypy 4 | 5 | [testenv] 6 | usedevelop = True 7 | commands = 8 | pytest {posargs:--with-doctest --with-coverage --cover-package=unification} -v 9 | deps = 10 | pytest-coverage 11 | pytest 12 | -------------------------------------------------------------------------------- /unification/__init__.py: -------------------------------------------------------------------------------- 1 | from ._version import get_versions 2 | from .core import assoc, reify, unify 3 | from .more import unifiable 4 | from .variable import Var, isvar, var, variables, vars 5 | 6 | __version__ = get_versions()["version"] 7 | del get_versions 8 | -------------------------------------------------------------------------------- /unification/_version.py: -------------------------------------------------------------------------------- 1 | 2 | # This file helps to compute a version number in source trees obtained from 3 | # git-archive tarball (such as those provided by githubs download-from-tag 4 | # feature). Distribution tarballs (built by setup.py sdist) and build 5 | # directories (produced by setup.py build) will contain a much shorter file 6 | # that just contains the computed version number. 7 | 8 | # This file is released into the public domain. 
Generated by 9 | # versioneer-0.21 (https://github.com/python-versioneer/python-versioneer) 10 | 11 | """Git implementation of _version.py.""" 12 | 13 | import errno 14 | import os 15 | import re 16 | import subprocess 17 | import sys 18 | from typing import Callable, Dict 19 | 20 | 21 | def get_keywords(): 22 | """Get the keywords needed to look up the version information.""" 23 | # these strings will be replaced by git during git-archive. 24 | # setup.py/versioneer.py will grep for the variable names, so they must 25 | # each be defined on a line of their own. _version.py will just call 26 | # get_keywords(). 27 | git_refnames = " (HEAD -> main)" 28 | git_full = "5e3e4aba1a4f63c6a4abe7d8a5c6fbbce36fa488" 29 | git_date = "2024-09-03 17:30:56 -0500" 30 | keywords = {"refnames": git_refnames, "full": git_full, "date": git_date} 31 | return keywords 32 | 33 | 34 | class VersioneerConfig: 35 | """Container for Versioneer configuration parameters.""" 36 | 37 | 38 | def get_config(): 39 | """Create, populate and return the VersioneerConfig() object.""" 40 | # these strings are filled in when 'setup.py versioneer' creates 41 | # _version.py 42 | cfg = VersioneerConfig() 43 | cfg.VCS = "git" 44 | cfg.style = "pep440" 45 | cfg.tag_prefix = "v" 46 | cfg.parentdir_prefix = "unification-" 47 | cfg.versionfile_source = "unification/_version.py" 48 | cfg.verbose = False 49 | return cfg 50 | 51 | 52 | class NotThisMethod(Exception): 53 | """Exception raised if a method is not valid for the current scenario.""" 54 | 55 | 56 | LONG_VERSION_PY: Dict[str, str] = {} 57 | HANDLERS: Dict[str, Dict[str, Callable]] = {} 58 | 59 | 60 | def register_vcs_handler(vcs, method): # decorator 61 | """Create decorator to mark a method as the handler of a VCS.""" 62 | def decorate(f): 63 | """Store f in HANDLERS[vcs][method].""" 64 | if vcs not in HANDLERS: 65 | HANDLERS[vcs] = {} 66 | HANDLERS[vcs][method] = f 67 | return f 68 | return decorate 69 | 70 | 71 | def run_command(commands, args, 
cwd=None, verbose=False, hide_stderr=False, 72 | env=None): 73 | """Call the given command(s).""" 74 | assert isinstance(commands, list) 75 | process = None 76 | for command in commands: 77 | try: 78 | dispcmd = str([command] + args) 79 | # remember shell=False, so use git.cmd on windows, not just git 80 | process = subprocess.Popen([command] + args, cwd=cwd, env=env, 81 | stdout=subprocess.PIPE, 82 | stderr=(subprocess.PIPE if hide_stderr 83 | else None)) 84 | break 85 | except OSError: 86 | e = sys.exc_info()[1] 87 | if e.errno == errno.ENOENT: 88 | continue 89 | if verbose: 90 | print("unable to run %s" % dispcmd) 91 | print(e) 92 | return None, None 93 | else: 94 | if verbose: 95 | print("unable to find command, tried %s" % (commands,)) 96 | return None, None 97 | stdout = process.communicate()[0].strip().decode() 98 | if process.returncode != 0: 99 | if verbose: 100 | print("unable to run %s (error)" % dispcmd) 101 | print("stdout was %s" % stdout) 102 | return None, process.returncode 103 | return stdout, process.returncode 104 | 105 | 106 | def versions_from_parentdir(parentdir_prefix, root, verbose): 107 | """Try to determine the version from the parent directory name. 108 | 109 | Source tarballs conventionally unpack into a directory that includes both 110 | the project name and a version string. 
We will also support searching up 111 | two directory levels for an appropriately named parent directory 112 | """ 113 | rootdirs = [] 114 | 115 | for _ in range(3): 116 | dirname = os.path.basename(root) 117 | if dirname.startswith(parentdir_prefix): 118 | return {"version": dirname[len(parentdir_prefix):], 119 | "full-revisionid": None, 120 | "dirty": False, "error": None, "date": None} 121 | rootdirs.append(root) 122 | root = os.path.dirname(root) # up a level 123 | 124 | if verbose: 125 | print("Tried directories %s but none started with prefix %s" % 126 | (str(rootdirs), parentdir_prefix)) 127 | raise NotThisMethod("rootdir doesn't start with parentdir_prefix") 128 | 129 | 130 | @register_vcs_handler("git", "get_keywords") 131 | def git_get_keywords(versionfile_abs): 132 | """Extract version information from the given file.""" 133 | # the code embedded in _version.py can just fetch the value of these 134 | # keywords. When used from setup.py, we don't want to import _version.py, 135 | # so we do it with a regexp instead. This function is not used from 136 | # _version.py. 
137 | keywords = {} 138 | try: 139 | with open(versionfile_abs, "r") as fobj: 140 | for line in fobj: 141 | if line.strip().startswith("git_refnames ="): 142 | mo = re.search(r'=\s*"(.*)"', line) 143 | if mo: 144 | keywords["refnames"] = mo.group(1) 145 | if line.strip().startswith("git_full ="): 146 | mo = re.search(r'=\s*"(.*)"', line) 147 | if mo: 148 | keywords["full"] = mo.group(1) 149 | if line.strip().startswith("git_date ="): 150 | mo = re.search(r'=\s*"(.*)"', line) 151 | if mo: 152 | keywords["date"] = mo.group(1) 153 | except OSError: 154 | pass 155 | return keywords 156 | 157 | 158 | @register_vcs_handler("git", "keywords") 159 | def git_versions_from_keywords(keywords, tag_prefix, verbose): 160 | """Get version information from git keywords.""" 161 | if "refnames" not in keywords: 162 | raise NotThisMethod("Short version file found") 163 | date = keywords.get("date") 164 | if date is not None: 165 | # Use only the last line. Previous lines may contain GPG signature 166 | # information. 167 | date = date.splitlines()[-1] 168 | 169 | # git-2.2.0 added "%cI", which expands to an ISO-8601 -compliant 170 | # datestamp. However we prefer "%ci" (which expands to an "ISO-8601 171 | # -like" string, which we must then edit to make compliant), because 172 | # it's been around since git-1.5.3, and it's too difficult to 173 | # discover which version we're using, or to work around using an 174 | # older one. 175 | date = date.strip().replace(" ", "T", 1).replace(" ", "", 1) 176 | refnames = keywords["refnames"].strip() 177 | if refnames.startswith("$Format"): 178 | if verbose: 179 | print("keywords are unexpanded, not using") 180 | raise NotThisMethod("unexpanded keywords, not a git-archive tarball") 181 | refs = {r.strip() for r in refnames.strip("()").split(",")} 182 | # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of 183 | # just "foo-1.0". If we see a "tag: " prefix, prefer those. 
184 | TAG = "tag: " 185 | tags = {r[len(TAG):] for r in refs if r.startswith(TAG)} 186 | if not tags: 187 | # Either we're using git < 1.8.3, or there really are no tags. We use 188 | # a heuristic: assume all version tags have a digit. The old git %d 189 | # expansion behaves like git log --decorate=short and strips out the 190 | # refs/heads/ and refs/tags/ prefixes that would let us distinguish 191 | # between branches and tags. By ignoring refnames without digits, we 192 | # filter out many common branch names like "release" and 193 | # "stabilization", as well as "HEAD" and "master". 194 | tags = {r for r in refs if re.search(r'\d', r)} 195 | if verbose: 196 | print("discarding '%s', no digits" % ",".join(refs - tags)) 197 | if verbose: 198 | print("likely tags: %s" % ",".join(sorted(tags))) 199 | for ref in sorted(tags): 200 | # sorting will prefer e.g. "2.0" over "2.0rc1" 201 | if ref.startswith(tag_prefix): 202 | r = ref[len(tag_prefix):] 203 | # Filter out refs that exactly match prefix or that don't start 204 | # with a number once the prefix is stripped (mostly a concern 205 | # when prefix is '') 206 | if not re.match(r'\d', r): 207 | continue 208 | if verbose: 209 | print("picking %s" % r) 210 | return {"version": r, 211 | "full-revisionid": keywords["full"].strip(), 212 | "dirty": False, "error": None, 213 | "date": date} 214 | # no suitable tags, so version is "0+unknown", but full hex is still there 215 | if verbose: 216 | print("no suitable tags, using unknown + full revision id") 217 | return {"version": "0+unknown", 218 | "full-revisionid": keywords["full"].strip(), 219 | "dirty": False, "error": "no suitable tags", "date": None} 220 | 221 | 222 | @register_vcs_handler("git", "pieces_from_vcs") 223 | def git_pieces_from_vcs(tag_prefix, root, verbose, runner=run_command): 224 | """Get version from 'git describe' in the root of the source tree. 
225 | 226 | This only gets called if the git-archive 'subst' keywords were *not* 227 | expanded, and _version.py hasn't already been rewritten with a short 228 | version string, meaning we're inside a checked out source tree. 229 | """ 230 | GITS = ["git"] 231 | TAG_PREFIX_REGEX = "*" 232 | if sys.platform == "win32": 233 | GITS = ["git.cmd", "git.exe"] 234 | TAG_PREFIX_REGEX = r"\*" 235 | 236 | _, rc = runner(GITS, ["rev-parse", "--git-dir"], cwd=root, 237 | hide_stderr=True) 238 | if rc != 0: 239 | if verbose: 240 | print("Directory %s not under git control" % root) 241 | raise NotThisMethod("'git rev-parse --git-dir' returned error") 242 | 243 | # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty] 244 | # if there isn't one, this yields HEX[-dirty] (no NUM) 245 | describe_out, rc = runner(GITS, ["describe", "--tags", "--dirty", 246 | "--always", "--long", 247 | "--match", 248 | "%s%s" % (tag_prefix, TAG_PREFIX_REGEX)], 249 | cwd=root) 250 | # --long was added in git-1.5.5 251 | if describe_out is None: 252 | raise NotThisMethod("'git describe' failed") 253 | describe_out = describe_out.strip() 254 | full_out, rc = runner(GITS, ["rev-parse", "HEAD"], cwd=root) 255 | if full_out is None: 256 | raise NotThisMethod("'git rev-parse' failed") 257 | full_out = full_out.strip() 258 | 259 | pieces = {} 260 | pieces["long"] = full_out 261 | pieces["short"] = full_out[:7] # maybe improved later 262 | pieces["error"] = None 263 | 264 | branch_name, rc = runner(GITS, ["rev-parse", "--abbrev-ref", "HEAD"], 265 | cwd=root) 266 | # --abbrev-ref was added in git-1.6.3 267 | if rc != 0 or branch_name is None: 268 | raise NotThisMethod("'git rev-parse --abbrev-ref' returned error") 269 | branch_name = branch_name.strip() 270 | 271 | if branch_name == "HEAD": 272 | # If we aren't exactly on a branch, pick a branch which represents 273 | # the current commit. If all else fails, we are on a branchless 274 | # commit. 
275 | branches, rc = runner(GITS, ["branch", "--contains"], cwd=root) 276 | # --contains was added in git-1.5.4 277 | if rc != 0 or branches is None: 278 | raise NotThisMethod("'git branch --contains' returned error") 279 | branches = branches.split("\n") 280 | 281 | # Remove the first line if we're running detached 282 | if "(" in branches[0]: 283 | branches.pop(0) 284 | 285 | # Strip off the leading "* " from the list of branches. 286 | branches = [branch[2:] for branch in branches] 287 | if "master" in branches: 288 | branch_name = "master" 289 | elif not branches: 290 | branch_name = None 291 | else: 292 | # Pick the first branch that is returned. Good or bad. 293 | branch_name = branches[0] 294 | 295 | pieces["branch"] = branch_name 296 | 297 | # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty] 298 | # TAG might have hyphens. 299 | git_describe = describe_out 300 | 301 | # look for -dirty suffix 302 | dirty = git_describe.endswith("-dirty") 303 | pieces["dirty"] = dirty 304 | if dirty: 305 | git_describe = git_describe[:git_describe.rindex("-dirty")] 306 | 307 | # now we have TAG-NUM-gHEX or HEX 308 | 309 | if "-" in git_describe: 310 | # TAG-NUM-gHEX 311 | mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe) 312 | if not mo: 313 | # unparsable. Maybe git-describe is misbehaving? 
314 | pieces["error"] = ("unable to parse git-describe output: '%s'" 315 | % describe_out) 316 | return pieces 317 | 318 | # tag 319 | full_tag = mo.group(1) 320 | if not full_tag.startswith(tag_prefix): 321 | if verbose: 322 | fmt = "tag '%s' doesn't start with prefix '%s'" 323 | print(fmt % (full_tag, tag_prefix)) 324 | pieces["error"] = ("tag '%s' doesn't start with prefix '%s'" 325 | % (full_tag, tag_prefix)) 326 | return pieces 327 | pieces["closest-tag"] = full_tag[len(tag_prefix):] 328 | 329 | # distance: number of commits since tag 330 | pieces["distance"] = int(mo.group(2)) 331 | 332 | # commit: short hex revision ID 333 | pieces["short"] = mo.group(3) 334 | 335 | else: 336 | # HEX: no tags 337 | pieces["closest-tag"] = None 338 | count_out, rc = runner(GITS, ["rev-list", "HEAD", "--count"], cwd=root) 339 | pieces["distance"] = int(count_out) # total number of commits 340 | 341 | # commit date: see ISO-8601 comment in git_versions_from_keywords() 342 | date = runner(GITS, ["show", "-s", "--format=%ci", "HEAD"], cwd=root)[0].strip() 343 | # Use only the last line. Previous lines may contain GPG signature 344 | # information. 345 | date = date.splitlines()[-1] 346 | pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1) 347 | 348 | return pieces 349 | 350 | 351 | def plus_or_dot(pieces): 352 | """Return a + if we don't already have one, else return a .""" 353 | if "+" in pieces.get("closest-tag", ""): 354 | return "." 355 | return "+" 356 | 357 | 358 | def render_pep440(pieces): 359 | """Build up version string, with post-release "local version identifier". 360 | 361 | Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you 362 | get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty 363 | 364 | Exceptions: 365 | 1: no tags. git_describe was just HEX. 
0+untagged.DISTANCE.gHEX[.dirty] 366 | """ 367 | if pieces["closest-tag"]: 368 | rendered = pieces["closest-tag"] 369 | if pieces["distance"] or pieces["dirty"]: 370 | rendered += plus_or_dot(pieces) 371 | rendered += "%d.g%s" % (pieces["distance"], pieces["short"]) 372 | if pieces["dirty"]: 373 | rendered += ".dirty" 374 | else: 375 | # exception #1 376 | rendered = "0+untagged.%d.g%s" % (pieces["distance"], 377 | pieces["short"]) 378 | if pieces["dirty"]: 379 | rendered += ".dirty" 380 | return rendered 381 | 382 | 383 | def render_pep440_branch(pieces): 384 | """TAG[[.dev0]+DISTANCE.gHEX[.dirty]] . 385 | 386 | The ".dev0" means not master branch. Note that .dev0 sorts backwards 387 | (a feature branch will appear "older" than the master branch). 388 | 389 | Exceptions: 390 | 1: no tags. 0[.dev0]+untagged.DISTANCE.gHEX[.dirty] 391 | """ 392 | if pieces["closest-tag"]: 393 | rendered = pieces["closest-tag"] 394 | if pieces["distance"] or pieces["dirty"]: 395 | if pieces["branch"] != "master": 396 | rendered += ".dev0" 397 | rendered += plus_or_dot(pieces) 398 | rendered += "%d.g%s" % (pieces["distance"], pieces["short"]) 399 | if pieces["dirty"]: 400 | rendered += ".dirty" 401 | else: 402 | # exception #1 403 | rendered = "0" 404 | if pieces["branch"] != "master": 405 | rendered += ".dev0" 406 | rendered += "+untagged.%d.g%s" % (pieces["distance"], 407 | pieces["short"]) 408 | if pieces["dirty"]: 409 | rendered += ".dirty" 410 | return rendered 411 | 412 | 413 | def pep440_split_post(ver): 414 | """Split pep440 version string at the post-release segment. 415 | 416 | Returns the release segments before the post-release and the 417 | post-release version number (or -1 if no post-release segment is present). 418 | """ 419 | vc = str.split(ver, ".post") 420 | return vc[0], int(vc[1] or 0) if len(vc) == 2 else None 421 | 422 | 423 | def render_pep440_pre(pieces): 424 | """TAG[.postN.devDISTANCE] -- No -dirty. 425 | 426 | Exceptions: 427 | 1: no tags. 
0.post0.devDISTANCE 428 | """ 429 | if pieces["closest-tag"]: 430 | if pieces["distance"]: 431 | # update the post release segment 432 | tag_version, post_version = pep440_split_post(pieces["closest-tag"]) 433 | rendered = tag_version 434 | if post_version is not None: 435 | rendered += ".post%d.dev%d" % (post_version+1, pieces["distance"]) 436 | else: 437 | rendered += ".post0.dev%d" % (pieces["distance"]) 438 | else: 439 | # no commits, use the tag as the version 440 | rendered = pieces["closest-tag"] 441 | else: 442 | # exception #1 443 | rendered = "0.post0.dev%d" % pieces["distance"] 444 | return rendered 445 | 446 | 447 | def render_pep440_post(pieces): 448 | """TAG[.postDISTANCE[.dev0]+gHEX] . 449 | 450 | The ".dev0" means dirty. Note that .dev0 sorts backwards 451 | (a dirty tree will appear "older" than the corresponding clean one), 452 | but you shouldn't be releasing software with -dirty anyways. 453 | 454 | Exceptions: 455 | 1: no tags. 0.postDISTANCE[.dev0] 456 | """ 457 | if pieces["closest-tag"]: 458 | rendered = pieces["closest-tag"] 459 | if pieces["distance"] or pieces["dirty"]: 460 | rendered += ".post%d" % pieces["distance"] 461 | if pieces["dirty"]: 462 | rendered += ".dev0" 463 | rendered += plus_or_dot(pieces) 464 | rendered += "g%s" % pieces["short"] 465 | else: 466 | # exception #1 467 | rendered = "0.post%d" % pieces["distance"] 468 | if pieces["dirty"]: 469 | rendered += ".dev0" 470 | rendered += "+g%s" % pieces["short"] 471 | return rendered 472 | 473 | 474 | def render_pep440_post_branch(pieces): 475 | """TAG[.postDISTANCE[.dev0]+gHEX[.dirty]] . 476 | 477 | The ".dev0" means not master branch. 478 | 479 | Exceptions: 480 | 1: no tags. 
0.postDISTANCE[.dev0]+gHEX[.dirty] 481 | """ 482 | if pieces["closest-tag"]: 483 | rendered = pieces["closest-tag"] 484 | if pieces["distance"] or pieces["dirty"]: 485 | rendered += ".post%d" % pieces["distance"] 486 | if pieces["branch"] != "master": 487 | rendered += ".dev0" 488 | rendered += plus_or_dot(pieces) 489 | rendered += "g%s" % pieces["short"] 490 | if pieces["dirty"]: 491 | rendered += ".dirty" 492 | else: 493 | # exception #1 494 | rendered = "0.post%d" % pieces["distance"] 495 | if pieces["branch"] != "master": 496 | rendered += ".dev0" 497 | rendered += "+g%s" % pieces["short"] 498 | if pieces["dirty"]: 499 | rendered += ".dirty" 500 | return rendered 501 | 502 | 503 | def render_pep440_old(pieces): 504 | """TAG[.postDISTANCE[.dev0]] . 505 | 506 | The ".dev0" means dirty. 507 | 508 | Exceptions: 509 | 1: no tags. 0.postDISTANCE[.dev0] 510 | """ 511 | if pieces["closest-tag"]: 512 | rendered = pieces["closest-tag"] 513 | if pieces["distance"] or pieces["dirty"]: 514 | rendered += ".post%d" % pieces["distance"] 515 | if pieces["dirty"]: 516 | rendered += ".dev0" 517 | else: 518 | # exception #1 519 | rendered = "0.post%d" % pieces["distance"] 520 | if pieces["dirty"]: 521 | rendered += ".dev0" 522 | return rendered 523 | 524 | 525 | def render_git_describe(pieces): 526 | """TAG[-DISTANCE-gHEX][-dirty]. 527 | 528 | Like 'git describe --tags --dirty --always'. 529 | 530 | Exceptions: 531 | 1: no tags. HEX[-dirty] (note: no 'g' prefix) 532 | """ 533 | if pieces["closest-tag"]: 534 | rendered = pieces["closest-tag"] 535 | if pieces["distance"]: 536 | rendered += "-%d-g%s" % (pieces["distance"], pieces["short"]) 537 | else: 538 | # exception #1 539 | rendered = pieces["short"] 540 | if pieces["dirty"]: 541 | rendered += "-dirty" 542 | return rendered 543 | 544 | 545 | def render_git_describe_long(pieces): 546 | """TAG-DISTANCE-gHEX[-dirty]. 547 | 548 | Like 'git describe --tags --dirty --always -long'. 549 | The distance/hash is unconditional. 
550 | 551 | Exceptions: 552 | 1: no tags. HEX[-dirty] (note: no 'g' prefix) 553 | """ 554 | if pieces["closest-tag"]: 555 | rendered = pieces["closest-tag"] 556 | rendered += "-%d-g%s" % (pieces["distance"], pieces["short"]) 557 | else: 558 | # exception #1 559 | rendered = pieces["short"] 560 | if pieces["dirty"]: 561 | rendered += "-dirty" 562 | return rendered 563 | 564 | 565 | def render(pieces, style): 566 | """Render the given version pieces into the requested style.""" 567 | if pieces["error"]: 568 | return {"version": "unknown", 569 | "full-revisionid": pieces.get("long"), 570 | "dirty": None, 571 | "error": pieces["error"], 572 | "date": None} 573 | 574 | if not style or style == "default": 575 | style = "pep440" # the default 576 | 577 | if style == "pep440": 578 | rendered = render_pep440(pieces) 579 | elif style == "pep440-branch": 580 | rendered = render_pep440_branch(pieces) 581 | elif style == "pep440-pre": 582 | rendered = render_pep440_pre(pieces) 583 | elif style == "pep440-post": 584 | rendered = render_pep440_post(pieces) 585 | elif style == "pep440-post-branch": 586 | rendered = render_pep440_post_branch(pieces) 587 | elif style == "pep440-old": 588 | rendered = render_pep440_old(pieces) 589 | elif style == "git-describe": 590 | rendered = render_git_describe(pieces) 591 | elif style == "git-describe-long": 592 | rendered = render_git_describe_long(pieces) 593 | else: 594 | raise ValueError("unknown style '%s'" % style) 595 | 596 | return {"version": rendered, "full-revisionid": pieces["long"], 597 | "dirty": pieces["dirty"], "error": None, 598 | "date": pieces.get("date")} 599 | 600 | 601 | def get_versions(): 602 | """Get version information or return default if unable to do so.""" 603 | # I am in _version.py, which lives at ROOT/VERSIONFILE_SOURCE. If we have 604 | # __file__, we can work backwards from there to the root. 
# object from its constituent reifications (if any).
The yielded generators need to be evaluated by the caller and the fully reified results "sent" back to this generator so that it can finish constructing the reified iterable.
90 | """ 91 | res = [] 92 | 93 | if isinstance(t, Mapping): 94 | t = t.items() 95 | 96 | for y in t: 97 | r = _reify(y, s) 98 | if isinstance(r, Generator): 99 | r = yield r 100 | res.append(r) 101 | 102 | yield construction_sentinel 103 | 104 | yield ctor(res) 105 | 106 | 107 | for seq, ctor in ( 108 | (tuple, tuple), 109 | (list, list), 110 | (Iterator, iter), 111 | (set, set), 112 | (frozenset, frozenset), 113 | ): 114 | _reify.add((seq, Mapping), partial(_reify_Iterable_ctor, ctor)) 115 | 116 | 117 | for seq in (dict, OrderedDict): 118 | _reify.add((seq, Mapping), partial(_reify_Iterable_ctor, seq)) 119 | 120 | 121 | @_reify.register(slice, Mapping) 122 | def _reify_slice(o, s): 123 | start = yield _reify(o.start, s) 124 | stop = yield _reify(o.stop, s) 125 | step = yield _reify(o.step, s) 126 | 127 | yield construction_sentinel 128 | 129 | yield slice(start, stop, step) 130 | 131 | 132 | @dispatch(object, Mapping) 133 | def reify(e, s): 134 | """Replace logic variables in a term, `e`, with their substitutions in `s`. 
135 | 136 | >>> x, y = var(), var() 137 | >>> e = (1, x, (3, y)) 138 | >>> s = {x: 2, y: 4} 139 | >>> reify(e, s) 140 | (1, 2, (3, 4)) 141 | 142 | >>> e = {1: x, 3: (y, 5)} 143 | >>> reify(e, s) 144 | {1: 2, 3: (4, 5)} 145 | """ 146 | 147 | if len(s) == 0: 148 | return e 149 | 150 | return stream_eval(_reify(e, s)) 151 | 152 | 153 | @dispatch(object, object, Mapping) 154 | def _unify(u, v, s): 155 | return s if u == v else False 156 | 157 | 158 | @_unify.register(Var, (Var, object), Mapping) 159 | def _unify_Var_object(u, v, s): 160 | u_w = walk(u, s) 161 | 162 | if isvar(v): 163 | v_w = walk(v, s) 164 | else: 165 | v_w = v 166 | 167 | if u_w == v_w: 168 | yield s 169 | elif isvar(u_w): 170 | yield assoc(s, u_w, v_w) 171 | elif isvar(v_w): 172 | yield assoc(s, v_w, u_w) 173 | else: 174 | yield _unify(u_w, v_w, s) 175 | 176 | 177 | _unify.add((object, Var, Mapping), _unify_Var_object) 178 | 179 | 180 | def _unify_Iterable(u, v, s): 181 | len_u = length_hint(u, -1) 182 | len_v = length_hint(v, -1) 183 | 184 | if len_u != len_v: 185 | yield False 186 | return 187 | 188 | for uu, vv in zip(u, v): 189 | s = yield _unify(uu, vv, s) 190 | if s is False: 191 | return 192 | else: 193 | yield s 194 | 195 | 196 | for seq in (tuple, list, Iterator): 197 | _unify.add((seq, seq, Mapping), _unify_Iterable) 198 | 199 | 200 | @_unify.register(Set, Set, Mapping) 201 | def _unify_Set(u, v, s): 202 | i = u & v 203 | u = u - i 204 | v = v - i 205 | yield _unify(iter(u), iter(v), s) 206 | 207 | 208 | @_unify.register(Mapping, Mapping, Mapping) 209 | def _unify_Mapping(u, v, s): 210 | if len(u) != len(v): 211 | yield False 212 | return 213 | 214 | for key, uval in u.items(): 215 | if key not in v: 216 | yield False 217 | return 218 | 219 | s = yield _unify(uval, v[key], s) 220 | 221 | if s is False: 222 | return 223 | else: 224 | yield s 225 | 226 | 227 | @_unify.register(slice, slice, Mapping) 228 | def _unify_slice(u, v, s): 229 | s = yield _unify(u.start, v.start, s) 230 | if s is 
False: 231 | return 232 | s = yield _unify(u.stop, v.stop, s) 233 | if s is False: 234 | return 235 | s = yield _unify(u.step, v.step, s) 236 | 237 | 238 | @dispatch(object, object, Mapping) 239 | def unify(u, v, s): 240 | """Find substitution so that ``u == v`` while satisfying `s`. 241 | 242 | >>> x = var('x') 243 | >>> unify((1, x), (1, 2), {}) 244 | {~x: 2} 245 | """ 246 | if u is v: 247 | return s 248 | 249 | return stream_eval(_unify(u, v, s)) 250 | 251 | 252 | @unify.register(object, object) 253 | def unify_NoMap(u, v): 254 | return unify(u, v, {}) 255 | 256 | 257 | def unground_lvars(u, s): 258 | """Return the unground logic variables from a term and state.""" 259 | 260 | lvars = set() 261 | 262 | def lvar_filter(z, r): 263 | nonlocal lvars 264 | 265 | if isvar(r): 266 | lvars.add(r) 267 | 268 | if r is construction_sentinel: 269 | z.close() 270 | 271 | # Remove this generator from the stack. 272 | raise StopIteration() 273 | 274 | z = _reify(u, s) 275 | stream_eval(z, lvar_filter) 276 | 277 | return lvars 278 | 279 | 280 | def isground(u, s): 281 | """Determine whether or not `u` contains an unground logic variable under mappings `s`.""" # noqa: E501 282 | 283 | def lvar_filter(z, r): 284 | 285 | if isvar(r): 286 | raise UngroundLVarException() 287 | elif r is construction_sentinel: 288 | z.close() 289 | 290 | # Remove this generator from the stack. 291 | raise StopIteration() 292 | 293 | try: 294 | z = _reify(u, s) 295 | stream_eval(z, lvar_filter) 296 | except UngroundLVarException: 297 | return False 298 | 299 | return True 300 | 301 | 302 | def debug_unify(u, v, s): # pragma: no cover 303 | """Stop in the debugger when unify fails. 
304 | 305 | You can inspect the generator-based stack by looking through the 306 | generator frames in the `stack` variable in `stream_eval`: 307 | 308 | (Pdb) up 309 | > .../unification/unification/core.py(39)stream_eval() 310 | -> _ = res_filter(z, z_out) 311 | (Pdb) stack[-2].gi_frame.f_locals 312 | {'u': , 313 | 'v': , 314 | 's': {}, 315 | 'len_u': 2, 316 | 'len_v': 2, 317 | 'uu': ('debit', ~amount), 318 | 'vv': ('name', 'Bob')} 319 | """ 320 | 321 | def _filter(z, r): 322 | if r is False: 323 | import pdb 324 | 325 | pdb.set_trace() 326 | 327 | z = _unify(u, v, s) 328 | return stream_eval(z, _filter) 329 | -------------------------------------------------------------------------------- /unification/dispatch.py: -------------------------------------------------------------------------------- 1 | from functools import partial 2 | 3 | from multipledispatch import dispatch 4 | 5 | namespace = dict() 6 | 7 | dispatch = partial(dispatch, namespace=namespace) 8 | -------------------------------------------------------------------------------- /unification/match.py: -------------------------------------------------------------------------------- 1 | from toolz import first, groupby 2 | 3 | from .core import reify, unify 4 | from .utils import _toposort, freeze 5 | from .variable import isvar 6 | 7 | 8 | class Dispatcher(object): 9 | def __init__(self, name): 10 | self.name = name 11 | self.funcs = dict() 12 | self.ordering = [] 13 | 14 | def add(self, signature, func): 15 | self.funcs[freeze(signature)] = func 16 | self.ordering = ordering(self.funcs) 17 | 18 | def __call__(self, *args, **kwargs): 19 | func, s = self.resolve(args) 20 | return func(*args, **kwargs) 21 | 22 | def resolve(self, args): 23 | n = len(args) 24 | frozen_args = freeze(args) 25 | for signature in self.ordering: 26 | if len(signature) != n: 27 | continue 28 | s = unify(frozen_args, signature) 29 | if s is not False: 30 | result = self.funcs[signature] 31 | return result, s 32 | raise 
NotImplementedError( 33 | f"No match found. \nKnown matches: {self.ordering} \nInput: {args}" 34 | ) 35 | 36 | def register(self, *signature): 37 | def _(func): 38 | self.add(signature, func) 39 | return self 40 | 41 | return _ 42 | 43 | 44 | class VarDispatcher(Dispatcher): 45 | """A dispatcher that calls functions with variable names. 46 | 47 | >>> d = VarDispatcher('d') 48 | >>> x = var('x') 49 | 50 | >>> @d.register('inc', x) 51 | ... def f(x): 52 | ... return x + 1 53 | 54 | >>> @d.register('double', x) 55 | ... def f(x): 56 | ... return x * 2 57 | 58 | >>> d('inc', 10) 59 | 11 60 | 61 | >>> d('double', 10) 62 | 20 63 | 64 | """ 65 | 66 | def __call__(self, *args, **kwargs): 67 | func, s = self.resolve(args) 68 | d = dict((k.token, v) for k, v in s.items()) 69 | return func(**d) 70 | 71 | 72 | global_namespace = dict() 73 | 74 | 75 | def match(*signature, **kwargs): 76 | namespace = kwargs.get("namespace", global_namespace) 77 | dispatcher = kwargs.get("Dispatcher", Dispatcher) 78 | 79 | def _(func): 80 | name = func.__name__ 81 | 82 | if name not in namespace: 83 | namespace[name] = dispatcher(name) 84 | d = namespace[name] 85 | 86 | d.add(signature, func) 87 | 88 | return d 89 | 90 | return _ 91 | 92 | 93 | def supercedes(a, b): 94 | """Check if ``a`` is a more specific match than ``b``.""" 95 | if isvar(b) and not isvar(a): 96 | return True 97 | s = unify(a, b) 98 | if s is False: 99 | return False 100 | s = dict((k, v) for k, v in s.items() if not isvar(k) or not isvar(v)) 101 | if reify(a, s) == a: 102 | return True 103 | if reify(b, s) == b: 104 | return False 105 | 106 | 107 | def edge(a, b, tie_breaker=hash): 108 | """Check A before B. 109 | 110 | Tie broken by tie_breaker, defaults to ``hash`` 111 | """ 112 | if supercedes(a, b): 113 | if supercedes(b, a): 114 | return tie_breaker(a) > tie_breaker(b) 115 | else: 116 | return True 117 | return False 118 | 119 | 120 | def ordering(signatures): 121 | """Check a sane ordering of signatures, first to last. 
122 | 123 | Topological sort of edges as given by ``edge`` and ``supercedes`` 124 | """ 125 | signatures = list(map(tuple, signatures)) 126 | edges = [(a, b) for a in signatures for b in signatures if edge(a, b)] 127 | edges = groupby(first, edges) 128 | for s in signatures: 129 | if s not in edges: 130 | edges[s] = [] 131 | edges = dict((k, [b for a, b in v]) for k, v in edges.items()) 132 | return _toposort(edges) 133 | -------------------------------------------------------------------------------- /unification/more.py: -------------------------------------------------------------------------------- 1 | from collections.abc import Mapping 2 | 3 | from .core import _reify, _unify, construction_sentinel 4 | 5 | 6 | def unifiable(cls): 7 | """Register standard unify and reify operations on a class. 8 | 9 | This uses the type and __dict__ or __slots__ attributes to define the 10 | nature of the term. 11 | 12 | >>> class A(object): 13 | ... def __init__(self, a, b): 14 | ... self.a = a 15 | ... self.b = b 16 | >>> unifiable(A) 17 | 18 | 19 | >>> x = var('x') 20 | >>> a = A(1, 2) 21 | >>> b = A(1, x) 22 | 23 | >>> unify(a, b, {}) 24 | {~x: 2} 25 | """ 26 | _unify.add((cls, cls, Mapping), _unify_object) 27 | _reify.add((cls, Mapping), _reify_object) 28 | 29 | return cls 30 | 31 | 32 | def _reify_object(o, s): 33 | """Reify a Python object with a substitution. 34 | 35 | >>> class Foo(object): 36 | ... def __init__(self, a, b): 37 | ... self.a = a 38 | ... self.b = b 39 | ... def __str__(self): 40 | ... 
return "Foo(%s, %s)"%(str(self.a), str(self.b)) 41 | 42 | >>> x = var('x') 43 | >>> f = Foo(1, x) 44 | >>> print(f) 45 | Foo(1, ~x) 46 | >>> print(reify_object(f, {x: 2})) 47 | Foo(1, 2) 48 | """ 49 | if hasattr(o, "__slots__"): 50 | return _reify_object_slots(o, s) 51 | else: 52 | return _reify_object_dict(o, s) 53 | 54 | 55 | def _reify_object_dict(o, s): 56 | obj = type(o).__new__(type(o)) 57 | 58 | d = yield _reify(o.__dict__, s) 59 | 60 | yield construction_sentinel 61 | 62 | if d == o.__dict__: 63 | yield o 64 | else: 65 | obj.__dict__.update(d) 66 | yield obj 67 | 68 | 69 | def _reify_object_slots(o, s): 70 | attrs = [getattr(o, attr) for attr in o.__slots__] 71 | new_attrs = yield _reify(attrs, s) 72 | 73 | yield construction_sentinel 74 | 75 | if attrs == new_attrs: 76 | yield o 77 | else: 78 | newobj = object.__new__(type(o)) 79 | for slot, attr in zip(o.__slots__, new_attrs): 80 | setattr(newobj, slot, attr) 81 | 82 | yield newobj 83 | 84 | 85 | def _unify_object(u, v, s): 86 | """Unify two Python objects. 87 | 88 | Unifies their type and ``__dict__`` attributes 89 | 90 | >>> class Foo(object): 91 | ... def __init__(self, a, b): 92 | ... self.a = a 93 | ... self.b = b 94 | ... def __str__(self): 95 | ... 
return "Foo(%s, %s)"%(str(self.a), str(self.b)) 96 | 97 | >>> x = var('x') 98 | >>> f = Foo(1, x) 99 | >>> g = Foo(1, 2) 100 | >>> unify_object(f, g, {}) 101 | {~x: 2} 102 | """ 103 | if type(u) != type(v): 104 | yield False 105 | return 106 | 107 | if hasattr(u, "__slots__"): 108 | yield _unify( 109 | tuple(getattr(u, slot) for slot in u.__slots__), 110 | tuple(getattr(v, slot) for slot in v.__slots__), 111 | s, 112 | ) 113 | else: 114 | yield _unify(u.__dict__, v.__dict__, s) 115 | -------------------------------------------------------------------------------- /unification/utils.py: -------------------------------------------------------------------------------- 1 | from collections.abc import Mapping, Set 2 | from contextlib import suppress 3 | 4 | 5 | def transitive_get(key, d): 6 | """Get a value for a dict key in a transitive fashion. 7 | 8 | >>> d = {1: 2, 2: 3, 3: 4} 9 | >>> d.get(1) 10 | 2 11 | >>> transitive_get(1, d) 12 | 4 13 | """ 14 | with suppress(TypeError): 15 | while key in d: 16 | key = d[key] 17 | return key 18 | 19 | 20 | def _toposort(edges): 21 | """Topologically sort a dictionary. 22 | 23 | Algorithm by Kahn [1] - O(nodes + vertices). 24 | 25 | inputs: 26 | edges - a dict of the form {a: {b, c}} where b and c depend on a 27 | outputs: 28 | L - an ordered list of nodes that satisfy the dependencies of edges 29 | 30 | >>> _toposort({1: (2, 3), 2: (3, )}) 31 | [1, 2, 3] 32 | 33 | Closely follows the wikipedia page [2] 34 | 35 | [1] Kahn, Arthur B. 
(1962), "Topological sorting of large networks", 36 | Communications of the ACM 37 | [2] http://en.wikipedia.org/wiki/Toposort#Algorithms 38 | """ 39 | incoming_edges = reverse_dict(edges) 40 | incoming_edges = dict((k, set(val)) for k, val in incoming_edges.items()) 41 | S = set((v for v in edges if v not in incoming_edges)) 42 | L = [] 43 | 44 | while S: 45 | n = S.pop() 46 | L.append(n) 47 | for m in edges.get(n, ()): 48 | assert n in incoming_edges[m] 49 | incoming_edges[m].remove(n) 50 | if not incoming_edges[m]: 51 | S.add(m) 52 | if any(incoming_edges.get(v, None) for v in edges): 53 | raise ValueError("Input has cycles") 54 | return L 55 | 56 | 57 | def reverse_dict(d): 58 | """Reverses the direction of a dependency dict. 59 | 60 | >>> d = {'a': (1, 2), 'b': (2, 3), 'c':()} 61 | >>> reverse_dict(d) # doctest: +SKIP 62 | {1: ('a',), 2: ('a', 'b'), 3: ('b',)} 63 | 64 | :note: dict order are not deterministic. As we iterate on the 65 | input dict, it make the output of this function depend on the 66 | dict order. So this function output order should be considered 67 | as undeterministic. 68 | 69 | """ 70 | result = {} 71 | for key in d: 72 | for val in d[key]: 73 | result[val] = result.get(val, tuple()) + (key,) 74 | return result 75 | 76 | 77 | def freeze(d): 78 | """Freeze container to hashable a form. 
79 | 80 | >>> freeze(1) 81 | 1 82 | 83 | >>> freeze([1, 2]) 84 | (1, 2) 85 | 86 | >>> freeze({1: 2}) # doctest: +SKIP 87 | ((1, 2),) 88 | """ 89 | if isinstance(d, Mapping): 90 | return tuple(map(freeze, sorted(d.items(), key=lambda x: hash(x[0])))) 91 | if isinstance(d, Set): 92 | return tuple(map(freeze, sorted(d, key=hash))) 93 | if isinstance(d, (tuple, list)): 94 | return tuple(map(freeze, d)) 95 | return d 96 | -------------------------------------------------------------------------------- /unification/variable.py: -------------------------------------------------------------------------------- 1 | import weakref 2 | from abc import ABCMeta 3 | from contextlib import contextmanager, suppress 4 | 5 | _global_logic_variables = set() 6 | _glv = _global_logic_variables 7 | 8 | 9 | class LVarType(ABCMeta): 10 | def __instancecheck__(self, o): 11 | with suppress(TypeError): 12 | return issubclass(type(o), (Var, LVarType)) or o in _glv 13 | 14 | 15 | class Var(metaclass=LVarType): 16 | """A logic variable type. 17 | 18 | Fresh logic variables will unify with anything: 19 | 20 | >>> unify(var(), 1) 21 | {~_1: 1} 22 | >>> unify(var(), [2]) 23 | {~_2: [2]} 24 | >>> unify(var(), var()) 25 | {~_3: ~_4} 26 | 27 | """ 28 | 29 | __slots__ = ("token", "__weakref__") 30 | _refs = weakref.WeakValueDictionary() 31 | _id = 1 32 | 33 | def __new__(cls, token=None, prefix=""): 34 | """Construct a new logic variable. 35 | 36 | Parameters 37 | ---------- 38 | token: Hashable (optional) 39 | A unique identifier for the logic variable. 40 | prefix: str (optional) 41 | A prefix to use when token isn't specified and the internal count 42 | value is used. Useful as a means of identifying 43 | "non-globally"-scoped logic variables from their `str`/`repr` 44 | output. 
45 | """ 46 | if token is None: 47 | token = f"{prefix}_{Var._id}" 48 | cls._id += 1 49 | 50 | obj = cls._refs.get(token, None) 51 | 52 | if obj is None: 53 | obj = object.__new__(cls) 54 | obj.token = token 55 | cls._refs[token] = obj 56 | 57 | return obj 58 | 59 | def __str__(self): 60 | return f"~{self.token}" 61 | 62 | __repr__ = __str__ 63 | 64 | def __eq__(self, other): 65 | if type(self) == type(other): 66 | return self.token == other.token 67 | return NotImplemented 68 | 69 | def __hash__(self): 70 | return hash((type(self), self.token)) 71 | 72 | 73 | var = Var 74 | 75 | 76 | def vars(n, **kwargs): 77 | """Create n-many fresh logic variables.""" 78 | return [var(**kwargs) for i in range(n)] 79 | 80 | 81 | def isvar(o): 82 | return isinstance(o, Var) 83 | 84 | 85 | @contextmanager 86 | def variables(*variables): 87 | """Create a context manager within which arbitrary objects can be logic variables. 88 | 89 | >>> with variables(1): 90 | ... print(isvar(1)) 91 | True 92 | 93 | >>> print(isvar(1)) 94 | False 95 | 96 | Normal approach 97 | 98 | >>> from unification import unify 99 | >>> x = var('x') 100 | >>> unify(x, 1) 101 | {~x: 1} 102 | 103 | Context Manager approach 104 | >>> with variables('x'): 105 | ... print(unify('x', 1)) 106 | {'x': 1} 107 | """ 108 | old_global_logic_variables = _global_logic_variables.copy() 109 | _global_logic_variables.update(set(variables)) 110 | try: 111 | yield 112 | finally: 113 | _global_logic_variables.clear() 114 | _global_logic_variables.update(old_global_logic_variables) 115 | -------------------------------------------------------------------------------- /versioneer.py: -------------------------------------------------------------------------------- 1 | 2 | # Version: 0.21 3 | 4 | """The Versioneer - like a rocketeer, but for versions. 5 | 6 | The Versioneer 7 | ============== 8 | 9 | * like a rocketeer, but for versions! 
10 | * https://github.com/python-versioneer/python-versioneer 11 | * Brian Warner 12 | * License: Public Domain 13 | * Compatible with: Python 3.6, 3.7, 3.8, 3.9 and pypy3 14 | * [![Latest Version][pypi-image]][pypi-url] 15 | * [![Build Status][travis-image]][travis-url] 16 | 17 | This is a tool for managing a recorded version number in distutils-based 18 | python projects. The goal is to remove the tedious and error-prone "update 19 | the embedded version string" step from your release process. Making a new 20 | release should be as easy as recording a new tag in your version-control 21 | system, and maybe making new tarballs. 22 | 23 | 24 | ## Quick Install 25 | 26 | * `pip install versioneer` to somewhere in your $PATH 27 | * add a `[versioneer]` section to your setup.cfg (see [Install](INSTALL.md)) 28 | * run `versioneer install` in your source tree, commit the results 29 | * Verify version information with `python setup.py version` 30 | 31 | ## Version Identifiers 32 | 33 | Source trees come from a variety of places: 34 | 35 | * a version-control system checkout (mostly used by developers) 36 | * a nightly tarball, produced by build automation 37 | * a snapshot tarball, produced by a web-based VCS browser, like github's 38 | "tarball from tag" feature 39 | * a release tarball, produced by "setup.py sdist", distributed through PyPI 40 | 41 | Within each source tree, the version identifier (either a string or a number, 42 | this tool is format-agnostic) can come from a variety of places: 43 | 44 | * ask the VCS tool itself, e.g. "git describe" (for checkouts), which knows 45 | about recent "tags" and an absolute revision-id 46 | * the name of the directory into which the tarball was unpacked 47 | * an expanded VCS keyword ($Id$, etc) 48 | * a `_version.py` created by some earlier build step 49 | 50 | For released software, the version identifier is closely related to a VCS 51 | tag. 
Some projects use tag names that include more than just the version 52 | string (e.g. "myproject-1.2" instead of just "1.2"), in which case the tool 53 | needs to strip the tag prefix to extract the version identifier. For 54 | unreleased software (between tags), the version identifier should provide 55 | enough information to help developers recreate the same tree, while also 56 | giving them an idea of roughly how old the tree is (after version 1.2, before 57 | version 1.3). Many VCS systems can report a description that captures this, 58 | for example `git describe --tags --dirty --always` reports things like 59 | "0.7-1-g574ab98-dirty" to indicate that the checkout is one revision past the 60 | 0.7 tag, has a unique revision id of "574ab98", and is "dirty" (it has 61 | uncommitted changes). 62 | 63 | The version identifier is used for multiple purposes: 64 | 65 | * to allow the module to self-identify its version: `myproject.__version__` 66 | * to choose a name and prefix for a 'setup.py sdist' tarball 67 | 68 | ## Theory of Operation 69 | 70 | Versioneer works by adding a special `_version.py` file into your source 71 | tree, where your `__init__.py` can import it. This `_version.py` knows how to 72 | dynamically ask the VCS tool for version information at import time. 73 | 74 | `_version.py` also contains `$Revision$` markers, and the installation 75 | process marks `_version.py` to have this marker rewritten with a tag name 76 | during the `git archive` command. As a result, generated tarballs will 77 | contain enough information to get the proper version. 78 | 79 | To allow `setup.py` to compute a version too, a `versioneer.py` is added to 80 | the top level of your source tree, next to `setup.py` and the `setup.cfg` 81 | that configures it. 
This overrides several distutils/setuptools commands to 82 | compute the version when invoked, and changes `setup.py build` and `setup.py 83 | sdist` to replace `_version.py` with a small static file that contains just 84 | the generated version data. 85 | 86 | ## Installation 87 | 88 | See [INSTALL.md](./INSTALL.md) for detailed installation instructions. 89 | 90 | ## Version-String Flavors 91 | 92 | Code which uses Versioneer can learn about its version string at runtime by 93 | importing `_version` from your main `__init__.py` file and running the 94 | `get_versions()` function. From the "outside" (e.g. in `setup.py`), you can 95 | import the top-level `versioneer.py` and run `get_versions()`. 96 | 97 | Both functions return a dictionary with different flavors of version 98 | information: 99 | 100 | * `['version']`: A condensed version string, rendered using the selected 101 | style. This is the most commonly used value for the project's version 102 | string. The default "pep440" style yields strings like `0.11`, 103 | `0.11+2.g1076c97`, or `0.11+2.g1076c97.dirty`. See the "Styles" section 104 | below for alternative styles. 105 | 106 | * `['full-revisionid']`: detailed revision identifier. For Git, this is the 107 | full SHA1 commit id, e.g. "1076c978a8d3cfc70f408fe5974aa6c092c949ac". 108 | 109 | * `['date']`: Date and time of the latest `HEAD` commit. For Git, it is the 110 | commit date in ISO 8601 format. This will be None if the date is not 111 | available. 112 | 113 | * `['dirty']`: a boolean, True if the tree has uncommitted changes. Note that 114 | this is only accurate if run in a VCS checkout, otherwise it is likely to 115 | be False or None 116 | 117 | * `['error']`: if the version string could not be computed, this will be set 118 | to a string describing the problem, otherwise it will be None. It may be 119 | useful to throw an exception in setup.py if this is set, to avoid e.g. 120 | creating tarballs with a version string of "unknown". 
121 | 122 | Some variants are more useful than others. Including `full-revisionid` in a 123 | bug report should allow developers to reconstruct the exact code being tested 124 | (or indicate the presence of local changes that should be shared with the 125 | developers). `version` is suitable for display in an "about" box or a CLI 126 | `--version` output: it can be easily compared against release notes and lists 127 | of bugs fixed in various releases. 128 | 129 | The installer adds the following text to your `__init__.py` to place a basic 130 | version in `YOURPROJECT.__version__`: 131 | 132 | from ._version import get_versions 133 | __version__ = get_versions()['version'] 134 | del get_versions 135 | 136 | ## Styles 137 | 138 | The setup.cfg `style=` configuration controls how the VCS information is 139 | rendered into a version string. 140 | 141 | The default style, "pep440", produces a PEP440-compliant string, equal to the 142 | un-prefixed tag name for actual releases, and containing an additional "local 143 | version" section with more detail for in-between builds. For Git, this is 144 | TAG[+DISTANCE.gHEX[.dirty]] , using information from `git describe --tags 145 | --dirty --always`. For example "0.11+2.g1076c97.dirty" indicates that the 146 | tree is like the "1076c97" commit but has uncommitted changes (".dirty"), and 147 | that this commit is two revisions ("+2") beyond the "0.11" tag. For released 148 | software (exactly equal to a known tag), the identifier will only contain the 149 | stripped tag, e.g. "0.11". 150 | 151 | Other styles are available. See [details.md](details.md) in the Versioneer 152 | source tree for descriptions. 153 | 154 | ## Debugging 155 | 156 | Versioneer tries to avoid fatal errors: if something goes wrong, it will tend 157 | to return a version of "0+unknown". 
To investigate the problem, run `setup.py
version`, which will run the version-lookup code in a verbose mode, and will
display the full contents of `get_versions()` (including the `error` string,
which may help identify what went wrong).

## Known Limitations

Some situations are known to cause problems for Versioneer. This details the
most significant ones. More can be found on Github
[issues page](https://github.com/python-versioneer/python-versioneer/issues).

### Subprojects

Versioneer has limited support for source trees in which `setup.py` is not in
the root directory (e.g. `setup.py` and `.git/` are *not* siblings). There are
two common reasons why `setup.py` might not be in the root:

* Source trees which contain multiple subprojects, such as
  [Buildbot](https://github.com/buildbot/buildbot), which contains both
  "master" and "slave" subprojects, each with their own `setup.py`,
  `setup.cfg`, and `tox.ini`. Projects like these produce multiple PyPI
  distributions (and upload multiple independently-installable tarballs).
* Source trees whose main purpose is to contain a C library, but which also
  provide bindings to Python (and perhaps other languages) in subdirectories.

Versioneer will look for `.git` in parent directories, and most operations
should get the right version string. However `pip` and `setuptools` have bugs
and implementation details which frequently cause `pip install .` from a
subproject directory to fail to find a correct version string (so it usually
defaults to `0+unknown`).

`pip install --editable .` should work correctly. `setup.py install` might
work too.

Pip-8.1.1 is known to have this problem, but hopefully it will get fixed in
some later version.
193 | 194 | [Bug #38](https://github.com/python-versioneer/python-versioneer/issues/38) is tracking 195 | this issue. The discussion in 196 | [PR #61](https://github.com/python-versioneer/python-versioneer/pull/61) describes the 197 | issue from the Versioneer side in more detail. 198 | [pip PR#3176](https://github.com/pypa/pip/pull/3176) and 199 | [pip PR#3615](https://github.com/pypa/pip/pull/3615) contain work to improve 200 | pip to let Versioneer work correctly. 201 | 202 | Versioneer-0.16 and earlier only looked for a `.git` directory next to the 203 | `setup.cfg`, so subprojects were completely unsupported with those releases. 204 | 205 | ### Editable installs with setuptools <= 18.5 206 | 207 | `setup.py develop` and `pip install --editable .` allow you to install a 208 | project into a virtualenv once, then continue editing the source code (and 209 | test) without re-installing after every change. 210 | 211 | "Entry-point scripts" (`setup(entry_points={"console_scripts": ..})`) are a 212 | convenient way to specify executable scripts that should be installed along 213 | with the python package. 214 | 215 | These both work as expected when using modern setuptools. When using 216 | setuptools-18.5 or earlier, however, certain operations will cause 217 | `pkg_resources.DistributionNotFound` errors when running the entrypoint 218 | script, which must be resolved by re-installing the package. This happens 219 | when the install happens with one version, then the egg_info data is 220 | regenerated while a different version is checked out. Many setup.py commands 221 | cause egg_info to be rebuilt (including `sdist`, `wheel`, and installing into 222 | a different virtualenv), so this can be surprising. 223 | 224 | [Bug #83](https://github.com/python-versioneer/python-versioneer/issues/83) describes 225 | this one, but upgrading to a newer version of setuptools should probably 226 | resolve it. 
227 | 228 | 229 | ## Updating Versioneer 230 | 231 | To upgrade your project to a new release of Versioneer, do the following: 232 | 233 | * install the new Versioneer (`pip install -U versioneer` or equivalent) 234 | * edit `setup.cfg`, if necessary, to include any new configuration settings 235 | indicated by the release notes. See [UPGRADING](./UPGRADING.md) for details. 236 | * re-run `versioneer install` in your source tree, to replace 237 | `SRC/_version.py` 238 | * commit any changed files 239 | 240 | ## Future Directions 241 | 242 | This tool is designed to make it easily extended to other version-control 243 | systems: all VCS-specific components are in separate directories like 244 | src/git/ . The top-level `versioneer.py` script is assembled from these 245 | components by running make-versioneer.py . In the future, make-versioneer.py 246 | will take a VCS name as an argument, and will construct a version of 247 | `versioneer.py` that is specific to the given VCS. It might also take the 248 | configuration arguments that are currently provided manually during 249 | installation by editing setup.py . Alternatively, it might go the other 250 | direction and include code from all supported VCS systems, reducing the 251 | number of intermediate scripts. 252 | 253 | ## Similar projects 254 | 255 | * [setuptools_scm](https://github.com/pypa/setuptools_scm/) - a non-vendored build-time 256 | dependency 257 | * [minver](https://github.com/jbweston/miniver) - a lightweight reimplementation of 258 | versioneer 259 | * [versioningit](https://github.com/jwodder/versioningit) - a PEP 518-based setuptools 260 | plugin 261 | 262 | ## License 263 | 264 | To make Versioneer easier to embed, all its code is dedicated to the public 265 | domain. The `_version.py` that it creates is also in the public domain. 
266 | Specifically, both are released under the Creative Commons "Public Domain 267 | Dedication" license (CC0-1.0), as described in 268 | https://creativecommons.org/publicdomain/zero/1.0/ . 269 | 270 | [pypi-image]: https://img.shields.io/pypi/v/versioneer.svg 271 | [pypi-url]: https://pypi.python.org/pypi/versioneer/ 272 | [travis-image]: 273 | https://img.shields.io/travis/com/python-versioneer/python-versioneer.svg 274 | [travis-url]: https://travis-ci.com/github/python-versioneer/python-versioneer 275 | 276 | """ 277 | # pylint:disable=invalid-name,import-outside-toplevel,missing-function-docstring 278 | # pylint:disable=missing-class-docstring,too-many-branches,too-many-statements 279 | # pylint:disable=raise-missing-from,too-many-lines,too-many-locals,import-error 280 | # pylint:disable=too-few-public-methods,redefined-outer-name,consider-using-with 281 | # pylint:disable=attribute-defined-outside-init,too-many-arguments 282 | 283 | import configparser 284 | import errno 285 | import json 286 | import os 287 | import re 288 | import subprocess 289 | import sys 290 | from typing import Callable, Dict 291 | 292 | 293 | class VersioneerConfig: 294 | """Container for Versioneer configuration parameters.""" 295 | 296 | 297 | def get_root(): 298 | """Get the project root directory. 299 | 300 | We require that all commands are run from the project root, i.e. the 301 | directory that contains setup.py, setup.cfg, and versioneer.py . 
    """
    root = os.path.realpath(os.path.abspath(os.getcwd()))
    setup_py = os.path.join(root, "setup.py")
    versioneer_py = os.path.join(root, "versioneer.py")
    if not (os.path.exists(setup_py) or os.path.exists(versioneer_py)):
        # allow 'python path/to/setup.py COMMAND'
        root = os.path.dirname(os.path.realpath(os.path.abspath(sys.argv[0])))
        setup_py = os.path.join(root, "setup.py")
        versioneer_py = os.path.join(root, "versioneer.py")
    if not (os.path.exists(setup_py) or os.path.exists(versioneer_py)):
        err = ("Versioneer was unable to run the project root directory. "
               "Versioneer requires setup.py to be executed from "
               "its immediate directory (like 'python setup.py COMMAND'), "
               "or in a way that lets it use sys.argv[0] to find the root "
               "(like 'python path/to/setup.py COMMAND').")
        raise VersioneerBadRootError(err)
    try:
        # Certain runtime workflows (setup.py install/develop in a setuptools
        # tree) execute all dependencies in a single python process, so
        # "versioneer" may be imported multiple times, and python's shared
        # module-import table will cache the first one. So we can't use
        # os.path.dirname(__file__), as that will find whichever
        # versioneer.py was first imported, even in later projects.
        my_path = os.path.realpath(os.path.abspath(__file__))
        me_dir = os.path.normcase(os.path.splitext(my_path)[0])
        vsr_dir = os.path.normcase(os.path.splitext(versioneer_py)[0])
        if me_dir != vsr_dir:
            print("Warning: build in %s is using versioneer.py from %s"
                  % (os.path.dirname(my_path), versioneer_py))
    except NameError:
        # __file__ is undefined (e.g. when run via exec); skip the warning.
        pass
    return root


def get_config_from_root(root):
    """Read the project setup.cfg file to determine Versioneer config."""
    # This might raise OSError (if setup.cfg is missing), or
    # configparser.NoSectionError (if it lacks a [versioneer] section), or
    # configparser.NoOptionError (if it lacks "VCS="). See the docstring at
    # the top of versioneer.py for instructions on writing your setup.cfg .
    setup_cfg = os.path.join(root, "setup.cfg")
    parser = configparser.ConfigParser()
    with open(setup_cfg, "r") as cfg_file:
        parser.read_file(cfg_file)
    VCS = parser.get("versioneer", "VCS")  # mandatory

    # Dict-like interface for non-mandatory entries
    section = parser["versioneer"]

    cfg = VersioneerConfig()
    cfg.VCS = VCS
    cfg.style = section.get("style", "")
    cfg.versionfile_source = section.get("versionfile_source")
    cfg.versionfile_build = section.get("versionfile_build")
    cfg.tag_prefix = section.get("tag_prefix")
    # A quoted empty string in the config means "no tag prefix".
    if cfg.tag_prefix in ("''", '""'):
        cfg.tag_prefix = ""
    cfg.parentdir_prefix = section.get("parentdir_prefix")
    cfg.verbose = section.get("verbose")
    return cfg


class NotThisMethod(Exception):
    """Exception raised if a method is not valid for the current scenario."""


# these dictionaries contain VCS-specific tools
LONG_VERSION_PY: Dict[str, str] = {}
HANDLERS: Dict[str, Dict[str, Callable]] = {}


def register_vcs_handler(vcs, method):  # decorator
    """Create decorator to mark a method as the handler of a VCS."""
    def decorate(f):
        """Store f in HANDLERS[vcs][method]."""
        HANDLERS.setdefault(vcs, {})[method] = f
        return f
    return decorate


def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False,
                env=None):
    """Call the given command(s)."""
    assert isinstance(commands, list)
    process = None
    # Try each candidate executable name in turn (e.g. "git", "git.cmd").
    for command in commands:
        try:
            dispcmd = str([command] + args)
            # remember shell=False, so use git.cmd on windows, not just git
            process = subprocess.Popen([command] + args, cwd=cwd, env=env,
                                       stdout=subprocess.PIPE,
                                       stderr=(subprocess.PIPE if hide_stderr
                                               else None))
            break
        except OSError:
            e = sys.exc_info()[1]
            if e.errno == errno.ENOENT:
                # Executable not found under this name; try the next one.
                continue
            if verbose:
                print("unable to run %s" % dispcmd)
                print(e)
            return None, None
    else:
        if verbose:
            print("unable to find command, tried %s" % (commands,))
        return None, None
    stdout = process.communicate()[0].strip().decode()
    if process.returncode != 0:
        if verbose:
            print("unable to run %s (error)" % dispcmd)
            print("stdout was %s" % stdout)
        return None, process.returncode
    return stdout, process.returncode


LONG_VERSION_PY['git'] = r'''
# This file helps to compute a version number in source trees obtained from
# git-archive tarball (such as those provided by githubs download-from-tag
# feature). Distribution tarballs (built by setup.py sdist) and build
# directories (produced by setup.py build) will contain a much shorter file
# that just contains the computed version number.

# This file is released into the public domain.
Generated by 425 | # versioneer-0.21 (https://github.com/python-versioneer/python-versioneer) 426 | 427 | """Git implementation of _version.py.""" 428 | 429 | import errno 430 | import os 431 | import re 432 | import subprocess 433 | import sys 434 | from typing import Callable, Dict 435 | 436 | 437 | def get_keywords(): 438 | """Get the keywords needed to look up the version information.""" 439 | # these strings will be replaced by git during git-archive. 440 | # setup.py/versioneer.py will grep for the variable names, so they must 441 | # each be defined on a line of their own. _version.py will just call 442 | # get_keywords(). 443 | git_refnames = "%(DOLLAR)sFormat:%%d%(DOLLAR)s" 444 | git_full = "%(DOLLAR)sFormat:%%H%(DOLLAR)s" 445 | git_date = "%(DOLLAR)sFormat:%%ci%(DOLLAR)s" 446 | keywords = {"refnames": git_refnames, "full": git_full, "date": git_date} 447 | return keywords 448 | 449 | 450 | class VersioneerConfig: 451 | """Container for Versioneer configuration parameters.""" 452 | 453 | 454 | def get_config(): 455 | """Create, populate and return the VersioneerConfig() object.""" 456 | # these strings are filled in when 'setup.py versioneer' creates 457 | # _version.py 458 | cfg = VersioneerConfig() 459 | cfg.VCS = "git" 460 | cfg.style = "%(STYLE)s" 461 | cfg.tag_prefix = "%(TAG_PREFIX)s" 462 | cfg.parentdir_prefix = "%(PARENTDIR_PREFIX)s" 463 | cfg.versionfile_source = "%(VERSIONFILE_SOURCE)s" 464 | cfg.verbose = False 465 | return cfg 466 | 467 | 468 | class NotThisMethod(Exception): 469 | """Exception raised if a method is not valid for the current scenario.""" 470 | 471 | 472 | LONG_VERSION_PY: Dict[str, str] = {} 473 | HANDLERS: Dict[str, Dict[str, Callable]] = {} 474 | 475 | 476 | def register_vcs_handler(vcs, method): # decorator 477 | """Create decorator to mark a method as the handler of a VCS.""" 478 | def decorate(f): 479 | """Store f in HANDLERS[vcs][method].""" 480 | if vcs not in HANDLERS: 481 | HANDLERS[vcs] = {} 482 | 
HANDLERS[vcs][method] = f 483 | return f 484 | return decorate 485 | 486 | 487 | def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False, 488 | env=None): 489 | """Call the given command(s).""" 490 | assert isinstance(commands, list) 491 | process = None 492 | for command in commands: 493 | try: 494 | dispcmd = str([command] + args) 495 | # remember shell=False, so use git.cmd on windows, not just git 496 | process = subprocess.Popen([command] + args, cwd=cwd, env=env, 497 | stdout=subprocess.PIPE, 498 | stderr=(subprocess.PIPE if hide_stderr 499 | else None)) 500 | break 501 | except OSError: 502 | e = sys.exc_info()[1] 503 | if e.errno == errno.ENOENT: 504 | continue 505 | if verbose: 506 | print("unable to run %%s" %% dispcmd) 507 | print(e) 508 | return None, None 509 | else: 510 | if verbose: 511 | print("unable to find command, tried %%s" %% (commands,)) 512 | return None, None 513 | stdout = process.communicate()[0].strip().decode() 514 | if process.returncode != 0: 515 | if verbose: 516 | print("unable to run %%s (error)" %% dispcmd) 517 | print("stdout was %%s" %% stdout) 518 | return None, process.returncode 519 | return stdout, process.returncode 520 | 521 | 522 | def versions_from_parentdir(parentdir_prefix, root, verbose): 523 | """Try to determine the version from the parent directory name. 524 | 525 | Source tarballs conventionally unpack into a directory that includes both 526 | the project name and a version string. 
We will also support searching up 527 | two directory levels for an appropriately named parent directory 528 | """ 529 | rootdirs = [] 530 | 531 | for _ in range(3): 532 | dirname = os.path.basename(root) 533 | if dirname.startswith(parentdir_prefix): 534 | return {"version": dirname[len(parentdir_prefix):], 535 | "full-revisionid": None, 536 | "dirty": False, "error": None, "date": None} 537 | rootdirs.append(root) 538 | root = os.path.dirname(root) # up a level 539 | 540 | if verbose: 541 | print("Tried directories %%s but none started with prefix %%s" %% 542 | (str(rootdirs), parentdir_prefix)) 543 | raise NotThisMethod("rootdir doesn't start with parentdir_prefix") 544 | 545 | 546 | @register_vcs_handler("git", "get_keywords") 547 | def git_get_keywords(versionfile_abs): 548 | """Extract version information from the given file.""" 549 | # the code embedded in _version.py can just fetch the value of these 550 | # keywords. When used from setup.py, we don't want to import _version.py, 551 | # so we do it with a regexp instead. This function is not used from 552 | # _version.py. 
553 | keywords = {} 554 | try: 555 | with open(versionfile_abs, "r") as fobj: 556 | for line in fobj: 557 | if line.strip().startswith("git_refnames ="): 558 | mo = re.search(r'=\s*"(.*)"', line) 559 | if mo: 560 | keywords["refnames"] = mo.group(1) 561 | if line.strip().startswith("git_full ="): 562 | mo = re.search(r'=\s*"(.*)"', line) 563 | if mo: 564 | keywords["full"] = mo.group(1) 565 | if line.strip().startswith("git_date ="): 566 | mo = re.search(r'=\s*"(.*)"', line) 567 | if mo: 568 | keywords["date"] = mo.group(1) 569 | except OSError: 570 | pass 571 | return keywords 572 | 573 | 574 | @register_vcs_handler("git", "keywords") 575 | def git_versions_from_keywords(keywords, tag_prefix, verbose): 576 | """Get version information from git keywords.""" 577 | if "refnames" not in keywords: 578 | raise NotThisMethod("Short version file found") 579 | date = keywords.get("date") 580 | if date is not None: 581 | # Use only the last line. Previous lines may contain GPG signature 582 | # information. 583 | date = date.splitlines()[-1] 584 | 585 | # git-2.2.0 added "%%cI", which expands to an ISO-8601 -compliant 586 | # datestamp. However we prefer "%%ci" (which expands to an "ISO-8601 587 | # -like" string, which we must then edit to make compliant), because 588 | # it's been around since git-1.5.3, and it's too difficult to 589 | # discover which version we're using, or to work around using an 590 | # older one. 591 | date = date.strip().replace(" ", "T", 1).replace(" ", "", 1) 592 | refnames = keywords["refnames"].strip() 593 | if refnames.startswith("$Format"): 594 | if verbose: 595 | print("keywords are unexpanded, not using") 596 | raise NotThisMethod("unexpanded keywords, not a git-archive tarball") 597 | refs = {r.strip() for r in refnames.strip("()").split(",")} 598 | # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of 599 | # just "foo-1.0". If we see a "tag: " prefix, prefer those. 
600 | TAG = "tag: " 601 | tags = {r[len(TAG):] for r in refs if r.startswith(TAG)} 602 | if not tags: 603 | # Either we're using git < 1.8.3, or there really are no tags. We use 604 | # a heuristic: assume all version tags have a digit. The old git %%d 605 | # expansion behaves like git log --decorate=short and strips out the 606 | # refs/heads/ and refs/tags/ prefixes that would let us distinguish 607 | # between branches and tags. By ignoring refnames without digits, we 608 | # filter out many common branch names like "release" and 609 | # "stabilization", as well as "HEAD" and "master". 610 | tags = {r for r in refs if re.search(r'\d', r)} 611 | if verbose: 612 | print("discarding '%%s', no digits" %% ",".join(refs - tags)) 613 | if verbose: 614 | print("likely tags: %%s" %% ",".join(sorted(tags))) 615 | for ref in sorted(tags): 616 | # sorting will prefer e.g. "2.0" over "2.0rc1" 617 | if ref.startswith(tag_prefix): 618 | r = ref[len(tag_prefix):] 619 | # Filter out refs that exactly match prefix or that don't start 620 | # with a number once the prefix is stripped (mostly a concern 621 | # when prefix is '') 622 | if not re.match(r'\d', r): 623 | continue 624 | if verbose: 625 | print("picking %%s" %% r) 626 | return {"version": r, 627 | "full-revisionid": keywords["full"].strip(), 628 | "dirty": False, "error": None, 629 | "date": date} 630 | # no suitable tags, so version is "0+unknown", but full hex is still there 631 | if verbose: 632 | print("no suitable tags, using unknown + full revision id") 633 | return {"version": "0+unknown", 634 | "full-revisionid": keywords["full"].strip(), 635 | "dirty": False, "error": "no suitable tags", "date": None} 636 | 637 | 638 | @register_vcs_handler("git", "pieces_from_vcs") 639 | def git_pieces_from_vcs(tag_prefix, root, verbose, runner=run_command): 640 | """Get version from 'git describe' in the root of the source tree. 
641 | 642 | This only gets called if the git-archive 'subst' keywords were *not* 643 | expanded, and _version.py hasn't already been rewritten with a short 644 | version string, meaning we're inside a checked out source tree. 645 | """ 646 | GITS = ["git"] 647 | TAG_PREFIX_REGEX = "*" 648 | if sys.platform == "win32": 649 | GITS = ["git.cmd", "git.exe"] 650 | TAG_PREFIX_REGEX = r"\*" 651 | 652 | _, rc = runner(GITS, ["rev-parse", "--git-dir"], cwd=root, 653 | hide_stderr=True) 654 | if rc != 0: 655 | if verbose: 656 | print("Directory %%s not under git control" %% root) 657 | raise NotThisMethod("'git rev-parse --git-dir' returned error") 658 | 659 | # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty] 660 | # if there isn't one, this yields HEX[-dirty] (no NUM) 661 | describe_out, rc = runner(GITS, ["describe", "--tags", "--dirty", 662 | "--always", "--long", 663 | "--match", 664 | "%%s%%s" %% (tag_prefix, TAG_PREFIX_REGEX)], 665 | cwd=root) 666 | # --long was added in git-1.5.5 667 | if describe_out is None: 668 | raise NotThisMethod("'git describe' failed") 669 | describe_out = describe_out.strip() 670 | full_out, rc = runner(GITS, ["rev-parse", "HEAD"], cwd=root) 671 | if full_out is None: 672 | raise NotThisMethod("'git rev-parse' failed") 673 | full_out = full_out.strip() 674 | 675 | pieces = {} 676 | pieces["long"] = full_out 677 | pieces["short"] = full_out[:7] # maybe improved later 678 | pieces["error"] = None 679 | 680 | branch_name, rc = runner(GITS, ["rev-parse", "--abbrev-ref", "HEAD"], 681 | cwd=root) 682 | # --abbrev-ref was added in git-1.6.3 683 | if rc != 0 or branch_name is None: 684 | raise NotThisMethod("'git rev-parse --abbrev-ref' returned error") 685 | branch_name = branch_name.strip() 686 | 687 | if branch_name == "HEAD": 688 | # If we aren't exactly on a branch, pick a branch which represents 689 | # the current commit. If all else fails, we are on a branchless 690 | # commit. 
691 | branches, rc = runner(GITS, ["branch", "--contains"], cwd=root) 692 | # --contains was added in git-1.5.4 693 | if rc != 0 or branches is None: 694 | raise NotThisMethod("'git branch --contains' returned error") 695 | branches = branches.split("\n") 696 | 697 | # Remove the first line if we're running detached 698 | if "(" in branches[0]: 699 | branches.pop(0) 700 | 701 | # Strip off the leading "* " from the list of branches. 702 | branches = [branch[2:] for branch in branches] 703 | if "master" in branches: 704 | branch_name = "master" 705 | elif not branches: 706 | branch_name = None 707 | else: 708 | # Pick the first branch that is returned. Good or bad. 709 | branch_name = branches[0] 710 | 711 | pieces["branch"] = branch_name 712 | 713 | # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty] 714 | # TAG might have hyphens. 715 | git_describe = describe_out 716 | 717 | # look for -dirty suffix 718 | dirty = git_describe.endswith("-dirty") 719 | pieces["dirty"] = dirty 720 | if dirty: 721 | git_describe = git_describe[:git_describe.rindex("-dirty")] 722 | 723 | # now we have TAG-NUM-gHEX or HEX 724 | 725 | if "-" in git_describe: 726 | # TAG-NUM-gHEX 727 | mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe) 728 | if not mo: 729 | # unparsable. Maybe git-describe is misbehaving? 
730 | pieces["error"] = ("unable to parse git-describe output: '%%s'" 731 | %% describe_out) 732 | return pieces 733 | 734 | # tag 735 | full_tag = mo.group(1) 736 | if not full_tag.startswith(tag_prefix): 737 | if verbose: 738 | fmt = "tag '%%s' doesn't start with prefix '%%s'" 739 | print(fmt %% (full_tag, tag_prefix)) 740 | pieces["error"] = ("tag '%%s' doesn't start with prefix '%%s'" 741 | %% (full_tag, tag_prefix)) 742 | return pieces 743 | pieces["closest-tag"] = full_tag[len(tag_prefix):] 744 | 745 | # distance: number of commits since tag 746 | pieces["distance"] = int(mo.group(2)) 747 | 748 | # commit: short hex revision ID 749 | pieces["short"] = mo.group(3) 750 | 751 | else: 752 | # HEX: no tags 753 | pieces["closest-tag"] = None 754 | count_out, rc = runner(GITS, ["rev-list", "HEAD", "--count"], cwd=root) 755 | pieces["distance"] = int(count_out) # total number of commits 756 | 757 | # commit date: see ISO-8601 comment in git_versions_from_keywords() 758 | date = runner(GITS, ["show", "-s", "--format=%%ci", "HEAD"], cwd=root)[0].strip() 759 | # Use only the last line. Previous lines may contain GPG signature 760 | # information. 761 | date = date.splitlines()[-1] 762 | pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1) 763 | 764 | return pieces 765 | 766 | 767 | def plus_or_dot(pieces): 768 | """Return a + if we don't already have one, else return a .""" 769 | if "+" in pieces.get("closest-tag", ""): 770 | return "." 771 | return "+" 772 | 773 | 774 | def render_pep440(pieces): 775 | """Build up version string, with post-release "local version identifier". 776 | 777 | Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you 778 | get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty 779 | 780 | Exceptions: 781 | 1: no tags. git_describe was just HEX. 
0+untagged.DISTANCE.gHEX[.dirty] 782 | """ 783 | if pieces["closest-tag"]: 784 | rendered = pieces["closest-tag"] 785 | if pieces["distance"] or pieces["dirty"]: 786 | rendered += plus_or_dot(pieces) 787 | rendered += "%%d.g%%s" %% (pieces["distance"], pieces["short"]) 788 | if pieces["dirty"]: 789 | rendered += ".dirty" 790 | else: 791 | # exception #1 792 | rendered = "0+untagged.%%d.g%%s" %% (pieces["distance"], 793 | pieces["short"]) 794 | if pieces["dirty"]: 795 | rendered += ".dirty" 796 | return rendered 797 | 798 | 799 | def render_pep440_branch(pieces): 800 | """TAG[[.dev0]+DISTANCE.gHEX[.dirty]] . 801 | 802 | The ".dev0" means not master branch. Note that .dev0 sorts backwards 803 | (a feature branch will appear "older" than the master branch). 804 | 805 | Exceptions: 806 | 1: no tags. 0[.dev0]+untagged.DISTANCE.gHEX[.dirty] 807 | """ 808 | if pieces["closest-tag"]: 809 | rendered = pieces["closest-tag"] 810 | if pieces["distance"] or pieces["dirty"]: 811 | if pieces["branch"] != "master": 812 | rendered += ".dev0" 813 | rendered += plus_or_dot(pieces) 814 | rendered += "%%d.g%%s" %% (pieces["distance"], pieces["short"]) 815 | if pieces["dirty"]: 816 | rendered += ".dirty" 817 | else: 818 | # exception #1 819 | rendered = "0" 820 | if pieces["branch"] != "master": 821 | rendered += ".dev0" 822 | rendered += "+untagged.%%d.g%%s" %% (pieces["distance"], 823 | pieces["short"]) 824 | if pieces["dirty"]: 825 | rendered += ".dirty" 826 | return rendered 827 | 828 | 829 | def pep440_split_post(ver): 830 | """Split pep440 version string at the post-release segment. 831 | 832 | Returns the release segments before the post-release and the 833 | post-release version number (or -1 if no post-release segment is present). 834 | """ 835 | vc = str.split(ver, ".post") 836 | return vc[0], int(vc[1] or 0) if len(vc) == 2 else None 837 | 838 | 839 | def render_pep440_pre(pieces): 840 | """TAG[.postN.devDISTANCE] -- No -dirty. 841 | 842 | Exceptions: 843 | 1: no tags. 
0.post0.devDISTANCE 844 | """ 845 | if pieces["closest-tag"]: 846 | if pieces["distance"]: 847 | # update the post release segment 848 | tag_version, post_version = pep440_split_post(pieces["closest-tag"]) 849 | rendered = tag_version 850 | if post_version is not None: 851 | rendered += ".post%%d.dev%%d" %% (post_version+1, pieces["distance"]) 852 | else: 853 | rendered += ".post0.dev%%d" %% (pieces["distance"]) 854 | else: 855 | # no commits, use the tag as the version 856 | rendered = pieces["closest-tag"] 857 | else: 858 | # exception #1 859 | rendered = "0.post0.dev%%d" %% pieces["distance"] 860 | return rendered 861 | 862 | 863 | def render_pep440_post(pieces): 864 | """TAG[.postDISTANCE[.dev0]+gHEX] . 865 | 866 | The ".dev0" means dirty. Note that .dev0 sorts backwards 867 | (a dirty tree will appear "older" than the corresponding clean one), 868 | but you shouldn't be releasing software with -dirty anyways. 869 | 870 | Exceptions: 871 | 1: no tags. 0.postDISTANCE[.dev0] 872 | """ 873 | if pieces["closest-tag"]: 874 | rendered = pieces["closest-tag"] 875 | if pieces["distance"] or pieces["dirty"]: 876 | rendered += ".post%%d" %% pieces["distance"] 877 | if pieces["dirty"]: 878 | rendered += ".dev0" 879 | rendered += plus_or_dot(pieces) 880 | rendered += "g%%s" %% pieces["short"] 881 | else: 882 | # exception #1 883 | rendered = "0.post%%d" %% pieces["distance"] 884 | if pieces["dirty"]: 885 | rendered += ".dev0" 886 | rendered += "+g%%s" %% pieces["short"] 887 | return rendered 888 | 889 | 890 | def render_pep440_post_branch(pieces): 891 | """TAG[.postDISTANCE[.dev0]+gHEX[.dirty]] . 892 | 893 | The ".dev0" means not master branch. 894 | 895 | Exceptions: 896 | 1: no tags. 
0.postDISTANCE[.dev0]+gHEX[.dirty] 897 | """ 898 | if pieces["closest-tag"]: 899 | rendered = pieces["closest-tag"] 900 | if pieces["distance"] or pieces["dirty"]: 901 | rendered += ".post%%d" %% pieces["distance"] 902 | if pieces["branch"] != "master": 903 | rendered += ".dev0" 904 | rendered += plus_or_dot(pieces) 905 | rendered += "g%%s" %% pieces["short"] 906 | if pieces["dirty"]: 907 | rendered += ".dirty" 908 | else: 909 | # exception #1 910 | rendered = "0.post%%d" %% pieces["distance"] 911 | if pieces["branch"] != "master": 912 | rendered += ".dev0" 913 | rendered += "+g%%s" %% pieces["short"] 914 | if pieces["dirty"]: 915 | rendered += ".dirty" 916 | return rendered 917 | 918 | 919 | def render_pep440_old(pieces): 920 | """TAG[.postDISTANCE[.dev0]] . 921 | 922 | The ".dev0" means dirty. 923 | 924 | Exceptions: 925 | 1: no tags. 0.postDISTANCE[.dev0] 926 | """ 927 | if pieces["closest-tag"]: 928 | rendered = pieces["closest-tag"] 929 | if pieces["distance"] or pieces["dirty"]: 930 | rendered += ".post%%d" %% pieces["distance"] 931 | if pieces["dirty"]: 932 | rendered += ".dev0" 933 | else: 934 | # exception #1 935 | rendered = "0.post%%d" %% pieces["distance"] 936 | if pieces["dirty"]: 937 | rendered += ".dev0" 938 | return rendered 939 | 940 | 941 | def render_git_describe(pieces): 942 | """TAG[-DISTANCE-gHEX][-dirty]. 943 | 944 | Like 'git describe --tags --dirty --always'. 945 | 946 | Exceptions: 947 | 1: no tags. HEX[-dirty] (note: no 'g' prefix) 948 | """ 949 | if pieces["closest-tag"]: 950 | rendered = pieces["closest-tag"] 951 | if pieces["distance"]: 952 | rendered += "-%%d-g%%s" %% (pieces["distance"], pieces["short"]) 953 | else: 954 | # exception #1 955 | rendered = pieces["short"] 956 | if pieces["dirty"]: 957 | rendered += "-dirty" 958 | return rendered 959 | 960 | 961 | def render_git_describe_long(pieces): 962 | """TAG-DISTANCE-gHEX[-dirty]. 963 | 964 | Like 'git describe --tags --dirty --always -long'. 
965 | The distance/hash is unconditional. 966 | 967 | Exceptions: 968 | 1: no tags. HEX[-dirty] (note: no 'g' prefix) 969 | """ 970 | if pieces["closest-tag"]: 971 | rendered = pieces["closest-tag"] 972 | rendered += "-%%d-g%%s" %% (pieces["distance"], pieces["short"]) 973 | else: 974 | # exception #1 975 | rendered = pieces["short"] 976 | if pieces["dirty"]: 977 | rendered += "-dirty" 978 | return rendered 979 | 980 | 981 | def render(pieces, style): 982 | """Render the given version pieces into the requested style.""" 983 | if pieces["error"]: 984 | return {"version": "unknown", 985 | "full-revisionid": pieces.get("long"), 986 | "dirty": None, 987 | "error": pieces["error"], 988 | "date": None} 989 | 990 | if not style or style == "default": 991 | style = "pep440" # the default 992 | 993 | if style == "pep440": 994 | rendered = render_pep440(pieces) 995 | elif style == "pep440-branch": 996 | rendered = render_pep440_branch(pieces) 997 | elif style == "pep440-pre": 998 | rendered = render_pep440_pre(pieces) 999 | elif style == "pep440-post": 1000 | rendered = render_pep440_post(pieces) 1001 | elif style == "pep440-post-branch": 1002 | rendered = render_pep440_post_branch(pieces) 1003 | elif style == "pep440-old": 1004 | rendered = render_pep440_old(pieces) 1005 | elif style == "git-describe": 1006 | rendered = render_git_describe(pieces) 1007 | elif style == "git-describe-long": 1008 | rendered = render_git_describe_long(pieces) 1009 | else: 1010 | raise ValueError("unknown style '%%s'" %% style) 1011 | 1012 | return {"version": rendered, "full-revisionid": pieces["long"], 1013 | "dirty": pieces["dirty"], "error": None, 1014 | "date": pieces.get("date")} 1015 | 1016 | 1017 | def get_versions(): 1018 | """Get version information or return default if unable to do so.""" 1019 | # I am in _version.py, which lives at ROOT/VERSIONFILE_SOURCE. If we have 1020 | # __file__, we can work backwards from there to the root. 
Some 1021 | # py2exe/bbfreeze/non-CPython implementations don't do __file__, in which 1022 | # case we can only use expanded keywords. 1023 | 1024 | cfg = get_config() 1025 | verbose = cfg.verbose 1026 | 1027 | try: 1028 | return git_versions_from_keywords(get_keywords(), cfg.tag_prefix, 1029 | verbose) 1030 | except NotThisMethod: 1031 | pass 1032 | 1033 | try: 1034 | root = os.path.realpath(__file__) 1035 | # versionfile_source is the relative path from the top of the source 1036 | # tree (where the .git directory might live) to this file. Invert 1037 | # this to find the root from __file__. 1038 | for _ in cfg.versionfile_source.split('/'): 1039 | root = os.path.dirname(root) 1040 | except NameError: 1041 | return {"version": "0+unknown", "full-revisionid": None, 1042 | "dirty": None, 1043 | "error": "unable to find root of source tree", 1044 | "date": None} 1045 | 1046 | try: 1047 | pieces = git_pieces_from_vcs(cfg.tag_prefix, root, verbose) 1048 | return render(pieces, cfg.style) 1049 | except NotThisMethod: 1050 | pass 1051 | 1052 | try: 1053 | if cfg.parentdir_prefix: 1054 | return versions_from_parentdir(cfg.parentdir_prefix, root, verbose) 1055 | except NotThisMethod: 1056 | pass 1057 | 1058 | return {"version": "0+unknown", "full-revisionid": None, 1059 | "dirty": None, 1060 | "error": "unable to compute version", "date": None} 1061 | ''' 1062 | 1063 | 1064 | @register_vcs_handler("git", "get_keywords") 1065 | def git_get_keywords(versionfile_abs): 1066 | """Extract version information from the given file.""" 1067 | # the code embedded in _version.py can just fetch the value of these 1068 | # keywords. When used from setup.py, we don't want to import _version.py, 1069 | # so we do it with a regexp instead. This function is not used from 1070 | # _version.py. 
1071 | keywords = {} 1072 | try: 1073 | with open(versionfile_abs, "r") as fobj: 1074 | for line in fobj: 1075 | if line.strip().startswith("git_refnames ="): 1076 | mo = re.search(r'=\s*"(.*)"', line) 1077 | if mo: 1078 | keywords["refnames"] = mo.group(1) 1079 | if line.strip().startswith("git_full ="): 1080 | mo = re.search(r'=\s*"(.*)"', line) 1081 | if mo: 1082 | keywords["full"] = mo.group(1) 1083 | if line.strip().startswith("git_date ="): 1084 | mo = re.search(r'=\s*"(.*)"', line) 1085 | if mo: 1086 | keywords["date"] = mo.group(1) 1087 | except OSError: 1088 | pass 1089 | return keywords 1090 | 1091 | 1092 | @register_vcs_handler("git", "keywords") 1093 | def git_versions_from_keywords(keywords, tag_prefix, verbose): 1094 | """Get version information from git keywords.""" 1095 | if "refnames" not in keywords: 1096 | raise NotThisMethod("Short version file found") 1097 | date = keywords.get("date") 1098 | if date is not None: 1099 | # Use only the last line. Previous lines may contain GPG signature 1100 | # information. 1101 | date = date.splitlines()[-1] 1102 | 1103 | # git-2.2.0 added "%cI", which expands to an ISO-8601 -compliant 1104 | # datestamp. However we prefer "%ci" (which expands to an "ISO-8601 1105 | # -like" string, which we must then edit to make compliant), because 1106 | # it's been around since git-1.5.3, and it's too difficult to 1107 | # discover which version we're using, or to work around using an 1108 | # older one. 1109 | date = date.strip().replace(" ", "T", 1).replace(" ", "", 1) 1110 | refnames = keywords["refnames"].strip() 1111 | if refnames.startswith("$Format"): 1112 | if verbose: 1113 | print("keywords are unexpanded, not using") 1114 | raise NotThisMethod("unexpanded keywords, not a git-archive tarball") 1115 | refs = {r.strip() for r in refnames.strip("()").split(",")} 1116 | # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of 1117 | # just "foo-1.0". If we see a "tag: " prefix, prefer those. 
1118 | TAG = "tag: " 1119 | tags = {r[len(TAG):] for r in refs if r.startswith(TAG)} 1120 | if not tags: 1121 | # Either we're using git < 1.8.3, or there really are no tags. We use 1122 | # a heuristic: assume all version tags have a digit. The old git %d 1123 | # expansion behaves like git log --decorate=short and strips out the 1124 | # refs/heads/ and refs/tags/ prefixes that would let us distinguish 1125 | # between branches and tags. By ignoring refnames without digits, we 1126 | # filter out many common branch names like "release" and 1127 | # "stabilization", as well as "HEAD" and "master". 1128 | tags = {r for r in refs if re.search(r'\d', r)} 1129 | if verbose: 1130 | print("discarding '%s', no digits" % ",".join(refs - tags)) 1131 | if verbose: 1132 | print("likely tags: %s" % ",".join(sorted(tags))) 1133 | for ref in sorted(tags): 1134 | # sorting will prefer e.g. "2.0" over "2.0rc1" 1135 | if ref.startswith(tag_prefix): 1136 | r = ref[len(tag_prefix):] 1137 | # Filter out refs that exactly match prefix or that don't start 1138 | # with a number once the prefix is stripped (mostly a concern 1139 | # when prefix is '') 1140 | if not re.match(r'\d', r): 1141 | continue 1142 | if verbose: 1143 | print("picking %s" % r) 1144 | return {"version": r, 1145 | "full-revisionid": keywords["full"].strip(), 1146 | "dirty": False, "error": None, 1147 | "date": date} 1148 | # no suitable tags, so version is "0+unknown", but full hex is still there 1149 | if verbose: 1150 | print("no suitable tags, using unknown + full revision id") 1151 | return {"version": "0+unknown", 1152 | "full-revisionid": keywords["full"].strip(), 1153 | "dirty": False, "error": "no suitable tags", "date": None} 1154 | 1155 | 1156 | @register_vcs_handler("git", "pieces_from_vcs") 1157 | def git_pieces_from_vcs(tag_prefix, root, verbose, runner=run_command): 1158 | """Get version from 'git describe' in the root of the source tree. 
1159 | 1160 | This only gets called if the git-archive 'subst' keywords were *not* 1161 | expanded, and _version.py hasn't already been rewritten with a short 1162 | version string, meaning we're inside a checked out source tree. 1163 | """ 1164 | GITS = ["git"] 1165 | TAG_PREFIX_REGEX = "*" 1166 | if sys.platform == "win32": 1167 | GITS = ["git.cmd", "git.exe"] 1168 | TAG_PREFIX_REGEX = r"\*" 1169 | 1170 | _, rc = runner(GITS, ["rev-parse", "--git-dir"], cwd=root, 1171 | hide_stderr=True) 1172 | if rc != 0: 1173 | if verbose: 1174 | print("Directory %s not under git control" % root) 1175 | raise NotThisMethod("'git rev-parse --git-dir' returned error") 1176 | 1177 | # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty] 1178 | # if there isn't one, this yields HEX[-dirty] (no NUM) 1179 | describe_out, rc = runner(GITS, ["describe", "--tags", "--dirty", 1180 | "--always", "--long", 1181 | "--match", 1182 | "%s%s" % (tag_prefix, TAG_PREFIX_REGEX)], 1183 | cwd=root) 1184 | # --long was added in git-1.5.5 1185 | if describe_out is None: 1186 | raise NotThisMethod("'git describe' failed") 1187 | describe_out = describe_out.strip() 1188 | full_out, rc = runner(GITS, ["rev-parse", "HEAD"], cwd=root) 1189 | if full_out is None: 1190 | raise NotThisMethod("'git rev-parse' failed") 1191 | full_out = full_out.strip() 1192 | 1193 | pieces = {} 1194 | pieces["long"] = full_out 1195 | pieces["short"] = full_out[:7] # maybe improved later 1196 | pieces["error"] = None 1197 | 1198 | branch_name, rc = runner(GITS, ["rev-parse", "--abbrev-ref", "HEAD"], 1199 | cwd=root) 1200 | # --abbrev-ref was added in git-1.6.3 1201 | if rc != 0 or branch_name is None: 1202 | raise NotThisMethod("'git rev-parse --abbrev-ref' returned error") 1203 | branch_name = branch_name.strip() 1204 | 1205 | if branch_name == "HEAD": 1206 | # If we aren't exactly on a branch, pick a branch which represents 1207 | # the current commit. 
If all else fails, we are on a branchless 1208 | # commit. 1209 | branches, rc = runner(GITS, ["branch", "--contains"], cwd=root) 1210 | # --contains was added in git-1.5.4 1211 | if rc != 0 or branches is None: 1212 | raise NotThisMethod("'git branch --contains' returned error") 1213 | branches = branches.split("\n") 1214 | 1215 | # Remove the first line if we're running detached 1216 | if "(" in branches[0]: 1217 | branches.pop(0) 1218 | 1219 | # Strip off the leading "* " from the list of branches. 1220 | branches = [branch[2:] for branch in branches] 1221 | if "master" in branches: 1222 | branch_name = "master" 1223 | elif not branches: 1224 | branch_name = None 1225 | else: 1226 | # Pick the first branch that is returned. Good or bad. 1227 | branch_name = branches[0] 1228 | 1229 | pieces["branch"] = branch_name 1230 | 1231 | # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty] 1232 | # TAG might have hyphens. 1233 | git_describe = describe_out 1234 | 1235 | # look for -dirty suffix 1236 | dirty = git_describe.endswith("-dirty") 1237 | pieces["dirty"] = dirty 1238 | if dirty: 1239 | git_describe = git_describe[:git_describe.rindex("-dirty")] 1240 | 1241 | # now we have TAG-NUM-gHEX or HEX 1242 | 1243 | if "-" in git_describe: 1244 | # TAG-NUM-gHEX 1245 | mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe) 1246 | if not mo: 1247 | # unparsable. Maybe git-describe is misbehaving? 
1248 | pieces["error"] = ("unable to parse git-describe output: '%s'" 1249 | % describe_out) 1250 | return pieces 1251 | 1252 | # tag 1253 | full_tag = mo.group(1) 1254 | if not full_tag.startswith(tag_prefix): 1255 | if verbose: 1256 | fmt = "tag '%s' doesn't start with prefix '%s'" 1257 | print(fmt % (full_tag, tag_prefix)) 1258 | pieces["error"] = ("tag '%s' doesn't start with prefix '%s'" 1259 | % (full_tag, tag_prefix)) 1260 | return pieces 1261 | pieces["closest-tag"] = full_tag[len(tag_prefix):] 1262 | 1263 | # distance: number of commits since tag 1264 | pieces["distance"] = int(mo.group(2)) 1265 | 1266 | # commit: short hex revision ID 1267 | pieces["short"] = mo.group(3) 1268 | 1269 | else: 1270 | # HEX: no tags 1271 | pieces["closest-tag"] = None 1272 | count_out, rc = runner(GITS, ["rev-list", "HEAD", "--count"], cwd=root) 1273 | pieces["distance"] = int(count_out) # total number of commits 1274 | 1275 | # commit date: see ISO-8601 comment in git_versions_from_keywords() 1276 | date = runner(GITS, ["show", "-s", "--format=%ci", "HEAD"], cwd=root)[0].strip() 1277 | # Use only the last line. Previous lines may contain GPG signature 1278 | # information. 1279 | date = date.splitlines()[-1] 1280 | pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1) 1281 | 1282 | return pieces 1283 | 1284 | 1285 | def do_vcs_install(manifest_in, versionfile_source, ipy): 1286 | """Git-specific installation logic for Versioneer. 1287 | 1288 | For Git, this means creating/changing .gitattributes to mark _version.py 1289 | for export-subst keyword substitution. 
def do_vcs_install(manifest_in, versionfile_source, ipy):
    """Git-specific installation logic for Versioneer.

    For Git, this means creating/changing .gitattributes to mark _version.py
    for export-subst keyword substitution.

    manifest_in: path to the project's MANIFEST.in.
    versionfile_source: relative path of the generated _version.py.
    ipy: path to the package __init__.py, or a false value to skip it.
    """
    GITS = ["git"]
    if sys.platform == "win32":
        # On Windows git may only be reachable via its .cmd/.exe shims.
        GITS = ["git.cmd", "git.exe"]
    files = [manifest_in, versionfile_source]
    if ipy:
        files.append(ipy)
    try:
        # Stage this very script too; derive the .py path even when running
        # from a compiled .pyc/.pyo file.
        my_path = __file__
        if my_path.endswith(".pyc") or my_path.endswith(".pyo"):
            my_path = os.path.splitext(my_path)[0] + ".py"
        versioneer_file = os.path.relpath(my_path)
    except NameError:
        # __file__ is undefined (e.g. interactive use); fall back to the name.
        versioneer_file = "versioneer.py"
    files.append(versioneer_file)
    present = False
    try:
        # Only append the attribute line if no existing entry already marks
        # versionfile_source with export-subst.
        with open(".gitattributes", "r") as fobj:
            for line in fobj:
                if line.strip().startswith(versionfile_source):
                    if "export-subst" in line.strip().split()[1:]:
                        present = True
                        break
    except OSError:
        # Missing .gitattributes is fine; it will be created below.
        pass
    if not present:
        with open(".gitattributes", "a+") as fobj:
            fobj.write(f"{versionfile_source} export-subst\n")
        files.append(".gitattributes")
    # Stage everything we touched; "--" guards against filenames that look
    # like git options.
    run_command(GITS, ["add", "--"] + files)


def versions_from_parentdir(parentdir_prefix, root, verbose):
    """Try to determine the version from the parent directory name.

    Source tarballs conventionally unpack into a directory that includes both
    the project name and a version string. We will also support searching up
    two directory levels for an appropriately named parent directory.

    Raises NotThisMethod when no ancestor directory matches the prefix.
    """
    rootdirs = []

    # Check `root` itself plus up to two parent levels.
    for _ in range(3):
        dirname = os.path.basename(root)
        if dirname.startswith(parentdir_prefix):
            # Everything after the prefix is taken verbatim as the version.
            return {"version": dirname[len(parentdir_prefix):],
                    "full-revisionid": None,
                    "dirty": False, "error": None, "date": None}
        rootdirs.append(root)
        root = os.path.dirname(root)  # up a level

    if verbose:
        print("Tried directories %s but none started with prefix %s" %
              (str(rootdirs), parentdir_prefix))
    raise NotThisMethod("rootdir doesn't start with parentdir_prefix")


# Template for the short, static _version.py written into build trees and
# sdists; %s is filled with the JSON-serialized version dict.
SHORT_VERSION_PY = """
# This file was generated by 'versioneer.py' (0.21) from
# revision-control system data, or from the parent directory name of an
# unpacked source archive. Distribution tarballs contain a pre-generated copy
# of this file.

import json

version_json = '''
%s
''' # END VERSION_JSON


def get_versions():
    return json.loads(version_json)
"""


def versions_from_file(filename):
    """Try to determine the version from _version.py if present.

    Parses the JSON blob embedded by SHORT_VERSION_PY above; raises
    NotThisMethod when the file is absent or does not contain one.
    """
    try:
        with open(filename) as f:
            contents = f.read()
    except OSError:
        raise NotThisMethod("unable to read _version.py")
    # First try Unix line endings, then Windows (the file may have been
    # rewritten with CRLF by a checkout filter).
    mo = re.search(r"version_json = '''\n(.*)''' # END VERSION_JSON",
                   contents, re.M | re.S)
    if not mo:
        mo = re.search(r"version_json = '''\r\n(.*)''' # END VERSION_JSON",
                       contents, re.M | re.S)
    if not mo:
        raise NotThisMethod("no version_json in _version.py")
    return json.loads(mo.group(1))


def write_to_version_file(filename, versions):
    """Write the given version number to the given _version.py file."""
    # Unlink first so a hardlinked sdist copy is not modified in place.
    os.unlink(filename)
    contents = json.dumps(versions, sort_keys=True,
                          indent=1, separators=(",", ": "))
    with open(filename, "w") as f:
        f.write(SHORT_VERSION_PY % contents)

    print("set %s to '%s'" % (filename, versions["version"]))
def plus_or_dot(pieces):
    """Return the local-version separator.

    PEP 440 local segments start with '+'; if the closest tag already
    carries one, subsequent segments must be joined with '.' instead.
    """
    tag = pieces.get("closest-tag", "")
    return "." if "+" in tag else "+"


def render_pep440(pieces):
    """Build up version string, with post-release "local version identifier".

    Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you
    get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty

    Exceptions:
    1: no tags. git_describe was just HEX. 0+untagged.DISTANCE.gHEX[.dirty]
    """
    tag = pieces["closest-tag"]
    distance = pieces["distance"]
    dirty = pieces["dirty"]
    if not tag:
        # exception #1: nothing has been tagged yet
        suffix = ".dirty" if dirty else ""
        return "0+untagged.%d.g%s%s" % (distance, pieces["short"], suffix)
    chunks = [tag]
    if distance or dirty:
        chunks.append(plus_or_dot(pieces))
        chunks.append("%d.g%s" % (distance, pieces["short"]))
        if dirty:
            chunks.append(".dirty")
    return "".join(chunks)


def render_pep440_branch(pieces):
    """TAG[[.dev0]+DISTANCE.gHEX[.dirty]] .

    The ".dev0" means not master branch. Note that .dev0 sorts backwards
    (a feature branch will appear "older" than the master branch).

    Exceptions:
    1: no tags. 0[.dev0]+untagged.DISTANCE.gHEX[.dirty]
    """
    tag = pieces["closest-tag"]
    distance = pieces["distance"]
    dirty = pieces["dirty"]
    off_master = pieces["branch"] != "master"
    if not tag:
        # exception #1: nothing has been tagged yet
        out = "0"
        if off_master:
            out += ".dev0"
        out += "+untagged.%d.g%s" % (distance, pieces["short"])
        if dirty:
            out += ".dirty"
        return out
    out = tag
    if distance or dirty:
        if off_master:
            out += ".dev0"
        out += plus_or_dot(pieces)
        out += "%d.g%s" % (distance, pieces["short"])
        if dirty:
            out += ".dirty"
    return out
def pep440_split_post(ver):
    """Split pep440 version string at the post-release segment.

    Returns the release segments before the post-release and the
    post-release version number (or None if no post-release segment is
    present).
    """
    halves = ver.split(".post")
    if len(halves) == 2:
        # a bare ".post" (no number) counts as post-release 0
        return halves[0], int(halves[1] or 0)
    return halves[0], None


def render_pep440_pre(pieces):
    """TAG[.postN.devDISTANCE] -- No -dirty.

    Exceptions:
    1: no tags. 0.post0.devDISTANCE
    """
    tag = pieces["closest-tag"]
    distance = pieces["distance"]
    if not tag:
        # exception #1: nothing has been tagged yet
        return "0.post0.dev%d" % distance
    if not distance:
        # sitting exactly on the tag: the tag alone is the version
        return tag
    # bump (or introduce) the post-release segment, then mark as dev
    release, post = pep440_split_post(tag)
    if post is None:
        return "%s.post0.dev%d" % (release, distance)
    return "%s.post%d.dev%d" % (release, post + 1, distance)


def render_pep440_post(pieces):
    """TAG[.postDISTANCE[.dev0]+gHEX] .

    The ".dev0" means dirty. Note that .dev0 sorts backwards
    (a dirty tree will appear "older" than the corresponding clean one),
    but you shouldn't be releasing software with -dirty anyways.

    Exceptions:
    1: no tags. 0.postDISTANCE[.dev0]
    """
    tag = pieces["closest-tag"]
    if not tag:
        # exception #1: nothing has been tagged yet
        out = "0.post%d" % pieces["distance"]
        if pieces["dirty"]:
            out += ".dev0"
        return out + "+g%s" % pieces["short"]
    out = tag
    if pieces["distance"] or pieces["dirty"]:
        out += ".post%d" % pieces["distance"]
        if pieces["dirty"]:
            out += ".dev0"
        out += plus_or_dot(pieces)
        out += "g%s" % pieces["short"]
    return out
def render_pep440_post_branch(pieces):
    """TAG[.postDISTANCE[.dev0]+gHEX[.dirty]] .

    The ".dev0" means not master branch.

    Exceptions:
    1: no tags. 0.postDISTANCE[.dev0]+gHEX[.dirty]
    """
    tag = pieces["closest-tag"]
    off_master = pieces["branch"] != "master"
    if not tag:
        # exception #1: nothing has been tagged yet
        out = "0.post%d" % pieces["distance"]
        if off_master:
            out += ".dev0"
        out += "+g%s" % pieces["short"]
        if pieces["dirty"]:
            out += ".dirty"
        return out
    out = tag
    if pieces["distance"] or pieces["dirty"]:
        out += ".post%d" % pieces["distance"]
        if off_master:
            out += ".dev0"
        out += plus_or_dot(pieces)
        out += "g%s" % pieces["short"]
        if pieces["dirty"]:
            out += ".dirty"
    return out


def render_pep440_old(pieces):
    """TAG[.postDISTANCE[.dev0]] .

    The ".dev0" means dirty.

    Exceptions:
    1: no tags. 0.postDISTANCE[.dev0]
    """
    if pieces["closest-tag"]:
        out = pieces["closest-tag"]
        if not (pieces["distance"] or pieces["dirty"]):
            # clean build exactly on the tag: the tag alone is the version
            return out
        out += ".post%d" % pieces["distance"]
    else:
        # exception #1: nothing has been tagged yet
        out = "0.post%d" % pieces["distance"]
    if pieces["dirty"]:
        out += ".dev0"
    return out
def render_git_describe(pieces):
    """TAG[-DISTANCE-gHEX][-dirty].

    Like 'git describe --tags --dirty --always'.

    Exceptions:
    1: no tags. HEX[-dirty] (note: no 'g' prefix)
    """
    tag = pieces["closest-tag"]
    if not tag:
        # exception #1: bare hash, no 'g' prefix
        out = pieces["short"]
    elif pieces["distance"]:
        out = "%s-%d-g%s" % (tag, pieces["distance"], pieces["short"])
    else:
        # exactly on the tag: distance/hash are omitted
        out = tag
    if pieces["dirty"]:
        out += "-dirty"
    return out


def render_git_describe_long(pieces):
    """TAG-DISTANCE-gHEX[-dirty].

    Like 'git describe --tags --dirty --always -long'.
    The distance/hash is unconditional.

    Exceptions:
    1: no tags. HEX[-dirty] (note: no 'g' prefix)
    """
    tag = pieces["closest-tag"]
    if tag:
        out = "%s-%d-g%s" % (tag, pieces["distance"], pieces["short"])
    else:
        # exception #1: bare hash, no 'g' prefix
        out = pieces["short"]
    if pieces["dirty"]:
        out += "-dirty"
    return out
def render(pieces, style):
    """Render the given version pieces into the requested style.

    Returns a version dict; on a pre-recorded error in `pieces` the version
    is "unknown" and the error is propagated.  Raises ValueError for an
    unrecognized style name.
    """
    if pieces["error"]:
        # An upstream step already failed; report it instead of a version.
        return {"version": "unknown",
                "full-revisionid": pieces.get("long"),
                "dirty": None,
                "error": pieces["error"],
                "date": None}

    if not style or style == "default":
        style = "pep440"  # the default

    if style == "pep440":
        rendered = render_pep440(pieces)
    elif style == "pep440-branch":
        rendered = render_pep440_branch(pieces)
    elif style == "pep440-pre":
        rendered = render_pep440_pre(pieces)
    elif style == "pep440-post":
        rendered = render_pep440_post(pieces)
    elif style == "pep440-post-branch":
        rendered = render_pep440_post_branch(pieces)
    elif style == "pep440-old":
        rendered = render_pep440_old(pieces)
    elif style == "git-describe":
        rendered = render_git_describe(pieces)
    elif style == "git-describe-long":
        rendered = render_git_describe_long(pieces)
    else:
        raise ValueError("unknown style '%s'" % style)

    return {"version": rendered, "full-revisionid": pieces["long"],
            "dirty": pieces["dirty"], "error": None,
            "date": pieces.get("date")}


class VersioneerBadRootError(Exception):
    """The project root directory is unknown or missing key files."""


def get_versions(verbose=False):
    """Get the project version from whatever source is available.

    Returns a dict with keys 'version', 'full-revisionid', 'dirty',
    'error', and 'date'.
    """
    if "versioneer" in sys.modules:
        # see the discussion in cmdclass.py:get_cmdclass()
        del sys.modules["versioneer"]

    root = get_root()
    cfg = get_config_from_root(root)

    assert cfg.VCS is not None, "please set [versioneer]VCS= in setup.cfg"
    handlers = HANDLERS.get(cfg.VCS)
    assert handlers, "unrecognized VCS '%s'" % cfg.VCS
    verbose = verbose or cfg.verbose
    assert cfg.versionfile_source is not None, \
        "please set versioneer.versionfile_source"
    assert cfg.tag_prefix is not None, "please set versioneer.tag_prefix"

    versionfile_abs = os.path.join(root, cfg.versionfile_source)

    # extract version from first of: _version.py, VCS command (e.g. 'git
    # describe'), parentdir. This is meant to work for developers using a
    # source checkout, for users of a tarball created by 'setup.py sdist',
    # and for users of a tarball/zipball created by 'git archive' or github's
    # download-from-tag feature or the equivalent in other VCSes.
    # Each strategy signals "not applicable" by raising NotThisMethod.

    # Strategy 1: expanded `git archive` keywords in _version.py.
    get_keywords_f = handlers.get("get_keywords")
    from_keywords_f = handlers.get("keywords")
    if get_keywords_f and from_keywords_f:
        try:
            keywords = get_keywords_f(versionfile_abs)
            ver = from_keywords_f(keywords, cfg.tag_prefix, verbose)
            if verbose:
                print("got version from expanded keyword %s" % ver)
            return ver
        except NotThisMethod:
            pass

    # Strategy 2: a previously rewritten (static) _version.py.
    try:
        ver = versions_from_file(versionfile_abs)
        if verbose:
            print("got version from file %s %s" % (versionfile_abs, ver))
        return ver
    except NotThisMethod:
        pass

    # Strategy 3: ask the VCS directly (e.g. 'git describe').
    from_vcs_f = handlers.get("pieces_from_vcs")
    if from_vcs_f:
        try:
            pieces = from_vcs_f(cfg.tag_prefix, root, verbose)
            ver = render(pieces, cfg.style)
            if verbose:
                print("got version from VCS %s" % ver)
            return ver
        except NotThisMethod:
            pass

    # Strategy 4: parse the unpacked source directory's name.
    try:
        if cfg.parentdir_prefix:
            ver = versions_from_parentdir(cfg.parentdir_prefix, root, verbose)
            if verbose:
                print("got version from parentdir %s" % ver)
            return ver
    except NotThisMethod:
        pass

    if verbose:
        print("unable to compute version")

    return {"version": "0+unknown", "full-revisionid": None,
            "dirty": None, "error": "unable to compute version",
            "date": None}
def get_version():
    """Get the short version string for this project."""
    return get_versions()["version"]


def get_cmdclass(cmdclass=None):
    """Get the custom setuptools/distutils subclasses used by Versioneer.

    If the package uses a different cmdclass (e.g. one from numpy), it
    should be provided as an argument.
    """
    if "versioneer" in sys.modules:
        del sys.modules["versioneer"]
        # this fixes the "python setup.py develop" case (also 'install' and
        # 'easy_install .'), in which subdependencies of the main project are
        # built (using setup.py bdist_egg) in the same python process. Assume
        # a main project A and a dependency B, which use different versions
        # of Versioneer. A's setup.py imports A's Versioneer, leaving it in
        # sys.modules by the time B's setup.py is executed, causing B to run
        # with the wrong versioneer. Setuptools wraps the sub-dep builds in a
        # sandbox that restores sys.modules to it's pre-build state, so the
        # parent is protected against the child's "import versioneer". By
        # removing ourselves from sys.modules here, before the child build
        # happens, we protect the child from the parent's versioneer too.
        # Also see https://github.com/python-versioneer/python-versioneer/issues/52

    # Copy so the caller's dict is never mutated.
    cmds = {} if cmdclass is None else cmdclass.copy()

    # we add "version" to both distutils and setuptools
    # NOTE(review): distutils is deprecated since Python 3.10 (PEP 632);
    # newer versioneer releases import Command from setuptools instead.
    from distutils.core import Command

    class cmd_version(Command):
        description = "report generated version string"
        user_options = []
        boolean_options = []

        def initialize_options(self):
            pass

        def finalize_options(self):
            pass

        def run(self):
            vers = get_versions(verbose=True)
            print("Version: %s" % vers["version"])
            print(" full-revisionid: %s" % vers.get("full-revisionid"))
            print(" dirty: %s" % vers.get("dirty"))
            print(" date: %s" % vers.get("date"))
            if vers["error"]:
                print(" error: %s" % vers["error"])
    cmds["version"] = cmd_version

    # we override "build_py" in both distutils and setuptools
    #
    # most invocation pathways end up running build_py:
    #  distutils/build -> build_py
    #  distutils/install -> distutils/build ->..
    #  setuptools/bdist_wheel -> distutils/install ->..
    #  setuptools/bdist_egg -> distutils/install_lib -> build_py
    #  setuptools/install -> bdist_egg ->..
    #  setuptools/develop -> ?
    #  pip install:
    #   copies source tree to a tempdir before running egg_info/etc
    #   if .git isn't copied too, 'git describe' will fail
    #   then does setup.py bdist_wheel, or sometimes setup.py install
    #  setup.py egg_info -> ?

    # we override different "build_py" commands for both environments
    if 'build_py' in cmds:
        _build_py = cmds['build_py']
    elif "setuptools" in sys.modules:
        from setuptools.command.build_py import build_py as _build_py
    else:
        from distutils.command.build_py import build_py as _build_py

    class cmd_build_py(_build_py):
        def run(self):
            root = get_root()
            cfg = get_config_from_root(root)
            # Compute the version BEFORE the build copies files around.
            versions = get_versions()
            _build_py.run(self)
            # now locate _version.py in the new build/ directory and replace
            # it with an updated value
            if cfg.versionfile_build:
                target_versionfile = os.path.join(self.build_lib,
                                                  cfg.versionfile_build)
                print("UPDATING %s" % target_versionfile)
                write_to_version_file(target_versionfile, versions)
    cmds["build_py"] = cmd_build_py

    if 'build_ext' in cmds:
        _build_ext = cmds['build_ext']
    elif "setuptools" in sys.modules:
        from setuptools.command.build_ext import build_ext as _build_ext
    else:
        from distutils.command.build_ext import build_ext as _build_ext

    class cmd_build_ext(_build_ext):
        def run(self):
            root = get_root()
            cfg = get_config_from_root(root)
            versions = get_versions()
            _build_ext.run(self)
            if self.inplace:
                # build_ext --inplace will only build extensions in
                # build/lib<..> dir with no _version.py to write to.
                # As in place builds will already have a _version.py
                # in the module dir, we do not need to write one.
                return
            # now locate _version.py in the new build/ directory and replace
            # it with an updated value
            target_versionfile = os.path.join(self.build_lib,
                                              cfg.versionfile_build)
            print("UPDATING %s" % target_versionfile)
            write_to_version_file(target_versionfile, versions)
    cmds["build_ext"] = cmd_build_ext

    if "cx_Freeze" in sys.modules:  # cx_freeze enabled?
        from cx_Freeze.dist import build_exe as _build_exe
        # nczeczulin reports that py2exe won't like the pep440-style string
        # as FILEVERSION, but it can be used for PRODUCTVERSION, e.g.
        # setup(console=[{
        #   "version": versioneer.get_version().split("+", 1)[0],  # FILEVERSION
        #   "product_version": versioneer.get_version(),
        #   ...

        class cmd_build_exe(_build_exe):
            def run(self):
                root = get_root()
                cfg = get_config_from_root(root)
                versions = get_versions()
                # Temporarily replace the source _version.py with the static
                # short form, build, then restore the long template.
                target_versionfile = cfg.versionfile_source
                print("UPDATING %s" % target_versionfile)
                write_to_version_file(target_versionfile, versions)

                _build_exe.run(self)
                os.unlink(target_versionfile)
                with open(cfg.versionfile_source, "w") as f:
                    LONG = LONG_VERSION_PY[cfg.VCS]
                    f.write(LONG %
                            {"DOLLAR": "$",
                             "STYLE": cfg.style,
                             "TAG_PREFIX": cfg.tag_prefix,
                             "PARENTDIR_PREFIX": cfg.parentdir_prefix,
                             "VERSIONFILE_SOURCE": cfg.versionfile_source,
                             })
        cmds["build_exe"] = cmd_build_exe
        # build_exe takes over version-file handling, so the build_py
        # override is dropped here.
        del cmds["build_py"]

    if 'py2exe' in sys.modules:  # py2exe enabled?
        from py2exe.distutils_buildexe import py2exe as _py2exe

        class cmd_py2exe(_py2exe):
            def run(self):
                root = get_root()
                cfg = get_config_from_root(root)
                versions = get_versions()
                # Same rewrite-build-restore dance as cmd_build_exe above.
                target_versionfile = cfg.versionfile_source
                print("UPDATING %s" % target_versionfile)
                write_to_version_file(target_versionfile, versions)

                _py2exe.run(self)
                os.unlink(target_versionfile)
                with open(cfg.versionfile_source, "w") as f:
                    LONG = LONG_VERSION_PY[cfg.VCS]
                    f.write(LONG %
                            {"DOLLAR": "$",
                             "STYLE": cfg.style,
                             "TAG_PREFIX": cfg.tag_prefix,
                             "PARENTDIR_PREFIX": cfg.parentdir_prefix,
                             "VERSIONFILE_SOURCE": cfg.versionfile_source,
                             })
        cmds["py2exe"] = cmd_py2exe

    # we override different "sdist" commands for both environments
    if 'sdist' in cmds:
        _sdist = cmds['sdist']
    elif "setuptools" in sys.modules:
        from setuptools.command.sdist import sdist as _sdist
    else:
        from distutils.command.sdist import sdist as _sdist

    class cmd_sdist(_sdist):
        def run(self):
            versions = get_versions()
            # Stash for make_release_tree, which runs later in the same
            # command instance.
            self._versioneer_generated_versions = versions
            # unless we update this, the command will keep using the old
            # version
            self.distribution.metadata.version = versions["version"]
            return _sdist.run(self)

        def make_release_tree(self, base_dir, files):
            root = get_root()
            cfg = get_config_from_root(root)
            _sdist.make_release_tree(self, base_dir, files)
            # now locate _version.py in the new base_dir directory
            # (remembering that it may be a hardlink) and replace it with an
            # updated value
            target_versionfile = os.path.join(base_dir, cfg.versionfile_source)
            print("UPDATING %s" % target_versionfile)
            write_to_version_file(target_versionfile,
                                  self._versioneer_generated_versions)
    cmds["sdist"] = cmd_sdist

    return cmds


# User-facing message printed when setup.cfg lacks a [versioneer] section.
CONFIG_ERROR = """
setup.cfg is missing the necessary Versioneer configuration. You need
a section like:

 [versioneer]
 VCS = git
 style = pep440
 versionfile_source = src/myproject/_version.py
 versionfile_build = myproject/_version.py
 tag_prefix =
 parentdir_prefix = myproject-

You will also need to edit your setup.py to use the results:

 import versioneer
 setup(version=versioneer.get_version(),
       cmdclass=versioneer.get_cmdclass(), ...)

Please read the docstring in ./versioneer.py for configuration instructions,
edit setup.cfg, and re-run the installer or 'python versioneer.py setup'.
"""

# Commented-out [versioneer] stanza appended to setup.cfg when missing.
SAMPLE_CONFIG = """
# See the docstring in versioneer.py for instructions. Note that you must
# re-run 'versioneer.py setup' after changing this section, and commit the
# resulting files.

[versioneer]
#VCS = git
#style = pep440
#versionfile_source =
#versionfile_build =
#tag_prefix =
#parentdir_prefix =

"""

# Legacy __init__.py boilerplate that do_setup() replaces in place.
OLD_SNIPPET = """
from ._version import get_versions
__version__ = get_versions()['version']
del get_versions
"""

# Current __init__.py boilerplate; {0} is the _version module name.
INIT_PY_SNIPPET = """
from . import {0}
__version__ = {0}.get_versions()['version']
"""
def do_setup():
    """Do main VCS-independent setup function for installing Versioneer.

    Writes the long _version.py template, wires up the package __init__.py,
    ensures MANIFEST.in includes the needed files, and applies VCS-specific
    configuration.  Returns 0 on success, 1 on missing/incomplete config.
    """
    root = get_root()
    try:
        cfg = get_config_from_root(root)
    except (OSError, configparser.NoSectionError,
            configparser.NoOptionError) as e:
        if isinstance(e, (OSError, configparser.NoSectionError)):
            # No config at all: append a commented sample for the user.
            print("Adding sample versioneer config to setup.cfg",
                  file=sys.stderr)
            with open(os.path.join(root, "setup.cfg"), "a") as f:
                f.write(SAMPLE_CONFIG)
        print(CONFIG_ERROR, file=sys.stderr)
        return 1

    print(" creating %s" % cfg.versionfile_source)
    with open(cfg.versionfile_source, "w") as f:
        LONG = LONG_VERSION_PY[cfg.VCS]
        f.write(LONG % {"DOLLAR": "$",
                        "STYLE": cfg.style,
                        "TAG_PREFIX": cfg.tag_prefix,
                        "PARENTDIR_PREFIX": cfg.parentdir_prefix,
                        "VERSIONFILE_SOURCE": cfg.versionfile_source,
                        })

    ipy = os.path.join(os.path.dirname(cfg.versionfile_source),
                       "__init__.py")
    if os.path.exists(ipy):
        try:
            with open(ipy, "r") as f:
                old = f.read()
        except OSError:
            old = ""
        module = os.path.splitext(os.path.basename(cfg.versionfile_source))[0]
        snippet = INIT_PY_SNIPPET.format(module)
        if OLD_SNIPPET in old:
            # Upgrade the legacy boilerplate in place.
            print(" replacing boilerplate in %s" % ipy)
            with open(ipy, "w") as f:
                f.write(old.replace(OLD_SNIPPET, snippet))
        elif snippet not in old:
            print(" appending to %s" % ipy)
            with open(ipy, "a") as f:
                f.write(snippet)
        else:
            print(" %s unmodified" % ipy)
    else:
        print(" %s doesn't exist, ok" % ipy)
        ipy = None

    # Make sure both the top-level "versioneer.py" and versionfile_source
    # (PKG/_version.py, used by runtime code) are in MANIFEST.in, so
    # they'll be copied into source distributions. Pip won't be able to
    # install the package without this.
    manifest_in = os.path.join(root, "MANIFEST.in")
    simple_includes = set()
    try:
        with open(manifest_in, "r") as f:
            for line in f:
                if line.startswith("include "):
                    for include in line.split()[1:]:
                        simple_includes.add(include)
    except OSError:
        pass
    # That doesn't cover everything MANIFEST.in can do
    # (http://docs.python.org/2/distutils/sourcedist.html#commands), so
    # it might give some false negatives. Appending redundant 'include'
    # lines is safe, though.
    if "versioneer.py" not in simple_includes:
        print(" appending 'versioneer.py' to MANIFEST.in")
        with open(manifest_in, "a") as f:
            f.write("include versioneer.py\n")
    else:
        print(" 'versioneer.py' already in MANIFEST.in")
    if cfg.versionfile_source not in simple_includes:
        print(" appending versionfile_source ('%s') to MANIFEST.in" %
              cfg.versionfile_source)
        with open(manifest_in, "a") as f:
            f.write("include %s\n" % cfg.versionfile_source)
    else:
        print(" versionfile_source already in MANIFEST.in")

    # Make VCS-specific changes. For git, this means creating/changing
    # .gitattributes to mark _version.py for export-subst keyword
    # substitution.
    do_vcs_install(manifest_in, cfg.versionfile_source, ipy)
    return 0


def scan_setup_py():
    """Validate the contents of setup.py against Versioneer's expectations.

    Returns the number of problems found (0 means setup.py looks right).
    """
    found = set()
    setters = False
    errors = 0
    with open("setup.py", "r") as f:
        for line in f.readlines():
            if "import versioneer" in line:
                found.add("import")
            if "versioneer.get_cmdclass()" in line:
                found.add("cmdclass")
            if "versioneer.get_version()" in line:
                found.add("get_version")
            if "versioneer.VCS" in line:
                setters = True
            if "versioneer.versionfile_source" in line:
                setters = True
    if len(found) != 3:
        print("")
        print("Your setup.py appears to be missing some important items")
        print("(but I might be wrong). Please make sure it has something")
        print("roughly like the following:")
        print("")
        print(" import versioneer")
        print(" setup( version=versioneer.get_version(),")
        print(" cmdclass=versioneer.get_cmdclass(), ...)")
        print("")
        errors += 1
    if setters:
        # Pre-setup.cfg configuration style; no longer supported.
        print("You should remove lines like 'versioneer.VCS = ' and")
        print("'versioneer.versionfile_source = ' . This configuration")
        print("now lives in setup.cfg, and should be removed from setup.py")
        print("")
        errors += 1
    return errors


if __name__ == "__main__":
    # NOTE(review): raises IndexError when run with no arguments; only the
    # "setup" subcommand is recognized, all others are silently ignored.
    cmd = sys.argv[1]
    if cmd == "setup":
        errors = do_setup()
        errors += scan_setup_py()
        if errors:
            sys.exit(1)