├── .gitattributes
├── CHANGELOG.md
├── docs
│   ├── requirements.txt
│   ├── index.rst
│   ├── Makefile
│   ├── make.bat
│   └── conf.py
├── MANIFEST.in
├── requirements-dev.txt
├── .github
│   └── workflows
│       ├── pre-commit.yml
│       ├── release.yml
│       ├── workflow.yml
│       └── coverage.yml
├── setup.cfg
├── .readthedocs.yml
├── LICENSE_SHORT
├── tests
│   ├── __init__.py
│   ├── casfs
│   │   ├── __init__.py
│   │   └── test_base.py
│   ├── context.py
│   └── conftest.py
├── casfs
│   ├── __init__.py
│   ├── util.py
│   ├── base.py
│   └── _version.py
├── .pre-commit-config.yaml
├── .gitignore
├── Makefile
├── setup.py
├── README.md
├── CONTRIBUTING.md
├── LICENSE
├── pylintrc
└── versioneer.py
/.gitattributes:
--------------------------------------------------------------------------------
1 | casfs/_version.py export-subst
2 |
--------------------------------------------------------------------------------
/CHANGELOG.md:
--------------------------------------------------------------------------------
1 | ## 0.1.0
2 |
3 | First release.
4 |
--------------------------------------------------------------------------------
/docs/requirements.txt:
--------------------------------------------------------------------------------
1 | sphinx==3.0.4
2 | sphinx_rtd_theme
3 |
--------------------------------------------------------------------------------
/MANIFEST.in:
--------------------------------------------------------------------------------
1 | include versioneer.py
2 | include casfs/_version.py
3 |
--------------------------------------------------------------------------------
/requirements-dev.txt:
--------------------------------------------------------------------------------
1 | # Required for development, not publication.
2 | hypothesis
3 | pre-commit
4 | pytest==5.4.3
5 | pytest-cov==2.10.0
6 | twine
7 |
--------------------------------------------------------------------------------
/docs/index.rst:
--------------------------------------------------------------------------------
1 | CASFS
2 | =====
3 |
4 | CASFS is a content-addressable filestore built on pyfilesystem2. Welcome!
5 |
6 | .. toctree::
7 | :maxdepth: 2
8 | :caption: Contents:
9 |
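10 | A quick taste, shown here against a throwaway ``temp://`` filesystem (the FS
11 | URL is arbitrary; any pyfilesystem2 backend works the same way):
12 |
13 | .. code-block:: python
14 |
15 |    from io import StringIO
16 |
17 |    from casfs import CASFS
18 |
19 |    cas = CASFS("temp://casfs-demo")
20 |    key = cas.put(StringIO("A"))  # key is a HashAddress
21 |    key.id                        # content hash (hex digest)
22 |    key.relpath                   # sharded relative path inside the store
23 |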
10 | Indices and tables
11 | ==================
12 |
13 | * :ref:`genindex`
14 | * :ref:`modindex`
15 | * :ref:`search`
16 |
--------------------------------------------------------------------------------
/.github/workflows/pre-commit.yml:
--------------------------------------------------------------------------------
1 | name: pre-commit
2 |
3 | on:
4 | pull_request:
5 | push:
6 | branches: [master]
7 |
8 | jobs:
9 | pre-commit:
10 | runs-on: ubuntu-latest
11 | strategy:
12 | matrix:
13 | python-version: [3.7]
14 |
15 | steps:
16 | - uses: actions/checkout@v2
17 | - uses: actions/setup-python@v2
18 | with:
19 | python-version: 3.7
20 | - uses: pre-commit/action@v2.0.0
21 |
--------------------------------------------------------------------------------
/setup.cfg:
--------------------------------------------------------------------------------
1 | [tool:pytest]
2 | norecursedirs = env
3 | addopts = --doctest-modules -v -s
4 |
5 | [pycodestyle]
6 | ignore = E111,E114
7 |
8 | [yapf]
9 | based_on_style = google
10 | indent_width = 2
11 |
12 | # pytest coverage options
13 | [run]
14 | omit =
15 | */tests/*
16 | */test_*
17 | */_version.py
18 |
19 | [versioneer]
20 | VCS = git
21 | style = pep440
22 | versionfile_source = casfs/_version.py
23 | versionfile_build = casfs/_version.py
24 | tag_prefix =
25 |
--------------------------------------------------------------------------------
/.readthedocs.yml:
--------------------------------------------------------------------------------
1 | # .readthedocs.yml
2 | # Read the Docs configuration file
3 | # See https://docs.readthedocs.io/en/stable/config-file/v2.html for details
4 |
5 | # Required
6 | version: 2
7 |
8 | # Build documentation in the docs/ directory with Sphinx
9 | sphinx:
10 | configuration: docs/conf.py
11 |
12 | # Optionally set the version of Python and requirements required to build your docs
13 | python:
14 | version: 3.7
15 | install:
16 | - requirements: docs/requirements.txt
17 |
--------------------------------------------------------------------------------
/LICENSE_SHORT:
--------------------------------------------------------------------------------
1 | # Copyright 2020 Google LLC
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License");
4 | # you may not use this file except in compliance with the License.
5 | # You may obtain a copy of the License at
6 | #
7 | # https://www.apache.org/licenses/LICENSE-2.0
8 | #
9 | # Unless required by applicable law or agreed to in writing, software
10 | # distributed under the License is distributed on an "AS IS" BASIS,
11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | # See the License for the specific language governing permissions and
13 | # limitations under the License.
14 |
--------------------------------------------------------------------------------
/tests/__init__.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/python
2 | #
3 | # Copyright 2020 Google LLC
4 | #
5 | # Licensed under the Apache License, Version 2.0 (the "License");
6 | # you may not use this file except in compliance with the License.
7 | # You may obtain a copy of the License at
8 | #
9 | # http://www.apache.org/licenses/LICENSE-2.0
10 | #
11 | # Unless required by applicable law or agreed to in writing, software
12 | # distributed under the License is distributed on an "AS IS" BASIS,
13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 | # See the License for the specific language governing permissions and
15 | # limitations under the License.
16 |
--------------------------------------------------------------------------------
/tests/casfs/__init__.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/python
2 | #
3 | # Copyright 2020 Google LLC
4 | #
5 | # Licensed under the Apache License, Version 2.0 (the "License");
6 | # you may not use this file except in compliance with the License.
7 | # You may obtain a copy of the License at
8 | #
9 | # http://www.apache.org/licenses/LICENSE-2.0
10 | #
11 | # Unless required by applicable law or agreed to in writing, software
12 | # distributed under the License is distributed on an "AS IS" BASIS,
13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 | # See the License for the specific language governing permissions and
15 | # limitations under the License.
16 |
--------------------------------------------------------------------------------
/docs/Makefile:
--------------------------------------------------------------------------------
1 | # Minimal makefile for Sphinx documentation
2 | #
3 | # You can set these variables from the command line, and also
4 | # from the environment for the first two.
5 | SPHINXOPTS ?=
6 | SPHINXBUILD ?= ../env/bin/sphinx-build
7 | SOURCEDIR = .
8 | BUILDDIR = _build
9 |
10 | # Put it first so that "make" without argument is like "make help".
11 | help:
12 | @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
13 |
14 | .PHONY: help Makefile
15 |
16 | # Catch-all target: route all unknown targets to Sphinx using the new
17 | # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS).
18 | %: Makefile
19 | @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
20 |
--------------------------------------------------------------------------------
/.github/workflows/release.yml:
--------------------------------------------------------------------------------
1 | name: Release to PyPI
2 |
3 | on:
4 | release:
5 | types: [created]
6 |
7 | jobs:
8 | deploy:
9 | runs-on: ubuntu-latest
10 | steps:
11 | - uses: actions/checkout@v2
12 | - name: Set up Python 3.7
13 | uses: actions/setup-python@v2
14 | with:
15 | python-version: '3.7'
16 | - name: Install dependencies
17 | run: |
18 | python -m pip install --upgrade pip
19 | pip install setuptools wheel twine
20 | - name: Build and publish
21 | env:
22 | TWINE_USERNAME: ${{ secrets.PYPI_USERNAME }}
23 | TWINE_PASSWORD: ${{ secrets.PYPI_PASSWORD }}
24 | run: |
25 | python setup.py sdist bdist_wheel
26 | twine upload dist/*
27 |
--------------------------------------------------------------------------------
/tests/context.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/python
2 | #
3 | # Copyright 2020 Google LLC
4 | #
5 | # Licensed under the Apache License, Version 2.0 (the "License");
6 | # you may not use this file except in compliance with the License.
7 | # You may obtain a copy of the License at
8 | #
9 | # http://www.apache.org/licenses/LICENSE-2.0
10 | #
11 | # Unless required by applicable law or agreed to in writing, software
12 | # distributed under the License is distributed on an "AS IS" BASIS,
13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 | # See the License for the specific language governing permissions and
15 | # limitations under the License.
16 | import os
17 | import sys
18 | sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__),
19 | '..')))
20 |
21 | import casfs
22 |
--------------------------------------------------------------------------------
/casfs/__init__.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/python
2 | #
3 | # Copyright 2020 Google LLC
4 | #
5 | # Licensed under the Apache License, Version 2.0 (the "License");
6 | # you may not use this file except in compliance with the License.
7 | # You may obtain a copy of the License at
8 | #
9 | # http://www.apache.org/licenses/LICENSE-2.0
10 | #
11 | # Unless required by applicable law or agreed to in writing, software
12 | # distributed under the License is distributed on an "AS IS" BASIS,
13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 | # See the License for the specific language governing permissions and
15 | # limitations under the License.
16 | """Docs on CASFS.
17 | """
18 |
19 | from casfs.base import CASFS
20 | from casfs.util import HashAddress
21 |
22 | from ._version import get_versions
23 |
24 | __version__ = get_versions()['version']
25 | del get_versions
26 |
27 | __all__ = ("CASFS", "HashAddress")
28 |
--------------------------------------------------------------------------------
/docs/make.bat:
--------------------------------------------------------------------------------
1 | @ECHO OFF
2 |
3 | pushd %~dp0
4 |
5 | REM Command file for Sphinx documentation
6 |
7 | if "%SPHINXBUILD%" == "" (
8 | set SPHINXBUILD=sphinx-build
9 | )
10 | set SOURCEDIR=.
11 | set BUILDDIR=_build
12 |
13 | if "%1" == "" goto help
14 |
15 | %SPHINXBUILD% >NUL 2>NUL
16 | if errorlevel 9009 (
17 | echo.
18 | echo.The 'sphinx-build' command was not found. Make sure you have Sphinx
19 | echo.installed, then set the SPHINXBUILD environment variable to point
20 | echo.to the full path of the 'sphinx-build' executable. Alternatively you
21 | echo.may add the Sphinx directory to PATH.
22 | echo.
23 | echo.If you don't have Sphinx installed, grab it from
24 | echo.http://sphinx-doc.org/
25 | exit /b 1
26 | )
27 |
28 | %SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%
29 | goto end
30 |
31 | :help
32 | %SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%
33 |
34 | :end
35 | popd
36 |
--------------------------------------------------------------------------------
/.pre-commit-config.yaml:
--------------------------------------------------------------------------------
1 | #!/usr/bin/python
2 | #
3 | # Copyright 2020 Google LLC
4 | #
5 | # Licensed under the Apache License, Version 2.0 (the "License");
6 | # you may not use this file except in compliance with the License.
7 | # You may obtain a copy of the License at
8 | #
9 | # http://www.apache.org/licenses/LICENSE-2.0
10 | #
11 | # Unless required by applicable law or agreed to in writing, software
12 | # distributed under the License is distributed on an "AS IS" BASIS,
13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 | # See the License for the specific language governing permissions and
15 | # limitations under the License.
16 |
17 | repos:
18 | - repo: https://github.com/pre-commit/pre-commit-hooks
19 | rev: v2.3.0
20 | hooks:
21 | - id: check-yaml
22 | - id: end-of-file-fixer
23 | - id: trailing-whitespace
24 | - repo: https://github.com/pre-commit/mirrors-yapf
25 | rev: 'v0.29.0'
26 | hooks:
27 | - id: yapf
28 |
--------------------------------------------------------------------------------
/tests/conftest.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/python
2 | #
3 | # Copyright 2020 Google LLC
4 | #
5 | # Licensed under the Apache License, Version 2.0 (the "License");
6 | # you may not use this file except in compliance with the License.
7 | # You may obtain a copy of the License at
8 | #
9 | # http://www.apache.org/licenses/LICENSE-2.0
10 | #
11 | # Unless required by applicable law or agreed to in writing, software
12 | # distributed under the License is distributed on an "AS IS" BASIS,
13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 | # See the License for the specific language governing permissions and
15 | # limitations under the License.
16 | """Configuration for Hypothesis tests."""
17 |
18 | import os
19 |
20 | from hypothesis import Verbosity, settings
21 |
22 | settings.register_profile("ci", max_examples=1000)
23 | settings.register_profile("dev", max_examples=10)
24 | settings.register_profile("debug", max_examples=10, verbosity=Verbosity.verbose)
25 | settings.load_profile(os.getenv(u'HYPOTHESIS_PROFILE', 'default'))
26 |
--------------------------------------------------------------------------------
/.github/workflows/workflow.yml:
--------------------------------------------------------------------------------
1 | name: build
2 |
3 | on:
4 | pull_request:
5 | push:
6 | branches: [master]
7 |
8 | jobs:
9 | build:
10 | runs-on: ubuntu-latest
11 | strategy:
12 | matrix:
13 | python-version: [3.6, 3.7, 3.8]
14 |
15 | steps:
16 | - uses: actions/checkout@v2
17 | - name: Set up Python ${{ matrix.python-version }}
18 | uses: actions/setup-python@v2
19 | with:
20 | python-version: ${{ matrix.python-version }}
21 | - name: Cache pip
22 | uses: actions/cache@v2
23 | with:
24 | # This path is specific to Ubuntu
25 | path: ~/.cache/pip
26 | # Look to see if there is a cache hit for the corresponding requirements file
27 | key: ${{ runner.os }}-pip-${{ hashFiles('requirements-dev.txt') }}
28 | restore-keys: |
29 | ${{ runner.os }}-pip-
30 | ${{ runner.os }}-
31 | - name: Install dependencies
32 | run: |
33 | python -m pip install --upgrade pip
34 | pip install .
35 | if [ -f requirements-dev.txt ]; then pip install -r requirements-dev.txt; fi
36 | - name: Run pytest
37 | run: |
38 | pytest --doctest-modules -v -s \
39 | --hypothesis-profile dev \
40 | casfs tests
41 |
--------------------------------------------------------------------------------
/.github/workflows/coverage.yml:
--------------------------------------------------------------------------------
1 | name: coverage
2 |
3 | on:
4 | pull_request:
5 | push:
6 | branches: [master]
7 |
8 | jobs:
9 | build:
10 | runs-on: ubuntu-latest
11 | strategy:
12 | matrix:
13 | python-version: [3.6]
14 |
15 | steps:
16 | - uses: actions/checkout@v2
17 | - name: Set up Python
18 | uses: actions/setup-python@v2
19 | with:
20 | python-version: 3.7
21 | - name: Cache pip
22 | uses: actions/cache@v2
23 | with:
24 | # This path is specific to Ubuntu
25 | path: ~/.cache/pip
26 | # Look to see if there is a cache hit for the corresponding requirements file
27 | key: ${{ runner.os }}-pip-${{ hashFiles('requirements-dev.txt') }}
28 | restore-keys: |
29 | ${{ runner.os }}-pip-
30 | ${{ runner.os }}-
31 | - name: Install dependencies
32 | run: |
33 | python -m pip install --upgrade pip
34 | pip install .
35 | if [ -f requirements-dev.txt ]; then pip install -r requirements-dev.txt; fi
36 | - name: Run pytest, generate coverage
37 | run: |
38 | pytest --doctest-modules -v -s \
39 | --hypothesis-profile dev \
40 | --cov-config setup.cfg \
41 | --cov-report=xml \
42 | --cov casfs \
43 | casfs tests
44 | - name: Upload coverage to Codecov
45 | uses: codecov/codecov-action@v1
46 | with:
47 | fail_ci_if_error: true
48 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Logging
2 | logs/
3 |
4 | # Byte-compiled / optimized / DLL files
5 | __pycache__/
6 | *.py[cod]
7 | *$py.class
8 |
9 | # C extensions
10 | *.so
11 |
12 | # Distribution / packaging
13 | .Python
14 | build/
15 | develop-eggs/
16 | dist/
17 | downloads/
18 | eggs/
19 | .eggs/
20 | lib/
21 | lib64/
22 | parts/
23 | sdist/
24 | var/
25 | wheels/
26 | *.egg-info/
27 | .installed.cfg
28 | *.egg
29 | MANIFEST
30 |
31 | # PyInstaller
32 | # Usually these files are written by a python script from a template
33 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
34 | *.manifest
35 | *.spec
36 |
37 | # Installer logs
38 | pip-log.txt
39 | pip-delete-this-directory.txt
40 |
41 | # Unit test / coverage reports
42 | htmlcov/
43 | .tox/
44 | .coverage
45 | .coverage.*
46 | .cache
47 | nosetests.xml
48 | coverage.xml
49 | *.cover
50 | .hypothesis/
51 | .pytest_cache/
52 |
53 | # Translations
54 | *.mo
55 | *.pot
56 |
57 | # Django stuff:
58 | *.log
59 | local_settings.py
60 | db.sqlite3
61 |
62 | # Flask stuff:
63 | instance/
64 | .webassets-cache
65 |
66 | # Scrapy stuff:
67 | .scrapy
68 |
69 | # Sphinx documentation
70 | docs/_build/
71 |
72 | # PyBuilder
73 | target/
74 |
75 | # Jupyter Notebook
76 | .ipynb_checkpoints
77 |
78 | # pyenv
79 | .python-version
80 |
81 | # celery beat schedule file
82 | celerybeat-schedule
83 |
84 | # SageMath parsed files
85 | *.sage.py
86 |
87 | # Environments
88 | .env
89 | .venv
90 | env/
91 | venv/
92 | ENV/
93 | env.bak/
94 | venv.bak/
95 |
96 | # Spyder project settings
97 | .spyderproject
98 | .spyproject
99 |
100 | # Rope project settings
101 | .ropeproject
102 |
103 | # mkdocs documentation
104 | /site
105 |
106 | # mypy
107 | .mypy_cache/
108 |
--------------------------------------------------------------------------------
/Makefile:
--------------------------------------------------------------------------------
1 | ##
2 | # Variables
3 | ##
4 |
5 | ENV_NAME = env
6 | ENV_ACT = . env/bin/activate;
7 | PIP = $(ENV_NAME)/bin/pip
8 | PY = $(ENV_NAME)/bin/python
9 | PYTEST_ARGS = --doctest-modules -v -s --hypothesis-profile dev
10 | PYTEST_TARGET = casfs tests
11 | COVERAGE_ARGS = --cov-config setup.cfg --cov-report term-missing --cov
12 | COVERAGE_TARGET = casfs
13 | SCR_REPO = https://source.developers.google.com/p/blueshift-research/r/casfs
14 |
15 |
16 | ##
17 | # Targets
18 | ##
19 |
20 | .PHONY: build
21 | build: clean install
22 |
23 | .PHONY: clean
24 | clean: clean-env clean-files
25 |
26 | .PHONY: clean-env
27 | clean-env:
28 | rm -rf $(ENV_NAME)
29 |
30 | .PHONY: clean-files
31 | clean-files:
32 | rm -rf .tox
33 | rm -rf .coverage
34 | find . -name \*.pyc -type f -delete
35 | find . -name \*.test.db -type f -delete
36 | find . -depth -name __pycache__ -type d -exec rm -rf {} \;
37 | rm -rf dist *.egg* build
38 |
39 | .PHONY: install
40 | install:
41 | rm -rf $(ENV_NAME)
42 | virtualenv -p python3.5 $(ENV_NAME)
43 | $(PIP) install -r requirements-dev.txt && $(PIP) install -e .
44 |
45 | .PHONY: test
46 | test: lint pytest
47 |
48 | .PHONY: pytest
49 | pytest:
50 | $(ENV_ACT) pytest $(PYTEST_ARGS) $(COVERAGE_ARGS) $(COVERAGE_TARGET) $(PYTEST_TARGET)
51 |
52 | .PHONY: test-full
53 | test-full: lint test-setuppy clean-files
54 |
55 | .PHONY: test-setuppy
56 | test-setuppy:
57 | $(PY) setup.py test
58 |
59 | .PHONY: lint
60 | lint: pre-commit
61 |
62 | .PHONY: pre-commit
63 | pre-commit:
64 | $(ENV_ACT) pre-commit run --all-files
64 |
65 | .PHONY: push
66 | push:
67 | git push origin master
68 | git push --tags
69 |
70 | .PHONY: release-egg
71 | release-egg:
72 | $(ENV_ACT) python setup.py sdist bdist_wheel
73 | $(ENV_ACT) twine upload -r pypi dist/*
74 | rm -rf dist *.egg* build
75 |
76 | .PHONY: release
77 | release: push release-egg
78 |
--------------------------------------------------------------------------------
/setup.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/python
2 | #
3 | # Copyright 2020 Google LLC
4 | #
5 | # Licensed under the Apache License, Version 2.0 (the "License");
6 | # you may not use this file except in compliance with the License.
7 | # You may obtain a copy of the License at
8 | #
9 | # http://www.apache.org/licenses/LICENSE-2.0
10 | #
11 | # Unless required by applicable law or agreed to in writing, software
12 | # distributed under the License is distributed on an "AS IS" BASIS,
13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 | # See the License for the specific language governing permissions and
15 | # limitations under the License.
16 | from setuptools import setup, find_packages
17 |
18 |
19 | def with_versioneer(f, default=None):
20 | """Attempts to execute the supplied single-arg function by passing it
21 | versioneer if available; else, returns the default.
22 |
23 | """
24 | try:
25 | import versioneer
26 | return f(versioneer)
27 | except ModuleNotFoundError:
28 | return default
29 |
30 |
31 | def readme():
32 | try:
33 | with open('README.md') as f:
34 | return f.read()
35 | except Exception:
36 | return None
37 |
38 |
39 | REQUIRED_PACKAGES = ["fs", "fs-gcsfs"]
40 |
41 | setup(
42 | name='casfs',
43 | version=with_versioneer(lambda v: v.get_version()),
44 | cmdclass=with_versioneer(lambda v: v.get_cmdclass(), {}),
45 | description="Content-Addressable filesystem over Pyfilesystem2.",
46 | long_description=readme(),
47 | long_description_content_type="text/markdown",
48 | python_requires='>=3.5.3',
49 | author='Sam Ritchie',
50 | author_email='samritchie@google.com',
51 | url='https://github.com/google/casfs',
52 | packages=find_packages(exclude=('tests', 'docs')),
53 | install_requires=REQUIRED_PACKAGES,
54 | include_package_data=True,
55 | )
56 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # CASFS
2 |
3 | [](https://github.com/google/casfs/actions?query=workflow%3Abuild+branch%3Amaster)
4 | [](https://codecov.io/github/google/casfs)
5 | [](https://casfs.readthedocs.io/en/latest/?badge=latest)
6 | [](https://pypi.org/project/casfs)
7 |
8 | CASFS is a content-addressable filestore library, built on
9 | [pyfilesystem2](https://github.com/PyFilesystem/pyfilesystem2). CASFS was
10 | inspired by [hashfs](https://github.com/dgilland/hashfs).
11 |
12 | ## Installation and Usage
13 |
14 | Install CASFS via [pip](https://pypi.org/project/casfs/):
15 |
16 | ```bash
17 | pip install casfs
18 | ```
19 |
20 | Full documentation for CASFS lives at [Read The
21 | Docs](https://casfs.readthedocs.io/en/latest).
22 |
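23 | Here is a minimal usage sketch; the in-memory backend and the `'hello'` payload
24 | are just for illustration, and any pyfilesystem2 filesystem (or FS URL) works:
25 |
26 | ```python
27 | from io import StringIO
28 |
29 | from fs.memoryfs import MemoryFS
30 |
31 | from casfs import CASFS
32 |
33 | # Back the store with any pyfilesystem2 FS (or an FS URL such as "temp://...").
34 | cas = CASFS(MemoryFS())
35 |
36 | # `put` stores the content under its hash and returns a HashAddress key.
37 | key = cas.put(StringIO('hello'))
38 |
39 | # The key round-trips: look it up, check membership, and read the bytes back.
40 | assert cas.get(key) == key
41 | assert key in cas
42 | with cas.open(key) as f:
43 |   assert f.read() == b'hello'
44 | ```
45 |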
23 | ## Disclaimer
24 |
25 | This is a research project, not an official Google product. Expect bugs and
26 | sharp edges. Please help by trying out CASFS, [reporting
27 | bugs](https://github.com/google/casfs/issues), and letting us know what you
28 | think!
29 |
30 | ## Citing CASFS
31 |
32 | If CASFS helps you in your research, please consider citing the repository:
33 |
34 | ```
35 | @software{casfs2020github,
36 | author = {Sam Ritchie},
37 | title = {{CASFS}: Content-addressable filesystem abstraction for Python.},
38 | url = {http://github.com/google/casfs},
39 | version = {0.1.0},
40 | year = {2020},
41 | }
42 | ```
43 |
44 | In the above BibTeX entry, names are in alphabetical order, the version number
45 | is intended to be that of the latest tag on GitHub, and the year corresponds to
46 | the project's open-source release.
47 |
48 | ## License
49 |
50 | Copyright 2020 Google LLC.
51 |
52 | Licensed under the [Apache License, Version 2.0](http://www.apache.org/licenses/LICENSE-2.0).
53 |
--------------------------------------------------------------------------------
/CONTRIBUTING.md:
--------------------------------------------------------------------------------
1 | # How to Contribute
2 |
3 | So you want to add some code to CASFS. Excellent! This page details ways that
4 | you can contribute.
5 |
6 | ## Contributor License Agreement
7 |
8 | Contributions to this project must be accompanied by a Contributor License
9 | Agreement. You (or your employer) retain the copyright to your contribution;
10 | this simply gives us permission to use and redistribute your contributions as
11 | part of the project. Head over to <https://cla.developers.google.com/> to see
12 | your current agreements on file or to sign a new one.
13 |
14 | You generally only need to submit a CLA once, so if you've already submitted one
15 | (even if it was for a different project), you probably don't need to do it
16 | again.
17 |
18 | ## Developing in CASFS
19 |
20 | We use [pre-commit](https://pre-commit.com/) to manage a series of git
21 | pre-commit hooks for the project; for example, each time you commit code, the
22 | hooks will make sure that your Python is formatted properly. If it isn't, the
23 | hook will format it for you, so your second attempt to commit will get past
24 | the hook.
25 |
26 | All hooks are defined in `.pre-commit-config.yaml`. To install these hooks,
27 | install `pre-commit` if you don't yet have it. I prefer using
28 | [pipx](https://github.com/pipxproject/pipx) so that `pre-commit` stays globally
29 | available.
30 |
31 | ```bash
32 | pipx install pre-commit
33 | ```
34 |
35 | Then install the hooks with this command:
36 |
37 | ```bash
38 | pre-commit install
39 | ```
40 |
41 | Now they'll run on every commit. If you want to run them manually, you can run either of these commands:
42 |
43 | ```bash
44 | pre-commit run --all-files
45 |
46 | # or this, if you've previously run `make build`:
47 | make lint
48 | ```
49 |
50 | ## Documentation
51 |
52 | We use Sphinx to generate docs. To get live reloading working, use
53 | [sphinx-reload](https://pypi.org/project/sphinx-reload/):
54 |
55 | ```bash
56 | pipx install sphinx-reload
57 | ```
58 |
59 | Then, inside the casfs folder:
60 |
61 | ```bash
62 | sphinx-reload docs
63 | ```
64 |
65 | ## Publishing CASFS
66 |
67 | - First, run `make build` to get your virtual environment set up.
68 | - Make sure that you're on the master branch!
69 | - Add a new tag with `git tag 0.2.3` or the equivalent.
70 | - Run `make release` to push the latest code and tags to all relevant
71 |   repositories.
72 |
--------------------------------------------------------------------------------
/docs/conf.py:
--------------------------------------------------------------------------------
1 | # Configuration file for the Sphinx documentation builder.
2 | #
3 | # This file only contains a selection of the most common options. For a full
4 | # list see the documentation:
5 | # https://www.sphinx-doc.org/en/master/usage/configuration.html
6 |
7 | # -- Path setup --------------------------------------------------------------
8 |
9 | # If extensions (or modules to document with autodoc) are in another directory,
10 | # add these directories to sys.path here. If the directory is relative to the
11 | # documentation root, use os.path.abspath to make it absolute, like shown here.
12 | #
13 | # import os
14 | # import sys
15 | # sys.path.insert(0, os.path.abspath('.'))
16 |
17 | # -- Project information -----------------------------------------------------
18 |
19 | project = 'CASFS'
20 | copyright = '2020, Google LLC'
21 | author = 'Sam Ritchie'
22 |
23 | # The short X.Y version
24 | version = ''
25 | # The full version, including alpha/beta/rc tags
26 | release = ''
27 |
28 | # -- General configuration ---------------------------------------------------
29 |
30 | # Add any Sphinx extension module names here, as strings. They can be
31 | # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
32 | # ones.
33 | extensions = [
34 | 'sphinx.ext.autodoc',
35 | 'sphinx.ext.autosectionlabel',
36 | 'sphinx.ext.autosummary',
37 | 'sphinx.ext.intersphinx',
38 | 'sphinx.ext.napoleon',
39 | 'sphinx.ext.viewcode',
40 | ]
41 |
42 | intersphinx_mapping = {
43 | 'python': ('https://docs.python.org/3/', None),
44 | 'fs': ('https://pyfilesystem.readthedocs.io/en/latest/', None)
45 | }
46 |
47 | source_suffix = {'.rst': 'restructuredtext', '.txt': 'restructuredtext'}
48 |
49 | # Add any paths that contain templates here, relative to this directory.
50 | templates_path = ['_templates']
51 |
52 | # List of patterns, relative to source directory, that match files and
53 | # directories to ignore when looking for source files.
54 | # This pattern also affects html_static_path and html_extra_path.
55 | exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store', 'requirements.txt']
56 |
57 | # The name of the Pygments (syntax highlighting) style to use.
58 | pygments_style = None
59 | autosummary_generate = True
60 | napoleon_use_rtype = False
61 |
62 | mathjax_config = {
63 | 'TeX': {
64 | 'equationNumbers': {
65 | 'autoNumber': 'AMS',
66 | 'useLabelIds': True
67 | }
68 | },
69 | }
70 |
71 | # -- Options for HTML output -------------------------------------------------
72 |
73 | # The theme to use for HTML and HTML Help pages. See the documentation for
74 | # a list of builtin themes.
75 | #
76 | html_theme = 'sphinx_rtd_theme'
77 |
78 | # Theme options are theme-specific and customize the look and feel of a theme
79 | # further. For a list of options available for each theme, see the
80 | # documentation.
81 | html_theme_options = {
82 | 'logo_only': True,
83 | }
84 |
85 | # Add any paths that contain custom static files (such as style sheets) here,
86 | # relative to this directory. They are copied after the builtin static files,
87 | # so a file named "default.css" will overwrite the builtin "default.css".
88 | html_static_path = ['_static']
89 |
90 | htmlhelp_basename = 'CASFSdoc'
91 | epub_title = project
92 | epub_exclude_files = ['search.html']
93 |
--------------------------------------------------------------------------------
/casfs/util.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/python
2 | #
3 | # Copyright 2020 Google LLC
4 | #
5 | # Licensed under the Apache License, Version 2.0 (the "License");
6 | # you may not use this file except in compliance with the License.
7 | # You may obtain a copy of the License at
8 | #
9 | # http://www.apache.org/licenses/LICENSE-2.0
10 | #
11 | # Unless required by applicable law or agreed to in writing, software
12 | # distributed under the License is distributed on an "AS IS" BASIS,
13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 | # See the License for the specific language governing permissions and
15 | # limitations under the License.
16 | """Utilities for sharding etc."""
17 |
18 | import hashlib
19 | import logging
20 | from collections import namedtuple
21 | from typing import Any, List, Optional, Union
22 |
23 | import fs as pyfs
24 | from fs.base import FS
25 |
26 |
27 | def compact(items: List[Optional[Any]]) -> List[Any]:
28 | """Return only truthy elements of `items`."""
29 | return [item for item in items if item]
30 |
31 |
32 | def to_bytes(item: Union[str, bytes]) -> bytes:
33 | """Accepts either a bytes instance or a string; if str, returns a bytes
34 | instance, else acts as identity.
35 |
36 | """
37 | ret = item
38 |
39 | if not isinstance(item, bytes):
40 | ret = bytes(item, "utf8")
41 |
42 | return ret
43 |
44 |
45 | def computehash(stream, algorithm: str) -> str:
46 | """Compute hash of file using the supplied `algorithm`."""
47 | hashobj = hashlib.new(algorithm)
48 | for data in stream:
49 | hashobj.update(to_bytes(data))
50 | return hashobj.hexdigest()
51 |
52 |
53 | def shard(digest: str, depth: int, width: int) -> List[str]:
54 | """This creates a list of `depth` number of tokens with width `width` from the
55 | first part of the id plus the remainder.
56 |
57 | TODO examine Clojure's Blocks to see if there's some nicer style here.
58 |
59 | """
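60 |   # e.g. shard("abcdef0123", depth=2, width=2) -> ["ab", "cd", "ef0123"]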
60 |
61 | first = [digest[i * width:width * (i + 1)] for i in range(depth)]
62 | remaining = [digest[depth * width:]]
63 | return compact(first + remaining)
64 |
65 |
66 | def load_fs(root: Union[FS, str]) -> FS:
67 |   """Returns `root` unchanged if it's already an FS; if a string is supplied,
68 |   opens it as a pyfilesystem URL (creating it if needed)."""
68 | if isinstance(root, str):
69 | return pyfs.open_fs(root, create=True)
70 |
71 | if isinstance(root, FS):
72 | return root
73 |
74 | raise Exception("Not a filesystem or path!")
75 |
76 |
77 | def syspath(fs: FS, path: str) -> Optional[str]:
78 | """Returns the true system path for the supplied path on the supplied
79 |   filesystem, or None if that's not possible to fulfill (MemoryFS
80 | implementations, for example).
81 |
82 | """
83 | try:
84 | return fs.getsyspath(path)
85 | except pyfs.errors.NoSysPath:
86 | logging.error("Can't get a path.")
87 | return None
88 |
89 |
90 | # TODO add the hashing method here
91 | # TODO add a to and from string method
92 | class HashAddress(namedtuple("HashAddress", ["id", "relpath", "is_duplicate"])):
93 |   """File address containing a file's path on disk and its content hash ID.
94 |
95 | Attributes:
96 | id (str): Hash ID (hexdigest) of file contents.
97 | relpath (str): Relative path location to :attr:`HashFS.root`.
98 | is_duplicate (boolean, optional): Whether the hash address created was
99 | a duplicate of a previously existing file. Can only be ``True``
100 | after a put operation. Defaults to ``False``.
101 | """
102 |
103 | def __new__(cls, id, relpath, is_duplicate=False):
104 | return super(HashAddress, cls).__new__(cls, id, relpath, is_duplicate)
105 |
106 | def __eq__(self, obj):
107 | return isinstance(obj, HashAddress) and \
108 | obj.id == self.id and \
109 | obj.relpath == self.relpath
110 |
111 |
112 | # TODO examine, allow this to handle wrapping another stream in addition to
113 | # itself.
114 | class Stream(object):
115 | """Common interface for file-like objects.
116 |
117 | The input `obj` can be a file-like object or a path to a file. If `obj` is
118 | a path to a file, then it will be opened until :meth:`close` is called.
119 |   If `obj` is a file-like object, then its original position will be
120 | restored when :meth:`close` is called instead of closing the object
121 | automatically. Closing of the stream is deferred to whatever process passed
122 | the stream in.
123 |
124 |   Successive readings of the stream are supported without having to manually
125 |   reset its position back to ``0``.
126 | """
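127 |   # e.g. Stream(readable_obj) wraps an already-open file-like object, while
128 |   # Stream("some/path", fs=some_fs) opens that path on the supplied filesystem.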
127 |
128 | def __init__(self, obj, fs: Optional[FS] = None):
129 | if hasattr(obj, "read"):
130 | pos = obj.tell()
131 | elif fs:
132 | if fs.isfile(obj):
133 | obj = fs.open(obj, "rb")
134 | pos = None
135 | else:
136 | raise ValueError(
137 | "Object must be a valid file path or a readable object")
138 | else:
139 | raise ValueError(
140 | "Object must be readable, OR you must supply a filesystem.")
141 |
142 | try:
143 | file_stat = fs.getinfo(obj.name, namespaces=['stat'])
144 | buffer_size = file_stat.st_blksize
145 | except Exception:
146 | buffer_size = 8192
147 |
148 | self._obj = obj
149 | self._pos = pos
150 | self._buffer_size = buffer_size
151 |
152 | def __iter__(self):
153 | """Read underlying IO object and yield results. Return object to
154 | original position if we didn't open it originally.
155 | """
156 | self._obj.seek(0)
157 |
158 | while True:
159 | data = self._obj.read(self._buffer_size)
160 |
161 | if not data:
162 | break
163 |
164 | yield data
165 |
166 | if self._pos is not None:
167 | self._obj.seek(self._pos)
168 |
169 | def close(self):
170 | """Close underlying IO object if we opened it, else return it to
171 | original position.
172 | """
173 | if self._pos is None:
174 | self._obj.close()
175 | else:
176 | self._obj.seek(self._pos)
177 |
--------------------------------------------------------------------------------
/tests/casfs/test_base.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/python
2 | #
3 | # Copyright 2020 Google LLC
4 | #
5 | # Licensed under the Apache License, Version 2.0 (the "License");
6 | # you may not use this file except in compliance with the License.
7 | # You may obtain a copy of the License at
8 | #
9 | # http://www.apache.org/licenses/LICENSE-2.0
10 | #
11 | # Unless required by applicable law or agreed to in writing, software
12 | # distributed under the License is distributed on an "AS IS" BASIS,
13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 | # See the License for the specific language governing permissions and
15 | # limitations under the License.
16 | """Basic namespace; examples and tests.
17 |
18 |
19 | TODO - handle strings for 'put'
20 |
21 | """
22 |
23 | from contextlib import closing
24 | from io import StringIO
25 |
26 | import casfs.util as u
27 | from casfs import CASFS
28 | from fs.copy import copy_fs
29 | from fs.memoryfs import MemoryFS
30 | from fs.opener.errors import UnsupportedProtocol
31 |
32 | import pytest
33 |
34 |
35 | @pytest.fixture
36 | def mem():
37 | return MemoryFS()
38 |
39 |
40 | @pytest.fixture
41 | def memcas(mem):
42 | return CASFS(mem)
43 |
44 |
45 | def test_put_get(memcas):
46 | """Test basic operations."""
47 |
48 | # Create some content and put it into the filesystem.
49 | a = StringIO('content')
50 | ak = memcas.put(a)
51 |
52 | # You can get the key back out with various paths.
53 | assert memcas.get(ak) == ak
54 | assert memcas.get(ak.id) == ak
55 | assert memcas.get(ak.relpath) == ak
56 | assert memcas.exists(ak)
57 | assert ak in memcas
58 |
59 | with closing(memcas.open(ak)) as a_file:
60 | rt_a = a_file.read()
61 |
62 | # Boom, content round-trips.
63 | assert rt_a == b'content'
64 |
65 |
66 | def test_get_missing(memcas):
67 | # Passing in junk results in junk.
68 | assert memcas.get('random') == None
69 |
70 |
71 | def test_delete(memcas):
72 | # Create some content and put it into the filesystem.
73 | s = 'content'
74 | a = StringIO(s)
75 | ak = memcas.put(a)
76 | assert memcas.count() == 1
77 | assert memcas.size() == len(s.encode('utf-8'))
78 | assert memcas.exists(ak)
79 |
80 | memcas.delete(ak)
81 | assert memcas.count() == 0
82 | assert memcas.size() == 0
83 | assert not memcas.exists(ak)
84 |
85 | # deleting again is a no-op:
86 | assert memcas.delete(ak) is None
87 |
88 | # You'll get an error if you attempt to open a closed file.
89 | with pytest.raises(IOError):
90 | memcas.open(ak)
91 |
92 |
93 | def test_key_matches_content(memcas):
94 |   """Adding the same content to the store twice yields the same key."""
95 | ak = memcas.put(StringIO('A'))
96 | bk = memcas.put(StringIO('A'))
97 |
98 | assert ak == bk
99 |
100 | # deleting ak means bk is no longer available.
101 | assert memcas.exists(bk)
102 | memcas.delete(ak)
103 | assert not memcas.exists(bk)
104 |
105 |
106 | def test_files_folders_repair(memcas):
107 | ak = memcas.put(StringIO('A'))
108 | bk = memcas.put(StringIO('B'))
109 | ck = memcas.put(StringIO('C'))
110 |
111 |   # The sequence of files returned from files() consists of the set of all
112 | # relative paths inside the filesystem.
113 | assert set(memcas.files()) == {ak.relpath, bk.relpath, ck.relpath}
114 |
115 | # memcas itself is iterable!
116 | assert list(memcas) == list(memcas.files())
117 |
118 | # using a NEW CASFS instance with different settings for width and depth than the original
119 | newfs = CASFS(memcas.fs, width=7, depth=1)
120 |
121 | # guard against the defaults changing and invalidating the test.
122 | #
123 | # TODO I am quite suspicious of this test. I don't think we should store all
124 | # the is_duplicate info and the relative path inside of the key; it's not
125 | # really relevant. But maybe you do need to go get the actual system path.
126 | # Think about this.
127 | assert newfs.width != memcas.width
128 | assert newfs.depth != memcas.depth
129 | assert newfs.count() == memcas.count()
130 | assert newfs.size() == memcas.size()
131 | assert newfs.get(ak) == memcas.get(ak)
132 |
133 | # Then we repair, which should muck with stuff.
134 | old_folders, old_files = set(newfs.folders()), set(newfs.files())
135 | newfs.repair()
136 | new_folders, new_files = set(newfs.folders()), set(newfs.files())
137 |
138 | # folder names and file names were modified.
139 | assert old_folders != new_folders
140 | assert old_files != new_files
141 |
142 | # gets still work!
143 | assert newfs.get(ak) == memcas.get(ak)
144 |
145 |
146 | def test_folders(memcas):
147 | ak = memcas.put(StringIO('A'))
148 | bk = memcas.put(StringIO('B'))
149 |
150 | # almost certainly true, since we shard out the folders.
151 | assert len(list(memcas.folders())) == 2
152 |
153 |
154 | def test_repair_corruption():
155 | fs1 = CASFS(MemoryFS(), width=2, depth=2)
156 | fs2 = CASFS(MemoryFS(), width=7, depth=1)
157 |
158 | # populate EACH with the same kv pairs, but structured different ways inside
159 | # the filesystem.
160 | ak1, bk1 = fs1.put(StringIO('A')), fs1.put(StringIO('B'))
161 | ak2, bk2 = fs2.put(StringIO('A')), fs2.put(StringIO('B'))
162 |
163 | # fs2 of course only has two items in it.
164 | assert fs2.count() == 2
165 |
166 | # Now copy all of fs1 into fs2...
167 | copy_fs(fs1.fs, fs2.fs)
168 |
169 | # and note that it now has two copies of each item in the CAS.
170 | assert fs2.count() == 4
171 |
172 | # Repair should kill the duplicates.
173 | fs2.repair()
174 | assert fs2.count() == 2
175 |
176 | # fs2 itself is an iterable and has a length.
177 | assert len(fs2) == 2
178 |
179 |
180 | def test_internals():
181 | fs1 = CASFS(MemoryFS(), width=2, depth=2)
182 | fs2 = CASFS(MemoryFS(), width=7, depth=1)
183 | ak1 = fs1.put(StringIO('A'))
184 |
185 | # removing a path that doesn't exist should be a no-op.
186 | assert fs2._remove_empty(ak1.relpath) == None
187 |
188 | # if internally you try to make a directory that extends some non-directory
189 | # thing, some existing file, you'll see an error.
190 | with pytest.raises(AssertionError):
191 | fs1._makedirs(ak1.relpath + "/cake")
192 |
193 | # Unshard expects a path that actually exists.
194 | with pytest.raises(ValueError):
195 | fs1._unshard("random_path")
196 |
197 |
198 | def test_alternate_fs():
199 | # temp:// creates a temporary filesystem fs.
200 | fs = CASFS("temp://face")
201 | ak = fs.put(StringIO('A'))
202 |
203 | rt = None
204 | with closing(fs.open(ak)) as f:
205 | rt = f.read()
206 |
207 | # Boom, content round-trips!
208 | assert rt == b'A'
209 |
210 | with pytest.raises(UnsupportedProtocol):
211 | fs = CASFS("random://face")
212 |
213 | with pytest.raises(Exception):
214 | fs = CASFS(10)
215 |
216 |
217 | def test_syspath(memcas):
218 | # temp:// creates a temporary filesystem fs.
219 | fs = CASFS("temp://face")
220 | ak = fs.put(StringIO('A'))
221 | ak2 = memcas.put(StringIO('A'))
222 |
223 | rt = None
224 | with closing(fs.open(ak)) as f:
225 | rt = f.read()
226 |
227 | # it's possible in a filesystem-based FS to go get the ACTUAL path and use
228 | # that to read data out.
229 | with open(u.syspath(fs.fs, ak.relpath)) as f2:
230 | # the data should be the same.
231 | rt2 = f2.read()
232 | assert rt2.encode('utf-8') == rt
233 |
234 | # this trick does not work on a MemoryFS based CAS. It returns None to show
235 | # us that this isn't a thing.
236 | assert u.syspath(memcas.fs, ak.relpath) == None
237 |
238 |
239 | def test_stream(mem):
240 | # streams have to be something openable, for now... nothing primitive works.
241 | with pytest.raises(ValueError):
242 | u.Stream(10)
243 |
244 | # Looking like a path isn't enough. You have to pass the internal test of the
245 | # filesystem.
246 | with pytest.raises(ValueError):
247 | u.Stream("cake", fs=mem)
248 |
--------------------------------------------------------------------------------
/casfs/base.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/python
2 | #
3 | # Copyright 2020 Google LLC
4 | #
5 | # Licensed under the Apache License, Version 2.0 (the "License");
6 | # you may not use this file except in compliance with the License.
7 | # You may obtain a copy of the License at
8 | #
9 | # http://www.apache.org/licenses/LICENSE-2.0
10 | #
11 | # Unless required by applicable law or agreed to in writing, software
12 | # distributed under the License is distributed on an "AS IS" BASIS,
13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 | # See the License for the specific language governing permissions and
15 | # limitations under the License.
16 | """Module for CASFS class.
17 |
18 | Potential next steps:
19 |
20 | TODO - store the actual FULL hash at the end of each.
21 |
22 | TODO - we need a function that will provide a temp path... and when we leave
23 | the context manager, move the stuff from the temp path into the content
24 | addressable store.
25 |
26 | """
27 |
28 | import hashlib
29 | import io
30 | import os
31 | from contextlib import closing
32 | from typing import Iterable, Optional, Text, Tuple, Union
33 |
34 | import fs as pyfs
35 | from fs.permissions import Permissions
36 |
37 | import casfs.util as u
38 |
39 | Key = Union[str, u.HashAddress]
40 |
41 |
42 | class CASFS(object):
43 | """Content addressable file manager. This is the Blueshift rewrite of
44 | https://github.com/dgilland/hashfs, using
45 | https://github.com/PyFilesystem/pyfilesystem2.
46 |
47 | Attributes:
48 | root: Either an instance of pyfs.base.FS, or a URI string parseable by
49 | pyfilesystem.
50 | depth: Depth of subfolders to create when saving a file. Defaults to 2,
51 | which means that actual hashes will be nested two items deep.
52 | width: Width of each subfolder to create when saving a file. This means
53 | that blocks of `width` characters of the hash will be used to
54 | bucket content into each successive folder.
55 | algorithm: Hash algorithm to use when computing file hash. Algorithm
56 | should be available in `hashlib` module, ie, a member of
57 | `hashlib.algorithms_available`. Defaults to `'sha256'`.
58 | dmode: Directory mode permission to set for subdirectories. Defaults to
59 |       `0o755`, which allows the owner to read, write, and execute, and
60 |       everyone else to read and execute.
61 |
62 | """
63 |
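64 |   # Illustrative layout: with the default depth=2 and width=2, a digest like
65 |   # "deadbeef..." ends up at the relative path "de/ad/beef..." (see
66 |   # `casfs.util.shard`).
67 |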
64 | def __init__(self,
65 | root: Union[pyfs.base.FS, str],
66 | depth: Optional[int] = 2,
67 | width: Optional[int] = 2,
68 |                algorithm: str = "sha256",
69 | dmode: Optional[int] = 0o755):
70 |
71 | self.fs = u.load_fs(root)
72 | self.depth = depth
73 | self.width = width
74 | self.algorithm = algorithm
75 | self.dmode = dmode
76 |
77 | def put(self, content) -> u.HashAddress:
78 | """Store contents of `content` in the backing filesystem using its content hash
79 | for the address.
80 |
81 | Args:
82 | content: Readable object or path to file.
83 |
84 | Returns:
85 | File's hash address.
86 |
87 | """
88 | with closing(u.Stream(content, fs=self.fs)) as stream:
89 | hashid = self._computehash(stream)
90 | path, is_duplicate = self._copy(stream, hashid)
91 |
92 | return u.HashAddress(hashid, path, is_duplicate)
93 |
94 | def get(self, k: Key) -> Optional[u.HashAddress]:
95 | """Return :class:`HashAddress` from given id or path. If `k` does not refer to
96 | a valid file, then `None` is returned.
97 |
98 | Args:
99 | k: Address ID or path of file.
100 |
101 | Returns:
102 | File's hash address or None.
103 |
104 | """
105 | path = self._fs_path(k)
106 |
107 | if path is None:
108 | return None
109 |
110 | return u.HashAddress(self._unshard(path), path)
111 |
112 | def open(self, k: Key) -> io.IOBase:
113 | """Return open IOBase object from given id or path.
114 |
115 | Args:
116 | k: Address ID or path of file.
117 |
118 | Returns:
119 | Buffer: A read-only `io` buffer into the underlying filesystem.
120 |
121 | Raises:
122 | IOError: If file doesn't exist.
123 |
124 | """
125 | path = self._fs_path(k)
126 | if path is None:
127 | raise IOError("Could not locate file: {0}".format(k))
128 |
129 | return self.fs.open(path, mode='rb')
130 |
131 | def delete(self, k: Key) -> None:
132 | """Delete file using id or path. Remove any empty directories after
133 | deleting. No exception is raised if file doesn't exist.
134 |
135 | Args:
136 |       k: Key of the file to delete.
137 | """
138 | path = self._fs_path(k)
139 | if path is None:
140 | return None
141 |
142 | try:
143 | self.fs.remove(path)
144 | except OSError: # pragma: no cover
145 | # Attempting to delete a directory.
146 | pass
147 |     else:
148 |       self._remove_empty(pyfs.path.dirname(path))
150 |
151 | def files(self) -> Iterable[Text]:
152 | """Return generator that yields all files in the :attr:`fs`.
153 |
154 | """
155 | return (pyfs.path.relpath(p) for p in self.fs.walk.files())
156 |
157 | def folders(self) -> Iterable[Text]:
158 | """Return generator that yields all directories in the :attr:`fs` that contain
159 | files.
160 |
161 | """
162 | for step in self.fs.walk():
163 | if step.files:
164 | yield step.path
165 |
166 | def count(self) -> int:
167 | """Return count of the number of files in the backing :attr:`fs`.
168 | """
169 | return sum(1 for _, info in self.fs.walk.info() if info.is_file)
170 |
171 | def size(self) -> int:
172 | """Return the total size in bytes of all files in the :attr:`root`
173 | directory.
174 | """
175 | return sum(info.size
176 | for _, info in self.fs.walk.info(namespaces=['details'])
177 | if info.is_file)
178 |
179 | def exists(self, k: Key) -> bool:
180 | """Check whether a given file id or path exists on disk."""
181 | return bool(self._fs_path(k))
182 |
183 | def repair(self) -> Iterable[Text]:
184 | """Repair any file locations whose content address doesn't match its file path.
185 | Returns a sequence of repaired files.
186 |
187 | """
188 | repaired = []
189 | corrupted = self._corrupted()
190 |
191 | for path, address in corrupted:
192 | if self.fs.isfile(address.relpath):
193 | # File already exists so just delete corrupted path.
194 | self.fs.remove(path)
195 |
196 | else:
197 | # File doesn't exist, so move it.
198 | self._makedirs(pyfs.path.dirname(address.relpath))
199 | self.fs.move(path, address.relpath)
200 |
201 | repaired.append((path, address))
202 |
203 | # check for empty directories created by the repair.
204 | for d in {pyfs.path.dirname(p) for p, _ in repaired}:
205 | self._remove_empty(d)
206 |
207 | return repaired
208 |
209 | def __contains__(self, k: Key) -> bool:
210 | """Return whether a given file id or path is contained in the
211 | :attr:`root` directory.
212 | """
213 | return self.exists(k)
214 |
215 | def __iter__(self) -> Iterable[str]:
216 | """Iterate over all files in the backing store."""
217 | return self.files()
218 |
219 | def __len__(self) -> int:
220 | """Return count of the number of files tracked by the backing filesystem.
221 |
222 | """
223 | return self.count()
224 |
225 | def _computehash(self, stream: u.Stream) -> str:
226 | """Compute hash of file using :attr:`algorithm`."""
227 | return u.computehash(stream, self.algorithm)
228 |
229 | def _copy(self, stream: u.Stream, hashid: str) -> Tuple[Text, bool]:
230 | """Copy the contents of `stream` onto disk.
231 |
232 | Returns a pair of
233 |
234 | - relative path,
235 | - boolean noting whether or not we have a duplicate.
236 |
237 | """
238 | path = self._hashid_to_path(hashid)
239 |
240 | if self.fs.isfile(path):
241 | is_duplicate = True
242 |
243 | else:
244 | # Only move file if it doesn't already exist.
245 | is_duplicate = False
246 | self._makedirs(pyfs.path.dirname(path))
247 | with closing(self.fs.open(path, mode='wb')) as p:
248 | for data in stream:
249 | p.write(u.to_bytes(data))
250 |
251 | return (path, is_duplicate)
252 |
253 | def _remove_empty(self, path: str) -> None:
254 | """Successively remove all empty folders starting with `subpath` and
255 | proceeding "up" through directory tree until reaching the :attr:`root`
256 | folder.
257 | """
258 | try:
259 | pyfs.tools.remove_empty(self.fs, path)
260 | except pyfs.errors.ResourceNotFound:
261 | # Guard against paths that don't exist in the FS.
262 | return None
263 |
264 | def _makedirs(self, dir_path):
265 | """Physically create the folder path on disk."""
266 |
267 | try:
268 | # this is creating a directory, so we use dmode here.
269 | perms = Permissions.create(self.dmode)
270 | self.fs.makedirs(dir_path, permissions=perms, recreate=True)
271 |
272 | except pyfs.errors.DirectoryExpected:
273 | assert self.fs.isdir(dir_path), "expected {} to be a directory".format(
274 | dir_path)
275 |
276 | def _fs_path(self, k: Union[str, u.HashAddress]) -> Optional[str]:
277 | """Attempt to determine the real path of a file id or path through successive
278 | checking of candidate paths.
279 |
280 | """
281 | # if the input is ALREADY a hash address, pull out the relative path.
282 | if isinstance(k, u.HashAddress):
283 | k = k.relpath
284 |
285 | # Check if input was a fs path already.
286 | if self.fs.isfile(k):
287 | return k
288 |
289 | # Check if input was an ID.
290 | filepath = self._hashid_to_path(k)
291 | if self.fs.isfile(filepath):
292 | return filepath
293 |
294 | # Could not determine a match.
295 | return None
296 |
297 | def _hashid_to_path(self, hashid: str) -> str:
298 | """Build the relative file path for a given hash id.
299 |
300 | """
301 | paths = self._shard(hashid)
302 | return pyfs.path.join(*paths)
303 |
304 | def _shard(self, hashid: str) -> str:
305 | """Shard content ID into subfolders."""
306 | return u.shard(hashid, self.depth, self.width)
307 |
308 | def _unshard(self, path: str) -> str:
309 | """Unshard path to determine hash value."""
310 | if not self.fs.isfile(path):
311 |       raise ValueError("Cannot unshard path. The path {0!r} doesn't exist "
312 |                        "in the filesystem.".format(path))
313 |
314 | return pyfs.path.splitext(path)[0].replace(os.sep, "")
315 |
316 | def _corrupted(self) -> Iterable[Tuple[Text, u.HashAddress]]:
317 | """Return generator that yields corrupted files as ``(path, address)``, where
318 | ``path`` is the path of the corrupted file and ``address`` is the
319 | :class:`HashAddress` of the expected location.
320 |
321 | """
322 | for path in self.files():
323 | with closing(u.Stream(path, fs=self.fs)) as stream:
324 | hashid = self._computehash(stream)
325 |
326 | expected_path = self._hashid_to_path(hashid)
327 |
328 | if pyfs.path.abspath(expected_path) != pyfs.path.abspath(path):
329 | yield (
330 | path,
331 | u.HashAddress(hashid, expected_path),
332 | )
333 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 |
2 | Apache License
3 | Version 2.0, January 2004
4 | http://www.apache.org/licenses/
5 |
6 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
7 |
8 | 1. Definitions.
9 |
10 | "License" shall mean the terms and conditions for use, reproduction,
11 | and distribution as defined by Sections 1 through 9 of this document.
12 |
13 | "Licensor" shall mean the copyright owner or entity authorized by
14 | the copyright owner that is granting the License.
15 |
16 | "Legal Entity" shall mean the union of the acting entity and all
17 | other entities that control, are controlled by, or are under common
18 | control with that entity. For the purposes of this definition,
19 | "control" means (i) the power, direct or indirect, to cause the
20 | direction or management of such entity, whether by contract or
21 | otherwise, or (ii) ownership of fifty percent (50%) or more of the
22 | outstanding shares, or (iii) beneficial ownership of such entity.
23 |
24 | "You" (or "Your") shall mean an individual or Legal Entity
25 | exercising permissions granted by this License.
26 |
27 | "Source" form shall mean the preferred form for making modifications,
28 | including but not limited to software source code, documentation
29 | source, and configuration files.
30 |
31 | "Object" form shall mean any form resulting from mechanical
32 | transformation or translation of a Source form, including but
33 | not limited to compiled object code, generated documentation,
34 | and conversions to other media types.
35 |
36 | "Work" shall mean the work of authorship, whether in Source or
37 | Object form, made available under the License, as indicated by a
38 | copyright notice that is included in or attached to the work
39 | (an example is provided in the Appendix below).
40 |
41 | "Derivative Works" shall mean any work, whether in Source or Object
42 | form, that is based on (or derived from) the Work and for which the
43 | editorial revisions, annotations, elaborations, or other modifications
44 | represent, as a whole, an original work of authorship. For the purposes
45 | of this License, Derivative Works shall not include works that remain
46 | separable from, or merely link (or bind by name) to the interfaces of,
47 | the Work and Derivative Works thereof.
48 |
49 | "Contribution" shall mean any work of authorship, including
50 | the original version of the Work and any modifications or additions
51 | to that Work or Derivative Works thereof, that is intentionally
52 | submitted to Licensor for inclusion in the Work by the copyright owner
53 | or by an individual or Legal Entity authorized to submit on behalf of
54 | the copyright owner. For the purposes of this definition, "submitted"
55 | means any form of electronic, verbal, or written communication sent
56 | to the Licensor or its representatives, including but not limited to
57 | communication on electronic mailing lists, source code control systems,
58 | and issue tracking systems that are managed by, or on behalf of, the
59 | Licensor for the purpose of discussing and improving the Work, but
60 | excluding communication that is conspicuously marked or otherwise
61 | designated in writing by the copyright owner as "Not a Contribution."
62 |
63 | "Contributor" shall mean Licensor and any individual or Legal Entity
64 | on behalf of whom a Contribution has been received by Licensor and
65 | subsequently incorporated within the Work.
66 |
67 | 2. Grant of Copyright License. Subject to the terms and conditions of
68 | this License, each Contributor hereby grants to You a perpetual,
69 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
70 | copyright license to reproduce, prepare Derivative Works of,
71 | publicly display, publicly perform, sublicense, and distribute the
72 | Work and such Derivative Works in Source or Object form.
73 |
74 | 3. Grant of Patent License. Subject to the terms and conditions of
75 | this License, each Contributor hereby grants to You a perpetual,
76 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
77 | (except as stated in this section) patent license to make, have made,
78 | use, offer to sell, sell, import, and otherwise transfer the Work,
79 | where such license applies only to those patent claims licensable
80 | by such Contributor that are necessarily infringed by their
81 | Contribution(s) alone or by combination of their Contribution(s)
82 | with the Work to which such Contribution(s) was submitted. If You
83 | institute patent litigation against any entity (including a
84 | cross-claim or counterclaim in a lawsuit) alleging that the Work
85 | or a Contribution incorporated within the Work constitutes direct
86 | or contributory patent infringement, then any patent licenses
87 | granted to You under this License for that Work shall terminate
88 | as of the date such litigation is filed.
89 |
90 | 4. Redistribution. You may reproduce and distribute copies of the
91 | Work or Derivative Works thereof in any medium, with or without
92 | modifications, and in Source or Object form, provided that You
93 | meet the following conditions:
94 |
95 | (a) You must give any other recipients of the Work or
96 | Derivative Works a copy of this License; and
97 |
98 | (b) You must cause any modified files to carry prominent notices
99 | stating that You changed the files; and
100 |
101 | (c) You must retain, in the Source form of any Derivative Works
102 | that You distribute, all copyright, patent, trademark, and
103 | attribution notices from the Source form of the Work,
104 | excluding those notices that do not pertain to any part of
105 | the Derivative Works; and
106 |
107 | (d) If the Work includes a "NOTICE" text file as part of its
108 | distribution, then any Derivative Works that You distribute must
109 | include a readable copy of the attribution notices contained
110 | within such NOTICE file, excluding those notices that do not
111 | pertain to any part of the Derivative Works, in at least one
112 | of the following places: within a NOTICE text file distributed
113 | as part of the Derivative Works; within the Source form or
114 | documentation, if provided along with the Derivative Works; or,
115 | within a display generated by the Derivative Works, if and
116 | wherever such third-party notices normally appear. The contents
117 | of the NOTICE file are for informational purposes only and
118 | do not modify the License. You may add Your own attribution
119 | notices within Derivative Works that You distribute, alongside
120 | or as an addendum to the NOTICE text from the Work, provided
121 | that such additional attribution notices cannot be construed
122 | as modifying the License.
123 |
124 | You may add Your own copyright statement to Your modifications and
125 | may provide additional or different license terms and conditions
126 | for use, reproduction, or distribution of Your modifications, or
127 | for any such Derivative Works as a whole, provided Your use,
128 | reproduction, and distribution of the Work otherwise complies with
129 | the conditions stated in this License.
130 |
131 | 5. Submission of Contributions. Unless You explicitly state otherwise,
132 | any Contribution intentionally submitted for inclusion in the Work
133 | by You to the Licensor shall be under the terms and conditions of
134 | this License, without any additional terms or conditions.
135 | Notwithstanding the above, nothing herein shall supersede or modify
136 | the terms of any separate license agreement you may have executed
137 | with Licensor regarding such Contributions.
138 |
139 | 6. Trademarks. This License does not grant permission to use the trade
140 | names, trademarks, service marks, or product names of the Licensor,
141 | except as required for reasonable and customary use in describing the
142 | origin of the Work and reproducing the content of the NOTICE file.
143 |
144 | 7. Disclaimer of Warranty. Unless required by applicable law or
145 | agreed to in writing, Licensor provides the Work (and each
146 | Contributor provides its Contributions) on an "AS IS" BASIS,
147 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
148 | implied, including, without limitation, any warranties or conditions
149 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
150 | PARTICULAR PURPOSE. You are solely responsible for determining the
151 | appropriateness of using or redistributing the Work and assume any
152 | risks associated with Your exercise of permissions under this License.
153 |
154 | 8. Limitation of Liability. In no event and under no legal theory,
155 | whether in tort (including negligence), contract, or otherwise,
156 | unless required by applicable law (such as deliberate and grossly
157 | negligent acts) or agreed to in writing, shall any Contributor be
158 | liable to You for damages, including any direct, indirect, special,
159 | incidental, or consequential damages of any character arising as a
160 | result of this License or out of the use or inability to use the
161 | Work (including but not limited to damages for loss of goodwill,
162 | work stoppage, computer failure or malfunction, or any and all
163 | other commercial damages or losses), even if such Contributor
164 | has been advised of the possibility of such damages.
165 |
166 | 9. Accepting Warranty or Additional Liability. While redistributing
167 | the Work or Derivative Works thereof, You may choose to offer,
168 | and charge a fee for, acceptance of support, warranty, indemnity,
169 | or other liability obligations and/or rights consistent with this
170 | License. However, in accepting such obligations, You may act only
171 | on Your own behalf and on Your sole responsibility, not on behalf
172 | of any other Contributor, and only if You agree to indemnify,
173 | defend, and hold each Contributor harmless for any liability
174 | incurred by, or claims asserted against, such Contributor by reason
175 | of your accepting any such warranty or additional liability.
176 |
177 | END OF TERMS AND CONDITIONS
178 |
179 | APPENDIX: How to apply the Apache License to your work.
180 |
181 | To apply the Apache License to your work, attach the following
182 | boilerplate notice, with the fields enclosed by brackets "[]"
183 | replaced with your own identifying information. (Don't include
184 | the brackets!) The text should be enclosed in the appropriate
185 | comment syntax for the file format. We also recommend that a
186 | file or class name and description of purpose be included on the
187 | same "printed page" as the copyright notice for easier
188 | identification within third-party archives.
189 |
190 | Copyright [yyyy] [name of copyright owner]
191 |
192 | Licensed under the Apache License, Version 2.0 (the "License");
193 | you may not use this file except in compliance with the License.
194 | You may obtain a copy of the License at
195 |
196 | http://www.apache.org/licenses/LICENSE-2.0
197 |
198 | Unless required by applicable law or agreed to in writing, software
199 | distributed under the License is distributed on an "AS IS" BASIS,
200 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
201 | See the License for the specific language governing permissions and
202 | limitations under the License.
203 |
--------------------------------------------------------------------------------
/pylintrc:
--------------------------------------------------------------------------------
1 | [MASTER]
2 |
3 | # Specify a configuration file.
4 | #rcfile=
5 |
6 | # Python code to execute, usually for sys.path manipulation such as
7 | # pygtk.require().
8 | #init-hook=
9 |
10 | # Profiled execution.
11 | profile=no
12 |
13 | # Add files or directories to the blacklist. They should be base names, not
14 | # paths.
15 | ignore=CVS
16 |
17 | # Pickle collected data for later comparisons.
18 | persistent=yes
19 |
20 | # List of plugins (as comma separated values of python modules names) to load,
21 | # usually to register additional checkers.
22 | load-plugins=
23 |
24 |
25 | [MESSAGES CONTROL]
26 |
27 | # Enable the message, report, category or checker with the given id(s). You can
28 | # either give multiple identifier separated by comma (,) or put this option
29 | # multiple time. See also the "--disable" option for examples.
30 | enable=indexing-exception,old-raise-syntax
31 |
32 | # Disable the message, report, category or checker with the given id(s). You
33 | # can either give multiple identifiers separated by comma (,) or put this
34 | # option multiple times (only on the command line, not in the configuration
35 | # file where it should appear only once).You can also use "--disable=all" to
36 | # disable everything first and then reenable specific checks. For example, if
37 | # you want to run only the similarities checker, you can use "--disable=all
38 | # --enable=similarities". If you want to run only the classes checker, but have
39 | # no Warning level messages displayed, use "--disable=all --enable=classes
40 | # --disable=W"
41 | disable=design,similarities,no-self-use,attribute-defined-outside-init,locally-disabled,star-args,pointless-except,bad-option-value,global-statement,fixme,suppressed-message,useless-suppression,locally-enabled,no-member,no-name-in-module,import-error,unsubscriptable-object,unbalanced-tuple-unpacking,undefined-variable,not-context-manager
42 |
43 |
44 | # Set the cache size for astng objects.
45 | cache-size=500
46 |
47 |
48 | [REPORTS]
49 |
50 | # Set the output format. Available formats are text, parseable, colorized, msvs
51 | # (visual studio) and html. You can also give a reporter class, eg
52 | # mypackage.mymodule.MyReporterClass.
53 | output-format=text
54 |
55 | # Put messages in a separate file for each module / package specified on the
56 | # command line instead of printing them on stdout. Reports (if any) will be
57 | # written in a file name "pylint_global.[txt|html]".
58 | files-output=no
59 |
60 | # Tells whether to display a full report or only the messages
61 | reports=no
62 |
63 | # Python expression which should return a note less than 10 (10 is the highest
64 | # note). You have access to the variables errors warning, statement which
65 | # respectively contain the number of errors / warnings messages and the total
66 | # number of statements analyzed. This is used by the global evaluation report
67 | # (RP0004).
68 | evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10)
69 |
70 | # Add a comment according to your evaluation note. This is used by the global
71 | # evaluation report (RP0004).
72 | comment=no
73 |
74 | # Template used to display messages. This is a python new-style format string
75 | # used to format the message information. See doc for all details
76 | #msg-template=
77 |
78 |
79 | [TYPECHECK]
80 |
81 | # Tells whether missing members accessed in mixin class should be ignored. A
82 | # mixin class is detected if its name ends with "mixin" (case insensitive).
83 | ignore-mixin-members=yes
84 |
85 | # List of classes names for which member attributes should not be checked
86 | # (useful for classes with attributes dynamically set).
87 | ignored-classes=SQLObject
88 |
89 | # When zope mode is activated, add a predefined set of Zope acquired attributes
90 | # to generated-members.
91 | zope=no
92 |
93 | # List of members which are set dynamically and missed by pylint inference
94 | # system, and so shouldn't trigger E0201 when accessed. Python regular
95 | # expressions are accepted.
96 | generated-members=REQUEST,acl_users,aq_parent
97 |
98 | # List of decorators that create context managers from functions, such as
99 | # contextlib.contextmanager.
100 | contextmanager-decorators=contextlib.contextmanager,contextlib2.contextmanager
101 |
102 |
103 | [VARIABLES]
104 |
105 | # Tells whether we should check for unused import in __init__ files.
106 | init-import=no
107 |
108 | # A regular expression matching the beginning of the name of dummy variables
109 | # (i.e. not used).
110 | dummy-variables-rgx=^\*{0,2}(_$|unused_|dummy_)
111 |
112 | # List of additional names supposed to be defined in builtins. Remember that
113 | # you should avoid to define new builtins when possible.
114 | additional-builtins=
115 |
116 |
117 | [BASIC]
118 |
119 | # Required attributes for module, separated by a comma
120 | required-attributes=
121 |
122 | # List of builtins function names that should not be used, separated by a comma
123 | bad-functions=apply,input,reduce
124 |
125 |
126 | # Disable the report(s) with the given id(s).
127 | # All non-Google reports are disabled by default.
128 | disable-report=R0001,R0002,R0003,R0004,R0101,R0102,R0201,R0202,R0220,R0401,R0402,R0701,R0801,R0901,R0902,R0903,R0904,R0911,R0912,R0913,R0914,R0915,R0921,R0922,R0923
129 |
130 | # Regular expression which should only match correct module names
131 | module-rgx=(([a-z_][a-z0-9_]*)|([A-Z][a-zA-Z0-9]+))$
132 |
133 | # Regular expression which should only match correct module level names
134 | const-rgx=^(_?[A-Z][A-Z0-9_]*|__[a-z0-9_]+__|_?[a-z][a-z0-9_]*)$
135 |
136 | # Regular expression which should only match correct class names
137 | class-rgx=^_?[A-Z][a-zA-Z0-9]*$
138 |
139 | # Regular expression which should only match correct function names
140 | function-rgx=^(?:(?P<camel_case>_?[A-Z][a-zA-Z0-9]*)|(?P<snake_case>_?[a-z][a-z0-9_]*))$
141 |
142 | # Regular expression which should only match correct method names
143 | method-rgx=^(?:(?P<exempt>__[a-z0-9_]+__|next)|(?P<camel_case>_{0,2}[A-Z][a-zA-Z0-9]*)|(?P<snake_case>_{0,2}[a-z][a-z0-9_]*))$
144 |
145 | # Regular expression which should only match correct instance attribute names
146 | attr-rgx=^_{0,2}[a-z][a-z0-9_]*$
147 |
148 | # Regular expression which should only match correct argument names
149 | argument-rgx=^[a-z][a-z0-9_]*$
150 |
151 | # Regular expression which should only match correct variable names
152 | variable-rgx=^[a-z][a-z0-9_]*$
153 |
154 | # Regular expression which should only match correct attribute names in class
155 | # bodies
156 | class-attribute-rgx=^(_?[A-Z][A-Z0-9_]*|__[a-z0-9_]+__|_?[a-z][a-z0-9_]*)$
157 |
158 | # Regular expression which should only match correct list comprehension /
159 | # generator expression variable names
160 | inlinevar-rgx=^[a-z][a-z0-9_]*$
161 |
162 | # Good variable names which should always be accepted, separated by a comma
163 | good-names=main,_
164 |
165 | # Bad variable names which should always be refused, separated by a comma
166 | bad-names=
167 |
168 | # Regular expression which should only match function or class names that do
169 | # not require a docstring.
170 | no-docstring-rgx=(__.*__|main)
171 |
172 | # Minimum line length for functions/classes that require docstrings, shorter
173 | # ones are exempt.
174 | docstring-min-length=10
175 |
176 |
177 | [FORMAT]
178 |
179 | # Maximum number of characters on a single line.
180 | max-line-length=80
181 |
182 | # Regexp for a line that is allowed to be longer than the limit.
183 | ignore-long-lines=(?x)
184 | (^\s*(import|from)\s
185 | |\$Id:\s\/\/depot\/.+#\d+\s\$
186 | |^[a-zA-Z_][a-zA-Z0-9_]*\s*=\s*("[^"]\S+"|'[^']\S+')
187 | |^\s*\#\ LINT\.ThenChange
188 | |^[^#]*\#\ type:\ [a-zA-Z_][a-zA-Z0-9_.,[\] ]*$
189 | |pylint
190 | |"""
191 | |\#
192 | |lambda
193 | |(https?|ftp):)
194 |
195 | # Allow the body of an if to be on the same line as the test if there is no
196 | # else.
197 | single-line-if-stmt=y
198 |
199 | # List of optional constructs for which whitespace checking is disabled
200 | no-space-check=
201 |
202 | # Maximum number of lines in a module
203 | max-module-lines=99999
204 |
205 | # String used as indentation unit. This is usually " " (4 spaces) or "\t" (1
206 | # tab).
207 | indent-string=' '
208 |
209 |
210 | [SIMILARITIES]
211 |
212 | # Minimum lines number of a similarity.
213 | min-similarity-lines=4
214 |
215 | # Ignore comments when computing similarities.
216 | ignore-comments=yes
217 |
218 | # Ignore docstrings when computing similarities.
219 | ignore-docstrings=yes
220 |
221 | # Ignore imports when computing similarities.
222 | ignore-imports=no
223 |
224 |
225 | [MISCELLANEOUS]
226 |
227 | # List of note tags to take in consideration, separated by a comma.
228 | notes=
229 |
230 |
231 | [IMPORTS]
232 |
233 | # Deprecated modules which should not be used, separated by a comma
234 | deprecated-modules=regsub,TERMIOS,Bastion,rexec,sets
235 |
236 | # Create a graph of every (i.e. internal and external) dependencies in the
237 | # given file (report RP0402 must not be disabled)
238 | import-graph=
239 |
240 | # Create a graph of external dependencies in the given file (report RP0402 must
241 | # not be disabled)
242 | ext-import-graph=
243 |
244 | # Create a graph of internal dependencies in the given file (report RP0402 must
245 | # not be disabled)
246 | int-import-graph=
247 |
248 |
249 | [CLASSES]
250 |
251 | # List of interface methods to ignore, separated by a comma. This is used for
252 | # instance to not check methods defines in Zope's Interface base class.
253 | ignore-iface-methods=isImplementedBy,deferred,extends,names,namesAndDescriptions,queryDescriptionFor,getBases,getDescriptionFor,getDoc,getName,getTaggedValue,getTaggedValueTags,isEqualOrExtendedBy,setTaggedValue,isImplementedByInstancesOf,adaptWith,is_implemented_by
254 |
255 | # List of method names used to declare (i.e. assign) instance attributes.
256 | defining-attr-methods=__init__,__new__,setUp
257 |
258 | # List of valid names for the first argument in a class method.
259 | valid-classmethod-first-arg=cls,class_
260 |
261 | # List of valid names for the first argument in a metaclass class method.
262 | valid-metaclass-classmethod-first-arg=mcs
263 |
264 |
265 | [DESIGN]
266 |
267 | # Maximum number of arguments for function / method
268 | max-args=5
269 |
270 | # Argument names that match this expression will be ignored. Default to name
271 | # with leading underscore
272 | ignored-argument-names=_.*
273 |
274 | # Maximum number of locals for function / method body
275 | max-locals=15
276 |
277 | # Maximum number of return / yield for function / method body
278 | max-returns=6
279 |
280 | # Maximum number of branch for function / method body
281 | max-branches=12
282 |
283 | # Maximum number of statements in function / method body
284 | max-statements=50
285 |
286 | # Maximum number of parents for a class (see R0901).
287 | max-parents=7
288 |
289 | # Maximum number of attributes for a class (see R0902).
290 | max-attributes=7
291 |
292 | # Minimum number of public methods for a class (see R0903).
293 | min-public-methods=2
294 |
295 | # Maximum number of public methods for a class (see R0904).
296 | max-public-methods=20
297 |
298 |
299 | [EXCEPTIONS]
300 |
301 | # Exceptions that will emit a warning when being caught. Defaults to
302 | # "Exception"
303 | overgeneral-exceptions=Exception,StandardError,BaseException
304 |
305 |
306 | [AST]
307 |
308 | # Maximum line length for lambdas
309 | short-func-length=1
310 |
311 | # List of module members that should be marked as deprecated.
312 | # All of the string functions are listed in 4.1.4 Deprecated string functions
313 | # in the Python 2.4 docs.
314 | deprecated-members=string.atof,string.atoi,string.atol,string.capitalize,string.expandtabs,string.find,string.rfind,string.index,string.rindex,string.count,string.lower,string.split,string.rsplit,string.splitfields,string.join,string.joinfields,string.lstrip,string.rstrip,string.strip,string.swapcase,string.translate,string.upper,string.ljust,string.rjust,string.center,string.zfill,string.replace,sys.exitfunc
315 |
316 |
317 | [DOCSTRING]
318 |
319 | # List of exceptions that do not need to be mentioned in the Raises section of
320 | # a docstring.
321 | ignore-exceptions=AssertionError,NotImplementedError,StopIteration,TypeError
322 |
323 |
324 |
325 | [TOKENS]
326 |
327 | # Number of spaces of indent required when the last token on the preceding line
328 | # is an open (, [, or {.
329 | indent-after-paren=4
330 |
--------------------------------------------------------------------------------
/casfs/_version.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/python
2 | #
3 | # Copyright 2020 Google LLC
4 | #
5 | # Licensed under the Apache License, Version 2.0 (the "License");
6 | # you may not use this file except in compliance with the License.
7 | # You may obtain a copy of the License at
8 | #
9 | # http://www.apache.org/licenses/LICENSE-2.0
10 | #
11 | # Unless required by applicable law or agreed to in writing, software
12 | # distributed under the License is distributed on an "AS IS" BASIS,
13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 | # See the License for the specific language governing permissions and
15 | # limitations under the License.
16 | # This file helps to compute a version number in source trees obtained from
17 | # git-archive tarball (such as those provided by github's download-from-tag
18 | # feature). Distribution tarballs (built by setup.py sdist) and build
19 | # directories (produced by setup.py build) will contain a much shorter file
20 | # that just contains the computed version number.
21 |
22 | # This file is released into the public domain. Generated by
23 | # versioneer-0.18 (https://github.com/warner/python-versioneer)
24 | """Git implementation of _version.py."""
25 |
26 | import errno
27 | import os
28 | import re
29 | import subprocess
30 | import sys
31 |
32 |
33 | def get_keywords():
34 | """Get the keywords needed to look up the version information."""
35 | # these strings will be replaced by git during git-archive.
36 | # setup.py/versioneer.py will grep for the variable names, so they must
37 | # each be defined on a line of their own. _version.py will just call
38 | # get_keywords().
39 | git_refnames = " (HEAD -> master, tag: 0.1.2)"
40 | git_full = "08303917ea5713156a2eb39c5b6c50d289c12a9a"
41 | git_date = "2020-06-17 07:15:21 -0600"
42 | keywords = {"refnames": git_refnames, "full": git_full, "date": git_date}
43 | return keywords
44 |
45 |
46 | class VersioneerConfig:
47 | """Container for Versioneer configuration parameters."""
48 |
49 |
50 | def get_config():
51 | """Create, populate and return the VersioneerConfig() object."""
52 | # these strings are filled in when 'setup.py versioneer' creates
53 | # _version.py
54 | cfg = VersioneerConfig()
55 | cfg.VCS = "git"
56 | cfg.style = "pep440"
57 | cfg.tag_prefix = ""
58 | cfg.parentdir_prefix = "None"
59 | cfg.versionfile_source = "casfs/_version.py"
60 | cfg.verbose = False
61 | return cfg
62 |
63 |
64 | class NotThisMethod(Exception):
65 | """Exception raised if a method is not valid for the current scenario."""
66 |
67 |
68 | LONG_VERSION_PY = {}
69 | HANDLERS = {}
70 |
71 |
72 | def register_vcs_handler(vcs, method): # decorator
73 | """Decorator to mark a method as the handler for a particular VCS."""
74 |
75 | def decorate(f):
76 | """Store f in HANDLERS[vcs][method]."""
77 | if vcs not in HANDLERS:
78 | HANDLERS[vcs] = {}
79 | HANDLERS[vcs][method] = f
80 | return f
81 |
82 | return decorate
83 |
84 |
85 | def run_command(commands,
86 | args,
87 | cwd=None,
88 | verbose=False,
89 | hide_stderr=False,
90 | env=None):
91 | """Call the given command(s)."""
92 | assert isinstance(commands, list)
93 | p = None
94 | for c in commands:
95 | try:
96 | dispcmd = str([c] + args)
97 | # remember shell=False, so use git.cmd on windows, not just git
98 | p = subprocess.Popen([c] + args,
99 | cwd=cwd,
100 | env=env,
101 | stdout=subprocess.PIPE,
102 | stderr=(subprocess.PIPE if hide_stderr else None))
103 | break
104 | except EnvironmentError:
105 | e = sys.exc_info()[1]
106 | if e.errno == errno.ENOENT:
107 | continue
108 | if verbose:
109 | print("unable to run %s" % dispcmd)
110 | print(e)
111 | return None, None
112 | else:
113 | if verbose:
114 | print("unable to find command, tried %s" % (commands,))
115 | return None, None
116 | stdout = p.communicate()[0].strip()
117 | if sys.version_info[0] >= 3:
118 | stdout = stdout.decode()
119 | if p.returncode != 0:
120 | if verbose:
121 | print("unable to run %s (error)" % dispcmd)
122 | print("stdout was %s" % stdout)
123 | return None, p.returncode
124 | return stdout, p.returncode
125 |
126 |
127 | def versions_from_parentdir(parentdir_prefix, root, verbose):
128 | """Try to determine the version from the parent directory name.
129 |
130 | Source tarballs conventionally unpack into a directory that includes both
131 | the project name and a version string. We will also support searching up
132 | two directory levels for an appropriately named parent directory
133 | """
134 | rootdirs = []
135 |
136 | for i in range(3):
137 | dirname = os.path.basename(root)
138 | if dirname.startswith(parentdir_prefix):
139 | return {
140 | "version": dirname[len(parentdir_prefix):],
141 | "full-revisionid": None,
142 | "dirty": False,
143 | "error": None,
144 | "date": None
145 | }
146 | else:
147 | rootdirs.append(root)
148 | root = os.path.dirname(root) # up a level
149 |
150 | if verbose:
151 | print("Tried directories %s but none started with prefix %s" %
152 | (str(rootdirs), parentdir_prefix))
153 | raise NotThisMethod("rootdir doesn't start with parentdir_prefix")
154 |
155 |
156 | @register_vcs_handler("git", "get_keywords")
157 | def git_get_keywords(versionfile_abs):
158 | """Extract version information from the given file."""
159 | # the code embedded in _version.py can just fetch the value of these
160 | # keywords. When used from setup.py, we don't want to import _version.py,
161 | # so we do it with a regexp instead. This function is not used from
162 | # _version.py.
163 | keywords = {}
164 | try:
165 | f = open(versionfile_abs, "r")
166 | for line in f.readlines():
167 | if line.strip().startswith("git_refnames ="):
168 | mo = re.search(r'=\s*"(.*)"', line)
169 | if mo:
170 | keywords["refnames"] = mo.group(1)
171 | if line.strip().startswith("git_full ="):
172 | mo = re.search(r'=\s*"(.*)"', line)
173 | if mo:
174 | keywords["full"] = mo.group(1)
175 | if line.strip().startswith("git_date ="):
176 | mo = re.search(r'=\s*"(.*)"', line)
177 | if mo:
178 | keywords["date"] = mo.group(1)
179 | f.close()
180 | except EnvironmentError:
181 | pass
182 | return keywords
183 |
184 |
185 | @register_vcs_handler("git", "keywords")
186 | def git_versions_from_keywords(keywords, tag_prefix, verbose):
187 | """Get version information from git keywords."""
188 | if not keywords:
189 | raise NotThisMethod("no keywords at all, weird")
190 | date = keywords.get("date")
191 | if date is not None:
192 | # git-2.2.0 added "%cI", which expands to an ISO-8601 -compliant
193 | # datestamp. However we prefer "%ci" (which expands to an "ISO-8601
194 | # -like" string, which we must then edit to make compliant), because
195 | # it's been around since git-1.5.3, and it's too difficult to
196 | # discover which version we're using, or to work around using an
197 | # older one.
198 | date = date.strip().replace(" ", "T", 1).replace(" ", "", 1)
199 | refnames = keywords["refnames"].strip()
200 | if refnames.startswith("$Format"):
201 | if verbose:
202 | print("keywords are unexpanded, not using")
203 | raise NotThisMethod("unexpanded keywords, not a git-archive tarball")
204 | refs = set([r.strip() for r in refnames.strip("()").split(",")])
205 | # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of
206 | # just "foo-1.0". If we see a "tag: " prefix, prefer those.
207 | TAG = "tag: "
208 | tags = set([r[len(TAG):] for r in refs if r.startswith(TAG)])
209 | if not tags:
210 | # Either we're using git < 1.8.3, or there really are no tags. We use
211 | # a heuristic: assume all version tags have a digit. The old git %d
212 | # expansion behaves like git log --decorate=short and strips out the
213 | # refs/heads/ and refs/tags/ prefixes that would let us distinguish
214 | # between branches and tags. By ignoring refnames without digits, we
215 | # filter out many common branch names like "release" and
216 | # "stabilization", as well as "HEAD" and "master".
217 | tags = set([r for r in refs if re.search(r'\d', r)])
218 | if verbose:
219 | print("discarding '%s', no digits" % ",".join(refs - tags))
220 | if verbose:
221 | print("likely tags: %s" % ",".join(sorted(tags)))
222 | for ref in sorted(tags):
223 | # sorting will prefer e.g. "2.0" over "2.0rc1"
224 | if ref.startswith(tag_prefix):
225 | r = ref[len(tag_prefix):]
226 | if verbose:
227 | print("picking %s" % r)
228 | return {
229 | "version": r,
230 | "full-revisionid": keywords["full"].strip(),
231 | "dirty": False,
232 | "error": None,
233 | "date": date
234 | }
235 | # no suitable tags, so version is "0+unknown", but full hex is still there
236 | if verbose:
237 | print("no suitable tags, using unknown + full revision id")
238 | return {
239 | "version": "0+unknown",
240 | "full-revisionid": keywords["full"].strip(),
241 | "dirty": False,
242 | "error": "no suitable tags",
243 | "date": None
244 | }
245 |
246 |
247 | @register_vcs_handler("git", "pieces_from_vcs")
248 | def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command):
249 | """Get version from 'git describe' in the root of the source tree.
250 |
251 | This only gets called if the git-archive 'subst' keywords were *not*
252 | expanded, and _version.py hasn't already been rewritten with a short
253 | version string, meaning we're inside a checked out source tree.
254 | """
255 | GITS = ["git"]
256 | if sys.platform == "win32":
257 | GITS = ["git.cmd", "git.exe"]
258 |
259 | out, rc = run_command(GITS, ["rev-parse", "--git-dir"],
260 | cwd=root,
261 | hide_stderr=True)
262 | if rc != 0:
263 | if verbose:
264 | print("Directory %s not under git control" % root)
265 | raise NotThisMethod("'git rev-parse --git-dir' returned error")
266 |
267 | # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty]
268 | # if there isn't one, this yields HEX[-dirty] (no NUM)
269 | describe_out, rc = run_command(GITS, [
270 | "describe", "--tags", "--dirty", "--always", "--long", "--match",
271 | "%s*" % tag_prefix
272 | ],
273 | cwd=root)
274 | # --long was added in git-1.5.5
275 | if describe_out is None:
276 | raise NotThisMethod("'git describe' failed")
277 | describe_out = describe_out.strip()
278 | full_out, rc = run_command(GITS, ["rev-parse", "HEAD"], cwd=root)
279 | if full_out is None:
280 | raise NotThisMethod("'git rev-parse' failed")
281 | full_out = full_out.strip()
282 |
283 | pieces = {}
284 | pieces["long"] = full_out
285 | pieces["short"] = full_out[:7] # maybe improved later
286 | pieces["error"] = None
287 |
288 | # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty]
289 | # TAG might have hyphens.
290 | git_describe = describe_out
291 |
292 | # look for -dirty suffix
293 | dirty = git_describe.endswith("-dirty")
294 | pieces["dirty"] = dirty
295 | if dirty:
296 | git_describe = git_describe[:git_describe.rindex("-dirty")]
297 |
298 | # now we have TAG-NUM-gHEX or HEX
299 |
300 | if "-" in git_describe:
301 | # TAG-NUM-gHEX
302 | mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe)
303 | if not mo:
304 | # unparseable. Maybe git-describe is misbehaving?
305 | pieces["error"] = ("unable to parse git-describe output: '%s'" %
306 | describe_out)
307 | return pieces
308 |
309 | # tag
310 | full_tag = mo.group(1)
311 | if not full_tag.startswith(tag_prefix):
312 | if verbose:
313 | fmt = "tag '%s' doesn't start with prefix '%s'"
314 | print(fmt % (full_tag, tag_prefix))
315 | pieces["error"] = ("tag '%s' doesn't start with prefix '%s'" %
316 | (full_tag, tag_prefix))
317 | return pieces
318 | pieces["closest-tag"] = full_tag[len(tag_prefix):]
319 |
320 | # distance: number of commits since tag
321 | pieces["distance"] = int(mo.group(2))
322 |
323 | # commit: short hex revision ID
324 | pieces["short"] = mo.group(3)
325 |
326 | else:
327 | # HEX: no tags
328 | pieces["closest-tag"] = None
329 | count_out, rc = run_command(GITS, ["rev-list", "HEAD", "--count"], cwd=root)
330 | pieces["distance"] = int(count_out) # total number of commits
331 |
332 | # commit date: see ISO-8601 comment in git_versions_from_keywords()
333 | date = run_command(GITS, ["show", "-s", "--format=%ci", "HEAD"],
334 | cwd=root)[0].strip()
335 | pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1)
336 |
337 | return pieces
338 |
339 |
340 | def plus_or_dot(pieces):
341 | """Return a + if we don't already have one, else return a ."""
342 | if "+" in pieces.get("closest-tag", ""):
343 | return "."
344 | return "+"
345 |
346 |
347 | def render_pep440(pieces):
348 | """Build up version string, with post-release "local version identifier".
349 |
350 | Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you
351 | get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty
352 |
353 | Exceptions:
354 | 1: no tags. git_describe was just HEX. 0+untagged.DISTANCE.gHEX[.dirty]
355 | """
356 | if pieces["closest-tag"]:
357 | rendered = pieces["closest-tag"]
358 | if pieces["distance"] or pieces["dirty"]:
359 | rendered += plus_or_dot(pieces)
360 | rendered += "%d.g%s" % (pieces["distance"], pieces["short"])
361 | if pieces["dirty"]:
362 | rendered += ".dirty"
363 | else:
364 | # exception #1
365 | rendered = "0+untagged.%d.g%s" % (pieces["distance"], pieces["short"])
366 | if pieces["dirty"]:
367 | rendered += ".dirty"
368 | return rendered
369 |
370 |
371 | def render_pep440_pre(pieces):
372 | """TAG[.post.devDISTANCE] -- No -dirty.
373 |
374 | Exceptions:
375 | 1: no tags. 0.post.devDISTANCE
376 | """
377 | if pieces["closest-tag"]:
378 | rendered = pieces["closest-tag"]
379 | if pieces["distance"]:
380 | rendered += ".post.dev%d" % pieces["distance"]
381 | else:
382 | # exception #1
383 | rendered = "0.post.dev%d" % pieces["distance"]
384 | return rendered
385 |
386 |
387 | def render_pep440_post(pieces):
388 | """TAG[.postDISTANCE[.dev0]+gHEX] .
389 |
390 | The ".dev0" means dirty. Note that .dev0 sorts backwards
391 | (a dirty tree will appear "older" than the corresponding clean one),
392 | but you shouldn't be releasing software with -dirty anyways.
393 |
394 | Exceptions:
395 | 1: no tags. 0.postDISTANCE[.dev0]
396 | """
397 | if pieces["closest-tag"]:
398 | rendered = pieces["closest-tag"]
399 | if pieces["distance"] or pieces["dirty"]:
400 | rendered += ".post%d" % pieces["distance"]
401 | if pieces["dirty"]:
402 | rendered += ".dev0"
403 | rendered += plus_or_dot(pieces)
404 | rendered += "g%s" % pieces["short"]
405 | else:
406 | # exception #1
407 | rendered = "0.post%d" % pieces["distance"]
408 | if pieces["dirty"]:
409 | rendered += ".dev0"
410 | rendered += "+g%s" % pieces["short"]
411 | return rendered
412 |
413 |
414 | def render_pep440_old(pieces):
415 | """TAG[.postDISTANCE[.dev0]] .
416 |
417 | The ".dev0" means dirty.
418 |
419 | Exceptions:
420 | 1: no tags. 0.postDISTANCE[.dev0]
421 | """
422 | if pieces["closest-tag"]:
423 | rendered = pieces["closest-tag"]
424 | if pieces["distance"] or pieces["dirty"]:
425 | rendered += ".post%d" % pieces["distance"]
426 | if pieces["dirty"]:
427 | rendered += ".dev0"
428 | else:
429 | # exception #1
430 | rendered = "0.post%d" % pieces["distance"]
431 | if pieces["dirty"]:
432 | rendered += ".dev0"
433 | return rendered
434 |
435 |
436 | def render_git_describe(pieces):
437 | """TAG[-DISTANCE-gHEX][-dirty].
438 |
439 | Like 'git describe --tags --dirty --always'.
440 |
441 | Exceptions:
442 | 1: no tags. HEX[-dirty] (note: no 'g' prefix)
443 | """
444 | if pieces["closest-tag"]:
445 | rendered = pieces["closest-tag"]
446 | if pieces["distance"]:
447 | rendered += "-%d-g%s" % (pieces["distance"], pieces["short"])
448 | else:
449 | # exception #1
450 | rendered = pieces["short"]
451 | if pieces["dirty"]:
452 | rendered += "-dirty"
453 | return rendered
454 |
455 |
456 | def render_git_describe_long(pieces):
457 | """TAG-DISTANCE-gHEX[-dirty].
458 |
459 | Like 'git describe --tags --dirty --always --long'.
460 | The distance/hash is unconditional.
461 |
462 | Exceptions:
463 | 1: no tags. HEX[-dirty] (note: no 'g' prefix)
464 | """
465 | if pieces["closest-tag"]:
466 | rendered = pieces["closest-tag"]
467 | rendered += "-%d-g%s" % (pieces["distance"], pieces["short"])
468 | else:
469 | # exception #1
470 | rendered = pieces["short"]
471 | if pieces["dirty"]:
472 | rendered += "-dirty"
473 | return rendered
474 |
475 |
476 | def render(pieces, style):
477 | """Render the given version pieces into the requested style."""
478 | if pieces["error"]:
479 | return {
480 | "version": "unknown",
481 | "full-revisionid": pieces.get("long"),
482 | "dirty": None,
483 | "error": pieces["error"],
484 | "date": None
485 | }
486 |
487 | if not style or style == "default":
488 | style = "pep440" # the default
489 |
490 | if style == "pep440":
491 | rendered = render_pep440(pieces)
492 | elif style == "pep440-pre":
493 | rendered = render_pep440_pre(pieces)
494 | elif style == "pep440-post":
495 | rendered = render_pep440_post(pieces)
496 | elif style == "pep440-old":
497 | rendered = render_pep440_old(pieces)
498 | elif style == "git-describe":
499 | rendered = render_git_describe(pieces)
500 | elif style == "git-describe-long":
501 | rendered = render_git_describe_long(pieces)
502 | else:
503 | raise ValueError("unknown style '%s'" % style)
504 |
505 | return {
506 | "version": rendered,
507 | "full-revisionid": pieces["long"],
508 | "dirty": pieces["dirty"],
509 | "error": None,
510 | "date": pieces.get("date")
511 | }
512 |
513 |
514 | def get_versions():
515 | """Get version information or return default if unable to do so."""
516 | # I am in _version.py, which lives at ROOT/VERSIONFILE_SOURCE. If we have
517 | # __file__, we can work backwards from there to the root. Some
518 | # py2exe/bbfreeze/non-CPython implementations don't do __file__, in which
519 | # case we can only use expanded keywords.
520 |
521 | cfg = get_config()
522 | verbose = cfg.verbose
523 |
524 | try:
525 | return git_versions_from_keywords(get_keywords(), cfg.tag_prefix, verbose)
526 | except NotThisMethod:
527 | pass
528 |
529 | try:
530 | root = os.path.realpath(__file__)
531 | # versionfile_source is the relative path from the top of the source
532 | # tree (where the .git directory might live) to this file. Invert
533 | # this to find the root from __file__.
534 | for i in cfg.versionfile_source.split('/'):
535 | root = os.path.dirname(root)
536 | except NameError:
537 | return {
538 | "version": "0+unknown",
539 | "full-revisionid": None,
540 | "dirty": None,
541 | "error": "unable to find root of source tree",
542 | "date": None
543 | }
544 |
545 | try:
546 | pieces = git_pieces_from_vcs(cfg.tag_prefix, root, verbose)
547 | return render(pieces, cfg.style)
548 | except NotThisMethod:
549 | pass
550 |
551 | try:
552 | if cfg.parentdir_prefix:
553 | return versions_from_parentdir(cfg.parentdir_prefix, root, verbose)
554 | except NotThisMethod:
555 | pass
556 |
557 | return {
558 | "version": "0+unknown",
559 | "full-revisionid": None,
560 | "dirty": None,
561 | "error": "unable to compute version",
562 | "date": None
563 | }
564 |
--------------------------------------------------------------------------------
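Editorial note (not part of the repository): the default "pep440" style implemented by `render_pep440` above, and documented again in versioneer.py below, turns the pieces parsed from `git describe` into TAG[+DISTANCE.gHEX[.dirty]]. A small self-contained sketch of that rule follows; the tag, distance, and short-hash values are invented for illustration.

    # Sketch of the pep440 rendering rule for the common case where a tag
    # exists and contains no "+" (so plus_or_dot contributes "+").
    def render_pep440_sketch(tag, distance, short, dirty):
        out = tag
        if distance or dirty:
            out += "+%d.g%s" % (distance, short)
            if dirty:
                out += ".dirty"
        return out

    assert render_pep440_sketch("0.1.2", 0, "0830391", False) == "0.1.2"
    assert render_pep440_sketch("0.1.2", 2, "0830391", True) == "0.1.2+2.g0830391.dirty"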
/versioneer.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/python
2 | #
3 | # Copyright 2020 Google LLC
4 | #
5 | # Licensed under the Apache License, Version 2.0 (the "License");
6 | # you may not use this file except in compliance with the License.
7 | # You may obtain a copy of the License at
8 | #
9 | # http://www.apache.org/licenses/LICENSE-2.0
10 | #
11 | # Unless required by applicable law or agreed to in writing, software
12 | # distributed under the License is distributed on an "AS IS" BASIS,
13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 | # See the License for the specific language governing permissions and
15 | # limitations under the License.
16 | # Version: 0.18
17 | """The Versioneer - like a rocketeer, but for versions.
18 |
19 | The Versioneer
20 | ==============
21 |
22 | * like a rocketeer, but for versions!
23 | * https://github.com/warner/python-versioneer
24 | * Brian Warner
25 | * License: Public Domain
26 | * Compatible With: python2.6, 2.7, 3.2, 3.3, 3.4, 3.5, 3.6, and pypy
27 | * [![Latest Version]
28 | (https://pypip.in/version/versioneer/badge.svg?style=flat)
29 | ](https://pypi.python.org/pypi/versioneer/)
30 | * [![Build Status]
31 | (https://travis-ci.org/warner/python-versioneer.png?branch=master)
32 | ](https://travis-ci.org/warner/python-versioneer)
33 |
34 | This is a tool for managing a recorded version number in distutils-based
35 | python projects. The goal is to remove the tedious and error-prone "update
36 | the embedded version string" step from your release process. Making a new
37 | release should be as easy as recording a new tag in your version-control
38 | system, and maybe making new tarballs.
39 |
40 |
41 | ## Quick Install
42 |
43 | * `pip install versioneer` to somewhere to your $PATH
44 | * add a `[versioneer]` section to your setup.cfg (see below)
45 | * run `versioneer install` in your source tree, commit the results
46 |
47 | ## Version Identifiers
48 |
49 | Source trees come from a variety of places:
50 |
51 | * a version-control system checkout (mostly used by developers)
52 | * a nightly tarball, produced by build automation
53 | * a snapshot tarball, produced by a web-based VCS browser, like github's
54 | "tarball from tag" feature
55 | * a release tarball, produced by "setup.py sdist", distributed through PyPI
56 |
57 | Within each source tree, the version identifier (either a string or a number,
58 | this tool is format-agnostic) can come from a variety of places:
59 |
60 | * ask the VCS tool itself, e.g. "git describe" (for checkouts), which knows
61 | about recent "tags" and an absolute revision-id
62 | * the name of the directory into which the tarball was unpacked
63 | * an expanded VCS keyword ($Id$, etc)
64 | * a `_version.py` created by some earlier build step
65 |
66 | For released software, the version identifier is closely related to a VCS
67 | tag. Some projects use tag names that include more than just the version
68 | string (e.g. "myproject-1.2" instead of just "1.2"), in which case the tool
69 | needs to strip the tag prefix to extract the version identifier. For
70 | unreleased software (between tags), the version identifier should provide
71 | enough information to help developers recreate the same tree, while also
72 | giving them an idea of roughly how old the tree is (after version 1.2, before
73 | version 1.3). Many VCS systems can report a description that captures this,
74 | for example `git describe --tags --dirty --always` reports things like
75 | "0.7-1-g574ab98-dirty" to indicate that the checkout is one revision past the
76 | 0.7 tag, has a unique revision id of "574ab98", and is "dirty" (it has
77 | uncommitted changes).
78 |
79 | The version identifier is used for multiple purposes:
80 |
81 | * to allow the module to self-identify its version: `myproject.__version__`
82 | * to choose a name and prefix for a 'setup.py sdist' tarball
83 |
84 | ## Theory of Operation
85 |
86 | Versioneer works by adding a special `_version.py` file into your source
87 | tree, where your `__init__.py` can import it. This `_version.py` knows how to
88 | dynamically ask the VCS tool for version information at import time.
89 |
90 | `_version.py` also contains `$Revision$` markers, and the installation
91 | process marks `_version.py` to have this marker rewritten with a tag name
92 | during the `git archive` command. As a result, generated tarballs will
93 | contain enough information to get the proper version.
94 |
95 | To allow `setup.py` to compute a version too, a `versioneer.py` is added to
96 | the top level of your source tree, next to `setup.py` and the `setup.cfg`
97 | that configures it. This overrides several distutils/setuptools commands to
98 | compute the version when invoked, and changes `setup.py build` and `setup.py
99 | sdist` to replace `_version.py` with a small static file that contains just
100 | the generated version data.
101 |
102 | ## Installation
103 |
104 | See [INSTALL.md](./INSTALL.md) for detailed installation instructions.
105 |
106 | ## Version-String Flavors
107 |
108 | Code which uses Versioneer can learn about its version string at runtime by
109 | importing `_version` from your main `__init__.py` file and running the
110 | `get_versions()` function. From the "outside" (e.g. in `setup.py`), you can
111 | import the top-level `versioneer.py` and run `get_versions()`.
112 |
113 | Both functions return a dictionary with different flavors of version
114 | information:
115 |
116 | * `['version']`: A condensed version string, rendered using the selected
117 | style. This is the most commonly used value for the project's version
118 | string. The default "pep440" style yields strings like `0.11`,
119 | `0.11+2.g1076c97`, or `0.11+2.g1076c97.dirty`. See the "Styles" section
120 | below for alternative styles.
121 |
122 | * `['full-revisionid']`: detailed revision identifier. For Git, this is the
123 | full SHA1 commit id, e.g. "1076c978a8d3cfc70f408fe5974aa6c092c949ac".
124 |
125 | * `['date']`: Date and time of the latest `HEAD` commit. For Git, it is the
126 | commit date in ISO 8601 format. This will be None if the date is not
127 | available.
128 |
129 | * `['dirty']`: a boolean, True if the tree has uncommitted changes. Note that
130 | this is only accurate if run in a VCS checkout, otherwise it is likely to
131 | be False or None
132 |
133 | * `['error']`: if the version string could not be computed, this will be set
134 | to a string describing the problem, otherwise it will be None. It may be
135 | useful to throw an exception in setup.py if this is set, to avoid e.g.
136 | creating tarballs with a version string of "unknown".
137 |
138 | Some variants are more useful than others. Including `full-revisionid` in a
139 | bug report should allow developers to reconstruct the exact code being tested
140 | (or indicate the presence of local changes that should be shared with the
141 | developers). `version` is suitable for display in an "about" box or a CLI
142 | `--version` output: it can be easily compared against release notes and lists
143 | of bugs fixed in various releases.
144 |
145 | The installer adds the following text to your `__init__.py` to place a basic
146 | version in `YOURPROJECT.__version__`:
147 |
148 | from ._version import get_versions
149 | __version__ = get_versions()['version']
150 | del get_versions
151 |
152 | ## Styles
153 |
154 | The setup.cfg `style=` configuration controls how the VCS information is
155 | rendered into a version string.
156 |
157 | The default style, "pep440", produces a PEP440-compliant string, equal to the
158 | un-prefixed tag name for actual releases, and containing an additional "local
159 | version" section with more detail for in-between builds. For Git, this is
160 | TAG[+DISTANCE.gHEX[.dirty]] , using information from `git describe --tags
161 | --dirty --always`. For example "0.11+2.g1076c97.dirty" indicates that the
162 | tree is like the "1076c97" commit but has uncommitted changes (".dirty"), and
163 | that this commit is two revisions ("+2") beyond the "0.11" tag. For released
164 | software (exactly equal to a known tag), the identifier will only contain the
165 | stripped tag, e.g. "0.11".
166 |
167 | Other styles are available. See [details.md](details.md) in the Versioneer
168 | source tree for descriptions.
169 |
170 | ## Debugging
171 |
172 | Versioneer tries to avoid fatal errors: if something goes wrong, it will tend
173 | to return a version of "0+unknown". To investigate the problem, run `setup.py
174 | version`, which will run the version-lookup code in a verbose mode, and will
175 | display the full contents of `get_versions()` (including the `error` string,
176 | which may help identify what went wrong).
177 |
178 | ## Known Limitations
179 |
180 | Some situations are known to cause problems for Versioneer. This details the
181 | most significant ones. More can be found on Github
182 | [issues page](https://github.com/warner/python-versioneer/issues).
183 |
184 | ### Subprojects
185 |
186 | Versioneer has limited support for source trees in which `setup.py` is not in
187 | the root directory (e.g. `setup.py` and `.git/` are *not* siblings). There are
188 | two common reasons why `setup.py` might not be in the root:
189 |
190 | * Source trees which contain multiple subprojects, such as
191 | [Buildbot](https://github.com/buildbot/buildbot), which contains both
192 | "master" and "slave" subprojects, each with their own `setup.py`,
193 | `setup.cfg`, and `tox.ini`. Projects like these produce multiple PyPI
194 | distributions (and upload multiple independently-installable tarballs).
195 | * Source trees whose main purpose is to contain a C library, but which also
196 | provide bindings to Python (and perhaps other languages) in subdirectories.
197 |
198 | Versioneer will look for `.git` in parent directories, and most operations
199 | should get the right version string. However `pip` and `setuptools` have bugs
200 | and implementation details which frequently cause `pip install .` from a
201 | subproject directory to fail to find a correct version string (so it usually
202 | defaults to `0+unknown`).
203 |
204 | `pip install --editable .` should work correctly. `setup.py install` might
205 | work too.
206 |
207 | Pip-8.1.1 is known to have this problem, but hopefully it will get fixed in
208 | some later version.
209 |
210 | [Bug #38](https://github.com/warner/python-versioneer/issues/38) is tracking
211 | this issue. The discussion in
212 | [PR #61](https://github.com/warner/python-versioneer/pull/61) describes the
213 | issue from the Versioneer side in more detail.
214 | [pip PR#3176](https://github.com/pypa/pip/pull/3176) and
215 | [pip PR#3615](https://github.com/pypa/pip/pull/3615) contain work to improve
216 | pip to let Versioneer work correctly.
217 |
218 | Versioneer-0.16 and earlier only looked for a `.git` directory next to the
219 | `setup.cfg`, so subprojects were completely unsupported with those releases.
220 |
221 | ### Editable installs with setuptools <= 18.5
222 |
223 | `setup.py develop` and `pip install --editable .` allow you to install a
224 | project into a virtualenv once, then continue editing the source code (and
225 | test) without re-installing after every change.
226 |
227 | "Entry-point scripts" (`setup(entry_points={"console_scripts": ..})`) are a
228 | convenient way to specify executable scripts that should be installed along
229 | with the python package.
230 |
231 | These both work as expected when using modern setuptools. When using
232 | setuptools-18.5 or earlier, however, certain operations will cause
233 | `pkg_resources.DistributionNotFound` errors when running the entrypoint
234 | script, which must be resolved by re-installing the package. This happens
235 | when the install happens with one version, then the egg_info data is
236 | regenerated while a different version is checked out. Many setup.py commands
237 | cause egg_info to be rebuilt (including `sdist`, `wheel`, and installing into
238 | a different virtualenv), so this can be surprising.
239 |
240 | [Bug #83](https://github.com/warner/python-versioneer/issues/83) describes
241 | this one, but upgrading to a newer version of setuptools should probably
242 | resolve it.
243 |
244 | ### Unicode version strings
245 |
246 | While Versioneer works (and is continually tested) with both Python 2 and
247 | Python 3, it is not entirely consistent with bytes-vs-unicode distinctions.
248 | Newer releases probably generate unicode version strings on py2. It's not
249 | clear that this is wrong, but it may be surprising for applications when they
250 | write these strings to a network connection or include them in bytes-oriented
251 | APIs like cryptographic checksums.
252 |
253 | [Bug #71](https://github.com/warner/python-versioneer/issues/71) investigates
254 | this question.
255 |
256 |
257 | ## Updating Versioneer
258 |
259 | To upgrade your project to a new release of Versioneer, do the following:
260 |
261 | * install the new Versioneer (`pip install -U versioneer` or equivalent)
262 | * edit `setup.cfg`, if necessary, to include any new configuration settings
263 | indicated by the release notes. See [UPGRADING](./UPGRADING.md) for details.
264 | * re-run `versioneer install` in your source tree, to replace
265 | `SRC/_version.py`
266 | * commit any changed files
267 |
268 | ## Future Directions
269 |
270 | This tool is designed to make it easily extended to other version-control
271 | systems: all VCS-specific components are in separate directories like
272 | src/git/ . The top-level `versioneer.py` script is assembled from these
273 | components by running make-versioneer.py . In the future, make-versioneer.py
274 | will take a VCS name as an argument, and will construct a version of
275 | `versioneer.py` that is specific to the given VCS. It might also take the
276 | configuration arguments that are currently provided manually during
277 | installation by editing setup.py . Alternatively, it might go the other
278 | direction and include code from all supported VCS systems, reducing the
279 | number of intermediate scripts.
280 |
281 |
282 | ## License
283 |
284 | To make Versioneer easier to embed, all its code is dedicated to the public
285 | domain. The `_version.py` that it creates is also in the public domain.
286 | Specifically, both are released under the Creative Commons "Public Domain
287 | Dedication" license (CC0-1.0), as described in
288 | https://creativecommons.org/publicdomain/zero/1.0/ .
289 |
290 | """
291 |
292 | from __future__ import print_function
293 | try:
294 | import configparser
295 | except ImportError:
296 | import ConfigParser as configparser
297 | import errno
298 | import json
299 | import os
300 | import re
301 | import subprocess
302 | import sys
303 |
304 |
305 | class VersioneerConfig:
306 | """Container for Versioneer configuration parameters."""
307 |
308 |
309 | def get_root():
310 | """Get the project root directory.
311 |
312 | We require that all commands are run from the project root, i.e. the
313 | directory that contains setup.py, setup.cfg, and versioneer.py .
314 | """
315 | root = os.path.realpath(os.path.abspath(os.getcwd()))
316 | setup_py = os.path.join(root, "setup.py")
317 | versioneer_py = os.path.join(root, "versioneer.py")
318 | if not (os.path.exists(setup_py) or os.path.exists(versioneer_py)):
319 | # allow 'python path/to/setup.py COMMAND'
320 | root = os.path.dirname(os.path.realpath(os.path.abspath(sys.argv[0])))
321 | setup_py = os.path.join(root, "setup.py")
322 | versioneer_py = os.path.join(root, "versioneer.py")
323 | if not (os.path.exists(setup_py) or os.path.exists(versioneer_py)):
324 |       err = ("Versioneer was unable to find the project root directory. "
325 | "Versioneer requires setup.py to be executed from "
326 | "its immediate directory (like 'python setup.py COMMAND'), "
327 | "or in a way that lets it use sys.argv[0] to find the root "
328 | "(like 'python path/to/setup.py COMMAND').")
329 | raise VersioneerBadRootError(err)
330 | try:
331 | # Certain runtime workflows (setup.py install/develop in a setuptools
332 | # tree) execute all dependencies in a single python process, so
333 | # "versioneer" may be imported multiple times, and python's shared
334 | # module-import table will cache the first one. So we can't use
335 | # os.path.dirname(__file__), as that will find whichever
336 | # versioneer.py was first imported, even in later projects.
337 | me = os.path.realpath(os.path.abspath(__file__))
338 | me_dir = os.path.normcase(os.path.splitext(me)[0])
339 | vsr_dir = os.path.normcase(os.path.splitext(versioneer_py)[0])
340 | if me_dir != vsr_dir:
341 | print("Warning: build in %s is using versioneer.py from %s" %
342 | (os.path.dirname(me), versioneer_py))
343 | except NameError:
344 | pass
345 | return root
346 |
347 |
348 | def get_config_from_root(root):
349 | """Read the project setup.cfg file to determine Versioneer config."""
350 | # This might raise EnvironmentError (if setup.cfg is missing), or
351 | # configparser.NoSectionError (if it lacks a [versioneer] section), or
352 | # configparser.NoOptionError (if it lacks "VCS="). See the docstring at
353 | # the top of versioneer.py for instructions on writing your setup.cfg .
354 | setup_cfg = os.path.join(root, "setup.cfg")
355 | parser = configparser.SafeConfigParser()
356 | with open(setup_cfg, "r") as f:
357 | parser.readfp(f)
358 | VCS = parser.get("versioneer", "VCS") # mandatory
359 |
360 | def get(parser, name):
361 | if parser.has_option("versioneer", name):
362 | return parser.get("versioneer", name)
363 | return None
364 |
365 | cfg = VersioneerConfig()
366 | cfg.VCS = VCS
367 | cfg.style = get(parser, "style") or ""
368 | cfg.versionfile_source = get(parser, "versionfile_source")
369 | cfg.versionfile_build = get(parser, "versionfile_build")
370 | cfg.tag_prefix = get(parser, "tag_prefix")
371 | if cfg.tag_prefix in ("''", '""'):
372 | cfg.tag_prefix = ""
373 | cfg.parentdir_prefix = get(parser, "parentdir_prefix")
374 | cfg.verbose = get(parser, "verbose")
375 | return cfg
376 |
377 |
378 | class NotThisMethod(Exception):
379 | """Exception raised if a method is not valid for the current scenario."""
380 |
381 |
382 | # these dictionaries contain VCS-specific tools
383 | LONG_VERSION_PY = {}
384 | HANDLERS = {}
385 |
386 |
387 | def register_vcs_handler(vcs, method): # decorator
388 | """Decorator to mark a method as the handler for a particular VCS."""
389 |
390 | def decorate(f):
391 | """Store f in HANDLERS[vcs][method]."""
392 | if vcs not in HANDLERS:
393 | HANDLERS[vcs] = {}
394 | HANDLERS[vcs][method] = f
395 | return f
396 |
397 | return decorate
398 |
399 |
400 | def run_command(commands,
401 | args,
402 | cwd=None,
403 | verbose=False,
404 | hide_stderr=False,
405 | env=None):
406 | """Call the given command(s)."""
407 | assert isinstance(commands, list)
408 | p = None
409 | for c in commands:
410 | try:
411 | dispcmd = str([c] + args)
412 | # remember shell=False, so use git.cmd on windows, not just git
413 | p = subprocess.Popen([c] + args,
414 | cwd=cwd,
415 | env=env,
416 | stdout=subprocess.PIPE,
417 | stderr=(subprocess.PIPE if hide_stderr else None))
418 | break
419 | except EnvironmentError:
420 | e = sys.exc_info()[1]
421 | if e.errno == errno.ENOENT:
422 | continue
423 | if verbose:
424 | print("unable to run %s" % dispcmd)
425 | print(e)
426 | return None, None
427 | else:
428 | if verbose:
429 | print("unable to find command, tried %s" % (commands,))
430 | return None, None
431 | stdout = p.communicate()[0].strip()
432 | if sys.version_info[0] >= 3:
433 | stdout = stdout.decode()
434 | if p.returncode != 0:
435 | if verbose:
436 | print("unable to run %s (error)" % dispcmd)
437 | print("stdout was %s" % stdout)
438 | return None, p.returncode
439 | return stdout, p.returncode
440 |
441 |
442 | LONG_VERSION_PY['git'] = '''
443 | # This file helps to compute a version number in source trees obtained from
444 | # git-archive tarball (such as those provided by github's download-from-tag
445 | # feature). Distribution tarballs (built by setup.py sdist) and build
446 | # directories (produced by setup.py build) will contain a much shorter file
447 | # that just contains the computed version number.
448 |
449 | # This file is released into the public domain. Generated by
450 | # versioneer-0.18 (https://github.com/warner/python-versioneer)
451 |
452 | """Git implementation of _version.py."""
453 |
454 | import errno
455 | import os
456 | import re
457 | import subprocess
458 | import sys
459 |
460 |
461 | def get_keywords():
462 | """Get the keywords needed to look up the version information."""
463 | # these strings will be replaced by git during git-archive.
464 | # setup.py/versioneer.py will grep for the variable names, so they must
465 | # each be defined on a line of their own. _version.py will just call
466 | # get_keywords().
467 | git_refnames = "%(DOLLAR)sFormat:%%d%(DOLLAR)s"
468 | git_full = "%(DOLLAR)sFormat:%%H%(DOLLAR)s"
469 | git_date = "%(DOLLAR)sFormat:%%ci%(DOLLAR)s"
470 | keywords = {"refnames": git_refnames, "full": git_full, "date": git_date}
471 | return keywords
472 |
473 |
474 | class VersioneerConfig:
475 | """Container for Versioneer configuration parameters."""
476 |
477 |
478 | def get_config():
479 | """Create, populate and return the VersioneerConfig() object."""
480 | # these strings are filled in when 'setup.py versioneer' creates
481 | # _version.py
482 | cfg = VersioneerConfig()
483 | cfg.VCS = "git"
484 | cfg.style = "%(STYLE)s"
485 | cfg.tag_prefix = "%(TAG_PREFIX)s"
486 | cfg.parentdir_prefix = "%(PARENTDIR_PREFIX)s"
487 | cfg.versionfile_source = "%(VERSIONFILE_SOURCE)s"
488 | cfg.verbose = False
489 | return cfg
490 |
491 |
492 | class NotThisMethod(Exception):
493 | """Exception raised if a method is not valid for the current scenario."""
494 |
495 |
496 | LONG_VERSION_PY = {}
497 | HANDLERS = {}
498 |
499 |
500 | def register_vcs_handler(vcs, method): # decorator
501 | """Decorator to mark a method as the handler for a particular VCS."""
502 | def decorate(f):
503 | """Store f in HANDLERS[vcs][method]."""
504 | if vcs not in HANDLERS:
505 | HANDLERS[vcs] = {}
506 | HANDLERS[vcs][method] = f
507 | return f
508 | return decorate
509 |
510 |
511 | def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False,
512 | env=None):
513 | """Call the given command(s)."""
514 | assert isinstance(commands, list)
515 | p = None
516 | for c in commands:
517 | try:
518 | dispcmd = str([c] + args)
519 | # remember shell=False, so use git.cmd on windows, not just git
520 | p = subprocess.Popen([c] + args, cwd=cwd, env=env,
521 | stdout=subprocess.PIPE,
522 | stderr=(subprocess.PIPE if hide_stderr
523 | else None))
524 | break
525 | except EnvironmentError:
526 | e = sys.exc_info()[1]
527 | if e.errno == errno.ENOENT:
528 | continue
529 | if verbose:
530 | print("unable to run %%s" %% dispcmd)
531 | print(e)
532 | return None, None
533 | else:
534 | if verbose:
535 | print("unable to find command, tried %%s" %% (commands,))
536 | return None, None
537 | stdout = p.communicate()[0].strip()
538 | if sys.version_info[0] >= 3:
539 | stdout = stdout.decode()
540 | if p.returncode != 0:
541 | if verbose:
542 | print("unable to run %%s (error)" %% dispcmd)
543 | print("stdout was %%s" %% stdout)
544 | return None, p.returncode
545 | return stdout, p.returncode
546 |
547 |
548 | def versions_from_parentdir(parentdir_prefix, root, verbose):
549 | """Try to determine the version from the parent directory name.
550 |
551 | Source tarballs conventionally unpack into a directory that includes both
552 | the project name and a version string. We will also support searching up
553 |     two directory levels for an appropriately named parent directory.
554 | """
555 | rootdirs = []
556 |
557 | for i in range(3):
558 | dirname = os.path.basename(root)
559 | if dirname.startswith(parentdir_prefix):
560 | return {"version": dirname[len(parentdir_prefix):],
561 | "full-revisionid": None,
562 | "dirty": False, "error": None, "date": None}
563 | else:
564 | rootdirs.append(root)
565 | root = os.path.dirname(root) # up a level
566 |
567 | if verbose:
568 | print("Tried directories %%s but none started with prefix %%s" %%
569 | (str(rootdirs), parentdir_prefix))
570 | raise NotThisMethod("rootdir doesn't start with parentdir_prefix")
571 |
572 |
573 | @register_vcs_handler("git", "get_keywords")
574 | def git_get_keywords(versionfile_abs):
575 | """Extract version information from the given file."""
576 | # the code embedded in _version.py can just fetch the value of these
577 | # keywords. When used from setup.py, we don't want to import _version.py,
578 | # so we do it with a regexp instead. This function is not used from
579 | # _version.py.
580 | keywords = {}
581 | try:
582 | f = open(versionfile_abs, "r")
583 | for line in f.readlines():
584 | if line.strip().startswith("git_refnames ="):
585 | mo = re.search(r'=\s*"(.*)"', line)
586 | if mo:
587 | keywords["refnames"] = mo.group(1)
588 | if line.strip().startswith("git_full ="):
589 | mo = re.search(r'=\s*"(.*)"', line)
590 | if mo:
591 | keywords["full"] = mo.group(1)
592 | if line.strip().startswith("git_date ="):
593 | mo = re.search(r'=\s*"(.*)"', line)
594 | if mo:
595 | keywords["date"] = mo.group(1)
596 | f.close()
597 | except EnvironmentError:
598 | pass
599 | return keywords
600 |
601 |
602 | @register_vcs_handler("git", "keywords")
603 | def git_versions_from_keywords(keywords, tag_prefix, verbose):
604 | """Get version information from git keywords."""
605 | if not keywords:
606 | raise NotThisMethod("no keywords at all, weird")
607 | date = keywords.get("date")
608 | if date is not None:
609 | # git-2.2.0 added "%%cI", which expands to an ISO-8601 -compliant
610 | # datestamp. However we prefer "%%ci" (which expands to an "ISO-8601
611 | # -like" string, which we must then edit to make compliant), because
612 | # it's been around since git-1.5.3, and it's too difficult to
613 | # discover which version we're using, or to work around using an
614 | # older one.
615 | date = date.strip().replace(" ", "T", 1).replace(" ", "", 1)
616 | refnames = keywords["refnames"].strip()
617 | if refnames.startswith("$Format"):
618 | if verbose:
619 | print("keywords are unexpanded, not using")
620 | raise NotThisMethod("unexpanded keywords, not a git-archive tarball")
621 | refs = set([r.strip() for r in refnames.strip("()").split(",")])
622 | # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of
623 | # just "foo-1.0". If we see a "tag: " prefix, prefer those.
624 | TAG = "tag: "
625 | tags = set([r[len(TAG):] for r in refs if r.startswith(TAG)])
626 | if not tags:
627 | # Either we're using git < 1.8.3, or there really are no tags. We use
628 | # a heuristic: assume all version tags have a digit. The old git %%d
629 | # expansion behaves like git log --decorate=short and strips out the
630 | # refs/heads/ and refs/tags/ prefixes that would let us distinguish
631 | # between branches and tags. By ignoring refnames without digits, we
632 | # filter out many common branch names like "release" and
633 | # "stabilization", as well as "HEAD" and "master".
634 | tags = set([r for r in refs if re.search(r'\d', r)])
635 | if verbose:
636 | print("discarding '%%s', no digits" %% ",".join(refs - tags))
637 | if verbose:
638 | print("likely tags: %%s" %% ",".join(sorted(tags)))
639 | for ref in sorted(tags):
640 | # sorting will prefer e.g. "2.0" over "2.0rc1"
641 | if ref.startswith(tag_prefix):
642 | r = ref[len(tag_prefix):]
643 | if verbose:
644 | print("picking %%s" %% r)
645 | return {"version": r,
646 | "full-revisionid": keywords["full"].strip(),
647 | "dirty": False, "error": None,
648 | "date": date}
649 | # no suitable tags, so version is "0+unknown", but full hex is still there
650 | if verbose:
651 | print("no suitable tags, using unknown + full revision id")
652 | return {"version": "0+unknown",
653 | "full-revisionid": keywords["full"].strip(),
654 | "dirty": False, "error": "no suitable tags", "date": None}
655 |
656 |
657 | @register_vcs_handler("git", "pieces_from_vcs")
658 | def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command):
659 | """Get version from 'git describe' in the root of the source tree.
660 |
661 | This only gets called if the git-archive 'subst' keywords were *not*
662 | expanded, and _version.py hasn't already been rewritten with a short
663 | version string, meaning we're inside a checked out source tree.
664 | """
665 | GITS = ["git"]
666 | if sys.platform == "win32":
667 | GITS = ["git.cmd", "git.exe"]
668 |
669 | out, rc = run_command(GITS, ["rev-parse", "--git-dir"], cwd=root,
670 | hide_stderr=True)
671 | if rc != 0:
672 | if verbose:
673 | print("Directory %%s not under git control" %% root)
674 | raise NotThisMethod("'git rev-parse --git-dir' returned error")
675 |
676 | # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty]
677 | # if there isn't one, this yields HEX[-dirty] (no NUM)
678 | describe_out, rc = run_command(GITS, ["describe", "--tags", "--dirty",
679 | "--always", "--long",
680 | "--match", "%%s*" %% tag_prefix],
681 | cwd=root)
682 | # --long was added in git-1.5.5
683 | if describe_out is None:
684 | raise NotThisMethod("'git describe' failed")
685 | describe_out = describe_out.strip()
686 | full_out, rc = run_command(GITS, ["rev-parse", "HEAD"], cwd=root)
687 | if full_out is None:
688 | raise NotThisMethod("'git rev-parse' failed")
689 | full_out = full_out.strip()
690 |
691 | pieces = {}
692 | pieces["long"] = full_out
693 | pieces["short"] = full_out[:7] # maybe improved later
694 | pieces["error"] = None
695 |
696 | # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty]
697 | # TAG might have hyphens.
698 | git_describe = describe_out
699 |
700 | # look for -dirty suffix
701 | dirty = git_describe.endswith("-dirty")
702 | pieces["dirty"] = dirty
703 | if dirty:
704 | git_describe = git_describe[:git_describe.rindex("-dirty")]
705 |
706 | # now we have TAG-NUM-gHEX or HEX
707 |
708 | if "-" in git_describe:
709 | # TAG-NUM-gHEX
710 | mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe)
711 | if not mo:
712 | # unparseable. Maybe git-describe is misbehaving?
713 | pieces["error"] = ("unable to parse git-describe output: '%%s'"
714 | %% describe_out)
715 | return pieces
716 |
717 | # tag
718 | full_tag = mo.group(1)
719 | if not full_tag.startswith(tag_prefix):
720 | if verbose:
721 | fmt = "tag '%%s' doesn't start with prefix '%%s'"
722 | print(fmt %% (full_tag, tag_prefix))
723 | pieces["error"] = ("tag '%%s' doesn't start with prefix '%%s'"
724 | %% (full_tag, tag_prefix))
725 | return pieces
726 | pieces["closest-tag"] = full_tag[len(tag_prefix):]
727 |
728 | # distance: number of commits since tag
729 | pieces["distance"] = int(mo.group(2))
730 |
731 | # commit: short hex revision ID
732 | pieces["short"] = mo.group(3)
733 |
734 | else:
735 | # HEX: no tags
736 | pieces["closest-tag"] = None
737 | count_out, rc = run_command(GITS, ["rev-list", "HEAD", "--count"],
738 | cwd=root)
739 | pieces["distance"] = int(count_out) # total number of commits
740 |
741 | # commit date: see ISO-8601 comment in git_versions_from_keywords()
742 | date = run_command(GITS, ["show", "-s", "--format=%%ci", "HEAD"],
743 | cwd=root)[0].strip()
744 | pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1)
745 |
746 | return pieces
747 |
748 |
749 | def plus_or_dot(pieces):
750 | """Return a + if we don't already have one, else return a ."""
751 | if "+" in pieces.get("closest-tag", ""):
752 | return "."
753 | return "+"
754 |
755 |
756 | def render_pep440(pieces):
757 | """Build up version string, with post-release "local version identifier".
758 |
759 | Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you
760 | get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty
761 |
762 | Exceptions:
763 | 1: no tags. git_describe was just HEX. 0+untagged.DISTANCE.gHEX[.dirty]
764 | """
765 | if pieces["closest-tag"]:
766 | rendered = pieces["closest-tag"]
767 | if pieces["distance"] or pieces["dirty"]:
768 | rendered += plus_or_dot(pieces)
769 | rendered += "%%d.g%%s" %% (pieces["distance"], pieces["short"])
770 | if pieces["dirty"]:
771 | rendered += ".dirty"
772 | else:
773 | # exception #1
774 | rendered = "0+untagged.%%d.g%%s" %% (pieces["distance"],
775 | pieces["short"])
776 | if pieces["dirty"]:
777 | rendered += ".dirty"
778 | return rendered
779 |
780 |
781 | def render_pep440_pre(pieces):
782 | """TAG[.post.devDISTANCE] -- No -dirty.
783 |
784 | Exceptions:
785 | 1: no tags. 0.post.devDISTANCE
786 | """
787 | if pieces["closest-tag"]:
788 | rendered = pieces["closest-tag"]
789 | if pieces["distance"]:
790 | rendered += ".post.dev%%d" %% pieces["distance"]
791 | else:
792 | # exception #1
793 | rendered = "0.post.dev%%d" %% pieces["distance"]
794 | return rendered
795 |
796 |
797 | def render_pep440_post(pieces):
798 | """TAG[.postDISTANCE[.dev0]+gHEX] .
799 |
800 | The ".dev0" means dirty. Note that .dev0 sorts backwards
801 | (a dirty tree will appear "older" than the corresponding clean one),
802 | but you shouldn't be releasing software with -dirty anyways.
803 |
804 | Exceptions:
805 | 1: no tags. 0.postDISTANCE[.dev0]
806 | """
807 | if pieces["closest-tag"]:
808 | rendered = pieces["closest-tag"]
809 | if pieces["distance"] or pieces["dirty"]:
810 | rendered += ".post%%d" %% pieces["distance"]
811 | if pieces["dirty"]:
812 | rendered += ".dev0"
813 | rendered += plus_or_dot(pieces)
814 | rendered += "g%%s" %% pieces["short"]
815 | else:
816 | # exception #1
817 | rendered = "0.post%%d" %% pieces["distance"]
818 | if pieces["dirty"]:
819 | rendered += ".dev0"
820 | rendered += "+g%%s" %% pieces["short"]
821 | return rendered
822 |
823 |
824 | def render_pep440_old(pieces):
825 | """TAG[.postDISTANCE[.dev0]] .
826 |
827 | The ".dev0" means dirty.
828 |
829 |     Exceptions:
830 | 1: no tags. 0.postDISTANCE[.dev0]
831 | """
832 | if pieces["closest-tag"]:
833 | rendered = pieces["closest-tag"]
834 | if pieces["distance"] or pieces["dirty"]:
835 | rendered += ".post%%d" %% pieces["distance"]
836 | if pieces["dirty"]:
837 | rendered += ".dev0"
838 | else:
839 | # exception #1
840 | rendered = "0.post%%d" %% pieces["distance"]
841 | if pieces["dirty"]:
842 | rendered += ".dev0"
843 | return rendered
844 |
845 |
846 | def render_git_describe(pieces):
847 | """TAG[-DISTANCE-gHEX][-dirty].
848 |
849 | Like 'git describe --tags --dirty --always'.
850 |
851 | Exceptions:
852 | 1: no tags. HEX[-dirty] (note: no 'g' prefix)
853 | """
854 | if pieces["closest-tag"]:
855 | rendered = pieces["closest-tag"]
856 | if pieces["distance"]:
857 | rendered += "-%%d-g%%s" %% (pieces["distance"], pieces["short"])
858 | else:
859 | # exception #1
860 | rendered = pieces["short"]
861 | if pieces["dirty"]:
862 | rendered += "-dirty"
863 | return rendered
864 |
865 |
866 | def render_git_describe_long(pieces):
867 | """TAG-DISTANCE-gHEX[-dirty].
868 |
869 |     Like 'git describe --tags --dirty --always --long'.
870 | The distance/hash is unconditional.
871 |
872 | Exceptions:
873 | 1: no tags. HEX[-dirty] (note: no 'g' prefix)
874 | """
875 | if pieces["closest-tag"]:
876 | rendered = pieces["closest-tag"]
877 | rendered += "-%%d-g%%s" %% (pieces["distance"], pieces["short"])
878 | else:
879 | # exception #1
880 | rendered = pieces["short"]
881 | if pieces["dirty"]:
882 | rendered += "-dirty"
883 | return rendered
884 |
885 |
886 | def render(pieces, style):
887 | """Render the given version pieces into the requested style."""
888 | if pieces["error"]:
889 | return {"version": "unknown",
890 | "full-revisionid": pieces.get("long"),
891 | "dirty": None,
892 | "error": pieces["error"],
893 | "date": None}
894 |
895 | if not style or style == "default":
896 | style = "pep440" # the default
897 |
898 | if style == "pep440":
899 | rendered = render_pep440(pieces)
900 | elif style == "pep440-pre":
901 | rendered = render_pep440_pre(pieces)
902 | elif style == "pep440-post":
903 | rendered = render_pep440_post(pieces)
904 | elif style == "pep440-old":
905 | rendered = render_pep440_old(pieces)
906 | elif style == "git-describe":
907 | rendered = render_git_describe(pieces)
908 | elif style == "git-describe-long":
909 | rendered = render_git_describe_long(pieces)
910 | else:
911 | raise ValueError("unknown style '%%s'" %% style)
912 |
913 | return {"version": rendered, "full-revisionid": pieces["long"],
914 | "dirty": pieces["dirty"], "error": None,
915 | "date": pieces.get("date")}
916 |
917 |
918 | def get_versions():
919 | """Get version information or return default if unable to do so."""
920 | # I am in _version.py, which lives at ROOT/VERSIONFILE_SOURCE. If we have
921 | # __file__, we can work backwards from there to the root. Some
922 | # py2exe/bbfreeze/non-CPython implementations don't do __file__, in which
923 | # case we can only use expanded keywords.
924 |
925 | cfg = get_config()
926 | verbose = cfg.verbose
927 |
928 | try:
929 | return git_versions_from_keywords(get_keywords(), cfg.tag_prefix,
930 | verbose)
931 | except NotThisMethod:
932 | pass
933 |
934 | try:
935 | root = os.path.realpath(__file__)
936 | # versionfile_source is the relative path from the top of the source
937 | # tree (where the .git directory might live) to this file. Invert
938 | # this to find the root from __file__.
939 | for i in cfg.versionfile_source.split('/'):
940 | root = os.path.dirname(root)
941 | except NameError:
942 | return {"version": "0+unknown", "full-revisionid": None,
943 | "dirty": None,
944 | "error": "unable to find root of source tree",
945 | "date": None}
946 |
947 | try:
948 | pieces = git_pieces_from_vcs(cfg.tag_prefix, root, verbose)
949 | return render(pieces, cfg.style)
950 | except NotThisMethod:
951 | pass
952 |
953 | try:
954 | if cfg.parentdir_prefix:
955 | return versions_from_parentdir(cfg.parentdir_prefix, root, verbose)
956 | except NotThisMethod:
957 | pass
958 |
959 | return {"version": "0+unknown", "full-revisionid": None,
960 | "dirty": None,
961 | "error": "unable to compute version", "date": None}
962 | '''
963 |
964 |
965 | @register_vcs_handler("git", "get_keywords")
966 | def git_get_keywords(versionfile_abs):
967 | """Extract version information from the given file."""
968 | # the code embedded in _version.py can just fetch the value of these
969 | # keywords. When used from setup.py, we don't want to import _version.py,
970 | # so we do it with a regexp instead. This function is not used from
971 | # _version.py.
972 | keywords = {}
973 | try:
974 | f = open(versionfile_abs, "r")
975 | for line in f.readlines():
976 | if line.strip().startswith("git_refnames ="):
977 | mo = re.search(r'=\s*"(.*)"', line)
978 | if mo:
979 | keywords["refnames"] = mo.group(1)
980 | if line.strip().startswith("git_full ="):
981 | mo = re.search(r'=\s*"(.*)"', line)
982 | if mo:
983 | keywords["full"] = mo.group(1)
984 | if line.strip().startswith("git_date ="):
985 | mo = re.search(r'=\s*"(.*)"', line)
986 | if mo:
987 | keywords["date"] = mo.group(1)
988 | f.close()
989 | except EnvironmentError:
990 | pass
991 | return keywords
992 |
993 |
994 | @register_vcs_handler("git", "keywords")
995 | def git_versions_from_keywords(keywords, tag_prefix, verbose):
996 | """Get version information from git keywords."""
997 | if not keywords:
998 | raise NotThisMethod("no keywords at all, weird")
999 | date = keywords.get("date")
1000 | if date is not None:
1001 | # git-2.2.0 added "%cI", which expands to an ISO-8601 -compliant
1002 | # datestamp. However we prefer "%ci" (which expands to an "ISO-8601
1003 | # -like" string, which we must then edit to make compliant), because
1004 | # it's been around since git-1.5.3, and it's too difficult to
1005 | # discover which version we're using, or to work around using an
1006 | # older one.
1007 | date = date.strip().replace(" ", "T", 1).replace(" ", "", 1)
1008 | refnames = keywords["refnames"].strip()
1009 | if refnames.startswith("$Format"):
1010 | if verbose:
1011 | print("keywords are unexpanded, not using")
1012 | raise NotThisMethod("unexpanded keywords, not a git-archive tarball")
1013 | refs = set([r.strip() for r in refnames.strip("()").split(",")])
1014 | # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of
1015 | # just "foo-1.0". If we see a "tag: " prefix, prefer those.
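     |   # Illustrative example: an expanded refnames value of
     |   # " (HEAD -> master, tag: 1.0)" produces refs of
     |   # {"HEAD -> master", "tag: 1.0"}, and only the "tag: "-prefixed entry
     |   # survives below as a candidate tag ("1.0").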
1016 | TAG = "tag: "
1017 | tags = set([r[len(TAG):] for r in refs if r.startswith(TAG)])
1018 | if not tags:
1019 | # Either we're using git < 1.8.3, or there really are no tags. We use
1020 | # a heuristic: assume all version tags have a digit. The old git %d
1021 | # expansion behaves like git log --decorate=short and strips out the
1022 | # refs/heads/ and refs/tags/ prefixes that would let us distinguish
1023 | # between branches and tags. By ignoring refnames without digits, we
1024 | # filter out many common branch names like "release" and
1025 | # "stabilization", as well as "HEAD" and "master".
1026 | tags = set([r for r in refs if re.search(r'\d', r)])
1027 | if verbose:
1028 | print("discarding '%s', no digits" % ",".join(refs - tags))
1029 | if verbose:
1030 | print("likely tags: %s" % ",".join(sorted(tags)))
1031 | for ref in sorted(tags):
1032 | # sorting will prefer e.g. "2.0" over "2.0rc1"
1033 | if ref.startswith(tag_prefix):
1034 | r = ref[len(tag_prefix):]
1035 | if verbose:
1036 | print("picking %s" % r)
1037 | return {
1038 | "version": r,
1039 | "full-revisionid": keywords["full"].strip(),
1040 | "dirty": False,
1041 | "error": None,
1042 | "date": date
1043 | }
1044 | # no suitable tags, so version is "0+unknown", but full hex is still there
1045 | if verbose:
1046 | print("no suitable tags, using unknown + full revision id")
1047 | return {
1048 | "version": "0+unknown",
1049 | "full-revisionid": keywords["full"].strip(),
1050 | "dirty": False,
1051 | "error": "no suitable tags",
1052 | "date": None
1053 | }
1054 |
1055 |
1056 | @register_vcs_handler("git", "pieces_from_vcs")
1057 | def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command):
1058 | """Get version from 'git describe' in the root of the source tree.
1059 |
1060 | This only gets called if the git-archive 'subst' keywords were *not*
1061 | expanded, and _version.py hasn't already been rewritten with a short
1062 | version string, meaning we're inside a checked out source tree.
1063 | """
1064 | GITS = ["git"]
1065 | if sys.platform == "win32":
1066 | GITS = ["git.cmd", "git.exe"]
1067 |
1068 | out, rc = run_command(GITS, ["rev-parse", "--git-dir"],
1069 | cwd=root,
1070 | hide_stderr=True)
1071 | if rc != 0:
1072 | if verbose:
1073 | print("Directory %s not under git control" % root)
1074 | raise NotThisMethod("'git rev-parse --git-dir' returned error")
1075 |
1076 | # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty]
1077 | # if there isn't one, this yields HEX[-dirty] (no NUM)
1078 | describe_out, rc = run_command(GITS, [
1079 | "describe", "--tags", "--dirty", "--always", "--long", "--match",
1080 | "%s*" % tag_prefix
1081 | ],
1082 | cwd=root)
1083 | # --long was added in git-1.5.5
1084 | if describe_out is None:
1085 | raise NotThisMethod("'git describe' failed")
1086 | describe_out = describe_out.strip()
1087 | full_out, rc = run_command(GITS, ["rev-parse", "HEAD"], cwd=root)
1088 | if full_out is None:
1089 | raise NotThisMethod("'git rev-parse' failed")
1090 | full_out = full_out.strip()
1091 |
1092 | pieces = {}
1093 | pieces["long"] = full_out
1094 | pieces["short"] = full_out[:7] # maybe improved later
1095 | pieces["error"] = None
1096 |
1097 | # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty]
1098 | # TAG might have hyphens.
1099 | git_describe = describe_out
1100 |
1101 | # look for -dirty suffix
1102 | dirty = git_describe.endswith("-dirty")
1103 | pieces["dirty"] = dirty
1104 | if dirty:
1105 | git_describe = git_describe[:git_describe.rindex("-dirty")]
1106 |
1107 | # now we have TAG-NUM-gHEX or HEX
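     |   # Illustrative example (assuming an empty tag_prefix): "1.2-14-ge0c1234"
     |   # would parse into closest-tag "1.2", distance 14, and short "e0c1234".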
1108 |
1109 | if "-" in git_describe:
1110 | # TAG-NUM-gHEX
1111 | mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe)
1112 | if not mo:
1113 | # unparseable. Maybe git-describe is misbehaving?
1114 | pieces["error"] = ("unable to parse git-describe output: '%s'" %
1115 | describe_out)
1116 | return pieces
1117 |
1118 | # tag
1119 | full_tag = mo.group(1)
1120 | if not full_tag.startswith(tag_prefix):
1121 | if verbose:
1122 | fmt = "tag '%s' doesn't start with prefix '%s'"
1123 | print(fmt % (full_tag, tag_prefix))
1124 | pieces["error"] = ("tag '%s' doesn't start with prefix '%s'" %
1125 | (full_tag, tag_prefix))
1126 | return pieces
1127 | pieces["closest-tag"] = full_tag[len(tag_prefix):]
1128 |
1129 | # distance: number of commits since tag
1130 | pieces["distance"] = int(mo.group(2))
1131 |
1132 | # commit: short hex revision ID
1133 | pieces["short"] = mo.group(3)
1134 |
1135 | else:
1136 | # HEX: no tags
1137 | pieces["closest-tag"] = None
1138 | count_out, rc = run_command(GITS, ["rev-list", "HEAD", "--count"], cwd=root)
1139 | pieces["distance"] = int(count_out) # total number of commits
1140 |
1141 | # commit date: see ISO-8601 comment in git_versions_from_keywords()
1142 | date = run_command(GITS, ["show", "-s", "--format=%ci", "HEAD"],
1143 | cwd=root)[0].strip()
1144 | pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1)
1145 |
1146 | return pieces
1147 |
1148 |
1149 | def do_vcs_install(manifest_in, versionfile_source, ipy):
1150 | """Git-specific installation logic for Versioneer.
1151 |
1152 | For Git, this means creating/changing .gitattributes to mark _version.py
1153 | for export-subst keyword substitution.
1154 | """
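     |   # After this runs, .gitattributes should contain a line roughly like
     |   #   PKG/_version.py export-subst
     |   # (path illustrative; the real path comes from versionfile_source).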
1155 | GITS = ["git"]
1156 | if sys.platform == "win32":
1157 | GITS = ["git.cmd", "git.exe"]
1158 | files = [manifest_in, versionfile_source]
1159 | if ipy:
1160 | files.append(ipy)
1161 | try:
1162 | me = __file__
1163 | if me.endswith(".pyc") or me.endswith(".pyo"):
1164 | me = os.path.splitext(me)[0] + ".py"
1165 | versioneer_file = os.path.relpath(me)
1166 | except NameError:
1167 | versioneer_file = "versioneer.py"
1168 | files.append(versioneer_file)
1169 | present = False
1170 | try:
1171 | f = open(".gitattributes", "r")
1172 | for line in f.readlines():
1173 | if line.strip().startswith(versionfile_source):
1174 | if "export-subst" in line.strip().split()[1:]:
1175 | present = True
1176 | f.close()
1177 | except EnvironmentError:
1178 | pass
1179 | if not present:
1180 | f = open(".gitattributes", "a+")
1181 | f.write("%s export-subst\n" % versionfile_source)
1182 | f.close()
1183 | files.append(".gitattributes")
1184 | run_command(GITS, ["add", "--"] + files)
1185 |
1186 |
1187 | def versions_from_parentdir(parentdir_prefix, root, verbose):
1188 | """Try to determine the version from the parent directory name.
1189 |
1190 | Source tarballs conventionally unpack into a directory that includes both
1191 | the project name and a version string. We will also support searching up
1192 |   two directory levels for an appropriately named parent directory.
1193 | """
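     |   # Illustrative example (names hypothetical): with parentdir_prefix set to
     |   # "myproject-", a tarball unpacked into ".../myproject-1.2.3/" would yield
     |   # version "1.2.3".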
1194 | rootdirs = []
1195 |
1196 | for i in range(3):
1197 | dirname = os.path.basename(root)
1198 | if dirname.startswith(parentdir_prefix):
1199 | return {
1200 | "version": dirname[len(parentdir_prefix):],
1201 | "full-revisionid": None,
1202 | "dirty": False,
1203 | "error": None,
1204 | "date": None
1205 | }
1206 | else:
1207 | rootdirs.append(root)
1208 | root = os.path.dirname(root) # up a level
1209 |
1210 | if verbose:
1211 | print("Tried directories %s but none started with prefix %s" %
1212 | (str(rootdirs), parentdir_prefix))
1213 | raise NotThisMethod("rootdir doesn't start with parentdir_prefix")
1214 |
1215 |
1216 | SHORT_VERSION_PY = """
1217 | # This file was generated by 'versioneer.py' (0.18) from
1218 | # revision-control system data, or from the parent directory name of an
1219 | # unpacked source archive. Distribution tarballs contain a pre-generated copy
1220 | # of this file.
1221 |
1222 | import json
1223 |
1224 | version_json = '''
1225 | %s
1226 | ''' # END VERSION_JSON
1227 |
1228 |
1229 | def get_versions():
1230 | return json.loads(version_json)
1231 | """
1232 |
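     | # Once write_to_version_file() below fills in the template, the embedded JSON
     | # looks roughly like this (values illustrative):
     | #   {"date": "2020-01-01T00:00:00+0000", "dirty": false, "error": null,
     | #    "full-revisionid": "<full hex revision id>", "version": "0.1.0"}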
1233 |
1234 | def versions_from_file(filename):
1235 | """Try to determine the version from _version.py if present."""
1236 | try:
1237 | with open(filename) as f:
1238 | contents = f.read()
1239 | except EnvironmentError:
1240 | raise NotThisMethod("unable to read _version.py")
1241 | mo = re.search(r"version_json = '''\n(.*)''' # END VERSION_JSON", contents,
1242 | re.M | re.S)
1243 | if not mo:
1244 | mo = re.search(r"version_json = '''\r\n(.*)''' # END VERSION_JSON",
1245 | contents, re.M | re.S)
1246 | if not mo:
1247 | raise NotThisMethod("no version_json in _version.py")
1248 | return json.loads(mo.group(1))
1249 |
1250 |
1251 | def write_to_version_file(filename, versions):
1252 | """Write the given version number to the given _version.py file."""
1253 | os.unlink(filename)
1254 | contents = json.dumps(versions,
1255 | sort_keys=True,
1256 | indent=1,
1257 | separators=(",", ": "))
1258 | with open(filename, "w") as f:
1259 | f.write(SHORT_VERSION_PY % contents)
1260 |
1261 | print("set %s to '%s'" % (filename, versions["version"]))
1262 |
1263 |
1264 | def plus_or_dot(pieces):
1265 | """Return a + if we don't already have one, else return a ."""
1266 | if "+" in pieces.get("closest-tag", ""):
1267 | return "."
1268 | return "+"
1269 |
1270 |
1271 | def render_pep440(pieces):
1272 | """Build up version string, with post-release "local version identifier".
1273 |
1274 | Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you
1275 | get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty
1276 |
1277 | Exceptions:
1278 | 1: no tags. git_describe was just HEX. 0+untagged.DISTANCE.gHEX[.dirty]
1279 | """
1280 | if pieces["closest-tag"]:
1281 | rendered = pieces["closest-tag"]
1282 | if pieces["distance"] or pieces["dirty"]:
1283 | rendered += plus_or_dot(pieces)
1284 | rendered += "%d.g%s" % (pieces["distance"], pieces["short"])
1285 | if pieces["dirty"]:
1286 | rendered += ".dirty"
1287 | else:
1288 | # exception #1
1289 | rendered = "0+untagged.%d.g%s" % (pieces["distance"], pieces["short"])
1290 | if pieces["dirty"]:
1291 | rendered += ".dirty"
1292 | return rendered
1293 |
1294 |
1295 | def render_pep440_pre(pieces):
1296 | """TAG[.post.devDISTANCE] -- No -dirty.
1297 |
1298 | Exceptions:
1299 | 1: no tags. 0.post.devDISTANCE
1300 | """
1301 | if pieces["closest-tag"]:
1302 | rendered = pieces["closest-tag"]
1303 | if pieces["distance"]:
1304 | rendered += ".post.dev%d" % pieces["distance"]
1305 | else:
1306 | # exception #1
1307 | rendered = "0.post.dev%d" % pieces["distance"]
1308 | return rendered
1309 |
1310 |
1311 | def render_pep440_post(pieces):
1312 | """TAG[.postDISTANCE[.dev0]+gHEX] .
1313 |
1314 | The ".dev0" means dirty. Note that .dev0 sorts backwards
1315 | (a dirty tree will appear "older" than the corresponding clean one),
1316 | but you shouldn't be releasing software with -dirty anyways.
1317 |
1318 | Exceptions:
1319 | 1: no tags. 0.postDISTANCE[.dev0]
1320 | """
1321 | if pieces["closest-tag"]:
1322 | rendered = pieces["closest-tag"]
1323 | if pieces["distance"] or pieces["dirty"]:
1324 | rendered += ".post%d" % pieces["distance"]
1325 | if pieces["dirty"]:
1326 | rendered += ".dev0"
1327 | rendered += plus_or_dot(pieces)
1328 | rendered += "g%s" % pieces["short"]
1329 | else:
1330 | # exception #1
1331 | rendered = "0.post%d" % pieces["distance"]
1332 | if pieces["dirty"]:
1333 | rendered += ".dev0"
1334 | rendered += "+g%s" % pieces["short"]
1335 | return rendered
1336 |
1337 |
1338 | def render_pep440_old(pieces):
1339 | """TAG[.postDISTANCE[.dev0]] .
1340 |
1341 | The ".dev0" means dirty.
1342 |
1343 |   Exceptions:
1344 | 1: no tags. 0.postDISTANCE[.dev0]
1345 | """
1346 | if pieces["closest-tag"]:
1347 | rendered = pieces["closest-tag"]
1348 | if pieces["distance"] or pieces["dirty"]:
1349 | rendered += ".post%d" % pieces["distance"]
1350 | if pieces["dirty"]:
1351 | rendered += ".dev0"
1352 | else:
1353 | # exception #1
1354 | rendered = "0.post%d" % pieces["distance"]
1355 | if pieces["dirty"]:
1356 | rendered += ".dev0"
1357 | return rendered
1358 |
1359 |
1360 | def render_git_describe(pieces):
1361 | """TAG[-DISTANCE-gHEX][-dirty].
1362 |
1363 | Like 'git describe --tags --dirty --always'.
1364 |
1365 | Exceptions:
1366 | 1: no tags. HEX[-dirty] (note: no 'g' prefix)
1367 | """
1368 | if pieces["closest-tag"]:
1369 | rendered = pieces["closest-tag"]
1370 | if pieces["distance"]:
1371 | rendered += "-%d-g%s" % (pieces["distance"], pieces["short"])
1372 | else:
1373 | # exception #1
1374 | rendered = pieces["short"]
1375 | if pieces["dirty"]:
1376 | rendered += "-dirty"
1377 | return rendered
1378 |
1379 |
1380 | def render_git_describe_long(pieces):
1381 | """TAG-DISTANCE-gHEX[-dirty].
1382 |
1383 |   Like 'git describe --tags --dirty --always --long'.
1384 | The distance/hash is unconditional.
1385 |
1386 | Exceptions:
1387 | 1: no tags. HEX[-dirty] (note: no 'g' prefix)
1388 | """
1389 | if pieces["closest-tag"]:
1390 | rendered = pieces["closest-tag"]
1391 | rendered += "-%d-g%s" % (pieces["distance"], pieces["short"])
1392 | else:
1393 | # exception #1
1394 | rendered = pieces["short"]
1395 | if pieces["dirty"]:
1396 | rendered += "-dirty"
1397 | return rendered
1398 |
1399 |
1400 | def render(pieces, style):
1401 | """Render the given version pieces into the requested style."""
1402 | if pieces["error"]:
1403 | return {
1404 | "version": "unknown",
1405 | "full-revisionid": pieces.get("long"),
1406 | "dirty": None,
1407 | "error": pieces["error"],
1408 | "date": None
1409 | }
1410 |
1411 | if not style or style == "default":
1412 | style = "pep440" # the default
1413 |
1414 | if style == "pep440":
1415 | rendered = render_pep440(pieces)
1416 | elif style == "pep440-pre":
1417 | rendered = render_pep440_pre(pieces)
1418 | elif style == "pep440-post":
1419 | rendered = render_pep440_post(pieces)
1420 | elif style == "pep440-old":
1421 | rendered = render_pep440_old(pieces)
1422 | elif style == "git-describe":
1423 | rendered = render_git_describe(pieces)
1424 | elif style == "git-describe-long":
1425 | rendered = render_git_describe_long(pieces)
1426 | else:
1427 | raise ValueError("unknown style '%s'" % style)
1428 |
1429 | return {
1430 | "version": rendered,
1431 | "full-revisionid": pieces["long"],
1432 | "dirty": pieces["dirty"],
1433 | "error": None,
1434 | "date": pieces.get("date")
1435 | }
1436 |
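     | # Worked example (input values illustrative; outputs follow from the render_*
     | # helpers above): given pieces = {"closest-tag": "1.2", "distance": 3,
     | # "short": "abc1234", "dirty": True, "long": "<full sha>", "error": None,
     | # "date": None}, the styles render as:
     | #   pep440            -> "1.2+3.gabc1234.dirty"
     | #   pep440-pre        -> "1.2.post.dev3"
     | #   pep440-post       -> "1.2.post3.dev0+gabc1234"
     | #   pep440-old        -> "1.2.post3.dev0"
     | #   git-describe      -> "1.2-3-gabc1234-dirty"
     | #   git-describe-long -> "1.2-3-gabc1234-dirty"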
1437 |
1438 | class VersioneerBadRootError(Exception):
1439 | """The project root directory is unknown or missing key files."""
1440 |
1441 |
1442 | def get_versions(verbose=False):
1443 | """Get the project version from whatever source is available.
1444 |
1445 | Returns dict with two keys: 'version' and 'full'.
1446 | """
1447 | if "versioneer" in sys.modules:
1448 | # see the discussion in cmdclass.py:get_cmdclass()
1449 | del sys.modules["versioneer"]
1450 |
1451 | root = get_root()
1452 | cfg = get_config_from_root(root)
1453 |
1454 | assert cfg.VCS is not None, "please set [versioneer]VCS= in setup.cfg"
1455 | handlers = HANDLERS.get(cfg.VCS)
1456 | assert handlers, "unrecognized VCS '%s'" % cfg.VCS
1457 | verbose = verbose or cfg.verbose
1458 | assert cfg.versionfile_source is not None, \
1459 | "please set versioneer.versionfile_source"
1460 | assert cfg.tag_prefix is not None, "please set versioneer.tag_prefix"
1461 |
1462 | versionfile_abs = os.path.join(root, cfg.versionfile_source)
1463 |
1464 | # extract version from first of: _version.py, VCS command (e.g. 'git
1465 | # describe'), parentdir. This is meant to work for developers using a
1466 | # source checkout, for users of a tarball created by 'setup.py sdist',
1467 | # and for users of a tarball/zipball created by 'git archive' or github's
1468 | # download-from-tag feature or the equivalent in other VCSes.
1469 |
1470 | get_keywords_f = handlers.get("get_keywords")
1471 | from_keywords_f = handlers.get("keywords")
1472 | if get_keywords_f and from_keywords_f:
1473 | try:
1474 | keywords = get_keywords_f(versionfile_abs)
1475 | ver = from_keywords_f(keywords, cfg.tag_prefix, verbose)
1476 | if verbose:
1477 | print("got version from expanded keyword %s" % ver)
1478 | return ver
1479 | except NotThisMethod:
1480 | pass
1481 |
1482 | try:
1483 | ver = versions_from_file(versionfile_abs)
1484 | if verbose:
1485 | print("got version from file %s %s" % (versionfile_abs, ver))
1486 | return ver
1487 | except NotThisMethod:
1488 | pass
1489 |
1490 | from_vcs_f = handlers.get("pieces_from_vcs")
1491 | if from_vcs_f:
1492 | try:
1493 | pieces = from_vcs_f(cfg.tag_prefix, root, verbose)
1494 | ver = render(pieces, cfg.style)
1495 | if verbose:
1496 | print("got version from VCS %s" % ver)
1497 | return ver
1498 | except NotThisMethod:
1499 | pass
1500 |
1501 | try:
1502 | if cfg.parentdir_prefix:
1503 | ver = versions_from_parentdir(cfg.parentdir_prefix, root, verbose)
1504 | if verbose:
1505 | print("got version from parentdir %s" % ver)
1506 | return ver
1507 | except NotThisMethod:
1508 | pass
1509 |
1510 | if verbose:
1511 | print("unable to compute version")
1512 |
1513 | return {
1514 | "version": "0+unknown",
1515 | "full-revisionid": None,
1516 | "dirty": None,
1517 | "error": "unable to compute version",
1518 | "date": None
1519 | }
1520 |
1521 |
1522 | def get_version():
1523 | """Get the short version string for this project."""
1524 | return get_versions()["version"]
1525 |
1526 |
1527 | def get_cmdclass():
1528 | """Get the custom setuptools/distutils subclasses used by Versioneer."""
1529 | if "versioneer" in sys.modules:
1530 | del sys.modules["versioneer"]
1531 | # this fixes the "python setup.py develop" case (also 'install' and
1532 | # 'easy_install .'), in which subdependencies of the main project are
1533 | # built (using setup.py bdist_egg) in the same python process. Assume
1534 | # a main project A and a dependency B, which use different versions
1535 | # of Versioneer. A's setup.py imports A's Versioneer, leaving it in
1536 | # sys.modules by the time B's setup.py is executed, causing B to run
1537 | # with the wrong versioneer. Setuptools wraps the sub-dep builds in a
1538 |   # sandbox that restores sys.modules to its pre-build state, so the
1539 | # parent is protected against the child's "import versioneer". By
1540 | # removing ourselves from sys.modules here, before the child build
1541 | # happens, we protect the child from the parent's versioneer too.
1542 | # Also see https://github.com/warner/python-versioneer/issues/52
1543 |
1544 | cmds = {}
1545 |
1546 | # we add "version" to both distutils and setuptools
1547 | from distutils.core import Command
1548 |
1549 | class cmd_version(Command):
1550 | description = "report generated version string"
1551 | user_options = []
1552 | boolean_options = []
1553 |
1554 | def initialize_options(self):
1555 | pass
1556 |
1557 | def finalize_options(self):
1558 | pass
1559 |
1560 | def run(self):
1561 | vers = get_versions(verbose=True)
1562 | print("Version: %s" % vers["version"])
1563 | print(" full-revisionid: %s" % vers.get("full-revisionid"))
1564 | print(" dirty: %s" % vers.get("dirty"))
1565 | print(" date: %s" % vers.get("date"))
1566 | if vers["error"]:
1567 | print(" error: %s" % vers["error"])
1568 |
1569 | cmds["version"] = cmd_version
1570 |
1571 | # we override "build_py" in both distutils and setuptools
1572 | #
1573 | # most invocation pathways end up running build_py:
1574 | # distutils/build -> build_py
1575 | # distutils/install -> distutils/build ->..
1576 | # setuptools/bdist_wheel -> distutils/install ->..
1577 | # setuptools/bdist_egg -> distutils/install_lib -> build_py
1578 | # setuptools/install -> bdist_egg ->..
1579 | # setuptools/develop -> ?
1580 | # pip install:
1581 | # copies source tree to a tempdir before running egg_info/etc
1582 | # if .git isn't copied too, 'git describe' will fail
1583 | # then does setup.py bdist_wheel, or sometimes setup.py install
1584 | # setup.py egg_info -> ?
1585 |
1586 | # we override different "build_py" commands for both environments
1587 | if "setuptools" in sys.modules:
1588 | from setuptools.command.build_py import build_py as _build_py
1589 | else:
1590 | from distutils.command.build_py import build_py as _build_py
1591 |
1592 | class cmd_build_py(_build_py):
1593 |
1594 | def run(self):
1595 | root = get_root()
1596 | cfg = get_config_from_root(root)
1597 | versions = get_versions()
1598 | _build_py.run(self)
1599 | # now locate _version.py in the new build/ directory and replace
1600 | # it with an updated value
1601 | if cfg.versionfile_build:
1602 | target_versionfile = os.path.join(self.build_lib, cfg.versionfile_build)
1603 | print("UPDATING %s" % target_versionfile)
1604 | write_to_version_file(target_versionfile, versions)
1605 |
1606 | cmds["build_py"] = cmd_build_py
1607 |
1608 | if "cx_Freeze" in sys.modules: # cx_freeze enabled?
1609 | from cx_Freeze.dist import build_exe as _build_exe
1610 |
1611 | # nczeczulin reports that py2exe won't like the pep440-style string
1612 | # as FILEVERSION, but it can be used for PRODUCTVERSION, e.g.
1613 | # setup(console=[{
1614 | # "version": versioneer.get_version().split("+", 1)[0], # FILEVERSION
1615 | # "product_version": versioneer.get_version(),
1616 | # ...
1617 |
1618 | class cmd_build_exe(_build_exe):
1619 |
1620 | def run(self):
1621 | root = get_root()
1622 | cfg = get_config_from_root(root)
1623 | versions = get_versions()
1624 | target_versionfile = cfg.versionfile_source
1625 | print("UPDATING %s" % target_versionfile)
1626 | write_to_version_file(target_versionfile, versions)
1627 |
1628 | _build_exe.run(self)
1629 | os.unlink(target_versionfile)
1630 | with open(cfg.versionfile_source, "w") as f:
1631 | LONG = LONG_VERSION_PY[cfg.VCS]
1632 | f.write(
1633 | LONG % {
1634 | "DOLLAR": "$",
1635 | "STYLE": cfg.style,
1636 | "TAG_PREFIX": cfg.tag_prefix,
1637 | "PARENTDIR_PREFIX": cfg.parentdir_prefix,
1638 | "VERSIONFILE_SOURCE": cfg.versionfile_source,
1639 | })
1640 |
1641 | cmds["build_exe"] = cmd_build_exe
1642 | del cmds["build_py"]
1643 |
1644 | if 'py2exe' in sys.modules: # py2exe enabled?
1645 | try:
1646 | from py2exe.distutils_buildexe import py2exe as _py2exe # py3
1647 | except ImportError:
1648 | from py2exe.build_exe import py2exe as _py2exe # py2
1649 |
1650 | class cmd_py2exe(_py2exe):
1651 |
1652 | def run(self):
1653 | root = get_root()
1654 | cfg = get_config_from_root(root)
1655 | versions = get_versions()
1656 | target_versionfile = cfg.versionfile_source
1657 | print("UPDATING %s" % target_versionfile)
1658 | write_to_version_file(target_versionfile, versions)
1659 |
1660 | _py2exe.run(self)
1661 | os.unlink(target_versionfile)
1662 | with open(cfg.versionfile_source, "w") as f:
1663 | LONG = LONG_VERSION_PY[cfg.VCS]
1664 | f.write(
1665 | LONG % {
1666 | "DOLLAR": "$",
1667 | "STYLE": cfg.style,
1668 | "TAG_PREFIX": cfg.tag_prefix,
1669 | "PARENTDIR_PREFIX": cfg.parentdir_prefix,
1670 | "VERSIONFILE_SOURCE": cfg.versionfile_source,
1671 | })
1672 |
1673 | cmds["py2exe"] = cmd_py2exe
1674 |
1675 | # we override different "sdist" commands for both environments
1676 | if "setuptools" in sys.modules:
1677 | from setuptools.command.sdist import sdist as _sdist
1678 | else:
1679 | from distutils.command.sdist import sdist as _sdist
1680 |
1681 | class cmd_sdist(_sdist):
1682 |
1683 | def run(self):
1684 | versions = get_versions()
1685 | self._versioneer_generated_versions = versions
1686 | # unless we update this, the command will keep using the old
1687 | # version
1688 | self.distribution.metadata.version = versions["version"]
1689 | return _sdist.run(self)
1690 |
1691 | def make_release_tree(self, base_dir, files):
1692 | root = get_root()
1693 | cfg = get_config_from_root(root)
1694 | _sdist.make_release_tree(self, base_dir, files)
1695 | # now locate _version.py in the new base_dir directory
1696 | # (remembering that it may be a hardlink) and replace it with an
1697 | # updated value
1698 | target_versionfile = os.path.join(base_dir, cfg.versionfile_source)
1699 | print("UPDATING %s" % target_versionfile)
1700 | write_to_version_file(target_versionfile,
1701 | self._versioneer_generated_versions)
1702 |
1703 | cmds["sdist"] = cmd_sdist
1704 |
1705 | return cmds
1706 |
1707 |
1708 | CONFIG_ERROR = """
1709 | setup.cfg is missing the necessary Versioneer configuration. You need
1710 | a section like:
1711 |
1712 | [versioneer]
1713 | VCS = git
1714 | style = pep440
1715 | versionfile_source = src/myproject/_version.py
1716 | versionfile_build = myproject/_version.py
1717 | tag_prefix =
1718 | parentdir_prefix = myproject-
1719 |
1720 | You will also need to edit your setup.py to use the results:
1721 |
1722 | import versioneer
1723 | setup(version=versioneer.get_version(),
1724 | cmdclass=versioneer.get_cmdclass(), ...)
1725 |
1726 | Please read the docstring in ./versioneer.py for configuration instructions,
1727 | edit setup.cfg, and re-run the installer or 'python versioneer.py setup'.
1728 | """
1729 |
1730 | SAMPLE_CONFIG = """
1731 | # See the docstring in versioneer.py for instructions. Note that you must
1732 | # re-run 'versioneer.py setup' after changing this section, and commit the
1733 | # resulting files.
1734 |
1735 | [versioneer]
1736 | #VCS = git
1737 | #style = pep440
1738 | #versionfile_source =
1739 | #versionfile_build =
1740 | #tag_prefix =
1741 | #parentdir_prefix =
1742 |
1743 | """
1744 |
1745 | INIT_PY_SNIPPET = """
1746 | from ._version import get_versions
1747 | __version__ = get_versions()['version']
1748 | del get_versions
1749 | """
1750 |
1751 |
1752 | def do_setup():
1753 | """Main VCS-independent setup function for installing Versioneer."""
1754 | root = get_root()
1755 | try:
1756 | cfg = get_config_from_root(root)
1757 | except (EnvironmentError, configparser.NoSectionError,
1758 | configparser.NoOptionError) as e:
1759 | if isinstance(e, (EnvironmentError, configparser.NoSectionError)):
1760 | print("Adding sample versioneer config to setup.cfg", file=sys.stderr)
1761 | with open(os.path.join(root, "setup.cfg"), "a") as f:
1762 | f.write(SAMPLE_CONFIG)
1763 | print(CONFIG_ERROR, file=sys.stderr)
1764 | return 1
1765 |
1766 | print(" creating %s" % cfg.versionfile_source)
1767 | with open(cfg.versionfile_source, "w") as f:
1768 | LONG = LONG_VERSION_PY[cfg.VCS]
1769 | f.write(
1770 | LONG % {
1771 | "DOLLAR": "$",
1772 | "STYLE": cfg.style,
1773 | "TAG_PREFIX": cfg.tag_prefix,
1774 | "PARENTDIR_PREFIX": cfg.parentdir_prefix,
1775 | "VERSIONFILE_SOURCE": cfg.versionfile_source,
1776 | })
1777 |
1778 | ipy = os.path.join(os.path.dirname(cfg.versionfile_source), "__init__.py")
1779 | if os.path.exists(ipy):
1780 | try:
1781 | with open(ipy, "r") as f:
1782 | old = f.read()
1783 | except EnvironmentError:
1784 | old = ""
1785 | if INIT_PY_SNIPPET not in old:
1786 | print(" appending to %s" % ipy)
1787 | with open(ipy, "a") as f:
1788 | f.write(INIT_PY_SNIPPET)
1789 | else:
1790 | print(" %s unmodified" % ipy)
1791 | else:
1792 | print(" %s doesn't exist, ok" % ipy)
1793 | ipy = None
1794 |
1795 | # Make sure both the top-level "versioneer.py" and versionfile_source
1796 | # (PKG/_version.py, used by runtime code) are in MANIFEST.in, so
1797 | # they'll be copied into source distributions. Pip won't be able to
1798 | # install the package without this.
1799 | manifest_in = os.path.join(root, "MANIFEST.in")
1800 | simple_includes = set()
1801 | try:
1802 | with open(manifest_in, "r") as f:
1803 | for line in f:
1804 | if line.startswith("include "):
1805 | for include in line.split()[1:]:
1806 | simple_includes.add(include)
1807 | except EnvironmentError:
1808 | pass
1809 | # That doesn't cover everything MANIFEST.in can do
1810 | # (http://docs.python.org/2/distutils/sourcedist.html#commands), so
1811 | # it might give some false negatives. Appending redundant 'include'
1812 | # lines is safe, though.
1813 | if "versioneer.py" not in simple_includes:
1814 | print(" appending 'versioneer.py' to MANIFEST.in")
1815 | with open(manifest_in, "a") as f:
1816 | f.write("include versioneer.py\n")
1817 | else:
1818 | print(" 'versioneer.py' already in MANIFEST.in")
1819 | if cfg.versionfile_source not in simple_includes:
1820 | print(" appending versionfile_source ('%s') to MANIFEST.in" %
1821 | cfg.versionfile_source)
1822 | with open(manifest_in, "a") as f:
1823 | f.write("include %s\n" % cfg.versionfile_source)
1824 | else:
1825 | print(" versionfile_source already in MANIFEST.in")
1826 |
1827 | # Make VCS-specific changes. For git, this means creating/changing
1828 | # .gitattributes to mark _version.py for export-subst keyword
1829 | # substitution.
1830 | do_vcs_install(manifest_in, cfg.versionfile_source, ipy)
1831 | return 0
1832 |
1833 |
1834 | def scan_setup_py():
1835 | """Validate the contents of setup.py against Versioneer's expectations."""
1836 | found = set()
1837 | setters = False
1838 | errors = 0
1839 | with open("setup.py", "r") as f:
1840 | for line in f.readlines():
1841 | if "import versioneer" in line:
1842 | found.add("import")
1843 | if "versioneer.get_cmdclass()" in line:
1844 | found.add("cmdclass")
1845 | if "versioneer.get_version()" in line:
1846 | found.add("get_version")
1847 | if "versioneer.VCS" in line:
1848 | setters = True
1849 | if "versioneer.versionfile_source" in line:
1850 | setters = True
1851 | if len(found) != 3:
1852 | print("")
1853 | print("Your setup.py appears to be missing some important items")
1854 | print("(but I might be wrong). Please make sure it has something")
1855 | print("roughly like the following:")
1856 | print("")
1857 | print(" import versioneer")
1858 | print(" setup( version=versioneer.get_version(),")
1859 | print(" cmdclass=versioneer.get_cmdclass(), ...)")
1860 | print("")
1861 | errors += 1
1862 | if setters:
1863 | print("You should remove lines like 'versioneer.VCS = ' and")
1864 | print("'versioneer.versionfile_source = ' . This configuration")
1865 | print("now lives in setup.cfg, and should be removed from setup.py")
1866 | print("")
1867 | errors += 1
1868 | return errors
1869 |
1870 |
1871 | if __name__ == "__main__":
1872 | cmd = sys.argv[1]
1873 | if cmd == "setup":
1874 | errors = do_setup()
1875 | errors += scan_setup_py()
1876 | if errors:
1877 | sys.exit(1)
1878 |
--------------------------------------------------------------------------------