├── s3contents ├── tests │ ├── __init__.py │ ├── utils.py │ ├── test_gcsmanager.py │ ├── test_gcsmanager_prefix.py │ ├── test_s3manager.py │ └── test_s3manager_prefix.py ├── compat.py ├── __init__.py ├── gcsmanager.py ├── genericfs.py ├── ipycompat.py ├── s3manager.py ├── gcs_fs.py ├── s3_fs.py ├── genericmanager.py └── _version.py ├── .gitattributes ├── RELEASE.md ├── requirements.txt ├── MANIFEST.in ├── .style.yapf ├── .coveragerc ├── environment.yml ├── setup.cfg ├── ci ├── start_minio.sh └── install_python.sh ├── .travis.yml ├── setup.py ├── .gitignore ├── DEV.md ├── Makefile ├── README.md ├── LICENSE └── versioneer.py /s3contents/tests/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /.gitattributes: -------------------------------------------------------------------------------- 1 | s3contents/_version.py export-subst 2 | -------------------------------------------------------------------------------- /RELEASE.md: -------------------------------------------------------------------------------- 1 | Tag version: `git tag 1.0.0` 2 | Build: `make build` 3 | Upload with twine: `make upload` 4 | 5 | -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | notebook 2 | ipykernel<5.* 3 | boto3 4 | requests 5 | s3fs==0.1.5 6 | gcsfs==0.1.2 7 | nose 8 | -------------------------------------------------------------------------------- /MANIFEST.in: -------------------------------------------------------------------------------- 1 | include LICENSE 2 | include *requirements.txt 3 | include versioneer.py 4 | include s3contents/_version.py 5 | -------------------------------------------------------------------------------- /.style.yapf: -------------------------------------------------------------------------------- 1 | 
[style] 2 | based_on_style = google 3 | spaces_before_comment = 4 4 | split_before_logical_operator = true 5 | column_limit = 100 6 | -------------------------------------------------------------------------------- /.coveragerc: -------------------------------------------------------------------------------- 1 | [run] 2 | omit = 3 | s3contents/tests/* 4 | s3contents/_version.py 5 | s3contents/gcs_fs.py 6 | s3contents/gcsmanager.py 7 | source = 8 | s3contents 9 | -------------------------------------------------------------------------------- /s3contents/compat.py: -------------------------------------------------------------------------------- 1 | import six 2 | import s3fs 3 | 4 | if six.PY3: 5 | FileNotFoundError = FileNotFoundError 6 | else: 7 | try: 8 | FileNotFoundError = s3fs.core.FileNotFoundError 9 | except: 10 | class FileNotFoundError(IOError): 11 | pass 12 | -------------------------------------------------------------------------------- /environment.yml: -------------------------------------------------------------------------------- 1 | name: s3contents-dev 2 | channels: 3 | - conda-forge 4 | dependencies: 5 | - python=3.7 6 | - pip: 7 | - pytest 8 | - boto3 9 | - requests 10 | - s3fs 11 | - gcsfs 12 | - nose 13 | - mock 14 | - twine>=1.11.0 15 | - wheel>=0.31.0 16 | -------------------------------------------------------------------------------- /setup.cfg: -------------------------------------------------------------------------------- 1 | # See the docstring in versioneer.py for instructions. Note that you must 2 | # re-run 'versioneer.py setup' after changing this section, and commit the 3 | # resulting files. 
4 | 5 | [versioneer] 6 | VCS = git 7 | style = pep440 8 | versionfile_source = s3contents/_version.py 9 | versionfile_build = s3contents/_version.py 10 | tag_prefix = 11 | parentdir_prefix = s3contents- 12 | -------------------------------------------------------------------------------- /ci/start_minio.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | # go get -u github.com/minio/minio 4 | curl https://dl.minio.io/server/minio/release/linux-amd64/archive/minio.RELEASE.2018-06-29T02-11-29Z -o minio 5 | chmod +x minio 6 | 7 | export MINIO_ACCESS_KEY=access-key 8 | export MINIO_SECRET_KEY=secret-key 9 | 10 | mkdir -p ~/s3/notebooks 11 | ./minio version 12 | ./minio server ~/s3 > /tmp/minio.log 2>&1 & 13 | -------------------------------------------------------------------------------- /s3contents/__init__.py: -------------------------------------------------------------------------------- 1 | from ._version import get_versions 2 | __version__ = get_versions()["version"] 3 | del get_versions 4 | 5 | # We need this try/except here for tests to work 6 | try: 7 | # This is needed for notebook 5.0, 5.1, 5.2(maybe) 8 | # https://github.com/jupyter/notebook/issues/2798 9 | import notebook.transutils 10 | except: 11 | # Will fail in notebook 4.X - its ok 12 | pass 13 | 14 | from .s3manager import S3ContentsManager 15 | from .gcsmanager import GCSContentsManager 16 | -------------------------------------------------------------------------------- /ci/install_python.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | # Install miniconda 4 | wget http://repo.continuum.io/miniconda/Miniconda3-3.19.0-Linux-x86_64.sh -O ~/miniconda.sh 5 | bash ~/miniconda.sh -b -p $HOME/miniconda 6 | export PATH=$HOME/miniconda/bin:$PATH 7 | 8 | conda config --set always_yes yes --set changeps1 no 9 | conda --version 10 | 11 | # Create `test` environment 12 | conda 
create -n test python=$TRAVIS_PYTHON_VERSION 13 | export PATH=$HOME/miniconda/envs/test/bin:$PATH 14 | pip install pytest pytest-cov python-coveralls nose mock 15 | pip install -r requirements.txt 16 | 17 | # Install specific Jupyter Notebook version 18 | pip install -U "notebook==$JUPYTER_VERSION" 19 | -------------------------------------------------------------------------------- /.travis.yml: -------------------------------------------------------------------------------- 1 | language: python 2 | sudo: false 3 | 4 | notifications: 5 | email: false 6 | 7 | python: 8 | - 2.7 9 | - 3.5 10 | 11 | env: 12 | - JUPYTER_VERSION="4.*" 13 | - JUPYTER_VERSION="5.1" 14 | - JUPYTER_VERSION="5.2" 15 | - JUPYTER_VERSION="5.3" 16 | - JUPYTER_VERSION="5.4" 17 | - JUPYTER_VERSION="5.*" 18 | 19 | before_install: 20 | - pwd 21 | - source ci/start_minio.sh 22 | - source ci/install_python.sh 23 | 24 | install: 25 | - which python 26 | - pip freeze 27 | - python setup.py sdist 28 | - pip install dist/*.tar.gz 29 | - pip freeze 30 | 31 | script: 32 | - py.test -s -vv --cov=s3contents --cov-report term-missing 33 | 34 | after_success: 35 | - coveralls 36 | 37 | after_failure: 38 | - cat /tmp/minio.log 39 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | import os 2 | from setuptools import setup 3 | from setuptools import find_packages 4 | 5 | import versioneer 6 | 7 | BASE_DIR = os.path.abspath(os.path.dirname(__file__)) 8 | 9 | def read_file(filename): 10 | filepath = os.path.join(BASE_DIR, filename) 11 | with open(filepath) as f: 12 | return f.read() 13 | 14 | REQUIREMENTS = read_file("requirements.txt").splitlines() 15 | 16 | 17 | setup( 18 | name="s3contents", 19 | version=versioneer.get_version(), 20 | cmdclass=versioneer.get_cmdclass(), 21 | description="A S3-backed ContentsManager implementation for Jupyter", 22 | 
long_description=read_file('README.md'), 23 | long_description_content_type="text/markdown", 24 | url="https://github.com/danielfrg/s3contents", 25 | maintainer="Daniel Rodriguez", 26 | maintainer_email="df.rodriguez143@gmail.com", 27 | license="Apache 2.0", 28 | packages=find_packages(), 29 | install_requires=REQUIREMENTS, 30 | zip_safe=False, 31 | ) 32 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | env/ 12 | build/ 13 | develop-eggs/ 14 | dist/ 15 | downloads/ 16 | eggs/ 17 | .eggs/ 18 | lib/ 19 | lib64/ 20 | parts/ 21 | sdist/ 22 | var/ 23 | *.egg-info/ 24 | .installed.cfg 25 | *.egg 26 | 27 | # PyInstaller 28 | # Usually these files are written by a python script from a template 29 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 
30 | *.manifest 31 | *.spec 32 | 33 | # Installer logs 34 | pip-log.txt 35 | pip-delete-this-directory.txt 36 | 37 | # Unit test / coverage reports 38 | htmlcov/ 39 | .tox/ 40 | .coverage 41 | .coverage.* 42 | .cache 43 | nosetests.xml 44 | coverage.xml 45 | *,cover 46 | .hypothesis/ 47 | .pytest_cache 48 | 49 | # Translations 50 | *.mo 51 | *.pot 52 | 53 | # Django stuff: 54 | *.log 55 | 56 | # Sphinx documentation 57 | docs/_build/ 58 | docs-internal/_build/ 59 | docs-scale/_build/ 60 | 61 | # PyBuilder 62 | target/ 63 | 64 | # Others 65 | .ipynb_checkpoints 66 | 67 | s3-data 68 | -------------------------------------------------------------------------------- /s3contents/tests/utils.py: -------------------------------------------------------------------------------- 1 | import os 2 | import pytest 3 | 4 | 5 | def mark_class(marker): 6 | '''Workaround for https://github.com/pytest-dev/pytest/issues/568''' 7 | import types 8 | def copy_func(f): 9 | try: 10 | return types.FunctionType(f.__code__, f.__globals__, 11 | name=f.__name__, argdefs=f.__defaults__, 12 | closure=f.__closure__) 13 | except AttributeError: 14 | return types.FunctionType(f.func_code, f.func_globals, 15 | name=f.func_name, 16 | argdefs=f.func_defaults, 17 | closure=f.func_closure) 18 | 19 | def mark(cls): 20 | for method in dir(cls): 21 | if method.startswith('test_'): 22 | f = copy_func(getattr(cls, method)) 23 | setattr(cls, method, marker(f)) 24 | return cls 25 | return mark 26 | 27 | RUN_GCSFS_TESTS = "RUN_GCSFS_TESTS" not in os.environ 28 | GCS_TEST = mark_class(pytest.mark.skipif(RUN_GCSFS_TESTS, reason="Only run GCS if tell to")) 29 | -------------------------------------------------------------------------------- /DEV.md: -------------------------------------------------------------------------------- 1 | Create dev environment 2 | 3 | ``` 4 | make env 5 | conda activate s3contents-dev 6 | make deps 7 | ``` 8 | 9 | Start minio 10 | 11 | ``` 12 | make minio 13 | ``` 14 | 15 | Edit 
`~/.jupyter/jupyter_notebook_config.py`: 16 | 17 | ```python 18 | c = get_config() 19 | 20 | # Tell Jupyter to use S3ContentsManager for storage 21 | from s3contents import S3ContentsManager 22 | c.NotebookApp.contents_manager_class = S3ContentsManager 23 | c.S3ContentsManager.endpoint_url = "http://localhost:9000" 24 | c.S3ContentsManager.access_key_id = "access-key" 25 | c.S3ContentsManager.secret_access_key = "secret-key" 26 | c.S3ContentsManager.bucket = "notebooks" 27 | 28 | # from s3contents import GCSContentsManager 29 | # c.NotebookApp.contents_manager_class = GCSContentsManager 30 | # c.GCSContentsManager.project = "continuum-compute" 31 | # c.GCSContentsManager.token = "~/.config/gcloud/application_default_credentials.json" 32 | # c.GCSContentsManager.bucket = "gcsfs-test" 33 | # c.GCSContentsManager.prefix = "this/is/the/prefix" 34 | 35 | c.NotebookApp.open_browser = False 36 | c.NotebookApp.tornado_settings = {"debug": True} 37 | ``` 38 | 39 | Run `jupyter notebook` in the `s3contents-dev` environment. 40 | -------------------------------------------------------------------------------- /Makefile: -------------------------------------------------------------------------------- 1 | PWD := $(shell pwd) 2 | S3DIR := ${PWD}/s3-data 3 | 4 | all: tests 5 | 6 | .PHONY: minio 7 | minio: ## Run minio server 8 | mkdir -p ${S3DIR}/notebooks; docker run -p 9000:9000 -v ${S3DIR}:/export -e MINIO_ACCESS_KEY=access-key -e MINIO_SECRET_KEY=secret-key minio/minio:RELEASE.2018-06-29T02-11-29Z server /export 9 | 10 | .PHONY: tests 11 | tests: ## Run tests 12 | py.test -s -vv s3contents/tests 13 | 14 | .PHONY: build 15 | build: ## Build package 16 | python setup.py sdist 17 | 18 | .PHONY: upload 19 | upload: ## Upload package to pypi 20 | twine upload dist/*.tar.gz 21 | 22 | .PHONY: env 23 | env: ## Create dev environment 24 | @conda create -y -n s3contents-dev python=3.7 25 | 26 | .PHONY: deps 27 | deps: ## Install dev dependencies 28 | @pip install pytest pytest-cov
python-coveralls nose mock 29 | @pip install -r requirements.txt 30 | @pip install -e . 31 | 32 | .PHONY: clean 33 | clean: ## Remove s3 data and the dev environment 34 | rm -rf ${S3DIR} ; source deactivate; conda env remove -y -n s3contents-dev 35 | 36 | .PHONY: help 37 | help: 38 | @grep -E '^[a-zA-Z_-]+:.*?##.*$$' $(MAKEFILE_LIST) | sort | awk 'BEGIN {FS = ":.*?##"; OFS="\t\t"}; {printf "\033[36m%-30s\033[0m %s\n", $$1, ($$2==""?"":$$2)}' 39 | -------------------------------------------------------------------------------- /s3contents/tests/test_gcsmanager.py: -------------------------------------------------------------------------------- 1 | import os 2 | import pytest 3 | 4 | from s3contents.tests.utils import GCS_TEST 5 | from s3contents import GCSContentsManager 6 | from s3contents.ipycompat import TestContentsManager 7 | 8 | 9 | @GCS_TEST 10 | class GCSContentsManagerTestCase(TestContentsManager): 11 | 12 | def setUp(self): 13 | """ 14 | This setup is a hardcoded to run on my laptop and GCP account :) 15 | """ 16 | self.contents_manager = GCSContentsManager( 17 | project="continuum-compute", 18 | token="~/.config/gcloud/application_default_credentials.json", 19 | bucket="gcsfs-test") 20 | 21 | self.tearDown() 22 | 23 | def tearDown(self): 24 | for item in self.contents_manager.fs.ls(""): 25 | self.contents_manager.fs.rm(item) 26 | self.contents_manager.fs.init() 27 | 28 | # Overwrites from TestContentsManager 29 | 30 | def make_dir(self, api_path): 31 | self.contents_manager.new( 32 | model={"type": "directory"}, 33 | path=api_path,) 34 | 35 | 36 | # This needs to be removed or else we'll run the main IPython tests as well.
37 | del TestContentsManager 38 | -------------------------------------------------------------------------------- /s3contents/tests/test_gcsmanager_prefix.py: -------------------------------------------------------------------------------- 1 | import os 2 | import pytest 3 | 4 | from s3contents.tests.utils import GCS_TEST 5 | from s3contents import GCSContentsManager 6 | from s3contents.ipycompat import TestContentsManager 7 | 8 | 9 | @GCS_TEST 10 | class GCSContentsManagerTestCase_prefix(TestContentsManager): 11 | 12 | def setUp(self): 13 | """ 14 | This setup is a hardcoded to run on my laptop and GCP account :) 15 | """ 16 | self.contents_manager = GCSContentsManager( 17 | project="continuum-compute", 18 | token="~/.config/gcloud/application_default_credentials.json", 19 | bucket="gcsfs-test", 20 | prefix="this/is/the/prefix") 21 | 22 | self.tearDown() 23 | 24 | def tearDown(self): 25 | for item in self.contents_manager.fs.ls(""): 26 | self.contents_manager.fs.rm(item) 27 | self.contents_manager.fs.init() 28 | 29 | # Overwrites from TestContentsManager 30 | 31 | def make_dir(self, api_path): 32 | self.contents_manager.new( 33 | model={"type": "directory"}, 34 | path=api_path,) 35 | 36 | 37 | # This needs to be removed or else we'll run the main IPython tests as well. 
38 | del TestContentsManager 39 | -------------------------------------------------------------------------------- /s3contents/tests/test_s3manager.py: -------------------------------------------------------------------------------- 1 | from s3contents.ipycompat import TestContentsManager 2 | 3 | from s3contents import S3ContentsManager 4 | 5 | 6 | class S3ContentsManagerTestCase(TestContentsManager): 7 | 8 | def setUp(self): 9 | """ 10 | This setup is a hardcoded to the use a little minio server that run on travis CI or play.minio.io:9000 11 | """ 12 | self.contents_manager = S3ContentsManager( 13 | access_key_id="access-key", 14 | secret_access_key="secret-key", 15 | endpoint_url="http://127.0.0.1:9000", 16 | bucket="notebooks", 17 | # endpoint_url="https://play.minio.io:9000", 18 | # bucket="s3contents-test2", 19 | signature_version="s3v4") 20 | 21 | self.tearDown() 22 | 23 | def tearDown(self): 24 | for item in self.contents_manager.fs.ls(""): 25 | self.contents_manager.fs.rm(item) 26 | self.contents_manager.fs.init() 27 | 28 | # Overwrites from TestContentsManager 29 | 30 | def make_dir(self, api_path): 31 | self.contents_manager.new( 32 | model={"type": "directory"}, 33 | path=api_path,) 34 | 35 | 36 | # This needs to be removed or else we'll run the main IPython tests as well. 
37 | del TestContentsManager 38 | -------------------------------------------------------------------------------- /s3contents/tests/test_s3manager_prefix.py: -------------------------------------------------------------------------------- 1 | from s3contents.ipycompat import TestContentsManager 2 | 3 | from s3contents import S3ContentsManager 4 | 5 | 6 | class S3ContentsManagerTestCase_prefix(TestContentsManager): 7 | 8 | def setUp(self): 9 | """ 10 | This setup is a hardcoded to the use a little minio server that run on travis CI or play.minio.io:9000 11 | """ 12 | self.contents_manager = S3ContentsManager( 13 | access_key_id="access-key", 14 | secret_access_key="secret-key", 15 | endpoint_url="http://127.0.0.1:9000", 16 | bucket="notebooks", 17 | # endpoint_url="https://play.minio.io:9000", 18 | # bucket="s3contents-test", 19 | prefix="this/is/the/prefix", 20 | signature_version="s3v4") 21 | 22 | self.tearDown() 23 | 24 | def tearDown(self): 25 | for item in self.contents_manager.fs.ls(""): 26 | self.contents_manager.fs.rm(item) 27 | self.contents_manager.fs.init() 28 | 29 | # Overwrites from TestContentsManager 30 | 31 | def make_dir(self, api_path): 32 | self.contents_manager.new( 33 | model={"type": "directory"}, 34 | path=api_path,) 35 | 36 | 37 | # This needs to be removed or else we'll run the main IPython tests as well. 
38 | del TestContentsManager 39 | -------------------------------------------------------------------------------- /s3contents/gcsmanager.py: -------------------------------------------------------------------------------- 1 | import os 2 | import json 3 | import mimetypes 4 | import datetime 5 | 6 | from tornado.web import HTTPError 7 | 8 | from s3contents.gcs_fs import GCSFS 9 | from s3contents.ipycompat import Unicode 10 | from s3contents.genericmanager import GenericContentsManager 11 | 12 | 13 | class GCSContentsManager(GenericContentsManager): 14 | 15 | project = Unicode( 16 | help="GCP Project", allow_none=True, default_value=None).tag( 17 | config=True, env="JPYNB_GCS_PROJECT") 18 | token = Unicode( 19 | help="Path to the GCP token", allow_none=True, default_value=None).tag( 20 | config=True, env="JPYNB_GCS_TOKEN_PATH") 21 | 22 | region_name = Unicode( 23 | "us-east-1", help="Region name").tag( 24 | config=True, env="JPYNB_GCS_REGION_NAME") 25 | bucket = Unicode( 26 | "notebooks", help="Bucket name to store notebooks").tag( 27 | config=True, env="JPYNB_GCS_BUCKET") 28 | 29 | prefix = Unicode("", help="Prefix path inside the specified bucket").tag(config=True) 30 | separator = Unicode("/", help="Path separator").tag(config=True) 31 | 32 | def __init__(self, *args, **kwargs): 33 | super(GCSContentsManager, self).__init__(*args, **kwargs) 34 | 35 | self._fs = GCSFS( 36 | log=self.log, 37 | project=self.project, 38 | token=self.token, 39 | bucket=self.bucket, 40 | prefix=self.prefix, 41 | separator=self.separator) 42 | -------------------------------------------------------------------------------- /s3contents/genericfs.py: -------------------------------------------------------------------------------- 1 | """ 2 | Generic FileSystem class to be used by the Content Manager 3 | """ 4 | 5 | from s3contents.ipycompat import HasTraits 6 | 7 | 8 | class GenericFS(HasTraits): 9 | 10 | def ls(self, path=""): 11 | raise NotImplemented("Should be implemented by the file 
system abstraction") 12 | 13 | def isfile(self, path): 14 | raise NotImplemented("Should be implemented by the file system abstraction") 15 | 16 | def isdir(self, path): 17 | raise NotImplemented("Should be implemented by the file system abstraction") 18 | 19 | def mv(self, old_path, new_path): 20 | raise NotImplemented("Should be implemented by the file system abstraction") 21 | 22 | def cp(self, old_path, new_path): 23 | raise NotImplemented("Should be implemented by the file system abstraction") 24 | 25 | def rm(self, path): 26 | raise NotImplemented("Should be implemented by the file system abstraction") 27 | 28 | def mkdir(self, path): 29 | raise NotImplemented("Should be implemented by the file system abstraction") 30 | 31 | def read(self, path): 32 | raise NotImplemented("Should be implemented by the file system abstraction") 33 | 34 | def lstat(self, path): 35 | raise NotImplemented("Should be implemented by the file system abstraction") 36 | 37 | def write(self, path, content, format): 38 | raise NotImplemented("Should be implemented by the file system abstraction") 39 | 40 | 41 | class GenericFSError(Exception): 42 | pass 43 | 44 | 45 | class NoSuchFile(GenericFSError): 46 | 47 | def __init__(self, path, *args, **kwargs): 48 | self.path = path 49 | self.message = "No such file or directory: {}".format(path) 50 | super(NoSuchFile, self).__init__(self.message, *args, **kwargs) 51 | -------------------------------------------------------------------------------- /s3contents/ipycompat.py: -------------------------------------------------------------------------------- 1 | """ 2 | Utilities for managing compat between notebook versions. 
3 | 4 | Taken from: https://github.com/quantopian/pgcontents/blob/master/pgcontents/utils/ipycompat.py 5 | """ 6 | 7 | import notebook 8 | if notebook.version_info[0] >= 6: # noqa 9 | raise ImportError("Jupyter Notebook versions 6 and up are not supported.") 10 | 11 | from traitlets.config import Config 12 | from notebook.services.contents.checkpoints import ( 13 | Checkpoints, 14 | GenericCheckpointsMixin, 15 | ) 16 | from notebook.services.contents.filemanager import FileContentsManager 17 | from notebook.services.contents.filecheckpoints import ( 18 | GenericFileCheckpoints 19 | ) 20 | from notebook.services.contents.manager import ContentsManager 21 | from notebook.services.contents.tests.test_manager import ( 22 | TestContentsManager 23 | ) 24 | from notebook.services.contents.tests.test_contents_api import ( 25 | APITest 26 | ) 27 | from notebook.tests.launchnotebook import assert_http_error 28 | from notebook.utils import to_os_path 29 | from nbformat import from_dict, reads, writes 30 | from nbformat.v4.nbbase import ( 31 | new_code_cell, 32 | new_markdown_cell, 33 | new_notebook, 34 | new_raw_cell, 35 | ) 36 | from nbformat.v4.rwbase import strip_transient 37 | from traitlets import ( 38 | Any, 39 | Bool, 40 | Dict, 41 | Instance, 42 | Integer, 43 | HasTraits, 44 | Unicode, 45 | ) 46 | 47 | 48 | __all__ = [ 49 | 'APITest', 50 | 'Any', 51 | 'assert_http_error', 52 | 'Bool', 53 | 'Checkpoints', 54 | 'Config', 55 | 'ContentsManager', 56 | 'Dict', 57 | 'FileContentsManager', 58 | 'GenericCheckpointsMixin', 59 | 'GenericFileCheckpoints', 60 | 'HasTraits', 61 | 'Instance', 62 | 'Integer', 63 | 'TestContentsManager', 64 | 'Unicode', 65 | 'from_dict', 66 | 'new_code_cell', 67 | 'new_markdown_cell', 68 | 'new_notebook', 69 | 'new_raw_cell', 70 | 'reads', 71 | 'strip_transient', 72 | 'to_os_path', 73 | 'writes', 74 | ] 75 | 76 | -------------------------------------------------------------------------------- /s3contents/s3manager.py: 
-------------------------------------------------------------------------------- 1 | import json 2 | 3 | from s3contents.ipycompat import Unicode 4 | 5 | from s3contents.s3_fs import S3FS 6 | from s3contents.genericmanager import from_dict, GenericContentsManager 7 | 8 | 9 | class S3ContentsManager(GenericContentsManager): 10 | 11 | access_key_id = Unicode( 12 | help="S3/AWS access key ID", allow_none=True, default_value=None).tag( 13 | config=True, env="JPYNB_S3_ACCESS_KEY_ID") 14 | secret_access_key = Unicode( 15 | help="S3/AWS secret access key", allow_none=True, default_value=None).tag( 16 | config=True, env="JPYNB_S3_SECRET_ACCESS_KEY") 17 | 18 | endpoint_url = Unicode( 19 | "https://s3.amazonaws.com", help="S3 endpoint URL").tag( 20 | config=True, env="JPYNB_S3_ENDPOINT_URL") 21 | region_name = Unicode( 22 | "us-east-1", help="Region name").tag( 23 | config=True, env="JPYNB_S3_REGION_NAME") 24 | bucket = Unicode( 25 | "notebooks", help="Bucket name to store notebooks").tag( 26 | config=True, env="JPYNB_S3_BUCKET") 27 | prefix = Unicode("", help="Prefix path inside the specified bucket").tag(config=True) 28 | signature_version = Unicode(help="").tag(config=True) 29 | delimiter = Unicode("/", help="Path delimiter").tag(config=True) 30 | sse = Unicode(help="Type of server-side encryption to use").tag(config=True) 31 | 32 | session_token = Unicode( 33 | help="S3/AWS session token", 34 | allow_none=True, 35 | default_value=None 36 | ).tag(config=True, env="JPYNB_S3_SESSION_TOKEN") 37 | 38 | def __init__(self, *args, **kwargs): 39 | super(S3ContentsManager, self).__init__(*args, **kwargs) 40 | 41 | self._fs = S3FS( 42 | log=self.log, 43 | access_key_id=self.access_key_id, 44 | secret_access_key=self.secret_access_key, 45 | endpoint_url=self.endpoint_url, 46 | region_name=self.region_name, 47 | bucket=self.bucket, 48 | prefix=self.prefix, 49 | session_token=self.session_token, 50 | signature_version=self.signature_version, 51 | delimiter=self.delimiter, 52 | 
sse=self.sse) 53 | 54 | def _save_notebook(self, model, path): 55 | nb_contents = from_dict(model['content']) 56 | self.check_and_sign(nb_contents, path) 57 | file_contents = json.dumps(model["content"]) 58 | self._fs.writenotebook(path, file_contents) 59 | self.validate_notebook_model(model) 60 | return model.get("message") 61 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | 2 | [![Build Status](https://travis-ci.org/danielfrg/s3contents.svg?branch=master)](https://travis-ci.org/danielfrg/s3contents) 3 | [![Coverage Status](https://coveralls.io/repos/github/danielfrg/s3contents/badge.svg?branch=master)](https://coveralls.io/github/danielfrg/s3contents?branch=master) 4 | 5 | # S3Contents 6 | 7 | A S3 and GCS backed ContentsManager implementation for Jupyter. 8 | 9 | It aims to a be a transparent, drop-in replacement for Jupyter standard filesystem-backed storage system. 10 | With this implementation of a Jupyter Contents Manager you can save all your notebooks, regular files, directories 11 | structure directly to a S3/GCS bucket, this could be on AWS/GCP or a self hosted S3 API compatible like [minio](http://minio.io). 12 | 13 | While there is some implementations of this functionality already available online [2] I wasn't able to make 14 | them work in newer Jupyter Notebook installations. This aims to be a better tested one 15 | by being highly based on the awesome [PGContents](https://github.com/quantopian/pgcontents)[1]. 16 | 17 | ## Prerequisites 18 | 19 | Write access (valid credentials) to an S3/GCS bucket, this could be on AWS/GCP or a self hosted S3 like [minio](http://minio.io). 
20 | 21 | ## Installation 22 | 23 | ``` 24 | $ pip install s3contents 25 | ``` 26 | 27 | ## Jupyter config 28 | 29 | Edit `~/.jupyter/jupyter_notebook_config.py` by filling the missing values: 30 | 31 | ### S3 32 | 33 | ```python 34 | from s3contents import S3ContentsManager 35 | 36 | c = get_config() 37 | 38 | # Tell Jupyter to use S3ContentsManager for all storage. 39 | c.NotebookApp.contents_manager_class = S3ContentsManager 40 | c.S3ContentsManager.access_key_id = "<access-key-id>" 41 | c.S3ContentsManager.secret_access_key = "<secret-access-key>" 42 | c.S3ContentsManager.session_token = "<session-token>" 43 | c.S3ContentsManager.bucket = "<bucket-name>" 44 | ``` 45 | 46 | Example for `play.minio.io:9000`: 47 | 48 | ```python 49 | from s3contents import S3ContentsManager 50 | 51 | c = get_config() 52 | 53 | # Tell Jupyter to use S3ContentsManager for all storage. 54 | c.NotebookApp.contents_manager_class = S3ContentsManager 55 | c.S3ContentsManager.access_key_id = "Q3AM3UQ867SPQQA43P2F" 56 | c.S3ContentsManager.secret_access_key = "zuf+tfteSlswRu7BJ86wekitnifILbZam1KYY3TG" 57 | c.S3ContentsManager.endpoint_url = "http://play.minio.io:9000" 58 | c.S3ContentsManager.bucket = "s3contents-demo" 59 | c.S3ContentsManager.prefix = "notebooks/test" 60 | ``` 61 | 62 | ### GCP 63 | 64 | Note that the file `~/.config/gcloud/application_default_credentials.json` assumes a POSIX system 65 | and is created when you run `gcloud init` 66 | 67 | ```python 68 | from s3contents import GCSContentsManager 69 | 70 | c = get_config() 71 | 72 | c.NotebookApp.contents_manager_class = GCSContentsManager 73 | c.GCSContentsManager.project = "<your-project>" 74 | c.GCSContentsManager.token = "~/.config/gcloud/application_default_credentials.json" 75 | c.GCSContentsManager.bucket = "<bucket-name>" 76 | ``` 77 | 78 | ## AWS IAM 79 | 80 | It is also possible to use IAM Role-based access to the S3 bucket from an Amazon EC2 instance; to do that, 81 | just leave ```access_key_id``` and ```secret_access_key``` set to their default values (```None```), and ensure that 82 | the EC2 instance has an IAM role
which provides sufficient permissions for the bucket and the operations necessary. 83 | 84 | ## Access local files 85 | 86 | To access local file as well as remote files in S3 you can use `pgcontents.`. 87 | 88 | First: 89 | ``` 90 | pip install pgcontents 91 | ``` 92 | 93 | And use a configuration like this: 94 | 95 | ```python 96 | from s3contents import S3ContentsManager 97 | from pgcontents.hybridmanager import HybridContentsManager 98 | from IPython.html.services.contents.filemanager import FileContentsManager 99 | 100 | c = get_config() 101 | 102 | c.NotebookApp.contents_manager_class = HybridContentsManager 103 | 104 | c.HybridContentsManager.manager_classes = { 105 | # Associate the root directory with a PostgresContentsManager. 106 | # This manager will receive all requests that don"t fall under any of the 107 | # other managers. 108 | "": S3ContentsManager, 109 | # Associate /directory with a FileContentsManager. 110 | "local_directory": FileContentsManager, 111 | } 112 | 113 | c.HybridContentsManager.manager_kwargs = { 114 | # Args for root PostgresContentsManager. 115 | "": { 116 | "access_key_id": "access-key", 117 | "secret_access_key": "secret-key", 118 | "endpoint_url": "http://localhost:9000", 119 | "bucket": "notebooks", 120 | }, 121 | # Args for the FileContentsManager mapped to /directory 122 | "local_directory": { 123 | "root_dir": "/Users/drodriguez/Downloads", 124 | }, 125 | } 126 | ``` 127 | 128 | ## See also 129 | 130 | 1. [PGContents](https://github.com/quantopian/pgcontents) 131 | 2. 
"""
Utilities to make Google Cloud Storage look like a regular file system.
"""
import os
import six
import gcsfs

from s3contents.compat import FileNotFoundError
from s3contents.ipycompat import Unicode
from s3contents.genericfs import GenericFS, NoSuchFile


class GCSFS(GenericFS):
    """GenericFS implementation backed by a Google Cloud Storage bucket.

    Public methods take bucket-relative paths; ``path()`` expands them to
    ``bucket[/prefix]/...`` and ``unprefix()`` strips that part back off.
    Because GCS has no real directories, an empty marker object
    (``dir_keep_file``) is created to represent each directory.
    """

    project = Unicode(
        help="GCP Project", allow_none=True, default_value=None).tag(
        config=True, env="JPYNB_GCS_PROJECT")
    token = Unicode(
        help="Path to the GCP token", allow_none=True, default_value=None).tag(
        config=True, env="JPYNB_GCS_TOKEN_PATH")

    region_name = Unicode(
        "us-east-1", help="Region name").tag(
        config=True, env="JPYNB_GCS_REGION_NAME")
    bucket = Unicode(
        "notebooks", help="Bucket name to store notebooks").tag(
        config=True, env="JPYNB_GCS_BUCKET")

    prefix = Unicode("", help="Prefix path inside the specified bucket").tag(config=True)
    separator = Unicode("/", help="Path separator").tag(config=True)

    dir_keep_file = Unicode(
        ".gcskeep", help="Empty file to create when creating directories").tag(config=True)

    def __init__(self, log, **kwargs):
        super(GCSFS, self).__init__(**kwargs)
        self.log = log

        # Bug fix: `token` is allow_none and defaults to None, but
        # os.path.expanduser(None) raises TypeError. Only expand a real
        # path; pass None through so gcsfs can use its default credentials.
        token = os.path.expanduser(self.token) if self.token else self.token
        self.fs = gcsfs.GCSFileSystem(project=self.project, token=token)

        self.init()

    def init(self):
        """Create and sanity-check the root directory of bucket+prefix."""
        self.mkdir("")
        self.ls("")
        assert self.isdir(""), "The root directory should exists :)"

    # GenericFS methods -----------------------------------------------------------------------------------------------

    def ls(self, path=""):
        """List entries under `path`, with the bucket/prefix stripped.

        `path` now defaults to "" (the root) for parity with S3FS.ls.
        """
        path_ = self.path(path)
        self.log.debug("S3contents.GCSFS: Listing directory: `%s`", path_)
        files = self.fs.ls(path_)
        return self.unprefix(files)

    def isfile(self, path):
        """Return True if `path` names an object (fs.info fails on directories)."""
        path_ = self.path(path)
        is_file = False

        exists = self.fs.exists(path_)
        if exists:
            try:
                # Info will fail if path is a dir
                self.fs.info(path_)
                is_file = True
            except FileNotFoundError:
                pass

        self.log.debug("S3contents.GCSFS: `%s` is a file: %s", path_, is_file)
        return is_file

    def isdir(self, path):
        """Return True if `path` is a directory.

        GCSFS doesn't return exists=True for a directory with no files, so
        we check for the directory marker object (dir_keep_file) instead.
        """
        is_dir = self.isfile(path + self.separator + self.dir_keep_file)
        path_ = self.path(path)
        self.log.debug("S3contents.GCSFS: `%s` is a directory: %s", path_, is_dir)
        return is_dir

    def mv(self, old_path, new_path):
        """Move a file or directory: copy, then remove the source."""
        self.log.debug("S3contents.GCSFS: Move file `%s` to `%s`", old_path, new_path)
        self.cp(old_path, new_path)
        self.rm(old_path)

    def cp(self, old_path, new_path):
        """Copy a file, or recursively copy a directory's entries."""
        old_path_, new_path_ = self.path(old_path), self.path(new_path)
        self.log.debug("S3contents.GCSFS: Coping `%s` to `%s`", old_path_, new_path_)

        if self.isdir(old_path):
            old_dir_path, new_dir_path = old_path, new_path
            for obj in self.ls(old_dir_path):
                old_item_path = obj
                # Rewrite only the leading directory component.
                new_item_path = old_item_path.replace(old_dir_path, new_dir_path, 1)
                self.cp(old_item_path, new_item_path)
        elif self.isfile(old_path):
            self.fs.copy(old_path_, new_path_)

    def rm(self, path):
        """Remove a file, or recursively remove a directory's objects."""
        path_ = self.path(path)
        self.log.debug("S3contents.GCSFS: Removing: `%s`", path_)
        if self.isfile(path):
            self.log.debug("S3contents.GCSFS: Removing file: `%s`", path_)
            self.fs.rm(path_)
        elif self.isdir(path):
            self.log.debug("S3contents.GCSFS: Removing directory: `%s`", path_)
            files = self.fs.walk(path_)
            for f in files:
                self.fs.rm(f)

    def mkdir(self, path):
        """Create a directory by touching its empty marker object."""
        path_ = self.path(path, self.dir_keep_file)
        self.log.debug("S3contents.GCSFS: Making dir (touch): `%s`", path_)
        self.fs.touch(path_)

    def read(self, path):
        """Return the object's content decoded as UTF-8 text.

        Raises NoSuchFile when `path` is not an existing file.
        """
        path_ = self.path(path)
        if not self.isfile(path):
            raise NoSuchFile(path_)
        with self.fs.open(path_, mode='rb') as f:
            content = f.read().decode("utf-8")
        return content

    def lstat(self, path):
        """Return a stat-like dict; "ST_MTIME" comes from GCS's "updated" field."""
        path_ = self.path(path)
        info = self.fs.info(path_)
        ret = {}
        ret["ST_MTIME"] = info["updated"]
        return ret

    def write(self, path, content, format):
        """Write `content` to `path` as UTF-8 text.

        NOTE(review): `format` is accepted for interface parity with
        S3FS.write but is ignored here — content is always treated as text.
        """
        path_ = self.path(self.unprefix(path))
        self.log.debug("S3contents.GCSFS: Writing file: `%s`", path_)
        with self.fs.open(path_, mode='wb') as f:
            f.write(content.encode("utf-8"))

    # Utilities -------------------------------------------------------------------------------------------------------

    def strip(self, path):
        """Strip leading/trailing separators from a path or list of paths."""
        if isinstance(path, six.string_types):
            return path.strip(self.separator)
        if isinstance(path, (list, tuple)):
            return list(map(self.strip, path))

    def join(self, *paths):
        """Join path components with the configured separator."""
        paths = self.strip(paths)
        return self.separator.join(paths)

    def get_prefix(self):
        """Full prefix: bucket + optional prefix"""
        prefix = self.bucket
        if self.prefix:
            prefix += self.separator + self.prefix
        return prefix
    prefix_ = property(get_prefix)

    def unprefix(self, path):
        """Remove the self.prefix_ (if present) from a path or list of paths"""
        path = self.strip(path)
        if isinstance(path, six.string_types):
            path = path[len(self.prefix_):] if path.startswith(self.prefix_) else path
            path = path[1:] if path.startswith(self.separator) else path
            return path
        if isinstance(path, (list, tuple)):
            path = [p[len(self.prefix_):] if p.startswith(self.prefix_) else p for p in path]
            path = [p[1:] if p.startswith(self.separator) else p for p in path]
            return path

    def path(self, *path):
        """Utility to join paths including the bucket and prefix"""
        path = list(filter(None, path))
        path = self.unprefix(path)
        items = [self.prefix_] + path
        return self.join(*items)
"""
Utilities to make S3 look like a regular file system
"""
import six
import s3fs
import base64

from s3contents.compat import FileNotFoundError
from s3contents.ipycompat import Unicode
from s3contents.genericfs import GenericFS, NoSuchFile

from tornado.web import HTTPError


class S3FS(GenericFS):
    """GenericFS implementation on top of an S3 (or S3-compatible) bucket.

    Paths given to the public methods are bucket-relative; ``path()``
    expands them to ``bucket[/prefix]/...`` and ``unprefix()`` strips the
    bucket/prefix back off. Directories are represented by an empty marker
    object (``dir_keep_file``).
    """

    access_key_id = Unicode(
        help="S3/AWS access key ID", allow_none=True, default_value=None).tag(
        config=True, env="JPYNB_S3_ACCESS_KEY_ID")
    secret_access_key = Unicode(
        help="S3/AWS secret access key", allow_none=True, default_value=None).tag(
        config=True, env="JPYNB_S3_SECRET_ACCESS_KEY")

    endpoint_url = Unicode(
        "s3.amazonaws.com", help="S3 endpoint URL").tag(
        config=True, env="JPYNB_S3_ENDPOINT_URL")
    region_name = Unicode(
        "us-east-1", help="Region name").tag(
        config=True, env="JPYNB_S3_REGION_NAME")
    bucket = Unicode(
        "notebooks", help="Bucket name to store notebooks").tag(
        config=True, env="JPYNB_S3_BUCKET")
    signature_version = Unicode(help="").tag(config=True)
    sse = Unicode(help="Type of server-side encryption to use").tag(config=True)

    prefix = Unicode("", help="Prefix path inside the specified bucket").tag(config=True)
    delimiter = Unicode("/", help="Path delimiter").tag(config=True)

    dir_keep_file = Unicode(
        ".s3keep", help="Empty file to create when creating directories").tag(config=True)

    session_token = Unicode(
        help="S3/AWS session token",
        allow_none=True,
        default_value=None
    ).tag(config=True, env="JPYNB_S3_SESSION_TOKEN")

    def __init__(self, log, **kwargs):
        super(S3FS, self).__init__(**kwargs)
        self.log = log

        client_kwargs = {
            "endpoint_url": self.endpoint_url,
            "region_name": self.region_name,
        }
        # Only pass optional botocore settings when they were configured.
        config_kwargs = {}
        if self.signature_version:
            config_kwargs["signature_version"] = self.signature_version
        s3_additional_kwargs = {}
        if self.sse:
            s3_additional_kwargs["ServerSideEncryption"] = self.sse

        self.fs = s3fs.S3FileSystem(key=self.access_key_id,
                                    secret=self.secret_access_key,
                                    token=self.session_token,
                                    client_kwargs=client_kwargs,
                                    config_kwargs=config_kwargs,
                                    s3_additional_kwargs=s3_additional_kwargs)

        self.init()

    def init(self):
        """Create and probe the root directory of bucket+prefix."""
        self.mkdir("")
        self.ls("")
        self.isdir("")

    # GenericFS methods -----------------------------------------------------------------------------------------------

    def ls(self, path=""):
        """List entries under `path` with the bucket/prefix stripped."""
        full_path = self.path(path)
        self.log.debug("S3contents.S3FS: Listing directory: `%s`", full_path)
        entries = self.fs.ls(full_path, refresh=True)
        return self.unprefix(entries)

    def isfile(self, path):
        """Return True when `path` names an object (fs.info fails on dirs)."""
        full_path = self.path(path)
        is_file = False
        if self.fs.exists(full_path):
            try:
                # Info will fail if path is a dir
                self.fs.info(full_path, refresh=True)
                is_file = True
            except FileNotFoundError:
                pass
        self.log.debug("S3contents.S3FS: `%s` is a file: %s", full_path, is_file)
        return is_file

    def isdir(self, path):
        """Return True when `path` exists but is not a plain object."""
        full_path = self.path(path)
        is_dir = False
        if self.fs.exists(full_path):
            try:
                # Info will fail if path is a dir
                self.fs.info(full_path, refresh=True)
            except FileNotFoundError:
                is_dir = True
        self.log.debug("S3contents.S3FS: `%s` is a directory: %s", full_path, is_dir)
        return is_dir

    def mv(self, old_path, new_path):
        """Move = copy then delete the source."""
        self.log.debug("S3contents.S3FS: Move file `%s` to `%s`", old_path, new_path)
        self.cp(old_path, new_path)
        self.rm(old_path)

    def cp(self, old_path, new_path):
        """Copy a file, or recursively copy a directory's entries."""
        src, dst = self.path(old_path), self.path(new_path)
        self.log.debug("S3contents.S3FS: Coping `%s` to `%s`", src, dst)

        if self.isdir(old_path):
            for item in self.ls(old_path):
                # Rewrite only the leading directory component.
                self.cp(item, item.replace(old_path, new_path, 1))
        elif self.isfile(old_path):
            self.fs.copy(src, dst)

    def rm(self, path):
        """Remove a file, or a directory subtree recursively."""
        full_path = self.path(path)
        self.log.debug("S3contents.S3FS: Removing: `%s`", full_path)
        if self.isfile(path):
            self.log.debug("S3contents.S3FS: Removing file: `%s`", full_path)
            self.fs.rm(full_path)
        elif self.isdir(path):
            self.log.debug("S3contents.S3FS: Removing directory: `%s`", full_path)
            self.fs.rm(full_path + self.delimiter, recursive=True)

    def mkdir(self, path):
        """Create a directory by touching its empty marker object."""
        full_path = self.path(path, self.dir_keep_file)
        self.log.debug("S3contents.S3FS: Making dir: `%s`", full_path)
        self.fs.touch(full_path)

    def read(self, path):
        """Return the object's content decoded as UTF-8 text; NoSuchFile otherwise."""
        full_path = self.path(path)
        if not self.isfile(path):
            raise NoSuchFile(full_path)
        with self.fs.open(full_path, mode='rb') as fp:
            return fp.read().decode("utf-8")

    def lstat(self, path):
        """Return a stat-like dict; "ST_MTIME" comes from S3's LastModified."""
        full_path = self.path(path)
        info = self.fs.info(full_path, refresh=True)
        return {"ST_MTIME": info["LastModified"]}

    def write(self, path, content, format):
        """Write `content` to `path`; `format` must be 'text' or 'base64'."""
        full_path = self.path(self.unprefix(path))
        self.log.debug("S3contents.S3FS: Writing file: `%s`", full_path)
        if format not in {'text', 'base64'}:
            raise HTTPError(
                400,
                "Must specify format of file contents as 'text' or 'base64'",
            )
        try:
            if format == 'text':
                payload = content.encode('utf8')
            else:
                payload = base64.b64decode(content.encode('ascii'))
        except Exception as e:
            raise HTTPError(
                400, u'Encoding error saving %s: %s' % (full_path, e)
            )
        with self.fs.open(full_path, mode='wb') as fp:
            fp.write(payload)

    def writenotebook(self, path, content):
        """Write already-serialized notebook JSON to `path` as UTF-8."""
        full_path = self.path(self.unprefix(path))
        self.log.debug("S3contents.S3FS: Writing notebook: `%s`", full_path)
        with self.fs.open(full_path, mode='wb') as fp:
            fp.write(content.encode("utf-8"))

    # Utilities -------------------------------------------------------------------------------------------------------

    def get_prefix(self):
        """Full prefix: bucket + optional prefix"""
        full = self.bucket
        if self.prefix:
            full += self.delimiter + self.prefix
        return full
    prefix_ = property(get_prefix)

    def unprefix(self, path):
        """Remove the self.prefix_ (if present) from a path or list of paths"""
        if isinstance(path, six.string_types):
            stripped = path[len(self.prefix_):] if path.startswith(self.prefix_) else path
            return stripped[1:] if stripped.startswith(self.delimiter) else stripped
        if isinstance(path, (list, tuple)):
            return [self.unprefix(p) for p in path]

    def path(self, *path):
        """Utility to join paths including the bucket and prefix"""
        pieces = self.unprefix(list(filter(None, path)))
        return self.delimiter.join([self.prefix_] + pieces)
"""
Backend-agnostic Jupyter ContentsManager that delegates storage to a GenericFS.
"""
import os
import json
import mimetypes
import datetime

from tornado.web import HTTPError

from s3contents.genericfs import GenericFSError, NoSuchFile
from s3contents.ipycompat import ContentsManager
from s3contents.ipycompat import HasTraits, Unicode
from s3contents.ipycompat import reads, from_dict, GenericFileCheckpoints

# Placeholder timestamp for entries whose real mtime is unavailable.
DUMMY_CREATED_DATE = datetime.datetime.fromtimestamp(86400)
NBFORMAT_VERSION = 4


class GenericContentsManager(ContentsManager, HasTraits):
    """ContentsManager whose storage operations go through ``self.fs``,
    a GenericFS implementation (e.g. S3FS or GCSFS) set by a subclass.
    """

    # This makes the checkpoints get saved on this directory
    root_dir = Unicode("./", config=True)

    def __init__(self, *args, **kwargs):
        super(GenericContentsManager, self).__init__(*args, **kwargs)
        # Subclasses are expected to assign the GenericFS backend here.
        self._fs = None

    def get_fs(self):
        """Return the GenericFS backend."""
        return self._fs
    fs = property(get_fs)

    def _checkpoints_class_default(self):
        return GenericFileCheckpoints

    def do_error(self, msg, code=500):
        """Abort the request with an HTTP error."""
        raise HTTPError(code, msg)

    def no_such_entity(self, path):
        self.do_error("No such entity: [{path}]".format(path=path), 404)

    def already_exists(self, path):
        thing = "File" if self.file_exists(path) else "Directory"
        self.do_error(u"{thing} already exists: [{path}]".format(thing=thing, path=path), 409)

    def guess_type(self, path, allow_directory=True):
        """
        Guess the type of a file.
        If allow_directory is False, don't consider the possibility that the
        file is a directory.

        Parameters
        ----------
        obj: s3.Object or string
        """
        if path.endswith(".ipynb"):
            return "notebook"
        elif allow_directory and self.dir_exists(path):
            return "directory"
        else:
            return "file"

    def file_exists(self, path):
        # Does a file exist at the given path?
        self.log.debug("S3contents.GenericManager.file_exists: ('%s')", path)
        return self.fs.isfile(path)

    def dir_exists(self, path):
        # Does a directory exist at the given path?
        self.log.debug("S3contents.GenericManager.dir_exists: path('%s')", path)
        return self.fs.isdir(path)

    def get(self, path, content=True, type=None, format=None):
        # Get a file or directory model.
        self.log.debug("S3contents.GenericManager.get] path('%s') type(%s) format(%s)", path, type, format)
        path = path.strip('/')

        if type is None:
            type = self.guess_type(path)
        try:
            # Dispatch on the (possibly guessed) content type.
            func = {
                "directory": self._get_directory,
                "notebook": self._get_notebook,
                "file": self._get_file,
            }[type]
        except KeyError:
            raise ValueError("Unknown type passed: '{}'".format(type))

        return func(path=path, content=content, format=format)

    def _get_directory(self, path, content=True, format=None):
        self.log.debug("S3contents.GenericManager.get_directory: path('%s') content(%s) format(%s)", path, content, format)
        return self._directory_model_from_path(path, content=content)

    def _get_notebook(self, path, content=True, format=None):
        self.log.debug("S3contents.GenericManager.get_notebook: path('%s') type(%s) format(%s)", path, content, format)
        return self._notebook_model_from_path(path, content=content, format=format)

    def _get_file(self, path, content=True, format=None):
        self.log.debug("S3contents.GenericManager.get_file: path('%s') type(%s) format(%s)", path, content, format)
        return self._file_model_from_path(path, content=content, format=format)

    def _directory_model_from_path(self, path, content=False):
        """Build a directory model, optionally listing its entries."""
        self.log.debug("S3contents.GenericManager._directory_model_from_path: path('%s') type(%s)", path, content)
        model = base_directory_model(path)
        if content:
            if not self.dir_exists(path):
                self.no_such_entity(path)
            model["format"] = "json"
            dir_content = self.fs.ls(path=path)
            model["content"] = self._convert_file_records(dir_content)
        return model

    def _notebook_model_from_path(self, path, content=False, format=None):
        """
        Build a notebook model from database record.
        """
        model = base_model(path)
        model["type"] = "notebook"
        if self.fs.isfile(path):
            model["last_modified"] = model["created"] = self.fs.lstat(path)["ST_MTIME"]
        else:
            model["last_modified"] = model["created"] = DUMMY_CREATED_DATE
        if content:
            if not self.fs.isfile(path):
                self.no_such_entity(path)
            file_content = self.fs.read(path)
            nb_content = reads(file_content, as_version=NBFORMAT_VERSION)
            self.mark_trusted_cells(nb_content, path)
            model["format"] = "json"
            model["content"] = nb_content
            self.validate_notebook_model(model)
        return model

    def _file_model_from_path(self, path, content=False, format=None):
        """
        Build a file model from database record.
        """
        model = base_model(path)
        model["type"] = "file"
        if self.fs.isfile(path):
            model["last_modified"] = model["created"] = self.fs.lstat(path)["ST_MTIME"]
        else:
            model["last_modified"] = model["created"] = DUMMY_CREATED_DATE
        if content:
            try:
                content = self.fs.read(path)
            except NoSuchFile as e:
                self.no_such_entity(e.path)
            except GenericFSError as e:
                self.do_error(str(e), 500)
            model["format"] = format or "text"
            model["content"] = content
            model["mimetype"] = mimetypes.guess_type(path)[0] or "text/plain"
            if format == "base64":
                # NOTE(review): fs.read() decodes the object as UTF-8 text
                # before this b64decode, so truly binary files may fail
                # upstream — confirm against the GenericFS backends.
                model["format"] = format or "base64"
                from base64 import b64decode
                model["content"] = b64decode(content)
        return model

    def _convert_file_records(self, paths):
        """
        Applies _notebook_model_from_s3_path or _file_model_from_s3_path to each entry of `paths`,
        depending on the result of `guess_type`.
        """
        ret = []
        for path in paths:
            # Directory marker objects are an implementation detail; hide them.
            if os.path.basename(path) == self.fs.dir_keep_file:
                continue
            type_ = self.guess_type(path, allow_directory=True)
            if type_ == "notebook":
                ret.append(self._notebook_model_from_path(path, False))
            elif type_ == "file":
                ret.append(self._file_model_from_path(path, False, None))
            elif type_ == "directory":
                ret.append(self._directory_model_from_path(path, False))
            else:
                self.do_error("Unknown file type %s for file '%s'" % (type_, path), 500)
        return ret

    def save(self, model, path):
        """Save a file or directory model to path.
        """
        self.log.debug("S3contents.GenericManager: save %s: '%s'", model, path)
        if "type" not in model:
            self.do_error("No model type provided", 400)
        if "content" not in model and model["type"] != "directory":
            self.do_error("No file content provided", 400)

        if model["type"] not in ("file", "directory", "notebook"):
            self.do_error("Unhandled contents type: %s" % model["type"], 400)

        try:
            if model["type"] == "notebook":
                validation_message = self._save_notebook(model, path)
            elif model["type"] == "file":
                validation_message = self._save_file(model, path)
            else:
                validation_message = self._save_directory(path)
        except Exception as e:
            self.log.error("Error while saving file: %s %s", path, e, exc_info=True)
            self.do_error("Unexpected error while saving file: %s %s" % (path, e), 500)

        model = self.get(path, type=model["type"], content=False)
        if validation_message is not None:
            model["message"] = validation_message
        return model

    def _save_notebook(self, model, path):
        """Sign, serialize and store a notebook model; return its validation message."""
        nb_contents = from_dict(model['content'])
        self.check_and_sign(nb_contents, path)
        file_contents = json.dumps(model["content"])
        # Bug fix: GenericFS.write implementations take (path, content, format);
        # calling write() with only two arguments raised TypeError on every
        # notebook save. Notebook JSON is plain text for both backends.
        self.fs.write(path, file_contents, "text")
        self.validate_notebook_model(model)
        return model.get("message")

    def _save_file(self, model, path):
        """Store a plain-file model using the model's declared format."""
        file_contents = model["content"]
        file_format = model.get('format')
        self.fs.write(path, file_contents, file_format)

    def _save_directory(self, path):
        """Create a directory at `path`."""
        self.fs.mkdir(path)

    def rename_file(self, old_path, new_path):
        """Rename a file or directory.

        NOTE: This method is unfortunately named on the base class. It
        actually moves a file or a directory.
        """
        self.log.debug("S3contents.GenericManager: Init rename of '%s' to '%s'", old_path, new_path)
        if self.file_exists(new_path) or self.dir_exists(new_path):
            self.already_exists(new_path)
        elif self.file_exists(old_path) or self.dir_exists(old_path):
            self.log.debug("S3contents.GenericManager: Actually renaming '%s' to '%s'", old_path,
                           new_path)
            self.fs.mv(old_path, new_path)
        else:
            self.no_such_entity(old_path)

    def delete_file(self, path):
        """Delete the file or directory at path.
        """
        self.log.debug("S3contents.GenericManager: delete_file '%s'", path)
        if self.file_exists(path) or self.dir_exists(path):
            self.fs.rm(path)
        else:
            self.no_such_entity(path)

    def is_hidden(self, path):
        """Is path a hidden directory or file?
        """
        self.log.debug("S3contents.GenericManager: is_hidden '%s'", path)
        return False


def base_model(path):
    """Return a skeleton Jupyter content model for `path` (no type set)."""
    return {
        "name": path.rsplit('/', 1)[-1],
        "path": path,
        "writable": True,
        "last_modified": None,
        "created": None,
        "content": None,
        "format": None,
        "mimetype": None,
    }


def base_directory_model(path):
    """Return a skeleton directory model with placeholder timestamps."""
    model = base_model(path)
    model.update(
        type="directory",
        last_modified=DUMMY_CREATED_DATE,
        created=DUMMY_CREATED_DATE,)
    return model
12 | 13 | "Licensor" shall mean the copyright owner or entity authorized by 14 | the copyright owner that is granting the License. 15 | 16 | "Legal Entity" shall mean the union of the acting entity and all 17 | other entities that control, are controlled by, or are under common 18 | control with that entity. For the purposes of this definition, 19 | "control" means (i) the power, direct or indirect, to cause the 20 | direction or management of such entity, whether by contract or 21 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 22 | outstanding shares, or (iii) beneficial ownership of such entity. 23 | 24 | "You" (or "Your") shall mean an individual or Legal Entity 25 | exercising permissions granted by this License. 26 | 27 | "Source" form shall mean the preferred form for making modifications, 28 | including but not limited to software source code, documentation 29 | source, and configuration files. 30 | 31 | "Object" form shall mean any form resulting from mechanical 32 | transformation or translation of a Source form, including but 33 | not limited to compiled object code, generated documentation, 34 | and conversions to other media types. 35 | 36 | "Work" shall mean the work of authorship, whether in Source or 37 | Object form, made available under the License, as indicated by a 38 | copyright notice that is included in or attached to the work 39 | (an example is provided in the Appendix below). 40 | 41 | "Derivative Works" shall mean any work, whether in Source or Object 42 | form, that is based on (or derived from) the Work and for which the 43 | editorial revisions, annotations, elaborations, or other modifications 44 | represent, as a whole, an original work of authorship. For the purposes 45 | of this License, Derivative Works shall not include works that remain 46 | separable from, or merely link (or bind by name) to the interfaces of, 47 | the Work and Derivative Works thereof. 
48 | 49 | "Contribution" shall mean any work of authorship, including 50 | the original version of the Work and any modifications or additions 51 | to that Work or Derivative Works thereof, that is intentionally 52 | submitted to Licensor for inclusion in the Work by the copyright owner 53 | or by an individual or Legal Entity authorized to submit on behalf of 54 | the copyright owner. For the purposes of this definition, "submitted" 55 | means any form of electronic, verbal, or written communication sent 56 | to the Licensor or its representatives, including but not limited to 57 | communication on electronic mailing lists, source code control systems, 58 | and issue tracking systems that are managed by, or on behalf of, the 59 | Licensor for the purpose of discussing and improving the Work, but 60 | excluding communication that is conspicuously marked or otherwise 61 | designated in writing by the copyright owner as "Not a Contribution." 62 | 63 | "Contributor" shall mean Licensor and any individual or Legal Entity 64 | on behalf of whom a Contribution has been received by Licensor and 65 | subsequently incorporated within the Work. 66 | 67 | 2. Grant of Copyright License. Subject to the terms and conditions of 68 | this License, each Contributor hereby grants to You a perpetual, 69 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 70 | copyright license to reproduce, prepare Derivative Works of, 71 | publicly display, publicly perform, sublicense, and distribute the 72 | Work and such Derivative Works in Source or Object form. 73 | 74 | 3. Grant of Patent License. 
Subject to the terms and conditions of 75 | this License, each Contributor hereby grants to You a perpetual, 76 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 77 | (except as stated in this section) patent license to make, have made, 78 | use, offer to sell, sell, import, and otherwise transfer the Work, 79 | where such license applies only to those patent claims licensable 80 | by such Contributor that are necessarily infringed by their 81 | Contribution(s) alone or by combination of their Contribution(s) 82 | with the Work to which such Contribution(s) was submitted. If You 83 | institute patent litigation against any entity (including a 84 | cross-claim or counterclaim in a lawsuit) alleging that the Work 85 | or a Contribution incorporated within the Work constitutes direct 86 | or contributory patent infringement, then any patent licenses 87 | granted to You under this License for that Work shall terminate 88 | as of the date such litigation is filed. 89 | 90 | 4. Redistribution. 
You may reproduce and distribute copies of the 91 | Work or Derivative Works thereof in any medium, with or without 92 | modifications, and in Source or Object form, provided that You 93 | meet the following conditions: 94 | 95 | (a) You must give any other recipients of the Work or 96 | Derivative Works a copy of this License; and 97 | 98 | (b) You must cause any modified files to carry prominent notices 99 | stating that You changed the files; and 100 | 101 | (c) You must retain, in the Source form of any Derivative Works 102 | that You distribute, all copyright, patent, trademark, and 103 | attribution notices from the Source form of the Work, 104 | excluding those notices that do not pertain to any part of 105 | the Derivative Works; and 106 | 107 | (d) If the Work includes a "NOTICE" text file as part of its 108 | distribution, then any Derivative Works that You distribute must 109 | include a readable copy of the attribution notices contained 110 | within such NOTICE file, excluding those notices that do not 111 | pertain to any part of the Derivative Works, in at least one 112 | of the following places: within a NOTICE text file distributed 113 | as part of the Derivative Works; within the Source form or 114 | documentation, if provided along with the Derivative Works; or, 115 | within a display generated by the Derivative Works, if and 116 | wherever such third-party notices normally appear. The contents 117 | of the NOTICE file are for informational purposes only and 118 | do not modify the License. You may add Your own attribution 119 | notices within Derivative Works that You distribute, alongside 120 | or as an addendum to the NOTICE text from the Work, provided 121 | that such additional attribution notices cannot be construed 122 | as modifying the License. 
123 | 124 | You may add Your own copyright statement to Your modifications and 125 | may provide additional or different license terms and conditions 126 | for use, reproduction, or distribution of Your modifications, or 127 | for any such Derivative Works as a whole, provided Your use, 128 | reproduction, and distribution of the Work otherwise complies with 129 | the conditions stated in this License. 130 | 131 | 5. Submission of Contributions. Unless You explicitly state otherwise, 132 | any Contribution intentionally submitted for inclusion in the Work 133 | by You to the Licensor shall be under the terms and conditions of 134 | this License, without any additional terms or conditions. 135 | Notwithstanding the above, nothing herein shall supersede or modify 136 | the terms of any separate license agreement you may have executed 137 | with Licensor regarding such Contributions. 138 | 139 | 6. Trademarks. This License does not grant permission to use the trade 140 | names, trademarks, service marks, or product names of the Licensor, 141 | except as required for reasonable and customary use in describing the 142 | origin of the Work and reproducing the content of the NOTICE file. 143 | 144 | 7. Disclaimer of Warranty. Unless required by applicable law or 145 | agreed to in writing, Licensor provides the Work (and each 146 | Contributor provides its Contributions) on an "AS IS" BASIS, 147 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 148 | implied, including, without limitation, any warranties or conditions 149 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 150 | PARTICULAR PURPOSE. You are solely responsible for determining the 151 | appropriateness of using or redistributing the Work and assume any 152 | risks associated with Your exercise of permissions under this License. 153 | 154 | 8. Limitation of Liability. 
In no event and under no legal theory, 155 | whether in tort (including negligence), contract, or otherwise, 156 | unless required by applicable law (such as deliberate and grossly 157 | negligent acts) or agreed to in writing, shall any Contributor be 158 | liable to You for damages, including any direct, indirect, special, 159 | incidental, or consequential damages of any character arising as a 160 | result of this License or out of the use or inability to use the 161 | Work (including but not limited to damages for loss of goodwill, 162 | work stoppage, computer failure or malfunction, or any and all 163 | other commercial damages or losses), even if such Contributor 164 | has been advised of the possibility of such damages. 165 | 166 | 9. Accepting Warranty or Additional Liability. While redistributing 167 | the Work or Derivative Works thereof, You may choose to offer, 168 | and charge a fee for, acceptance of support, warranty, indemnity, 169 | or other liability obligations and/or rights consistent with this 170 | License. However, in accepting such obligations, You may act only 171 | on Your own behalf and on Your sole responsibility, not on behalf 172 | of any other Contributor, and only if You agree to indemnify, 173 | defend, and hold each Contributor harmless for any liability 174 | incurred by, or claims asserted against, such Contributor by reason 175 | of your accepting any such warranty or additional liability. 176 | 177 | END OF TERMS AND CONDITIONS 178 | 179 | APPENDIX: How to apply the Apache License to your work. 180 | 181 | To apply the Apache License to your work, attach the following 182 | boilerplate notice, with the fields enclosed by brackets "[]" 183 | replaced with your own identifying information. (Don't include 184 | the brackets!) The text should be enclosed in the appropriate 185 | comment syntax for the file format. 
githubs → (fix applied below)
_version.py will just call 23 | # get_keywords(). 24 | git_refnames = " (HEAD -> master)" 25 | git_full = "f47b827bfc8c13902618682e9825b74d36ffe744" 26 | keywords = {"refnames": git_refnames, "full": git_full} 27 | return keywords 28 | 29 | 30 | class VersioneerConfig: 31 | """Container for Versioneer configuration parameters.""" 32 | 33 | 34 | def get_config(): 35 | """Create, populate and return the VersioneerConfig() object.""" 36 | # these strings are filled in when 'setup.py versioneer' creates 37 | # _version.py 38 | cfg = VersioneerConfig() 39 | cfg.VCS = "git" 40 | cfg.style = "pep440" 41 | cfg.tag_prefix = "" 42 | cfg.parentdir_prefix = "s3contents-" 43 | cfg.versionfile_source = "s3contents/_version.py" 44 | cfg.verbose = False 45 | return cfg 46 | 47 | 48 | class NotThisMethod(Exception): 49 | """Exception raised if a method is not valid for the current scenario.""" 50 | 51 | 52 | LONG_VERSION_PY = {} 53 | HANDLERS = {} 54 | 55 | 56 | def register_vcs_handler(vcs, method): # decorator 57 | """Decorator to mark a method as the handler for a particular VCS.""" 58 | 59 | def decorate(f): 60 | """Store f in HANDLERS[vcs][method].""" 61 | if vcs not in HANDLERS: 62 | HANDLERS[vcs] = {} 63 | HANDLERS[vcs][method] = f 64 | return f 65 | 66 | return decorate 67 | 68 | 69 | def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False): 70 | """Call the given command(s).""" 71 | assert isinstance(commands, list) 72 | p = None 73 | for c in commands: 74 | try: 75 | dispcmd = str([c] + args) 76 | # remember shell=False, so use git.cmd on windows, not just git 77 | p = subprocess.Popen( 78 | [c] + args, 79 | cwd=cwd, 80 | stdout=subprocess.PIPE, 81 | stderr=(subprocess.PIPE if hide_stderr else None)) 82 | break 83 | except EnvironmentError: 84 | e = sys.exc_info()[1] 85 | if e.errno == errno.ENOENT: 86 | continue 87 | if verbose: 88 | print("unable to run %s" % dispcmd) 89 | print(e) 90 | return None 91 | else: 92 | if verbose: 93 | print("unable to 
find command, tried %s" % (commands,)) 94 | return None 95 | stdout = p.communicate()[0].strip() 96 | if sys.version_info[0] >= 3: 97 | stdout = stdout.decode() 98 | if p.returncode != 0: 99 | if verbose: 100 | print("unable to run %s (error)" % dispcmd) 101 | return None 102 | return stdout 103 | 104 | 105 | def versions_from_parentdir(parentdir_prefix, root, verbose): 106 | """Try to determine the version from the parent directory name. 107 | 108 | Source tarballs conventionally unpack into a directory that includes 109 | both the project name and a version string. 110 | """ 111 | dirname = os.path.basename(root) 112 | if not dirname.startswith(parentdir_prefix): 113 | if verbose: 114 | print("guessing rootdir is '%s', but '%s' doesn't start with " 115 | "prefix '%s'" % (root, dirname, parentdir_prefix)) 116 | raise NotThisMethod("rootdir doesn't start with parentdir_prefix") 117 | return { 118 | "version": dirname[len(parentdir_prefix):], 119 | "full-revisionid": None, 120 | "dirty": False, 121 | "error": None 122 | } 123 | 124 | 125 | @register_vcs_handler("git", "get_keywords") 126 | def git_get_keywords(versionfile_abs): 127 | """Extract version information from the given file.""" 128 | # the code embedded in _version.py can just fetch the value of these 129 | # keywords. When used from setup.py, we don't want to import _version.py, 130 | # so we do it with a regexp instead. This function is not used from 131 | # _version.py. 
132 | keywords = {} 133 | try: 134 | f = open(versionfile_abs, "r") 135 | for line in f.readlines(): 136 | if line.strip().startswith("git_refnames ="): 137 | mo = re.search(r'=\s*"(.*)"', line) 138 | if mo: 139 | keywords["refnames"] = mo.group(1) 140 | if line.strip().startswith("git_full ="): 141 | mo = re.search(r'=\s*"(.*)"', line) 142 | if mo: 143 | keywords["full"] = mo.group(1) 144 | f.close() 145 | except EnvironmentError: 146 | pass 147 | return keywords 148 | 149 | 150 | @register_vcs_handler("git", "keywords") 151 | def git_versions_from_keywords(keywords, tag_prefix, verbose): 152 | """Get version information from git keywords.""" 153 | if not keywords: 154 | raise NotThisMethod("no keywords at all, weird") 155 | refnames = keywords["refnames"].strip() 156 | if refnames.startswith("$Format"): 157 | if verbose: 158 | print("keywords are unexpanded, not using") 159 | raise NotThisMethod("unexpanded keywords, not a git-archive tarball") 160 | refs = set([r.strip() for r in refnames.strip("()").split(",")]) 161 | # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of 162 | # just "foo-1.0". If we see a "tag: " prefix, prefer those. 163 | TAG = "tag: " 164 | tags = set([r[len(TAG):] for r in refs if r.startswith(TAG)]) 165 | if not tags: 166 | # Either we're using git < 1.8.3, or there really are no tags. We use 167 | # a heuristic: assume all version tags have a digit. The old git %d 168 | # expansion behaves like git log --decorate=short and strips out the 169 | # refs/heads/ and refs/tags/ prefixes that would let us distinguish 170 | # between branches and tags. By ignoring refnames without digits, we 171 | # filter out many common branch names like "release" and 172 | # "stabilization", as well as "HEAD" and "master". 
173 | tags = set([r for r in refs if re.search(r'\d', r)]) 174 | if verbose: 175 | print("discarding '%s', no digits" % ",".join(refs - tags)) 176 | if verbose: 177 | print("likely tags: %s" % ",".join(sorted(tags))) 178 | for ref in sorted(tags): 179 | # sorting will prefer e.g. "2.0" over "2.0rc1" 180 | if ref.startswith(tag_prefix): 181 | r = ref[len(tag_prefix):] 182 | if verbose: 183 | print("picking %s" % r) 184 | return { 185 | "version": r, 186 | "full-revisionid": keywords["full"].strip(), 187 | "dirty": False, 188 | "error": None 189 | } 190 | # no suitable tags, so version is "0+unknown", but full hex is still there 191 | if verbose: 192 | print("no suitable tags, using unknown + full revision id") 193 | return { 194 | "version": "0+unknown", 195 | "full-revisionid": keywords["full"].strip(), 196 | "dirty": False, 197 | "error": "no suitable tags" 198 | } 199 | 200 | 201 | @register_vcs_handler("git", "pieces_from_vcs") 202 | def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command): 203 | """Get version from 'git describe' in the root of the source tree. 204 | 205 | This only gets called if the git-archive 'subst' keywords were *not* 206 | expanded, and _version.py hasn't already been rewritten with a short 207 | version string, meaning we're inside a checked out source tree. 
208 | """ 209 | if not os.path.exists(os.path.join(root, ".git")): 210 | if verbose: 211 | print("no .git in %s" % root) 212 | raise NotThisMethod("no .git directory") 213 | 214 | GITS = ["git"] 215 | if sys.platform == "win32": 216 | GITS = ["git.cmd", "git.exe"] 217 | # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty] 218 | # if there isn't one, this yields HEX[-dirty] (no NUM) 219 | describe_out = run_command( 220 | GITS, 221 | ["describe", "--tags", "--dirty", "--always", "--long", "--match", "%s*" % tag_prefix], 222 | cwd=root) 223 | # --long was added in git-1.5.5 224 | if describe_out is None: 225 | raise NotThisMethod("'git describe' failed") 226 | describe_out = describe_out.strip() 227 | full_out = run_command(GITS, ["rev-parse", "HEAD"], cwd=root) 228 | if full_out is None: 229 | raise NotThisMethod("'git rev-parse' failed") 230 | full_out = full_out.strip() 231 | 232 | pieces = {} 233 | pieces["long"] = full_out 234 | pieces["short"] = full_out[:7] # maybe improved later 235 | pieces["error"] = None 236 | 237 | # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty] 238 | # TAG might have hyphens. 239 | git_describe = describe_out 240 | 241 | # look for -dirty suffix 242 | dirty = git_describe.endswith("-dirty") 243 | pieces["dirty"] = dirty 244 | if dirty: 245 | git_describe = git_describe[:git_describe.rindex("-dirty")] 246 | 247 | # now we have TAG-NUM-gHEX or HEX 248 | 249 | if "-" in git_describe: 250 | # TAG-NUM-gHEX 251 | mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe) 252 | if not mo: 253 | # unparseable. Maybe git-describe is misbehaving? 
254 | pieces["error"] = ("unable to parse git-describe output: '%s'" % describe_out) 255 | return pieces 256 | 257 | # tag 258 | full_tag = mo.group(1) 259 | if not full_tag.startswith(tag_prefix): 260 | if verbose: 261 | fmt = "tag '%s' doesn't start with prefix '%s'" 262 | print(fmt % (full_tag, tag_prefix)) 263 | pieces["error"] = ("tag '%s' doesn't start with prefix '%s'" % (full_tag, tag_prefix)) 264 | return pieces 265 | pieces["closest-tag"] = full_tag[len(tag_prefix):] 266 | 267 | # distance: number of commits since tag 268 | pieces["distance"] = int(mo.group(2)) 269 | 270 | # commit: short hex revision ID 271 | pieces["short"] = mo.group(3) 272 | 273 | else: 274 | # HEX: no tags 275 | pieces["closest-tag"] = None 276 | count_out = run_command(GITS, ["rev-list", "HEAD", "--count"], cwd=root) 277 | pieces["distance"] = int(count_out) # total number of commits 278 | 279 | return pieces 280 | 281 | 282 | def plus_or_dot(pieces): 283 | """Return a + if we don't already have one, else return a .""" 284 | if "+" in pieces.get("closest-tag", ""): 285 | return "." 286 | return "+" 287 | 288 | 289 | def render_pep440(pieces): 290 | """Build up version string, with post-release "local version identifier". 291 | 292 | Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you 293 | get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty 294 | 295 | Exceptions: 296 | 1: no tags. git_describe was just HEX. 
Eexceptions: → Exceptions:
0.postDISTANCE[.dev0] 363 | """ 364 | if pieces["closest-tag"]: 365 | rendered = pieces["closest-tag"] 366 | if pieces["distance"] or pieces["dirty"]: 367 | rendered += ".post%d" % pieces["distance"] 368 | if pieces["dirty"]: 369 | rendered += ".dev0" 370 | else: 371 | # exception #1 372 | rendered = "0.post%d" % pieces["distance"] 373 | if pieces["dirty"]: 374 | rendered += ".dev0" 375 | return rendered 376 | 377 | 378 | def render_git_describe(pieces): 379 | """TAG[-DISTANCE-gHEX][-dirty]. 380 | 381 | Like 'git describe --tags --dirty --always'. 382 | 383 | Exceptions: 384 | 1: no tags. HEX[-dirty] (note: no 'g' prefix) 385 | """ 386 | if pieces["closest-tag"]: 387 | rendered = pieces["closest-tag"] 388 | if pieces["distance"]: 389 | rendered += "-%d-g%s" % (pieces["distance"], pieces["short"]) 390 | else: 391 | # exception #1 392 | rendered = pieces["short"] 393 | if pieces["dirty"]: 394 | rendered += "-dirty" 395 | return rendered 396 | 397 | 398 | def render_git_describe_long(pieces): 399 | """TAG-DISTANCE-gHEX[-dirty]. 400 | 401 | Like 'git describe --tags --dirty --always -long'. 402 | The distance/hash is unconditional. 403 | 404 | Exceptions: 405 | 1: no tags. 
HEX[-dirty] (note: no 'g' prefix) 406 | """ 407 | if pieces["closest-tag"]: 408 | rendered = pieces["closest-tag"] 409 | rendered += "-%d-g%s" % (pieces["distance"], pieces["short"]) 410 | else: 411 | # exception #1 412 | rendered = pieces["short"] 413 | if pieces["dirty"]: 414 | rendered += "-dirty" 415 | return rendered 416 | 417 | 418 | def render(pieces, style): 419 | """Render the given version pieces into the requested style.""" 420 | if pieces["error"]: 421 | return { 422 | "version": "unknown", 423 | "full-revisionid": pieces.get("long"), 424 | "dirty": None, 425 | "error": pieces["error"] 426 | } 427 | 428 | if not style or style == "default": 429 | style = "pep440" # the default 430 | 431 | if style == "pep440": 432 | rendered = render_pep440(pieces) 433 | elif style == "pep440-pre": 434 | rendered = render_pep440_pre(pieces) 435 | elif style == "pep440-post": 436 | rendered = render_pep440_post(pieces) 437 | elif style == "pep440-old": 438 | rendered = render_pep440_old(pieces) 439 | elif style == "git-describe": 440 | rendered = render_git_describe(pieces) 441 | elif style == "git-describe-long": 442 | rendered = render_git_describe_long(pieces) 443 | else: 444 | raise ValueError("unknown style '%s'" % style) 445 | 446 | return { 447 | "version": rendered, 448 | "full-revisionid": pieces["long"], 449 | "dirty": pieces["dirty"], 450 | "error": None 451 | } 452 | 453 | 454 | def get_versions(): 455 | """Get version information or return default if unable to do so.""" 456 | # I am in _version.py, which lives at ROOT/VERSIONFILE_SOURCE. If we have 457 | # __file__, we can work backwards from there to the root. Some 458 | # py2exe/bbfreeze/non-CPython implementations don't do __file__, in which 459 | # case we can only use expanded keywords. 
460 | 461 | cfg = get_config() 462 | verbose = cfg.verbose 463 | 464 | try: 465 | return git_versions_from_keywords(get_keywords(), cfg.tag_prefix, verbose) 466 | except NotThisMethod: 467 | pass 468 | 469 | try: 470 | root = os.path.realpath(__file__) 471 | # versionfile_source is the relative path from the top of the source 472 | # tree (where the .git directory might live) to this file. Invert 473 | # this to find the root from __file__. 474 | for i in cfg.versionfile_source.split('/'): 475 | root = os.path.dirname(root) 476 | except NameError: 477 | return { 478 | "version": "0+unknown", 479 | "full-revisionid": None, 480 | "dirty": None, 481 | "error": "unable to find root of source tree" 482 | } 483 | 484 | try: 485 | pieces = git_pieces_from_vcs(cfg.tag_prefix, root, verbose) 486 | return render(pieces, cfg.style) 487 | except NotThisMethod: 488 | pass 489 | 490 | try: 491 | if cfg.parentdir_prefix: 492 | return versions_from_parentdir(cfg.parentdir_prefix, root, verbose) 493 | except NotThisMethod: 494 | pass 495 | 496 | return { 497 | "version": "0+unknown", 498 | "full-revisionid": None, 499 | "dirty": None, 500 | "error": "unable to compute version" 501 | } 502 | -------------------------------------------------------------------------------- /versioneer.py: -------------------------------------------------------------------------------- 1 | 2 | # Version: 0.16 3 | 4 | """The Versioneer - like a rocketeer, but for versions. 5 | 6 | The Versioneer 7 | ============== 8 | 9 | * like a rocketeer, but for versions! 
to somewhere to your $PATH → to somewhere in your $PATH
"git describe" (for checkouts), which knows 48 | about recent "tags" and an absolute revision-id 49 | * the name of the directory into which the tarball was unpacked 50 | * an expanded VCS keyword ($Id$, etc) 51 | * a `_version.py` created by some earlier build step 52 | 53 | For released software, the version identifier is closely related to a VCS 54 | tag. Some projects use tag names that include more than just the version 55 | string (e.g. "myproject-1.2" instead of just "1.2"), in which case the tool 56 | needs to strip the tag prefix to extract the version identifier. For 57 | unreleased software (between tags), the version identifier should provide 58 | enough information to help developers recreate the same tree, while also 59 | giving them an idea of roughly how old the tree is (after version 1.2, before 60 | version 1.3). Many VCS systems can report a description that captures this, 61 | for example `git describe --tags --dirty --always` reports things like 62 | "0.7-1-g574ab98-dirty" to indicate that the checkout is one revision past the 63 | 0.7 tag, has a unique revision id of "574ab98", and is "dirty" (it has 64 | uncommitted changes. 65 | 66 | The version identifier is used for multiple purposes: 67 | 68 | * to allow the module to self-identify its version: `myproject.__version__` 69 | * to choose a name and prefix for a 'setup.py sdist' tarball 70 | 71 | ## Theory of Operation 72 | 73 | Versioneer works by adding a special `_version.py` file into your source 74 | tree, where your `__init__.py` can import it. This `_version.py` knows how to 75 | dynamically ask the VCS tool for version information at import time. 76 | 77 | `_version.py` also contains `$Revision$` markers, and the installation 78 | process marks `_version.py` to have this marker rewritten with a tag name 79 | during the `git archive` command. As a result, generated tarballs will 80 | contain enough information to get the proper version. 
81 | 82 | To allow `setup.py` to compute a version too, a `versioneer.py` is added to 83 | the top level of your source tree, next to `setup.py` and the `setup.cfg` 84 | that configures it. This overrides several distutils/setuptools commands to 85 | compute the version when invoked, and changes `setup.py build` and `setup.py 86 | sdist` to replace `_version.py` with a small static file that contains just 87 | the generated version data. 88 | 89 | ## Installation 90 | 91 | First, decide on values for the following configuration variables: 92 | 93 | * `VCS`: the version control system you use. Currently accepts "git". 94 | 95 | * `style`: the style of version string to be produced. See "Styles" below for 96 | details. Defaults to "pep440", which looks like 97 | `TAG[+DISTANCE.gSHORTHASH[.dirty]]`. 98 | 99 | * `versionfile_source`: 100 | 101 | A project-relative pathname into which the generated version strings should 102 | be written. This is usually a `_version.py` next to your project's main 103 | `__init__.py` file, so it can be imported at runtime. If your project uses 104 | `src/myproject/__init__.py`, this should be `src/myproject/_version.py`. 105 | This file should be checked in to your VCS as usual: the copy created below 106 | by `setup.py setup_versioneer` will include code that parses expanded VCS 107 | keywords in generated tarballs. The 'build' and 'sdist' commands will 108 | replace it with a copy that has just the calculated version string. 109 | 110 | This must be set even if your project does not have any modules (and will 111 | therefore never import `_version.py`), since "setup.py sdist" -based trees 112 | still need somewhere to record the pre-calculated version strings. Anywhere 113 | in the source tree should do. If there is a `__init__.py` next to your 114 | `_version.py`, the `setup.py setup_versioneer` command (described below) 115 | will append some `__version__`-setting assignments, if they aren't already 116 | present. 
a optional string, frequently the same as tag_prefix → an optional string, frequently the same as tag_prefix
152 | 153 | To versioneer-enable your project: 154 | 155 | * 1: Modify your `setup.cfg`, adding a section named `[versioneer]` and 156 | populating it with the configuration values you decided earlier (note that 157 | the option names are not case-sensitive): 158 | 159 | ```` 160 | [versioneer] 161 | VCS = git 162 | style = pep440 163 | versionfile_source = src/myproject/_version.py 164 | versionfile_build = myproject/_version.py 165 | tag_prefix = 166 | parentdir_prefix = myproject- 167 | ```` 168 | 169 | * 2: Run `versioneer install`. This will do the following: 170 | 171 | * copy `versioneer.py` into the top of your source tree 172 | * create `_version.py` in the right place (`versionfile_source`) 173 | * modify your `__init__.py` (if one exists next to `_version.py`) to define 174 | `__version__` (by calling a function from `_version.py`) 175 | * modify your `MANIFEST.in` to include both `versioneer.py` and the 176 | generated `_version.py` in sdist tarballs 177 | 178 | `versioneer install` will complain about any problems it finds with your 179 | `setup.py` or `setup.cfg`. Run it multiple times until you have fixed all 180 | the problems. 181 | 182 | * 3: add a `import versioneer` to your setup.py, and add the following 183 | arguments to the setup() call: 184 | 185 | version=versioneer.get_version(), 186 | cmdclass=versioneer.get_cmdclass(), 187 | 188 | * 4: commit these changes to your VCS. To make sure you won't forget, 189 | `versioneer install` will mark everything it touched for addition using 190 | `git add`. Don't forget to add `setup.py` and `setup.cfg` too. 191 | 192 | ## Post-Installation Usage 193 | 194 | Once established, all uses of your tree from a VCS checkout should get the 195 | current version string. All generated tarballs should include an embedded 196 | version string (so users who unpack them will not need a VCS tool installed). 
197 | 198 | If you distribute your project through PyPI, then the release process should 199 | boil down to two steps: 200 | 201 | * 1: git tag 1.0 202 | * 2: python setup.py register sdist upload 203 | 204 | If you distribute it through github (i.e. users use github to generate 205 | tarballs with `git archive`), the process is: 206 | 207 | * 1: git tag 1.0 208 | * 2: git push; git push --tags 209 | 210 | Versioneer will report "0+untagged.NUMCOMMITS.gHASH" until your tree has at 211 | least one tag in its history. 212 | 213 | ## Version-String Flavors 214 | 215 | Code which uses Versioneer can learn about its version string at runtime by 216 | importing `_version` from your main `__init__.py` file and running the 217 | `get_versions()` function. From the "outside" (e.g. in `setup.py`), you can 218 | import the top-level `versioneer.py` and run `get_versions()`. 219 | 220 | Both functions return a dictionary with different flavors of version 221 | information: 222 | 223 | * `['version']`: A condensed version string, rendered using the selected 224 | style. This is the most commonly used value for the project's version 225 | string. The default "pep440" style yields strings like `0.11`, 226 | `0.11+2.g1076c97`, or `0.11+2.g1076c97.dirty`. See the "Styles" section 227 | below for alternative styles. 228 | 229 | * `['full-revisionid']`: detailed revision identifier. For Git, this is the 230 | full SHA1 commit id, e.g. "1076c978a8d3cfc70f408fe5974aa6c092c949ac". 231 | 232 | * `['dirty']`: a boolean, True if the tree has uncommitted changes. Note that 233 | this is only accurate if run in a VCS checkout, otherwise it is likely to 234 | be False or None 235 | 236 | * `['error']`: if the version string could not be computed, this will be set 237 | to a string describing the problem, otherwise it will be None. It may be 238 | useful to throw an exception in setup.py if this is set, to avoid e.g. 239 | creating tarballs with a version string of "unknown". 
240 | 241 | Some variants are more useful than others. Including `full-revisionid` in a 242 | bug report should allow developers to reconstruct the exact code being tested 243 | (or indicate the presence of local changes that should be shared with the 244 | developers). `version` is suitable for display in an "about" box or a CLI 245 | `--version` output: it can be easily compared against release notes and lists 246 | of bugs fixed in various releases. 247 | 248 | The installer adds the following text to your `__init__.py` to place a basic 249 | version in `YOURPROJECT.__version__`: 250 | 251 | from ._version import get_versions 252 | __version__ = get_versions()['version'] 253 | del get_versions 254 | 255 | ## Styles 256 | 257 | The setup.cfg `style=` configuration controls how the VCS information is 258 | rendered into a version string. 259 | 260 | The default style, "pep440", produces a PEP440-compliant string, equal to the 261 | un-prefixed tag name for actual releases, and containing an additional "local 262 | version" section with more detail for in-between builds. For Git, this is 263 | TAG[+DISTANCE.gHEX[.dirty]] , using information from `git describe --tags 264 | --dirty --always`. For example "0.11+2.g1076c97.dirty" indicates that the 265 | tree is like the "1076c97" commit but has uncommitted changes (".dirty"), and 266 | that this commit is two revisions ("+2") beyond the "0.11" tag. For released 267 | software (exactly equal to a known tag), the identifier will only contain the 268 | stripped tag, e.g. "0.11". 269 | 270 | Other styles are available. See details.md in the Versioneer source tree for 271 | descriptions. 272 | 273 | ## Debugging 274 | 275 | Versioneer tries to avoid fatal errors: if something goes wrong, it will tend 276 | to return a version of "0+unknown". 
To investigate the problem, run `setup.py 277 | version`, which will run the version-lookup code in a verbose mode, and will 278 | display the full contents of `get_versions()` (including the `error` string, 279 | which may help identify what went wrong). 280 | 281 | ## Updating Versioneer 282 | 283 | To upgrade your project to a new release of Versioneer, do the following: 284 | 285 | * install the new Versioneer (`pip install -U versioneer` or equivalent) 286 | * edit `setup.cfg`, if necessary, to include any new configuration settings 287 | indicated by the release notes 288 | * re-run `versioneer install` in your source tree, to replace 289 | `SRC/_version.py` 290 | * commit any changed files 291 | 292 | ### Upgrading to 0.16 293 | 294 | Nothing special. 295 | 296 | ### Upgrading to 0.15 297 | 298 | Starting with this version, Versioneer is configured with a `[versioneer]` 299 | section in your `setup.cfg` file. Earlier versions required the `setup.py` to 300 | set attributes on the `versioneer` module immediately after import. The new 301 | version will refuse to run (raising an exception during import) until you 302 | have provided the necessary `setup.cfg` section. 303 | 304 | In addition, the Versioneer package provides an executable named 305 | `versioneer`, and the installation process is driven by running `versioneer 306 | install`. In 0.14 and earlier, the executable was named 307 | `versioneer-installer` and was run without an argument. 308 | 309 | ### Upgrading to 0.14 310 | 311 | 0.14 changes the format of the version string. 0.13 and earlier used 312 | hyphen-separated strings like "0.11-2-g1076c97-dirty". 0.14 and beyond use a 313 | plus-separated "local version" section strings, with dot-separated 314 | components, like "0.11+2.g1076c97". PEP440-strict tools did not like the old 315 | format, but should be ok with the new one. 316 | 317 | ### Upgrading from 0.11 to 0.12 318 | 319 | Nothing special. 
320 | 321 | ### Upgrading from 0.10 to 0.11 322 | 323 | You must add a `versioneer.VCS = "git"` to your `setup.py` before re-running 324 | `setup.py setup_versioneer`. This will enable the use of additional 325 | version-control systems (SVN, etc) in the future. 326 | 327 | ## Future Directions 328 | 329 | This tool is designed to make it easily extended to other version-control 330 | systems: all VCS-specific components are in separate directories like 331 | src/git/ . The top-level `versioneer.py` script is assembled from these 332 | components by running make-versioneer.py . In the future, make-versioneer.py 333 | will take a VCS name as an argument, and will construct a version of 334 | `versioneer.py` that is specific to the given VCS. It might also take the 335 | configuration arguments that are currently provided manually during 336 | installation by editing setup.py . Alternatively, it might go the other 337 | direction and include code from all supported VCS systems, reducing the 338 | number of intermediate scripts. 339 | 340 | 341 | ## License 342 | 343 | To make Versioneer easier to embed, all its code is dedicated to the public 344 | domain. The `_version.py` that it creates is also in the public domain. 345 | Specifically, both are released under the Creative Commons "Public Domain 346 | Dedication" license (CC0-1.0), as described in 347 | https://creativecommons.org/publicdomain/zero/1.0/ . 348 | 349 | """ 350 | 351 | from __future__ import print_function 352 | try: 353 | import configparser 354 | except ImportError: 355 | import ConfigParser as configparser 356 | import errno 357 | import json 358 | import os 359 | import re 360 | import subprocess 361 | import sys 362 | 363 | 364 | class VersioneerConfig: 365 | """Container for Versioneer configuration parameters.""" 366 | 367 | 368 | def get_root(): 369 | """Get the project root directory. 370 | 371 | We require that all commands are run from the project root, i.e. 
the 372 | directory that contains setup.py, setup.cfg, and versioneer.py . 373 | """ 374 | root = os.path.realpath(os.path.abspath(os.getcwd())) 375 | setup_py = os.path.join(root, "setup.py") 376 | versioneer_py = os.path.join(root, "versioneer.py") 377 | if not (os.path.exists(setup_py) or os.path.exists(versioneer_py)): 378 | # allow 'python path/to/setup.py COMMAND' 379 | root = os.path.dirname(os.path.realpath(os.path.abspath(sys.argv[0]))) 380 | setup_py = os.path.join(root, "setup.py") 381 | versioneer_py = os.path.join(root, "versioneer.py") 382 | if not (os.path.exists(setup_py) or os.path.exists(versioneer_py)): 383 | err = ("Versioneer was unable to run the project root directory. " 384 | "Versioneer requires setup.py to be executed from " 385 | "its immediate directory (like 'python setup.py COMMAND'), " 386 | "or in a way that lets it use sys.argv[0] to find the root " 387 | "(like 'python path/to/setup.py COMMAND').") 388 | raise VersioneerBadRootError(err) 389 | try: 390 | # Certain runtime workflows (setup.py install/develop in a setuptools 391 | # tree) execute all dependencies in a single python process, so 392 | # "versioneer" may be imported multiple times, and python's shared 393 | # module-import table will cache the first one. So we can't use 394 | # os.path.dirname(__file__), as that will find whichever 395 | # versioneer.py was first imported, even in later projects. 
396 | me = os.path.realpath(os.path.abspath(__file__)) 397 | if os.path.splitext(me)[0] != os.path.splitext(versioneer_py)[0]: 398 | print("Warning: build in %s is using versioneer.py from %s" 399 | % (os.path.dirname(me), versioneer_py)) 400 | except NameError: 401 | pass 402 | return root 403 | 404 | 405 | def get_config_from_root(root): 406 | """Read the project setup.cfg file to determine Versioneer config.""" 407 | # This might raise EnvironmentError (if setup.cfg is missing), or 408 | # configparser.NoSectionError (if it lacks a [versioneer] section), or 409 | # configparser.NoOptionError (if it lacks "VCS="). See the docstring at 410 | # the top of versioneer.py for instructions on writing your setup.cfg . 411 | setup_cfg = os.path.join(root, "setup.cfg") 412 | parser = configparser.SafeConfigParser() 413 | with open(setup_cfg, "r") as f: 414 | parser.readfp(f) 415 | VCS = parser.get("versioneer", "VCS") # mandatory 416 | 417 | def get(parser, name): 418 | if parser.has_option("versioneer", name): 419 | return parser.get("versioneer", name) 420 | return None 421 | cfg = VersioneerConfig() 422 | cfg.VCS = VCS 423 | cfg.style = get(parser, "style") or "" 424 | cfg.versionfile_source = get(parser, "versionfile_source") 425 | cfg.versionfile_build = get(parser, "versionfile_build") 426 | cfg.tag_prefix = get(parser, "tag_prefix") 427 | if cfg.tag_prefix in ("''", '""'): 428 | cfg.tag_prefix = "" 429 | cfg.parentdir_prefix = get(parser, "parentdir_prefix") 430 | cfg.verbose = get(parser, "verbose") 431 | return cfg 432 | 433 | 434 | class NotThisMethod(Exception): 435 | """Exception raised if a method is not valid for the current scenario.""" 436 | 437 | # these dictionaries contain VCS-specific tools 438 | LONG_VERSION_PY = {} 439 | HANDLERS = {} 440 | 441 | 442 | def register_vcs_handler(vcs, method): # decorator 443 | """Decorator to mark a method as the handler for a particular VCS.""" 444 | def decorate(f): 445 | """Store f in HANDLERS[vcs][method].""" 446 | 
if vcs not in HANDLERS: 447 | HANDLERS[vcs] = {} 448 | HANDLERS[vcs][method] = f 449 | return f 450 | return decorate 451 | 452 | 453 | def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False): 454 | """Call the given command(s).""" 455 | assert isinstance(commands, list) 456 | p = None 457 | for c in commands: 458 | try: 459 | dispcmd = str([c] + args) 460 | # remember shell=False, so use git.cmd on windows, not just git 461 | p = subprocess.Popen([c] + args, cwd=cwd, stdout=subprocess.PIPE, 462 | stderr=(subprocess.PIPE if hide_stderr 463 | else None)) 464 | break 465 | except EnvironmentError: 466 | e = sys.exc_info()[1] 467 | if e.errno == errno.ENOENT: 468 | continue 469 | if verbose: 470 | print("unable to run %s" % dispcmd) 471 | print(e) 472 | return None 473 | else: 474 | if verbose: 475 | print("unable to find command, tried %s" % (commands,)) 476 | return None 477 | stdout = p.communicate()[0].strip() 478 | if sys.version_info[0] >= 3: 479 | stdout = stdout.decode() 480 | if p.returncode != 0: 481 | if verbose: 482 | print("unable to run %s (error)" % dispcmd) 483 | return None 484 | return stdout 485 | LONG_VERSION_PY['git'] = ''' 486 | # This file helps to compute a version number in source trees obtained from 487 | # git-archive tarball (such as those provided by githubs download-from-tag 488 | # feature). Distribution tarballs (built by setup.py sdist) and build 489 | # directories (produced by setup.py build) will contain a much shorter file 490 | # that just contains the computed version number. 491 | 492 | # This file is released into the public domain. 
Generated by 493 | # versioneer-0.16 (https://github.com/warner/python-versioneer) 494 | 495 | """Git implementation of _version.py.""" 496 | 497 | import errno 498 | import os 499 | import re 500 | import subprocess 501 | import sys 502 | 503 | 504 | def get_keywords(): 505 | """Get the keywords needed to look up the version information.""" 506 | # these strings will be replaced by git during git-archive. 507 | # setup.py/versioneer.py will grep for the variable names, so they must 508 | # each be defined on a line of their own. _version.py will just call 509 | # get_keywords(). 510 | git_refnames = "%(DOLLAR)sFormat:%%d%(DOLLAR)s" 511 | git_full = "%(DOLLAR)sFormat:%%H%(DOLLAR)s" 512 | keywords = {"refnames": git_refnames, "full": git_full} 513 | return keywords 514 | 515 | 516 | class VersioneerConfig: 517 | """Container for Versioneer configuration parameters.""" 518 | 519 | 520 | def get_config(): 521 | """Create, populate and return the VersioneerConfig() object.""" 522 | # these strings are filled in when 'setup.py versioneer' creates 523 | # _version.py 524 | cfg = VersioneerConfig() 525 | cfg.VCS = "git" 526 | cfg.style = "%(STYLE)s" 527 | cfg.tag_prefix = "%(TAG_PREFIX)s" 528 | cfg.parentdir_prefix = "%(PARENTDIR_PREFIX)s" 529 | cfg.versionfile_source = "%(VERSIONFILE_SOURCE)s" 530 | cfg.verbose = False 531 | return cfg 532 | 533 | 534 | class NotThisMethod(Exception): 535 | """Exception raised if a method is not valid for the current scenario.""" 536 | 537 | 538 | LONG_VERSION_PY = {} 539 | HANDLERS = {} 540 | 541 | 542 | def register_vcs_handler(vcs, method): # decorator 543 | """Decorator to mark a method as the handler for a particular VCS.""" 544 | def decorate(f): 545 | """Store f in HANDLERS[vcs][method].""" 546 | if vcs not in HANDLERS: 547 | HANDLERS[vcs] = {} 548 | HANDLERS[vcs][method] = f 549 | return f 550 | return decorate 551 | 552 | 553 | def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False): 554 | """Call the given 
command(s).""" 555 | assert isinstance(commands, list) 556 | p = None 557 | for c in commands: 558 | try: 559 | dispcmd = str([c] + args) 560 | # remember shell=False, so use git.cmd on windows, not just git 561 | p = subprocess.Popen([c] + args, cwd=cwd, stdout=subprocess.PIPE, 562 | stderr=(subprocess.PIPE if hide_stderr 563 | else None)) 564 | break 565 | except EnvironmentError: 566 | e = sys.exc_info()[1] 567 | if e.errno == errno.ENOENT: 568 | continue 569 | if verbose: 570 | print("unable to run %%s" %% dispcmd) 571 | print(e) 572 | return None 573 | else: 574 | if verbose: 575 | print("unable to find command, tried %%s" %% (commands,)) 576 | return None 577 | stdout = p.communicate()[0].strip() 578 | if sys.version_info[0] >= 3: 579 | stdout = stdout.decode() 580 | if p.returncode != 0: 581 | if verbose: 582 | print("unable to run %%s (error)" %% dispcmd) 583 | return None 584 | return stdout 585 | 586 | 587 | def versions_from_parentdir(parentdir_prefix, root, verbose): 588 | """Try to determine the version from the parent directory name. 589 | 590 | Source tarballs conventionally unpack into a directory that includes 591 | both the project name and a version string. 592 | """ 593 | dirname = os.path.basename(root) 594 | if not dirname.startswith(parentdir_prefix): 595 | if verbose: 596 | print("guessing rootdir is '%%s', but '%%s' doesn't start with " 597 | "prefix '%%s'" %% (root, dirname, parentdir_prefix)) 598 | raise NotThisMethod("rootdir doesn't start with parentdir_prefix") 599 | return {"version": dirname[len(parentdir_prefix):], 600 | "full-revisionid": None, 601 | "dirty": False, "error": None} 602 | 603 | 604 | @register_vcs_handler("git", "get_keywords") 605 | def git_get_keywords(versionfile_abs): 606 | """Extract version information from the given file.""" 607 | # the code embedded in _version.py can just fetch the value of these 608 | # keywords. 
When used from setup.py, we don't want to import _version.py, 609 | # so we do it with a regexp instead. This function is not used from 610 | # _version.py. 611 | keywords = {} 612 | try: 613 | f = open(versionfile_abs, "r") 614 | for line in f.readlines(): 615 | if line.strip().startswith("git_refnames ="): 616 | mo = re.search(r'=\s*"(.*)"', line) 617 | if mo: 618 | keywords["refnames"] = mo.group(1) 619 | if line.strip().startswith("git_full ="): 620 | mo = re.search(r'=\s*"(.*)"', line) 621 | if mo: 622 | keywords["full"] = mo.group(1) 623 | f.close() 624 | except EnvironmentError: 625 | pass 626 | return keywords 627 | 628 | 629 | @register_vcs_handler("git", "keywords") 630 | def git_versions_from_keywords(keywords, tag_prefix, verbose): 631 | """Get version information from git keywords.""" 632 | if not keywords: 633 | raise NotThisMethod("no keywords at all, weird") 634 | refnames = keywords["refnames"].strip() 635 | if refnames.startswith("$Format"): 636 | if verbose: 637 | print("keywords are unexpanded, not using") 638 | raise NotThisMethod("unexpanded keywords, not a git-archive tarball") 639 | refs = set([r.strip() for r in refnames.strip("()").split(",")]) 640 | # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of 641 | # just "foo-1.0". If we see a "tag: " prefix, prefer those. 642 | TAG = "tag: " 643 | tags = set([r[len(TAG):] for r in refs if r.startswith(TAG)]) 644 | if not tags: 645 | # Either we're using git < 1.8.3, or there really are no tags. We use 646 | # a heuristic: assume all version tags have a digit. The old git %%d 647 | # expansion behaves like git log --decorate=short and strips out the 648 | # refs/heads/ and refs/tags/ prefixes that would let us distinguish 649 | # between branches and tags. By ignoring refnames without digits, we 650 | # filter out many common branch names like "release" and 651 | # "stabilization", as well as "HEAD" and "master". 
652 | tags = set([r for r in refs if re.search(r'\d', r)]) 653 | if verbose: 654 | print("discarding '%%s', no digits" %% ",".join(refs-tags)) 655 | if verbose: 656 | print("likely tags: %%s" %% ",".join(sorted(tags))) 657 | for ref in sorted(tags): 658 | # sorting will prefer e.g. "2.0" over "2.0rc1" 659 | if ref.startswith(tag_prefix): 660 | r = ref[len(tag_prefix):] 661 | if verbose: 662 | print("picking %%s" %% r) 663 | return {"version": r, 664 | "full-revisionid": keywords["full"].strip(), 665 | "dirty": False, "error": None 666 | } 667 | # no suitable tags, so version is "0+unknown", but full hex is still there 668 | if verbose: 669 | print("no suitable tags, using unknown + full revision id") 670 | return {"version": "0+unknown", 671 | "full-revisionid": keywords["full"].strip(), 672 | "dirty": False, "error": "no suitable tags"} 673 | 674 | 675 | @register_vcs_handler("git", "pieces_from_vcs") 676 | def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command): 677 | """Get version from 'git describe' in the root of the source tree. 678 | 679 | This only gets called if the git-archive 'subst' keywords were *not* 680 | expanded, and _version.py hasn't already been rewritten with a short 681 | version string, meaning we're inside a checked out source tree. 
682 | """ 683 | if not os.path.exists(os.path.join(root, ".git")): 684 | if verbose: 685 | print("no .git in %%s" %% root) 686 | raise NotThisMethod("no .git directory") 687 | 688 | GITS = ["git"] 689 | if sys.platform == "win32": 690 | GITS = ["git.cmd", "git.exe"] 691 | # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty] 692 | # if there isn't one, this yields HEX[-dirty] (no NUM) 693 | describe_out = run_command(GITS, ["describe", "--tags", "--dirty", 694 | "--always", "--long", 695 | "--match", "%%s*" %% tag_prefix], 696 | cwd=root) 697 | # --long was added in git-1.5.5 698 | if describe_out is None: 699 | raise NotThisMethod("'git describe' failed") 700 | describe_out = describe_out.strip() 701 | full_out = run_command(GITS, ["rev-parse", "HEAD"], cwd=root) 702 | if full_out is None: 703 | raise NotThisMethod("'git rev-parse' failed") 704 | full_out = full_out.strip() 705 | 706 | pieces = {} 707 | pieces["long"] = full_out 708 | pieces["short"] = full_out[:7] # maybe improved later 709 | pieces["error"] = None 710 | 711 | # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty] 712 | # TAG might have hyphens. 713 | git_describe = describe_out 714 | 715 | # look for -dirty suffix 716 | dirty = git_describe.endswith("-dirty") 717 | pieces["dirty"] = dirty 718 | if dirty: 719 | git_describe = git_describe[:git_describe.rindex("-dirty")] 720 | 721 | # now we have TAG-NUM-gHEX or HEX 722 | 723 | if "-" in git_describe: 724 | # TAG-NUM-gHEX 725 | mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe) 726 | if not mo: 727 | # unparseable. Maybe git-describe is misbehaving? 
728 | pieces["error"] = ("unable to parse git-describe output: '%%s'" 729 | %% describe_out) 730 | return pieces 731 | 732 | # tag 733 | full_tag = mo.group(1) 734 | if not full_tag.startswith(tag_prefix): 735 | if verbose: 736 | fmt = "tag '%%s' doesn't start with prefix '%%s'" 737 | print(fmt %% (full_tag, tag_prefix)) 738 | pieces["error"] = ("tag '%%s' doesn't start with prefix '%%s'" 739 | %% (full_tag, tag_prefix)) 740 | return pieces 741 | pieces["closest-tag"] = full_tag[len(tag_prefix):] 742 | 743 | # distance: number of commits since tag 744 | pieces["distance"] = int(mo.group(2)) 745 | 746 | # commit: short hex revision ID 747 | pieces["short"] = mo.group(3) 748 | 749 | else: 750 | # HEX: no tags 751 | pieces["closest-tag"] = None 752 | count_out = run_command(GITS, ["rev-list", "HEAD", "--count"], 753 | cwd=root) 754 | pieces["distance"] = int(count_out) # total number of commits 755 | 756 | return pieces 757 | 758 | 759 | def plus_or_dot(pieces): 760 | """Return a + if we don't already have one, else return a .""" 761 | if "+" in pieces.get("closest-tag", ""): 762 | return "." 763 | return "+" 764 | 765 | 766 | def render_pep440(pieces): 767 | """Build up version string, with post-release "local version identifier". 768 | 769 | Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you 770 | get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty 771 | 772 | Exceptions: 773 | 1: no tags. git_describe was just HEX. 
0+untagged.DISTANCE.gHEX[.dirty] 774 | """ 775 | if pieces["closest-tag"]: 776 | rendered = pieces["closest-tag"] 777 | if pieces["distance"] or pieces["dirty"]: 778 | rendered += plus_or_dot(pieces) 779 | rendered += "%%d.g%%s" %% (pieces["distance"], pieces["short"]) 780 | if pieces["dirty"]: 781 | rendered += ".dirty" 782 | else: 783 | # exception #1 784 | rendered = "0+untagged.%%d.g%%s" %% (pieces["distance"], 785 | pieces["short"]) 786 | if pieces["dirty"]: 787 | rendered += ".dirty" 788 | return rendered 789 | 790 | 791 | def render_pep440_pre(pieces): 792 | """TAG[.post.devDISTANCE] -- No -dirty. 793 | 794 | Exceptions: 795 | 1: no tags. 0.post.devDISTANCE 796 | """ 797 | if pieces["closest-tag"]: 798 | rendered = pieces["closest-tag"] 799 | if pieces["distance"]: 800 | rendered += ".post.dev%%d" %% pieces["distance"] 801 | else: 802 | # exception #1 803 | rendered = "0.post.dev%%d" %% pieces["distance"] 804 | return rendered 805 | 806 | 807 | def render_pep440_post(pieces): 808 | """TAG[.postDISTANCE[.dev0]+gHEX] . 809 | 810 | The ".dev0" means dirty. Note that .dev0 sorts backwards 811 | (a dirty tree will appear "older" than the corresponding clean one), 812 | but you shouldn't be releasing software with -dirty anyways. 813 | 814 | Exceptions: 815 | 1: no tags. 0.postDISTANCE[.dev0] 816 | """ 817 | if pieces["closest-tag"]: 818 | rendered = pieces["closest-tag"] 819 | if pieces["distance"] or pieces["dirty"]: 820 | rendered += ".post%%d" %% pieces["distance"] 821 | if pieces["dirty"]: 822 | rendered += ".dev0" 823 | rendered += plus_or_dot(pieces) 824 | rendered += "g%%s" %% pieces["short"] 825 | else: 826 | # exception #1 827 | rendered = "0.post%%d" %% pieces["distance"] 828 | if pieces["dirty"]: 829 | rendered += ".dev0" 830 | rendered += "+g%%s" %% pieces["short"] 831 | return rendered 832 | 833 | 834 | def render_pep440_old(pieces): 835 | """TAG[.postDISTANCE[.dev0]] . 836 | 837 | The ".dev0" means dirty. 838 | 839 | Eexceptions: 840 | 1: no tags. 
0.postDISTANCE[.dev0] 841 | """ 842 | if pieces["closest-tag"]: 843 | rendered = pieces["closest-tag"] 844 | if pieces["distance"] or pieces["dirty"]: 845 | rendered += ".post%%d" %% pieces["distance"] 846 | if pieces["dirty"]: 847 | rendered += ".dev0" 848 | else: 849 | # exception #1 850 | rendered = "0.post%%d" %% pieces["distance"] 851 | if pieces["dirty"]: 852 | rendered += ".dev0" 853 | return rendered 854 | 855 | 856 | def render_git_describe(pieces): 857 | """TAG[-DISTANCE-gHEX][-dirty]. 858 | 859 | Like 'git describe --tags --dirty --always'. 860 | 861 | Exceptions: 862 | 1: no tags. HEX[-dirty] (note: no 'g' prefix) 863 | """ 864 | if pieces["closest-tag"]: 865 | rendered = pieces["closest-tag"] 866 | if pieces["distance"]: 867 | rendered += "-%%d-g%%s" %% (pieces["distance"], pieces["short"]) 868 | else: 869 | # exception #1 870 | rendered = pieces["short"] 871 | if pieces["dirty"]: 872 | rendered += "-dirty" 873 | return rendered 874 | 875 | 876 | def render_git_describe_long(pieces): 877 | """TAG-DISTANCE-gHEX[-dirty]. 878 | 879 | Like 'git describe --tags --dirty --always -long'. 880 | The distance/hash is unconditional. 881 | 882 | Exceptions: 883 | 1: no tags. 
HEX[-dirty] (note: no 'g' prefix) 884 | """ 885 | if pieces["closest-tag"]: 886 | rendered = pieces["closest-tag"] 887 | rendered += "-%%d-g%%s" %% (pieces["distance"], pieces["short"]) 888 | else: 889 | # exception #1 890 | rendered = pieces["short"] 891 | if pieces["dirty"]: 892 | rendered += "-dirty" 893 | return rendered 894 | 895 | 896 | def render(pieces, style): 897 | """Render the given version pieces into the requested style.""" 898 | if pieces["error"]: 899 | return {"version": "unknown", 900 | "full-revisionid": pieces.get("long"), 901 | "dirty": None, 902 | "error": pieces["error"]} 903 | 904 | if not style or style == "default": 905 | style = "pep440" # the default 906 | 907 | if style == "pep440": 908 | rendered = render_pep440(pieces) 909 | elif style == "pep440-pre": 910 | rendered = render_pep440_pre(pieces) 911 | elif style == "pep440-post": 912 | rendered = render_pep440_post(pieces) 913 | elif style == "pep440-old": 914 | rendered = render_pep440_old(pieces) 915 | elif style == "git-describe": 916 | rendered = render_git_describe(pieces) 917 | elif style == "git-describe-long": 918 | rendered = render_git_describe_long(pieces) 919 | else: 920 | raise ValueError("unknown style '%%s'" %% style) 921 | 922 | return {"version": rendered, "full-revisionid": pieces["long"], 923 | "dirty": pieces["dirty"], "error": None} 924 | 925 | 926 | def get_versions(): 927 | """Get version information or return default if unable to do so.""" 928 | # I am in _version.py, which lives at ROOT/VERSIONFILE_SOURCE. If we have 929 | # __file__, we can work backwards from there to the root. Some 930 | # py2exe/bbfreeze/non-CPython implementations don't do __file__, in which 931 | # case we can only use expanded keywords. 
932 | 933 | cfg = get_config() 934 | verbose = cfg.verbose 935 | 936 | try: 937 | return git_versions_from_keywords(get_keywords(), cfg.tag_prefix, 938 | verbose) 939 | except NotThisMethod: 940 | pass 941 | 942 | try: 943 | root = os.path.realpath(__file__) 944 | # versionfile_source is the relative path from the top of the source 945 | # tree (where the .git directory might live) to this file. Invert 946 | # this to find the root from __file__. 947 | for i in cfg.versionfile_source.split('/'): 948 | root = os.path.dirname(root) 949 | except NameError: 950 | return {"version": "0+unknown", "full-revisionid": None, 951 | "dirty": None, 952 | "error": "unable to find root of source tree"} 953 | 954 | try: 955 | pieces = git_pieces_from_vcs(cfg.tag_prefix, root, verbose) 956 | return render(pieces, cfg.style) 957 | except NotThisMethod: 958 | pass 959 | 960 | try: 961 | if cfg.parentdir_prefix: 962 | return versions_from_parentdir(cfg.parentdir_prefix, root, verbose) 963 | except NotThisMethod: 964 | pass 965 | 966 | return {"version": "0+unknown", "full-revisionid": None, 967 | "dirty": None, 968 | "error": "unable to compute version"} 969 | ''' 970 | 971 | 972 | @register_vcs_handler("git", "get_keywords") 973 | def git_get_keywords(versionfile_abs): 974 | """Extract version information from the given file.""" 975 | # the code embedded in _version.py can just fetch the value of these 976 | # keywords. When used from setup.py, we don't want to import _version.py, 977 | # so we do it with a regexp instead. This function is not used from 978 | # _version.py. 
979 | keywords = {} 980 | try: 981 | f = open(versionfile_abs, "r") 982 | for line in f.readlines(): 983 | if line.strip().startswith("git_refnames ="): 984 | mo = re.search(r'=\s*"(.*)"', line) 985 | if mo: 986 | keywords["refnames"] = mo.group(1) 987 | if line.strip().startswith("git_full ="): 988 | mo = re.search(r'=\s*"(.*)"', line) 989 | if mo: 990 | keywords["full"] = mo.group(1) 991 | f.close() 992 | except EnvironmentError: 993 | pass 994 | return keywords 995 | 996 | 997 | @register_vcs_handler("git", "keywords") 998 | def git_versions_from_keywords(keywords, tag_prefix, verbose): 999 | """Get version information from git keywords.""" 1000 | if not keywords: 1001 | raise NotThisMethod("no keywords at all, weird") 1002 | refnames = keywords["refnames"].strip() 1003 | if refnames.startswith("$Format"): 1004 | if verbose: 1005 | print("keywords are unexpanded, not using") 1006 | raise NotThisMethod("unexpanded keywords, not a git-archive tarball") 1007 | refs = set([r.strip() for r in refnames.strip("()").split(",")]) 1008 | # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of 1009 | # just "foo-1.0". If we see a "tag: " prefix, prefer those. 1010 | TAG = "tag: " 1011 | tags = set([r[len(TAG):] for r in refs if r.startswith(TAG)]) 1012 | if not tags: 1013 | # Either we're using git < 1.8.3, or there really are no tags. We use 1014 | # a heuristic: assume all version tags have a digit. The old git %d 1015 | # expansion behaves like git log --decorate=short and strips out the 1016 | # refs/heads/ and refs/tags/ prefixes that would let us distinguish 1017 | # between branches and tags. By ignoring refnames without digits, we 1018 | # filter out many common branch names like "release" and 1019 | # "stabilization", as well as "HEAD" and "master". 
1020 | tags = set([r for r in refs if re.search(r'\d', r)]) 1021 | if verbose: 1022 | print("discarding '%s', no digits" % ",".join(refs-tags)) 1023 | if verbose: 1024 | print("likely tags: %s" % ",".join(sorted(tags))) 1025 | for ref in sorted(tags): 1026 | # sorting will prefer e.g. "2.0" over "2.0rc1" 1027 | if ref.startswith(tag_prefix): 1028 | r = ref[len(tag_prefix):] 1029 | if verbose: 1030 | print("picking %s" % r) 1031 | return {"version": r, 1032 | "full-revisionid": keywords["full"].strip(), 1033 | "dirty": False, "error": None 1034 | } 1035 | # no suitable tags, so version is "0+unknown", but full hex is still there 1036 | if verbose: 1037 | print("no suitable tags, using unknown + full revision id") 1038 | return {"version": "0+unknown", 1039 | "full-revisionid": keywords["full"].strip(), 1040 | "dirty": False, "error": "no suitable tags"} 1041 | 1042 | 1043 | @register_vcs_handler("git", "pieces_from_vcs") 1044 | def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command): 1045 | """Get version from 'git describe' in the root of the source tree. 1046 | 1047 | This only gets called if the git-archive 'subst' keywords were *not* 1048 | expanded, and _version.py hasn't already been rewritten with a short 1049 | version string, meaning we're inside a checked out source tree. 
1050 | """ 1051 | if not os.path.exists(os.path.join(root, ".git")): 1052 | if verbose: 1053 | print("no .git in %s" % root) 1054 | raise NotThisMethod("no .git directory") 1055 | 1056 | GITS = ["git"] 1057 | if sys.platform == "win32": 1058 | GITS = ["git.cmd", "git.exe"] 1059 | # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty] 1060 | # if there isn't one, this yields HEX[-dirty] (no NUM) 1061 | describe_out = run_command(GITS, ["describe", "--tags", "--dirty", 1062 | "--always", "--long", 1063 | "--match", "%s*" % tag_prefix], 1064 | cwd=root) 1065 | # --long was added in git-1.5.5 1066 | if describe_out is None: 1067 | raise NotThisMethod("'git describe' failed") 1068 | describe_out = describe_out.strip() 1069 | full_out = run_command(GITS, ["rev-parse", "HEAD"], cwd=root) 1070 | if full_out is None: 1071 | raise NotThisMethod("'git rev-parse' failed") 1072 | full_out = full_out.strip() 1073 | 1074 | pieces = {} 1075 | pieces["long"] = full_out 1076 | pieces["short"] = full_out[:7] # maybe improved later 1077 | pieces["error"] = None 1078 | 1079 | # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty] 1080 | # TAG might have hyphens. 1081 | git_describe = describe_out 1082 | 1083 | # look for -dirty suffix 1084 | dirty = git_describe.endswith("-dirty") 1085 | pieces["dirty"] = dirty 1086 | if dirty: 1087 | git_describe = git_describe[:git_describe.rindex("-dirty")] 1088 | 1089 | # now we have TAG-NUM-gHEX or HEX 1090 | 1091 | if "-" in git_describe: 1092 | # TAG-NUM-gHEX 1093 | mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe) 1094 | if not mo: 1095 | # unparseable. Maybe git-describe is misbehaving? 
1096 | pieces["error"] = ("unable to parse git-describe output: '%s'" 1097 | % describe_out) 1098 | return pieces 1099 | 1100 | # tag 1101 | full_tag = mo.group(1) 1102 | if not full_tag.startswith(tag_prefix): 1103 | if verbose: 1104 | fmt = "tag '%s' doesn't start with prefix '%s'" 1105 | print(fmt % (full_tag, tag_prefix)) 1106 | pieces["error"] = ("tag '%s' doesn't start with prefix '%s'" 1107 | % (full_tag, tag_prefix)) 1108 | return pieces 1109 | pieces["closest-tag"] = full_tag[len(tag_prefix):] 1110 | 1111 | # distance: number of commits since tag 1112 | pieces["distance"] = int(mo.group(2)) 1113 | 1114 | # commit: short hex revision ID 1115 | pieces["short"] = mo.group(3) 1116 | 1117 | else: 1118 | # HEX: no tags 1119 | pieces["closest-tag"] = None 1120 | count_out = run_command(GITS, ["rev-list", "HEAD", "--count"], 1121 | cwd=root) 1122 | pieces["distance"] = int(count_out) # total number of commits 1123 | 1124 | return pieces 1125 | 1126 | 1127 | def do_vcs_install(manifest_in, versionfile_source, ipy): 1128 | """Git-specific installation logic for Versioneer. 1129 | 1130 | For Git, this means creating/changing .gitattributes to mark _version.py 1131 | for export-time keyword substitution. 
1132 | """ 1133 | GITS = ["git"] 1134 | if sys.platform == "win32": 1135 | GITS = ["git.cmd", "git.exe"] 1136 | files = [manifest_in, versionfile_source] 1137 | if ipy: 1138 | files.append(ipy) 1139 | try: 1140 | me = __file__ 1141 | if me.endswith(".pyc") or me.endswith(".pyo"): 1142 | me = os.path.splitext(me)[0] + ".py" 1143 | versioneer_file = os.path.relpath(me) 1144 | except NameError: 1145 | versioneer_file = "versioneer.py" 1146 | files.append(versioneer_file) 1147 | present = False 1148 | try: 1149 | f = open(".gitattributes", "r") 1150 | for line in f.readlines(): 1151 | if line.strip().startswith(versionfile_source): 1152 | if "export-subst" in line.strip().split()[1:]: 1153 | present = True 1154 | f.close() 1155 | except EnvironmentError: 1156 | pass 1157 | if not present: 1158 | f = open(".gitattributes", "a+") 1159 | f.write("%s export-subst\n" % versionfile_source) 1160 | f.close() 1161 | files.append(".gitattributes") 1162 | run_command(GITS, ["add", "--"] + files) 1163 | 1164 | 1165 | def versions_from_parentdir(parentdir_prefix, root, verbose): 1166 | """Try to determine the version from the parent directory name. 1167 | 1168 | Source tarballs conventionally unpack into a directory that includes 1169 | both the project name and a version string. 1170 | """ 1171 | dirname = os.path.basename(root) 1172 | if not dirname.startswith(parentdir_prefix): 1173 | if verbose: 1174 | print("guessing rootdir is '%s', but '%s' doesn't start with " 1175 | "prefix '%s'" % (root, dirname, parentdir_prefix)) 1176 | raise NotThisMethod("rootdir doesn't start with parentdir_prefix") 1177 | return {"version": dirname[len(parentdir_prefix):], 1178 | "full-revisionid": None, 1179 | "dirty": False, "error": None} 1180 | 1181 | SHORT_VERSION_PY = """ 1182 | # This file was generated by 'versioneer.py' (0.16) from 1183 | # revision-control system data, or from the parent directory name of an 1184 | # unpacked source archive. 
Distribution tarballs contain a pre-generated copy 1185 | # of this file. 1186 | 1187 | import json 1188 | import sys 1189 | 1190 | version_json = ''' 1191 | %s 1192 | ''' # END VERSION_JSON 1193 | 1194 | 1195 | def get_versions(): 1196 | return json.loads(version_json) 1197 | """ 1198 | 1199 | 1200 | def versions_from_file(filename): 1201 | """Try to determine the version from _version.py if present.""" 1202 | try: 1203 | with open(filename) as f: 1204 | contents = f.read() 1205 | except EnvironmentError: 1206 | raise NotThisMethod("unable to read _version.py") 1207 | mo = re.search(r"version_json = '''\n(.*)''' # END VERSION_JSON", 1208 | contents, re.M | re.S) 1209 | if not mo: 1210 | raise NotThisMethod("no version_json in _version.py") 1211 | return json.loads(mo.group(1)) 1212 | 1213 | 1214 | def write_to_version_file(filename, versions): 1215 | """Write the given version number to the given _version.py file.""" 1216 | os.unlink(filename) 1217 | contents = json.dumps(versions, sort_keys=True, 1218 | indent=1, separators=(",", ": ")) 1219 | with open(filename, "w") as f: 1220 | f.write(SHORT_VERSION_PY % contents) 1221 | 1222 | print("set %s to '%s'" % (filename, versions["version"])) 1223 | 1224 | 1225 | def plus_or_dot(pieces): 1226 | """Return a + if we don't already have one, else return a .""" 1227 | if "+" in pieces.get("closest-tag", ""): 1228 | return "." 1229 | return "+" 1230 | 1231 | 1232 | def render_pep440(pieces): 1233 | """Build up version string, with post-release "local version identifier". 1234 | 1235 | Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you 1236 | get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty 1237 | 1238 | Exceptions: 1239 | 1: no tags. git_describe was just HEX. 
def render_pep440_pre(pieces):
    """TAG[.post.devDISTANCE] -- No -dirty.

    Exceptions:
    1: no tags. 0.post.devDISTANCE
    """
    if pieces["closest-tag"]:
        rendered = pieces["closest-tag"]
        if pieces["distance"]:
            rendered += ".post.dev%d" % pieces["distance"]
    else:
        # exception #1
        rendered = "0.post.dev%d" % pieces["distance"]
    return rendered


def render_pep440_post(pieces):
    """TAG[.postDISTANCE[.dev0]+gHEX] .

    The ".dev0" means dirty. Note that .dev0 sorts backwards
    (a dirty tree will appear "older" than the corresponding clean one),
    but you shouldn't be releasing software with -dirty anyways.

    Exceptions:
    1: no tags. 0.postDISTANCE[.dev0]
    """
    if pieces["closest-tag"]:
        rendered = pieces["closest-tag"]
        if pieces["distance"] or pieces["dirty"]:
            rendered += ".post%d" % pieces["distance"]
            if pieces["dirty"]:
                rendered += ".dev0"
            # local segment records the commit hash
            rendered += plus_or_dot(pieces)
            rendered += "g%s" % pieces["short"]
    else:
        # exception #1
        rendered = "0.post%d" % pieces["distance"]
        if pieces["dirty"]:
            rendered += ".dev0"
        rendered += "+g%s" % pieces["short"]
    return rendered


def render_pep440_old(pieces):
    """TAG[.postDISTANCE[.dev0]] .

    The ".dev0" means dirty.

    Exceptions:
    1: no tags. 0.postDISTANCE[.dev0]
    """
    if pieces["closest-tag"]:
        rendered = pieces["closest-tag"]
        if pieces["distance"] or pieces["dirty"]:
            rendered += ".post%d" % pieces["distance"]
            if pieces["dirty"]:
                rendered += ".dev0"
    else:
        # exception #1
        rendered = "0.post%d" % pieces["distance"]
        if pieces["dirty"]:
            rendered += ".dev0"
    return rendered


def render_git_describe(pieces):
    """TAG[-DISTANCE-gHEX][-dirty].

    Like 'git describe --tags --dirty --always'.

    Exceptions:
    1: no tags. HEX[-dirty] (note: no 'g' prefix)
    """
    if pieces["closest-tag"]:
        rendered = pieces["closest-tag"]
        if pieces["distance"]:
            rendered += "-%d-g%s" % (pieces["distance"], pieces["short"])
    else:
        # exception #1
        rendered = pieces["short"]
    if pieces["dirty"]:
        rendered += "-dirty"
    return rendered


def render_git_describe_long(pieces):
    """TAG-DISTANCE-gHEX[-dirty].

    Like 'git describe --tags --dirty --always --long'.
    The distance/hash is unconditional.

    Exceptions:
    1: no tags. HEX[-dirty] (note: no 'g' prefix)
    """
    if pieces["closest-tag"]:
        rendered = pieces["closest-tag"]
        rendered += "-%d-g%s" % (pieces["distance"], pieces["short"])
    else:
        # exception #1
        rendered = pieces["short"]
    if pieces["dirty"]:
        rendered += "-dirty"
    return rendered
def render(pieces, style):
    """Render the given version pieces into the requested style."""
    if pieces["error"]:
        # An upstream step failed; surface the error instead of a version.
        return {"version": "unknown",
                "full-revisionid": pieces.get("long"),
                "dirty": None,
                "error": pieces["error"]}

    if not style or style == "default":
        style = "pep440"  # the default

    # Dispatch to the matching render_* helper defined above.
    if style == "pep440":
        rendered = render_pep440(pieces)
    elif style == "pep440-pre":
        rendered = render_pep440_pre(pieces)
    elif style == "pep440-post":
        rendered = render_pep440_post(pieces)
    elif style == "pep440-old":
        rendered = render_pep440_old(pieces)
    elif style == "git-describe":
        rendered = render_git_describe(pieces)
    elif style == "git-describe-long":
        rendered = render_git_describe_long(pieces)
    else:
        raise ValueError("unknown style '%s'" % style)

    return {"version": rendered, "full-revisionid": pieces["long"],
            "dirty": pieces["dirty"], "error": None}


class VersioneerBadRootError(Exception):
    """The project root directory is unknown or missing key files."""


def get_versions(verbose=False):
    """Get the project version from whatever source is available.

    Returns dict with two keys: 'version' and 'full'.
    """
    if "versioneer" in sys.modules:
        # see the discussion in cmdclass.py:get_cmdclass()
        del sys.modules["versioneer"]

    # get_root/get_config_from_root/HANDLERS are defined earlier in this
    # file (not visible in this chunk).
    root = get_root()
    cfg = get_config_from_root(root)

    assert cfg.VCS is not None, "please set [versioneer]VCS= in setup.cfg"
    handlers = HANDLERS.get(cfg.VCS)
    assert handlers, "unrecognized VCS '%s'" % cfg.VCS
    verbose = verbose or cfg.verbose
    assert cfg.versionfile_source is not None, \
        "please set versioneer.versionfile_source"
    assert cfg.tag_prefix is not None, "please set versioneer.tag_prefix"

    versionfile_abs = os.path.join(root, cfg.versionfile_source)

    # extract version from first of: _version.py, VCS command (e.g. 'git
    # describe'), parentdir. This is meant to work for developers using a
    # source checkout, for users of a tarball created by 'setup.py sdist',
    # and for users of a tarball/zipball created by 'git archive' or github's
    # download-from-tag feature or the equivalent in other VCSes.

    get_keywords_f = handlers.get("get_keywords")
    from_keywords_f = handlers.get("keywords")
    if get_keywords_f and from_keywords_f:
        try:
            keywords = get_keywords_f(versionfile_abs)
            ver = from_keywords_f(keywords, cfg.tag_prefix, verbose)
            if verbose:
                print("got version from expanded keyword %s" % ver)
            return ver
        except NotThisMethod:
            pass

    try:
        ver = versions_from_file(versionfile_abs)
        if verbose:
            print("got version from file %s %s" % (versionfile_abs, ver))
        return ver
    except NotThisMethod:
        pass

    from_vcs_f = handlers.get("pieces_from_vcs")
    if from_vcs_f:
        try:
            pieces = from_vcs_f(cfg.tag_prefix, root, verbose)
            ver = render(pieces, cfg.style)
            if verbose:
                print("got version from VCS %s" % ver)
            return ver
        except NotThisMethod:
            pass

    try:
        if cfg.parentdir_prefix:
            ver = versions_from_parentdir(cfg.parentdir_prefix, root, verbose)
            if verbose:
                print("got version from parentdir %s" % ver)
            return ver
    except NotThisMethod:
        pass

    if verbose:
        print("unable to compute version")

    # Every strategy failed: report an explicit error rather than raising.
    return {"version": "0+unknown", "full-revisionid": None,
            "dirty": None, "error": "unable to compute version"}


def get_version():
    """Get the short version string for this project."""
    return get_versions()["version"]


def get_cmdclass():
    """Get the custom setuptools/distutils subclasses used by Versioneer."""
    if "versioneer" in sys.modules:
        del sys.modules["versioneer"]
        # this fixes the "python setup.py develop" case (also 'install' and
        # 'easy_install .'), in which subdependencies of the main project are
        # built (using setup.py bdist_egg) in the same python process. Assume
        # a main project A and a dependency B, which use different versions
        # of Versioneer. A's setup.py imports A's Versioneer, leaving it in
        # sys.modules by the time B's setup.py is executed, causing B to run
        # with the wrong versioneer. Setuptools wraps the sub-dep builds in a
        # sandbox that restores sys.modules to its pre-build state, so the
        # parent is protected against the child's "import versioneer". By
        # removing ourselves from sys.modules here, before the child build
        # happens, we protect the child from the parent's versioneer too.
        # Also see https://github.com/warner/python-versioneer/issues/52

    cmds = {}

    # we add "version" to both distutils and setuptools
    from distutils.core import Command

    class cmd_version(Command):
        description = "report generated version string"
        user_options = []
        boolean_options = []

        def initialize_options(self):
            pass

        def finalize_options(self):
            pass

        def run(self):
            vers = get_versions(verbose=True)
            print("Version: %s" % vers["version"])
            print(" full-revisionid: %s" % vers.get("full-revisionid"))
            print(" dirty: %s" % vers.get("dirty"))
            if vers["error"]:
                print(" error: %s" % vers["error"])
    cmds["version"] = cmd_version

    # we override "build_py" in both distutils and setuptools
    #
    # most invocation pathways end up running build_py:
    #  distutils/build -> build_py
    #  distutils/install -> distutils/build ->..
    #  setuptools/bdist_wheel -> distutils/install ->..
    #  setuptools/bdist_egg -> distutils/install_lib -> build_py
    #  setuptools/install -> bdist_egg ->..
    #  setuptools/develop -> ?

    # we override different "build_py" commands for both environments
    if "setuptools" in sys.modules:
        from setuptools.command.build_py import build_py as _build_py
    else:
        from distutils.command.build_py import build_py as _build_py

    class cmd_build_py(_build_py):
        def run(self):
            root = get_root()
            cfg = get_config_from_root(root)
            versions = get_versions()
            _build_py.run(self)
            # now locate _version.py in the new build/ directory and replace
            # it with an updated value
            if cfg.versionfile_build:
                target_versionfile = os.path.join(self.build_lib,
                                                  cfg.versionfile_build)
                print("UPDATING %s" % target_versionfile)
                write_to_version_file(target_versionfile, versions)
    cmds["build_py"] = cmd_build_py

    if "cx_Freeze" in sys.modules:  # cx_freeze enabled?
        from cx_Freeze.dist import build_exe as _build_exe

        class cmd_build_exe(_build_exe):
            def run(self):
                root = get_root()
                cfg = get_config_from_root(root)
                versions = get_versions()
                target_versionfile = cfg.versionfile_source
                print("UPDATING %s" % target_versionfile)
                write_to_version_file(target_versionfile, versions)

                _build_exe.run(self)
                os.unlink(target_versionfile)
                # Re-create the long-form _version.py so the source tree is
                # left in its pre-build state (LONG_VERSION_PY is defined
                # earlier in this file).
                with open(cfg.versionfile_source, "w") as f:
                    LONG = LONG_VERSION_PY[cfg.VCS]
                    f.write(LONG %
                            {"DOLLAR": "$",
                             "STYLE": cfg.style,
                             "TAG_PREFIX": cfg.tag_prefix,
                             "PARENTDIR_PREFIX": cfg.parentdir_prefix,
                             "VERSIONFILE_SOURCE": cfg.versionfile_source,
                             })
        cmds["build_exe"] = cmd_build_exe
        del cmds["build_py"]

    # we override different "sdist" commands for both environments
    if "setuptools" in sys.modules:
        from setuptools.command.sdist import sdist as _sdist
    else:
        from distutils.command.sdist import sdist as _sdist

    class cmd_sdist(_sdist):
        def run(self):
            versions = get_versions()
            self._versioneer_generated_versions = versions
            # unless we update this, the command will keep using the old
            # version
            self.distribution.metadata.version = versions["version"]
            return _sdist.run(self)

        def make_release_tree(self, base_dir, files):
            root = get_root()
            cfg = get_config_from_root(root)
            _sdist.make_release_tree(self, base_dir, files)
            # now locate _version.py in the new base_dir directory
            # (remembering that it may be a hardlink) and replace it with an
            # updated value
            target_versionfile = os.path.join(base_dir, cfg.versionfile_source)
            print("UPDATING %s" % target_versionfile)
            write_to_version_file(target_versionfile,
                                  self._versioneer_generated_versions)
    cmds["sdist"] = cmd_sdist

    return cmds


CONFIG_ERROR = """
setup.cfg is missing the necessary Versioneer configuration. You need
a section like:

 [versioneer]
 VCS = git
 style = pep440
 versionfile_source = src/myproject/_version.py
 versionfile_build = myproject/_version.py
 tag_prefix =
 parentdir_prefix = myproject-

You will also need to edit your setup.py to use the results:

 import versioneer
 setup(version=versioneer.get_version(),
       cmdclass=versioneer.get_cmdclass(), ...)

Please read the docstring in ./versioneer.py for configuration instructions,
edit setup.cfg, and re-run the installer or 'python versioneer.py setup'.
"""

SAMPLE_CONFIG = """
# See the docstring in versioneer.py for instructions. Note that you must
# re-run 'versioneer.py setup' after changing this section, and commit the
# resulting files.

[versioneer]
#VCS = git
#style = pep440
#versionfile_source =
#versionfile_build =
#tag_prefix =
#parentdir_prefix =

"""
INIT_PY_SNIPPET = """
from ._version import get_versions
__version__ = get_versions()['version']
del get_versions
"""


def do_setup():
    """Main VCS-independent setup function for installing Versioneer."""
    root = get_root()
    try:
        cfg = get_config_from_root(root)
    except (EnvironmentError, configparser.NoSectionError,
            configparser.NoOptionError) as e:
        # Missing or incomplete [versioneer] section: append a commented
        # sample and tell the user what to fill in.
        if isinstance(e, (EnvironmentError, configparser.NoSectionError)):
            print("Adding sample versioneer config to setup.cfg",
                  file=sys.stderr)
            with open(os.path.join(root, "setup.cfg"), "a") as f:
                f.write(SAMPLE_CONFIG)
        print(CONFIG_ERROR, file=sys.stderr)
        return 1

    # Generate the project's _version.py from the long template
    # (LONG_VERSION_PY is defined earlier in this file).
    print(" creating %s" % cfg.versionfile_source)
    with open(cfg.versionfile_source, "w") as f:
        LONG = LONG_VERSION_PY[cfg.VCS]
        f.write(LONG % {"DOLLAR": "$",
                        "STYLE": cfg.style,
                        "TAG_PREFIX": cfg.tag_prefix,
                        "PARENTDIR_PREFIX": cfg.parentdir_prefix,
                        "VERSIONFILE_SOURCE": cfg.versionfile_source,
                        })

    # Ensure the package __init__.py exposes __version__ via _version.py.
    ipy = os.path.join(os.path.dirname(cfg.versionfile_source),
                       "__init__.py")
    if os.path.exists(ipy):
        try:
            with open(ipy, "r") as f:
                old = f.read()
        except EnvironmentError:
            old = ""
        if INIT_PY_SNIPPET not in old:
            print(" appending to %s" % ipy)
            with open(ipy, "a") as f:
                f.write(INIT_PY_SNIPPET)
        else:
            print(" %s unmodified" % ipy)
    else:
        print(" %s doesn't exist, ok" % ipy)
        ipy = None

    # Make sure both the top-level "versioneer.py" and versionfile_source
    # (PKG/_version.py, used by runtime code) are in MANIFEST.in, so
    # they'll be copied into source distributions. Pip won't be able to
    # install the package without this.
    manifest_in = os.path.join(root, "MANIFEST.in")
    simple_includes = set()
    try:
        with open(manifest_in, "r") as f:
            for line in f:
                if line.startswith("include "):
                    for include in line.split()[1:]:
                        simple_includes.add(include)
    except EnvironmentError:
        pass
    # That doesn't cover everything MANIFEST.in can do
    # (http://docs.python.org/2/distutils/sourcedist.html#commands), so
    # it might give some false negatives. Appending redundant 'include'
    # lines is safe, though.
    if "versioneer.py" not in simple_includes:
        print(" appending 'versioneer.py' to MANIFEST.in")
        with open(manifest_in, "a") as f:
            f.write("include versioneer.py\n")
    else:
        print(" 'versioneer.py' already in MANIFEST.in")
    if cfg.versionfile_source not in simple_includes:
        print(" appending versionfile_source ('%s') to MANIFEST.in" %
              cfg.versionfile_source)
        with open(manifest_in, "a") as f:
            f.write("include %s\n" % cfg.versionfile_source)
    else:
        print(" versionfile_source already in MANIFEST.in")

    # Make VCS-specific changes. For git, this means creating/changing
    # .gitattributes to mark _version.py for export-time keyword
    # substitution. (do_vcs_install is defined earlier in this file.)
    do_vcs_install(manifest_in, cfg.versionfile_source, ipy)
    return 0


def scan_setup_py():
    """Validate the contents of setup.py against Versioneer's expectations.

    Returns the number of problems found (0 means setup.py looks fine).
    """
    found = set()
    setters = False
    errors = 0
    with open("setup.py", "r") as f:
        for line in f.readlines():
            if "import versioneer" in line:
                found.add("import")
            if "versioneer.get_cmdclass()" in line:
                found.add("cmdclass")
            if "versioneer.get_version()" in line:
                found.add("get_version")
            # Old-style (pre-setup.cfg) configuration assignments.
            if "versioneer.VCS" in line:
                setters = True
            if "versioneer.versionfile_source" in line:
                setters = True
    if len(found) != 3:
        print("")
        print("Your setup.py appears to be missing some important items")
        print("(but I might be wrong). Please make sure it has something")
        print("roughly like the following:")
        print("")
        print(" import versioneer")
        print(" setup( version=versioneer.get_version(),")
        print(" cmdclass=versioneer.get_cmdclass(), ...)")
        print("")
        errors += 1
    if setters:
        print("You should remove lines like 'versioneer.VCS = ' and")
        print("'versioneer.versionfile_source = ' . This configuration")
        print("now lives in setup.cfg, and should be removed from setup.py")
        print("")
        errors += 1
    return errors

if __name__ == "__main__":
    # 'python versioneer.py setup' installs Versioneer into the project.
    cmd = sys.argv[1]
    if cmd == "setup":
        errors = do_setup()
        errors += scan_setup_py()
        if errors:
            sys.exit(1)