├── tests ├── __init__.py ├── requirements.txt ├── fixtures │ ├── foo_requirement │ │ ├── foo_requirement │ │ │ └── __init__.py │ │ └── setup.py │ ├── .gitignore │ └── test_requirement │ │ ├── test_requirement │ │ └── __init__.py │ │ └── setup.py ├── test_cli.py └── tests.py ├── .python-version ├── docs ├── _static │ └── placeholder_do_not_delete ├── _config.yml ├── requirements.txt ├── quickstart.rst ├── release_notes.rst ├── index.rst ├── installation.rst ├── _quickstart.rst ├── development.rst ├── user_guide.rst ├── Makefile └── conf.py ├── setup.py ├── .envrc ├── .gitignore ├── AUTHORS ├── MANIFEST.in ├── .travis.yml ├── tox.ini ├── setup.cfg ├── LICENSE ├── README.rst ├── CHANGELOG.rst └── terrarium.py /tests/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /.python-version: -------------------------------------------------------------------------------- 1 | 2.7.15 2 | -------------------------------------------------------------------------------- /tests/requirements.txt: -------------------------------------------------------------------------------- 1 | nose 2 | -------------------------------------------------------------------------------- /docs/_static/placeholder_do_not_delete: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /docs/_config.yml: -------------------------------------------------------------------------------- 1 | theme: jekyll-theme-minimal -------------------------------------------------------------------------------- /docs/requirements.txt: -------------------------------------------------------------------------------- 1 | sphinx 2 | sphinx_rtd_theme 3 | -------------------------------------------------------------------------------- /tests/fixtures/foo_requirement/foo_requirement/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /tests/fixtures/.gitignore: -------------------------------------------------------------------------------- 1 | /env.*/ 2 | /env/ 3 | /venv1/ 4 | -------------------------------------------------------------------------------- /tests/fixtures/test_requirement/test_requirement/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | from setuptools import setup 2 | setup(use_scm_version=True) 3 | -------------------------------------------------------------------------------- /.envrc: -------------------------------------------------------------------------------- 1 | # install direnv to utilize this configuration 2 | dotenv 3 | layout python 4 | -------------------------------------------------------------------------------- /docs/quickstart.rst: -------------------------------------------------------------------------------- 1 | ########### 2 | Quick Start 3 | ########### 4 | 5 | .. 
include:: _quickstart.rst 6 | -------------------------------------------------------------------------------- /docs/release_notes.rst: -------------------------------------------------------------------------------- 1 | ############# 2 | Release Notes 3 | ############# 4 | 5 | .. include:: ../CHANGELOG.rst 6 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | docs/_build 2 | build 3 | dist 4 | terrarium.egg-info 5 | *.pyc 6 | .eggs 7 | .direnv 8 | .env 9 | .tox 10 | -------------------------------------------------------------------------------- /docs/index.rst: -------------------------------------------------------------------------------- 1 | terrarium 2 | ========= 3 | 4 | .. toctree:: 5 | :maxdepth: 2 6 | 7 | 8 | installation 9 | quickstart 10 | user_guide 11 | development 12 | release_notes 13 | -------------------------------------------------------------------------------- /AUTHORS: -------------------------------------------------------------------------------- 1 | Kyle Gibson 2 | Wes Winham 3 | Marc Abramowitz 4 | Christian Ourdard 5 | Karl Norby 6 | Colin Su 7 | Mark Hellewell 8 | Kyle Kelley 9 | Stefan Wehrmeyer 10 | -------------------------------------------------------------------------------- /MANIFEST.in: -------------------------------------------------------------------------------- 1 | include AUTHORS LICENSE README.rst CHANGELOG.rst tox.ini 2 | include terrarium.py 3 | recursive-include docs *.rst *.py Makefile 4 | recursive-include requirements *.txt 5 | recursive-include tests *.py 6 | 7 | global-exclude *.py[co] 8 | global-exclude __pycache__ 9 | -------------------------------------------------------------------------------- /.travis.yml: -------------------------------------------------------------------------------- 1 | language: python 2 | install: 3 | - pip install -U pip 4 | - pip install tox 5 | env: 6 | - TOX_ENV=py27-virtualenv-latest 7 | - TOX_ENV=py27-virtualenv-previous 8 | - TOX_ENV=py27-flake8 9 | - TOX_ENV=docs 10 | script: tox -e $TOX_ENV 11 | notifications: 12 | email: 13 | - kyle.gibson@frozenonline.com 14 | -------------------------------------------------------------------------------- /tests/fixtures/test_requirement/setup.py: -------------------------------------------------------------------------------- 1 | from setuptools import setup 2 | 3 | setup_options = dict( 4 | name='test_requirement', 5 | version='0.1.0dev', 6 | author='Kyle Gibson', 7 | author_email='kyle.gibson@frozenonline.com', 8 | description='Test requirement fixture', 9 | license='BSD', 10 | url='', 11 | packages=['test_requirement'], 12 | long_description='', 13 | install_requires=[], 14 | classifiers=[], 15 | zip_safe=False, 16 | ) 17 | 18 | setup(**setup_options) 19 | -------------------------------------------------------------------------------- /tests/fixtures/foo_requirement/setup.py: -------------------------------------------------------------------------------- 1 | from setuptools import setup 2 | 3 | setup_options = dict( 4 | name='foo_requirement', 5 | version='0.1.0dev', 6 | author='Kyle Gibson', 7 | author_email='kyle.gibson@frozenonline.com', 8 | description='Another test requirement fixture', 9 | license='BSD', 10 | url='', 11 | packages=['foo_requirement'], 12 | long_description='', 13 | install_requires=['test_requirement'], 14 | classifiers=[], 15 | zip_safe=False, 16 | ) 17 | 18 | setup(**setup_options) 19 | 
-------------------------------------------------------------------------------- /docs/installation.rst: -------------------------------------------------------------------------------- 1 | ############ 2 | Installation 3 | ############ 4 | 5 | Requirements 6 | ############ 7 | 8 | * CPython 2.7 9 | * `virtualenv `_ 10 | (only the latest 2 versions are officially supported and tested) 11 | 12 | .. note:: 13 | 14 | As Python 2.7 is no longer being maintained, 15 | terrarium's support for 2.7 16 | is officially deprecated 17 | and will be removed in a future version. 18 | 19 | Install using pip 20 | ################# 21 | 22 | .. code-block:: shell-session 23 | 24 | $ pip install terrarium 25 | 26 | Upgrade using pip 27 | ################# 28 | 29 | .. code-block:: shell-session 30 | 31 | $ pip install -U terrarium 32 | 33 | -------------------------------------------------------------------------------- /tox.ini: -------------------------------------------------------------------------------- 1 | # Tox (http://tox.testrun.org/) is a tool for running tests 2 | # in multiple virtualenvs. This configuration file will run the 3 | # test suite on all supported python versions. To use it, "pip install tox" 4 | # and then run "tox" from this directory. 5 | 6 | [tox] 7 | envlist = 8 | py27-virtualenv-latest 9 | py27-virtualenv-previous 10 | py27-flake8 11 | docs 12 | 13 | [testenv] 14 | commands = 15 | pip freeze --all --local 16 | nosetests -v {posargs:{toxinidir}/tests} 17 | deps = 18 | -rtests/requirements.txt 19 | virtualenv-latest: virtualenv<17 20 | virtualenv-previous: virtualenv<16 21 | setenv = 22 | PYTHONWARNINGS=ignore:DEPRECATION::pip._internal.cli.base_command 23 | TMPDIR="{envtmpdir}" 24 | 25 | [testenv:docs] 26 | commands = 27 | sphinx-build -W -b html -d {envtmpdir}/doctrees {toxinidir}/docs {toxinidir}/docs/_build/html 28 | deps = -rdocs/requirements.txt 29 | skipsdist = True 30 | 31 | [testenv:py27-flake8] 32 | deps = flake8 33 | commands = flake8 {toxinidir} 34 | skipsdist = True 35 | -------------------------------------------------------------------------------- /docs/_quickstart.rst: -------------------------------------------------------------------------------- 1 | .. code-block:: shell-session 2 | 3 | terrarium [options] COMMAND [requirements files...] 4 | 5 | See ``terrarium --help`` for a complete list of options and commands. 6 | 7 | Creating a new environment 8 | ########################## 9 | 10 | The following example will create a new virtual environment named ``env`` that 11 | includes the packages defined in ``requirements.txt`` 12 | 13 | .. code-block:: shell-session 14 | 15 | $ terrarium --target env install requirements.txt 16 | 17 | Replacing an existing environment 18 | ################################# 19 | 20 | The following example demonstrates how terrarium can be used to replace an 21 | existing activated virtual environment with a different set of packages. 22 | 23 | .. code-block:: shell-session 24 | 25 | $ terrarium --target env install requirements.txt 26 | $ source env/bin/activate 27 | $ terrarium install other_requirements.txt 28 | 29 | .. note:: 30 | The environment that was replaced is renamed to ``env.bak``, 31 | and can be restored using ``terrarium revert``. 32 | 33 | .. note:: 34 | After installing the ``other_requirements``, 35 | it is not necessary to run ``deactivate`` 36 | or ``activate`` 37 | to begin using the new environment. 
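A replaced environment can also be restored from its backup
with the ``revert`` command.
The snippet below is an illustrative sketch only;
it assumes ``revert`` honours the same global ``--target`` option as ``install``,
so check ``terrarium --help`` for the exact invocation.

.. code-block:: shell-session

    $ terrarium --target env install other_requirements.txt
    $ terrarium --target env revert  # assumed option spelling; restores env from env.bak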
38 | -------------------------------------------------------------------------------- /setup.cfg: -------------------------------------------------------------------------------- 1 | [metadata] 2 | name = terrarium 3 | description = Package and ship relocatable Python virtual environments 4 | long_description = file: README.rst 5 | author = Kyle Gibson 6 | author-email = kyle.gibson@frozenonline.com 7 | url = https://terrarium.readthedocs.io/ 8 | project_urls = 9 | Source=https://github.com/PolicyStat/terrarium 10 | Tracker=https://github.com/PolicyStat/terrarium/issues 11 | classifiers = 12 | Development Status :: 4 - Beta 13 | Environment :: Console 14 | Intended Audience :: Developers 15 | License :: OSI Approved :: BSD License 16 | Operating System :: POSIX 17 | Topic :: Software Development :: Testing 18 | Topic :: Software Development :: Libraries 19 | Topic :: Utilities 20 | Programming Language :: Python :: 2 21 | Programming Language :: Python :: 2.7 22 | license = BSD 3-Clause 23 | license_file = LICENSE 24 | 25 | [options] 26 | py_modules = terrarium 27 | setup_requires = 28 | setuptools>=30.3.0 29 | setuptools_scm 30 | zip_safe = True 31 | python_requires = ~=2.7.0 32 | install_requires = 33 | virtualenv 34 | wheel 35 | 36 | [options.entry_points] 37 | console_scripts = terrarium = terrarium:main 38 | 39 | [flake8] 40 | max-line-length = 94 41 | exclude = docs,build,dist,*.egg-info,.direnv,.tox,.eggs 42 | select = E,W,F 43 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Copyright (c) 2012, PolicyStat LLC. 2 | All rights reserved. 3 | 4 | Redistribution and use in source and binary forms, with or without 5 | modification, are permitted provided that the following conditions are met: 6 | 7 | * Redistributions of source code must retain the above copyright notice, 8 | this list of conditions and the following disclaimer. 9 | * Redistributions in binary form must reproduce the above copyright 10 | notice, this list of conditions and the following disclaimer in the 11 | documentation and/or other materials provided with the distribution. 12 | 13 | Neither the name of PolicyStat LLC. nor the names of its contributors may be used 14 | to endorse or promote products derived from this software without specific 15 | prior written permission. 16 | 17 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" 18 | AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, 19 | THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR 20 | PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS 21 | BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR 22 | CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF 23 | SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS 24 | INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN 25 | CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) 26 | ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE 27 | POSSIBILITY OF SUCH DAMAGE. 28 | -------------------------------------------------------------------------------- /README.rst: -------------------------------------------------------------------------------- 1 | ######### 2 | Terrarium 3 | ######### 4 | 5 | .. 
image:: https://img.shields.io/pypi/v/terrarium.svg 6 | :target: https://pypi.org/project/terrarium 7 | 8 | .. image:: https://secure.travis-ci.org/PolicyStat/terrarium.png?branch=master 9 | :target: http://travis-ci.org/PolicyStat/terrarium 10 | 11 | * `Installation `_ 12 | * `Documentation `_ 13 | * `Release Notes `_ 14 | * `Github Page `_ 15 | * `Issue Tracking `_ 16 | 17 | Package and ship relocatable python virtual environments, 18 | like a boss. 19 | 20 | Terrarium will package up 21 | and compress a virtualenv for you based on pip requirements 22 | and then let you ship that environment around. 23 | Do the complex dependency math one time 24 | and then every subsequent install is basically at the speed of 25 | file transfer + decompression. 26 | 27 | The Problem Terrarium Solves 28 | ############################ 29 | 30 | Pip and virtualenv are awesome, 31 | but pip is not designed to allow 32 | consistent and reproducible updates of existing environments. 33 | Pip is also a general installation tool, 34 | meaning that it's not near as fast as shipping around compressed source code. 35 | This means that even if you have well-made requirements files, 36 | your pip-based deploys are either slow or inconsistent. 37 | 38 | To get started using terrarium, 39 | see the `Quick Start `_ 40 | guide. 41 | -------------------------------------------------------------------------------- /docs/development.rst: -------------------------------------------------------------------------------- 1 | ########### 2 | Development 3 | ########### 4 | 5 | Building the documentation locally 6 | ################################## 7 | 8 | #. Install ``tox`` 9 | 10 | .. code-block:: shell-session 11 | 12 | $ pip install tox 13 | 14 | #. Use ``tox`` 15 | 16 | .. code-block:: shell-session 17 | 18 | $ tox -e docs 19 | 20 | #. Load HTML documentation in a web browser of your choice: 21 | 22 | .. code-block:: shell-session 23 | 24 | $ browser docs/_build/html/index.html 25 | 26 | Running tests 27 | ############# 28 | 29 | #. Install ``tox`` 30 | 31 | .. code-block:: shell-session 32 | 33 | $ pip install tox 34 | 35 | #. Use ``tox`` 36 | 37 | .. code-block:: shell-session 38 | 39 | $ tox 40 | 41 | Getting involved 42 | ################ 43 | 44 | The terrarium project welcomes help in any of the following ways: 45 | 46 | * Making pull requests on github for code, 47 | tests and documentation. 48 | * Participating on open issues and pull requests, 49 | reviewing changes 50 | 51 | Pull Request Checklist 52 | ====================== 53 | 54 | To have the best chance at an immediate merge, 55 | your pull request should have: 56 | 57 | * A passing Travis-CI build. 58 | If it fails, 59 | check the console output for reasons why. 60 | * New unit tests 61 | for new features 62 | or bug fixes. 63 | * New documentation in ``docs`` 64 | for any new features. 65 | You do want people to know 66 | how to use your new stuff, 67 | right? 68 | 69 | Release process 70 | ############### 71 | 72 | #. Update 73 | `CHANGELOG `_. 74 | #. Bump the version number in 75 | `__init__.py `_ 76 | on master. 77 | #. Tag the version. 78 | #. Generate source and wheel distributions: python setup.py sdist bdist_wheel 79 | #. Upload to PyPI: twine upload dist/* 80 | -------------------------------------------------------------------------------- /docs/user_guide.rst: -------------------------------------------------------------------------------- 1 | ########## 2 | User Guide 3 | ########## 4 | 5 | .. 
include:: _quickstart.rst 6 | 7 | Saving and using environment archives 8 | ##################################### 9 | 10 | Terrarium provides options for archiving and compressing 11 | a freshly installed 12 | and built environment, 13 | either locally or remotely (via Amazon S3). 14 | 15 | When these options are used, 16 | terrarium will first check if the environment has already been saved. 17 | In that case, 18 | terrarium will download the environment archive instead of downloading 19 | and building each individual package specified in the requirements files. 20 | 21 | Storing terrarium environments locally 22 | ====================================== 23 | 24 | Storing terrarium environments locally (or on a shared network disk) 25 | can be achieved using the ``--storage-dir`` option. 26 | 27 | .. code-block:: shell-session 28 | 29 | $ terrarium --target env --storage-dir path/to/environments install requirements.txt 30 | 31 | After building a fresh environment from the requirements in ``requirements.txt``, 32 | terrarium will archive and compress the environment. 33 | Finally, 34 | the compressed version is then copied to the path specified by ``--storage-dir``. 35 | 36 | Storing terrarium environments on Cloud Storage Services (S3, GCS) 37 | ================================================================== 38 | 39 | Terrarium also supports storing and retrieving archives stored on these storage services: 40 | 41 | * Amazon Web Service - S3 42 | * Google Cloud Platform - Google Cloud Storage 43 | 44 | Amazon S3 45 | --------- 46 | 47 | The following options are only available if ``boto`` is installed. 48 | 49 | * ``--s3-bucket`` 50 | * ``--s3-access-key`` 51 | * ``--s3-secret-key`` 52 | * ``--s3-max-retries`` 53 | 54 | Google Cloud Storage 55 | -------------------- 56 | 57 | The following options are only available if ``gcloud`` is installed. 58 | 59 | * ``--gcs-bucket`` 60 | * ``--gcs-client-email`` 61 | * ``--gcs-secret-key`` 62 | * ``--gcs-max-retries`` 63 | 64 | .. note:: 65 | Each of the above options can be specified using environment variables, 66 | e.g. ``S3_BUCKET``, ``GCS_BUCKET`` 67 | instead of being passed in as a parameter. 68 | 69 | Tips 70 | #### 71 | 72 | Using an alternative index server 73 | ================================= 74 | 75 | If you're using an index server other than PyPI 76 | (perhaps an index server with internal-only packages), 77 | then you need to be able to tell terrarium to use that index URL. 78 | Terrarium does not have the 79 | ``-i`` (``--index-url``) 80 | option that pip has, 81 | so how do you indicate the index URL? 82 | Well, you may recall that pip requirements files can also contain command-line options... 83 | So add a line like this to one of your requirements files: 84 | 85 | :: 86 | 87 | --index-url http://internal-index-server.corp/index 88 | 89 | You can add a line like the above to 90 | an existing requirements file 91 | that has a list of packages 92 | or you could add it to 93 | a separate requirements file 94 | and then add that to the terrarium command-line. 95 | 96 | .. code-block:: shell-session 97 | 98 | $ terrarium --target testenv install internal-index-server.txt requirements.txt 99 | -------------------------------------------------------------------------------- /CHANGELOG.rst: -------------------------------------------------------------------------------- 1 | **1.2.0** 2 | 3 | - Support virtualenv 16 and 15 4 | - Added wheel distribution to PyPI. 
5 | - Improved packaging (setup.cfg) and tox configuration 6 | - Announced deprecation for Python 2.7 7 | 8 | **1.1.0** 9 | 10 | - `Dropped support for virtualenv<15 `_ 11 | - `Dropped support for python 2.6 `_ 12 | - `Added support for pip>=10 `_ 13 | - `Added support for latest version of virtualenv `_ 14 | 15 | **1.0.1** 16 | 17 | - `Support latest 2 versions of virtualenv `_ 18 | 19 | **1.0.0** 20 | 21 | - `Support for Google Cloud Storage, in addition to Amazon S3 `_ 22 | - `Preserve order of requirements `_ 23 | 24 | **1.0.0-rc.6** 25 | 26 | - `shlex comments=True breaks remote source requirements `_ 27 | 28 | **1.0.0-rc.5** 29 | 30 | - `Fixed some spelling and grammar in the README `_ 31 | - `Added "Tips" Section to README `_ 32 | - `Permit inline comments in requirements files `_ 33 | - `Added tox.ini for tox `_ 34 | - `Deprecate python 2.5 `_ 35 | - `Update travis configuration to use tox `_ 36 | - `Update documentation to use sphinx `_ 37 | - `Update version number in only one place `_ 38 | - `Moved README content to sphinx docs `_ 39 | 40 | **1.0.0-rc.4** 41 | 42 | - `Add support for pip 1.3.1 `_ 43 | - `Add command to restore backup `_ 44 | - `Added --require-download option `_ 45 | 46 | **1.0.0-rc.3** 47 | 48 | - `Don't print S3 secret key in verbose mode / debug level `_ 49 | - `Update requirements to include support for virtualenv 1.8.4 and others `_ 50 | - `Fixed mac compatibility issues because of bsd tar `_ 51 | - `Improvements for tests `_ 52 | - `Pip 1.2 compatibility `_ 53 | - `Handle comments in requirements files `_ 54 | - `Output when boto not installed is confusing `_ 55 | - `Output on normal install makes it feel like terrarium hung `_ 56 | - `--storage-dir should default to environment variable TERRARIUM_STORAGE_DIR `_ 57 | - `Gracefully handle permission failures when creating environment backup `_ 58 | 59 | 60 | **1.0.0-rc.2** 61 | 62 | - `Cannot call rmtree on a symbolic link `_ 63 | - `Dangling symlink on extract `_ 64 | 65 | **1.0.0-rc.1** 66 | 67 | - `boto.S3Connection should be boto.s3.connection.S3Connection `_ 68 | - `Terrarium needs to fix paths after upload `_ 69 | - `Implement options as described in README spec `_ 70 | -------------------------------------------------------------------------------- /docs/Makefile: -------------------------------------------------------------------------------- 1 | # Makefile for Sphinx documentation 2 | # 3 | 4 | # You can set these variables from the command line. 5 | SPHINXOPTS = 6 | SPHINXBUILD = sphinx-build 7 | PAPER = 8 | BUILDDIR = _build 9 | 10 | # User-friendly check for sphinx-build 11 | ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1) 12 | $(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from http://sphinx-doc.org/) 13 | endif 14 | 15 | # Internal variables. 16 | PAPEROPT_a4 = -D latex_paper_size=a4 17 | PAPEROPT_letter = -D latex_paper_size=letter 18 | ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . 19 | # the i18n builder cannot share the environment and doctrees with the others 20 | I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
21 | 22 | .PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext 23 | 24 | help: 25 | @echo "Please use \`make ' where is one of" 26 | @echo " html to make standalone HTML files" 27 | @echo " dirhtml to make HTML files named index.html in directories" 28 | @echo " singlehtml to make a single large HTML file" 29 | @echo " pickle to make pickle files" 30 | @echo " json to make JSON files" 31 | @echo " htmlhelp to make HTML files and a HTML help project" 32 | @echo " qthelp to make HTML files and a qthelp project" 33 | @echo " devhelp to make HTML files and a Devhelp project" 34 | @echo " epub to make an epub" 35 | @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" 36 | @echo " latexpdf to make LaTeX files and run them through pdflatex" 37 | @echo " latexpdfja to make LaTeX files and run them through platex/dvipdfmx" 38 | @echo " text to make text files" 39 | @echo " man to make manual pages" 40 | @echo " texinfo to make Texinfo files" 41 | @echo " info to make Texinfo files and run them through makeinfo" 42 | @echo " gettext to make PO message catalogs" 43 | @echo " changes to make an overview of all changed/added/deprecated items" 44 | @echo " xml to make Docutils-native XML files" 45 | @echo " pseudoxml to make pseudoxml-XML files for display purposes" 46 | @echo " linkcheck to check all external links for integrity" 47 | @echo " doctest to run all doctests embedded in the documentation (if enabled)" 48 | 49 | clean: 50 | rm -rf $(BUILDDIR)/* 51 | 52 | html: 53 | $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html 54 | @echo 55 | @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." 56 | 57 | dirhtml: 58 | $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml 59 | @echo 60 | @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml." 61 | 62 | singlehtml: 63 | $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml 64 | @echo 65 | @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml." 66 | 67 | pickle: 68 | $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle 69 | @echo 70 | @echo "Build finished; now you can process the pickle files." 71 | 72 | json: 73 | $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json 74 | @echo 75 | @echo "Build finished; now you can process the JSON files." 76 | 77 | htmlhelp: 78 | $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp 79 | @echo 80 | @echo "Build finished; now you can run HTML Help Workshop with the" \ 81 | ".hhp project file in $(BUILDDIR)/htmlhelp." 82 | 83 | qthelp: 84 | $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp 85 | @echo 86 | @echo "Build finished; now you can run "qcollectiongenerator" with the" \ 87 | ".qhcp project file in $(BUILDDIR)/qthelp, like this:" 88 | @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/Terrarium.qhcp" 89 | @echo "To view the help file:" 90 | @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/Terrarium.qhc" 91 | 92 | devhelp: 93 | $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp 94 | @echo 95 | @echo "Build finished." 96 | @echo "To view the help file:" 97 | @echo "# mkdir -p $$HOME/.local/share/devhelp/Terrarium" 98 | @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/Terrarium" 99 | @echo "# devhelp" 100 | 101 | epub: 102 | $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub 103 | @echo 104 | @echo "Build finished. The epub file is in $(BUILDDIR)/epub." 
105 | 106 | latex: 107 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 108 | @echo 109 | @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex." 110 | @echo "Run \`make' in that directory to run these through (pdf)latex" \ 111 | "(use \`make latexpdf' here to do that automatically)." 112 | 113 | latexpdf: 114 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 115 | @echo "Running LaTeX files through pdflatex..." 116 | $(MAKE) -C $(BUILDDIR)/latex all-pdf 117 | @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." 118 | 119 | latexpdfja: 120 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 121 | @echo "Running LaTeX files through platex and dvipdfmx..." 122 | $(MAKE) -C $(BUILDDIR)/latex all-pdf-ja 123 | @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." 124 | 125 | text: 126 | $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text 127 | @echo 128 | @echo "Build finished. The text files are in $(BUILDDIR)/text." 129 | 130 | man: 131 | $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man 132 | @echo 133 | @echo "Build finished. The manual pages are in $(BUILDDIR)/man." 134 | 135 | texinfo: 136 | $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo 137 | @echo 138 | @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo." 139 | @echo "Run \`make' in that directory to run these through makeinfo" \ 140 | "(use \`make info' here to do that automatically)." 141 | 142 | info: 143 | $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo 144 | @echo "Running Texinfo files through makeinfo..." 145 | make -C $(BUILDDIR)/texinfo info 146 | @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo." 147 | 148 | gettext: 149 | $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale 150 | @echo 151 | @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale." 152 | 153 | changes: 154 | $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes 155 | @echo 156 | @echo "The overview file is in $(BUILDDIR)/changes." 157 | 158 | linkcheck: 159 | $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck 160 | @echo 161 | @echo "Link check complete; look for any errors in the above output " \ 162 | "or in $(BUILDDIR)/linkcheck/output.txt." 163 | 164 | doctest: 165 | $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest 166 | @echo "Testing of doctests in the sources finished, look at the " \ 167 | "results in $(BUILDDIR)/doctest/output.txt." 168 | 169 | xml: 170 | $(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml 171 | @echo 172 | @echo "Build finished. The XML files are in $(BUILDDIR)/xml." 173 | 174 | pseudoxml: 175 | $(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml 176 | @echo 177 | @echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml." 178 | -------------------------------------------------------------------------------- /docs/conf.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | # 3 | # Terrarium documentation build configuration file, created by 4 | # sphinx-quickstart on Tue Apr 15 03:34:32 2014. 5 | # 6 | # This file is execfile()d with the current directory set to its 7 | # containing dir. 8 | # 9 | # Note that not all possible configuration values are present in this 10 | # autogenerated file. 11 | # 12 | # All configuration values have a default; values that are commented out 13 | # serve to show the default. 
14 | 15 | import sys 16 | import os 17 | 18 | # If extensions (or modules to document with autodoc) are in another directory, 19 | # add these directories to sys.path here. If the directory is relative to the 20 | # documentation root, use os.path.abspath to make it absolute, like shown here. 21 | #sys.path.insert(0, os.path.abspath('.')) 22 | 23 | # -- General configuration ------------------------------------------------ 24 | 25 | # If your documentation needs a minimal Sphinx version, state it here. 26 | #needs_sphinx = '1.0' 27 | 28 | # Add any Sphinx extension module names here, as strings. They can be 29 | # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom 30 | # ones. 31 | extensions = [] 32 | 33 | # Add any paths that contain templates here, relative to this directory. 34 | templates_path = ['_templates'] 35 | 36 | # The suffix of source filenames. 37 | source_suffix = '.rst' 38 | 39 | # The encoding of source files. 40 | #source_encoding = 'utf-8-sig' 41 | 42 | # The master toctree document. 43 | master_doc = 'index' 44 | 45 | # General information about the project. 46 | project = u'Terrarium' 47 | copyright = u'2014, Kyle Gibson, Wes Winham' 48 | 49 | # The version info for the project you're documenting, acts as replacement for 50 | # |version| and |release|, also used in various other places throughout the 51 | # built documents. 52 | 53 | try: 54 | from terrarium import __version__ 55 | version = release = __version__ 56 | except ImportError: 57 | version = release = 'dev' 58 | 59 | # The language for content autogenerated by Sphinx. Refer to documentation 60 | # for a list of supported languages. 61 | #language = None 62 | 63 | # There are two options for replacing |today|: either, you set today to some 64 | # non-false value, then it is used: 65 | #today = '' 66 | # Else, today_fmt is used as the format for a strftime call. 67 | #today_fmt = '%B %d, %Y' 68 | 69 | # List of patterns, relative to source directory, that match files and 70 | # directories to ignore when looking for source files. 71 | exclude_patterns = ['_build', '_*.rst'] 72 | 73 | # The reST default role (used for this markup: `text`) to use for all 74 | # documents. 75 | #default_role = None 76 | 77 | # If true, '()' will be appended to :func: etc. cross-reference text. 78 | #add_function_parentheses = True 79 | 80 | # If true, the current module name will be prepended to all description 81 | # unit titles (such as .. function::). 82 | #add_module_names = True 83 | 84 | # If true, sectionauthor and moduleauthor directives will be shown in the 85 | # output. They are ignored by default. 86 | #show_authors = False 87 | 88 | # The name of the Pygments (syntax highlighting) style to use. 89 | pygments_style = 'sphinx' 90 | 91 | # A list of ignored prefixes for module index sorting. 92 | #modindex_common_prefix = [] 93 | 94 | # If true, keep warnings as "system message" paragraphs in the built documents. 95 | #keep_warnings = False 96 | 97 | 98 | # -- Options for HTML output ---------------------------------------------- 99 | 100 | # on_rtd is whether we are on readthedocs.org, this line of code grabbed from docs.readthedocs.org 101 | on_rtd = os.environ.get('READTHEDOCS', None) == 'True' 102 | 103 | # The theme to use for HTML and HTML Help pages. See the documentation for 104 | # a list of builtin themes. 
105 | html_theme = 'default' 106 | 107 | if not on_rtd: # only import and set the theme if we're building docs locally 108 | try: 109 | import sphinx_rtd_theme 110 | html_theme = 'sphinx_rtd_theme' 111 | html_theme_path = [sphinx_rtd_theme.get_html_theme_path()] 112 | except ImportError: 113 | pass 114 | 115 | # Theme options are theme-specific and customize the look and feel of a theme 116 | # further. For a list of options available for each theme, see the 117 | # documentation. 118 | #html_theme_options = {} 119 | 120 | # Add any paths that contain custom themes here, relative to this directory. 121 | #html_theme_path = [] 122 | 123 | # The name for this set of Sphinx documents. If None, it defaults to 124 | # " v documentation". 125 | #html_title = None 126 | 127 | # A shorter title for the navigation bar. Default is the same as html_title. 128 | #html_short_title = None 129 | 130 | # The name of an image file (relative to this directory) to place at the top 131 | # of the sidebar. 132 | #html_logo = None 133 | 134 | # The name of an image file (within the static path) to use as favicon of the 135 | # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 136 | # pixels large. 137 | #html_favicon = None 138 | 139 | # Add any paths that contain custom static files (such as style sheets) here, 140 | # relative to this directory. They are copied after the builtin static files, 141 | # so a file named "default.css" will overwrite the builtin "default.css". 142 | html_static_path = ['_static'] 143 | 144 | # Add any extra paths that contain custom files (such as robots.txt or 145 | # .htaccess) here, relative to this directory. These files are copied 146 | # directly to the root of the documentation. 147 | #html_extra_path = [] 148 | 149 | # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, 150 | # using the given strftime format. 151 | #html_last_updated_fmt = '%b %d, %Y' 152 | 153 | # If true, SmartyPants will be used to convert quotes and dashes to 154 | # typographically correct entities. 155 | #html_use_smartypants = True 156 | 157 | # Custom sidebar templates, maps document names to template names. 158 | #html_sidebars = {} 159 | 160 | # Additional templates that should be rendered to pages, maps page names to 161 | # template names. 162 | #html_additional_pages = {} 163 | 164 | # If false, no module index is generated. 165 | #html_domain_indices = True 166 | 167 | # If false, no index is generated. 168 | #html_use_index = True 169 | 170 | # If true, the index is split into individual pages for each letter. 171 | #html_split_index = False 172 | 173 | # If true, links to the reST sources are added to the pages. 174 | #html_show_sourcelink = True 175 | 176 | # If true, "Created using Sphinx" is shown in the HTML footer. Default is True. 177 | #html_show_sphinx = True 178 | 179 | # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. 180 | #html_show_copyright = True 181 | 182 | # If true, an OpenSearch description file will be output, and all pages will 183 | # contain a tag referring to it. The value of this option must be the 184 | # base URL from which the finished HTML is served. 185 | #html_use_opensearch = '' 186 | 187 | # This is the file name suffix for HTML files (e.g. ".xhtml"). 188 | #html_file_suffix = None 189 | 190 | # Output file base name for HTML help builder. 
191 | htmlhelp_basename = 'Terrariumdoc' 192 | 193 | 194 | # -- Options for LaTeX output --------------------------------------------- 195 | 196 | latex_elements = { 197 | # The paper size ('letterpaper' or 'a4paper'). 198 | #'papersize': 'letterpaper', 199 | 200 | # The font size ('10pt', '11pt' or '12pt'). 201 | #'pointsize': '10pt', 202 | 203 | # Additional stuff for the LaTeX preamble. 204 | #'preamble': '', 205 | } 206 | 207 | # Grouping the document tree into LaTeX files. List of tuples 208 | # (source start file, target name, title, 209 | # author, documentclass [howto, manual, or own class]). 210 | latex_documents = [ 211 | ('index', 'Terrarium.tex', u'Terrarium Documentation', 212 | u'Kyle Gibson, Wes Winham', 'manual'), 213 | ] 214 | 215 | # The name of an image file (relative to this directory) to place at the top of 216 | # the title page. 217 | #latex_logo = None 218 | 219 | # For "manual" documents, if this is true, then toplevel headings are parts, 220 | # not chapters. 221 | #latex_use_parts = False 222 | 223 | # If true, show page references after internal links. 224 | #latex_show_pagerefs = False 225 | 226 | # If true, show URL addresses after external links. 227 | #latex_show_urls = False 228 | 229 | # Documents to append as an appendix to all manuals. 230 | #latex_appendices = [] 231 | 232 | # If false, no module index is generated. 233 | #latex_domain_indices = True 234 | 235 | 236 | # -- Options for manual page output --------------------------------------- 237 | 238 | # One entry per manual page. List of tuples 239 | # (source start file, name, description, authors, manual section). 240 | man_pages = [ 241 | ('index', 'terrarium', u'Terrarium Documentation', 242 | [u'Kyle Gibson, Wes Winham'], 1) 243 | ] 244 | 245 | # If true, show URL addresses after external links. 246 | #man_show_urls = False 247 | 248 | 249 | # -- Options for Texinfo output ------------------------------------------- 250 | 251 | # Grouping the document tree into Texinfo files. List of tuples 252 | # (source start file, target name, title, author, 253 | # dir menu entry, description, category) 254 | texinfo_documents = [ 255 | ('index', 'Terrarium', u'Terrarium Documentation', 256 | u'Kyle Gibson, Wes Winham', 'Terrarium', 'One line description of project.', 257 | 'Miscellaneous'), 258 | ] 259 | 260 | # Documents to append as an appendix to all manuals. 261 | #texinfo_appendices = [] 262 | 263 | # If false, no module index is generated. 264 | #texinfo_domain_indices = True 265 | 266 | # How to display URL addresses: 'footnote', 'no', or 'inline'. 267 | #texinfo_show_urls = 'footnote' 268 | 269 | # If true, do not generate a @detailmenu in the "Top" node's menu. 
270 | #texinfo_no_detailmenu = False 271 | -------------------------------------------------------------------------------- /tests/test_cli.py: -------------------------------------------------------------------------------- 1 | import os 2 | import shlex 3 | import subprocess 4 | import sys 5 | import tempfile 6 | import unittest 7 | 8 | 9 | def run_command(command): 10 | params = { 11 | 'stdout': subprocess.PIPE, 12 | 'stderr': subprocess.PIPE, 13 | } 14 | result = subprocess.Popen( 15 | shlex.split(command), 16 | **params 17 | ) 18 | stdout, stderr = result.communicate() 19 | sys.stdout.write(stdout) 20 | sys.stdout.write(stderr) 21 | return result.returncode, stdout.strip(), stderr.strip() 22 | 23 | 24 | def terrarium(options): 25 | command = 'terrarium {}'.format(options) 26 | return run_command(command) 27 | 28 | 29 | def pip(env, options): 30 | pip_path = os.path.join(env, 'bin', 'pip') 31 | command = '{} {}'.format(pip_path, options) 32 | return run_command(command) 33 | 34 | 35 | def pip_freeze(env, strip_versions=True): 36 | rc, stdout, stderr = pip(env, 'freeze -l') 37 | assert rc == 0 38 | assert stderr == '' 39 | packages = stdout.split() 40 | if not strip_versions: 41 | return packages 42 | packages = [ 43 | package.split('=', 1)[0] 44 | for package in packages 45 | ] 46 | return packages 47 | 48 | 49 | class CommandLineInterfaceTestCase(unittest.TestCase): 50 | def setUp(self): 51 | self.target = _unique_name() 52 | 53 | def test_help(self): 54 | options = '--help' 55 | 56 | rc, stdout, stderr = terrarium(options) 57 | self.assertEqual(rc, 0) 58 | self.assertEqual(stderr, '') 59 | assert stdout.startswith('usage: terrarium') 60 | 61 | def test_no_params(self): 62 | options = '' 63 | 64 | rc, stdout, stderr = terrarium(options) 65 | self.assertEqual(rc, 2) 66 | self.assertEqual(stdout, '') 67 | assert stderr.startswith('usage: terrarium') 68 | assert stderr.endswith('terrarium: error: too few arguments') 69 | 70 | def test_install_requirements_file_does_not_exist(self): 71 | file_name = _unique_name() 72 | expected_stdout = '[ERROR] Requirements file {} does not exist'.format(file_name) 73 | 74 | options = '--target={} install {}'.format(self.target, file_name) 75 | 76 | rc, stdout, stderr = terrarium(options) 77 | self.assertEqual(rc, 1) 78 | self.assertEqual(stdout, expected_stdout) 79 | self.assertEqual(stderr, '') 80 | 81 | def test_install_to_target(self): 82 | file_name = _create_simple_requirements_file() 83 | 84 | options = '--target={} install {}'.format(self.target, file_name) 85 | 86 | expected_packages = ['terrarium', 'virtualenv'] 87 | 88 | rc, stdout, stderr = terrarium(options) 89 | self.assertEqual(rc, 0) 90 | self.assertEqual(stdout, '') 91 | self.assertEqual(stderr, '') 92 | 93 | assert _file_exists(self.target, 'bin', 'activate') 94 | assert _file_exists(self.target, 'bin', 'terrarium') 95 | 96 | packages = pip_freeze(self.target) 97 | self.assertEqual(sorted(packages), sorted(expected_packages)) 98 | 99 | def test_install_empty_requirements_creates_empty_virtualenv(self): 100 | file_name = _create_empty_requirements_file() 101 | 102 | options = '--target={} install {}'.format(self.target, file_name) 103 | 104 | expected_stdout = '[WARNING] wheel directory has no wheels' 105 | 106 | rc, stdout, stderr = terrarium(options) 107 | self.assertEqual(rc, 0) 108 | self.assertEqual(stderr, '') 109 | assert stdout.startswith(expected_stdout) 110 | assert _file_exists(self.target, 'bin', 'activate') 111 | 112 | packages = pip_freeze(self.target) 113 | 
self.assertEqual(packages, []) 114 | 115 | def test_install_with_requirements_file_that_includes_other_file(self): 116 | inner_file = _create_simple_requirements_file() 117 | requirement_include = '--requirement {}'.format(inner_file) 118 | file_name = _create_requirements_file([requirement_include]) 119 | 120 | options = '--target={} install {}'.format(self.target, file_name) 121 | 122 | rc, stdout, stderr = terrarium(options) 123 | self.assertEqual(rc, 0) 124 | self.assertEqual(stdout, '') 125 | self.assertEqual(stderr, '') 126 | 127 | assert _file_exists(self.target, 'bin', 'activate') 128 | assert _file_exists(self.target, 'bin', 'terrarium') 129 | 130 | def test_install_will_backup_existing_target(self): 131 | file_name = _create_empty_requirements_file() 132 | 133 | assert not os.path.exists(self.target) 134 | 135 | # Create an existing target with some contents 136 | os.makedirs(self.target) 137 | _create_file('bar', self.target, 'foo') 138 | 139 | options = '--target={} install {}'.format(self.target, file_name) 140 | 141 | rc, stdout, stderr = terrarium(options) 142 | self.assertEqual(rc, 0) 143 | 144 | assert _file_exists(self.target, 'bin', 'activate') 145 | # The original existing target + contents was preserved 146 | assert _file_exists(self.target + '.bak', 'foo') 147 | 148 | def test_existing_backup_is_removed(self): 149 | file_name = _create_empty_requirements_file() 150 | backup_target = self.target + '.bak' 151 | 152 | # Create an existing target with some contents 153 | assert not os.path.exists(self.target) 154 | os.makedirs(self.target) 155 | _create_file('bar', self.target, 'original-target') 156 | 157 | # Create an existing backup target with some contents 158 | assert not os.path.exists(backup_target) 159 | os.makedirs(backup_target) 160 | _create_file('baz', backup_target, 'original-backup') 161 | 162 | options = '--target={} install {}'.format(self.target, file_name) 163 | 164 | rc, stdout, stderr = terrarium(options) 165 | self.assertEqual(rc, 0) 166 | 167 | assert _file_exists(self.target, 'bin', 'activate') 168 | # The original existing target + contents was preserved 169 | assert _file_exists(backup_target, 'original-target') 170 | assert not _file_exists(backup_target, 'original-backup') 171 | 172 | def test_install_with_backup_disabled(self): 173 | file_name = _create_empty_requirements_file() 174 | 175 | assert not os.path.exists(self.target) 176 | 177 | # Create an existing target with some contents 178 | os.makedirs(self.target) 179 | _create_file('bar', self.target, 'foo') 180 | 181 | options = '--target={} --no-backup install {}'.format(self.target, file_name) 182 | 183 | rc, stdout, stderr = terrarium(options) 184 | self.assertEqual(rc, 0) 185 | 186 | assert _file_exists(self.target, 'bin', 'activate') 187 | # The original target + contents is not backed up 188 | assert not os.path.exists(self.target + '.bak') 189 | 190 | def test_require_download(self): 191 | file_name = _create_empty_requirements_file() 192 | 193 | options = '--target={} --require-download install {}'.format(self.target, file_name) 194 | 195 | rc, stdout, stderr = terrarium(options) 196 | self.assertEqual(rc, 1) 197 | self.assertEqual( 198 | stdout, 199 | '[ERROR] Failed to download environment and download is required. 
' 200 | 'Refusing to build a new environment.', 201 | ) 202 | self.assertEqual(stderr, '') 203 | 204 | def test_require_download_with_empty_storage_dir(self): 205 | file_name = _create_empty_requirements_file() 206 | 207 | options = '--target={} --require-download --storage-dir=foo install {}'.format( 208 | self.target, file_name) 209 | 210 | rc, stdout, stderr = terrarium(options) 211 | self.assertEqual(rc, 1) 212 | self.assertEqual( 213 | stdout, 214 | '[ERROR] Failed to download environment and download is required. ' 215 | 'Refusing to build a new environment.', 216 | ) 217 | self.assertEqual(stderr, '') 218 | 219 | def test_gcs_required_to_use_gcs_bucket(self): 220 | file_name = _create_empty_requirements_file() 221 | 222 | expected_stderr = ( 223 | 'terrarium: error: --gcs-bucket requires that you have ' 224 | 'gcloud installed, which does not appear to be the case' 225 | ) 226 | 227 | options = '--target={} --gcs-bucket=foo install {}'.format( 228 | self.target, file_name) 229 | 230 | rc, stdout, stderr = terrarium(options) 231 | self.assertEqual(rc, 2) 232 | self.assertEqual(stdout, '') 233 | assert stderr.endswith(expected_stderr) 234 | 235 | def test_boto_required_to_use_s3_bucket(self): 236 | file_name = _create_empty_requirements_file() 237 | 238 | expected_stderr = ( 239 | 'terrarium: error: --s3-bucket requires that you have ' 240 | 'boto installed, which does not appear to be the case' 241 | ) 242 | 243 | options = '--target={} --s3-bucket=foo install {}'.format( 244 | self.target, file_name) 245 | 246 | rc, stdout, stderr = terrarium(options) 247 | self.assertEqual(rc, 2) 248 | self.assertEqual(stdout, '') 249 | assert stderr.endswith(expected_stderr) 250 | 251 | 252 | def _file_exists(*path_spec): 253 | return os.path.exists(os.path.join(*path_spec)) 254 | 255 | 256 | def _unique_name(**kwargs): 257 | prefix = kwargs.pop('prefix', 'terrarium-test-') 258 | return tempfile.mktemp(prefix=prefix, **kwargs) 259 | 260 | 261 | def _create_file(content, *path_spec): 262 | full_path = os.path.join(*path_spec) 263 | with open(full_path, 'w') as f: 264 | f.write(content) 265 | return full_path 266 | 267 | 268 | def _create_requirements_file(requirements): 269 | file_name = '{}.txt'.format(_unique_name()) 270 | content = ''.join([ 271 | '{}\n'.format(requirement) 272 | for requirement in requirements 273 | ]) 274 | return _create_file(content, file_name) 275 | 276 | 277 | def _create_empty_requirements_file(): 278 | return _create_requirements_file([]) 279 | 280 | 281 | def _create_simple_requirements_file(): 282 | # TOX_PACKAGE will be the full path to the terrarium sdist.zip 283 | # Use it if it exists, because it will mean not waiting for pypi 284 | requirement = os.environ.get('TOX_PACKAGE', 'terrarium') 285 | return _create_requirements_file([requirement]) 286 | -------------------------------------------------------------------------------- /tests/tests.py: -------------------------------------------------------------------------------- 1 | # new tests should be added to test_cli.py, not here 2 | 3 | from __future__ import absolute_import 4 | 5 | import copy 6 | import hashlib 7 | import os 8 | import shlex 9 | import shutil 10 | import subprocess 11 | import sys 12 | import tempfile 13 | import unittest 14 | 15 | 16 | class TerrariumTester(unittest.TestCase): 17 | 18 | def setUp(self): 19 | _, requirements = tempfile.mkstemp(prefix='test_terrarium_req-') 20 | target = tempfile.mkdtemp(prefix='test_terrarium_target-') 21 | self.initial_config = { 22 | 'target': target, 23 | 
'storage_dir': tempfile.mkdtemp(prefix='test_terrarium_storage-'), 24 | 'python': os.path.join(target, 'bin', 'python'), 25 | 'terrarium': 'terrarium', 26 | 'requirements': requirements, 27 | 'environ': {}, 28 | 'opts': '', 29 | } 30 | self.configs = [] 31 | self.config_push(initial=True) 32 | 33 | @property 34 | def config(self): 35 | return self.configs[0] 36 | 37 | @property 38 | def target(self): 39 | return self.config['target'] 40 | 41 | @property 42 | def storage_dir(self): 43 | return self.config['storage_dir'] 44 | 45 | @property 46 | def python(self): 47 | return self.config['python'] 48 | 49 | @property 50 | def terrarium(self): 51 | return self.config['terrarium'] 52 | 53 | @property 54 | def environ(self): 55 | return self.config['environ'] 56 | 57 | @property 58 | def requirements(self): 59 | return self.config['requirements'] 60 | 61 | @property 62 | def opts(self): 63 | return self.config['opts'] 64 | 65 | def config_pop(self): 66 | return self.configs.pop() 67 | 68 | def config_push(self, initial=True): 69 | if initial: 70 | config = copy.deepcopy(self.initial_config) 71 | else: 72 | config = copy.deepcopy(self.configs[0]) 73 | self.configs.insert(0, config) 74 | return config 75 | 76 | def tearDown(self): 77 | for config in self.configs: 78 | if os.path.exists(config['target']): 79 | shutil.rmtree(config['target']) 80 | if os.path.exists('%s.bak' % config['target']): 81 | shutil.rmtree('%s.bak' % config['target']) 82 | if os.path.exists(config['storage_dir']): 83 | shutil.rmtree(config['storage_dir']) 84 | if os.path.exists(config['requirements']): 85 | os.unlink(config['requirements']) 86 | 87 | def _run(self, command, **kwargs): 88 | defaults = { 89 | 'stdout': subprocess.PIPE, 90 | 'stderr': subprocess.PIPE, 91 | } 92 | defaults.update(kwargs) 93 | env = {} 94 | if self.environ: 95 | env.update(os.environ) 96 | env.update(self.environ) 97 | defaults['env'] = env 98 | kwargs = defaults 99 | sys.stdout.write('Executing "%s"\n' % command) 100 | params = shlex.split(command) 101 | result = subprocess.Popen(params, **kwargs) 102 | stdout, stderr = result.communicate() 103 | return (stdout, stderr), result.returncode 104 | 105 | def _get_path(self, *paths): 106 | paths = list(paths) 107 | paths.insert( 108 | 0, 109 | os.path.dirname( 110 | os.path.abspath(__file__) 111 | ), 112 | ) 113 | return os.path.abspath( 114 | os.path.join(*paths) 115 | ) 116 | 117 | def _get_path_terrarium(self): 118 | return self._get_path('..') 119 | 120 | def _python(self, command='', **kwargs): 121 | output, return_code = self._run( 122 | '%s %s' % ( 123 | self.python, 124 | command, 125 | ) 126 | ) 127 | return output, return_code 128 | 129 | def _terrarium(self, command='', call_using_python=False, **kwargs): 130 | options = [] 131 | for key, value in kwargs.items(): 132 | options.append('--%s' % key.replace('_', '-')) 133 | if value is not None and value is not True: 134 | options.append(value) 135 | command = ' '.join([ 136 | self.terrarium, 137 | ' '.join(options), 138 | self.opts, 139 | command, 140 | ]) 141 | if call_using_python: 142 | output, return_code = self._python(command) 143 | else: 144 | output, return_code = self._run( 145 | command, 146 | ) 147 | return output, return_code 148 | 149 | def _install(self, call_using_python=False, **kwargs): 150 | command = 'install %s' % ( 151 | self.requirements, 152 | ) 153 | output, return_code = self._terrarium( 154 | command, 155 | target=self.target, 156 | call_using_python=call_using_python, 157 | **kwargs 158 | ) 159 | return output, 
return_code 160 | 161 | def _key(self, **kwargs): 162 | command = 'key %s' % ( 163 | self.requirements, 164 | ) 165 | (stdout, stderr), return_code = self._terrarium(command) 166 | self.assertEqual(return_code, 0) 167 | self.assertEqual(stderr, '') 168 | requirements_key = stdout.strip() 169 | return requirements_key 170 | 171 | def _add_requirements(self, *requirements): 172 | with open(self.requirements, 'a') as f: 173 | f.writelines('\n'.join(requirements)) 174 | f.write('\n') 175 | 176 | def _add_test_requirement(self): 177 | test_requirement = self._get_path('fixtures', 'test_requirement') 178 | self._add_requirements(test_requirement) 179 | 180 | def _add_terrarium_requirement(self): 181 | import virtualenv 182 | self._add_requirements( 183 | os.environ['TOX_PACKAGE'], 184 | 'virtualenv==%s' % virtualenv.virtualenv_version 185 | ) 186 | 187 | def _clear_requirements(self, *requirements): 188 | with open(self.requirements, 'w'): 189 | pass 190 | 191 | def _can_import_requirements(self, *requirements): 192 | imported = [] 193 | for r in requirements: 194 | output, return_code = self._python( 195 | '-c "import %s"' % r 196 | ) 197 | if return_code == 0: 198 | imported.append(r) 199 | return imported 200 | 201 | def assertInstall(self, *args, **kwargs): 202 | expected_return_code = kwargs.pop('return_code', 0) 203 | (stdout, stderr), return_code = self._install(*args, **kwargs) 204 | # Print output so it is displayed in the event of an error 205 | sys.stdout.write('\n---------- stdout ----------\n') 206 | sys.stdout.write(stdout) 207 | sys.stdout.write('\n---------- stderr ----------\n') 208 | sys.stdout.write(stderr) 209 | sys.stdout.write('\n---------- ------ ----------\n') 210 | self.assertEqual(return_code, expected_return_code) 211 | return stdout, stderr 212 | 213 | def assertExists(self, path): 214 | self.assertTrue(os.path.exists(path)) 215 | 216 | def assertNotExists(self, path): 217 | self.assertFalse(os.path.exists(path)) 218 | 219 | 220 | class TestTerrarium(TerrariumTester): 221 | def test_install_requirements_with_dependency(self): 222 | # This test involves a requirements file with two items, 223 | # test_requirement and foo_requirement. foo_requirement has 224 | # test_requirement as a dependency. We check that, if test_requirement 225 | # comes first in the requirements, the install of foo_requirement will 226 | # be successful. 227 | self._add_requirements( 228 | self._get_path('fixtures', 'test_requirement'), 229 | self._get_path('fixtures', 'foo_requirement'), 230 | ) 231 | self.assertInstall() 232 | actual = self._can_import_requirements( 233 | 'test_requirement', 234 | 'foo_requirement', 235 | ) 236 | expected = ['test_requirement', 'foo_requirement'] 237 | self.assertEqual(actual, expected) 238 | 239 | def test_install_with_requirement_comments(self): 240 | # Verify that a requirement file with comment lines can be used. 
241 | self._add_requirements( 242 | self._get_path('fixtures', 'test_requirement'), 243 | '# This is a comment line in the requirements file.', 244 | ) 245 | self.assertInstall() 246 | actual = self._can_import_requirements( 247 | 'test_requirement', 248 | ) 249 | expected = ['test_requirement'] 250 | self.assertEqual(actual, expected) 251 | 252 | def test_install_editable_with_hash_egg_name(self): 253 | # Verify that a requirement file with a hash egg name can be used and 254 | # is not confused with a comment 255 | # If the #egg=foobar is removed, pip will fail 256 | self._add_requirements( 257 | '-e git+git://github.com/PolicyStat/terrarium.git#egg=foobar', 258 | ) 259 | self.assertInstall() 260 | actual = self._can_import_requirements( 261 | 'terrarium', 262 | ) 263 | expected = ['terrarium'] 264 | self.assertEqual(actual, expected) 265 | 266 | def test_hash_default_empty_requirements(self): 267 | # Verify that the hash of an empty requirements file is predictable 268 | command = 'hash %s' % ( 269 | self.requirements, 270 | ) 271 | (stdout, stderr), return_code = self._terrarium(command) 272 | expected_digest = hashlib.md5('').hexdigest() 273 | self.assertEqual(return_code, 0) 274 | self.assertEqual(stdout.strip(), expected_digest) 275 | self.assertEqual(stderr, '') 276 | 277 | def test_install_old_backup_symlink(self): 278 | # Create a scenario where the backup (from a previous install) is 279 | # actually a symlink instead of a directory 280 | os.symlink(self.target, '%s.bak' % self.target) 281 | self.assertInstall() 282 | self.assertInstall() 283 | 284 | def test_install_replace_activate_virtualenv_path(self): 285 | # Verify that when replacing an existing virtualenv, the VIRTUAL_ENV 286 | # path in the activate script matches the original path of the 287 | # replaced environment 288 | self.assertInstall() 289 | self.assertInstall() 290 | 291 | activate = os.path.join(self.target, 'bin', 'activate') 292 | with open(activate) as f: 293 | contents = f.read() 294 | self.assertTrue( 295 | 'VIRTUAL_ENV="%s"' % self.target 296 | in contents 297 | ) 298 | 299 | def test_install_storage_dir_archive(self): 300 | # Verify that the --storage-dir option causes terrarium to create an 301 | # archive for the given requirement set 302 | self.assertInstall(storage_dir=self.storage_dir) 303 | 304 | requirements_key = self._key() 305 | 306 | archive = os.path.join(self.storage_dir, requirements_key) 307 | self.assertExists(archive) 308 | 309 | # Verify that the environment is returned to a usable state 310 | activate = os.path.join(self.target, 'bin', 'activate') 311 | with open(activate) as f: 312 | contents = f.read() 313 | self.assertTrue( 314 | 'VIRTUAL_ENV="%s"' % self.target 315 | in contents 316 | ) 317 | 318 | def test_install_storage_dir_archive_by_environ(self): 319 | # Verify that the TERRARIUM_STORAGE_DIR environment variable causes 320 | # terrarium to create an archive for the given requirement set 321 | self.environ['TERRARIUM_STORAGE_DIR'] = self.storage_dir 322 | 323 | self.assertInstall() 324 | 325 | requirements_key = self._key() 326 | 327 | archive = os.path.join(self.storage_dir, requirements_key) 328 | self.assertExists(archive) 329 | 330 | # Verify that the environment is returned to a usable state 331 | activate = os.path.join(self.target, 'bin', 'activate') 332 | with open(activate) as f: 333 | contents = f.read() 334 | self.assertTrue( 335 | 'VIRTUAL_ENV="%s"' % self.target 336 | in contents 337 | ) 338 | 339 | def test_install_storage_dir_no_archive(self): 340 | # Verify that the --no-upload option
causes terrarium to not create an 341 | # archive for the given requirement set 342 | self.assertInstall( 343 | storage_dir=self.storage_dir, 344 | no_upload=True, 345 | ) 346 | 347 | requirements_key = self._key() 348 | 349 | archive = os.path.join(self.storage_dir, requirements_key) 350 | self.assertNotExists(archive) 351 | 352 | def test_install_storage_dir_archive_extracted(self): 353 | # Verify that an archived terrarium can be later extracted and used 354 | 355 | # Build an archive 356 | self._add_test_requirement() 357 | self.assertInstall(storage_dir=self.storage_dir) 358 | 359 | requirements_key = self._key() 360 | 361 | archive = os.path.join(self.storage_dir, requirements_key) 362 | self.assertExists(archive) 363 | 364 | # Just install a blank environment 365 | self._clear_requirements() 366 | 367 | # Replace the environment with something else 368 | self.assertInstall(no_backup=True) 369 | 370 | actual = self._can_import_requirements( 371 | 'test_requirement', # Should not exist in the replacement 372 | ) 373 | expected = [] 374 | self.assertEqual(actual, expected) 375 | 376 | # Now attempt to install from the archive 377 | self._add_test_requirement() 378 | stdout, stderr = self.assertInstall( 379 | no_backup=True, 380 | storage_dir=self.storage_dir, 381 | verbose=True, 382 | ) 383 | self.assertNotEqual(stdout, '') 384 | self.assertEqual(stderr, '') 385 | 386 | actual = self._can_import_requirements( 387 | 'test_requirement', # Should exist now 388 | ) 389 | expected = ['test_requirement'] 390 | self.assertEqual(actual, expected) 391 | 392 | def test_install_with_terrarium_in_environment(self): 393 | # Verify that terrarium can replace an existing environment, the one 394 | # that terrarium executes from 395 | 396 | self._add_test_requirement() 397 | self._add_terrarium_requirement() 398 | 399 | self.assertInstall() 400 | 401 | actual = self._can_import_requirements( 402 | 'test_requirement', 403 | 'terrarium', 404 | ) 405 | expected = [ 406 | 'test_requirement', 407 | 'terrarium', 408 | ] 409 | self.assertEqual(actual, expected) 410 | 411 | # Use terrarium contained in the new environment 412 | config = self.config_push() 413 | config['terrarium'] = os.path.join( 414 | self.target, 415 | 'bin', 416 | 'terrarium', 417 | ) 418 | 419 | output = self.assertInstall( 420 | no_backup=True, 421 | call_using_python=True, 422 | ) 423 | self.assertFalse('Requirement already satisfied' in output[0]) 424 | 425 | actual = self._can_import_requirements( 426 | 'test_requirement', 427 | 'terrarium', 428 | ) 429 | expected = [ 430 | 'test_requirement', 431 | 'terrarium', 432 | ] 433 | self.assertEqual(actual, expected) 434 | 435 | def test_extract_with_terrarium_in_environment(self): 436 | # Verify that terrarium can install after being extracted from an 437 | # archive that was previously installed 438 | 439 | self._add_terrarium_requirement() 440 | 441 | self.assertInstall(storage_dir=self.storage_dir) 442 | 443 | # Use terrarium contained in the new environment 444 | config = self.config_push() 445 | config['terrarium'] = os.path.join( 446 | self.target, 447 | 'bin', 448 | 'terrarium', 449 | ) 450 | config['opts'] = '-VV' 451 | 452 | self.assertInstall( 453 | no_backup=True, 454 | storage_dir=self.storage_dir, 455 | ) 456 | self.assertExists(self.python) 457 | 458 | def test_logging_output_default(self): 459 | self._add_test_requirement() 460 | self._add_terrarium_requirement() 461 | 462 | stdout, stderr = self.assertInstall() 463 | 464 | self.assertEqual('', stdout) 465 | 
self.assertEqual('', stderr) 466 | 467 | def test_logging_output_verbose(self): 468 | self._add_test_requirement() 469 | self._add_terrarium_requirement() 470 | 471 | stdout, stderr = self.assertInstall(verbose=True) 472 | 473 | self.assertNotEqual('', stdout) 474 | self.assertEqual('', stderr) 475 | 476 | def test_sensitive_arguments_are_sensitive(self): 477 | command = 'hash %s' % ( 478 | self.requirements, 479 | ) 480 | self.config['opts'] = '-VV' 481 | (stdout, stderr), return_code = self._terrarium( 482 | command, 483 | s3_secret_key='should_not_appear', 484 | s3_access_key='do_not_show_me', 485 | ) 486 | self.assertEqual('', stderr) 487 | self.assertEqual(return_code, 0) 488 | self.assertTrue( 489 | stdout.startswith('[DEBUG] Initialized with Namespace') 490 | ) 491 | self.assertTrue('s3_secret_key' in stdout) 492 | self.assertTrue('s3_access_key' in stdout) 493 | self.assertTrue('should_not_appear' not in stdout) 494 | self.assertTrue('do_not_show_me' not in stdout) 495 | 496 | def test_restore_previously_backed_up_environment(self): 497 | output, return_code = self._terrarium( 498 | 'revert', 499 | target=self.target, 500 | ) 501 | self.assertEqual(return_code, 1) 502 | 503 | self._add_test_requirement() 504 | self.assertInstall() 505 | with open(os.path.join(self.target, 'foo'), 'w') as f: 506 | f.write('bar') 507 | self.assertInstall() 508 | with open(os.path.join(self.target, 'moo'), 'w') as f: 509 | f.write('cow') 510 | self.assertExists('%s.bak' % self.target) 511 | output, return_code = self._terrarium( 512 | 'revert', 513 | target=self.target, 514 | ) 515 | self.assertEqual(return_code, 0) 516 | self.assertNotExists('%s.bak' % self.target) 517 | self.assertExists(os.path.join(self.target, 'foo')) 518 | self.assertNotExists(os.path.join(self.target, 'moo')) 519 | -------------------------------------------------------------------------------- /terrarium.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | from __future__ import absolute_import 3 | 4 | import argparse 5 | import glob 6 | import hashlib 7 | import logging 8 | import os 9 | import shutil 10 | import subprocess 11 | import sys 12 | import tempfile 13 | 14 | try: 15 | import boto # noqa 16 | import boto.s3.connection 17 | import boto.exception 18 | except ImportError: 19 | boto = None # noqa 20 | 21 | # import google cloud storage lib 22 | try: 23 | import gcloud.storage as gcs 24 | except ImportError: 25 | gcs = None 26 | 27 | 28 | if __name__ == '__main__': 29 | __version__ = 'standalone' 30 | else: 31 | from pkg_resources import get_distribution, DistributionNotFound 32 | try: 33 | __version__ = get_distribution(__name__).version 34 | except DistributionNotFound: # package is not installed 35 | __version__ = None 36 | 37 | TERRARIUM_VERSION = __version__ 38 | 39 | logger = logging.getLogger(__name__) 40 | 41 | PYTHONWARNINGS_IGNORE_PIP_PYTHON2_DEPRECATION = ( 42 | 'ignore:DEPRECATION::pip._internal.cli.base_command' 43 | ) 44 | 45 | PYTHONWARNINGS = [ 46 | PYTHONWARNINGS_IGNORE_PIP_PYTHON2_DEPRECATION, 47 | ] 48 | 49 | 50 | class Terrarium(object): 51 | def __init__(self, args): 52 | self.args = args 53 | self._requirements = None 54 | 55 | def get_digest(self): 56 | return calculate_digest_for_requirements( 57 | digest_type=self.args.digest_type, 58 | requirements=self.requirements, 59 | ) 60 | 61 | @property 62 | def requirements(self): 63 | if self._requirements is not None: 64 | return self._requirements 65 | lines = [] 66 | for path in 
self.args.reqs: 67 | if not os.path.exists(path): 68 | raise RuntimeError( 69 | 'Requirements file {} does not exist'.format(path) 70 | ) 71 | lines.extend(parse_requirements(path=path)) 72 | self._requirements = lines 73 | return self._requirements 74 | 75 | def restore_previously_backed_up_environment(self): 76 | backup = self.get_backup_location() 77 | if not os.path.exists(backup): 78 | raise RuntimeError( 79 | 'Failed to restore backup. ' 80 | "It doesn't appear to exist at {}".format(backup), 81 | ) 82 | 83 | target = self.get_target_location() 84 | logger.info('Deleting environment at %s', target) 85 | rmtree(target) 86 | 87 | logger.info('Renaming %s to %s', backup, target) 88 | move_or_rename(backup, target) 89 | 90 | def get_target_location(self): 91 | return os.path.abspath(self.args.target) 92 | 93 | def get_backup_location(self, target=None): 94 | if target is None: 95 | target = self.get_target_location() 96 | return ''.join([target, self.args.backup_suffix]) 97 | 98 | def install(self): 99 | ''' 100 | 1. Attempt to download prebuilt environment 101 | 2. Otherwise, attempt to build one (unless prohibited) 102 | 3. If there's already an existing environment, 103 | temporarily move it out of the way. 104 | 4. Install the environment from either #2 or #1 105 | 5. If installation fails, restore the previous environment 106 | 6. Otherwise, move the previous environment to the backup location 107 | ''' 108 | target_path = self.get_target_location() 109 | backup_path = self.get_backup_location() 110 | 111 | existing_target = os.path.exists(target_path) 112 | existing_backup = os.path.exists(backup_path) 113 | 114 | downloaded = False 115 | if self.args.download: 116 | local_archive_path = self.download() 117 | if local_archive_path: 118 | downloaded = True 119 | 120 | new_env_created = False 121 | if not downloaded: 122 | if self.args.require_download: 123 | raise RuntimeError( 124 | 'Failed to download environment and download is required. ' 125 | 'Refusing to build a new environment.' 126 | ) 127 | local_archive_path = create_environment(self.requirements) 128 | if local_archive_path: 129 | new_env_created = True 130 | 131 | if not local_archive_path: 132 | raise RuntimeError('No environment was downloaded or created') 133 | 134 | target_path_temp = target_path + '.temp' 135 | try: 136 | if existing_target: 137 | move_or_rename(target_path, target_path_temp) 138 | install_environment(local_archive_path, target_path) 139 | except: # noqa - is there a better way to do this? 
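# the bare except is intentional here: any failure while installing the
# new environment (including KeyboardInterrupt) should first restore the
# previous environment, then re-raise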
140 | if existing_target: 141 | # restore the original environment 142 | rmtree(target_path) 143 | move_or_rename(target_path_temp, target_path) 144 | raise 145 | 146 | if existing_backup: 147 | logger.debug('Removing backup path') 148 | rmtree(backup_path) 149 | 150 | if existing_target: 151 | if self.args.backup: 152 | move_or_rename(target_path_temp, backup_path) 153 | else: 154 | rmtree(target_path_temp) 155 | 156 | if new_env_created and self.args.upload: 157 | self.upload(local_archive_path) 158 | 159 | def _get_s3_bucket(self): 160 | conn = boto.s3.connection.S3Connection( 161 | aws_access_key_id=self.args.s3_access_key, 162 | aws_secret_access_key=self.args.s3_secret_key 163 | ) 164 | return boto.s3.bucket.Bucket(conn, name=self.args.s3_bucket) 165 | 166 | def _get_gcs_bucket(self): 167 | conn = gcs.get_connection( 168 | self.args.gcs_project, 169 | self.args.gcs_client_email, 170 | self.args.gcs_private_key 171 | ) 172 | return conn.get_bucket(self.args.gcs_bucket) 173 | 174 | def download(self): 175 | local_path = make_temp_file(suffix='.tea') 176 | 177 | # make remote key for external storage system 178 | remote_key = self.make_remote_key() 179 | 180 | if self.args.storage_dir: 181 | local_path = os.path.join(self.args.storage_dir, remote_key) 182 | if os.path.exists(local_path): 183 | return local_path 184 | 185 | if self._download_from_s3(remote_key, local_path): 186 | return local_path 187 | 188 | if self._download_from_gcs(remote_key, local_path): 189 | return local_path 190 | 191 | def _download_from_s3(self, remote_key, local_path): 192 | if not boto or not self.args.s3_bucket: 193 | return 194 | bucket = self._get_s3_bucket() 195 | key = bucket.get_key(remote_key) 196 | if not key: 197 | return 198 | logger.info( 199 | 'Downloading %s/%s from S3 ...', 200 | self.args.s3_bucket, 201 | remote_key, 202 | ) 203 | key.get_contents_to_filename(local_path) 204 | return True 205 | 206 | def _download_from_gcs(self, remote_key, local_path): 207 | if not gcs or not self.args.gcs_bucket: 208 | return 209 | bucket = self._get_gcs_bucket() 210 | blob = bucket.get_key(remote_key) 211 | if not blob: 212 | return 213 | logger.info( 214 | 'Downloading %s/%s from Google Cloud Storage ...', 215 | self.args.gcs_bucket, 216 | remote_key, 217 | ) 218 | blob.download_to_file(local_path) 219 | return True 220 | 221 | def make_remote_key(self): 222 | import platform 223 | major, minor, patch = platform.python_version_tuple() 224 | context = { 225 | 'digest': self.get_digest(), 226 | 'python_vmajor': major, 227 | 'python_vminor': minor, 228 | 'python_vpatch': patch, 229 | 'arch': platform.machine(), 230 | } 231 | return self.args.remote_key_format % context 232 | 233 | def upload_to_storage_dir(self, archive, storage_dir): 234 | logger.info('Copying environment to storage directory') 235 | dest = os.path.join(storage_dir, self.make_remote_key()) 236 | if os.path.exists(dest): 237 | raise RuntimeError( 238 | 'Environment already exists at {}'.format(dest), 239 | ) 240 | temp = make_temp_file(dir=storage_dir) 241 | shutil.copyfile(archive, temp) 242 | move_or_rename(temp, dest) 243 | logger.info('Archive copied to storage directory') 244 | 245 | def upload_to_s3(self, archive): 246 | logger.info('Uploading environment to S3') 247 | attempts = 0 248 | bucket = self._get_s3_bucket() 249 | key = bucket.new_key(self.make_remote_key()) 250 | 251 | while True: 252 | try: 253 | key.set_contents_from_filename(archive) 254 | logger.debug('upload finished') 255 | return True 256 | except Exception: 257 |
attempts = attempts + 1 258 | logger.warning('There was an error uploading the file') 259 | if attempts > self.args.s3_max_retries: 260 | logger.error('Attempted to upload archive to S3, but failed') 261 | raise 262 | else: 263 | logger.info('Retrying S3 upload') 264 | 265 | def upload_to_gcs(self, archive): 266 | logger.info('Uploading environment to Google Cloud Storage') 267 | attempts = 0 268 | bucket = self._get_gcs_bucket() 269 | blob = bucket.new_key(self.make_remote_key()) 270 | 271 | while True: 272 | try: 273 | blob.upload_from_filename(archive) 274 | logger.debug('upload finished') 275 | return True 276 | except Exception: 277 | attempts = attempts + 1 278 | logger.warning('There was an error uploading the file') 279 | if attempts > self.args.gcs_max_retries: 280 | logger.error( 281 | 'Attempted to upload archive to Google Cloud Storage, ' 282 | 'but failed' 283 | ) 284 | raise 285 | else: 286 | logger.info('Retrying Google Cloud Storage upload') 287 | 288 | def upload(self, archive): 289 | if self.args.storage_dir: 290 | self.upload_to_storage_dir(archive, self.args.storage_dir) 291 | if boto and self.args.s3_bucket: 292 | self.upload_to_s3(archive) 293 | if gcs and self.args.gcs_bucket: 294 | self.upload_to_gcs(archive) 295 | 296 | 297 | def define_args(): 298 | ap = argparse.ArgumentParser( 299 | prog='terrarium', 300 | version=TERRARIUM_VERSION, 301 | ) 302 | ap.add_argument( 303 | '-V', '--verbose', 304 | action='count', 305 | default=0, 306 | dest='verbose_count', 307 | help='Increase verbosity. Default shows only warnings and errors.', 308 | ) 309 | ap.add_argument( 310 | '-q', '--quiet', 311 | action='store_true', 312 | default=False, 313 | dest='quiet', 314 | help='Silence output completely', 315 | ) 316 | ap.add_argument( 317 | '-t', '--target', 318 | dest='target', 319 | default=os.environ.get('VIRTUAL_ENV', None), 320 | help=''' 321 | Replace or build new environment at this location. If you are 322 | already within a virtual environment, this option defaults to 323 | VIRTUAL_ENV. 324 | ''', 325 | ) 326 | ap.add_argument( 327 | '--no-download', 328 | default=True, 329 | action='store_false', 330 | dest='download', 331 | help=''' 332 | If an external storage location is specified, terrarium will 333 | attempt to download an existing terrarium environment instead of 334 | building a new one. Using --no-download forces terrarium to build a 335 | new environment. 336 | ''', 337 | ) 338 | ap.add_argument( 339 | '--require-download', 340 | default=False, 341 | action='store_true', 342 | help=''' 343 | If we fail to download a terrarium environment from the storage 344 | location, do not proceed to build one. 345 | ''', 346 | ) 347 | ap.add_argument( 348 | '--no-upload', 349 | default=True, 350 | action='store_false', 351 | dest='upload', 352 | help=''' 353 | If an external storage location is specified, terrarium will upload 354 | a new environment after it has been built. Using --no-upload, 355 | terrarium will not upload the resulting environment to the external 356 | storage location. 357 | ''', 358 | ) 359 | ap.add_argument( 360 | '--no-backup', 361 | default=True, 362 | action='store_false', 363 | dest='backup', 364 | help=''' 365 | By default, terrarium preserves the old environment. See 366 | --backup-suffix. Using this option, terrarium will delete the old 367 | environment. 368 | ''', 369 | ) 370 | ap.add_argument( 371 | '--backup-suffix', 372 | default='.bak', 373 | help=''' 374 | The suffix to use when preserving an old environment. 
This option 375 | is ignored if --no-backup is used. Default is .bak. 376 | ''' 377 | ) 378 | ap.add_argument( 379 | '--no-compress', 380 | default=True, 381 | action='store_false', 382 | dest='compress', 383 | help=''' 384 | By default, terrarium compresses the archive using gzip before 385 | uploading it. 386 | ''', 387 | ) 388 | ap.add_argument( 389 | '--storage-dir', 390 | default=os.environ.get('TERRARIUM_STORAGE_DIR', None), 391 | help=''' 392 | Path to a directory in which terrarium environments will be retrieved 393 | and stored for speedy re-installation. This will usually be a 394 | shared drive. 395 | ''', 396 | ) 397 | ap.add_argument( 398 | '--digest-type', 399 | default='md5', 400 | help='Choose digest type (md5, sha, see hashlib). Default is md5.', 401 | ) 402 | default_remote_key_format = ''' 403 | %(arch)s-%(python_vmajor)s.%(python_vminor)s-%(digest)s 404 | '''.strip() 405 | ap.add_argument( 406 | '--remote-key-format', 407 | default=default_remote_key_format, 408 | help=''' 409 | Key name format to use when storing the archive. Default is "%s" 410 | ''' % default_remote_key_format.replace('%', '%%'), 411 | ) 412 | 413 | ap.add_argument( 414 | '--s3-bucket', 415 | default=os.environ.get('S3_BUCKET', None), 416 | help=''' 417 | S3 bucket name. Defaults to S3_BUCKET env variable. 418 | ''' 419 | ) 420 | ap.add_argument( 421 | '--s3-access-key', 422 | default=os.environ.get('S3_ACCESS_KEY', None), 423 | help=''' 424 | Defaults to S3_ACCESS_KEY env variable. 425 | ''' 426 | ) 427 | ap.add_argument( 428 | '--s3-secret-key', 429 | default=os.environ.get('S3_SECRET_KEY', None), 430 | help=''' 431 | Defaults to S3_SECRET_KEY env variable. 432 | ''' 433 | ) 434 | ap.add_argument( 435 | '--s3-max-retries', 436 | default=os.environ.get('S3_MAX_RETRIES', 3), 437 | help=''' 438 | Number of times to attempt an S3 operation before giving up. 439 | Default is 3. 440 | ''', 441 | ) 442 | 443 | # gcs-related arguments 444 | ap.add_argument( 445 | '--gcs-bucket', 446 | default=os.environ.get('GCS_BUCKET', None), 447 | help=''' 448 | Google Cloud Storage bucket name. 449 | Defaults to GCS_BUCKET env variable. 450 | ''' 451 | ) 452 | ap.add_argument( 453 | '--gcs-project', 454 | default=os.environ.get('GCS_PROJECT', None), 455 | help=''' 456 | Google Cloud Storage project. 457 | Defaults to GCS_PROJECT env variable. 458 | ''' 459 | ) 460 | ap.add_argument( 461 | '--gcs-client-email', 462 | default=os.environ.get('GCS_CLIENT_EMAIL', None), 463 | help=''' 464 | Google Cloud Storage client email. 465 | Defaults to GCS_CLIENT_EMAIL env variable. 466 | ''' 467 | ) 468 | ap.add_argument( 469 | '--gcs-private-key', 470 | default=os.environ.get('GCS_PRIVATE_KEY', None), 471 | help=''' 472 | Google Cloud Storage private key. 473 | Defaults to GCS_PRIVATE_KEY env variable. 474 | ''' 475 | ) 476 | ap.add_argument( 477 | '--gcs-max-retries', 478 | default=os.environ.get('GCS_MAX_RETRIES', 3), 479 | help=''' 480 | Number of times to attempt a GCS operation before giving up. 481 | Default is 3.
482 | ''' 483 | ) 484 | 485 | subparsers = ap.add_subparsers( 486 | title='Basic Commands', 487 | dest='command', 488 | ) 489 | subparsers.required = True 490 | 491 | commands = { 492 | 'hash': subparsers.add_parser( 493 | 'hash', 494 | help='Display digest for current requirement set', 495 | ), 496 | 'key': subparsers.add_parser( 497 | 'key', 498 | help='Display remote key for current requirement set and platform', 499 | ), 500 | 'install': subparsers.add_parser( 501 | 'install', 502 | help=''' 503 | Replace current environment with the one given by the 504 | requirement set. 505 | ''', 506 | ), 507 | 'revert': subparsers.add_parser( 508 | 'revert', 509 | help=''' 510 | Restore the most recent backed-up virtualenv, if it exists. 511 | ''', 512 | ), 513 | } 514 | 515 | for command in commands.values(): 516 | command.add_argument('reqs', nargs=argparse.REMAINDER) 517 | return ap 518 | 519 | 520 | def get_displayable_args(args): 521 | sensitive_arguments = set([ 522 | 's3_access_key', 523 | 's3_secret_key', 524 | 'gcs_client_email', 525 | 'gcs_private_key', 526 | ]) 527 | for key, val in sorted(args.__dict__.items()): 528 | if val is not None and key in sensitive_arguments: 529 | val = '*****' 530 | yield key, val 531 | 532 | 533 | def parse_args(ap): 534 | args = ap.parse_args() 535 | assert args.__class__._get_kwargs 536 | args.__class__._get_kwargs = get_displayable_args 537 | 538 | if not boto and args.s3_bucket is not None: 539 | ap.error( 540 | '--s3-bucket requires that you have boto installed, ' 541 | 'which does not appear to be the case' 542 | ) 543 | 544 | if not gcs and args.gcs_bucket is not None: 545 | ap.error( 546 | '--gcs-bucket requires that you have gcloud installed, ' 547 | 'which does not appear to be the case' 548 | ) 549 | 550 | return args 551 | 552 | 553 | def call_subprocess(command, log_level=logging.INFO): 554 | logger.debug('call_subprocess: %s', command) 555 | process = subprocess.Popen( 556 | command, 557 | stdout=subprocess.PIPE, 558 | stderr=subprocess.PIPE, 559 | ) 560 | 561 | while process.poll() is None: 562 | while True: 563 | stdout = process.stdout.readline() 564 | stderr = process.stderr.readline() 565 | if not stdout and not stderr: 566 | break 567 | stdout = stdout.strip() 568 | if stdout: 569 | logger.log(log_level, stdout.decode()) 570 | stderr = stderr.strip() 571 | if stderr: 572 | logger.warning(stderr.decode()) 573 | 574 | rc = process.returncode 575 | if rc: 576 | raise RuntimeError('{cmd} exited with code {code}'.format( 577 | cmd=command[0], 578 | code=rc, 579 | )) 580 | 581 | 582 | def create_virtualenv(directory): 583 | command = [ 584 | 'virtualenv', 585 | directory, 586 | ] 587 | call_subprocess(command) 588 | 589 | 590 | def pip_install_wheels(virtualenv, wheel_dir): 591 | logger.debug('pip_install_wheels: %s, %s', virtualenv, wheel_dir) 592 | pip_path = os.path.join(virtualenv, 'bin', 'pip') 593 | requirements_path = os.path.join(wheel_dir, 'requirements.txt') 594 | 595 | # Copy requirements.txt to the virtualenv 596 | shutil.copyfile( 597 | requirements_path, 598 | os.path.join(virtualenv, 'requirements.txt'), 599 | ) 600 | 601 | # note: --find-links + --requirement 602 | # the reason the command below isn't using --find-links + --requirement is 603 | # because of how pip behaves when requirements.txt contains a remote source 604 | # requirement. 
In this situation, pip ignores the wheel in the wheel_dir 605 | # and instead downloads the requirement from the source 606 | 607 | wheels = glob.glob(os.path.join(wheel_dir, '*.whl')) 608 | if not wheels: 609 | logger.warning('wheel directory has no wheels: %s', wheel_dir) 610 | return 611 | 612 | command = [ 613 | pip_path, 614 | 'install', 615 | '--no-index', 616 | '--no-cache-dir', 617 | ] 618 | command.extend(wheels) 619 | call_subprocess(command) 620 | 621 | 622 | def install_environment(local_archive_path, local_directory): 623 | logger.debug('install_environment: %s, %s', local_archive_path, local_directory) 624 | wheel_dir = tempfile.mkdtemp(prefix='terrarium-wheel-') 625 | extract_tar_archive(local_archive_path, wheel_dir) 626 | requirements_path = os.path.join(wheel_dir, 'requirements.txt') 627 | if not os.path.exists(requirements_path): 628 | raise RuntimeError('Environment is missing requirements.txt') 629 | 630 | create_virtualenv(local_directory) 631 | pip_install_wheels(local_directory, wheel_dir) 632 | 633 | 634 | def pip_wheel(wheel_dir, requirements): 635 | requirements_path = os.path.join(wheel_dir, 'requirements.txt') 636 | with open(requirements_path, 'w') as f: 637 | f.write(flatten_requirements(requirements)) 638 | 639 | command = [ 640 | 'pip', 641 | 'wheel', 642 | '--wheel-dir', wheel_dir, 643 | '--requirement', requirements_path, 644 | ] 645 | call_subprocess(command) 646 | 647 | 648 | def flatten_requirements(requirements): 649 | if not requirements: 650 | return '' 651 | return '\n'.join(requirements) + '\n' 652 | 653 | 654 | def create_environment(requirements, compress=True): 655 | logger.debug('create_environment') 656 | wheel_dir = tempfile.mkdtemp(prefix='terrarium-wheel-') 657 | pip_wheel(wheel_dir, requirements) 658 | archive_path = create_tar_archive(wheel_dir) 659 | if not compress: 660 | return archive_path 661 | compressed_archive_path = gzip_compress(archive_path) 662 | return compressed_archive_path 663 | 664 | 665 | def calculate_digest_for_requirements(digest_type, requirements): 666 | h = hashlib.new(digest_type) 667 | h.update(flatten_requirements(requirements)) 668 | return h.hexdigest() 669 | 670 | 671 | def gzip_compress(target): 672 | call_subprocess(['gzip', '--force', target]) 673 | return '{}.gz'.format(target) 674 | 675 | 676 | def create_tar_archive(directory): 677 | logger.debug('create_tar_archive: %s', directory) 678 | archive_path = make_temp_file(suffix='.tar') 679 | command = [ 680 | 'tar', 681 | '--create', 682 | '--file', archive_path, 683 | '--directory', directory, 684 | '.' 
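# '.' archives the contents of the directory; because tar is run with
# --directory, paths inside the archive are relative, so it can be
# extracted into any target directory later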
685 | ] 686 | call_subprocess(command) 687 | return archive_path 688 | 689 | 690 | # http://www.astro.keele.ac.uk/oldusers/rno/Computing/File_magic.html 691 | MAGIC_NUM = { 692 | # magic code, offset 693 | 'ELF': ('.ELF', 0), 694 | 'GZIP': ('\x1f\x8b', 0), 695 | 'BZIP': ('\x42\x5a', 0), 696 | 'TAR': ('ustar', 257), 697 | } 698 | 699 | 700 | def detect_file_type(path): 701 | 'Examine the first few bytes of the given path to detect the file type' 702 | with open(path) as f: 703 | for file_type, magic in MAGIC_NUM.items(): 704 | f.seek(magic[1]) 705 | if magic[0] == f.read(len(magic[0])): 706 | return file_type 707 | return None 708 | 709 | 710 | def extract_tar_archive(archive, target): 711 | logger.debug('extract_tar_archive: %s, %s', archive, target) 712 | archive_type = detect_file_type(archive) 713 | 714 | compression_map = { 715 | 'GZIP': '--gzip', 716 | 'BZIP': '--bzip2', 717 | 'TAR': '', 718 | } 719 | 720 | compression_opt = compression_map.get(archive_type) 721 | 722 | if compression_opt is None: 723 | raise RuntimeError( 724 | 'Failed to extract archive, unknown or unsupported file type', 725 | ) 726 | 727 | if not os.path.exists(target): 728 | os.mkdir(target) 729 | command = [ 730 | 'tar', 731 | '--extract', 732 | compression_opt, 733 | '--file', archive, 734 | '--directory', target, 735 | ] 736 | call_subprocess(command) 737 | 738 | 739 | def parse_requirements(path, ignore_comments=True): 740 | logger.debug('parse_requirements: %s', path) 741 | with open(path) as f: 742 | lines = f.readlines() 743 | containing_dir = os.path.dirname(path) 744 | for line in lines: 745 | line = line.strip() 746 | if ignore_comments and line.startswith('#'): 747 | continue 748 | if line.startswith(('-r', '--requirement')): 749 | _, ref_name = line.split() 750 | # TODO - Can ref_name be an absolute path? 751 | ref_path = os.path.join(containing_dir, ref_name) 752 | if not os.path.exists(ref_path): 753 | raise RuntimeError( 754 | 'Requirements {} contains ref that does not exist: {}'.format( 755 | path, 756 | ref_path, 757 | ) 758 | ) 759 | ref_lines = parse_requirements( 760 | ref_path, 761 | ignore_comments=ignore_comments, 762 | ) 763 | for inner_line in ref_lines: 764 | yield inner_line 765 | else: 766 | yield line 767 | 768 | 769 | def move_or_rename(src, dst): 770 | if src == dst: 771 | return 772 | return shutil.move(src, dst) 773 | 774 | 775 | def rmtree(path): 776 | if not os.path.exists(path): 777 | return 778 | logger.debug('rmtree: %s', path) 779 | try: 780 | if os.path.islink(path): 781 | os.unlink(path) 782 | elif os.path.isdir(path): 783 | shutil.rmtree(path) 784 | else: 785 | os.unlink(path) 786 | except OSError as why: 787 | raise RuntimeError( 788 | 'Failed to remove {path}. ' 789 | 'Make sure you have permissions to this path. 
{why}'.format( 790 | path=path, 791 | why=why, 792 | ) 793 | ) 794 | 795 | 796 | def make_temp_file(**kwargs): 797 | prefix = kwargs.pop('prefix', 'terrarium-') 798 | fd, path_to_file = tempfile.mkstemp(prefix=prefix, **kwargs) 799 | os.close(fd) 800 | return path_to_file 801 | 802 | 803 | def initialize_logging(args): 804 | if args.quiet: 805 | logger.disabled = True 806 | else: 807 | level = logging.WARNING 808 | level -= args.verbose_count * 10 809 | level = max(level, logging.DEBUG) 810 | logger.setLevel(level) 811 | 812 | 813 | def update_python_warnings(): 814 | existing_warnings = os.environ.get('PYTHONWARNINGS', '').strip() 815 | warnings = [] 816 | if existing_warnings: 817 | warnings = existing_warnings.split(',') 818 | warnings.extend(PYTHONWARNINGS) 819 | os.environ['PYTHONWARNINGS'] = ','.join(warnings) 820 | 821 | 822 | def main(): 823 | update_python_warnings() 824 | 825 | logging.basicConfig( 826 | stream=sys.stdout, 827 | level=logging.WARNING, 828 | format='[%(levelname)s] %(message)s', 829 | ) 830 | 831 | ap = define_args() 832 | args = parse_args(ap) 833 | initialize_logging(args) 834 | 835 | logger.debug('Initialized with %s', args) 836 | 837 | terrarium = Terrarium(args) 838 | 839 | try: 840 | if args.command == 'hash': 841 | digest = terrarium.get_digest() 842 | sys.stdout.write('{}\n'.format(digest)) 843 | if args.command == 'key': 844 | key = terrarium.make_remote_key() 845 | sys.stdout.write('{}\n'.format(key)) 846 | elif args.command == 'install': 847 | terrarium.install() 848 | elif args.command == 'revert': 849 | terrarium.restore_previously_backed_up_environment() 850 | except RuntimeError as e: 851 | logger.error(e.message) 852 | sys.exit(1) 853 | 854 | 855 | if __name__ == '__main__': 856 | main() 857 | --------------------------------------------------------------------------------