├── .travis.yml ├── DICTIONARY ├── LICENSE.md ├── MANIFEST.in ├── README.md ├── appveyor.yml ├── container-setup.py ├── download-all-distros-to.py ├── psqtraviscontainer ├── __init__.py ├── architecture.py ├── common_options.py ├── constants.py ├── container.py ├── create.py ├── debian_package.py ├── directory.py ├── distro.py ├── download.py ├── linux_container.py ├── linux_local_container.py ├── osx_container.py ├── output.py ├── package_system.py ├── printer.py ├── rootdir.py ├── use.py ├── util.py └── windows_container.py ├── requirements.txt ├── setup.py └── test ├── __init__.py ├── test_acceptance.py ├── test_unit.py └── testutil.py /.travis.yml: -------------------------------------------------------------------------------- 1 | language: python 2 | sudo: false 3 | matrix: 4 | include: 5 | - os: linux 6 | python: '2.7' 7 | - os: linux 8 | python: '3.5' 9 | - os: linux 10 | python: pypy 11 | - os: osx 12 | language: generic 13 | env: PYTHON=python PIP=pip 14 | osx_image: xcode8 15 | - os: osx 16 | language: generic 17 | env: PYTHON=python3 PIP=pip3 18 | osx_image: xcode8 19 | addons: 20 | apt: 21 | packages: 22 | - pandoc 23 | install: 24 | - eval "$(curl -LSs --connect-timeout 2 --retry 100 public-travis-scripts.polysquare.org/bash/osx-python.sh)" 25 | - eval "$(curl -LSs --connect-timeout 2 --retry 100 public-travis-scripts.polysquare.org/bootstrap.py 26 | | python /dev/stdin -d $(pwd)/container -s container-setup.py -e bash -p /dev/stdout)" 27 | script: 28 | - polysquare_run check/python/check.py 29 | before_cache: 30 | - polysquare_cleanup 31 | after_success: 32 | - polysquare_run coverage/python/coverage.py 33 | before_deploy: 34 | - polysquare_run deploy/python/deploy.py 35 | env: 36 | global: 37 | - JOBSTAMPS_DISABLED=1 38 | - secure: eQOVCF4WcmpWJon1rcQFXGJ9M43xKJBctEAEepOXIbTWBYN6E9eveD9wSKPU8Zw5ZY8jt1pmTLIspFJBw0BcnLPmApB6v42X8mewKcM1B1bNnsjYOFO57pqfFNbvs9p3JZzyncBcSPe8y1HVn1Gz2fN5R/piJrUqLrMd+97WgRM= 39 | deploy: 40 | provider: pypi 41 | user: 42 | secure: EnKMIgbfoWevryvbQAEOTjquSP6rUEgRX/nIUt0vW0wu8ir55XhNJ48eafKTv2uYFi9C4XE66qtZU2qwfstnyK/GABi6+wzR+scfMwB7FykU+n7+LRP3uz3O1+rEae4sgRnzWwTqfmbEwyllQmGSssYeFoPNK8i/afXgBO6VZ5A= 43 | password: 44 | secure: Exs3RO2rnrZ2EEIB8Slv7hEPTwy+YGxIyz2sCxQY4sf2h2Pv7Ntmqad+/AsLOGzMQwMTBrj4LxoIiYIeZEXpgx6UH71QMXG8RGEH1NdM+iWoyW8+gNl4BLoQ/t9pKYdC0W+r2ZDZEltCMb+GhwCkgYgFxPyWCIQj+C9201dssJk= 45 | on: 46 | repo: polysquare/polysquare-travis-container 47 | branch: master 48 | python: 2.7 49 | -------------------------------------------------------------------------------- /DICTIONARY: -------------------------------------------------------------------------------- 1 | ABIs 2 | AppVeyor 3 | AVAILABLE_DISTRIBUTIONS 4 | arg 5 | arm 6 | choco 7 | chroot 8 | CMD 9 | config 10 | CONTAINER_ARCH 11 | CONTAINER_DIRECTORY 12 | CONTAINER_DISTRO 13 | CONTAINER_RELEASE 14 | Debian 15 | distro 16 | docstring 17 | dpkg 18 | eg 19 | env 20 | Executables 21 | executables 22 | Fedora 23 | filename 24 | filename's 25 | filesystem 26 | flake8 27 | getter 28 | Homebrew 29 | homebrew 30 | LANG 31 | launchpad 32 | LC_ALL 33 | LZMA 34 | OSX 35 | overridable 36 | PATHEXT 37 | PermissionError 38 | _POLYSQUARE_TRAVIS_CONTAINER_TEST_CACHE_DIR 39 | PPA 40 | ppa 41 | Polysquare 42 | polysquare 43 | ppc 44 | prepended 45 | proot 46 | psq 47 | psqtraviscontainer 48 | pychecker 49 | PyPI 50 | matcher 51 | metaclass 52 | metadata 53 | net 54 | runtimes 55 | qemu 56 | qemu's 57 | requires_full_access 58 | rootfs 59 | subdirectory 60 | subprocess 61 | timestamp 62 | Windows 63 | 
Ubuntu 64 | uid 65 | untar 66 | x86_64 67 | x86 68 | -------------------------------------------------------------------------------- /LICENSE.md: -------------------------------------------------------------------------------- 1 | The MIT License (MIT) 2 | 3 | Copyright (c) 2014 polysquare 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | 23 | -------------------------------------------------------------------------------- /MANIFEST.in: -------------------------------------------------------------------------------- 1 | include README.md 2 | include LICENSE.md 3 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | Polysquare Travis Container 2 | =========================== 3 | 4 | Creates a self-contained package-management installation, without root access. 5 | 6 | This allows you to install a pre-defined set of packages to a directory and 7 | then execute commands using the packages installed in that directory. 8 | 9 | Supports Windows, OS X and Linux. 10 | 11 | On Windows and OS X, local versions of Chocolatey and Homebrew are installed 12 | respectively, with packages installing to the specified folder. Commands 13 | are executed with environment variables set such that the locally 14 | installed packages will be used by any software built or installed 15 | using the `psq-travis-container-exec` wrapper. Only the host architecture 16 | is supported. 17 | 18 | On Linux, [`proot`](http://proot.me) is used to "containerize" a downloaded 19 | Linux distribution, where the package manager operates only on the directory 20 | in which the downloaded Linux distribution exists. This allows you to 21 | install packages using `apt-get` or `yum` without touching other 22 | system files. `proot` also allows different architectures to be 23 | specified, which are emulated transparently using the 24 | `qemu-user-mode` tool. 25 | 26 | For Linux, an alternative 'local' option is also provided which does not 27 | incur the overhead of `proot` but is not as flexible. Sadly, on Travis CI, 28 | `proot` no longer functions correctly, so the latter mode of operation 29 | is required.
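The 'local' mode is selected with the `--local` flag defined in `psqtraviscontainer/common_options.py`. A minimal sketch, assuming an Ubuntu `trusty` container (the directory name `container` and the `Ubuntu`/`trusty` values here are placeholders; substitute your own):

    psq-travis-container-create container --distro Ubuntu --release trusty --local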
30 | 31 | Status 32 | ------ 33 | 34 | | Travis CI (Ubuntu) | AppVeyor (Windows) | Coverage | PyPI | Licence | 35 | |--------------------|--------------------|----------|------|---------| 36 | |[![Travis](https://img.shields.io/travis/polysquare/polysquare-travis-container.svg)](http://travis-ci.org/polysquare/polysquare-travis-container)|[![AppVeyor](https://img.shields.io/appveyor/ci/smspillaz/polysquare-travis-container-vd3yj.svg)](https://ci.appveyor.com/project/smspillaz/polysquare-travis-container-vd3yj)|[![Coveralls](https://img.shields.io/coveralls/polysquare/polysquare-travis-container.svg)](http://coveralls.io/polysquare/polysquare-travis-container)|[![PyPIVersion](https://img.shields.io/pypi/v/polysquare-travis-container.svg)](https://pypi.python.org/pypi/polysquare-travis-container)[![PyPIPythons](https://img.shields.io/pypi/pyversions/polysquare-travis-container.svg)](https://pypi.python.org/pypi/polysquare-travis-container)|[![License](https://img.shields.io/github/license/polysquare/polysquare-travis-container.svg)](http://github.com/polysquare/polysquare-travis-container)| 37 | 38 | Caveats 39 | ------- 40 | 41 | 64-bit executables cannot be emulated on a 32-bit architecture. 42 | 43 | Installation 44 | ------------ 45 | 46 | `polysquare-travis-container` can be installed using `pip` from PyPI (see the example configuration below). 47 | 48 | Creating a container 49 | -------------------- 50 | 51 | Containers can be created with `psq-travis-container-create`: 52 | 53 | usage: psq-travis-container-create [-h] [--distro {Fedora, 54 | Debian, 55 | Ubuntu, 56 | Windows, 57 | OSX}] 58 | [--release RELEASE] 59 | [--arch {ppc,x86_64,x86,arm}] 60 | [--repositories REPOSITORIES] 61 | [--packages PACKAGES] 62 | CONTAINER_DIRECTORY 63 | 64 | Create a Travis CI container If an arg is specified in more than one place, 65 | then command-line values override environment variables which override 66 | defaults. 67 | 68 | positional arguments: 69 | CONTAINER_DIRECTORY Directory to place container in 70 | 71 | optional arguments: 72 | -h, --help show this help message and exit 73 | --distro {Fedora,Debian,Ubuntu,Windows,OSX} 74 | Distribution name to create container of 75 | [env var: CONTAINER_DISTRO] 76 | --release RELEASE Distribution release to create container of 77 | [env var: CONTAINER_RELEASE] 78 | --arch {ppc,x86_64,x86,arm} 79 | Architecture (all architectures other than the 80 | system architecture will be emulated with qemu) 81 | [env var: CONTAINER_ARCH] 82 | --repositories REPOSITORIES 83 | A file containing a list of repositories to add 84 | before installing packages. Special keywords will 85 | control the operation of this file: {release}: The 86 | distribution release (eg, precise) {ubuntu}: Ubuntu 87 | archive URL {launchpad}: Launchpad PPA URL header 88 | (eg, http://ppa.launchpad.net) 89 | --packages PACKAGES A file containing a list of packages to install 90 | 91 | The distribution filesystem itself is placed in a subdirectory of 92 | `CONTAINER_DIRECTORY`, so multiple distribution configurations can be placed in 93 | a single `CONTAINER_DIRECTORY`. A mini-distribution of `proot` will also be 94 | placed in `CONTAINER_DIRECTORY`. This directory should be cached, for instance: 95 | 96 | cache: 97 | directories: 98 | - CONTAINER_DIRECTORY 99 | 100 | Packages will only be installed if the container is being created and not 101 | restored from the cache. To install additional packages, the Travis caches 102 | should be deleted.
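Putting the pieces together, a hypothetical `.travis.yml` fragment might look like the following. The package list `PACKAGES.txt` and the build script `ci-build.sh` are placeholder names rather than files shipped with this project, and `psq-travis-container-exec` is described in the next section:

    env:
      global:
        - CONTAINER_DISTRO=Ubuntu
        - CONTAINER_RELEASE=precise
    cache:
      directories:
        - container
    install:
      - pip install polysquare-travis-container
      - psq-travis-container-create container --packages PACKAGES.txt
    script:
      - psq-travis-container-exec container --cmd bash ci-build.sh

Because `--distro`, `--release` and `--arch` fall back to the `CONTAINER_DISTRO`, `CONTAINER_RELEASE` and `CONTAINER_ARCH` environment variables, they do not need to be repeated on each command line.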
103 | 104 | Special directories like `/tmp` and `/home` are linked automatically, so you 105 | can run binaries or scripts directly from the project root. 106 | 107 | Using a container 108 | ----------------- 109 | 110 | To run a command inside a container, use `psq-travis-container-exec`: 111 | 112 | usage: psq-travis-container-exec [-h] [--distro {Fedora, 113 | Debian, 114 | Ubuntu, 115 | Windows, 116 | OSX}] 117 | [--release RELEASE] 118 | [--arch {ppc,x86_64,x86,arm}] --cmd 119 | [CMD [CMD ...]] 120 | CONTAINER_DIRECTORY 121 | 122 | Use a Travis CI container If an arg is specified in more than one place, 123 | then command-line values override environment variables which override 124 | defaults. 125 | 126 | positional arguments: 127 | CONTAINER_DIRECTORY Directory to place container in 128 | 129 | optional arguments: 130 | -h, --help show this help message and exit 131 | --distro {Fedora,Debian,Ubuntu,Windows,OSX} 132 | Distribution name to create container of 133 | [env var: CONTAINER_DISTRO] 134 | --release RELEASE Distribution release to create container of 135 | [env var: CONTAINER_RELEASE] 136 | --arch {ppc,x86_64,x86,arm} 137 | Architecture (all architectures other than the 138 | system architecture will be emulated with qemu) 139 | [env var: CONTAINER_ARCH] 140 | -- [CMD [CMD ...]] Command to run inside of container 141 | 142 | Executables in CMD are resolved relative to the distribution container, so 143 | running `bash` would run `CONTAINER_DIR/bin/bash` and not `/bin/bash` 144 | inside Travis. 145 | 146 | The `--distro`, `--release` and `--arch` options are used to select a 147 | pre-existing distribution container set up with `psq-travis-container-create`. 148 | 149 | -------------------------------------------------------------------------------- /appveyor.yml: -------------------------------------------------------------------------------- 1 | environment: 2 | matrix: 3 | - PYTHON: "C:/Python34" 4 | - PYTHON: "C:/Python27" 5 | 6 | cache: 7 | - C:\container 8 | 9 | 10 | install: 11 | - ps: $env:PATH="${env:PYTHON};${env:PYTHON}/Scripts;C:/MinGW/bin;C:/Python34;C:/Python34/Scripts;C:/Python27;C:/Python27/Scripts;${env:PATH}" 12 | - ps: wget public-travis-scripts.polysquare.org/bootstrap.py -OutFile bootstrap 13 | - ps: python bootstrap -d C:/container -s container-setup.py -e powershell -p test-env.ps1 --no-mdl 14 | - ps: . ./test-env 15 | 16 | build: false 17 | 18 | test_script: 19 | - ps: polysquare_run check/python/check.py --no-mdl 20 | 21 | after_test: 22 | - ps: polysquare_cleanup 23 | 24 | -------------------------------------------------------------------------------- /container-setup.py: -------------------------------------------------------------------------------- 1 | # /container-setup.py 2 | # 3 | # Initial setup script specific to polysquare-travis-container. Creates 4 | # a cache dir in the container and sets the 5 | # _POLYSQUARE_TRAVIS_CONTAINER_TEST_CACHE_DIR environment variable 6 | # to point to it.
7 | # 8 | # See /LICENCE.md for Copyright information 9 | """Initial setup script specific to polysquare-ci-scripts.""" 10 | 11 | import os 12 | 13 | 14 | def run(cont, util, shell, argv=list()): 15 | """Set up language runtimes and pass control to python project script.""" 16 | 17 | cache_dir = cont.named_cache_dir("travis_container_downloads", 18 | ephemeral=False) 19 | cache_dir_key = "_POLYSQUARE_TRAVIS_CONTAINER_TEST_CACHE_DIR" 20 | shell.overwrite_environment_variable(cache_dir_key, cache_dir) 21 | 22 | cont.fetch_and_import("setup/python/setup.py").run(cont, util, shell, argv) 23 | 24 | config_python = "setup/project/configure_python.py" 25 | py_ver = util.language_version("python3") 26 | py_cont = cont.fetch_and_import(config_python).get(cont, 27 | util, 28 | shell, 29 | py_ver) 30 | 31 | with py_cont.activated(util): 32 | with util.Task("""Downloading all distributions"""): 33 | os.environ[cache_dir_key] = cache_dir 34 | util.execute(cont, 35 | util.long_running_suppressed_output(), 36 | util.which("python"), 37 | "download-all-distros-to.py") 38 | -------------------------------------------------------------------------------- /download-all-distros-to.py: -------------------------------------------------------------------------------- 1 | # /download-all-distros-to.py 2 | # 3 | # Helper script to download all linux distributions 4 | # 5 | # See /LICENCE.md for Copyright information 6 | """Specialization for linux containers, using proot.""" 7 | 8 | import psqtraviscontainer.architecture 9 | import psqtraviscontainer.distro 10 | 11 | from test.testutil import download_file_cached 12 | 13 | for distro in psqtraviscontainer.distro.available_distributions(): 14 | if (not distro.get("arch", None) or 15 | not distro.get("info", None).kwargs.get("archfetch", None)): 16 | continue 17 | archfetch = distro["info"].kwargs["archfetch"] 18 | download_file_cached(distro["url"].format(arch=distro["arch"])) 19 | -------------------------------------------------------------------------------- /psqtraviscontainer/__init__.py: -------------------------------------------------------------------------------- 1 | # /psqtraviscontainer/__init__.py 2 | # 3 | # Initializes the module. 4 | # 5 | # See /LICENCE.md for Copyright information 6 | """Initializes the module.""" 7 | -------------------------------------------------------------------------------- /psqtraviscontainer/architecture.py: -------------------------------------------------------------------------------- 1 | # /psqtraviscontainer/architecture.py 2 | # 3 | # Module which provides a helper for determining the names of various 4 | # processor architectures on various distributions. 
5 | # 6 | # See /LICENCE.md for Copyright information 7 | """Architecture handling.""" 8 | 9 | from collections import namedtuple 10 | 11 | from six import with_metaclass 12 | 13 | _ArchitectureType = namedtuple("_ArchitectureType", 14 | "aliases debian universal qemu") 15 | 16 | _X86_ARCHITECTURE = _ArchitectureType(aliases=["i386", 17 | "i486", 18 | "i586", 19 | "i686", 20 | "x86"], 21 | debian="i386", 22 | universal="x86", 23 | qemu="i386") 24 | _X86_64_ARCHITECTURE = _ArchitectureType(aliases=["x86_64", "amd64"], 25 | debian="amd64", 26 | universal="x86_64", 27 | qemu="x86_64") 28 | _ARM_HARD_FLOAT_ARCHITECTURE = _ArchitectureType(aliases=["arm", 29 | "armel", 30 | "armhf"], 31 | debian="armhf", 32 | universal="arm", 33 | qemu="arm") 34 | _POWERPC32_ARCHITECTURE = _ArchitectureType(aliases=["powerpc", "ppc"], 35 | debian="powerpc", 36 | universal="ppc", 37 | qemu="ppc") 38 | _POWERPC64_ARCHITECTURE = _ArchitectureType(aliases=["ppc64el", "ppc64"], 39 | debian="ppc64el", 40 | universal="ppc64", 41 | qemu="ppc64") 42 | 43 | 44 | class _AliasMetaclass(type): 45 | """A metaclass which provides an operator to convert arch strings.""" 46 | 47 | @classmethod 48 | def __getitem__(cls, # pylint:disable=bad-mcs-classmethod-argument 49 | lookup): 50 | """Operator overload for []. 51 | 52 | If a special architecture for different platforms is not found, return 53 | a generic one which just has this architecture name 54 | """ 55 | del cls 56 | 57 | overloaded_architectures = [_X86_ARCHITECTURE, 58 | _X86_64_ARCHITECTURE, 59 | _ARM_HARD_FLOAT_ARCHITECTURE, 60 | _POWERPC32_ARCHITECTURE, 61 | _POWERPC64_ARCHITECTURE] 62 | for arch in overloaded_architectures: 63 | if lookup in arch.aliases: 64 | return arch 65 | 66 | return _ArchitectureType(aliases=[lookup], 67 | debian=lookup, 68 | universal=lookup, 69 | qemu=lookup) 70 | 71 | 72 | class Alias(with_metaclass(_AliasMetaclass, object)): 73 | """Implementation of _AliasMetaclass. 74 | 75 | Provides convenience methods to convert architecture strings 76 | between platforms. 77 | """ 78 | 79 | @classmethod 80 | def debian(cls, lookup): 81 | """Convert to debian.""" 82 | return cls[lookup].debian 83 | 84 | @classmethod 85 | def qemu(cls, lookup): 86 | """Convert to qemu.""" 87 | return cls[lookup].qemu 88 | 89 | @classmethod 90 | def universal(cls, lookup): 91 | """Convert to universal.""" 92 | return cls[lookup].universal 93 | -------------------------------------------------------------------------------- /psqtraviscontainer/common_options.py: -------------------------------------------------------------------------------- 1 | # /psqtraviscontainer/common_options.py 2 | # 3 | # Options common to both both commands. 
4 | # 5 | # See /LICENCE.md for Copyright information 6 | """Options common to both both commands.""" 7 | 8 | import argparse 9 | 10 | import os 11 | 12 | import platform 13 | 14 | from psqtraviscontainer import architecture 15 | from psqtraviscontainer import distro 16 | 17 | 18 | def get_parser(action): 19 | """Get a parser with options common to both commands.""" 20 | # Iterate over the available_distributions and get a list of available 21 | # distributions and architectures for the --distro and --arch arguments 22 | architectures = set() 23 | distributions = set() 24 | 25 | for config in distro.available_distributions(): 26 | if "distro" in config: 27 | distributions.add(config["distro"]) 28 | if "arch" in config: 29 | architectures.add(architecture.Alias.universal(config["arch"])) 30 | 31 | description = """{0} a CI container""".format(action) 32 | parser = argparse.ArgumentParser(description=description) 33 | 34 | current_arch = architecture.Alias.universal(platform.machine()) 35 | 36 | parser.add_argument("containerdir", 37 | metavar=("CONTAINER_DIRECTORY"), 38 | help="""Directory to place container in""", 39 | type=str) 40 | parser.add_argument("--distro", 41 | type=str, 42 | help="""Distribution name to create container of""", 43 | choices=distributions, 44 | default=os.environ.get("CONTAINER_DISTRO", None)) 45 | parser.add_argument("--release", 46 | type=str, 47 | help="""Distribution release to create container of""", 48 | default=os.environ.get("CONTAINER_RELEASE", None)) 49 | parser.add_argument("--arch", 50 | type=str, 51 | help=("""Architecture (all architectures other """ 52 | """than the system architecture will be """ 53 | """emulated with qemu)"""), 54 | choices=architectures, 55 | default=os.environ.get("CONTAINER_ARCH", current_arch)) 56 | parser.add_argument("--local", 57 | action="store_true", 58 | help="""Use the 'local' version of this container.""") 59 | 60 | return parser 61 | -------------------------------------------------------------------------------- /psqtraviscontainer/constants.py: -------------------------------------------------------------------------------- 1 | # /psqtraviscontainer/constants.py 2 | # 3 | # Constants shared across modules. 4 | # 5 | # See /LICENCE.md for Copyright information 6 | """Various constants useful for use and create modules.""" 7 | 8 | import os 9 | 10 | 11 | def have_proot_distribution(cwd): 12 | """Return proot distribution stamp filename.""" 13 | return os.path.join(cwd, ".have-proot-distribution") 14 | 15 | 16 | def proot_distribution_dir(cwd): 17 | """Return proot distribution dir from cwd.""" 18 | return os.path.join(cwd, "_proot") 19 | -------------------------------------------------------------------------------- /psqtraviscontainer/container.py: -------------------------------------------------------------------------------- 1 | # /psqtraviscontainer/container.py 2 | # 3 | # Abstract base class for an operating system container. 
4 | # 5 | # See /LICENCE.md for Copyright information 6 | """Abstract base class for an operating system container.""" 7 | 8 | import abc 9 | 10 | import os # suppress(PYC50) 11 | 12 | import re 13 | 14 | import shutil 15 | 16 | import subprocess 17 | 18 | import sys 19 | 20 | import textwrap 21 | 22 | from collections import namedtuple 23 | 24 | from contextlib import contextmanager 25 | 26 | import parseshebang 27 | 28 | from psqtraviscontainer import output 29 | 30 | import shutilwhich # suppress(F401,PYC50,unused-import) 31 | 32 | import six 33 | 34 | 35 | def _not_found_binary_error_msg(argv0, path_env): 36 | """Return an error message about how argv0 was not found in path_env.""" 37 | print(path_env) 38 | return "\n".join(textwrap.wrap( 39 | """Couldn't find {argv0} in the root filesystem. Possible causes """ 40 | """include no binary with the name {argv0} being in any paths """ 41 | """in the PATH environment variable either locally or as set by """ 42 | """the user. The PATH environment variable is defined as:\n{path}""" 43 | )).format(argv0=argv0, 44 | path="\n * ".join([""] + path_env.split(os.pathsep))) 45 | 46 | 47 | @contextmanager 48 | def updated_environ(prepend, overwrite): 49 | """Context with prepend added to and overwrite replacing os.environ.""" 50 | env = os.environ.copy() 51 | for key, value in prepend.items(): 52 | env[key] = "{0}{1}{2}".format(value, 53 | os.pathsep, 54 | env.get(key, "")) 55 | 56 | env.update(overwrite) 57 | 58 | old_environ = os.environ 59 | os.environ = env 60 | 61 | try: 62 | yield env 63 | finally: 64 | os.environ = old_environ 65 | 66 | 67 | class AbstractContainer(six.with_metaclass(abc.ABCMeta, object)): 68 | """An abstract class representing an OS container.""" 69 | 70 | PopenArguments = namedtuple("PopenArguments", "argv prepend overwrite") 71 | 72 | # vulture doesn't know that the __defaults__ attribute is actually 73 | # built-in. 74 | # 75 | # suppress(unused-attribute) 76 | PopenArguments.__new__.__defaults__ = (None, dict(), dict()) 77 | 78 | @staticmethod 79 | def rmtree(directory): 80 | """Remove directory, but ignore errors.""" 81 | try: 82 | shutil.rmtree(directory) 83 | except (shutil.Error, OSError): # suppress(pointless-except) 84 | pass 85 | 86 | @abc.abstractmethod 87 | def _subprocess_popen_arguments(self, argv, **kwargs): 88 | """Return a PopenArguments tuple. 89 | 90 | This indicates what should be passed to subprocess.Popen when the 91 | execute method is called on this class. 92 | """ 93 | del argv 94 | del kwargs 95 | 96 | raise NotImplementedError() 97 | 98 | @abc.abstractmethod 99 | def _package_system(self): 100 | """Return the package system this container should be using.""" 101 | raise NotImplementedError() 102 | 103 | @abc.abstractmethod 104 | def clean(self): 105 | """Clean this container to prepare it for caching. 106 | 107 | Remove any non-useful files here. 108 | """ 109 | raise NotImplementedError() 110 | 111 | def __enter__(self): 112 | """Use this container as a context.""" 113 | return self 114 | 115 | def __exit__(self, exc_type, value, traceback): 116 | """Clean this container once it has been used a context.""" 117 | del exc_type 118 | del value 119 | del traceback 120 | 121 | self.clean() 122 | 123 | def root_filesystem_directory(self): 124 | """Return absolute and real path to installed packages.""" 125 | return os.path.realpath(self._root_filesystem_directory()) 126 | 127 | def install_packages(self, repositories_path, packages_path): 128 | """Install packages and set up repositories as configured. 
129 | 130 | :repositories_path: should be a path to a text file containing 131 | a list of repositories to add to the package system 132 | before installing any packages. 133 | :packages_path: should be a path to a text file containing a 134 | list of packages to be installed. 135 | """ 136 | if packages_path: 137 | package_system = self._package_system() 138 | 139 | # Add any repositories to the package system now 140 | if repositories_path: 141 | with open(repositories_path, "r") as repositories_file: 142 | repo_lines = repositories_file.read().splitlines(False) 143 | 144 | package_system.add_repositories(repo_lines) 145 | 146 | with open(packages_path) as packages_file: 147 | packages = re.findall(r"[^\s]+", packages_file.read()) 148 | 149 | package_system.install_packages(packages) 150 | 151 | def execute(self, 152 | argv, 153 | stdout=subprocess.PIPE, 154 | stderr=subprocess.PIPE, 155 | output_modifier=None, 156 | live_output=False, 157 | env=None, 158 | **kwargs): 159 | """Execute the process and arguments indicated by argv in container.""" 160 | (argv, 161 | prepend_env, 162 | overwrite_env) = self._subprocess_popen_arguments(argv, **kwargs) 163 | 164 | # Update overwrite_env with any values that the user may 165 | # have provided in env 166 | overwrite_env.update(env or {}) 167 | 168 | with updated_environ(prepend_env, overwrite_env) as environment: 169 | if not os.path.exists(argv[0]): 170 | abs_argv0 = shutil.which(argv[0]) 171 | if abs_argv0 is None: 172 | raise RuntimeError( 173 | _not_found_binary_error_msg(argv[0], 174 | os.environ.get("PATH", 175 | "")) 176 | ) 177 | argv[0] = abs_argv0 178 | 179 | # Also use which to find the shebang program - in some cases 180 | # we may only have the name of a program but not where it 181 | # actually exists. This is necessary on some platforms like 182 | # Windows where PATH is read from its state as it existed 183 | # when this process got created, not at the time Popen was 184 | # called. 185 | argv = parseshebang.parse(str(argv[0])) + argv 186 | if not os.path.exists(argv[0]): 187 | abs_argv0 = shutil.which(argv[0]) 188 | if abs_argv0 is None: 189 | raise RuntimeError( 190 | _not_found_binary_error_msg(argv[0], 191 | os.environ.get("PATH", 192 | "")) 193 | ) 194 | argv[0] = abs_argv0 195 | 196 | executed_cmd = subprocess.Popen(argv, 197 | stdout=stdout, 198 | stderr=stderr, 199 | env=environment, 200 | universal_newlines=True) 201 | 202 | # Monitor stdout and stderr. 
We allow live output for 203 | # stdout, but not for stderr (so that it gets printed 204 | # at the end) 205 | stdout_monitor = output.monitor(executed_cmd.stdout, 206 | modifier=output_modifier, 207 | live=live_output) 208 | stderr_monitor = output.monitor(executed_cmd.stderr, 209 | modifier=output_modifier, 210 | live=False) 211 | 212 | try: 213 | executed_cmd.wait() 214 | finally: 215 | stdout_data = stdout_monitor().read() 216 | stderr_data = stderr_monitor().read() 217 | 218 | return (executed_cmd.returncode, stdout_data, stderr_data) 219 | 220 | def execute_success(self, argv, **kwargs): 221 | """Execute the command specified by argv, throws on failure.""" 222 | returncode, stdout_data, stderr_data = self.execute(argv, 223 | subprocess.PIPE, 224 | subprocess.PIPE, 225 | **kwargs) 226 | 227 | if returncode != 0: 228 | if not kwargs.get("live"): 229 | sys.stderr.write(stdout_data) 230 | 231 | sys.stderr.write(stderr_data) 232 | raise RuntimeError("""{0} failed with {1}""".format(" ".join(argv), 233 | returncode)) 234 | -------------------------------------------------------------------------------- /psqtraviscontainer/create.py: -------------------------------------------------------------------------------- 1 | # /psqtraviscontainer/create.py 2 | # 3 | # Module which handles the creation of proot in which APT packages 4 | # can be readily installed 5 | # 6 | # See /LICENCE.md for Copyright information 7 | """Module which handles the creation of proot. 8 | 9 | The proot for a distribution is a special directory entered with the proot 10 | command, which behaves like a chroot, except that no root access is required 11 | in order to create the jail. Commands running in the proot will have filesystem 12 | requests redirected to the proot and believe that they are running as uid 0 13 | """ 14 | 15 | from __future__ import unicode_literals 16 | 17 | import os 18 | 19 | from clint.textui import colored 20 | 21 | from psqtraviscontainer import common_options 22 | from psqtraviscontainer import distro 23 | from psqtraviscontainer import printer 24 | 25 | from psqtraviscontainer.architecture import Alias 26 | 27 | 28 | def _format_distribution_details(details, color=False): 29 | """Format distribution details for printing later.""" 30 | def _y_v(value): 31 | """Print value in distribution details.""" 32 | if color: 33 | return colored.yellow(value) 34 | else: 35 | return value 36 | 37 | # Maps keys in configuration to a pretty-printable name. 
38 | distro_pretty_print_map = { 39 | "distro": lambda v: """Distribution Name: """ + _y_v(v), 40 | "release": lambda v: """Release: """ + _y_v(v), 41 | "arch": lambda v: """Architecture: """ + _y_v(Alias.universal(v)), 42 | "pkgsys": lambda v: """Package System: """ + _y_v(v.__name__), 43 | } 44 | 45 | return "\n".join([ 46 | " - " + distro_pretty_print_map[key](value) 47 | for key, value in details.items() 48 | if key in distro_pretty_print_map 49 | ]) + "\n" 50 | 51 | 52 | def _print_distribution_details(details): 53 | """Print distribution details.""" 54 | output = bytearray() 55 | output += ("\n" + 56 | colored.white("""Configured Distribution:""", bold=True) + 57 | "\n").encode() 58 | output += _format_distribution_details(details, color=True).encode() 59 | 60 | printer.unicode_safe(output.decode("utf-8")) 61 | 62 | 63 | def _parse_arguments(arguments=None): 64 | """Return a parser context result.""" 65 | parser = common_options.get_parser("Create") 66 | parser.add_argument("--repositories", 67 | type=str, 68 | help="""A file containing a list of repositories to """ 69 | """add before installing packages. Special """ 70 | """keywords will control the operation of this """ 71 | """file: \n""" 72 | """{release}: The distribution release (eg, """ 73 | """precise)\n""" 74 | """{ubuntu}: Ubuntu archive URL\n""" 75 | """{launchpad}: Launchpad PPA URL header (eg,""" 76 | """ppa.launchpad.net)\n""", 77 | default=None) 78 | parser.add_argument("--packages", 79 | type=str, 80 | help="""A file containing a list of packages """ 81 | """to install""", 82 | default=None) 83 | 84 | return parser.parse_args(arguments) 85 | 86 | 87 | def main(arguments=None): 88 | """Parse arguments and set up proot. 89 | 90 | Parse arguments, fetches initial proot distribution and downloads 91 | and sets up our proot. 92 | """ 93 | result = _parse_arguments(arguments=arguments) 94 | container_dir = os.path.realpath(result.containerdir) 95 | 96 | selected_distro = distro.lookup(vars(result)) 97 | try: 98 | existing = distro.read_existing(result.containerdir) 99 | for key, value in existing.items(): 100 | if selected_distro[key] != value: 101 | details = _format_distribution_details(existing) 102 | raise RuntimeError("""A distribution described by:\n""" 103 | """{details}\n""" 104 | """already exists in {containerdir}.\n""" 105 | """Use a different container directory """ 106 | """or move this one out of the way""" 107 | """""".format(details=details, 108 | containerdir=container_dir)) 109 | except distro.NoDistributionDetailsError: # suppress(pointless-except) 110 | pass 111 | 112 | _print_distribution_details(selected_distro) 113 | 114 | # Now set up packages in the distribution. If more packages need 115 | # to be installed or the installed packages need to be updated then 116 | # the build cache should be cleared. 
117 | with selected_distro["info"].create_func(container_dir, 118 | selected_distro) as container: 119 | container.install_packages(result.repositories, result.packages) 120 | 121 | distro.write_details(result.containerdir, selected_distro) 122 | 123 | relative_containerdir = os.path.relpath(result.containerdir) 124 | msg = """\N{check mark} Container has been set up in {0}\n""" 125 | printer.unicode_safe(colored.green(msg.format(relative_containerdir), 126 | bold=True)) 127 | 128 | if __name__ == "__main__": 129 | main() 130 | -------------------------------------------------------------------------------- /psqtraviscontainer/debian_package.py: -------------------------------------------------------------------------------- 1 | # /psqtraviscontainer/debian_package.py 2 | # 3 | # Functionality common to debian packages. 4 | # 5 | # See /LICENCE.md for Copyright information 6 | """Functionality common to debian packages.""" 7 | 8 | import tarfile 9 | 10 | from contextlib import closing 11 | 12 | 13 | def extract_deb_data(archive, extract_dir): 14 | """Extract archive to extract_dir.""" 15 | # We may not have python-debian installed on all platforms 16 | from debian import arfile # suppress(import-error) 17 | 18 | data_members = ["data.tar.gz", "data.tar.xz"] 19 | 20 | for data_mem in data_members: 21 | try: 22 | with closing(arfile.ArFile(archive).getmember(data_mem)) as member: 23 | with tarfile.open(fileobj=member, 24 | mode="r|*") as data_tar: 25 | data_tar.extractall(path=extract_dir) 26 | 27 | # Succeeded, break out here 28 | break 29 | except KeyError as error: 30 | if str(error) == "'{}'".format(data_mem): 31 | continue 32 | else: 33 | raise error 34 | -------------------------------------------------------------------------------- /psqtraviscontainer/directory.py: -------------------------------------------------------------------------------- 1 | # /psqtraviscontainer/directory.py 2 | # 3 | # Utilities for safe directory navigation. 
4 | # 5 | # See /LICENCE.md for Copyright information 6 | """Utilities for safe directory navigation.""" 7 | 8 | import errno 9 | 10 | import os 11 | 12 | 13 | def safe_makedirs(path): 14 | """Make directories without throwing if a directory exists.""" 15 | try: 16 | os.makedirs(path) 17 | except OSError as err: 18 | if err.errno != errno.EEXIST: # suppress(PYC90) 19 | raise err 20 | 21 | 22 | def safe_touch(path): 23 | """Create a file without throwing if it exists.""" 24 | safe_makedirs(os.path.dirname(path)) 25 | if not os.path.exists(path): 26 | with open(path, "w") as fileobj: 27 | fileobj.write("") 28 | 29 | 30 | class Navigation(object): # pylint:disable=R0903 31 | """Context manager to enter and exit directories.""" 32 | 33 | def __init__(self, path): 34 | """Initialize the path we want to change to.""" 35 | super(Navigation, self).__init__() 36 | self._path = path 37 | self._current_dir = None 38 | 39 | def __enter__(self): 40 | """Upon entry, attempt to create the directory and then enter it.""" 41 | safe_makedirs(self._path) 42 | self._current_dir = os.getcwd() 43 | os.chdir(self._path) 44 | 45 | return self._path 46 | 47 | def __exit__(self, exc_type, value, traceback): 48 | """Pop directory on exiting with statement.""" 49 | del exc_type 50 | del traceback 51 | del value 52 | 53 | os.chdir(self._current_dir) 54 | -------------------------------------------------------------------------------- /psqtraviscontainer/distro.py: -------------------------------------------------------------------------------- 1 | # /psqtraviscontainer/distro.py 2 | # 3 | # Module in which all configurations for various distributions are stored. 4 | # 5 | # See /LICENCE.md for Copyright information 6 | """Various distribution configurations are stored here.""" 7 | 8 | import errno 9 | 10 | import itertools 11 | 12 | import json 13 | 14 | import os 15 | 16 | from collections import namedtuple 17 | 18 | 19 | DistroConfig = dict 20 | DistroInfo = namedtuple("DistroInfo", 21 | "create_func " 22 | "get_func " 23 | "match_func " 24 | "enumerate_func " 25 | "kwargs") 26 | 27 | 28 | def _distribution_information(): 29 | """Return generator of DistroInfo.""" 30 | from psqtraviscontainer import linux_container 31 | from psqtraviscontainer import linux_local_container 32 | from psqtraviscontainer import osx_container 33 | from psqtraviscontainer import windows_container 34 | 35 | return itertools.chain(linux_local_container.DISTRIBUTIONS, 36 | linux_container.DISTRIBUTIONS, 37 | osx_container.DISTRIBUTIONS, 38 | windows_container.DISTRIBUTIONS) 39 | 40 | 41 | def available_distributions(): 42 | """Return list of available distributions.""" 43 | for info in _distribution_information(): 44 | for config in info.enumerate_func(info): 45 | config["info"] = info 46 | config = config.copy() 47 | yield config 48 | 49 | 50 | class NoDistributionDetailsError(Exception): 51 | """An exception that is raised if there is no distribution in a path.""" 52 | 53 | pass 54 | 55 | 56 | def read_existing(container_dir): 57 | """Attempt to detect an existing distribution in container_dir.""" 58 | try: 59 | with open(os.path.join(container_dir, ".distroinfo")) as distroinfo_f: 60 | return json.load(distroinfo_f) 61 | except EnvironmentError as error: 62 | if error.errno == errno.ENOENT: 63 | raise NoDistributionDetailsError() 64 | else: 65 | raise error 66 | 67 | 68 | def write_details(container_dir, selected_distro): 69 | """Write details of selected_distro to container_dir.""" 70 | with open(os.path.join(container_dir, 
".distroinfo"), "w") as info_f: 71 | keys = ("distro", "installation", "arch", "release") 72 | info_f.write(json.dumps({ 73 | k: v for k, v in selected_distro.items() 74 | if k in keys 75 | })) 76 | 77 | 78 | def _search_for_matching_distro(distro_info): 79 | """Check all known distributions for one matching distro_info.""" 80 | matched_distribution = None 81 | 82 | for distribution in _distribution_information(): 83 | matched_distribution = distribution.match_func(distribution, 84 | distro_info) 85 | if matched_distribution: 86 | matched_distribution["info"] = distribution 87 | return matched_distribution 88 | 89 | 90 | def lookup(arguments): 91 | """Look up DistroConfig by matching against its name and arguments.""" 92 | matched_distribution = _search_for_matching_distro(arguments) 93 | if matched_distribution: 94 | return matched_distribution 95 | 96 | # As last resort, look inside the container directory and see if there 97 | # is something in there that we know about. 98 | if arguments.get("containerdir", None): 99 | try: 100 | distro_info = read_existing(arguments["containerdir"]) 101 | matched_distribution = _search_for_matching_distro(distro_info) 102 | 103 | if matched_distribution: 104 | return matched_distribution 105 | except NoDistributionDetailsError: # suppress(pointless-except) 106 | pass 107 | 108 | raise RuntimeError("""Couldn't find matching distribution """ 109 | """({0})""".format(repr(arguments))) 110 | -------------------------------------------------------------------------------- /psqtraviscontainer/download.py: -------------------------------------------------------------------------------- 1 | # /psqtraviscontainer/download.py 2 | # 3 | # Module with utilities for downloading files. 4 | # 5 | # See /LICENCE.md for Copyright information 6 | """Module with utilities for downloading files.""" 7 | 8 | import os 9 | 10 | import sys 11 | 12 | from clint.textui import colored, progress 13 | 14 | import requests 15 | 16 | 17 | def download_file(url, filename=None): 18 | """Download the file at url and store it at filename.""" 19 | basename = os.path.basename(filename or url) 20 | msg = """Downloading {dest} (from {source})""".format(source=url, 21 | dest=basename) 22 | sys.stdout.write(str(colored.blue(msg, bold=True))) 23 | sys.stdout.write("\n") 24 | request = requests.get(url, stream=True) 25 | length = int(request.headers.get("content-length", 0)) or None 26 | with open(filename or os.path.basename(url), "wb") as downloaded_file: 27 | chunk_size = 1024 28 | total = (length / chunk_size + 1) if length else 3000 29 | for chunk in progress.bar(request.iter_content(chunk_size=chunk_size), 30 | expected_size=total, 31 | label=basename): 32 | downloaded_file.write(chunk) 33 | downloaded_file.flush() 34 | 35 | return os.path.join(os.getcwd(), downloaded_file.name) 36 | 37 | 38 | class TemporarilyDownloadedFile(object): # pylint:disable=R0903 39 | """An enter/exit class representing a temporarily downloaded file. 40 | 41 | The file will be downloaded on enter and erased once the scope has 42 | been exited. 
43 | """ 44 | 45 | def __init__(self, url, filename=None): 46 | """Initialize the url and path to download file to.""" 47 | super(TemporarilyDownloadedFile, self).__init__() 48 | self._url = url 49 | self._path = download_file(self._url, filename) 50 | 51 | def __enter__(self): 52 | """Run file download.""" 53 | return self 54 | 55 | def __exit__(self, exc_type, value, traceback): 56 | """Remove the temporarily downloaded file.""" 57 | del exc_type 58 | del traceback 59 | del value 60 | 61 | os.remove(self._path) 62 | self._path = None 63 | 64 | def path(self): 65 | """Get temporarily downloaded file path.""" 66 | return self._path 67 | -------------------------------------------------------------------------------- /psqtraviscontainer/linux_container.py: -------------------------------------------------------------------------------- 1 | # /psqtraviscontainer/linux_container.py 2 | # 3 | # Specialization for linux containers, using proot. 4 | # 5 | # See /LICENCE.md for Copyright information 6 | """Specialization for linux containers, using proot.""" 7 | 8 | from __future__ import unicode_literals 9 | 10 | import errno 11 | 12 | import fnmatch 13 | 14 | import os 15 | 16 | import platform 17 | 18 | import shutil 19 | 20 | import stat 21 | 22 | import tarfile 23 | 24 | import tempfile 25 | 26 | from collections import defaultdict 27 | from collections import namedtuple 28 | 29 | from getpass import getuser 30 | 31 | from itertools import chain 32 | 33 | from clint.textui import colored 34 | 35 | from psqtraviscontainer import architecture 36 | from psqtraviscontainer import constants 37 | from psqtraviscontainer import container 38 | from psqtraviscontainer import debian_package 39 | from psqtraviscontainer import directory 40 | from psqtraviscontainer import distro 41 | from psqtraviscontainer import package_system 42 | from psqtraviscontainer import printer 43 | from psqtraviscontainer import util 44 | 45 | from psqtraviscontainer.download import TemporarilyDownloadedFile 46 | 47 | import tempdir 48 | 49 | _PROOT_URL_BASE = "http://static.proot.me/proot-{arch}" 50 | _QEMU_URL_BASE = ("http://download.opensuse.org/repositories" 51 | "/home:/cedric-vincent/xUbuntu_12.04/{arch}/" 52 | "qemu-user-mode_1.6.1-1_{arch}.deb") 53 | 54 | 55 | DistroInfo = distro.DistroInfo 56 | DistroConfig = distro.DistroConfig 57 | ProotDistribution = namedtuple("ProotDistribution", "proot qemu") 58 | 59 | 60 | def proot_distro_from_container(container_dir): 61 | """Return a ProotDistribution from a container dir.""" 62 | path_to_proot_dir = constants.proot_distribution_dir(container_dir) 63 | path_to_proot_bin = os.path.join(path_to_proot_dir, "bin/proot") 64 | path_to_qemu_template = os.path.join(path_to_proot_dir, 65 | "bin/qemu-{arch}") 66 | 67 | def _get_qemu_binary(arch): 68 | """Get the qemu binary for architecture.""" 69 | qemu_arch = architecture.Alias.qemu(arch) 70 | return path_to_qemu_template.format(arch=qemu_arch) 71 | 72 | def _get_proot_binary(): 73 | """Get the proot binary.""" 74 | return path_to_proot_bin 75 | 76 | return ProotDistribution(proot=_get_proot_binary, 77 | qemu=_get_qemu_binary) 78 | 79 | 80 | def get_dir_for_distro(container_dir, config): 81 | """Get the distro dir in a container_dir for a DistroConfig.""" 82 | arch = config["arch"] 83 | url = config["url"] 84 | distro_folder_name_template = (os.path.basename(url) + ".root") 85 | distro_folder_name = distro_folder_name_template.format(arch=arch) 86 | return os.path.realpath(os.path.join(container_dir, distro_folder_name)) 87 | 88 
| 89 | def _rmtrees_as_container(cont, directories): 90 | """Remove directories as the root user in the container. 91 | 92 | This allows the removal of directories where permission errors 93 | might not permit otherwise. 94 | """ 95 | root = cont.root_filesystem_directory() 96 | 97 | with tempfile.NamedTemporaryFile(dir=root, mode="wt") as bash_script: 98 | bash_script.write(";\n".join([("rm -rf " + d) for d in directories])) 99 | bash_script.flush() 100 | cont.execute(["bash", bash_script.name], minimal_bind=True) 101 | 102 | 103 | def directories_to_remove_on_clean(distro_directory): 104 | """Get directories to remove if cleaning distro_directory.""" 105 | return [ 106 | os.path.join(distro_directory, "tmp"), 107 | os.path.join(distro_directory, "var", "cache", "apt"), 108 | os.path.join(distro_directory, "var", "run"), 109 | os.path.join(distro_directory, "usr", "share", "doc"), 110 | os.path.join(distro_directory, "usr", "share", "locale"), 111 | os.path.join(distro_directory, "usr", "share", "man"), 112 | os.path.join(distro_directory, "var", "lib", "apt", "lists"), 113 | os.path.join(distro_directory, "dev") 114 | ] 115 | 116 | 117 | def directories_to_create_on_clean(distro_directory): 118 | """Get directories to create if cleaning distro_directory.""" 119 | return [ 120 | os.path.join(distro_directory, 121 | "var", 122 | "cache", 123 | "apt", 124 | "archives", 125 | "partial") 126 | ] 127 | 128 | 129 | class LinuxContainer(container.AbstractContainer): 130 | """A container for a linux distribution. 131 | 132 | We can execute commands inside this container by using proot and qemu. 133 | """ 134 | 135 | def __init__(self, # suppress(too-many-arguments) 136 | proot_distribution, 137 | distro_dir, 138 | release, 139 | arch, 140 | pkg_sys_constructor): 141 | """Initialize this LinuxContainer, storing its distribution config.""" 142 | super(LinuxContainer, self).__init__() 143 | self._proot_distro = proot_distribution 144 | self._distro_dir = distro_dir 145 | self._arch = arch 146 | self._pkgsys = pkg_sys_constructor(release, arch, self) 147 | 148 | def _subprocess_popen_arguments(self, argv, **kwargs): 149 | """For native arguments argv, return AbstractContainer.PopenArguments. 150 | 151 | This returned tuple will have no environment variables set, but the 152 | proot command to enter this container will be prepended to the 153 | argv provided. 154 | 155 | Pass minimal_bind=True to specify that no directories on the 156 | user filesystem should be exposed to the container. This will 157 | allow dpkg to remove certain system files in the container. 
158 | """ 159 | def parse_from_line(line): 160 | """Parse environment variable key-value pair from line.""" 161 | return (line.split("=")[0], 162 | "".join([c for c in line.split("=")[1] 163 | if c != "\""]).strip()) 164 | 165 | popen_args = self.__class__.PopenArguments 166 | 167 | if kwargs.get("minimal_bind", None): 168 | proot_command = [ 169 | self._proot_distro.proot(), 170 | "-r", 171 | self._distro_dir, 172 | "-0" 173 | ] 174 | else: 175 | proot_command = [ 176 | self._proot_distro.proot(), 177 | "-S", 178 | self._distro_dir 179 | ] 180 | 181 | # If we're not the same architecture, interpose qemu's emulator 182 | # for the target architecture as appropriate 183 | our_architecture = architecture.Alias.universal(platform.machine()) 184 | target_architecture = architecture.Alias.universal(self._arch) 185 | 186 | if our_architecture != target_architecture: 187 | proot_command += ["-q", self._proot_distro.qemu(self._arch)] 188 | 189 | # Favor distribution's own environment variables 190 | with open(os.path.join(self._distro_dir, "etc", "environment")) as env: 191 | etc_environment_lines = env.readlines() 192 | prepend_env = dict([parse_from_line(l) 193 | for l in etc_environment_lines 194 | if l.split("=")[0].endswith("PATH")]) 195 | overwrite_env = dict([parse_from_line(l) 196 | for l in etc_environment_lines 197 | if not l.split("=")[0].endswith("PATH")]) 198 | 199 | # Make sure that LANG and LC_ALL are set to C, instead of 200 | # whatever it was set to before 201 | overwrite_env.update({ 202 | "LANG": "C", 203 | "LC_ALL": "C" 204 | }) 205 | 206 | return popen_args(prepend=prepend_env, 207 | overwrite=overwrite_env, 208 | argv=proot_command + argv) 209 | 210 | def _root_filesystem_directory(self): 211 | """Return directory on parent filesystem where our root is located.""" 212 | return self._distro_dir 213 | 214 | def _package_system(self): 215 | """Return package system for this distribution.""" 216 | return self._pkgsys 217 | 218 | def clean(self): 219 | """Clean out this container.""" 220 | _rmtrees_as_container(self, 221 | directories_to_remove_on_clean(self._distro_dir)) 222 | 223 | self.execute(["chown", "-R", "{}:users".format(getuser()), "/"], 224 | minimal_bind=True) 225 | 226 | try: 227 | shutil.rmtree(os.path.join(self._distro_dir, "dev")) 228 | except OSError as error: 229 | if error.errno != errno.ENOENT: 230 | raise error 231 | 232 | for create_dir in directories_to_create_on_clean(self._distro_dir): 233 | try: 234 | os.makedirs(create_dir) 235 | except OSError as error: 236 | if error.errno != errno.EEXIST: # suppress(PYC90) 237 | raise error 238 | 239 | 240 | def _fetch_proot_distribution(container_root, target_arch): 241 | """Fetch the initial proot distribution if it is not available. 
242 | 243 | Touches /.have-proot-distribution when complete 244 | """ 245 | path_to_proot_check = constants.have_proot_distribution(container_root) 246 | path_to_proot_dir = constants.proot_distribution_dir(container_root) 247 | 248 | def _download_proot(distribution_dir, arch): 249 | """Download arch build of proot into distribution.""" 250 | from psqtraviscontainer.download import download_file 251 | 252 | with directory.Navigation(os.path.join(distribution_dir, 253 | "bin")): 254 | proot_url = _PROOT_URL_BASE.format(arch=arch) 255 | path_to_proot = download_file(proot_url, "proot") 256 | os.chmod(path_to_proot, 257 | os.stat(path_to_proot).st_mode | stat.S_IXUSR) 258 | return path_to_proot 259 | 260 | def _extract_qemu(qemu_deb_path, qemu_temp_dir): 261 | """Extract qemu.""" 262 | printer.unicode_safe(colored.magenta(("""-> Extracting {0}\n""" 263 | """""").format(qemu_deb_path), 264 | bold=True)) 265 | debian_package.extract_deb_data(qemu_deb_path, qemu_temp_dir) 266 | 267 | def _remove_unused_emulators(qemu_binaries_path): 268 | """Remove unused emulators from qemu distribution.""" 269 | distributions = distro.available_distributions() 270 | cur_arch = platform.machine() 271 | archs = [d["info"].kwargs["arch"] for d in distributions] 272 | archs = set([architecture.Alias.qemu(a) for a in chain(*archs) 273 | if a != architecture.Alias.universal(cur_arch)]) 274 | keep_binaries = ["qemu-" + a for a in archs] + ["proot"] 275 | 276 | for root, _, filenames in os.walk(qemu_binaries_path): 277 | for filename in filenames: 278 | if os.path.basename(filename) not in keep_binaries: 279 | os.remove(os.path.join(root, filename)) 280 | 281 | def _download_qemu(distribution_dir, arch): 282 | """Download arch build of qemu and extract binaries.""" 283 | qemu_url = _QEMU_URL_BASE.format(arch=arch) 284 | 285 | with TemporarilyDownloadedFile(qemu_url, 286 | filename="qemu.deb") as qemu_deb: 287 | # Go into a separate subdirectory and extract the qemu deb 288 | # there, then copy out the requisite files, so that we don't 289 | # cause tons of pollution 290 | qemu_tmp = os.path.join(path_to_proot_dir, "_qemu_tmp") 291 | with directory.Navigation(qemu_tmp): 292 | qemu_binaries_path = os.path.join(qemu_tmp, "usr", "bin") 293 | _extract_qemu(qemu_deb.path(), qemu_tmp) 294 | _remove_unused_emulators(qemu_binaries_path) 295 | 296 | for filename in os.listdir(qemu_binaries_path): 297 | shutil.copy(os.path.join(qemu_binaries_path, filename), 298 | os.path.join(path_to_proot_dir, "bin")) 299 | 300 | shutil.rmtree(qemu_tmp) 301 | 302 | return os.path.join(distribution_dir, "bin", "qemu-{arch}") 303 | 304 | try: 305 | os.stat(path_to_proot_check) 306 | printer.unicode_safe(colored.green("""-> """ 307 | """Using pre-existing proot """ 308 | """distribution\n""", 309 | bold=True)) 310 | 311 | except OSError: 312 | create_msg = """Creating distribution of proot in {}\n""" 313 | root_relative = os.path.relpath(container_root) 314 | printer.unicode_safe(colored.yellow(create_msg.format(root_relative), 315 | bold=True)) 316 | 317 | # Distro check does not exist - create the ./_proot directory 318 | # and download files for this architecture 319 | with directory.Navigation(path_to_proot_dir): 320 | proot_arch = architecture.Alias.universal(platform.machine()) 321 | _download_proot(path_to_proot_dir, proot_arch) 322 | 323 | # We may not need qemu if we're not going to emulate 324 | # anything. 
325 | if (architecture.Alias.universal(platform.machine()) != 326 | architecture.Alias.universal(target_arch) or 327 | os.environ.get("_FORCE_DOWNLOAD_QEMU", None)): 328 | qemu_arch = architecture.Alias.debian(platform.machine()) 329 | _download_qemu(path_to_proot_dir, qemu_arch) 330 | 331 | with open(path_to_proot_check, "w+") as check_file: 332 | check_file.write("done") 333 | 334 | printer.unicode_safe(colored.green("""\N{check mark} """ 335 | """Successfully installed proot """ 336 | """distribution to """ 337 | """{}\n""".format(root_relative), 338 | bold=True)) 339 | 340 | return proot_distro_from_container(container_root) 341 | 342 | 343 | def _extract_distro_archive(distro_archive_file, distro_folder): 344 | """Extract distribution archive into distro_folder.""" 345 | with tarfile.open(name=distro_archive_file.path()) as archive: 346 | msg = ("""-> Extracting """ 347 | """{0}\n""").format(os.path.relpath(distro_archive_file.path())) 348 | extract_members = [m for m in archive.getmembers() 349 | if not m.isdev()] 350 | printer.unicode_safe(colored.magenta(msg, bold=True)) 351 | archive.extractall(members=extract_members, path=distro_folder) 352 | 353 | # Set the permissions of the extracted archive so we can delete it 354 | # if need be. 355 | os.chmod(distro_folder, os.stat(distro_folder).st_mode | stat.S_IRWXU) 356 | for root, directories, filenames in os.walk(distro_folder): 357 | for distro_folder_directory in directories: 358 | path = os.path.join(root, distro_folder_directory) 359 | try: 360 | os.chmod(path, os.stat(path).st_mode | stat.S_IRWXU) 361 | except OSError: # suppress(pointless-except) 362 | pass 363 | for filename in filenames: 364 | path = os.path.join(root, filename) 365 | try: 366 | os.chmod(path, os.stat(path).st_mode | stat.S_IRWXU) 367 | except OSError: # suppress(pointless-except) 368 | pass 369 | 370 | 371 | def _clear_postrm_scripts_in_root(container_root): 372 | """Remove any post-rm scripts. 373 | 374 | These scripts get run when we try to remove packages, which isn't what 375 | we want, since that causes dpkg to try and call chroot, which fails 376 | when we aren't root. 
377 | """ 378 | scripts_dir = os.path.join(container_root, "var", "lib", "dpkg", "info") 379 | for script in fnmatch.filter(os.listdir(scripts_dir), "*.postrm"): 380 | os.remove(os.path.join(scripts_dir, script)) 381 | for script in fnmatch.filter(os.listdir(scripts_dir), "*.prerm"): 382 | os.remove(os.path.join(scripts_dir, script)) 383 | for script in fnmatch.filter(os.listdir(scripts_dir), "*.postinst"): 384 | os.remove(os.path.join(scripts_dir, script)) 385 | 386 | 387 | def fetch_distribution(container_root, # pylint:disable=R0913 388 | proot_distro, 389 | details): 390 | """Lazy-initialize distribution and return it.""" 391 | path_to_distro_folder = get_dir_for_distro(container_root, 392 | details) 393 | 394 | def _download_distro(details, path_to_distro_folder): 395 | """Download distribution and untar it in container root.""" 396 | distro_arch = details["arch"] 397 | download_url = details["url"].format(arch=distro_arch) 398 | with tempdir.TempDir() as download_dir: 399 | with directory.Navigation(download_dir): 400 | with TemporarilyDownloadedFile(download_url) as archive_file: 401 | _extract_distro_archive(archive_file, 402 | path_to_distro_folder) 403 | 404 | def _minimize_ubuntu(cont, root): 405 | """Reduce the install footprint of ubuntu as much as possible.""" 406 | required_packages = { 407 | "precise": set([ 408 | "apt", 409 | "base-files", 410 | "base-passwd", 411 | "bash", 412 | "bsdutils", 413 | "coreutils", 414 | "dash", 415 | "debconf", 416 | "debianutils", 417 | "diffutils", 418 | "dpkg", 419 | "findutils", 420 | "gcc-4.6-base", 421 | "gnupg", 422 | "gpgv", 423 | "grep", 424 | "gzip", 425 | "libacl1", 426 | "libapt-pkg4.12", 427 | "libattr1", 428 | "libbz2-1.0", 429 | "libc-bin", 430 | "libc6", 431 | "libdb5.1", 432 | "libffi6", 433 | "libgcc1", 434 | "liblzma5", 435 | "libpam-modules", 436 | "libpam-modules-bin", 437 | "libpam-runtime", 438 | "libpam0g", 439 | "libreadline6", 440 | "libselinux1", 441 | "libstdc++6", 442 | "libtinfo5", 443 | "libusb-0.1-4", 444 | "makedev", 445 | "mawk", 446 | "multiarch-support", 447 | "perl-base", 448 | "readline-common", 449 | "sed", 450 | "sensible-utils", 451 | "tar", 452 | "tzdata", 453 | "ubuntu-keyring", 454 | "xz-utils", 455 | "zlib1g" 456 | ]), 457 | "trusty": set([ 458 | "apt", 459 | "base-files", 460 | "base-passwd", 461 | "bash", 462 | "bsdutils", 463 | "coreutils", 464 | "dash", 465 | "debconf", 466 | "debianutils", 467 | "diffutils", 468 | "dh-python", 469 | "dpkg", 470 | "findutils", 471 | "gcc-4.8-base", 472 | "gcc-4.9-base", 473 | "gnupg", 474 | "gpgv", 475 | "grep", 476 | "gzip", 477 | "libacl1", 478 | "libapt-pkg4.12", 479 | "libaudit1", 480 | "libaudit-common", 481 | "libattr1", 482 | "libbz2-1.0", 483 | "libc-bin", 484 | "libc6", 485 | "libcap2", 486 | "libdb5.3", 487 | "libdebconfclient0", 488 | "libexpat1", 489 | "libmpdec2", 490 | "libffi6", 491 | "libgcc1", 492 | "liblzma5", 493 | "libncursesw5", 494 | "libpcre3", 495 | "libpam-modules", 496 | "libpam-modules-bin", 497 | "libpam-runtime", 498 | "libpam0g", 499 | "libpython3-stdlib", 500 | "libpython3.4-stdlib", 501 | "libpython3", 502 | "libpython3-minimal", 503 | "libpython3.4", 504 | "libpython3.4-minimal", 505 | "libreadline6", 506 | "libselinux1", 507 | "libssl1.0.0", 508 | "libstdc++6", 509 | "libsqlite3-0", 510 | "libtinfo5", 511 | "libusb-0.1-4", 512 | "lsb-release", 513 | "makedev", 514 | "mawk", 515 | "mime-support", 516 | "multiarch-support", 517 | "perl-base", 518 | "python3", 519 | "python3-minimal", 520 | "python3.4", 521 | "python3.4-minimal", 
522 | "readline-common", 523 | "sed", 524 | "sensible-utils", 525 | "tar", 526 | "tzdata", 527 | "ubuntu-keyring", 528 | "xz-utils", 529 | "zlib1g" 530 | ]) 531 | } 532 | 533 | os.environ["SUDO_FORCE_REMOVE"] = "yes" 534 | os.environ["DEBIAN_FRONTEND"] = "noninteractive" 535 | 536 | if release in required_packages: 537 | pkgs = set( 538 | cont.execute(["dpkg-query", 539 | "--admindir={}".format(os.path.join(root, 540 | "var", 541 | "lib", 542 | "dpkg")), 543 | "-Wf", 544 | "${Package}\n"])[1].split("\n") 545 | ) 546 | release = details["release"] 547 | remove = [ 548 | l for l in list(pkgs ^ required_packages[release]) if len(l) 549 | ] 550 | 551 | if root != "/": 552 | _clear_postrm_scripts_in_root(root) 553 | 554 | if len(remove): 555 | cont.execute_success( 556 | [ 557 | "dpkg", 558 | "--root={}".format(root), 559 | "--purge", 560 | "--force-all" 561 | ] + remove, 562 | minimal_bind=True 563 | ) 564 | 565 | with open(os.path.join(get_dir_for_distro(container_root, 566 | details), 567 | "etc", 568 | "apt", 569 | "apt.conf.d", 570 | "99container"), "w") as apt_config: 571 | apt_config.write("\n".join([ 572 | "APT::Install-Recommends \"0\";", 573 | "APT::Install-Suggests \"0\";" 574 | ])) 575 | 576 | # Container isn't safe to use until we've either verified that the 577 | # path to the distro folder exists or we've downloaded a distro into it 578 | linux_cont = LinuxContainer(proot_distro, 579 | path_to_distro_folder, 580 | details["release"], 581 | details["arch"], 582 | details["pkgsys"]) 583 | 584 | minimize_actions = defaultdict(lambda: lambda c, p: None) 585 | 586 | try: 587 | os.stat(path_to_distro_folder) 588 | use_existing_msg = ("""\N{check mark} Using existing folder for """ 589 | """proot distro """ 590 | """{distro} {release} {arch}\n""") 591 | printer.unicode_safe(colored.green(use_existing_msg.format(**details), 592 | bold=True)) 593 | return (linux_cont, minimize_actions) 594 | except OSError: 595 | # Download the distribution tarball in the distro dir 596 | _download_distro(details, path_to_distro_folder) 597 | 598 | # Minimize the installed distribution, but only when it 599 | # was just initially downloaded 600 | minimize_actions = defaultdict(lambda: lambda c: None, 601 | Ubuntu=_minimize_ubuntu) 602 | 603 | return (linux_cont, minimize_actions) 604 | 605 | 606 | def container_for_directory(container_dir, distro_config): 607 | """Return an existing LinuxContainer at container_dir for distro_config. 608 | 609 | Also take into account arguments in result to look up the the actual 610 | directory for this distro. 
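# Editor's note: the worked example below is not part of the original file.
# It illustrates the set arithmetic used by _minimize_ubuntu above: the
# packages reported by dpkg-query are compared against the per-release
# whitelist and everything outside it is handed to `dpkg --purge --force-all`.
# The package names are toy values for the example.
installed = {"apt", "bash", "coreutils", "vim-tiny", "wget"}
required = {"apt", "bash", "coreutils"}

# The code above uses the symmetric difference (installed ^ required); when
# the whitelist is a subset of what is installed, that is the same as the
# plain difference shown here.
to_purge = sorted(installed - required)
assert to_purge == ["vim-tiny", "wget"]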
611 | """ 612 | path_to_distro_folder = get_dir_for_distro(container_dir, 613 | distro_config) 614 | 615 | required_entities = [ 616 | constants.have_proot_distribution(container_dir), 617 | path_to_distro_folder 618 | ] 619 | 620 | for entity in required_entities: 621 | util.check_if_exists(entity) 622 | 623 | proot_distribution = proot_distro_from_container(container_dir) 624 | 625 | return LinuxContainer(proot_distribution, 626 | path_to_distro_folder, 627 | distro_config["release"], 628 | distro_config["arch"], 629 | distro_config["pkgsys"]) 630 | 631 | 632 | def create(container_dir, distro_config): 633 | """Create a container using proot.""" 634 | # First fetch a proot distribution if we don't already have one 635 | proot_distro = _fetch_proot_distribution(container_dir, 636 | distro_config["arch"]) 637 | 638 | # Now fetch the distribution tarball itself, if we specified one 639 | cont, minimize_actions = fetch_distribution(container_dir, 640 | proot_distro, 641 | distro_config) 642 | minimize_actions[distro_config["distro"]](cont, "/") 643 | 644 | return cont 645 | 646 | 647 | def _info_with_arch_to_config(info, arch): 648 | """Convert selected architecture for DistroInfo into DistroConfig.""" 649 | config = info.kwargs.copy() 650 | 651 | del config["arch"] 652 | del config["archfetch"] 653 | 654 | config["arch"] = arch 655 | 656 | return config 657 | 658 | 659 | def _valid_archs(archs): 660 | """Return valid archs to emulate from archs. 661 | 662 | 64 bit architectures can't be emulated on a 32 bit system, so remove 663 | them form the list of valid architectures. 664 | """ 665 | blacklist = defaultdict(lambda: None) 666 | blacklist["x86"] = "x86_64" 667 | blacklist["x86_64"] = "x86" 668 | 669 | arch_alias = architecture.Alias.universal 670 | machine = arch_alias(platform.machine()) 671 | 672 | return [a for a in archs if arch_alias(a) != blacklist[machine]] 673 | 674 | 675 | def match(info, arguments): 676 | """Check if info matches arguments.""" 677 | if platform.system() != "Linux": 678 | return None 679 | 680 | if arguments.get("distro", None) != info.kwargs["distro"]: 681 | return None 682 | 683 | if arguments.get("local", None): 684 | return None 685 | 686 | if arguments.get("installation", None) == "local": 687 | return None 688 | 689 | distro_release = info.kwargs["release"] 690 | 691 | # pychecker thinks that a list comprehension as a return value is 692 | # always None. 693 | distro_archs = _valid_archs(info.kwargs["arch"]) # suppress(PYC90) 694 | distro_archfetch = info.kwargs["archfetch"] 695 | 696 | if arguments.get("release", None) == distro_release: 697 | converted = distro_archfetch(arguments.get("arch", None)) 698 | if converted in distro_archs: 699 | return _info_with_arch_to_config(info, converted) 700 | 701 | return None 702 | 703 | 704 | def enumerate_all(info): 705 | """Enumerate all valid configurations for this DistroInfo.""" 706 | if platform.system() != "Linux": 707 | return 708 | 709 | # proot based distributions are completely broken on 710 | # Travis-CI (just exits with signal 11 immediately after 711 | # execution) so don't even both running them here. 
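# Editor's note: the sketch below is not part of the original file. It
# exercises the blacklist idea behind _valid_archs above, with the host
# architecture passed in explicitly (the real function derives it from
# platform.machine() via architecture.Alias.universal).
from collections import defaultdict

blacklist = defaultdict(lambda: None)
blacklist["x86"] = "x86_64"
blacklist["x86_64"] = "x86"


def valid_archs(archs, machine):
    """Filter out the architecture that cannot be emulated on machine."""
    return [a for a in archs if a != blacklist[machine]]

assert valid_archs(["x86", "x86_64", "arm"], "x86") == ["x86", "arm"]
assert valid_archs(["x86", "x86_64", "arm"], "x86_64") == ["x86_64", "arm"]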
712 | if os.environ.get("CI"): 713 | return 714 | 715 | for arch in _valid_archs(info.kwargs["arch"]): # suppress(PYC90) 716 | yield _info_with_arch_to_config(info, arch) 717 | 718 | 719 | class LinuxInfo(DistroInfo): 720 | """Linux-specific specialization of DistroInfo.""" 721 | 722 | PACKAGE_SYSTEMS = { 723 | "Ubuntu": package_system.Dpkg, 724 | "Debian": package_system.Dpkg, 725 | "Fedora": package_system.Yum 726 | } 727 | 728 | def __new__(cls, distro_type, **kwargs): 729 | """Create DistroInfo namedtuple using provided arguments.""" 730 | kwargs.update({ 731 | "distro": distro_type, 732 | "pkgsys": LinuxInfo.PACKAGE_SYSTEMS[distro_type], 733 | "installation": "proot" 734 | }) 735 | 736 | return DistroInfo.__new__(cls, 737 | create_func=create, 738 | get_func=container_for_directory, 739 | match_func=match, 740 | enumerate_func=enumerate_all, 741 | kwargs=kwargs) 742 | 743 | DISTRIBUTIONS = [ # suppress(unused-variable) 744 | LinuxInfo("Ubuntu", 745 | release="precise", 746 | url=("http://old-releases.ubuntu.com/releases/ubuntu-core/" 747 | "releases/12.04.3/release/" 748 | "ubuntu-core-12.04.3-core-{arch}.tar.gz"), 749 | arch=["i386", "amd64", "armhf"], 750 | archfetch=architecture.Alias.debian), 751 | LinuxInfo("Ubuntu", 752 | release="trusty", 753 | url=("http://old-releases.ubuntu.com/releases/ubuntu-core/" 754 | "releases/utopic/release/" 755 | "ubuntu-core-14.10-core-{arch}.tar.gz"), 756 | arch=["i386", "amd64", "armhf", "powerpc"], 757 | archfetch=architecture.Alias.debian), 758 | LinuxInfo("Ubuntu", 759 | release="focal", 760 | url=("http://cdimage.ubuntu.com/ubuntu-base/" 761 | "releases/20.04/release/" 762 | "ubuntu-base-20.04-base-{arch}.tar.gz"), 763 | arch=["amd64"], 764 | archfetch=architecture.Alias.debian), 765 | LinuxInfo("Debian", 766 | release="wheezy", 767 | url=("http://download.openvz.org/" 768 | "template/precreated/debian-7.0-{arch}-minimal.tar.gz"), 769 | arch=["x86", "x86_64"], 770 | archfetch=architecture.Alias.universal), 771 | LinuxInfo("Debian", 772 | release="squeeze", 773 | url=("http://download.openvz.org/" 774 | "template/precreated/debian-6.0-{arch}-minimal.tar.gz"), 775 | arch=["x86", "x86_64"], 776 | archfetch=architecture.Alias.universal), 777 | LinuxInfo("Fedora", 778 | release="20", 779 | url=("http://download.openvz.org/" 780 | "template/precreated/fedora-20-{arch}.tar.gz"), 781 | arch=["x86", "x86_64"], 782 | # suppress(PYC50) 783 | archfetch=architecture.Alias.universal) 784 | ] 785 | -------------------------------------------------------------------------------- /psqtraviscontainer/linux_local_container.py: -------------------------------------------------------------------------------- 1 | # /psqtraviscontainer/linux_local_container.py 2 | # 3 | # Specialization for linux containers. This version bootstraps 4 | # package manager locally, without root access, and uses 5 | # environment variables to control binary access. 
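# Editor's note: the sketch below is not part of the original repository.
# It shows the record pattern used by LinuxInfo above: each platform module
# exposes DistroInfo entries that bundle create/get/match/enumerate
# callables with keyword metadata, so the generic distro lookup code never
# needs platform-specific branches. The namedtuple here is a simplified
# stand-in for distro.DistroInfo.
from collections import namedtuple

DistroInfoSketch = namedtuple("DistroInfoSketch",
                              "create_func get_func match_func "
                              "enumerate_func kwargs")


def _match_sketch(info, arguments):
    """Return the kwargs when the requested distro matches."""
    if arguments.get("distro") == info.kwargs["distro"]:
        return info.kwargs
    return None

ENTRY = DistroInfoSketch(create_func=None,
                         get_func=None,
                         match_func=_match_sketch,
                         enumerate_func=lambda info: iter([info.kwargs]),
                         kwargs={"distro": "Ubuntu", "release": "precise"})

assert ENTRY.match_func(ENTRY, {"distro": "Ubuntu"})["release"] == "precise"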
6 | # 7 | # See /LICENCE.md for Copyright information 8 | """Specialization for linux containers, using environment variables.""" 9 | 10 | from __future__ import unicode_literals 11 | 12 | import errno 13 | 14 | import os 15 | 16 | import platform 17 | 18 | import shutil 19 | 20 | from psqtraviscontainer import architecture 21 | from psqtraviscontainer import container 22 | from psqtraviscontainer import distro 23 | from psqtraviscontainer import linux_container 24 | from psqtraviscontainer import package_system 25 | 26 | 27 | DistroInfo = distro.DistroInfo 28 | DistroConfig = distro.DistroConfig 29 | 30 | 31 | def get_dir_for_distro(container_dir, config): 32 | """Get the distro dir in a container_dir for a DistroConfig.""" 33 | arch = config["arch"] 34 | url = config["url"] 35 | distro_folder_name_template = (os.path.basename(url) + ".root") 36 | distro_folder_name = distro_folder_name_template.format(arch=arch) 37 | return os.path.realpath(os.path.join(container_dir, distro_folder_name)) 38 | 39 | 40 | class LocalLinuxContainer(container.AbstractContainer): 41 | """A container for a linux distribution. 42 | 43 | We can execute commands inside this container by using proot and qemu. 44 | """ 45 | 46 | def __init__(self, # suppress(too-many-arguments) 47 | package_root, 48 | release, 49 | arch, 50 | pkg_sys_constructor): 51 | """Initialize this LocalLinuxContainer, storing its distro config.""" 52 | super(LocalLinuxContainer, self).__init__() 53 | self._arch = arch 54 | self._package_root = package_root 55 | self._pkgsys = pkg_sys_constructor(release, arch, self) 56 | 57 | def _root_filesystem_directory(self): 58 | """Return directory on parent filesystem where our root is located.""" 59 | return self._package_root 60 | 61 | def _package_system(self): 62 | """Return package system for this distribution.""" 63 | return self._pkgsys 64 | 65 | def _subprocess_popen_arguments(self, argv, **kwargs): 66 | """For native arguments argv, return AbstractContainer.PopenArguments. 67 | 68 | This returned tuple will have no environment variables set, but the 69 | proot command to enter this container will be prepended to the 70 | argv provided. 
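# Editor's note: the walk-through below is not part of the original file.
# It runs get_dir_for_distro above over one DistroConfig that appears later
# in this file, showing how the on-disk folder name is derived from the
# download URL plus the resolved architecture.
import os

config = {
    "arch": "amd64",
    "url": ("http://old-releases.ubuntu.com/releases/ubuntu-core/"
            "releases/12.04.3/release/"
            "ubuntu-core-12.04.3-core-{arch}.tar.gz"),
}

folder = (os.path.basename(config["url"]) + ".root").format(
    arch=config["arch"])
assert folder == "ubuntu-core-12.04.3-core-amd64.tar.gz.root"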
71 | """ 72 | popen_args = self.__class__.PopenArguments 73 | prepend_env = { 74 | "LD_LIBRARY_PATH": os.pathsep.join([ 75 | os.path.join(self._package_root, 76 | "usr", 77 | "lib", 78 | "x86_64-linux-gnu"), 79 | os.path.join(self._package_root, 80 | "usr", 81 | "lib", 82 | "i686-linux-gnu"), 83 | os.path.join(self._package_root, 84 | "usr", 85 | "lib") 86 | ]), 87 | "PKG_CONFIG_PATH": os.pathsep.join([ 88 | os.path.join(self._package_root, 89 | "usr", 90 | "lib", 91 | "pkgconfig"), 92 | os.path.join(self._package_root, 93 | "usr", 94 | "lib", 95 | "x86_64-linux-gnu", 96 | "pkgconfig"), 97 | os.path.join(self._package_root, 98 | "usr", 99 | "lib", 100 | "i686-linux-gnu", 101 | "pkgconfig") 102 | ]), 103 | "LIBRARY_PATH": os.pathsep.join([ 104 | os.path.join(self._package_root, 105 | "usr", 106 | "lib"), 107 | os.path.join(self._package_root, 108 | "usr", 109 | "lib", 110 | "x86_64-linux-gnu"), 111 | os.path.join(self._package_root, 112 | "usr", 113 | "lib", 114 | "i686-linux-gnu") 115 | ]), 116 | "INCLUDE_PATH": os.pathsep.join([ 117 | os.path.join(self._package_root, 118 | "usr", 119 | "include") 120 | ]), 121 | "CPATH": os.pathsep.join([ 122 | os.path.join(self._package_root, 123 | "usr", 124 | "include") 125 | ]), 126 | "CPPPATH": os.pathsep.join([ 127 | os.path.join(self._package_root, 128 | "usr", 129 | "include") 130 | ]), 131 | "PATH": os.pathsep.join([ 132 | os.path.join(self._package_root, 133 | "usr", 134 | "bin") 135 | ]) 136 | } 137 | 138 | return popen_args(prepend=prepend_env, 139 | overwrite=dict(), 140 | argv=argv) 141 | 142 | def clean(self): 143 | """Clean out this container.""" 144 | remove_directories = linux_container.directories_to_remove_on_clean( 145 | self._package_root 146 | ) 147 | for directory in remove_directories: 148 | if os.path.islink(directory): 149 | continue 150 | 151 | try: 152 | shutil.rmtree(os.path.join(self._package_root, directory)) 153 | except OSError as error: 154 | if error.errno != errno.ENOENT: 155 | raise error 156 | 157 | create_directories = linux_container.directories_to_create_on_clean( 158 | self._package_root 159 | ) 160 | 161 | for directory in create_directories: 162 | try: 163 | os.makedirs(directory) 164 | except OSError as error: 165 | if error.errno != errno.EEXIST: # suppress(PYC90) 166 | raise error 167 | 168 | 169 | def container_for_directory(container_dir, distro_config): 170 | """G an existing LocalLinuxContainer at container_dir for distro_config. 171 | 172 | Also take into account arguments in result to look up the the actual 173 | directory for this distro. 
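# Editor's note: the sketch below is not part of the original file, and the
# merge helper is an assumption for illustration only; the actual merging of
# "prepend" values into the child environment happens in the
# AbstractContainer base class, which is outside this section. It shows the
# intent of the environment block built above: container paths are joined in
# front of any host value, so binaries and libraries inside the package root
# take precedence.
import os


def merged_environment(prepend, base=None):
    """Join each prepend value ahead of the corresponding base value."""
    env = dict(base if base is not None else os.environ)
    for key, value in prepend.items():
        env[key] = os.pathsep.join([value, env[key]]) if env.get(key) else value
    return env

package_root = "/tmp/container/ubuntu-core-14.10-core-amd64.tar.gz.root"
prepend = {"PATH": os.path.join(package_root, "usr", "bin")}
merged = merged_environment(prepend, base={"PATH": "/usr/bin:/bin"})
assert merged["PATH"].startswith(package_root)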
174 | """ 175 | path_to_distro_folder = get_dir_for_distro(container_dir, 176 | distro_config) 177 | 178 | return LocalLinuxContainer(path_to_distro_folder, 179 | distro_config["release"], 180 | distro_config["arch"], 181 | distro_config["pkgsys"]) 182 | 183 | 184 | def create(container_dir, distro_config): 185 | """Create a container using proot.""" 186 | _, minimize_actions = linux_container.fetch_distribution(container_dir, 187 | None, 188 | distro_config) 189 | path_to_distro_folder = get_dir_for_distro(container_dir, 190 | distro_config) 191 | 192 | local_container = LocalLinuxContainer(path_to_distro_folder, 193 | distro_config["release"], 194 | distro_config["arch"], 195 | distro_config["pkgsys"]) 196 | minimize_actions[distro_config["distro"]](local_container, 197 | path_to_distro_folder) 198 | return local_container 199 | 200 | 201 | def _info_with_arch_to_config(info, arch): 202 | """Convert selected architecture for DistroInfo into DistroConfig.""" 203 | config = info.kwargs.copy() 204 | 205 | del config["arch"] 206 | del config["archfetch"] 207 | 208 | config["arch"] = arch 209 | 210 | return config 211 | 212 | 213 | def _valid_archs(archs): 214 | """Return valid archs to emulate from archs.""" 215 | alias = architecture.Alias.universal(platform.machine()) 216 | return [a for a in archs 217 | if architecture.Alias.universal(a) == alias] 218 | 219 | 220 | def match(info, arguments): 221 | """Check if info matches arguments.""" 222 | if platform.system() != "Linux": 223 | return None 224 | 225 | if arguments.get("distro", None) != info.kwargs["distro"]: 226 | return None 227 | 228 | if not (arguments.get("local", None) or 229 | arguments.get("installation", None) == "local"): 230 | return None 231 | 232 | distro_release = info.kwargs["release"] 233 | 234 | # pychecker thinks that a list comprehension as a return value is 235 | # always None. 
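# Editor's note: the sketch below is not part of the original file. It
# mirrors _info_with_arch_to_config above: DistroInfo kwargs carry a list of
# candidate architectures plus an archfetch converter, while the resulting
# DistroConfig names exactly one resolved architecture.
info_kwargs = {
    "distro": "Ubuntu",
    "release": "trusty",
    "arch": ["i386", "amd64", "armhf", "powerpc"],
    "archfetch": str,  # stand-in for architecture.Alias.debian
}

config = info_kwargs.copy()
del config["arch"]
del config["archfetch"]
config["arch"] = "amd64"

assert config == {"distro": "Ubuntu", "release": "trusty", "arch": "amd64"}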
236 | distro_archs = _valid_archs(info.kwargs["arch"]) # suppress(PYC90) 237 | distro_archfetch = info.kwargs["archfetch"] 238 | 239 | if arguments.get("release", None) == distro_release: 240 | converted = distro_archfetch(arguments.get("arch", None)) 241 | if converted in distro_archs: 242 | return _info_with_arch_to_config(info, converted) 243 | 244 | return None 245 | 246 | 247 | def enumerate_all(info): 248 | """Enumerate all valid configurations for this DistroInfo.""" 249 | if platform.system() != "Linux": 250 | return 251 | 252 | for arch in _valid_archs(info.kwargs["arch"]): # suppress(PYC90) 253 | yield _info_with_arch_to_config(info, arch) 254 | 255 | 256 | class LinuxLocalInfo(DistroInfo): 257 | """Linux-specific specialization of DistroInfo.""" 258 | 259 | PACKAGE_SYSTEMS = { 260 | "Ubuntu": package_system.DpkgLocal, 261 | } 262 | 263 | def __new__(cls, distro_type, **kwargs): 264 | """Create DistroInfo tuple using provided arguments.""" 265 | kwargs.update({ 266 | "distro": distro_type, 267 | "pkgsys": LinuxLocalInfo.PACKAGE_SYSTEMS[distro_type], 268 | "installation": "local" 269 | }) 270 | 271 | return DistroInfo.__new__(cls, 272 | create_func=create, 273 | get_func=container_for_directory, 274 | match_func=match, 275 | enumerate_func=enumerate_all, 276 | kwargs=kwargs) 277 | 278 | DISTRIBUTIONS = [ # suppress(unused-variable) 279 | LinuxLocalInfo("Ubuntu", 280 | release="precise", 281 | url=("http://old-releases.ubuntu.com/releases/ubuntu-core/" 282 | "releases/12.04.3/release/" 283 | "ubuntu-core-12.04.3-core-{arch}.tar.gz"), 284 | arch=["i386", "amd64", "armhf"], 285 | archfetch=architecture.Alias.debian), 286 | LinuxLocalInfo("Ubuntu", 287 | release="trusty", 288 | url=("http://old-releases.ubuntu.com/releases/ubuntu-core/" 289 | "releases/utopic/release/" 290 | "ubuntu-core-14.10-core-{arch}.tar.gz"), 291 | arch=["i386", "amd64", "armhf", "powerpc"], 292 | archfetch=architecture.Alias.debian), 293 | LinuxLocalInfo("Ubuntu", 294 | release="focal", 295 | url=("http://cdimage.ubuntu.com/ubuntu-base/releases/20.04/release/" 296 | "releases/utopic/release/" 297 | "ubuntu-base-20.04-base-{arch}.tar.gz"), 298 | arch=["amd64"], 299 | archfetch=architecture.Alias.debian) 300 | ] 301 | -------------------------------------------------------------------------------- /psqtraviscontainer/osx_container.py: -------------------------------------------------------------------------------- 1 | # /psqtraviscontainer/osx_container.py 2 | # 3 | # Specialization for OS X containers, using environment variables. 4 | # 5 | # See /LICENCE.md for Copyright information 6 | """Specialization for OS X containers, using environment variables.""" 7 | 8 | import os 9 | 10 | import platform 11 | 12 | import shutil 13 | 14 | import tarfile 15 | 16 | from clint.textui import colored 17 | 18 | from psqtraviscontainer import container 19 | from psqtraviscontainer import directory 20 | from psqtraviscontainer import distro 21 | from psqtraviscontainer import package_system 22 | from psqtraviscontainer import printer 23 | from psqtraviscontainer import util 24 | 25 | from psqtraviscontainer.download import TemporarilyDownloadedFile 26 | 27 | import tempdir 28 | 29 | DistroInfo = distro.DistroInfo 30 | 31 | _HOMEBREW_URL = "https://github.com/Homebrew/brew/archive/master.tar.gz" 32 | 33 | 34 | class OSXContainer(container.AbstractContainer): 35 | """A container for OS X. 
36 | 37 | We can execute commands inside this container by setting the 38 | required environment variables to pick commands from this 39 | path. 40 | """ 41 | 42 | def __init__(self, # suppress(too-many-arguments) 43 | homebrew_distribution, 44 | pkg_sys_constructor): 45 | """Initialize this OSXContainer, storing its distro configuration.""" 46 | super(OSXContainer, self).__init__() 47 | self._prefix = homebrew_distribution 48 | self._pkgsys = pkg_sys_constructor(self) 49 | 50 | def _subprocess_popen_arguments(self, argv, **kwargs): 51 | """For native arguments argv, return AbstractContainer.PopenArguments. 52 | 53 | This returned tuple will have no environment variables set, but the 54 | proot command to enter this container will be prepended to the 55 | argv provided. 56 | """ 57 | del kwargs 58 | 59 | popen_args = self.__class__.PopenArguments 60 | popen_env = { 61 | "PATH": os.path.join(self._prefix, "bin"), 62 | "DYLD_LIBRARY_PATH": os.path.join(self._prefix, "lib"), 63 | "PKG_CONFIG_PATH": os.path.join(self._prefix, "lib", "pkgconfig") 64 | } 65 | return popen_args(prepend=popen_env, argv=argv) 66 | 67 | def _root_filesystem_directory(self): 68 | """Return directory on parent filesystem where our root is located.""" 69 | return self._prefix 70 | 71 | def _package_system(self): 72 | """Return package system for this distribution.""" 73 | return self._pkgsys 74 | 75 | def clean(self): 76 | """Clean out this container to prepare it for caching.""" 77 | pass 78 | 79 | 80 | def _extract_archive(archive_file, container_folder): 81 | """Extract distribution archive into container_folder.""" 82 | msg = ("""-> Extracting {0}\n""").format(archive_file.path()) 83 | printer.unicode_safe(colored.magenta(msg, bold=True)) 84 | with tarfile.open(name=archive_file.path()) as archive: 85 | extract_members = archive.getmembers() 86 | archive.extractall(members=extract_members, path=container_folder) 87 | 88 | 89 | def container_for_directory(container_dir, distro_config): 90 | """Return an existing OSXContainer at container_dir for distro_config. 91 | 92 | Also take into account arguments in result to look up the the actual 93 | directory for this distro. 94 | """ 95 | util.check_if_exists(os.path.join(container_dir, "bin", "brew")) 96 | 97 | return OSXContainer(container_dir, distro_config["pkgsys"]) 98 | 99 | 100 | def _fetch_homebrew(container_dir, distro_config): 101 | """Fetch homebrew and untar it in the container directory.""" 102 | try: 103 | os.stat(os.path.join(container_dir, "bin", "brew")) 104 | return container_for_directory(container_dir, distro_config) 105 | except OSError: 106 | with directory.Navigation(tempdir.TempDir().name): 107 | with TemporarilyDownloadedFile(_HOMEBREW_URL, 108 | filename="brew") as archive_file: 109 | with directory.Navigation(tempdir.TempDir().name) as extract: 110 | _extract_archive(archive_file, extract) 111 | first = os.path.join(extract, 112 | os.listdir(extract)[0]) 113 | files = [os.path.join(first, p) for p in os.listdir(first)] 114 | for filename in files: 115 | try: 116 | filename_base = os.path.basename(filename) 117 | shutil.move(filename, os.path.join(container_dir, 118 | filename_base)) 119 | except IOError: # suppress(pointless-except) 120 | # Ignore stuff that can't be moved for whatever 121 | # reason. These are all files that generally 122 | # don't matter. 
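# Editor's note: the usage sketch below is not part of the original file.
# It isolates the "already installed?" check used by _fetch_homebrew above:
# os.stat raises OSError while <container>/bin/brew is missing, and only in
# that case is the Homebrew tarball fetched from _HOMEBREW_URL and unpacked
# into the container directory. The path below is a made-up example.
import os

container_dir = "/tmp/psq-osx-container"

try:
    os.stat(os.path.join(container_dir, "bin", "brew"))
    print("existing Homebrew prefix found; nothing to download")
except OSError:
    print("no brew binary yet; _fetch_homebrew would download "
          "https://github.com/Homebrew/brew/archive/master.tar.gz")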
123 | pass 124 | 125 | return OSXContainer(container_dir, distro_config["pkgsys"]) 126 | 127 | 128 | def create(container_dir, distro_config): 129 | """Create a container using homebrew.""" 130 | # First fetch a proot distribution if we don't already have one 131 | return _fetch_homebrew(container_dir, distro_config) 132 | 133 | 134 | def match(info, arguments): 135 | """Check for matching configuration from DISTRIBUTIONS for arguments. 136 | 137 | In effect, this just means checking if we're on OS X. 138 | """ 139 | if platform.system() != "Darwin": 140 | return None 141 | 142 | if arguments.get("distro", None) != "OSX": 143 | return None 144 | 145 | return info.kwargs 146 | 147 | 148 | def enumerate_all(info): 149 | """Enumerate all valid configurations for this DistroInfo.""" 150 | if platform.system() != "Darwin": 151 | return 152 | 153 | yield info.kwargs 154 | 155 | 156 | DISTRIBUTIONS = [ 157 | DistroInfo(create_func=create, 158 | get_func=container_for_directory, 159 | match_func=match, 160 | enumerate_func=enumerate_all, 161 | kwargs={ 162 | "distro": "OSX", 163 | "pkgsys": package_system.Brew # suppress(PYC50) 164 | }) 165 | ] 166 | -------------------------------------------------------------------------------- /psqtraviscontainer/output.py: -------------------------------------------------------------------------------- 1 | # /psqtraviscontainer/output.py 2 | # 3 | # Helper classes to monitor and capture output as it runs. 4 | # 5 | # See /LICENCE.md for Copyright information 6 | """Helper classes to monitor and capture output as it runs.""" 7 | 8 | import sys 9 | 10 | import threading 11 | 12 | 13 | def monitor(stream, 14 | modifier=None, 15 | live=False, 16 | output=sys.stdout): 17 | """Monitor and print lines from stream until end of file is reached. 18 | 19 | Each line is piped through :modifier:. 20 | """ 21 | from six import StringIO 22 | captured = StringIO() 23 | modifier = modifier or (lambda l: l) 24 | 25 | def read_thread(): 26 | """Read each line from the stream and print it.""" 27 | # No stream, not much we can really do here. 28 | if not stream: 29 | return 30 | 31 | for line in stream: 32 | line = modifier(line) 33 | captured.write(line) 34 | if live: 35 | output.write(line) 36 | output.flush() 37 | 38 | def joiner_for_output(thread): 39 | """Closure to join the thread and do something with its output.""" 40 | thread.start() 41 | 42 | def join(): 43 | """Join the thread and then return its output.""" 44 | thread.join() 45 | captured.seek(0) 46 | return captured 47 | 48 | return join 49 | 50 | # Note that while it is necessary to call joiner_for_output if you want 51 | # resources to be cleaned up, it is not necessary if you don't care 52 | # about cleanup and just want the program to keep running. 53 | return joiner_for_output(threading.Thread(target=read_thread)) 54 | -------------------------------------------------------------------------------- /psqtraviscontainer/package_system.py: -------------------------------------------------------------------------------- 1 | # /psqtraviscontainer/package_system.py 2 | # 3 | # Implementations of package-system controllers for various distributions. 
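# Editor's note: the usage sketch below is not part of the original
# repository. It demonstrates the monitor() helper from output.py above:
# the call starts a reader thread and returns a join() closure; calling the
# closure waits for the thread and hands back a StringIO containing
# everything read from the stream, passed through the modifier.
import io

from psqtraviscontainer import output

stream = io.StringIO("configure\nbuild\n")
join = output.monitor(stream, modifier=lambda line: "-> " + line)
captured = join()
assert captured.read() == "-> configure\n-> build\n"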
4 | # 5 | # See /LICENCE.md for Copyright information 6 | """Implementations of package-system controllers for various distributions.""" 7 | 8 | import abc 9 | 10 | import errno 11 | 12 | import fnmatch 13 | 14 | import os 15 | 16 | import platform 17 | 18 | import shutil 19 | 20 | import stat 21 | 22 | import subprocess 23 | 24 | import sys 25 | 26 | import tarfile 27 | 28 | import tempfile 29 | 30 | import textwrap 31 | 32 | from collections import namedtuple 33 | 34 | from clint.textui import colored 35 | 36 | from psqtraviscontainer import directory 37 | from psqtraviscontainer import download 38 | 39 | import six 40 | 41 | import tempdir 42 | 43 | _UBUNTU_MAIN_ARCHS = ["i386", "amd64"] 44 | _UBUNTU_PORT_ARCHS = ["armhf", "arm64", "powerpc", "ppc64el"] 45 | _UBUNTU_MAIN_ARCHIVE = "http://archive.ubuntu.com/ubuntu/" 46 | _UBUNTU_PORT_ARCHIVE = "http://ports.ubuntu.com/ubuntu-ports/" 47 | 48 | 49 | def _report_task(description): 50 | """Report task description.""" 51 | sys.stdout.write(str(colored.white("-> {0}\n".format(description)))) 52 | 53 | 54 | def _run_task(executor, description, argv, env=None, detail=None): 55 | """Run command through executor argv and prints description.""" 56 | def wrapper(line): 57 | """Output wrapper for line.""" 58 | return textwrap.indent(line, " ") 59 | 60 | detail = "[{}]".format(" ".join(argv)) if detail is None else detail 61 | _report_task(description + " " + detail) 62 | (code, 63 | stdout_data, 64 | stderr_data) = executor.execute(argv, 65 | output_modifier=wrapper, 66 | live_output=True, 67 | env=env) 68 | sys.stderr.write(stderr_data) 69 | 70 | 71 | def _format_package_list(packages): 72 | """Return a nicely formatted list of package names.""" 73 | "\n (*) ".join([""] + packages) 74 | 75 | 76 | class PackageSystem(six.with_metaclass(abc.ABCMeta, object)): 77 | """An abstract class representing a package manager.""" 78 | 79 | PopenArguments = namedtuple("PopenArguments", "argv env") 80 | 81 | @abc.abstractmethod 82 | def add_repositories(self, repos): 83 | """Add repositories to central packaging system.""" 84 | del repos 85 | 86 | raise NotImplementedError() 87 | 88 | @abc.abstractmethod 89 | def install_packages(self, package_names): 90 | """Install specified packages in package_names.""" 91 | del package_names 92 | 93 | raise NotImplementedError() 94 | 95 | 96 | class Dpkg(PackageSystem): 97 | """Debian Packaging System.""" 98 | 99 | def __init__(self, 100 | release, 101 | arch, 102 | executor): 103 | """Initialize Dpkg with release and arch.""" 104 | super(Dpkg, self).__init__() 105 | self._release = release 106 | self._arch = arch 107 | self._executor = executor 108 | 109 | @staticmethod 110 | def format_repositories(repos, release, arch): 111 | """Take a list of APT lines and format them. 112 | 113 | There are certain shortcuts that you can use. 114 | 115 | {ubuntu} will be replaced by http://archive.ubuntu.com/ and 116 | the architecture. 117 | 118 | {debian} will be replaced by http://ftp.debian.org/. 119 | 120 | {launchpad} will be replaced by "http://ppa.launchpad.net/. 121 | 122 | {release} gets replaced by the release of the distribution, which 123 | means you don't need a repository file for every distribution. 
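# Editor's note: the usage sketch below is not part of the original file.
# It exercises Dpkg.format_repositories as described in its docstring
# above: the {ubuntu} and {release} shortcuts are expanded for the given
# architecture and "deb " is prepended, producing a line ready for a
# sources.list file.
from psqtraviscontainer.package_system import Dpkg

lines = Dpkg.format_repositories(["{ubuntu} {release} universe"],
                                 release="trusty",
                                 arch="amd64")
assert lines == ["deb http://archive.ubuntu.com/ubuntu/ trusty universe"]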
124 | """ 125 | _ubuntu_urls = [ 126 | (_UBUNTU_MAIN_ARCHS, _UBUNTU_MAIN_ARCHIVE), 127 | (_UBUNTU_PORT_ARCHS, _UBUNTU_PORT_ARCHIVE) 128 | ] 129 | 130 | def _format_user_line(line, kwargs): 131 | """Format a line and turns it into a valid repository line.""" 132 | formatted_line = line.format(**kwargs) 133 | return "deb {0}".format(formatted_line) 134 | 135 | def _value_or_error(value): 136 | """Return first item in value, or ERROR if value is empty.""" 137 | return value[0] if len(value) else "ERROR" 138 | 139 | format_keys = { 140 | "ubuntu": [u[1] for u in _ubuntu_urls if arch in u[0]], 141 | "debian": ["http://ftp.debian.org/"], 142 | "launchpad": ["http://ppa.launchpad.net/"], 143 | "release": [release] 144 | } 145 | format_keys = { 146 | k: _value_or_error(v) for k, v in format_keys.items() 147 | } 148 | 149 | return [_format_user_line(l, format_keys) for l in repos] 150 | 151 | def add_repositories(self, repos): 152 | """Add a repository to the central packaging system.""" 153 | # We will be creating a bash script each time we need to add 154 | # a new source line to our sources list and executing that inside 155 | # the proot. This guarantees that we'll always get the right 156 | # permissions. 157 | with tempfile.NamedTemporaryFile() as bash_script: 158 | append_lines = Dpkg.format_repositories(repos, 159 | self._release, 160 | self._arch) 161 | for count, append_line in enumerate(append_lines): 162 | path = "/etc/apt/sources.list.d/{0}.list".format(count) 163 | append_cmd = "echo \"{0}\" > {1}\n".format(append_line, path) 164 | bash_script.write(six.b(append_cmd)) 165 | 166 | bash_script.flush() 167 | self._executor.execute_success(["bash", bash_script.name], 168 | requires_full_access=True) 169 | 170 | def install_packages(self, package_names): 171 | """Install all packages in list package_names.""" 172 | if len(package_names): 173 | _run_task(self._executor, 174 | """Update repositories""", 175 | ["apt-get", "update", "-y", "--force-yes"]) 176 | _run_task(self._executor, 177 | """Install APT packages""", 178 | ["apt-get", 179 | "install", 180 | "-y", 181 | "--force-yes"] + package_names, 182 | detail=_format_package_list(package_names)) 183 | 184 | 185 | class DpkgLocal(PackageSystem): 186 | """Debian packaging system, installing packages to local directory.""" 187 | 188 | def __init__(self, release, arch, executor): 189 | """Initialize this PackageSystem.""" 190 | super(DpkgLocal, self).__init__() 191 | self._release = release 192 | self._arch = arch 193 | self._executor = executor 194 | 195 | def _initialize_directories(self): 196 | """Ensure that all APT and Dpkg directories are initialized.""" 197 | root = self._executor.root_filesystem_directory() 198 | directory.safe_makedirs(os.path.join(root, 199 | "var", 200 | "cache", 201 | "apt", 202 | "archives", 203 | "partial")) 204 | directory.safe_makedirs(os.path.join(root, 205 | "var", 206 | "lib", 207 | "apt", 208 | "lists", 209 | "partial")) 210 | directory.safe_makedirs(os.path.join(root, 211 | "var", 212 | "lib", 213 | "dpkg", 214 | "updates")) 215 | directory.safe_makedirs(os.path.join(root, 216 | "var", 217 | "lib", 218 | "dpkg", 219 | "info")) 220 | directory.safe_makedirs(os.path.join(root, 221 | "var", 222 | "lib", 223 | "dpkg", 224 | "parts")) 225 | directory.safe_touch(os.path.join(root, 226 | "var", 227 | "lib", 228 | "dpkg", 229 | "status")) 230 | directory.safe_touch(os.path.join(root, 231 | "var", 232 | "lib", 233 | "dpkg", 234 | "available")) 235 | 236 | for confpath in ["apt.conf", 237 | "preferences", 238 | 
"trusted.gpg", 239 | "sources.list"]: 240 | directory.safe_makedirs(os.path.join(root, 241 | "etc", 242 | "apt", 243 | confpath + ".d")) 244 | 245 | config_file_contents = "\n".join([ 246 | "Apt {", 247 | " Architecture \"" + self._arch + "\";", 248 | " Get {", 249 | " Assume-Yes true;", 250 | " };", 251 | "};", 252 | "debug {", 253 | " nolocking true;", 254 | "};", 255 | "Acquire::Queue-Mode \"host\";", 256 | "Dir \"" + root + "\";", 257 | "Dir::Cache \"" + root + "/var/cache/apt\";", 258 | "Dir::State \"" + root + "/var/lib/apt\";", 259 | "Dir::State::status \"" + root + "/var/lib/dpkg/status\";", 260 | "Dir::Bin::Solvers \"" + root + "/usr/lib/apt/solvers\";", 261 | "Dir::Bin::Planners \"" + root + "/usr/lib/apt/planners\";", 262 | "Dir::Bin::Solvers \"" + root + "/usr/lib/apt/solvers\";", 263 | "Dir::Bin::Methods \"" + root + "/usr/lib/apt/methods\";", 264 | "Dir::Bin::Dpkg \"" + root + "/usr/bin/dpkg.w\";", 265 | "Dir::Etc \"" + root + "/etc/apt\";", 266 | "Dir::Log \"" + root + "/var/log/apt\";" 267 | ]) 268 | apt_config_path = os.path.join(root, "etc", "apt", "apt.conf") 269 | with open(apt_config_path, "w") as config_file: 270 | config_file.write(config_file_contents) 271 | 272 | dpkg_script_contents = "\n".join([ 273 | "#!/bin/bash", 274 | root + "/usr/bin/dpkg --root='" + root + "' \\", 275 | "--admindir=" + root + "/var/lib/dpkg \\", 276 | "--log=" + root + "/var/log/dkpkg.log \\", 277 | "--force-not-root --force-bad-path $@" 278 | ]) 279 | dpkg_bin_path = os.path.join(root, "usr", "bin", "dpkg.w") 280 | with open(dpkg_bin_path, "w") as dpkg_bin: 281 | dpkg_bin.write(dpkg_script_contents) 282 | os.chmod(dpkg_bin_path, os.stat(dpkg_bin_path).st_mode | stat.S_IXUSR) 283 | 284 | def add_repositories(self, repos): 285 | """Add repository to the central packaging system.""" 286 | self._initialize_directories() 287 | 288 | root = self._executor.root_filesystem_directory() 289 | sources_list = os.path.join(root, "etc", "apt", "sources.list") 290 | 291 | try: 292 | with open(sources_list) as sources: 293 | known_repos = [s for s in sources.read().split("\n") if len(s)] 294 | except EnvironmentError as error: 295 | if error.errno != errno.ENOENT: 296 | raise error 297 | 298 | known_repos = [] 299 | 300 | all_repos = (set(Dpkg.format_repositories(repos, 301 | self._release, 302 | self._arch)) | 303 | set(known_repos)) 304 | 305 | with open(sources_list, "w") as sources: 306 | sources.write("\n".join(sorted(list(all_repos)))) 307 | 308 | def install_packages(self, package_names): 309 | """Install all packages in list package_names. 310 | 311 | This works in a somewhat non-standard way. We will be 312 | updating the repository list as usual, but will be 313 | using a combination of apt-get download and 314 | dpkg manually to install packages into a local 315 | directory which we control. 
316 | """ 317 | self._initialize_directories() 318 | 319 | from six.moves.urllib.parse import urlparse # suppress(import-error) 320 | 321 | root = self._executor.root_filesystem_directory() 322 | environment = { 323 | "APT_CONFIG": os.path.join(root, "etc", "apt", "apt.conf") 324 | } 325 | _run_task(self._executor, 326 | """Update repositories""", 327 | ["apt-get", "update", "-y", "--force-yes"], 328 | env=environment) 329 | 330 | # Separate out into packages that need to be downloaded with 331 | # apt-get and packages that can be downloaded directly 332 | # using download_file 333 | deb_packages = [p for p in package_names if urlparse(p).scheme] 334 | apt_packages = [p for p in package_names if not urlparse(p).scheme] 335 | 336 | # Clear out /var/cache/apt/archives 337 | archives = os.path.join(root, "var", "cache", "apt", "archives") 338 | if os.path.exists(archives): 339 | shutil.rmtree(archives) 340 | os.makedirs(archives) 341 | 342 | if len(deb_packages): 343 | with directory.Navigation(archives): 344 | _report_task("""Downloading user-specified packages""") 345 | for deb in deb_packages: 346 | download.download_file(deb) 347 | 348 | # Now use apt-get install -d to download the apt_packages and their 349 | # dependencies, but not install them 350 | if len(apt_packages): 351 | _run_task(self._executor, 352 | """Downloading APT packages and dependencies""", 353 | ["apt-get", 354 | "-y", 355 | "--force-yes", 356 | "-d", 357 | "install", 358 | "--reinstall"] + apt_packages, 359 | env=environment, 360 | detail=_format_package_list(apt_packages)) 361 | 362 | # Go back into our archives directory and unpack all our packages 363 | with directory.Navigation(archives): 364 | package_files = fnmatch.filter(os.listdir("."), "*.deb") 365 | for pkg in package_files: 366 | _run_task(self._executor, 367 | """Unpacking """, 368 | ["dpkg", "-x", pkg, root], 369 | detail=os.path.splitext(os.path.basename(pkg))[0]) 370 | 371 | 372 | class Yum(PackageSystem): 373 | """Red Hat Packaging System.""" 374 | 375 | def __init__(self, 376 | release, 377 | arch, 378 | executor): 379 | """Initialize Yum with release and executor.""" 380 | del arch 381 | del release 382 | 383 | super(Yum, self).__init__() 384 | self._executor = executor 385 | 386 | def add_repositories(self, repos): 387 | """Add a repository to the central packaging system.""" 388 | with tempdir.TempDir() as download_dir: 389 | with directory.Navigation(download_dir): 390 | for repo in repos: 391 | repo_file = download.download_file(repo) 392 | # Create a bash script to copy the downloaded repo file 393 | # over to /etc/yum/repos.d 394 | with tempfile.NamedTemporaryFile() as bash_script: 395 | copy_cmd = ("cp \"{0}\"" 396 | "/etc/yum/repos.d").format(repo_file) 397 | bash_script.write(six.b(copy_cmd)) 398 | bash_script.flush() 399 | self._executor.execute_success(["bash", 400 | bash_script.name]) 401 | 402 | def install_packages(self, package_names): 403 | """Install all packages in list package_names.""" 404 | if len(package_names): 405 | _run_task(self._executor, 406 | """Install packages""", 407 | ["yum", "install", "-y"] + package_names, 408 | detail=_format_package_list(package_names)) 409 | 410 | 411 | def extract_tarfile(name): 412 | """Extract a tarfile. 413 | 414 | We attempt to do this in python, but work around bugs in the tarfile 415 | implementation on various operating systems. 416 | """ 417 | # LZMA extraction in broken on Travis-CI with OSX. Shell out to 418 | # tar instead. 
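# Editor's note: the sketch below is not part of the original file. It
# shows the package-splitting rule described in the install_packages
# docstring above and used in its implementation: names that parse with a
# URL scheme are downloaded directly as .deb files, everything else is
# resolved through apt-get. The URL is a made-up example.
from six.moves.urllib.parse import urlparse

package_names = ["build-essential",
                 "http://example.com/pool/main/h/hello/hello_2.10_amd64.deb"]

deb_packages = [p for p in package_names if urlparse(p).scheme]
apt_packages = [p for p in package_names if not urlparse(p).scheme]

assert apt_packages == ["build-essential"]
assert len(deb_packages) == 1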
419 | if platform.system() == "Darwin" and os.path.splitext(name)[1] == ".xz": 420 | proc = subprocess.Popen(["tar", "-xJvf", name], 421 | stdout=subprocess.PIPE, 422 | stderr=subprocess.PIPE) 423 | (stdout, stderr) = proc.communicate() 424 | ret = proc.wait() 425 | 426 | if ret != 0: 427 | raise RuntimeError("""Extraction of {archive} failed """ 428 | """with {ret}\n{stdout}\n{stderr}""" 429 | """""".format(archive=name, 430 | ret=ret, 431 | stdout=stdout.decode(), 432 | stderr=stderr.decode())) 433 | return 434 | 435 | with tarfile.open(name=name) as tarfileobj: 436 | tarfileobj.extractall() 437 | 438 | 439 | class Brew(PackageSystem): 440 | """Homebrew packaging system for OS X.""" 441 | 442 | def __init__(self, executor): 443 | """Initialize homebrew for executor.""" 444 | super(Brew, self).__init__() 445 | self._executor = executor 446 | 447 | def add_repositories(self, repos): 448 | """Add repositories as specified at repos. 449 | 450 | Adds repositories using brew tap. 451 | """ 452 | for repo in repos: 453 | _run_task(self._executor, 454 | """Adding repository {0}""".format(repo), 455 | ["brew", "tap", repo]) 456 | 457 | def install_packages(self, package_names): 458 | """Install all packages in list package_names.""" 459 | from six.moves import shlex_quote # suppress(import-error) 460 | from six.moves.urllib.parse import urlparse # suppress(import-error) 461 | 462 | # Drop directories which cause problems for brew taps 463 | hb_docs = os.path.join(self._executor.root_filesystem_directory(), 464 | "share", 465 | "doc", 466 | "homebrew") 467 | if os.path.exists(hb_docs): 468 | shutil.rmtree(hb_docs) 469 | 470 | # Separate out into packages that need to be downloaded with 471 | # brew and those that can be downloaded directly 472 | tar_packages = [p for p in package_names if urlparse(p).scheme] 473 | brew_packages = [p for p in package_names if not urlparse(p).scheme] 474 | 475 | if len(brew_packages): 476 | _run_task(self._executor, 477 | """Updating repositories""", 478 | ["brew", "update"]) 479 | 480 | _run_task(self._executor, 481 | """Install packages""", 482 | ["brew", "install"] + brew_packages, 483 | detail=_format_package_list(brew_packages)) 484 | 485 | for tar_pkg in tar_packages: 486 | _report_task("""Install {}""".format(tar_pkg)) 487 | with tempdir.TempDir() as download_dir: 488 | with directory.Navigation(download_dir): 489 | download.download_file(tar_pkg) 490 | extract_tarfile(os.path.basename(tar_pkg)) 491 | # The shell provides an easy way to do this, so just 492 | # use subprocess to call out to it. 493 | extracted_dir = [d for d in os.listdir(download_dir) 494 | if d != os.path.basename(tar_pkg)][0] 495 | subprocess.check_call("cp -r {src}/* {dst}".format( 496 | src=shlex_quote(extracted_dir), 497 | dst=self._executor.root_filesystem_directory() 498 | ), shell=True) 499 | 500 | 501 | class Choco(PackageSystem): 502 | """Chocolatey packaging system for Windows.""" 503 | 504 | def __init__(self, executor): 505 | """Initialize choco for executor.""" 506 | super(Choco, self).__init__() 507 | self._executor = executor 508 | 509 | def add_repositories(self, repos): 510 | """Add repositories as specified at repos. 511 | 512 | This function doesn't do anything on Choco at the moment. 
513 | """ 514 | pass 515 | 516 | def install_packages(self, package_names): 517 | """Install all packages in list package_names.""" 518 | _run_task(self._executor, 519 | """Install packages""", 520 | ["choco", "install", "-fy", "-m"] + package_names, 521 | detail=_format_package_list(package_names)) 522 | -------------------------------------------------------------------------------- /psqtraviscontainer/printer.py: -------------------------------------------------------------------------------- 1 | # /psqtraviscontainer/printer.py 2 | # 3 | # Utility functions for printing unicode text. 4 | # 5 | # See /LICENCE.md for Copyright information 6 | """Utility functions for printing unicode text.""" 7 | 8 | import platform 9 | 10 | import sys 11 | 12 | 13 | def unicode_safe(text): 14 | """Print text to standard output, handle unicode.""" 15 | # If a replacement of sys.stdout doesn't have isatty, don't trust it. 16 | # Also don't trust Windows to get this right either. 17 | if (not getattr(sys.stdout, "isatty", None) or 18 | not sys.stdout.isatty() or 19 | platform.system() == "Windows"): 20 | text = "".join([c for c in str(text) if ord(c) < 128]) 21 | 22 | sys.stdout.write(str(text)) 23 | sys.stdout.flush() 24 | -------------------------------------------------------------------------------- /psqtraviscontainer/rootdir.py: -------------------------------------------------------------------------------- 1 | # /psqtraviscontainer/rootdir.py 2 | # 3 | # Print the root directory of a selected container to standard out. 4 | # 5 | # See /LICENCE.md for Copyright information 6 | """Print the root directory of a selected container to standard out.""" 7 | 8 | import os 9 | 10 | import sys 11 | 12 | from psqtraviscontainer import common_options 13 | from psqtraviscontainer import distro 14 | 15 | 16 | def main(arguments=None): 17 | """Get container and print root filesystem directory.""" 18 | parser = common_options.get_parser("""Get root directory for""") 19 | result = parser.parse_args(arguments) 20 | container_dir = os.path.realpath(result.containerdir) 21 | 22 | selected_distro = distro.lookup(vars(result)) 23 | 24 | # Get the selected distribution's container and print its root 25 | # filesystem directory 26 | with selected_distro["info"].get_func(container_dir, 27 | selected_distro) as container: 28 | sys.stdout.write(container.root_filesystem_directory()) 29 | 30 | if __name__ == "__main__": 31 | main() 32 | -------------------------------------------------------------------------------- /psqtraviscontainer/use.py: -------------------------------------------------------------------------------- 1 | # /psqtraviscontainer/use.py 2 | # 3 | # Module which handles the running of scripts and commands inside of a proot 4 | # 5 | # See /LICENCE.md for Copyright information 6 | """Module which handles the running of scripts inside of a proot.""" 7 | 8 | import os 9 | 10 | import sys 11 | 12 | from psqtraviscontainer import common_options 13 | from psqtraviscontainer import distro 14 | 15 | 16 | def _parse_arguments(arguments=None): 17 | """Return a parser context result.""" 18 | parser = common_options.get_parser("Use") 19 | parser.add_argument("--show-output", 20 | action="store_true", 21 | help="""Don't buffer output - show it immediately.""") 22 | return parser.parse_args(arguments) 23 | 24 | 25 | def main(arguments=None): 26 | """Select a distro in the container root and runs a command in it.""" 27 | arguments = (arguments or sys.argv[1:]) 28 | 29 | try: 30 | two_dashes_argument = arguments.index("--") 
31 | except ValueError: 32 | sys.stdout.write("""Command line must specify command to """ 33 | """run with two dashes\n""") 34 | sys.exit(1) 35 | 36 | parseable_arguments = arguments[:two_dashes_argument] 37 | command = arguments[two_dashes_argument + 1:] 38 | 39 | argparse_result = _parse_arguments(arguments=parseable_arguments) 40 | 41 | container_dir = os.path.realpath(argparse_result.containerdir) 42 | selected_distro = distro.lookup(vars(argparse_result)) 43 | with selected_distro["info"].get_func(container_dir, 44 | selected_distro) as container: 45 | if argparse_result.show_output: 46 | execute_kwargs = { 47 | "stderr": None, 48 | "stdout": None 49 | } 50 | else: 51 | execute_kwargs = { 52 | "live_output": True 53 | } 54 | 55 | result = container.execute(command, **execute_kwargs)[0] 56 | 57 | return result 58 | -------------------------------------------------------------------------------- /psqtraviscontainer/util.py: -------------------------------------------------------------------------------- 1 | # /psqtraviscontainer/util.py 2 | # 3 | # Utility functions for working with files. 4 | # 5 | # See /LICENCE.md for Copyright information 6 | """Utility functions for working with files.""" 7 | 8 | import os 9 | 10 | 11 | def check_if_exists(entity): 12 | """Raise RuntimeError if entity does not exist.""" 13 | if not os.path.exists(entity): 14 | raise RuntimeError("""A required entity {0} does not exist\n""" 15 | """Try running psq-travis-container-create """ 16 | """first before using psq-travis-container-use.""" 17 | """""".format(entity)) 18 | -------------------------------------------------------------------------------- /psqtraviscontainer/windows_container.py: -------------------------------------------------------------------------------- 1 | # /psqtraviscontainer/windows_container.py 2 | # 3 | # Specialization for Windows containers, using environment variables. 4 | # 5 | # See /LICENCE.md for Copyright information 6 | """Specialization for Windows containers, using environment variables.""" 7 | 8 | import errno 9 | 10 | import fnmatch 11 | 12 | import os 13 | 14 | import platform 15 | 16 | import subprocess 17 | 18 | import sys 19 | 20 | from psqtraviscontainer import container 21 | from psqtraviscontainer import directory 22 | from psqtraviscontainer import distro 23 | from psqtraviscontainer import package_system 24 | from psqtraviscontainer import util 25 | 26 | import tempdir 27 | 28 | DistroInfo = distro.DistroInfo 29 | 30 | _CHOCO_URL = "https://chocolatey.org/install.ps1" 31 | _CHOCO_INSTALL_CMD = ("iex ((new-object net.webclient).DownloadString('" + 32 | _CHOCO_URL + "'))") 33 | 34 | 35 | class WindowsContainer(container.AbstractContainer): 36 | """A container for Windows. 37 | 38 | We can execute commands inside this container by setting the 39 | required environment variables to pick commands from this 40 | path. 41 | """ 42 | 43 | def __init__(self, # suppress(too-many-arguments) 44 | chocolatey_distribution, 45 | pkg_sys_constructor): 46 | """Initialize this WindowsContainer, storing its distro config.""" 47 | super(WindowsContainer, self).__init__() 48 | self._prefix = chocolatey_distribution 49 | self._pkgsys = pkg_sys_constructor(self) 50 | 51 | def _subprocess_popen_arguments(self, argv, **kwargs): 52 | """For native arguments argv, return AbstractContainer.PopenArguments. 53 | 54 | This returned tuple will have no environment variables set, but the 55 | proot command to enter this container will be prepended to the 56 | argv provided. 
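# Editor's note: the sketch below is not part of the original repository.
# It shows the convention that use.main() above relies on: the command line
# of psq-travis-container-exec is split at the literal "--", with options to
# the left and the command to run inside the container to the right.
arguments = ["./container", "--distro", "Ubuntu", "--release", "trusty",
             "--", "uname", "-m"]

two_dashes = arguments.index("--")
parseable = arguments[:two_dashes]
command = arguments[two_dashes + 1:]

assert parseable == ["./container", "--distro", "Ubuntu",
                     "--release", "trusty"]
assert command == ["uname", "-m"]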
57 | """ 58 | del kwargs 59 | 60 | popen_args = self.__class__.PopenArguments 61 | popen_env = { 62 | "PATH": os.path.join(self._prefix, "bin") 63 | } 64 | return popen_args(prepend=popen_env, argv=argv) 65 | 66 | def _root_filesystem_directory(self): 67 | """Return directory on parent filesystem where our root is located.""" 68 | return self._prefix 69 | 70 | def _package_system(self): 71 | """Return package system for this distribution.""" 72 | return self._pkgsys 73 | 74 | def clean(self): 75 | """Remove unnecessary files in this container.""" 76 | rmtree = container.AbstractContainer.rmtree 77 | rmtree(os.path.join(self._prefix, "logs")) 78 | 79 | for root, directories, files in os.walk(os.path.join(self._prefix, 80 | "lib")): 81 | for directory_name in directories: 82 | path_to_directory = os.path.join(root, directory_name) 83 | blacklist = [ 84 | "*/doc", 85 | "*/man", 86 | "*/html" 87 | ] 88 | 89 | for blacklisted_dir in blacklist: 90 | if fnmatch.fnmatch(path_to_directory, 91 | blacklisted_dir): 92 | rmtree(path_to_directory) 93 | 94 | for filename in files: 95 | path_to_file = os.path.join(root, filename) 96 | blacklist = [ 97 | "*.old" 98 | ] 99 | 100 | for blacklisted_file in blacklist: 101 | if fnmatch.fnmatch(path_to_file, 102 | blacklisted_file): 103 | os.remove(blacklisted_file) 104 | 105 | 106 | def container_for_directory(container_dir, distro_config): 107 | """Return an existing WindowsContainer at container_dir for distro_config. 108 | 109 | Also take into account arguments in result to look up the the actual 110 | directory for this distro. 111 | """ 112 | util.check_if_exists(os.path.join(container_dir, "bin", "choco.exe")) 113 | 114 | return WindowsContainer(container_dir, distro_config["pkgsys"]) 115 | 116 | 117 | def _execute_no_output(command): 118 | """Execute command, but don't show output unless it fails.""" 119 | process = subprocess.Popen(command, 120 | stdout=subprocess.PIPE, 121 | stderr=subprocess.PIPE) 122 | stdout, stderr = process.communicate() 123 | 124 | if process.returncode != 0: 125 | sys.stdout.write(stdout) 126 | sys.stderr.write(stderr) 127 | raise RuntimeError("""Process {0} failed """ 128 | """with {1}""".format(" ".join(command), 129 | process.returncode)) 130 | 131 | 132 | def _fetch_choco(container_dir, distro_config): 133 | """Fetch chocolatey and install it in the container directory.""" 134 | try: 135 | os.stat(os.path.join(container_dir, "bin", "choco.exe")) 136 | return container_for_directory(container_dir, distro_config) 137 | except OSError: 138 | with tempdir.TempDir() as download_dir: 139 | with directory.Navigation(download_dir): 140 | _execute_no_output(["setx", 141 | "ChocolateyInstall", 142 | container_dir]) 143 | 144 | # Also set the variable in the local environment 145 | # too, so that it gets propagated down to our 146 | # children 147 | os.environ["ChocolateyInstall"] = container_dir 148 | 149 | try: 150 | os.makedirs(container_dir) 151 | except OSError as error: 152 | if error.errno != errno.EEXIST: 153 | raise error 154 | 155 | _execute_no_output(["powershell", 156 | "-NoProfile", 157 | "-ExecutionPolicy", 158 | "Bypass", 159 | "-Command", 160 | _CHOCO_INSTALL_CMD]) 161 | 162 | # Reset variable back to original state to prevent 163 | # polluting the user's registry 164 | _execute_no_output(["setx", 165 | "ChocolateyInstall", 166 | ""]) 167 | 168 | return WindowsContainer(container_dir, distro_config["pkgsys"]) 169 | 170 | 171 | def create(container_dir, distro_config): 172 | """Create a container using chocolatey.""" 173 
| return _fetch_choco(container_dir, distro_config) 174 | 175 | 176 | def match(info, arguments): 177 | """Check for matching configuration from DISTRIBUTIONS for arguments. 178 | 179 | In effect, this just means checking if we're on Windows. 180 | """ 181 | if platform.system() != "Windows": 182 | return None 183 | 184 | if arguments.get("distro", None) != "Windows": 185 | return None 186 | 187 | return info.kwargs 188 | 189 | 190 | def enumerate_all(info): 191 | """Enumerate all valid configurations for this DistroInfo.""" 192 | if platform.system() != "Windows": 193 | return 194 | 195 | yield info.kwargs 196 | 197 | 198 | DISTRIBUTIONS = [ 199 | DistroInfo(create_func=create, 200 | get_func=container_for_directory, 201 | match_func=match, 202 | enumerate_func=enumerate_all, 203 | kwargs={ 204 | "distro": "Windows", 205 | "pkgsys": package_system.Choco 206 | }) 207 | ] 208 | -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | testtools==1.7.1 2 | nose==1.3.6 3 | nose-parameterized==0.5.0 4 | mock==1.0.1 5 | setuptools-green<=0.1.0 6 | polysquare-setuptools-lint<=0.1.0 7 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | # /setup.py 2 | # 3 | # Installation and setup script for psqtraviscontainer 4 | # 5 | # See /LICENCE.md for Copyright information 6 | """Installation and setup script for psqtraviscontainer.""" 7 | 8 | import platform 9 | 10 | from setuptools import find_packages, setup 11 | 12 | INSTALL_EXTRAS = [] 13 | 14 | if platform.system() != "Windows": 15 | INSTALL_EXTRAS.extend([ 16 | "python-debian" 17 | ]) 18 | 19 | setup(name="polysquare-travis-container", 20 | version="0.0.47", 21 | description="""Polysquare Travis-CI Container Root""", 22 | long_description_markdown_filename="README.md", 23 | long_description_content_type="text/markdown", 24 | author="Sam Spilsbury", 25 | author_email="smspillaz@gmail.com", 26 | classifiers=["Development Status :: 3 - Alpha", 27 | "Intended Audience :: Developers", 28 | "Topic :: Software Development :: Build Tools", 29 | "License :: OSI Approved :: MIT License", 30 | "Programming Language :: Python :: 3", 31 | "Programming Language :: Python :: 3.3", 32 | "Programming Language :: Python :: 3.4"], 33 | url="http://github.com/polysquare/polysquare-travis-container", 34 | license="MIT", 35 | keywords="development travis", 36 | packages=find_packages(exclude=["test"]), 37 | install_requires=["clint", 38 | "parse-shebang>=0.0.3", 39 | "requests", 40 | "six", 41 | "shutilwhich", 42 | "tempdir"] + INSTALL_EXTRAS, 43 | extras_require={ 44 | "upload": [ 45 | "setuptools-markdown" 46 | ] 47 | }, 48 | entry_points={ 49 | "console_scripts": [ 50 | "psq-travis-container-create=psqtraviscontainer.create:main", 51 | "psq-travis-container-exec=psqtraviscontainer.use:main", 52 | "psq-travis-container-get-root=psqtraviscontainer.rootdir:main" 53 | ] 54 | }, 55 | test_suite="nose.collector", 56 | zip_safe=True, 57 | include_package_data=True) 58 | -------------------------------------------------------------------------------- /test/__init__.py: -------------------------------------------------------------------------------- 1 | # /test/__init__.py 2 | # 3 | # Initializes the tests 4 | # 5 | # See /LICENCE.md for Copyright information 6 | """Initializes the tests.""" 7 | 
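# Editor's note: the usage sketch below is not part of the original
# repository. The console scripts declared in setup.py above are thin
# wrappers around main() functions, so the same behaviour can be driven
# directly from Python, which is exactly how the acceptance tests below do
# it. Running this performs real downloads; the flags mirror those the
# tests generate from a DistroConfig.
from psqtraviscontainer import create

create.main(arguments=["./container",
                       "--distro", "Ubuntu",
                       "--release", "trusty",
                       "--local"])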
-------------------------------------------------------------------------------- /test/test_acceptance.py: -------------------------------------------------------------------------------- 1 | # /test/test_acceptance.py 2 | # 3 | # Test case for psqtraviscontainer/create.py, creating proot containers 4 | # 5 | # See /LICENCE.md for Copyright information 6 | """Test case for psqtraviscontainer/create.py, creating proot containers.""" 7 | 8 | import os 9 | 10 | import platform 11 | 12 | import stat 13 | 14 | import sys 15 | 16 | import tempfile 17 | 18 | from collections import namedtuple 19 | 20 | from contextlib import contextmanager 21 | 22 | from test.testutil import (download_file_cached, 23 | temporary_environment) 24 | 25 | from nose_parameterized import parameterized 26 | 27 | from psqtraviscontainer import architecture 28 | from psqtraviscontainer import create 29 | from psqtraviscontainer import use 30 | 31 | from psqtraviscontainer.architecture import Alias 32 | 33 | from psqtraviscontainer.constants import have_proot_distribution 34 | from psqtraviscontainer.constants import proot_distribution_dir 35 | 36 | from psqtraviscontainer.distro import available_distributions 37 | 38 | from psqtraviscontainer.linux_container import get_dir_for_distro 39 | 40 | import tempdir 41 | 42 | from testtools import ExpectedException 43 | from testtools import TestCase 44 | 45 | from testtools.matchers import DirExists 46 | from testtools.matchers import FileExists 47 | 48 | 49 | def _convert_to_switch_args(kwargs): 50 | """Convert keyword arguments to command line switches.""" 51 | arguments = [] 52 | 53 | def _get_representation(value): 54 | """Get representation of value as a list.""" 55 | if isinstance(value, list): 56 | return " ".join(value) 57 | else: 58 | return str(value) 59 | 60 | for key, value in kwargs.items(): 61 | if not isinstance(value, bool) or value: 62 | arguments.append("--{0}".format(key)) 63 | if not isinstance(value, bool): 64 | arguments.append(_get_representation(value)) 65 | 66 | return arguments 67 | 68 | 69 | class SafeTempDir(object): # pylint:disable=R0903 70 | """A TempDir that dissolves on __exit__, ignoring PermissionError.""" 71 | 72 | def __init__(self): 73 | """Forward initialization.""" 74 | super(SafeTempDir, self).__init__() 75 | self._temp_dir = tempdir.TempDir() 76 | 77 | def __enter__(self): 78 | """Return internal tempdir.""" 79 | return self._temp_dir.__enter__() 80 | 81 | def __exit__(self, exc_type, value, traceback): 82 | """Call dissolve.""" 83 | del exc_type 84 | del value 85 | del traceback 86 | 87 | self.dissolve() 88 | 89 | def dissolve(self): 90 | """Forward to TempDir dissolve function, ignore PermissionError.""" 91 | try: 92 | self._temp_dir.dissolve() 93 | except (IOError, OSError): # suppress(pointless-except) 94 | # IOError and OSError are fine. The directory will be deleted by 95 | # the user's operating system a little later, there's not much we 96 | # can do about this. 97 | pass 98 | 99 | @property 100 | def name(self): 101 | """Getter for 'name'.""" 102 | return self._temp_dir.name 103 | 104 | 105 | def run_create_container_on_dir(directory, *args, **kwargs): 106 | """Run main setting the container to be at directory.""" 107 | del args 108 | 109 | arguments = [directory] + _convert_to_switch_args(kwargs) 110 | 111 | with cached_downloads(): 112 | create.main(arguments=arguments) 113 | 114 | 115 | def run_create_container(**kwargs): 116 | """Run main() and returns the container in a TempDir. 
117 | 118 |     The returned TempDir houses the created container. Keyword args are 119 |     converted into switch arguments as appropriate. 120 |     """ 121 |     temp_dir = SafeTempDir() 122 |     run_create_container_on_dir(temp_dir.name, **kwargs) 123 |     return temp_dir 124 | 125 | 126 | def default_create_container_arguments(local=True): 127 |     """Get set of arguments which would create first known distribution.""" 128 |     distro_config = list(available_distributions())[0] 129 |     arguments = ("distro", "release") 130 |     config = {k: v for k, v in distro_config.items() if k in arguments} 131 | 132 |     # We must pass the local flag through here so that it gets passed 133 |     # on to the underlying container 134 |     config["local"] = local 135 |     return config 136 | 137 | 138 | def run_create_default_container(local=True): 139 |     """Run main() and return container for first known distribution.""" 140 |     return run_create_container(**(default_create_container_arguments(local))) 141 | 142 | 143 | def run_use_container_on_dir(directory, **kwargs): 144 |     """Run main() from psqtraviscontainer/use.py and return status code.""" 145 |     cmd = kwargs["cmd"] 146 |     del kwargs["cmd"] 147 | 148 |     arguments = [directory] + _convert_to_switch_args(kwargs) + ["--"] + cmd 149 | 150 |     return use.main(arguments=arguments) 151 | 152 | 153 | def test_case_requiring_platform(system): 154 |     """Get a TestCase base class which can only be run on platform.""" 155 |     class TestCaseRequiring(TestCase): 156 |         """A wrapper around TestCase which only runs tests on platform.""" 157 | 158 |         def setUp(self): # suppress(N802) 159 |             """Automatically skips tests if not run on platform.""" 160 |             super(TestCaseRequiring, self).setUp() 161 |             if platform.system() != system: 162 |                 self.skipTest("""not running on system - {0}""".format(system)) 163 | 164 |     return TestCaseRequiring 165 | 166 | 167 | class TestCreateProot(test_case_requiring_platform("Linux")): 168 |     """A test case for proot creation basics.""" 169 | 170 |     def setUp(self): 171 |         """Set up the test case and check that we can run it.""" 172 |         if os.environ.get("TRAVIS", False): 173 |             self.skipTest("""Cannot run proot on travis-ci""") 174 | 175 |         super(TestCreateProot, self).setUp() 176 | 177 |     def test_create_proot_distro(self): 178 |         """Check that we create a proot distro.""" 179 |         with run_create_default_container(local=False) as container: 180 |             self.assertThat(have_proot_distribution(container), 181 |                             FileExists()) 182 | 183 |     def test_use_existing_proot_distro(self): 184 |         """Check that we re-use an existing proot distro. 185 | 186 |         In this case, check the timestamp of /.have-proot-distribution 187 |         and make sure that it is the same across two runs. If it is, 188 |         then no re-downloading took place.
189 |         """ 190 |         with run_create_default_container(local=False) as container: 191 |             path_to_proot_stamp = have_proot_distribution(container) 192 | 193 |             first_timestamp = os.stat(path_to_proot_stamp).st_mtime 194 | 195 |             config = default_create_container_arguments(local=False) 196 |             run_create_container_on_dir(container, **config) 197 | 198 |             second_timestamp = os.stat(path_to_proot_stamp).st_mtime 199 | 200 |             self.assertEqual(first_timestamp, second_timestamp) 201 | 202 | 203 | @contextmanager 204 | def cached_downloads(): 205 |     """Context manager to ensure that download_file is patched to use cache.""" 206 |     import six 207 |     import psqtraviscontainer.download # suppress(PYC50) 208 | 209 |     original_download_file = psqtraviscontainer.download.download_file 210 |     psqtraviscontainer.download.download_file = download_file_cached 211 | 212 |     original_stdout = sys.stdout 213 |     original_stderr = sys.stderr 214 | 215 |     if not os.environ.get("_POLYSQUARE_TRAVIS_CONTAINER_TEST_SHOW_OUTPUT", 216 |                           None): 217 |         sys.stdout = six.StringIO() 218 |         sys.stderr = six.StringIO() 219 | 220 |     try: 221 |         yield 222 |     finally: 223 |         psqtraviscontainer.download.download_file = original_download_file 224 |         sys.stdout = original_stdout 225 |         sys.stderr = original_stderr 226 | 227 | 228 | def make_container_inspection_test_case(**create_container_kwargs): 229 |     """Make a TestCase which persists a container until tests are complete. 230 | 231 |     create_container_kwargs is stored and applied to creating the container - 232 |     this allows us to switch between proot-based and non-proot containers. 233 |     """ 234 |     class ContainerInspectionTestCase(TestCase): 235 |         """TestCase where container persists until all tests have completed. 236 | 237 |         No modifications should be made to the container during any 238 |         individual test. The order of tests should not be relied upon.
239 |         """ 240 | 241 |         container_temp_dir = None 242 | 243 |         def __init__(self, *args, **kwargs): 244 |             """Initialize class.""" 245 |             cls = ContainerInspectionTestCase 246 |             super(cls, self).__init__(*args, **kwargs) 247 |             self.container_dir = None 248 | 249 |         def setUp(self): # suppress(N802) 250 |             """Set up container dir.""" 251 |             super(ContainerInspectionTestCase, self).setUp() 252 |             self.container_dir = self.__class__.container_temp_dir.name 253 | 254 |         @classmethod 255 |         def create_container(cls, **kwargs): 256 |             """Overridable method to create a container for this test case.""" 257 |             cls.container_temp_dir = run_create_container(**kwargs) 258 | 259 |         # Suppress flake8 complaints about uppercase characters in function 260 |         # names; these functions are overridden from TestCase 261 |         @classmethod 262 |         def setUpClass(cls): # suppress(N802) 263 |             """Set up container for all tests in this test case.""" 264 |             # Detect if we're about to create a non-local container in an 265 |             # environment that doesn't support it 266 |             if (create_container_kwargs.get("local", None) is False and 267 |                 os.environ.get("TRAVIS", None)): 268 |                 return 269 | 270 |             with temporary_environment(_FORCE_DOWNLOAD_QEMU="True"): 271 |                 apply_kwargs = create_container_kwargs 272 |                 config = default_create_container_arguments(**apply_kwargs) 273 |                 cls.create_container(**config) 274 | 275 |         @classmethod 276 |         def tearDownClass(cls): # suppress(N802) 277 |             """Dissolve container for all tests in this test case.""" 278 |             if cls.container_temp_dir: 279 |                 cls.container_temp_dir.dissolve() 280 |                 cls.container_temp_dir = None 281 | 282 |     return ContainerInspectionTestCase 283 | 284 | QEMU_ARCHITECTURES = [ 285 |     "arm", 286 |     "i386", 287 |     "ppc", 288 |     "x86_64" 289 | ] 290 | 291 | 292 | def _format_arch(func, num, params): 293 |     """Format docstring for TestProotDistribution parameterized tests.""" 294 |     del num 295 | 296 |     return func.__doc__.format(arch=params[0][0]) 297 | 298 | 299 | class TestProotDistribution(make_container_inspection_test_case(local=False)): 300 |     """Tests to inspect a proot distribution itself.""" 301 | 302 |     def setUp(self): # suppress(N802) 303 |         """Set up TestProotDistribution.""" 304 |         if platform.system() != "Linux": 305 |             self.skipTest("""proot is only available on linux""") 306 | 307 |         if os.environ.get("TRAVIS", False): 308 |             self.skipTest("""Cannot run proot on travis-ci""") 309 | 310 |         super(TestProotDistribution, self).setUp() 311 | 312 |     def test_has_proot_dir(self): 313 |         """Check that we have a proot directory in our distribution.""" 314 |         self.assertThat(proot_distribution_dir(self.container_dir), 315 |                         DirExists()) 316 | 317 |     def test_has_proot_executable(self): 318 |         """Check that we have a proot executable in our distribution.""" 319 |         cont = proot_distribution_dir(self.container_dir) 320 |         self.assertThat(os.path.join(cont, "bin/proot"), 321 |                         FileExists()) 322 | 323 |     def test_proot_binary_is_executable(self): 324 |         """Check that the proot binary is executable.""" 325 |         cont = proot_distribution_dir(self.container_dir) 326 |         proot_binary = os.path.join(cont, "bin/proot") 327 |         stat_result = os.stat(proot_binary) 328 |         executable_mask = stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH 329 |         self.assertTrue(stat_result.st_mode & executable_mask != 0) 330 | 331 |     @parameterized.expand(QEMU_ARCHITECTURES, testcase_func_doc=_format_arch) 332 |     def test_has_qemu_executables(self, arch): 333 |         """Check that we have a qemu executable qemu-{arch}.""" 334 |         cont = proot_distribution_dir(self.container_dir) 335 |
self.assertThat(os.path.join(cont, "bin/qemu-{}".format(arch)), 336 | FileExists()) 337 | 338 | @parameterized.expand(QEMU_ARCHITECTURES, testcase_func_doc=_format_arch) 339 | def test_qemu_binary_is_executable(self, arch): 340 | """Check that qemu binary qemu-{arch} is executable.""" 341 | cont = proot_distribution_dir(self.container_dir) 342 | proot_binary = os.path.join(cont, "bin/qemu-{}".format(arch)) 343 | stat_result = os.stat(proot_binary) 344 | executable_mask = stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH 345 | self.assertTrue(stat_result.st_mode & executable_mask != 0) 346 | 347 | 348 | def exec_for_returncode(*argv): 349 | """Execute command for its return code. 350 | 351 | Check that use.main() returns exit code of subprocess. 352 | """ 353 | config = default_create_container_arguments() 354 | with run_create_container(**config) as cont: 355 | use_config = config.copy() 356 | use_config["cmd"] = list(argv) 357 | return run_use_container_on_dir(cont, 358 | **use_config) 359 | 360 | 361 | PLATFORM_PROGRAM_MAPPINGS = { 362 | "Linux": { 363 | "0": ["true"], 364 | "1": ["false"] 365 | }, 366 | "Darwin": { 367 | "0": ["true"], 368 | "1": ["false"] 369 | }, 370 | "Windows": { 371 | "0": ["python", "-c", "import sys;sys.exit(0);"], 372 | "1": ["python", "-c", "import sys;sys.exit(1);"] 373 | } 374 | } 375 | 376 | 377 | class TestExecInContainer(TestCase): 378 | """A test case for executing things inside a container.""" 379 | 380 | def test_exec_fail_no_distro(self): # suppress(no-self-use) 381 | """Check that use.main() fails where there is no distro.""" 382 | with SafeTempDir() as container_dir: 383 | with ExpectedException(RuntimeError): 384 | cmd = PLATFORM_PROGRAM_MAPPINGS[platform.system()]["0"] 385 | run_use_container_on_dir(container_dir, cmd=cmd) 386 | 387 | def test_exec_success_where_distro(self): # suppress(no-self-use) 388 | """Check that use.main() succeeds where there is a distro.""" 389 | # This will test that we can use "use.main()" without needing 390 | # to specify a distro configuration 391 | with run_create_default_container() as container_dir: 392 | cmd = PLATFORM_PROGRAM_MAPPINGS[platform.system()]["0"] 393 | self.assertEqual(run_use_container_on_dir(container_dir, 394 | cmd=cmd), 0) 395 | 396 | def test_exec_return_zero(self): 397 | """Check that use.main() returns true exit code of subprocess.""" 398 | cmd = PLATFORM_PROGRAM_MAPPINGS[platform.system()]["0"] 399 | self.assertEqual(exec_for_returncode(*cmd), 0) 400 | 401 | def test_exec_return_one(self): 402 | """Check that use.main() returns false exit code of subprocess.""" 403 | cmd = PLATFORM_PROGRAM_MAPPINGS[platform.system()]["1"] 404 | self.assertEqual(exec_for_returncode(*cmd), 1) 405 | 406 | ARCHITECTURE_LIBDIR_MAPPINGS = { 407 | "armhf": "arm-linux-gnueabihf", 408 | "i386": "i386-linux-gnu", 409 | "amd64": "x86_64-linux-gnu", 410 | "arm64": "arm64-linux-gnu", 411 | "powerpc": "powerpc-linux-gnu", 412 | "ppc64el": "ppc64el-linux-gnu" 413 | } 414 | 415 | 416 | class InstallationConfig(object): # pylint:disable=R0903 417 | """Manages configuration files.""" 418 | 419 | def __init__(self, packages, repos): 420 | """Create temporary files for packages and repos.""" 421 | packages_fd, self.packages_path = tempfile.mkstemp() 422 | repos_fd, self.repos_path = tempfile.mkstemp() 423 | 424 | packages_file = os.fdopen(packages_fd, "a") 425 | repos_file = os.fdopen(repos_fd, "a") 426 | 427 | for package in packages: 428 | packages_file.write("{0}\n".format(package)) 429 | 430 | for repository in repos: 431 | 
repos_file.write("{0}\n".format(repository)) 432 | 433 |         packages_file.close() 434 |         repos_file.close() 435 | 436 |     def __enter__(self): 437 |         """Use as context manager.""" 438 |         return self 439 | 440 |     def __exit__(self, exc_type, value, traceback): 441 |         """Destroy temporary files.""" 442 |         del exc_type 443 |         del value 444 |         del traceback 445 | 446 |         os.remove(self.packages_path) 447 |         os.remove(self.repos_path) 448 | 449 | 450 | def _create_distro_test(test_name, # pylint:disable=R0913 451 |                         config, 452 |                         repos, 453 |                         packages, 454 |                         test_files, 455 |                         **kwargs): 456 |     """Create a TemplateDistroTest class.""" 457 |     class TemplateDistroTest(make_container_inspection_test_case()): 458 |         """Template for checking a distro proot.""" 459 | 460 |         def __init__(self, *args, **kwargs): 461 |             """Initialize members used by this class.""" 462 |             cls = TemplateDistroTest 463 |             super(cls, self).__init__(*args, **kwargs) 464 |             self.path_to_distro_root = None 465 | 466 |         def setUp(self): # suppress(N802) 467 |             """Set up path to distro root.""" 468 |             super(TemplateDistroTest, self).setUp() 469 | 470 |         @classmethod 471 |         def setUpClass(cls): # suppress(N802) 472 |             """Create a container for all uses of this TemplateDistroTest.""" 473 |             with InstallationConfig(packages, repos) as command_config: 474 |                 keys = ("distro", "release") 475 |                 kwargs.update({k: v for k, v in config.items() if k in keys}) 476 | 477 |                 cls.create_container(repos=command_config.repos_path, 478 |                                      packages=command_config.packages_path, 479 |                                      **kwargs) 480 | 481 |         def test_distro_folder_exists(self): 482 |             """Check that the distro folder exists for this distro.""" 483 |             if platform.system() == "Linux": 484 |                 root = get_dir_for_distro(self.container_dir, 485 |                                           config) 486 |                 self.assertThat(os.path.join(self.container_dir, root), 487 |                                 DirExists()) 488 |             elif platform.system() == "Darwin": 489 |                 self.assertThat(os.path.join(self.container_dir, "bin"), 490 |                                 DirExists()) 491 | 492 |         def test_has_package_installed(self): 493 |             """Check that our testing package got installed. 494 | 495 |             If it did get installed, then it means that the repository 496 |             was successfully added and the package was successfully installed 497 |             using the native tool. That means that the proot "works". 498 |             """ 499 |             format_kwargs = dict() 500 | 501 |             if kwargs.get("release", None) == "trusty": 502 |                 self.skipTest("""Trusty images are currently unavailable""") 503 |                 return 504 | 505 |             if platform.system() == "Linux": 506 |                 root = get_dir_for_distro(self.container_dir, 507 |                                           config) 508 |                 distro_arch = architecture.Alias.debian(kwargs["arch"]) 509 |                 archlib = ARCHITECTURE_LIBDIR_MAPPINGS[distro_arch] 510 |                 format_kwargs["archlib"] = archlib 511 |             else: 512 |                 root = self.container_dir 513 | 514 |             # Match against a list of files. If none of the results are None, 515 |             # then throw a list of mismatches.
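            # (FileExists().match() follows the usual testtools convention:
            # it returns None when the path exists and a Mismatch otherwise,
            # so a result is only collected for a missing candidate path. The
            # entries in test_files are alternative locations, and the test
            # only fails when every one of them is missing.)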
516 | match_results = [] 517 | for filename in test_files: 518 | path_to_file = os.path.join(root, 519 | filename.format(**format_kwargs)) 520 | result = FileExists().match(path_to_file) 521 | if result: 522 | match_results.append(result) 523 | 524 | if len(match_results) == len(test_files): 525 | raise Exception(repr(match_results)) 526 | 527 | TemplateDistroTest.__name__ = test_name 528 | return TemplateDistroTest 529 | 530 | _DistroPackage = namedtuple("_DistroPackage", "package files repo") 531 | _DISTRO_INFO = { 532 | "Ubuntu": _DistroPackage(package="libaacs0", 533 | repo=["{ubuntu} {release} universe"], 534 | files=["usr/lib/{archlib}/libaacs.so.0"]), 535 | "Debian": _DistroPackage(package="libaio1", 536 | repo=[], 537 | files=["lib/libaio.so.1.0.1", 538 | "lib/{archlib}/libaio.so.1.0.1"]), 539 | "Fedora": _DistroPackage(package="libaio", 540 | repo=[], 541 | files=["lib/libaio.so.1.0.1", 542 | "lib64/libaio.so.1.0.1"]), 543 | "OSX": _DistroPackage(package="xz", 544 | repo=[], 545 | files=["bin/xz"]), 546 | "Windows": _DistroPackage(package="cmake.portable", 547 | repo=[], 548 | files=["lib/cmake.portable.3.7.0/" 549 | "tools/cmake-3.7.0-win32-x86/bin/" 550 | "cmake.exe"]) 551 | } 552 | 553 | 554 | def get_distribution_tests(): 555 | """Fetch distribution tests as dictionary.""" 556 | tests = {} 557 | 558 | for config in available_distributions(): 559 | config = config.copy() 560 | name_array = bytearray() 561 | for key in sorted(list(config.keys())): 562 | if key in ("info", "pkgsys", "url"): 563 | continue 564 | 565 | name_array += bytes(key[0].upper().encode() + 566 | key[1:].encode() + 567 | config[key][0].upper().encode() + 568 | config[key][1:].encode()) 569 | name = "Test{0}".format(name_array.decode("ascii")) 570 | 571 | distro = config["distro"] 572 | repositories_to_add = _DISTRO_INFO[distro].repo 573 | packages_to_install = [_DISTRO_INFO[distro].package] 574 | files_to_test_for = _DISTRO_INFO[distro].files 575 | kwargs = dict() 576 | 577 | try: 578 | kwargs["arch"] = Alias.universal(config["arch"]) 579 | except KeyError: # suppress(pointless-except) 580 | pass 581 | 582 | # Set the --local switch if the installation type is local. This is 583 | # because we pass the keyword arguments to the main function of 584 | # psq-travis-container-create 585 | kwargs["local"] = (config.get("installation", None) == "local") 586 | tests[name] = _create_distro_test(name, 587 | config, 588 | repositories_to_add, 589 | packages_to_install, 590 | files_to_test_for, 591 | **kwargs) 592 | 593 | return tests 594 | 595 | for _name, _test in get_distribution_tests().items(): 596 | exec("{0} = _test".format(_name)) # pylint:disable=W0122 597 | del _test 598 | -------------------------------------------------------------------------------- /test/test_unit.py: -------------------------------------------------------------------------------- 1 | # /test/test_unit.py 2 | # 3 | # Unit tests for various utilities. 
4 | # 5 | # See /LICENCE.md for Copyright information 6 | """Unit tests for various utilities.""" 7 | 8 | import os 9 | 10 | import shutil 11 | 12 | from psqtraviscontainer import architecture 13 | from psqtraviscontainer import directory 14 | from psqtraviscontainer import distro 15 | 16 | from testtools import ExpectedException 17 | from testtools import TestCase 18 | 19 | from testtools.matchers import AllMatch 20 | from testtools.matchers import DirExists 21 | from testtools.matchers import MatchesPredicate 22 | 23 | 24 | class TestDirectoryNavigation(TestCase): 25 | """Tests for psqtraviscontainer/directory.py.""" 26 | 27 | def test_enter_create_dir(self): 28 | """Check that we create a dir when entering a non-existent one.""" 29 | does_not_exist = os.path.join(os.getcwd(), "does_not_exist") 30 | self.addCleanup(lambda: shutil.rmtree(does_not_exist)) 31 | with directory.Navigation(does_not_exist) as entered: 32 | self.assertThat(entered, DirExists()) 33 | 34 | def test_enter_exist_dir(self): 35 | """Check that we can enter an existing dir.""" 36 | existing_dir = os.path.join(os.getcwd(), "existing") 37 | os.makedirs(existing_dir) 38 | self.addCleanup(lambda: shutil.rmtree(existing_dir)) 39 | with directory.Navigation(existing_dir) as entered: 40 | self.assertThat(entered, DirExists()) 41 | 42 | 43 | class TestArchitecture(TestCase): # suppress(R0903) 44 | """Tests for psqtraviscontainer/architecture.py.""" 45 | 46 | def test_unknown_architecture(self): 47 | """Check that creating a non-special architecture returns metadata.""" 48 | check_methods = [ 49 | architecture.Alias.universal, 50 | architecture.Alias.qemu, 51 | architecture.Alias.debian 52 | ] 53 | 54 | def function_returns_input(function): 55 | """Return true if function returns input.""" 56 | return function("input") == "input" 57 | 58 | self.assertThat(check_methods, 59 | AllMatch(MatchesPredicate(function_returns_input, 60 | "% did not return same"))) 61 | 62 | 63 | class TestDistroLookup(TestCase): # suppress(R0903) 64 | """Tests for looking up the distro.""" 65 | 66 | def test_error_lookup_bad_distro(self): # suppress(no-self-use) 67 | """Check that looking up a non-existent distro throws.""" 68 | with ExpectedException(RuntimeError): 69 | distro.lookup({"distro": "noexist"}) 70 | -------------------------------------------------------------------------------- /test/testutil.py: -------------------------------------------------------------------------------- 1 | # /test/testutil.py 2 | # 3 | # Monkey-patch functions for tests to cache downloaded files. This helps 4 | # to speed up test execution. 
5 | # 6 | # See /LICENCE.md for Copyright information 7 | """Monkey-patch functions for tests to cache downloaded files.""" 8 | 9 | import errno 10 | 11 | import hashlib 12 | 13 | import os 14 | 15 | import shutil 16 | 17 | import sys 18 | 19 | from contextlib import contextmanager 20 | 21 | from clint.textui import colored 22 | 23 | from psqtraviscontainer.download import download_file as download_file_original 24 | 25 | 26 | def download_file_cached(url, filename=None): 27 |     """Check if we've got a cached version of url, otherwise download it.""" 28 |     cache_dir = os.environ.get("_POLYSQUARE_TRAVIS_CONTAINER_TEST_CACHE_DIR", 29 |                                None) 30 |     if cache_dir: 31 |         try: 32 |             os.makedirs(cache_dir) 33 |         except OSError as error: 34 |             if error.errno != errno.EEXIST: # suppress(PYC90) 35 |                 raise error 36 | 37 |         dest_filename = os.path.realpath(filename or os.path.basename(url)) 38 |         hashed = os.path.join(cache_dir, 39 |                               hashlib.md5(url.encode("utf-8")).hexdigest()) 40 | 41 |         if os.path.exists(hashed): 42 |             msg = """Downloading {0} [found in cache]\n""".format(url) 43 |             sys.stdout.write(str(colored.blue(msg, bold=True))) 44 |             shutil.copyfile(hashed, dest_filename) 45 |         else: 46 |             # Grab the url and then store the downloaded file in the cache. 47 |             # We trust download_file_original to give us the right 48 |             # dest_filename, which is why we overwrite it here. 49 |             dest_filename = download_file_original(url, filename) 50 |             shutil.copyfile(dest_filename, hashed) 51 |     else: 52 |         dest_filename = download_file_original(url, filename) 53 | 54 |     return dest_filename 55 | 56 | 57 | @contextmanager 58 | def temporary_environment(**kwargs): 59 |     """A context with os.environ set to a temporary value.""" 60 |     environ_copy = os.environ.copy() 61 |     os.environ.update(kwargs) 62 |     try: 63 |         yield 64 |     finally: 65 |         os.environ.clear() 66 |         os.environ.update(environ_copy) 67 | --------------------------------------------------------------------------------
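The two helpers above are combined by cached_downloads() in /test/test_acceptance.py. A minimal sketch of using them directly, assuming a hypothetical cache directory and download URL purely for illustration:

# Hedged sketch: the cache directory and URL below are assumptions, not
# values taken from this repository.
import psqtraviscontainer.download

from test.testutil import download_file_cached, temporary_environment

cache_env = {"_POLYSQUARE_TRAVIS_CONTAINER_TEST_CACHE_DIR": "/tmp/psq-test-cache"}

with temporary_environment(**cache_env):
    # Route downloads through the md5-keyed on-disk cache for this block.
    original_download = psqtraviscontainer.download.download_file
    psqtraviscontainer.download.download_file = download_file_cached
    try:
        psqtraviscontainer.download.download_file("http://example.com/rootfs.tar.xz")
    finally:
        # Always restore the real download function afterwards.
        psqtraviscontainer.download.download_file = original_download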