├── .github └── workflows │ ├── ci.yml │ └── pypi.yml ├── .gitignore ├── .readthedocs.yaml ├── LICENSE ├── README.rst ├── docker0s ├── __init__.py ├── __main__.py ├── app │ ├── __init__.py │ ├── app.py │ ├── base.py │ ├── names.py │ └── repo.py ├── commands.py ├── config.py ├── env.py ├── exceptions.py ├── git.py ├── host.py ├── manifest.py ├── manifest_object.py ├── path.py └── settings.py ├── docs ├── Makefile ├── changelog.rst ├── conf.py ├── index.rst ├── installation.rst ├── requirements.in ├── requirements.txt ├── usage.rst └── writing │ ├── apps.rst │ ├── compose.rst │ ├── host.rst │ ├── index.rst │ ├── python.rst │ └── yaml.rst ├── requirements.in ├── requirements.txt ├── setup.cfg ├── setup.py └── tests ├── __init__.py ├── app ├── __init__.py ├── test_app.py ├── test_base_contexts.py ├── test_base_def.py ├── test_base_ops.py ├── test_names.py └── test_repo.py ├── conftest.py ├── constants.py ├── data ├── docker-compose.yml ├── extends_base_first.py ├── extends_base_second.py ├── file.txt ├── first.env ├── manifest.py ├── manifest.yml └── second.env ├── path ├── __init__.py ├── test_extends_path.py └── test_other.py ├── requirements.in ├── requirements.txt ├── test_commands.py ├── test_conftest.py ├── test_env.py ├── test_git.py ├── test_host.py └── test_manifest.py /.github/workflows/ci.yml: -------------------------------------------------------------------------------- 1 | name: Tests 2 | 3 | on: 4 | push: 5 | pull_request: 6 | 7 | jobs: 8 | test: 9 | name: py-${{ matrix.python }} 10 | runs-on: ubuntu-latest 11 | strategy: 12 | matrix: 13 | include: 14 | - python: "3.10" 15 | 16 | steps: 17 | - uses: actions/checkout@v2 18 | - name: Set up Python ${{ matrix.python }} 19 | uses: actions/setup-python@v2 20 | with: 21 | python-version: ${{ matrix.python }} 22 | - name: Install dependencies 23 | run: | 24 | python -m pip install --upgrade pip 25 | pip install -r tests/requirements.txt 26 | - name: Set Python path 27 | run: | 28 | echo "PYTHONPATH=." 
>> $GITHUB_ENV 29 | - name: Test 30 | run: | 31 | pytest 32 | - name: Upload coverage to Codecov 33 | uses: codecov/codecov-action@v1 34 | with: 35 | name: ${{ matrix.python }} 36 | -------------------------------------------------------------------------------- /.github/workflows/pypi.yml: -------------------------------------------------------------------------------- 1 | name: PyPI 2 | 3 | on: 4 | push: 5 | tags: 6 | - 'v*' 7 | 8 | jobs: 9 | publish: 10 | name: Build and publish to PyPI 11 | runs-on: ubuntu-latest 12 | steps: 13 | - uses: actions/checkout@v2 14 | - name: Set up Python 3.9 15 | uses: actions/setup-python@v2 16 | with: 17 | python-version: 3.9 18 | - name: Install dependencies 19 | run: | 20 | python -m pip install --upgrade pip 21 | pip install setuptools wheel 22 | - name: Build a binary wheel and a source tarball 23 | run: | 24 | python setup.py sdist bdist_wheel 25 | - name: Publish to PyPI 26 | if: startsWith(github.ref, 'refs/tags') 27 | uses: pypa/gh-action-pypi-publish@release/v1 28 | with: 29 | password: ${{ secrets.pypi_password }} 30 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | *.pyc 2 | *.egg-info 3 | *.log 4 | 5 | # Docs 6 | docs/_* 7 | build 8 | dist 9 | 10 | # Testing 11 | .tox 12 | .coverage* 13 | htmlcov 14 | .*_cache 15 | .vagrant 16 | Vagrantfile -------------------------------------------------------------------------------- /.readthedocs.yaml: -------------------------------------------------------------------------------- 1 | # .readthedocs.yaml 2 | # Read the Docs configuration file 3 | # See https://docs.readthedocs.io/en/stable/config-file/v2.html for details 4 | 5 | # Required 6 | version: 2 7 | 8 | # Set the version of Python and other tools you might need 9 | build: 10 | os: ubuntu-22.04 11 | tools: 12 | python: "3.11" 13 | # You can also specify other tool versions: 14 | # nodejs: "16" 15 | # rust: "1.55" 16 | # golang: "1.17" 17 | 18 | # Build documentation in the docs/ directory with Sphinx 19 | sphinx: 20 | configuration: docs/conf.py 21 | 22 | # If using Sphinx, optionally build your docs in additional formats such as PDF 23 | # formats: 24 | # - pdf 25 | 26 | # Optionally declare the Python requirements required to build your docs 27 | python: 28 | install: 29 | - requirements: docs/requirements.txt 30 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | BSD 3-Clause License 2 | 3 | Copyright (c) 2022, Richard Terry 4 | All rights reserved. 5 | 6 | Redistribution and use in source and binary forms, with or without 7 | modification, are permitted provided that the following conditions are met: 8 | 9 | 1. Redistributions of source code must retain the above copyright notice, this 10 | list of conditions and the following disclaimer. 11 | 12 | 2. Redistributions in binary form must reproduce the above copyright notice, 13 | this list of conditions and the following disclaimer in the documentation 14 | and/or other materials provided with the distribution. 15 | 16 | 3. Neither the name of the copyright holder nor the names of its 17 | contributors may be used to endorse or promote products derived from 18 | this software without specific prior written permission. 
19 | 20 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" 21 | AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE 22 | IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE 23 | DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE 24 | FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL 25 | DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR 26 | SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER 27 | CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, 28 | OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 29 | OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 30 | -------------------------------------------------------------------------------- /README.rst: -------------------------------------------------------------------------------- 1 | ======== 2 | docker0s 3 | ======== 4 | 5 | Docker0s uses docker-compose to manage multiple containerised apps on a single host. 6 | 7 | .. image:: https://img.shields.io/pypi/v/docker0s.svg 8 | :target: https://pypi.org/project/docker0s/ 9 | :alt: PyPI 10 | 11 | .. image:: https://readthedocs.org/projects/docker0s/badge/?version=latest 12 | :target: https://docker0s.readthedocs.io/en/latest/?badge=latest 13 | :alt: Documentation Status 14 | 15 | .. image:: https://github.com/radiac/docker0s/actions/workflows/ci.yml/badge.svg 16 | :target: https://github.com/radiac/docker0s/actions/workflows/ci.yml 17 | :alt: Tests 18 | 19 | .. image:: https://codecov.io/gh/radiac/docker0s/branch/main/graph/badge.svg?token=BCNM45T6GI 20 | :target: https://codecov.io/gh/radiac/docker0s 21 | :alt: Test coverage 22 | 23 | Bring together standard docker-compose files across multiple projects in a single simple 24 | manifest file, written in either YAML or Python with pre- and post-operation hooks, to 25 | deploy to a single host. 26 | 27 | It's designed for small self-hosted low-maintenance deployments which don't need the 28 | complexity of Kubernetes - think k8s with zero features and a much simpler config 29 | syntax, with simple app manifests instead of helm charts. 30 | 31 | There is a collection of ready-to-use app manifests at `docker0s-manifests`_, with 32 | examples for how to deploy them to your host. 33 | 34 | .. _docker0s-manifests: https://github.com/radiac/docker0s-manifests 35 | 36 | 37 | * Project site: https://radiac.net/projects/docker0s/ 38 | * Documentation: https://docker0s.readthedocs.io/ 39 | * Source code: https://github.com/radiac/docker0s 40 | 41 | 42 | 43 | Quickstart 44 | ========== 45 | 46 | Install:: 47 | 48 | pip install docker0s 49 | 50 | 51 | Put together a manifest in YAML as ``d0s-manifest.yml``: 52 | 53 | .. code-block:: yaml 54 | 55 | apps: 56 | traefik: 57 | extends: git+https://github.com/radiac/docker0s-manifests.git#traefik 58 | env_file: traefik.env 59 | smtp: 60 | compose: smtp.yml 61 | website: 62 | type: RepoApp 63 | extends: "git+ssh://git@github.com:radiac/example.com.git@main" 64 | env: 65 | DOMAIN: example.radiac.net 66 | host: 67 | name: example.radiac.net 68 | 69 | 70 | or in Python as ``d0s-manifest.py``, using subclassing to perform actions before and 71 | after operations and add custom functionality: 72 | 73 | .. 
code-block:: python 74 | 75 | from docker0s import RepoApp 76 | 77 | class Website(RepoApp): 78 | # Clone a repo to the host and look for docker-compose.yml in there 79 | extends = "git+ssh://git@github.com:radiac/example.com.git@main" 80 | env = { 81 | "DOMAIN": "example.radiac.net" 82 | } 83 | 84 | # Subclass operation methods to add your own logic 85 | def deploy(self): 86 | # Perform action before deployment, eg clean up any previous deployment 87 | super().deploy() 88 | # Perform action after deployment, eg push additional resources 89 | 90 | class Vagrant(Host): 91 | name = "vagrant" 92 | 93 | See `writing manifests`_ for a full reference 94 | 95 | .. _writing manifests: https://docker0s.readthedocs.io/en/latest/writing/index.html 96 | 97 | 98 | Then run a command, eg:: 99 | 100 | d0s deploy 101 | d0s up 102 | d0s restart website.django 103 | d0s exec website.django /bin/bash 104 | d0s cmd website app_command arguments 105 | 106 | See `commands`_ for a full command reference 107 | 108 | .. _commands: https://docker0s.readthedocs.io/en/latest/usage.html 109 | -------------------------------------------------------------------------------- /docker0s/__init__.py: -------------------------------------------------------------------------------- 1 | from .app import App, RepoApp # noqa 2 | from .host import Host # noqa 3 | 4 | 5 | __version__ = "2.0.0" 6 | -------------------------------------------------------------------------------- /docker0s/__main__.py: -------------------------------------------------------------------------------- 1 | from .commands import invoke 2 | 3 | 4 | invoke() 5 | -------------------------------------------------------------------------------- /docker0s/app/__init__.py: -------------------------------------------------------------------------------- 1 | from .app import App # noqa 2 | from .base import BaseApp, abstract_app_registry # noqa 3 | from .repo import RepoApp # noqa 4 | -------------------------------------------------------------------------------- /docker0s/app/app.py: -------------------------------------------------------------------------------- 1 | from .base import BaseApp 2 | 3 | 4 | class App(BaseApp, abstract=True): 5 | """ 6 | A self-contained docker-compose file which deploys containers without additional 7 | resources 8 | """ 9 | 10 | #: Assets to upload next to the docker-compose.yml 11 | assets: str | list[str] | None = None 12 | 13 | def deploy(self): 14 | """ 15 | Deploy the docker-compose and assets for this app 16 | """ 17 | super().deploy() 18 | self.push_assets_to_host() 19 | 20 | def push_assets_to_host(self): 21 | cls_assets = self.collect_attr("assets") 22 | files: str | list[str] 23 | for mro_cls, files in cls_assets: 24 | if not files: 25 | continue 26 | 27 | if isinstance(files, str): 28 | files = [files] 29 | 30 | for file in files: 31 | asset_path = mro_cls._dir / file 32 | remote_path = self.remote_assets / file 33 | self.host.push(asset_path, remote_path) 34 | -------------------------------------------------------------------------------- /docker0s/app/base.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import inspect 4 | from pathlib import Path, PosixPath 5 | from typing import Any, Callable 6 | 7 | from jinja2 import Environment, FileSystemLoader, select_autoescape 8 | 9 | from ..env import dump_env, read_env 10 | from ..exceptions import DefinitionError 11 | from ..host import Host 12 | from ..manifest_object import ManifestObject 
13 | from ..path import ExtendsPath 14 | from ..settings import DIR_ASSETS, FILENAME_COMPOSE, FILENAME_ENV 15 | from .names import normalise_name, pascal_to_snake 16 | 17 | 18 | # Abstract app registry for type lookups 19 | abstract_app_registry: dict[str, type[BaseApp]] = {} 20 | 21 | 22 | class AppsTemplateContext: 23 | """ 24 | Lazy context getter for use in template context `apps` 25 | """ 26 | 27 | apps: dict[str, BaseApp] 28 | 29 | def __init__(self, apps: dict[str, BaseApp]): 30 | self.apps = apps 31 | 32 | def __getitem__(self, name: str) -> dict[str, Any]: 33 | return self.get(name) 34 | 35 | def __getattr__(self, name: str) -> dict[str, Any]: 36 | return self.get(name) 37 | 38 | def get(self, name: str) -> dict[str, Any]: 39 | normalised = normalise_name(name) 40 | if normalised not in self.apps: 41 | raise DefinitionError(f"Unknown app {name} ({normalised})") 42 | return self.apps[normalised].get_compose_context() 43 | 44 | def __contains__(self, name: str) -> bool: 45 | normalised = normalise_name(name) 46 | return normalised in self.apps 47 | 48 | 49 | class EnvTemplateContext: 50 | """ 51 | Lazy context getter for use in template context `env` 52 | """ 53 | 54 | app: BaseApp 55 | 56 | def __init__(self, app: BaseApp): 57 | self.app = app 58 | 59 | def __getitem__(self, name: str) -> Any: 60 | return self.get(name) 61 | 62 | def __getattr__(self, name: str) -> Any: 63 | return self.get(name) 64 | 65 | def get(self, name: str) -> Any: 66 | env_data = self.app.get_host_env_data() 67 | return env_data[name] 68 | 69 | def __contains__(self, name: str) -> bool: 70 | env_data = self.app.get_host_env_data() 71 | return name in env_data 72 | 73 | 74 | class BaseApp(ManifestObject, abstract=True): 75 | _file: Path # Path to this manifest file 76 | _dir: Path # Path to this manifest file 77 | 78 | #: Path to a base docker0s manifest for this app. 79 | #: 80 | #: If the path ends ``::`` it will look for an app definition with that name, 81 | #: eg ``bases.py::Store``. Otherwise it will look for an app with the same 82 | #: name as this. 83 | #: 84 | #: The base manifest must not define a host. 85 | #: 86 | #: This referenced manifest will will act as the base manifest. That in turn can 87 | #: reference an additional base manifest. 88 | #: 89 | #: Default: ``d0s-manifest.py``, then ``d0s-manifest.yml`` 90 | extends: str | None = None 91 | _extends_path: ExtendsPath | None = None # Resolved path 92 | 93 | #: Path to the app's docker compose file. This will be pushed to the host. 94 | #: 95 | #: This can be a ``.yml`` file, or a ``.jinja2`` template. 
96 | #: 97 | #: For access see ``.get_compose_path`` 98 | #: 99 | #: Default: ``docker-compose.jinja2``, then ``docker-compose.yml`` 100 | compose: str | None = None 101 | 102 | COMPOSE_DEFAULTS = [ 103 | "docker-compose.j2", 104 | "docker-compose.jinja2", 105 | "docker-compose.yml", 106 | "docker-compose.yaml", 107 | ] 108 | 109 | #: Context for docker-compose Jinja2 template rendering 110 | #: 111 | #: To add instance data, override ``.get_compose_context`` 112 | compose_context: dict[str, Any] | None = None 113 | 114 | #: File containing environment variables for docker-compose 115 | #: 116 | #: Path to an env file, or a list of paths 117 | #: 118 | #: For access see ``.get_env_data`` 119 | env_file: str | list[str] | None = None 120 | 121 | #: Environment variables for docker-compose 122 | #: 123 | #: For access see ``.get_env_data`` 124 | env: dict[str, (str | int)] | None = None 125 | 126 | #: If True, COMPOSE_PROJECT_NAME will be automatically added to the env if not 127 | #: set by ``env_file`` or ``env`` 128 | set_project_name: bool = True 129 | 130 | # Host this app instance is bound to on initialisation 131 | host: Host 132 | 133 | # All app instances defined in the manifest which defines this app, including self 134 | manifest_apps: dict[str, BaseApp] 135 | 136 | def __init_subclass__( 137 | cls, 138 | abstract: bool = False, 139 | name: str | None = None, 140 | path: Path | None = None, 141 | **kwargs, 142 | ): 143 | """ 144 | Set abstract flag and register abstract classes with the registry 145 | """ 146 | super().__init_subclass__(abstract=abstract, name=name, **kwargs) 147 | 148 | if abstract: 149 | global abstract_app_registry # not required, for clarity 150 | if cls.__name__ in abstract_app_registry: 151 | raise DefinitionError( 152 | f"Abstract class names must be unique, {cls.__name__} is duplicate" 153 | ) 154 | abstract_app_registry[cls.__name__] = cls 155 | return 156 | 157 | # Detect manifest path 158 | if path: 159 | cls._file = path 160 | else: 161 | cls_module = inspect.getmodule(cls) 162 | if cls_module is None or cls_module.__file__ is None: 163 | # This shouldn't happen 164 | raise ValueError(f"Cannot find module path for app {cls}") 165 | cls._file = Path(cls_module.__file__) 166 | 167 | cls._dir = cls._file.parent 168 | 169 | def __init__(self, host: Host): 170 | self.host = host 171 | self.other_apps: dict[str, BaseApp] = {} 172 | 173 | def __str__(self): 174 | return self.get_name() 175 | 176 | @classmethod 177 | def get_name(cls) -> str: 178 | """ 179 | The docker0s name of this app in PascalCase 180 | """ 181 | return cls.__name__ 182 | 183 | @classmethod 184 | def get_docker_name(cls) -> str: 185 | """ 186 | The docker container name of this app in snake_case 187 | """ 188 | return pascal_to_snake(cls.get_name()) 189 | 190 | @classmethod 191 | def apply_base_manifest(cls, history: list[Path] | None = None): 192 | """ 193 | If a base manifest can be found by _get_base_manifest, load it and look for a 194 | BaseApp subclass with the same name as this. If found, add it to the base 195 | classes for this class. 
196 | """ 197 | # Avoid import loop 198 | from ..manifest import Manifest 199 | 200 | if not cls.extends: 201 | return 202 | 203 | if not cls._extends_path: 204 | cls._extends_path = ExtendsPath(cls.extends, cls._dir) 205 | 206 | path = cls._extends_path.get_manifest() 207 | 208 | base_manifest = Manifest.load(path, history) 209 | if base_manifest.host is not None: 210 | raise DefinitionError("A base manifest cannot define a host") 211 | 212 | base_name = cls._extends_path.name or cls.get_name() 213 | base_app = base_manifest.get_app(base_name) 214 | if base_app is None: 215 | raise DefinitionError( 216 | f"Base manifest {path} does not define an app called {base_name}" 217 | ) 218 | 219 | if not issubclass(cls, base_app): 220 | cls.__bases__ = (base_app,) + cls.__bases__ 221 | 222 | @classmethod 223 | def find_relative_file( 224 | cls, 225 | attr: str, 226 | defaults: list[str] | None = None, 227 | ) -> tuple[Path, str]: 228 | """ 229 | Return first file found, or raises DefinitionError 230 | """ 231 | cls_values: list[tuple[type[BaseApp], Any]] = cls.collect_attr(attr) 232 | for mro_cls, val in cls_values: 233 | if val: 234 | # Value was specified, file should exist 235 | path = mro_cls._dir / val 236 | if not path.exists(): 237 | raise DefinitionError( 238 | f'App setting {cls.get_name()}.{attr} specified as "{val}"' 239 | f' but "{path}" does not exist' 240 | ) 241 | break 242 | return mro_cls._dir, val 243 | 244 | elif defaults: 245 | # Look for defaults 246 | for filename in defaults: 247 | if (mro_cls._dir / filename).exists(): 248 | return mro_cls._dir, filename 249 | 250 | raise DefinitionError( 251 | f"App setting {cls.get_name()}.{attr} not specified, no default found" 252 | ) 253 | 254 | @classmethod 255 | def find_file( 256 | cls, 257 | attr: str, 258 | defaults: list[str] | None = None, 259 | ) -> Path: 260 | path, filename = cls.find_relative_file(attr, defaults) 261 | return path / filename 262 | 263 | @classmethod 264 | def collect_attr(cls, attr: str) -> list[tuple[type[BaseApp], Any]]: 265 | """ 266 | Collect attributes directly from the class and its bases, bypassing inheritance 267 | and returning a list of ``(cls, *values)`` pairs. 268 | 269 | Abstract classes are ignored. 270 | 271 | This is primarily used where we need context of the value definition - ie a path 272 | relative to the manifest where the path is set, or for env var resolution. 273 | """ 274 | # TODO: This is a bit of a confusing approach. Because ``extends`` is adding to 275 | # the base classes to make it easy for us to inherit custom values and functions 276 | # from parent apps, we need this odd way to collect values bypassing 277 | # inheritance. This is unexpected magic. 278 | # 279 | # Two better options for a future refactor: 280 | # 281 | # 1. Resolve relative paths as soon as the class is defined. This makes sense - 282 | # at end of __init_subclass__ we could go through and resolve all the paths 283 | # and envs, then we can have normal inheritance working as expected. 284 | # 285 | # The disadvantage is we'll need to resolve everything every time we load a 286 | # manifest - and that probably includes resolving env vars. We're generally 287 | # not worried about speed in docker0s, but this could easily balloon to be a 288 | # problem. 289 | # 290 | # 2. Don't inject extended classes as base classes. 
This may be the better 291 | # option - we can keep the JIT evaluation of things we may no need, and base 292 | # class creation and injection is verging on too magical for this sort of 293 | # project. 294 | # 295 | # Instead we could just create a list of app classes that the host manifest 296 | # extends, and iterate through them. This fn would still exist, but it would 297 | # be looking at a ``cls._extend_cls_list`` list instead of ``cls.mro()``. 298 | # 299 | # The disadvantage is we'll lose the ability to reference fns in parent 300 | # classes by inheritance - most importantly, deployment hooks. This felt like 301 | # it would be a big deal early on, but it seems to be an edge case in the 302 | # real world - very few projects have needed custom deployment steps, and I 303 | # suspect those could all be handled by actual importing and subclassing. 304 | results: list[tuple[type[BaseApp], Any]] = [] 305 | for mro_cls in cls.mro(): 306 | if not issubclass(mro_cls, BaseApp) or mro_cls.abstract: 307 | # We're only interested in concrete App classes 308 | continue 309 | results.append((mro_cls, mro_cls.__dict__.get(attr, None))) 310 | return results 311 | 312 | @classmethod 313 | def get_compose_path(cls) -> Path: 314 | return cls.find_file( 315 | attr="compose", 316 | defaults=cls.COMPOSE_DEFAULTS, 317 | ) 318 | 319 | @classmethod 320 | def get_env_data(cls) -> dict[str, str | int | None]: 321 | """ 322 | Load env files in order (for key conflicts last wins), and then merge in the env 323 | dict, if defined 324 | """ 325 | attrs_env: list[tuple[type[BaseApp], Any]] = cls.collect_attr("env") 326 | attrs_env_file: list[tuple[type[BaseApp], Any]] = cls.collect_attr("env_file") 327 | attrs = reversed(list(zip(attrs_env, attrs_env_file))) 328 | 329 | env: dict[str, str | int | None] = {} 330 | env_dict: dict 331 | env_file: list | str 332 | for (mro_cls, env_dict), (_, env_file) in attrs: 333 | # Build list of files 334 | raw_env_files: list[str] = [] 335 | if env_file is not None: 336 | if isinstance(env_file, (tuple, list)): 337 | raw_env_files = env_file 338 | else: 339 | raw_env_files = [env_file] 340 | env_files: list[Path] = [ 341 | mro_cls._dir / env_file for env_file in raw_env_files 342 | ] 343 | 344 | # Prepare dict 345 | if env_dict is None: 346 | env_dict = {} 347 | 348 | env.update(read_env(*env_files, **env_dict)) 349 | 350 | if cls.set_project_name and "COMPOSE_PROJECT_NAME" not in env: 351 | env["COMPOSE_PROJECT_NAME"] = cls.get_docker_name() 352 | return env 353 | 354 | @staticmethod 355 | def command(fn): 356 | fn.is_command = True 357 | return fn 358 | 359 | def get_command(self, name: str) -> Callable: 360 | """ 361 | Return the specified command 362 | """ 363 | attr = getattr(self, name) 364 | if callable(attr) and hasattr(attr, "is_command"): 365 | return attr 366 | raise ValueError(f"Command {name} not found") 367 | 368 | @property 369 | def remote_path(self) -> PosixPath: 370 | """ 371 | The remote path for this app 372 | """ 373 | return self.host.path(self.get_docker_name()) 374 | 375 | @property 376 | def remote_compose(self) -> PosixPath: 377 | """ 378 | A PosixPath to the remote compose file 379 | """ 380 | return self.remote_path / FILENAME_COMPOSE 381 | 382 | @property 383 | def remote_env(self) -> PosixPath: 384 | """ 385 | A PosixPath for the remote env file 386 | """ 387 | return self.remote_path / FILENAME_ENV 388 | 389 | @property 390 | def remote_assets(self) -> PosixPath: 391 | """ 392 | A PosixPath for the remote assets dir 393 | 394 | Assets are 
resources pushed to the server as part of the docker0s deployment - 395 | config files, scripts, media etc 396 | """ 397 | return self.remote_path / DIR_ASSETS 398 | 399 | @property 400 | def remote_store(self) -> PosixPath: 401 | """ 402 | A PosixPath for the remote store dir 403 | 404 | The store is for files created by the containers - logs, databases, uploads etc 405 | """ 406 | return self.remote_path / "store" 407 | 408 | def get_compose_context(self, **kwargs: Any) -> dict[str, Any]: 409 | """ 410 | Build the template context for the compose template 411 | """ 412 | context = { 413 | "host": self.host, 414 | "env": EnvTemplateContext(self), 415 | "apps": AppsTemplateContext(self.manifest_apps), 416 | # Reserved for future expansion 417 | "docker0s": NotImplemented, 418 | "globals": NotImplemented, 419 | **kwargs, 420 | } 421 | 422 | if self.compose_context is not None: 423 | context.update(self.compose_context) 424 | 425 | return context 426 | 427 | def get_compose_content(self, context: dict[str, Any] | None = None) -> str: 428 | """ 429 | Return the content for the docker-compose file 430 | 431 | This will either be rendered from ``compose_template`` if it exists, otherwise 432 | it will be read from ``compose`` 433 | """ 434 | compose_path = self.get_compose_path() 435 | filetype = compose_path.suffix.lower() 436 | if filetype == ".yml": 437 | return compose_path.read_text() 438 | 439 | elif filetype == ".jinja2": 440 | env = Environment( 441 | loader=FileSystemLoader(compose_path.parent), 442 | autoescape=select_autoescape(), 443 | ) 444 | 445 | context = self.get_compose_context(**(context or {})) 446 | template = env.get_template(compose_path.name) 447 | return template.render(context) 448 | 449 | raise ValueError(f"Unrecognised compose filetype {filetype}") 450 | 451 | def get_host_env_data(self) -> dict[str, str | int | None]: 452 | """ 453 | Build the env data dict to be sent to the server 454 | """ 455 | env_data = self.get_env_data() 456 | env_data.update( 457 | { 458 | "ENV_FILE": str(self.remote_env), 459 | "ASSETS_PATH": str(self.remote_assets), 460 | "STORE_PATH": str(self.remote_store), 461 | } 462 | ) 463 | return env_data 464 | 465 | def deploy(self): 466 | """ 467 | Deploy the env file for this app 468 | """ 469 | self.push_compose_to_host() 470 | self.write_env_to_host() 471 | self.host.ensure_parent_path(self.remote_store) 472 | 473 | def push_compose_to_host(self): 474 | compose_content: str = self.get_compose_content() 475 | compose_remote: PosixPath = self.remote_compose 476 | self.host.write(compose_remote, compose_content) 477 | 478 | def write_env_to_host(self): 479 | env_dict = self.get_host_env_data() 480 | env_str = dump_env(env_dict) 481 | self.host.write(self.remote_env, env_str) 482 | 483 | def call_compose(self, cmd: str, args: dict[str, Any] | None = None): 484 | """ 485 | Run a docker-compose command on the host 486 | """ 487 | self.host.call_compose( 488 | compose=self.remote_compose, 489 | env=self.remote_env, 490 | cmd=cmd, 491 | cmd_args=args, 492 | ) 493 | 494 | def up(self, *services: str): 495 | """ 496 | Bring up one or more services in this app 497 | 498 | If no services are specified, all services are selected 499 | """ 500 | if services: 501 | for service in services: 502 | self.call_compose("up --build --detach {service}", {"service": service}) 503 | else: 504 | self.call_compose("up --build --detach") 505 | 506 | def down(self, *services: str): 507 | """ 508 | Take down one or more containers in this app 509 | 510 | If no services are 
specified, all services are selected 511 | """ 512 | if services: 513 | for service in services: 514 | self.call_compose( 515 | "rm --force --stop -v {service}", {"service": service} 516 | ) 517 | else: 518 | self.call_compose("down") 519 | 520 | def restart(self, *services: str): 521 | """ 522 | Restart one or more services in this app 523 | 524 | If no services are specified, all services are selected 525 | """ 526 | if services: 527 | for service in services: 528 | self.call_compose("restart {service}", {"service": service}) 529 | else: 530 | self.call_compose("restart") 531 | 532 | def logs(self, service: str): 533 | """ 534 | Retrieve logs for the given service 535 | """ 536 | self.call_compose("logs {service}", {"service": service}) 537 | 538 | def exec(self, service: str, command: str): 539 | """ 540 | Execute a command in the specified service 541 | 542 | Command is passed as it arrives, values are not escaped 543 | """ 544 | self.call_compose(f"exec {{service}} {command}", {"service": service}) 545 | -------------------------------------------------------------------------------- /docker0s/app/names.py: -------------------------------------------------------------------------------- 1 | import re 2 | 3 | 4 | sanitise_pattern = re.compile(r"[^A-Za-z0-9_\- ]") 5 | word_pattern = re.compile(r"([A-Z])") 6 | 7 | 8 | def normalise_name(name: str): 9 | """ 10 | Normalise camelCase, snake_case and kebab-case to PascalCase 11 | 12 | Numbers are treated as lower-case characters, but cannot start the string 13 | """ 14 | # Strip anything outside A-Z, 0-9, space, dash or underscore 15 | norm = sanitise_pattern.sub("", name) 16 | 17 | # Add a space before each capital so we preserve camelCase and PascalCase 18 | norm = word_pattern.sub(r" \1", norm) 19 | 20 | # Remove snake and kebab joins, title everything and remove spaces 21 | norm = norm.replace("_", " ").replace("-", " ").title().replace(" ", "") 22 | 23 | # Check it starts with A-Z 24 | if not word_pattern.match(norm): 25 | raise ValueError(f"Names must start with A-Z: {name}") 26 | 27 | return norm 28 | 29 | 30 | def pascal_to_snake(name: str): 31 | """ 32 | Convert a PascalCase name to snake_case, for use in Docker containers 33 | 34 | Assumed to have come from a normalised_name so no sanitation required 35 | """ 36 | # Add a _ before each capital and remove the first 37 | name = word_pattern.sub(r"_\1", name).lstrip("_") 38 | 39 | return name.lower() 40 | -------------------------------------------------------------------------------- /docker0s/app/repo.py: -------------------------------------------------------------------------------- 1 | from pathlib import Path, PosixPath 2 | 3 | from ..exceptions import DefinitionError 4 | from ..git import fetch_repo_on_host, parse_git_url 5 | from .base import BaseApp 6 | 7 | 8 | # Remote dir name to clone the repo 9 | REPO_DIR = "repo" 10 | 11 | 12 | class RepoApp(BaseApp, abstract=True): 13 | """ 14 | A project which is in a git repository and needs to be cloned to the server and 15 | mounted into the container as a service 16 | 17 | The ``repo`` must be a ``git+`` repository URL. 18 | """ 19 | 20 | #: ``git+`` URL for the repository to deploy 21 | repo: str | None = None 22 | 23 | #: Relative path to the compose file within the repository. 24 | #: 25 | #: If this path exists, Docker0s will overwrite this file with the ``compose`` file. 
26 | #: 27 | #: See docs for recommended configuration 28 | #: 29 | #: Default: docker-compose.docker0s.yml 30 | repo_compose: str = "docker-compose.docker0s.yml" 31 | 32 | @classmethod 33 | def get_repo(cls) -> str: 34 | """ 35 | Validate repo argument 36 | """ 37 | if not cls.repo or not cls.repo.startswith(("git+ssh://", "git+https://")): 38 | raise DefinitionError("RepoApp must set a valid git URL in repo") 39 | return cls.repo 40 | 41 | @property 42 | def remote_repo_path(self) -> PosixPath: 43 | return self.remote_path / REPO_DIR 44 | 45 | @property 46 | def remote_compose(self) -> PosixPath: 47 | """ 48 | A PosixPath to the remote compose file 49 | """ 50 | _: Path 51 | compose_filename: str = self.repo_compose 52 | remote_path: PosixPath = self.remote_repo_path / compose_filename 53 | return remote_path 54 | 55 | def deploy(self): 56 | self.clone_on_host() 57 | super().deploy() 58 | 59 | def clone_on_host(self): 60 | """ 61 | Clone or update the repository on the host 62 | """ 63 | self.host.ensure_parent_path(self.remote_repo_path) 64 | 65 | # Break up URL into parts 66 | url, ref, repo_rel_path, name = parse_git_url(self.get_repo()) 67 | if repo_rel_path or name: 68 | raise DefinitionError( 69 | f'Invalid setting "{self.get_name()}.repo": Cannot clone a' 70 | " repository with a relative path or name" 71 | ) 72 | fetch_repo_on_host(host=self.host, path=self.remote_repo_path, url=url, ref=ref) 73 | -------------------------------------------------------------------------------- /docker0s/commands.py: -------------------------------------------------------------------------------- 1 | from functools import update_wrapper 2 | from pathlib import Path 3 | 4 | import click 5 | 6 | from .app import BaseApp 7 | from .app.names import normalise_name 8 | from .config import Config 9 | from .exceptions import Docker0sException, UsageError 10 | from .manifest import Manifest 11 | from .path import find_manifest 12 | 13 | 14 | class ExceptionHandlerGroup(click.Group): 15 | def __call__(self, *args, **kwargs): 16 | try: 17 | return self.main(*args, **kwargs) 18 | except Docker0sException as e: 19 | click.echo(e, err=True) 20 | 21 | 22 | @click.group(cls=ExceptionHandlerGroup) 23 | @click.option("--manifest", "-m") 24 | @click.pass_context 25 | def cli(ctx, manifest: str | None = None): 26 | ctx.ensure_object(dict) 27 | 28 | # Load config 29 | config_path = Path(click.get_app_dir("docker0s")) / "config.json" 30 | if config_path.exists(): 31 | config = Config.load(config_path) 32 | else: 33 | config = Config(path=config_path) 34 | 35 | # Get manifest path 36 | manifest_path: Path | None = None 37 | if manifest: 38 | manifest_path = Path(manifest) 39 | 40 | elif config.manifest_path: 41 | manifest_path = Path(config.manifest_path) 42 | 43 | else: 44 | path_dir = Path.cwd() 45 | manifest_path = find_manifest(path_dir) 46 | if manifest_path is None: 47 | manifest_path = path_dir 48 | 49 | # Try to load manifest 50 | if manifest_path and manifest_path.is_file(): 51 | manifest_obj = Manifest.load(manifest_path) 52 | else: 53 | manifest_obj = None 54 | 55 | ctx.obj.update( 56 | { 57 | "config": config, 58 | "manifest": manifest_obj, 59 | "manifest_path": manifest_path, 60 | } 61 | ) 62 | 63 | 64 | def with_manifest(f): 65 | @click.pass_context 66 | def new_func(ctx, *args, **kwargs): 67 | manifest = ctx.obj["manifest"] 68 | manifest_path = ctx.obj["manifest_path"] 69 | if manifest is None: 70 | raise UsageError(f"Manifest not found at {manifest_path}") 71 | print(f"Using manifest {manifest_path}") 72 
| 73 | return ctx.invoke(f, ctx.obj["manifest"], *args, **kwargs) 74 | 75 | return update_wrapper(new_func, f) 76 | 77 | 78 | class Target: 79 | app: str 80 | service: str | None 81 | 82 | def __init__(self, app: str, service: str | None = None): 83 | # Normalise app name and ensure the target string is valid 84 | if not app: 85 | app = "" 86 | app_norm = normalise_name(app) 87 | 88 | if not app_norm and service: 89 | raise UsageError(f"Invalid target .{service} - app missing") 90 | self.app = app_norm 91 | self.service = service 92 | 93 | def __str__(self) -> str: 94 | if self.service: 95 | return f"{self.app}.{self.service}" 96 | return self.app 97 | 98 | 99 | class TargetParamType(click.ParamType): 100 | name = "target" 101 | 102 | def convert( 103 | self, value: str, param: click.Parameter | None, ctx: click.core.Context | None 104 | ) -> Target: 105 | if ctx is not None and ctx.params.get("all_flag", False): 106 | return self.fail("Cannot specify both --all and targets") 107 | 108 | # TODO: never seems to reach this bracnh 109 | elif "." in value: 110 | parts = value.split(".") 111 | if len(parts) != 2: 112 | return self.fail(f"{value!r} is not a app.service target", param, ctx) 113 | 114 | app_name, service_name = parts 115 | else: 116 | app_name = value 117 | service_name = None 118 | return Target(app=app_name, service=service_name) 119 | 120 | 121 | class TargetManager: 122 | manifest: Manifest 123 | targets = tuple[Target] 124 | apps: list[BaseApp] 125 | app_lookup: dict[str, BaseApp] 126 | service_lookup: dict[BaseApp, list[str]] 127 | 128 | def __init__(self, manifest: Manifest, targets: tuple[Target, ...]): 129 | self.manifest = manifest 130 | self.targets = targets 131 | 132 | # Init manifest and prepare lookups 133 | self.apps = manifest.init_apps( 134 | *set(target.app for target in targets if target.app) 135 | ) 136 | self.app_lookup = {app.get_name(): app for app in self.apps} 137 | 138 | # Create service lookup for each app 139 | self.service_lookup = {} 140 | for target in targets: 141 | bound_app: BaseApp = self.app_lookup[target.app] 142 | if bound_app in self.service_lookup: 143 | if len(self.service_lookup[bound_app]) == 0: 144 | raise UsageError( 145 | f"Cannot target mix of app {target.app} and service {target}" 146 | ) 147 | else: 148 | self.service_lookup[bound_app] = [] 149 | 150 | if target.service is None: 151 | continue 152 | self.service_lookup[bound_app].append(target.service) 153 | 154 | def get_app_services(self): 155 | for app in self.apps: 156 | yield (app, self.service_lookup.get(app, [])) 157 | 158 | 159 | TARGET_TYPE = TargetParamType() 160 | 161 | 162 | @cli.command() 163 | @click.argument("manifest", type=str, required=False) 164 | @click.option("--alias", "-a", type=str) 165 | @click.option("--list", "-l", "list_alias", is_flag=True, default=False) 166 | @click.pass_context 167 | def use(ctx, manifest: str = "", alias: str = "", list_alias: bool = False): 168 | """ 169 | Set a manifest as the default 170 | """ 171 | config = ctx.obj["config"] 172 | 173 | if list_alias: 174 | if not config.manifest_alias: 175 | print("No aliases defined") 176 | else: 177 | print("Available aliases:") 178 | for alias, path in sorted(config.manifest_alias.items()): 179 | print(f" {alias}: {path}") 180 | return 181 | 182 | if manifest: 183 | # Load manifest to make sure it works 184 | manifest_path = Path(manifest).absolute() 185 | if not manifest_path.is_file(): 186 | # Look for alias 187 | if manifest in config.manifest_alias: 188 | manifest_path = 
Path(config.manifest_alias[manifest]) 189 | 190 | manifest = str(manifest_path) 191 | if not manifest_path.is_file(): 192 | raise UsageError(f"Manifest {manifest} not found") 193 | Manifest.load(manifest_path) 194 | 195 | # Update config 196 | if config.manifest_path: 197 | print(f"Was using manifest {config.manifest_path}") 198 | 199 | # Save new manifest 200 | config.manifest_path = manifest 201 | if alias: 202 | if manifest: 203 | config.manifest_alias[alias] = manifest 204 | print(f'Manifest alias "{alias}" saved') 205 | else: 206 | config.manifest_alias.pop(alias, None) 207 | print(f'Manifest alias "{alias}" cleared') 208 | config.save() 209 | 210 | if manifest: 211 | print(f"Now using manifest {manifest}") 212 | else: 213 | print("Now using no manifest") 214 | 215 | 216 | @cli.command() 217 | @with_manifest 218 | def ls(manifest: Manifest): 219 | """ 220 | List all apps 221 | 222 | Usage: 223 | docker0s ls 224 | """ 225 | if manifest.host: 226 | print(f"Host: {manifest.host()}") 227 | print("Apps:") 228 | app: type[BaseApp] 229 | for app in manifest.apps: 230 | print(f" {app.get_name()}") 231 | 232 | 233 | @cli.command() 234 | @with_manifest 235 | @click.argument("apps", type=str, required=False, nargs=-1) 236 | @click.option("--all", "-a", "all_flag", is_flag=True) 237 | def deploy(manifest: Manifest, apps: tuple[str], all_flag: bool = False): 238 | """ 239 | Deploy one or more apps 240 | 241 | Usage: 242 | docker0s deploy myapp 243 | docker0s deploy traefik website 244 | docker0s deploy --all 245 | """ 246 | if not apps and not all_flag: 247 | raise UsageError("Must specify --all or one or more apps") 248 | 249 | safe_apps = (normalise_name(app_name) for app_name in apps) 250 | bound_apps = manifest.init_apps(*safe_apps) 251 | for app in bound_apps: 252 | app.deploy() 253 | 254 | 255 | @cli.command() 256 | @with_manifest 257 | @click.argument("targets", type=TARGET_TYPE, required=False, nargs=-1) 258 | @click.option("--all", "-a", "all_flag", is_flag=True) 259 | def up(manifest: Manifest, targets: tuple[Target, ...], all_flag: bool = False): 260 | """ 261 | Bring up all containers for one or more apps or services: 262 | docker0s up myapp 263 | docker0s up traefik website.backend 264 | 265 | Bring up all containers for all apps: 266 | docker0s deploy --all 267 | """ 268 | if not targets and not all_flag: 269 | raise UsageError("Must specify --all or one or more targets") 270 | 271 | manager = TargetManager(manifest, targets) 272 | for app, services in manager.get_app_services(): 273 | app.up(*services) 274 | 275 | 276 | @cli.command() 277 | @with_manifest 278 | @click.argument("targets", type=TARGET_TYPE, required=False, nargs=-1) 279 | @click.option("--all", "-a", "all_flag", is_flag=True) 280 | def down(manifest: Manifest, targets: tuple[Target, ...], all_flag: bool = False): 281 | if not targets and not all_flag: 282 | raise UsageError("Must specify --all or one or more targets") 283 | 284 | manager = TargetManager(manifest, targets) 285 | for app, services in manager.get_app_services(): 286 | app.down(*services) 287 | 288 | 289 | @cli.command() 290 | @with_manifest 291 | @click.argument("targets", type=TARGET_TYPE, required=False, nargs=-1) 292 | @click.option("--all", "-a", "all_flag", is_flag=True) 293 | def restart(manifest: Manifest, targets: tuple[Target, ...], all_flag: bool = False): 294 | if not targets and not all_flag: 295 | raise UsageError("Must specify --all or one or more targets") 296 | 297 | manager = TargetManager(manifest, targets) 298 | for app, services in 
manager.get_app_services(): 299 | app.restart(*services) 300 | 301 | 302 | @cli.command() 303 | @with_manifest 304 | @click.argument("target", type=TARGET_TYPE) 305 | @click.argument("command", type=str) 306 | def exec(manifest: Manifest, target: Target, command: str): 307 | if not target.service: 308 | raise UsageError("Must specify an app.service target") 309 | app = manifest.init_apps(target.app)[0] 310 | app.exec(service=target.service, command=command) 311 | 312 | 313 | @cli.command() 314 | @with_manifest 315 | def status(manifest: Manifest): 316 | if not manifest.host: 317 | raise UsageError("No host found in manifest") 318 | 319 | host = manifest.init_host() 320 | result = host.exec(cmd="docker ps --all", verbose=False) 321 | print(result.stdout) 322 | 323 | 324 | @cli.command() 325 | @with_manifest 326 | @click.argument("target", type=TARGET_TYPE) 327 | def logs(manifest: Manifest, target: Target): 328 | if not target.service: 329 | raise UsageError("Must specify an app.service target") 330 | app = manifest.init_apps(target.app)[0] 331 | app.logs(service=target.service) 332 | 333 | 334 | @cli.command() 335 | @with_manifest 336 | @click.argument("target", type=Target) 337 | @click.argument("command", type=str) 338 | @click.argument("arguments", nargs=-1, type=str) 339 | def cmd(manifest: Manifest, target: Target, command: str, arguments: list[str]): 340 | if target.service: 341 | raise UsageError("Must specify an app target, not an app.service") 342 | app = manifest.init_apps(target.app)[0] 343 | cmd_fn = app.get_command(command) 344 | cmd_fn(*arguments) 345 | 346 | 347 | def invoke(): 348 | cli(obj={}) 349 | -------------------------------------------------------------------------------- /docker0s/config.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import json 4 | from dataclasses import asdict, dataclass, field 5 | from pathlib import Path 6 | from typing import get_type_hints 7 | 8 | from .exceptions import DefinitionError, UsageError 9 | 10 | 11 | @dataclass 12 | class Config: 13 | path: Path 14 | manifest_path: str | None = None 15 | manifest_alias: dict[str, str] = field(default_factory=dict) 16 | 17 | @classmethod 18 | def load(cls, path: Path) -> Config: # TODO: change to Self when supported 19 | with path.open("r") as file: 20 | raw_data = json.load(file) 21 | 22 | version = raw_data.pop("version", None) 23 | if version != 1: 24 | raise UsageError(f"Invalid config file at {path}") 25 | 26 | annotations = get_type_hints(cls) 27 | safe_data = {} 28 | for key, value in raw_data.items(): 29 | if key == "path" or key not in annotations: 30 | raise DefinitionError(f"Unexpected value {key} for {path}") 31 | safe_data[key] = value 32 | 33 | safe_data["path"] = path 34 | 35 | return cls(**safe_data) 36 | 37 | def save(self): 38 | # Prep data 39 | data = asdict(self) 40 | del data["path"] 41 | data["version"] = 1 42 | 43 | # Write 44 | if not self.path.parent.exists(): 45 | self.path.parent.mkdir() 46 | 47 | with self.path.open("w") as file: 48 | json.dump(data, file) 49 | -------------------------------------------------------------------------------- /docker0s/env.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | from pathlib import Path 4 | 5 | from dotenv import dotenv_values 6 | 7 | 8 | def read_env(*paths: Path, **values: str | int) -> dict[str, str | int | None]: 9 | """ 10 | Read env vars from one or more 
manifest paths and update it with a dict 11 | """ 12 | env: dict[str, str | int | None] = {} 13 | for path in paths: 14 | handle = open(path, "r") 15 | file_values = dotenv_values(stream=handle) 16 | env.update(file_values) 17 | handle.close() 18 | 19 | env.update(values) 20 | 21 | return env 22 | 23 | 24 | def dump_env(env: dict[str, str | int | None]) -> str: 25 | """ 26 | Convert an env dict into a multi-line string 27 | """ 28 | lines = [] 29 | for key, val in env.items(): 30 | if val is None: 31 | lines.append(key) 32 | elif isinstance(val, int): 33 | lines.append(f"{key}={val}") 34 | else: 35 | escaped = val.replace('"', r"\"") 36 | lines.append(f'{key}="{escaped}"') 37 | return "\n".join(lines) 38 | -------------------------------------------------------------------------------- /docker0s/exceptions.py: -------------------------------------------------------------------------------- 1 | class Docker0sException(Exception): 2 | pass 3 | 4 | 5 | class DefinitionError(Docker0sException): 6 | """ 7 | A problem in the manifest definition syntax or logic 8 | """ 9 | 10 | pass 11 | 12 | 13 | class UsageError(Docker0sException): 14 | """ 15 | A problem in how the app has been used 16 | """ 17 | 18 | pass 19 | 20 | 21 | class ExecutionError(Docker0sException): 22 | """ 23 | A problem when trying to perform an action 24 | """ 25 | 26 | pass 27 | -------------------------------------------------------------------------------- /docker0s/git.py: -------------------------------------------------------------------------------- 1 | """ 2 | GitHub helpers 3 | """ 4 | from __future__ import annotations 5 | 6 | import hashlib 7 | import re 8 | import shlex 9 | import subprocess 10 | from functools import lru_cache 11 | from pathlib import Path, PosixPath 12 | from typing import TYPE_CHECKING 13 | 14 | from .exceptions import DefinitionError, ExecutionError 15 | from .settings import CACHE_PATH 16 | 17 | 18 | if TYPE_CHECKING: 19 | from .host import Host 20 | 21 | GIT_SSH_PATTERN = re.compile( 22 | # url: git@github.com:username/repo 23 | r"^git\+ssh://(?:(?P.+?:.+?))" 24 | # ref: a tag, branch or commit 25 | r"(@(?P.+?))?" 26 | # path: a file within the repo 27 | r"(#(?P.+?))?" 28 | # name: the name of the object in the manifest 29 | r"(::(?P.+?))?$" 30 | ) 31 | GIT_HTTPS_PATTERN = re.compile( 32 | # url: https://github.com/username/repo 33 | r"^git\+(?Phttps://.+?)" 34 | # ref: a tag, branch or commit 35 | r"(@(?P.+?))?" 36 | # path: a file within the repo 37 | r"(#(?P.+?))?" 
38 | # name: the name of the object in the manifest 39 | r"(::(?P.+?))?$" 40 | ) 41 | GIT_REMOTE_SHOW_PATTERN = re.compile(r"HEAD branch: (\S+)") 42 | 43 | 44 | class CommandError(ValueError): 45 | def __init__( 46 | self, 47 | *args, 48 | cwd: Path | None = None, 49 | result: subprocess.CompletedProcess | None = None, 50 | **kwargs, 51 | ): 52 | super().__init__(*args, **kwargs) 53 | self.cwd = cwd 54 | self.result = result 55 | 56 | def __str__(self): 57 | msg = f"{self.args[0]}\n cwd={self.cwd}" 58 | if self.result: 59 | msg += ( 60 | f"\n returncode={self.result.returncode}" 61 | f"\n stdout={self.result.stdout.decode()}" 62 | f"\n stderr={self.result.stderr.decode()}" 63 | ) 64 | return msg 65 | 66 | 67 | def call( 68 | *cmd: str, 69 | cwd: Path | None = None, 70 | ) -> subprocess.CompletedProcess: 71 | # This specific invocation will allow git to use the system's ssh agent 72 | result = subprocess.run( 73 | shlex.join(cmd), 74 | cwd=cwd, 75 | stdin=subprocess.PIPE, 76 | stdout=subprocess.PIPE, 77 | stderr=subprocess.PIPE, 78 | shell=True, 79 | start_new_session=True, 80 | ) 81 | return result 82 | 83 | 84 | def call_or_die( 85 | *cmd: str, 86 | cwd: Path | None = None, 87 | expected: str | None = None, 88 | ) -> subprocess.CompletedProcess: 89 | result = call(*cmd, cwd=cwd) 90 | 91 | if result.returncode != 0: 92 | raise CommandError( 93 | f"Command failed with exit code {result.returncode}", 94 | cwd=cwd, 95 | result=result, 96 | ) 97 | 98 | if expected and expected not in result.stdout.decode(): 99 | raise CommandError( 100 | "Command failed with unexpected output", 101 | cwd=cwd, 102 | result=result, 103 | ) 104 | return result 105 | 106 | 107 | def parse_git_url(path: str) -> tuple[str, str | None, str | None, str | None]: 108 | """ 109 | Parses a git URL in the formats: 110 | 111 | git+ssh://url@ref#path/to/file::name 112 | git+https://url@ref#path/to/file::name 113 | 114 | and returns a tuple of (repo, ref, path, name) 115 | """ 116 | if path.startswith("git+ssh://"): 117 | pattern = GIT_SSH_PATTERN 118 | elif path.startswith("git+https://"): 119 | pattern = GIT_HTTPS_PATTERN 120 | else: 121 | # Cannot support 122 | raise DefinitionError(f"Unrecognised git URL format {path}") 123 | 124 | # Pattern match 125 | matches = pattern.match(path) 126 | if not matches: 127 | raise DefinitionError(f"Unrecognised git URL format {path}") 128 | data = matches.groupdict() 129 | 130 | return (data["repo"], data["ref"], data["path"], data["name"]) 131 | 132 | 133 | def _parse_remote_show_to_head(raw: str) -> str: 134 | matches = GIT_REMOTE_SHOW_PATTERN.match(raw) 135 | if not matches: 136 | raise ExecutionError( 137 | 'Command "git remote show origin" did not return a HEAD branch' 138 | ) 139 | return matches.group(1) 140 | 141 | 142 | @lru_cache() 143 | def fetch_repo(url: str, ref: str | None) -> Path: 144 | # Build repo path 145 | repo_dir = hashlib.md5(url.encode()).hexdigest() 146 | repo_path = CACHE_PATH / repo_dir 147 | 148 | # Clone 149 | if not repo_path.exists(): 150 | call_or_die("mkdir", "-p", str(repo_path)) 151 | call_or_die("git", "init", cwd=repo_path) 152 | call_or_die("git", "remote", "add", "origin", url, cwd=repo_path) 153 | 154 | # If no ref, use remote's default branch 155 | if not ref: 156 | result = call_or_die( 157 | "git", "remote", "show", "origin", cwd=repo_path, expected="HEAD branch:" 158 | ) 159 | ref = _parse_remote_show_to_head(result.stdout.decode()) 160 | 161 | # Fetch the ref and check it out 162 | call_or_die("git", "fetch", "origin", ref, "--depth=1", 
cwd=repo_path) 163 | call_or_die("git", "checkout", ref, cwd=repo_path) 164 | 165 | # See if it's a branch or commit 166 | result = call( 167 | "git", "rev-parse", "--abbrev-ref", "--verify", f"{ref}@{{u}}", cwd=repo_path 168 | ) 169 | if result.returncode == 0: 170 | # It is a branch, use reset to get to head 171 | call_or_die("git", "reset", "--hard", f"origin/{ref}", cwd=repo_path) 172 | 173 | return repo_path 174 | 175 | 176 | def fetch_repo_on_host(host: Host, path: PosixPath | str, url: str, ref: str | None): 177 | # Clone 178 | if not host.exists(path): 179 | host.mkdir(path) 180 | host.exec("git init", cwd=path) 181 | host.exec("git remote add origin {url}", args={"url": url}, cwd=path) 182 | 183 | # If no ref, use remote's default branch 184 | if not ref: 185 | result = host.exec("git remote show origin", cwd=path) 186 | ref = _parse_remote_show_to_head(result.stdout) 187 | 188 | # Fetch the ref and check it out 189 | host.exec("git fetch origin {ref} --depth=1", args={"ref": ref}, cwd=path) 190 | host.exec("git checkout {ref}", args={"ref": ref}, cwd=path) 191 | 192 | # See if it's a branch or commit 193 | result = host.exec( 194 | "git rev-parse --abbrev-ref --verify {ref}@{{u}}", 195 | args={"ref": ref}, 196 | cwd=path, 197 | can_fail=True, 198 | ) 199 | if result.ok: 200 | # It is a branch, use reset to get to head 201 | host.exec("git reset --hard origin/{ref}", args={"ref": ref}, cwd=path) 202 | -------------------------------------------------------------------------------- /docker0s/host.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | from functools import lru_cache 4 | from io import StringIO 5 | from pathlib import Path, PosixPath 6 | from shlex import quote 7 | from typing import Any 8 | 9 | from fabric import Connection 10 | from fabric.runners import Result 11 | 12 | from .manifest_object import ManifestObject 13 | 14 | 15 | class Host(ManifestObject, abstract=True): 16 | #: Abstract base classes should be marked as abstract so they are ignored by the 17 | #: manifest loader 18 | abstract: bool = True 19 | 20 | #: Server hostname 21 | name: str 22 | 23 | #: Server SSH port 24 | #: 25 | #: Default: 22 26 | port: str | int = 22 27 | 28 | #: Username for login 29 | user: str | None 30 | 31 | #: Home dir for user 32 | #: 33 | #: Use ``{user}`` to replace with the Host.user 34 | home: str = "/home/{user}/" 35 | 36 | #: Path to the docker0s working dir on the server 37 | #: 38 | #: Should be absolute or relative to the connecting user's home directory, do not 39 | #: use tildes. 
See fabric docs for details: 40 | #: https://docs.fabfile.org/en/stable/api/transfer.html 41 | root_path: str = "apps" 42 | 43 | #: Docker compose command 44 | #: 45 | #: Defaults to docker-compose, new installations may prefer ``docker compose`` 46 | compose_command: str = "docker-compose" 47 | 48 | # Internal connection handle 49 | _connection: Connection | None = None 50 | 51 | def __str__(self) -> str: 52 | value = self.name 53 | if self.port: 54 | value = f"{value}:{self.port}" 55 | if self.user: 56 | value = f"{self.user}@{value}" 57 | return value 58 | 59 | def path(self, app, service: str | None = None) -> PosixPath: 60 | """ 61 | Remote path builder to ensure consistency 62 | """ 63 | home = self.home.format(user=self.user) 64 | path = PosixPath(home) / self.root_path / app 65 | if service: 66 | path /= service 67 | return path 68 | 69 | @property 70 | def connection(self) -> Connection: 71 | """ 72 | Create an SSH connection, or retrieve an existing one 73 | """ 74 | if not self._connection: 75 | self._connection = Connection( 76 | host=self.name, 77 | port=self.port, 78 | user=self.user, 79 | connect_kwargs={"allow_agent": True}, 80 | forward_agent=True, 81 | ) 82 | return self._connection 83 | 84 | def exec( 85 | self, 86 | cmd: str, 87 | args: dict[str, Any] | None = None, 88 | env: dict[str, Any] | None = None, 89 | cwd: PosixPath | str | None = None, 90 | can_fail: bool = False, 91 | verbose: bool = True, 92 | ) -> Result: 93 | """ 94 | Execute a command on the remote server 95 | 96 | Args: 97 | cmd (str): The command string to execute on the server. Can use named 98 | placeholders for use with ``.format`` and the ``args`` dict 99 | args (dict |None): Optional dict of command arguments. These will be escaped 100 | and passed into cmd.format(). 101 | env (dict | None): Optional dictionary of env vars to run the command with. 
102 | Note that this is an independent dict which will not use App env 103 | definitions unless you pass env=App.get_env_data() 104 | """ 105 | if args is not None: 106 | safe_args = {key: quote(str(val)) for key, val in args.items()} 107 | cmd = cmd.format(**safe_args) 108 | 109 | result: Result 110 | with self.connection.cd(str(cwd or "")): 111 | result = self.connection.run( 112 | cmd, 113 | env=env, 114 | warn=can_fail, 115 | echo=verbose, 116 | hide=False if verbose else "both", 117 | pty=True, 118 | ) 119 | 120 | if not result.ok and not can_fail: 121 | raise ValueError(f"Command '{cmd}' failed: {result.stderr.strip()}") 122 | return result 123 | 124 | def call_compose( 125 | self, 126 | compose: PosixPath, 127 | env: PosixPath, 128 | cmd: str, 129 | cmd_args: dict[str, Any] | None = None, 130 | ): 131 | """ 132 | Execute a docker-compose command on the server 133 | 134 | The paths for compose and env are wrapped in quotes, the command is passed to 135 | the host unaltered 136 | """ 137 | args = {"compose": compose, "env": env} 138 | if cmd_args: 139 | args.update(cmd_args) 140 | self.exec( 141 | cmd=f"{self.compose_command} --file {{compose}} --env-file {{env}} {cmd}", 142 | args=args, 143 | ) 144 | 145 | def push(self, source: Path, destination: PosixPath): 146 | """ 147 | Push a file to the server 148 | """ 149 | self.ensure_parent_path(destination) 150 | self.connection.put(str(source), str(destination)) 151 | 152 | def write(self, filename: PosixPath, content: str): 153 | """ 154 | Write a file to the server 155 | """ 156 | self.ensure_parent_path(filename) 157 | data = StringIO(content) 158 | self.connection.put(data, str(filename)) 159 | 160 | @lru_cache 161 | def ensure_parent_path(self, filename: PosixPath): 162 | """ 163 | Given a path to a file, ensure the parent directory exists. 164 | 165 | For example:: 166 | 167 | host.ensure_parent_path(root / "app_name" / "env") 168 | 169 | will connect to the server and run:: 170 | 171 | mkdir -p "{root}/app_name" 172 | 173 | This will only ever be run once per host. 174 | """ 175 | self.mkdir(filename.parent) 176 | 177 | @lru_cache 178 | def mkdir(self, path: PosixPath): 179 | """ 180 | Make the specified dir and any parent dirs on the host. 181 | 182 | If it already exists, fail silently. 183 | 184 | This will only ever be run once per host. 
185 | """ 186 | self.exec("mkdir -p {path}", args={"path": path}) 187 | 188 | def exists(self, path: PosixPath | str) -> bool: 189 | """ 190 | Return True if the path exists on the host 191 | """ 192 | result = self.exec("test -e {path}", args={"path": path}, can_fail=True) 193 | return result.ok 194 | -------------------------------------------------------------------------------- /docker0s/manifest.py: -------------------------------------------------------------------------------- 1 | """ 2 | Manifest management 3 | """ 4 | from __future__ import annotations 5 | 6 | import sys 7 | from importlib.machinery import ModuleSpec, SourceFileLoader 8 | from importlib.util import module_from_spec 9 | from inspect import isclass 10 | from pathlib import Path 11 | 12 | import yaml 13 | 14 | from .app import App, BaseApp, abstract_app_registry 15 | from .app.names import normalise_name 16 | from .exceptions import DefinitionError, UsageError 17 | from .host import Host 18 | from .path import path_to_uuid 19 | 20 | 21 | class Manifest: 22 | path: Path 23 | apps: list[type[BaseApp]] 24 | app_lookup: dict[str, type[BaseApp]] 25 | host: type[Host] | None = None 26 | 27 | def __init__(self, path: Path): 28 | self.path = path 29 | self.apps: list[type[BaseApp]] = [] 30 | self.app_lookup: dict[str, type[BaseApp]] = {} 31 | 32 | def __str__(self) -> str: 33 | return str(self.path) 34 | 35 | def add_app(self, app: type[BaseApp]) -> None: 36 | self.apps.append(app) 37 | self.app_lookup[app.__name__] = app 38 | 39 | def has_app(self, name: str) -> bool: 40 | return name in self.app_lookup 41 | 42 | def get_app(self, name: str) -> type[BaseApp]: 43 | if app := self.app_lookup.get(name): 44 | return app 45 | raise DefinitionError(f"No app with name {name} in manifest {self.path}") 46 | 47 | def prepare(self, history: list[Path]): 48 | """ 49 | Prepare apps and host for use 50 | 51 | * Load base manifests for any apps which define an ``extends`` 52 | """ 53 | for app in self.apps: 54 | app.apply_base_manifest(history=history) 55 | 56 | @classmethod 57 | def load(cls, path: Path, history: list[Path] | None = None) -> Manifest: 58 | if not path.exists(): 59 | raise DefinitionError(f"Cannot load {path} - file not found") 60 | 61 | if history is None: 62 | history = [] 63 | if path in history: 64 | raise DefinitionError(f"Cannot load {path} - recursive extends detected") 65 | history.append(path) 66 | 67 | # Load manifest 68 | filetype = path.suffix.lower() 69 | if filetype == ".py": 70 | manifest = cls.load_py(path) 71 | elif filetype == ".yml": 72 | manifest = cls.load_yml(path) 73 | else: 74 | raise DefinitionError( 75 | f"Manifest {path} filetype invalid - must be .yml or .py" 76 | ) 77 | 78 | manifest.prepare(history) 79 | return manifest 80 | 81 | @classmethod 82 | def load_py(cls, path: Path) -> Manifest: 83 | # Load module 84 | module = SourceFileLoader( 85 | f"docker0s.manifest.loaded.{path_to_uuid(path)}", 86 | str(path), 87 | ).load_module() 88 | setattr(module, "__manifest_path__", path) 89 | sys.modules[module.__name__] = module 90 | 91 | # Collect apps and hosts 92 | manifest = Manifest(path) 93 | for obj in module.__dict__.values(): 94 | if not isclass(obj): 95 | continue 96 | 97 | if issubclass(obj, BaseApp) and not obj.abstract: 98 | manifest.add_app(obj) 99 | 100 | elif issubclass(obj, Host) and not obj.abstract: 101 | if manifest.host is not None: 102 | raise ValueError("Cannot define more than one host in a manifest") 103 | manifest.host = obj 104 | 105 | return manifest 106 | 107 | @classmethod 
108 | def load_yml(cls, path: Path) -> Manifest: 109 | raw = path.read_text() 110 | data = yaml.safe_load(raw) 111 | 112 | # Validate top level 113 | apps_raw = data.pop("apps", []) 114 | host_raw = data.pop("host", None) 115 | if len(data) > 0: 116 | raise DefinitionError( 117 | f"Error loading {path}: unexpected root elements {', '.join(data.keys())}" 118 | ) 119 | if not isinstance(apps_raw, dict): 120 | raise DefinitionError( 121 | f"Error loading {path}: expecting root apps definition, found {type(apps_raw)}" 122 | ) 123 | if host_raw and not isinstance(host_raw, dict): 124 | raise DefinitionError( 125 | f"Error loading {path}: expecting root host definition, found {type(host_raw)}" 126 | ) 127 | 128 | # Create module and start manifest 129 | module_spec = ModuleSpec( 130 | f"docker0s.manifest.loaded.{path_to_uuid(path)}", 131 | None, 132 | origin=str(path), 133 | ) 134 | module = module_from_spec(module_spec) 135 | module.__file__ = str(path) 136 | sys.modules[module.__name__] = module 137 | manifest = Manifest(path) 138 | 139 | # Apps 140 | for app_name, app_raw in apps_raw.items(): 141 | if not isinstance(app_raw, dict): 142 | raise DefinitionError( 143 | f"Error loading {path}: expecting app definition" 144 | f" for {app_name}, found {type(app_raw)}" 145 | ) 146 | 147 | # Get app class 148 | app_type: str = app_raw.pop("type", None) 149 | app_base_cls: type[BaseApp] 150 | if app_type is None: 151 | app_base_cls = App 152 | else: 153 | if app_type not in abstract_app_registry: 154 | raise DefinitionError(f"Unknown app type {app_type}") 155 | app_base_cls = abstract_app_registry[app_type] 156 | 157 | # YAML supports snake case names because it looks nicer. 158 | # Convert to PascalCase 159 | name = normalise_name(app_name) 160 | if manifest.has_app(name): 161 | raise DefinitionError( 162 | f"Error loading {path}: normalised name collision:" 163 | f" {app_name} and {name} are equivalent" 164 | ) 165 | 166 | # Build app class and add to manifest 167 | app_cls: type[BaseApp] = app_base_cls.from_dict( 168 | name=name, path=path, module=module.__name__, data=app_raw 169 | ) 170 | setattr(module, name, app_cls) 171 | manifest.add_app(app_cls) 172 | 173 | # Host 174 | if host_raw: 175 | manifest.host = Host.from_dict( 176 | name="ImportedHost", path=path, module=module.__name__, data=host_raw 177 | ) 178 | 179 | return manifest 180 | 181 | def init_host(self) -> Host: 182 | """ 183 | Instantiate the host 184 | """ 185 | if not self.host: 186 | raise UsageError("Cannot initialise a manifest that has no host") 187 | return self.host() 188 | 189 | def init_apps(self, *app_names: str) -> list[BaseApp]: 190 | """ 191 | Given one or more names of apps, find them in the registry and initialise them 192 | with the manifest host 193 | """ 194 | # Per-exec host options can be added here later 195 | host = self.init_host() 196 | 197 | # Find app classes 198 | app_classes: list[type[BaseApp]] 199 | if not app_names: 200 | app_classes = self.apps 201 | else: 202 | app_classes = [] 203 | for app_name in app_names: 204 | app_cls: type[BaseApp] = self.get_app(app_name) 205 | app_classes.append(app_cls) 206 | 207 | # Initialise the apps 208 | apps: list[BaseApp] = [] 209 | for app_cls in app_classes: 210 | apps.append(app_cls(host=host)) 211 | 212 | # Tell the apps about each other to allow cross-app logic 213 | manifest_apps = {a.get_name(): a for a in apps} 214 | for app in apps: 215 | app.manifest_apps = manifest_apps 216 | 217 | return apps 218 | 
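# Usage sketch (illustrative only, not part of the docker0s source): one way the
# loader above can be driven directly. It assumes a local ``d0s-manifest.yml``
# that defines a host; the app name "website" is a placeholder.
from pathlib import Path

from docker0s.manifest import Manifest


if __name__ == "__main__":
    manifest = Manifest.load(Path("d0s-manifest.yml"))
    # Pass no names to initialise every app in the manifest
    apps = manifest.init_apps("website")
    for app in apps:
        app.deploy()  # push the compose and env files to the host
        app.up()      # then bring the services up over SSH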
-------------------------------------------------------------------------------- /docker0s/manifest_object.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | from pathlib import Path 4 | from typing import Any, TypeVar, get_type_hints 5 | 6 | from .app.names import normalise_name 7 | from .exceptions import DefinitionError 8 | 9 | 10 | ManifestObjectType = TypeVar("ManifestObjectType", bound="ManifestObject") 11 | 12 | 13 | class ManifestObject: 14 | """ 15 | Base class for manifest objects 16 | """ 17 | 18 | #: Abstract base classes should be marked as abstract so they are ignored by the 19 | #: manifest loader 20 | abstract: bool = True 21 | 22 | def __init_subclass__( 23 | cls, abstract: bool = False, name: str | None = None, **kwargs 24 | ): 25 | """ 26 | Set abstract flag and register abstract classes with the registry 27 | """ 28 | cls.abstract = abstract 29 | 30 | if name is None: 31 | name = cls.__name__ 32 | else: 33 | cls.__name__ = name 34 | 35 | # Ensure that names in Python are in CamelCase 36 | # Names from YAML are normalised by now 37 | normalised = normalise_name(cls.__name__) 38 | if normalised != cls.__name__: 39 | raise DefinitionError( 40 | f"Python manifest objects must be named in CamelCase: {cls.__name__}" 41 | ) 42 | 43 | @classmethod 44 | def from_dict( 45 | cls: type[ManifestObjectType], 46 | name: str, 47 | path: Path, 48 | module: str, 49 | data: dict[str, Any], 50 | ) -> type[ManifestObjectType]: 51 | """ 52 | Build a concrete subclass of this app using the data in the dict 53 | 54 | Args: 55 | name: Name of app 56 | path: Path of manifest 57 | module: Name of module 58 | data: App attributes 59 | """ 60 | # No type checking here - see https://github.com/python/mypy/issues/9183 and 61 | # https://github.com/python/mypy/issues/5865 62 | class FromDict(cls, name=name, path=path): # type: ignore 63 | pass 64 | 65 | FromDict.__module__ = module 66 | 67 | # Collect annotations 68 | annotations = get_type_hints(FromDict) 69 | 70 | for key, value in data.items(): 71 | if key not in annotations: 72 | raise DefinitionError( 73 | f"Unexpected attribute {key} for {cls.__name__}" 74 | f" {FromDict.get_name()} in {FromDict._file}" 75 | ) 76 | 77 | setattr(FromDict, key, value) 78 | 79 | return FromDict 80 | -------------------------------------------------------------------------------- /docker0s/path.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import hashlib 4 | from pathlib import Path 5 | from typing import Any 6 | 7 | from .exceptions import DefinitionError 8 | from .git import fetch_repo, parse_git_url 9 | 10 | 11 | def find_manifest(path: Path) -> Path | None: 12 | """ 13 | Look within the given Path for a manifest file 14 | 15 | Returns the full Path to the manifest, or None if not found 16 | """ 17 | files = [ 18 | "d0s-manifest.py", 19 | "d0s-manifest.yml", 20 | "d0s-manifest.yaml", 21 | ] 22 | 23 | # Return the first which exists 24 | for filename in files: 25 | filepath = path / filename 26 | if filepath.exists(): 27 | return filepath 28 | 29 | return None 30 | 31 | 32 | def path_to_uuid(path: Path) -> str: 33 | """ 34 | Convert a path into a UUID 35 | """ 36 | hash = hashlib.md5(str(path).encode()).hexdigest() 37 | return f"_{hash}" 38 | 39 | 40 | def path_to_relative(root: Path, path: Path) -> str: 41 | """ 42 | Given a root path and a sub-path, return the trailing relative path 43 | """ 44 | 
if not path.is_relative_to(root): 45 | raise DefinitionError(f"Path {path} is not a sub-path of {root}") 46 | relative = str(path)[len(str(root)) :] 47 | return relative.lstrip("/") 48 | 49 | 50 | class ExtendsPath: 51 | """ 52 | Path to a base manifest 53 | """ 54 | 55 | #: Original ``extends`` path, or dir containing a base d0s-manifest 56 | original: str 57 | 58 | #: Current working directory - directory of the manifest which set ``extends`` - for 59 | #: resolving relative paths 60 | cwd: Path 61 | 62 | #: Full ``extends`` path to the manifest, or dir containing a d0s-manifest 63 | path: Path 64 | 65 | #: Git repository 66 | repo: str | None = None 67 | ref: str | None = None 68 | 69 | #: App name within the manifest 70 | name: str | None = None 71 | 72 | def __init__(self, path: str, cwd: Path): 73 | """ 74 | Resolve the path to a local Path, retrieving a local copy if a remote source 75 | """ 76 | self.original = path 77 | self.cwd = cwd 78 | 79 | if path.startswith(("git+ssh://", "git+https://")): 80 | # Break up URL into parts 81 | self.repo, self.ref, repo_rel_path, self.name = parse_git_url(path) 82 | 83 | # Pull and build local path 84 | repo_local_path = self._pull_repo() 85 | self.path = (repo_local_path / (repo_rel_path or "")).resolve() 86 | 87 | # Validate local path 88 | # 89 | # This is to catch mistakes and bad practice, not security issues - we'll 90 | # potentially be running Python with no attempt at sandboxing 91 | if not self.path.is_relative_to(repo_local_path): 92 | raise DefinitionError( 93 | f"Invalid git URL format {path}" 94 | f" - repo path {repo_rel_path} not relative to repo root" 95 | ) 96 | else: 97 | if "::" in path: 98 | path, self.name = path.split("::") 99 | self.path = (self.cwd / path).resolve() 100 | 101 | def __truediv__(self, other: Any) -> Path: 102 | """ 103 | Add ``other`` to this path, where ``other`` must be within this path 104 | """ 105 | return (self.path / other).resolve() 106 | 107 | def _pull_repo(self) -> Path: 108 | """ 109 | Clone local copy of repo 110 | """ 111 | local_path: Path = fetch_repo(self.repo, self.ref) 112 | return local_path 113 | 114 | def get_manifest(self) -> Path: 115 | # If we've been given the path to the manifest file then we're already there 116 | if self.path.is_file(): 117 | return self.path 118 | 119 | # Not found, we must have a dir to search 120 | if not self.path.is_dir(): 121 | raise DefinitionError( 122 | f"Manifest not found at {self.path} ({self.original})" 123 | ) 124 | 125 | # We'll search for these 126 | filepath = find_manifest(self.path) 127 | if filepath is None: 128 | raise DefinitionError( 129 | f"Manifest not found in {self.path} ({self.original})" 130 | ) 131 | return filepath 132 | -------------------------------------------------------------------------------- /docker0s/settings.py: -------------------------------------------------------------------------------- 1 | from os import getenv 2 | from pathlib import Path 3 | 4 | 5 | #: Docker0s local path, default ~/.docker0s/ 6 | LOCAL_PATH = Path(getenv("DOCKER0S_PATH", "~/.docker0s")).expanduser() 7 | 8 | #: Cache dir, default ~/.docker0s/cache/ 9 | CACHE_PATH = Path(getenv("DOCKER0S_CACHE_PATH", LOCAL_PATH / "cache")).expanduser() 10 | 11 | #: Remote filename for env files 12 | FILENAME_ENV = getenv("DOCKER0S_ENV_FILENAME", "env") 13 | 14 | #: Remote filename for compose files 15 | FILENAME_COMPOSE = getenv("DOCKER0S_COMPOSE_FILENAME", "docker-compose.yml") 16 | 17 | #: Remote dir to hold assets 18 | DIR_ASSETS = 
getenv("DOCKER0S_DIR_ASSETS", "assets") 19 | -------------------------------------------------------------------------------- /docs/Makefile: -------------------------------------------------------------------------------- 1 | # Minimal makefile for Sphinx documentation 2 | # 3 | 4 | # You can set these variables from the command line, and also 5 | # from the environment for the first two. 6 | SPHINXOPTS ?= 7 | SPHINXBUILD ?= sphinx-build 8 | SOURCEDIR = . 9 | BUILDDIR = _build 10 | 11 | # Put it first so that "make" without argument is like "make help". 12 | help: 13 | @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) 14 | 15 | .PHONY: help Makefile 16 | 17 | # Catch-all target: route all unknown targets to Sphinx using the new 18 | # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). 19 | %: Makefile 20 | @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) 21 | -------------------------------------------------------------------------------- /docs/changelog.rst: -------------------------------------------------------------------------------- 1 | ========= 2 | Changelog 3 | ========= 4 | 5 | Changes 6 | ======= 7 | 8 | 2.0.0 - 2022-01-10 9 | ------------------ 10 | 11 | Features: 12 | 13 | * Simplify path specification so everything is relative to the originating manifest 14 | * Rename ``MountedApp`` to ``RepoApp``, add ``repo`` and ``repo_compose`` arguments 15 | * Add ``d0s`` command alias 16 | * Add ``d0s use`` to set a default manifest 17 | * Add ``d0s status`` and ``d0s log`` to assist host management 18 | 19 | Bugfix: 20 | 21 | * Fix inherited path resolution to be relative to the originating manifest's path 22 | * Fix ``d0s exec`` 23 | 24 | 25 | 1.2.0 - 2022-11-17 26 | ------------------ 27 | 28 | Features: 29 | 30 | * Add Jinja2 template support for docker-compose.yml generation 31 | 32 | 33 | 1.1.0 - 2022-11-08 34 | ------------------ 35 | 36 | Features: 37 | 38 | * Add command support - ``docker0s cmd ...`` 39 | * Restore default ``extends`` 40 | * Standardise manifest filenames 41 | 42 | 43 | 1.0.1 - 2022-10-31 44 | ------------------ 45 | 46 | Bugfix: 47 | 48 | * Fix for entrypoint 49 | 50 | 51 | 1.0.0 - 2022-10-31 52 | ------------------ 53 | 54 | Initial release 55 | 56 | 57 | 58 | Roadmap 59 | ======= 60 | 61 | * Git hash pinning for improved security 62 | * Support for gitops through a repository monitoring mode 63 | 64 | -------------------------------------------------------------------------------- /docs/conf.py: -------------------------------------------------------------------------------- 1 | # Configuration file for the Sphinx documentation builder. 
2 | # 3 | # For the full list of built-in configuration values, see the documentation: 4 | # https://www.sphinx-doc.org/en/master/usage/configuration.html 5 | 6 | import os 7 | import sys 8 | 9 | import sphinx_radiac_theme # noqa 10 | 11 | 12 | # Make sure sphinx can find the source 13 | sys.path.insert(0, os.path.abspath("../")) 14 | sys.path.insert(0, os.path.abspath("../example/")) 15 | 16 | from setup import find_version 17 | 18 | 19 | # -- Project information ----------------------------------------------------- 20 | # https://www.sphinx-doc.org/en/master/usage/configuration.html#project-information 21 | 22 | project = "docker0s" 23 | copyright = "2022, Richard Terry" 24 | author = "Richard Terry" 25 | release = find_version("..", "docker0s", "__init__.py") 26 | 27 | # -- General configuration --------------------------------------------------- 28 | # https://www.sphinx-doc.org/en/master/usage/configuration.html#general-configuration 29 | 30 | extensions = [ 31 | "sphinx_radiac_theme", 32 | "sphinx.ext.autodoc", 33 | "sphinx_gitref", 34 | ] 35 | 36 | templates_path = ["_templates"] 37 | exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"] 38 | 39 | 40 | # -- Options for HTML output ------------------------------------------------- 41 | # https://www.sphinx-doc.org/en/master/usage/configuration.html#options-for-html-output 42 | 43 | html_theme = "sphinx_radiac_theme" 44 | html_static_path = ["_static"] 45 | 46 | html_theme_options = { 47 | "analytics_id": "G-NH3KEN9NBN", 48 | "logo_only": False, 49 | "display_version": True, 50 | # Toc options 51 | "collapse_navigation": True, 52 | "sticky_navigation": True, 53 | "navigation_depth": 4, 54 | "includehidden": True, 55 | "titles_only": False, 56 | # radiac.net theme 57 | "radiac_project_slug": "docker0s", 58 | "radiac_project_name": "docker0s", 59 | "radiac_subsite_links": [ 60 | # ("https://radiac.net/projects/django-fastview/demo/", "Demo"), 61 | ], 62 | } 63 | -------------------------------------------------------------------------------- /docs/index.rst: -------------------------------------------------------------------------------- 1 | Welcome to docker0s's documentation! 2 | ==================================== 3 | 4 | .. toctree:: 5 | :maxdepth: 2 6 | :caption: Contents: 7 | 8 | installation 9 | usage 10 | writing/index 11 | changelog 12 | 13 | 14 | Indices and tables 15 | ================== 16 | 17 | * :ref:`genindex` 18 | * :ref:`modindex` 19 | * :ref:`search` 20 | -------------------------------------------------------------------------------- /docs/installation.rst: -------------------------------------------------------------------------------- 1 | ============ 2 | Installation 3 | ============ 4 | 5 | Local 6 | ===== 7 | 8 | Using Python 3.10 or later, install using ``pip`` or ``pipx``:: 9 | 10 | pip install docker0s 11 | 12 | 13 | The local machine will need 14 | 15 | * ``git`` (optional) - required for git-based paths 16 | 17 | 18 | Manifest repository 19 | ------------------- 20 | 21 | For most projects we recommend the following structure to keep your app and host 22 | manifests separate to aid reusability: 23 | 24 | :: 25 | 26 | manifests/ 27 | apps/ 28 | app_name/ 29 | docker-compose.yml 30 | d0s-manifest.yml 31 | hosts/ 32 | host_name/ 33 | d0s-manifest.yml 34 | 35 | You do not have to follow this structure, and if you are just deploying off-the-shelf 36 | app manifests then you can configure it all from a single manifest file if you would 37 | prefer. 
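For example, with the layout above, the host manifest at
``hosts/host_name/d0s-manifest.py`` could extend the app manifest and define the
server to deploy to. This is an illustrative sketch only - the app, host and domain
names are placeholders; the attributes used here are described in the Writing
manifests documentation:

.. code-block:: python

    from docker0s.app import App
    from docker0s.host import Host

    class AppName(App):
        # Reuse the app manifest defined next to its docker-compose.yml;
        # by default the base manifest is expected to define an app of the same name
        extends = "../../apps/app_name/d0s-manifest.yml"
        env = {
            "DOMAIN": "example.com",
        }

    class MyServer(Host):
        name = "example.com"
        user = "deploy"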
38 | 39 | 40 | Host preparation 41 | ================ 42 | 43 | The host will need: 44 | 45 | * ``docker`` and ``docker-compose`` (or podman equivalent) 46 | * ``git`` (optional) - required for ``RepoApp`` apps 47 | * a user to deploy the apps under 48 | * appropriate firewall and security measures in place 49 | -------------------------------------------------------------------------------- /docs/requirements.in: -------------------------------------------------------------------------------- 1 | # Core build requirements 2 | sphinx 3 | sphinx-gitref 4 | -e git+https://github.com/radiac/sphinx_radiac_theme.git#egg=sphinx_radiac_theme 5 | 6 | # Optional 7 | sphinx-autobuild 8 | -------------------------------------------------------------------------------- /docs/requirements.txt: -------------------------------------------------------------------------------- 1 | # 2 | # This file is autogenerated by pip-compile with python 3.10 3 | # To update, run: 4 | # 5 | # pip-compile requirements.in 6 | # 7 | -e git+https://github.com/radiac/sphinx_radiac_theme.git#egg=sphinx_radiac_theme 8 | # via -r requirements.in 9 | alabaster==0.7.12 10 | # via sphinx 11 | babel==2.11.0 12 | # via sphinx 13 | certifi==2022.9.24 14 | # via requests 15 | charset-normalizer==2.1.1 16 | # via requests 17 | colorama==0.4.6 18 | # via sphinx-autobuild 19 | docutils==0.17.1 20 | # via 21 | # sphinx 22 | # sphinx-gitref 23 | # sphinx-radiac-theme 24 | idna==3.4 25 | # via requests 26 | imagesize==1.4.1 27 | # via sphinx 28 | jinja2==3.1.2 29 | # via sphinx 30 | livereload==2.6.3 31 | # via sphinx-autobuild 32 | markupsafe==2.1.1 33 | # via jinja2 34 | packaging==21.3 35 | # via sphinx 36 | pygments==2.13.0 37 | # via sphinx 38 | pyparsing==3.0.9 39 | # via packaging 40 | pytz==2022.6 41 | # via babel 42 | requests==2.28.1 43 | # via sphinx 44 | six==1.16.0 45 | # via livereload 46 | snowballstemmer==2.2.0 47 | # via sphinx 48 | sphinx==5.3.0 49 | # via 50 | # -r requirements.in 51 | # sphinx-autobuild 52 | # sphinx-gitref 53 | # sphinx-radiac-theme 54 | sphinx-autobuild==2021.3.14 55 | # via -r requirements.in 56 | sphinx-gitref==0.2.1 57 | # via -r requirements.in 58 | sphinxcontrib-applehelp==1.0.2 59 | # via sphinx 60 | sphinxcontrib-devhelp==1.0.2 61 | # via sphinx 62 | sphinxcontrib-htmlhelp==2.0.0 63 | # via sphinx 64 | sphinxcontrib-jsmath==1.0.1 65 | # via sphinx 66 | sphinxcontrib-qthelp==1.0.3 67 | # via sphinx 68 | sphinxcontrib-serializinghtml==1.1.5 69 | # via sphinx 70 | tornado==6.2 71 | # via livereload 72 | urllib3==1.26.12 73 | # via requests 74 | -------------------------------------------------------------------------------- /docs/usage.rst: -------------------------------------------------------------------------------- 1 | ===== 2 | Usage 3 | ===== 4 | 5 | Commands 6 | ======== 7 | 8 | Docker0s installs as ``docker0s`` and ``d0s`` for short 9 | 10 | ``docker0s ls`` 11 | List the available apps 12 | 13 | ``docker0s deploy [[.]]``: 14 | Deploy resources to the host 15 | 16 | ``docker0s up [[.]]``: 17 | Start all apps, a specific app, or a specific app's service 18 | 19 | ``docker0s down [[.]]``: 20 | Stop all apps, a specific app, or a specific app's service 21 | 22 | ``docker0s restart [[.]]``: 23 | Restart all apps, a specific app, or a specific app's service 24 | 25 | ``docker0s exec . 
``: 26 | Execute a command in the specific service 27 | 28 | ``docker0s status`` 29 | Show the status of the containers on the host 30 | 31 | ``docker0s logs .``: 32 | Show host logs for the specified service 33 | 34 | ``docker0s cmd [ ...]`` 35 | Execute a local App command 36 | 37 | ``docker0s use [] [--alias=]`` 38 | Set or unset the default host manifest by either path or an alias. 39 | 40 | ``docker0s use --list`` 41 | List aliases. 42 | 43 | 44 | Options: 45 | 46 | ``--manifest=``, ``-m ``: 47 | Specify the manifest for this command. Overrides the default manifest. 48 | 49 | 50 | Specifying the manifest 51 | ----------------------- 52 | 53 | The host manifest can be set using ``d0s use`` - for example:: 54 | 55 | # Use foo.yml in the current dir and create an alias 56 | d0s use foo.yml --alias=foo 57 | 58 | # Swap tp to bar.yml 59 | d0s use bar.yml 60 | 61 | # Swap back to foo using the alias 62 | d0s use foo 63 | 64 | # Stop using a default 65 | d0s use 66 | 67 | # Clear the foo alias 68 | d0s use --alias=foo 69 | 70 | This is saved to the docker0s user config, so will take effect across all active shell 71 | sessions, and will persist across sessions and reboots. In this way it is somewhat 72 | similar to ``kubectl config use-context``. 73 | 74 | The config stores full paths, so aliases can be used to jump between manifests without 75 | needing to specify the full path. 76 | 77 | The manifest can also be set for each command with the ``--manifest`` option:: 78 | 79 | $ d0s --manifest=baz.yml ls 80 | 81 | If no manifest is specified, docker0s looks in the current directory for 82 | ``d0s-manifest.py`` then ``d0s-manifest.yml``. 83 | 84 | 85 | Deployment 86 | ========== 87 | 88 | Docker0s will deploy projects to your host using the following directory structure:: 89 | 90 | /home/user/ 91 | apps/ 92 | app_name/ 93 | store/ 94 | docker-compose.yml 95 | env 96 | repo_app_with_store/ 97 | repo/ 98 | docker-compose.docker0s.yml 99 | store/ 100 | env 101 | 102 | 103 | Security considerations 104 | ======================= 105 | 106 | You must always trust your manifest sources - remember that manifests can be arbitrary 107 | Python code which is executed locally, and it has full shell access to your host. 108 | 109 | For this reason we recommend you perform a full audit of any third-party manifests to 110 | understand exactly what they are doing, and that if you extend manifests using ``git+`` 111 | URLs that you pin them to a specific commit. 112 | -------------------------------------------------------------------------------- /docs/writing/apps.rst: -------------------------------------------------------------------------------- 1 | ============ 2 | Writing apps 3 | ============ 4 | 5 | App types 6 | ========= 7 | 8 | ``docker0s.apps.App`` 9 | --------------------- 10 | 11 | A project with a docker-compose ready for use in production. 12 | 13 | Unless otherwise specified, all paths are relative to the manifest where they are 14 | defined. 15 | 16 | Attributes: 17 | 18 | ``extends`` 19 | Path to a base docker0s manifest for this app. 
20 | 21 | A base manifest: 22 | 23 | * uses the same syntax 24 | * can define multiple apps 25 | * can reference further base manifests 26 | * must not define a host 27 | 28 | A path can be a relative (to the current manifest) or absolute path: 29 | 30 | * ``path/to/d0s-manifest.yml`` 31 | * ``/path/to/dir/containing/a/manifest/`` 32 | 33 | It will look for an app with the same name by default; you can specify a different 34 | name with ``::``, eg: 35 | 36 | * ``path/to/d0s-manifest.yml::AppName`` 37 | 38 | It can also be a git URL in the format ``git+ssh://host:repo@commit#path::name``, or 39 | ``git+https://host/repo@commit#path::name``, where commit, path and name are optional, eg: 40 | 41 | * ``git+ssh://git@github.com:radiac/docker0s-manifests@main#traefik`` 42 | * ``git+https://github.com/radiac/docker0s-manifests@v1.0#traefik/d0s-manifest.yml`` 43 | * ``git+ssh://git@github.com:radiac/example.com`` 44 | 45 | For security, when using a remote manifest from a third party git repository, we 46 | recommend performing a full audit of what you are going to deploy, and then pinning to 47 | that specific commit. 48 | 49 | Default: ``d0s-manifest.py``, then ``d0s-manifest.yml`` (first found) 50 | 51 | ``compose`` 52 | Path to the app's docker compose file. 53 | 54 | This can be a YAML file (``.yml``, ``.yaml``), or a Jinja2 template (``.j2``, 55 | ``.jinja2``). See "Compose templates" below for more details of template rendering. 56 | 57 | Default: tries the following in order, uses first found: ``docker-compose.j2``, 58 | ``docker-compose.jinja2``, ``docker-compose.yml``, ``docker-compose.yaml`` 59 | 60 | ``assets``: 61 | Path or list of paths to assets which should be uploaded into an ``assets`` dir next 62 | to the docker-compose. 63 | 64 | ``env_file`` 65 | Path or list of paths to files containing environment variables for docker-compose. 66 | See "Environment variables" below for details. 67 | 68 | ``env`` 69 | Key-value pairs of environment variables for docker-compose. 70 | See "Environment variables" below for details. 71 | 72 | ``compose_context`` 73 | Key-value pairs of template variables to render a Jinja2 ``compose`` template. 74 | See "Compose templates" below for details. 75 | 76 | Example YAML: 77 | 78 | .. code-block:: yaml 79 | 80 | apps: 81 | website: 82 | extends: "git+ssh://git@github.com:radiac/example.com.git" 83 | compose: "docker-compose.live.yml" 84 | env_file: 85 | - base.env 86 | - website.env 87 | env: 88 | deployment=www.example.com 89 | 90 | 91 | ``docker0s.apps.RepoApp`` 92 | ------------------------- 93 | 94 | A project which requires the repository to be cloned on the host and mounted into 95 | the service. 96 | 97 | Takes the same arguments as an ``App``, with the following differences: 98 | 99 | ``repo`` 100 | A ``git+`` URL to the repository and branch/commit to deploy to the server. 101 | 102 | ``repo_compose`` 103 | Relative path to the compose file within the repository. 104 | 105 | If this path exists in the repo, Docker0s will overwrite it on the server. 106 | 107 | 108 | Recommended configuration: 109 | 110 | #. In the root of your repository, create a ``docker-compose.yml`` or 111 | ``docker-compose.j2`` 112 | #. Still in the root, create an app manifest - ``d0s-manifest.yml`` or 113 | ``d0s-manifest.py`` 114 | #. 
Add ``docker-compose.docker0s.yml`` to your ``.gitignore`` 115 | 116 | The ``RepoApp.compose`` will default to find the ``docker-compose.yml`` or ``.j2`` file, 117 | and will write the production compose to ``docker-compose.docker0s.yml`` so that any 118 | relative paths in the compose file will still resolve. 119 | 120 | If you place the manifest or compose at a different location, you will need to set 121 | ``compose`` and ``repo_compose`` accordingly. 122 | 123 | Example YAML: 124 | 125 | .. code-block:: yaml 126 | 127 | apps: 128 | website: 129 | type: RepoApp 130 | repo: "git+ssh://git@github.com:radiac/example.com.git@main" 131 | compose: docker/docker-compose.live.j2 132 | repo_compose: docker/docker-compose.live.yml 133 | 134 | 135 | .. _app_naming: 136 | 137 | App naming 138 | ========== 139 | 140 | Because apps are referenced by name in Python, YAML and on the command line, docker0s 141 | supports apps names in ``PascalCase``, ``camelCase``, ``snake_case`` and ``kebab-case`` 142 | in YAML and the command line. Python classes must always use ``PascalCase``: 143 | 144 | .. code-block:: python 145 | 146 | class WebsiteExampleCom(App): 147 | path = "../website" 148 | 149 | YAML can use any - these four app definitions are equivalent (so would raise an error): 150 | 151 | .. code-block:: yaml 152 | 153 | apps: 154 | website_example_com: 155 | path: ../website 156 | website-example-com: 157 | path: ../website 158 | websiteExampleCom: 159 | path: ../website 160 | WebsiteExampleCom: 161 | path: ../website 162 | 163 | .. _app_env: 164 | 165 | Environment variables 166 | ===================== 167 | 168 | Environment variables for the docker-compose can be defined as one or more env files, as 169 | a dict within the manifest, or both. 170 | 171 | If more than one ``env_file`` is specified, files are loaded in order. If a key appears 172 | in more than one file, the last value loaded will be used. 173 | 174 | If a key appears in both the ``env`` dict and an ``env_file``, the value in this field 175 | will be used. 176 | 177 | Environment variables are evaluated before inheritance, meaning an env file key in a 178 | child manifest can override an env dict key in a parent. Precedence order, with winner 179 | first: 180 | 181 | #. Child env dict 182 | #. Child env file 183 | #. Parent env dict 184 | #. Parent env file 185 | 186 | Environment variables are merged and written to an env file on the server for 187 | docker-compose to use. 188 | 189 | Environment variables can be used in your ``docker-compose.yml`` as normal, for example: 190 | 191 | .. code-block:: yaml 192 | 193 | services: 194 | my_service: 195 | environment: 196 | domain: "${hostname}" 197 | 198 | Docker0s provides some environment variables by default - for more information see 199 | :ref:`compose_env`. -------------------------------------------------------------------------------- /docs/writing/compose.rst: -------------------------------------------------------------------------------- 1 | ==================== 2 | Docker compose files 3 | ==================== 4 | 5 | .. _compose_env: 6 | 7 | Environment variables 8 | ===================== 9 | 10 | You can pass your own environment variables into your docker-compose file - for more 11 | details see :ref:`app_env`. 
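As a brief illustrative sketch (the app and variable names here are placeholders), an
app manifest might define its own variables like this:

.. code-block:: python

    from docker0s.app import App

    class Website(App):
        env_file = "website.env"             # values loaded from a file first
        env = {"DOMAIN": "example.com"}      # the env dict takes precedence

Docker0s merges these into the env file it deploys, so the compose file can then use
``${DOMAIN}`` with normal docker-compose variable substitution.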
12 | 13 | A standard docker0s app provides the following environment variables: 14 | 15 | ``ENV_FILE`` 16 | Path to the combined env file on the host 17 | 18 | ``ASSETS_PATH`` 19 | Path to the assets dir on the host 20 | 21 | Assets are resources pushed to the server as part of the docker0s deployment - config 22 | files, scripts, media etc. 23 | 24 | ``STORE_PATH`` 25 | Path to the store dir on the host 26 | 27 | The store is for files created by the containers - logs, databases, uploads etc. 28 | 29 | 30 | Example usage in your ``docker-compose.yml``: 31 | 32 | .. code-block:: yaml 33 | 34 | services: 35 | postgres: 36 | image: postgres:latest 37 | restart: unless-stopped 38 | env_file: "${ENV_FILE}" 39 | volumes: 40 | - "${STORE_PATH}/db:/db" 41 | - "${ASSET_PATH}/scripts:/scripts" 42 | 43 | 44 | Compose templates 45 | ================= 46 | 47 | If the docker-compose file ends in a ``.jinja2`` extension, docker0s will treat it as a 48 | Jinja2 template. See the `Jinja documentation `_ 49 | for details of the template syntax. 50 | 51 | The template will be able to reference other documents relative to it, regardless of 52 | whether it is a local file or a remote file on a ``git+...`` url. 53 | 54 | The template is rendered with the context dict provided in ``compose_context``, plus the 55 | following values: 56 | 57 | ``host`` 58 | A reference to the instantiated Host object. 59 | 60 | Example usage in a template: 61 | 62 | .. code-block:: yaml 63 | 64 | services: 65 | my_service: 66 | environment: 67 | domain: {{ host.name }} 68 | 69 | 70 | ``env`` 71 | A reference to the fully resolved environment variables that will be sent to the 72 | server. It is recommended to prefer environment variable substitution (eg 73 | ``${env_var}``) as it allows more flexibility when working on the server in the 74 | future, but the ``env`` context variable can be useful for conditional statements. 75 | 76 | Example usage in a template: 77 | 78 | .. code-block:: yaml 79 | 80 | services: 81 | my_service: 82 | environment: 83 | {% if env.domain %} 84 | domain: ${domain} 85 | {% endif %} 86 | 87 | 88 | ``apps`` 89 | A reference to the compose template contexts of other apps in the current manifest. 90 | Note that this includes ``env`` and the other context variables mentioned here. 91 | 92 | App names are normalised, so can be specified as described in :ref:`app_naming`, eg 93 | ``apps.MyApp``, ``apps.my_app`` etc 94 | 95 | Example usage in a template: 96 | 97 | .. code-block:: yaml 98 | 99 | services: 100 | my_service: 101 | {% if smtp_relay in apps %} 102 | networks: 103 | - {{ apps.smtp_relay.network }} 104 | {% endif %} 105 | 106 | 107 | ``docker0s``, ``globals`` 108 | Reserved for future use. 109 | 110 | Take care not to use these variables in your own ``compose_context``. 111 | -------------------------------------------------------------------------------- /docs/writing/host.rst: -------------------------------------------------------------------------------- 1 | =============== 2 | Defining a host 3 | =============== 4 | 5 | A manifest can define one host. A manifest which defines a host cannot be used in 6 | ``extends``. 7 | 8 | A host definition has the following attributes: 9 | 10 | ``name`` 11 | The IP or hostname of the server. 12 | 13 | ``port`` 14 | The SSH port on the server. 
15 | 16 | Default: ``22`` 17 | 18 | ``user`` 19 | Username for login 20 | 21 | ``home`` 22 | Home dir for user 23 | 24 | Default: ``/home/{user}/``, where ``{user}`` is replaced by the username defined in 25 | the ``user`` attribute. 26 | 27 | ``root_path`` 28 | Path to docker0s working dir on the server 29 | 30 | Should be absolute or relative to the connecting user's home directory, but do not use 31 | tildes. 32 | 33 | ``compose_command`` 34 | Docker compose command. 35 | 36 | Default: ``docker-compose`` 37 | 38 | 39 | 40 | Example YAML: 41 | 42 | .. code-block:: yaml 43 | 44 | host: 45 | name: example.com 46 | port: 2222 47 | user: example 48 | root_path: /var/docker0s 49 | compose_command: "docker compose" 50 | -------------------------------------------------------------------------------- /docs/writing/index.rst: -------------------------------------------------------------------------------- 1 | ================= 2 | Writing manifests 3 | ================= 4 | 5 | A manifest file defines a list of more or apps which will be deployed to one host. 6 | 7 | You can put everything in a single manifest, but usually you will define a generic *app 8 | manifest* next to a ``docker-compose.yml``, and then extend it in a *host manifest* 9 | where you set environment variables specific to your host. The syntax of both is the 10 | same, but an app manifest only defines apps, whereas a host manifest defines both apps 11 | and the host to deploy it to. 12 | 13 | See `docker0s-manifests `_ for a 14 | collection of app manifests and examples for how to use them with your host manifest. 15 | 16 | 17 | 18 | .. toctree:: 19 | :maxdepth: 2 20 | :caption: Contents: 21 | 22 | yaml 23 | python 24 | apps 25 | host 26 | compose 27 | -------------------------------------------------------------------------------- /docs/writing/python.rst: -------------------------------------------------------------------------------- 1 | ================ 2 | Python manifests 3 | ================ 4 | 5 | .. code-block:: python 6 | 7 | from docker0s import RepoApp 8 | 9 | class Website(RepoApp): 10 | # Clone a repo to the host and look for docker-compose.yml in there 11 | repo = "git+ssh://git@github.com:radiac/example.com.git@main" 12 | env = { 13 | "DOMAIN": "example.radiac.net" 14 | } 15 | 16 | # Subclass operation methods to add your own logic 17 | def deploy(self): 18 | # Perform action before deployment, eg clean up any previous deployment 19 | super().deploy() 20 | # Perform action after deployment, eg push additional resources 21 | 22 | 23 | def up(self, *services): 24 | # Perform action before ``up``, eg report to a log 25 | super().up(*services) 26 | # Perform action after ``up``, eg wait and perform a test 27 | 28 | @App.command 29 | def say_hello(self, name): 30 | print(f"Hello {name}, this runs locally") 31 | self.host.exec("echo And {name}, this is on the host", args={'name': name}) 32 | 33 | 34 | class Vagrant(Host): 35 | name = "vagrant" 36 | 37 | 38 | 39 | App commands 40 | ------------ 41 | 42 | Python App definitions can declare local commands - usually either utility functions to 43 | assist with manifest definition, such as a password encoder, or to use fabric to perform 44 | operations on the host, such as tailing docker logs. 45 | 46 | To define an app, decorate it with ``App.command``: 47 | 48 | .. 
code-block:: python 49 | 50 | class Website(App): 51 | @App.command 52 | def say_hello(self, name): 53 | print(f"Hello {name}, this runs locally") 54 | self.host.exec("echo And {name}, this is on the host", args={'name': name}) 55 | 56 | 57 | This can then be called as: 58 | 59 | .. code-block:: bash 60 | 61 | ./docker0s cmd website say_hello person 62 | 63 | Commands currently do not have any support for validation or typing of arguments. 64 | -------------------------------------------------------------------------------- /docs/writing/yaml.rst: -------------------------------------------------------------------------------- 1 | ============== 2 | YAML manifests 3 | ============== 4 | 5 | A YAML manifest file has two sections: 6 | 7 | ``apps``: 8 | The list of app definitions. 9 | 10 | Each app starts with its identifier. This is used as its namespace for 11 | docker-compose. 12 | 13 | Under the identifier you can declare the type of app with ``type``; if not specified 14 | it will default to ``type: App``. See "App Types" for more details and additional 15 | arguments for the app definition. 16 | 17 | An app can also specify environment variables to pass to docker-compose, by setting 18 | ``env`` with a file path, a list of files, or key/value pairs. 19 | 20 | ``host``: 21 | The host definition. 22 | 23 | There can be only one per manifest. Manifests which define a host cannot be used as a 24 | base manifest (see ``extends`` attribute). 25 | 26 | 27 | For example: 28 | 29 | .. code-block:: yaml 30 | 31 | apps: 32 | traefik: 33 | extends: git+https://github.com/radiac/docker0s-manifests.git@main#traefik 34 | env_file: traefik.env 35 | storage: 36 | extends: ../apps/storage 37 | website: 38 | type: RepoApp 39 | extends: "git+ssh://git@github.com:radiac/example.com.git@main" 40 | env: 41 | DOMAIN: example.radiac.net 42 | host: 43 | name: example.radiac.net 44 | 45 | -------------------------------------------------------------------------------- /requirements.in: -------------------------------------------------------------------------------- 1 | pip-tools 2 | 3 | click 4 | fabric 5 | jinja2 6 | python-dotenv 7 | pyyaml 8 | -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | # 2 | # This file is autogenerated by pip-compile with python 3.10 3 | # To update, run: 4 | # 5 | # pip-compile 6 | # 7 | bcrypt==4.0.0 8 | # via paramiko 9 | build==0.8.0 10 | # via pip-tools 11 | cffi==1.15.1 12 | # via 13 | # cryptography 14 | # pynacl 15 | click==8.1.3 16 | # via 17 | # -r requirements.in 18 | # pip-tools 19 | cryptography==38.0.0 20 | # via paramiko 21 | fabric==2.7.1 22 | # via -r requirements.in 23 | invoke==1.7.1 24 | # via fabric 25 | jinja2==3.1.2 26 | # via -r requirements.in 27 | markupsafe==2.1.1 28 | # via jinja2 29 | packaging==21.3 30 | # via build 31 | paramiko==2.11.0 32 | # via fabric 33 | pathlib2==2.3.7.post1 34 | # via fabric 35 | pep517==0.13.0 36 | # via build 37 | pip-tools==6.8.0 38 | # via -r requirements.in 39 | pycparser==2.21 40 | # via cffi 41 | pynacl==1.5.0 42 | # via paramiko 43 | pyparsing==3.0.9 44 | # via packaging 45 | python-dotenv==0.21.0 46 | # via -r requirements.in 47 | pyyaml==6.0 48 | # via -r requirements.in 49 | six==1.16.0 50 | # via 51 | # paramiko 52 | # pathlib2 53 | tomli==2.0.1 54 | # via 55 | # build 56 | # pep517 57 | wheel==0.37.1 58 | # via pip-tools 59 | 60 | # The following packages are considered to be unsafe in 
a requirements file: 61 | # pip 62 | # setuptools 63 | -------------------------------------------------------------------------------- /setup.cfg: -------------------------------------------------------------------------------- 1 | [metadata] 2 | name = docker0s 3 | description = Use docker-compose to manage multiple apps on a single host 4 | long_description = file: README.rst 5 | keywords = docker containers deployment 6 | author = Richard Terry 7 | author_email = code@radiac.net 8 | license = BSD 9 | classifiers = 10 | Development Status :: 3 - Alpha 11 | Environment :: Web Environment 12 | Intended Audience :: Developers 13 | License :: OSI Approved :: BSD License 14 | Operating System :: OS Independent 15 | Topic :: Internet 16 | Programming Language :: Python 17 | Programming Language :: Python :: 3 18 | Programming Language :: Python :: 3 :: Only 19 | Programming Language :: Python :: 3.10 20 | url = https://radiac.net/projects/docker0s/ 21 | project_urls = 22 | Documentation = https://docker0s.readthedocs.io/ 23 | Source = https://github.com/radiac/docker0s 24 | Tracker = https://github.com/radiac/docker0s/issues 25 | 26 | [options] 27 | python_requires = >=3.10 28 | packages = find: 29 | include_package_data = true 30 | zip_safe = false 31 | install_requires = 32 | click 33 | fabric 34 | python-dotenv 35 | pyyaml 36 | 37 | [options.packages.find] 38 | exclude = tests* 39 | 40 | [options.entry_points] 41 | console_scripts = 42 | docker0s = docker0s.commands:invoke 43 | d0s = docker0s.commands:invoke 44 | 45 | [tool:pytest] 46 | addopts = --black --mypy --cov=docker0s --cov-report=term --cov-report=html 47 | pythonpath = . 48 | 49 | [coverage:run] 50 | parallel=True 51 | 52 | [flake8] 53 | ignore = E123,E128,E203,E501,W503 54 | max-line-length = 88 55 | exclude = .tox,.git 56 | 57 | [isort] 58 | multi_line_output = 3 59 | line_length = 88 60 | known_first_party = docker0s 61 | sections = FUTURE,STDLIB,THIRDPARTY,FIRSTPARTY,LOCALFOLDER 62 | include_trailing_comma = True 63 | lines_after_imports = 2 64 | skip = .tox,.git 65 | 66 | [mypy] 67 | ignore_missing_imports = True 68 | 69 | [doc8] 70 | max-line-length = 88 71 | ignore-path = *.txt,.tox 72 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | import re 2 | import sys 3 | from pathlib import Path 4 | 5 | from setuptools import setup 6 | 7 | 8 | def find_version(*paths): 9 | path = Path(*paths) 10 | content = path.read_text() 11 | match = re.search(r"^__version__\s*=\s*['\"]([^'\"]*)['\"]", content, re.M) 12 | if match: 13 | return match.group(1) 14 | raise RuntimeError("Unable to find version string.") 15 | 16 | 17 | # Setup unless this is being imported by Sphinx, which just wants find_version 18 | if "sphinx" not in sys.modules: 19 | setup(version=find_version("docker0s", "__init__.py")) 20 | -------------------------------------------------------------------------------- /tests/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/radiac/docker0s/fbf0d94d942d0e821f4d37caaa8f7e603183d943/tests/__init__.py -------------------------------------------------------------------------------- /tests/app/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/radiac/docker0s/fbf0d94d942d0e821f4d37caaa8f7e603183d943/tests/app/__init__.py 
-------------------------------------------------------------------------------- /tests/app/test_app.py: -------------------------------------------------------------------------------- 1 | """ 2 | Test docker0s.app.app.App 3 | """ 4 | from pathlib import Path 5 | 6 | import pytest 7 | 8 | from docker0s.app.app import App 9 | 10 | 11 | @pytest.fixture 12 | def compose_path_yml(): 13 | return Path(__file__).parent.parent / "data" / "docker-compose.yml" 14 | 15 | 16 | @pytest.fixture 17 | def app(host, compose_path_yml): 18 | """ 19 | A sample App instance 20 | 21 | Path is taken from module name: tests/app/ 22 | """ 23 | return App.from_dict( 24 | name="SampleApp", 25 | path=Path(__file__).parent, 26 | module="tests.app.test_app", 27 | data={"compose": str(compose_path_yml)}, 28 | )(host) 29 | 30 | 31 | def test_app_is_abstract(): 32 | assert App.abstract is True 33 | 34 | 35 | def test_app_subclass_is_concrete(): 36 | class TestApp(App): 37 | pass 38 | 39 | assert TestApp.abstract is False 40 | 41 | 42 | def test_mocked_app__deploy(mock_fabric, app, compose_path_yml): 43 | with mock_fabric() as mocked: 44 | app.deploy() 45 | 46 | assert mocked.flat_stack == [ 47 | ("run", "mkdir -p /home/user/apps/sample_app", None), 48 | ( 49 | "put", 50 | mocked.StringIO(compose_path_yml.read_text()), 51 | "/home/user/apps/sample_app/docker-compose.yml", 52 | ), 53 | ( 54 | "put", 55 | mocked.StringIO( 56 | 'COMPOSE_PROJECT_NAME="sample_app"\n' 57 | 'ENV_FILE="/home/user/apps/sample_app/env"\n' 58 | 'ASSETS_PATH="/home/user/apps/sample_app/assets"\n' 59 | 'STORE_PATH="/home/user/apps/sample_app/store"' 60 | ), 61 | "/home/user/apps/sample_app/env", 62 | ), 63 | ] 64 | -------------------------------------------------------------------------------- /tests/app/test_base_contexts.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | from docker0s.app.base import AppsTemplateContext, BaseApp, EnvTemplateContext 4 | 5 | 6 | @pytest.fixture 7 | def contexts(mk_manifest): 8 | class TestApp1(BaseApp): 9 | compose_context = { 10 | "foo1": "bar1", 11 | } 12 | env = { 13 | "baz1": "qux1", 14 | } 15 | 16 | class TestApp2(BaseApp): 17 | compose_context = { 18 | "foo2": "bar2", 19 | } 20 | env = { 21 | "baz2": "qux2", 22 | } 23 | 24 | # Create and sanity check 25 | manifest = mk_manifest(TestApp1, TestApp2) 26 | apps = manifest.init_apps() 27 | assert len(apps) == 2 28 | app1, app2 = apps 29 | assert isinstance(app1, TestApp1) 30 | assert isinstance(app2, TestApp2) 31 | return [app.get_compose_context() for app in apps] 32 | 33 | 34 | def test_apps_template_context__get__returns_context(contexts): 35 | context1, context2 = contexts 36 | assert isinstance(context1["apps"], AppsTemplateContext) 37 | assert context1["apps"].get("TestApp2")["foo2"] == "bar2" 38 | assert context1["apps"]["TestApp2"]["foo2"] == "bar2" 39 | assert context1["apps"].TestApp2["foo2"] == "bar2" 40 | assert context1["apps"].test_app2["foo2"] == "bar2" 41 | assert context1["apps"].testApp2["foo2"] == "bar2" 42 | 43 | 44 | def test_env_template_context__get__returns_env_val(contexts): 45 | context1, context2 = contexts 46 | assert isinstance(context1["env"], EnvTemplateContext) 47 | assert isinstance(context1["apps"].TestApp2["env"], EnvTemplateContext) 48 | assert context1["env"]["baz1"] == "qux1" 49 | assert context1["apps"].TestApp2["env"]["baz2"] == "qux2" 50 | 51 | 52 | def test_apps_template_context__in__bool_correct(contexts): 53 | context1, context2 = contexts 54 | assert "TestApp1" 
in context1["apps"] 55 | assert "test_app2" in context1["apps"] 56 | 57 | 58 | def test_apps_template_env__in__bool_correct(contexts): 59 | context1, context2 = contexts 60 | assert "baz1" in context1["env"] 61 | assert "baz2" in context1["apps"].TestApp2["env"] 62 | -------------------------------------------------------------------------------- /tests/app/test_base_def.py: -------------------------------------------------------------------------------- 1 | """ 2 | Test the core functionality of app definitions - the classmethods which manage how an 3 | app collects and processes its attributes. 4 | """ 5 | 6 | from pathlib import Path 7 | from unittest.mock import Mock 8 | 9 | from docker0s.app import BaseApp 10 | 11 | 12 | manifest_dir = Path(__file__).parent 13 | 14 | 15 | def test_baseapp__is_abstract(): 16 | assert BaseApp.abstract is True 17 | 18 | 19 | def test_baseapp__subclass_is_concrete(): 20 | class TestApp(BaseApp): 21 | pass 22 | 23 | assert TestApp.abstract is False 24 | 25 | 26 | def test_baseapp__get_name(): 27 | class TestApp(BaseApp): 28 | pass 29 | 30 | assert TestApp.get_name() == "TestApp" 31 | 32 | 33 | def test_baseapp__manifest_path_detected(): 34 | class TestApp(BaseApp): 35 | pass 36 | 37 | assert TestApp._file == Path(__file__) 38 | 39 | 40 | def test_baseapp__manifest_dir_dectected(): 41 | class TestApp(BaseApp): 42 | pass 43 | 44 | assert TestApp._dir == Path(__file__).parent 45 | 46 | 47 | def test_apply_base_manifest__no_extends__no_base_loaded(monkeypatch): 48 | class TestApp(BaseApp): 49 | pass 50 | 51 | # Should return from initial ``if not cls.extends`` and shouldn't ``get_manifest`` 52 | mock_get_manifest = Mock() 53 | monkeypatch.setattr("docker0s.path.ExtendsPath.get_manifest", mock_get_manifest) 54 | 55 | TestApp.apply_base_manifest() 56 | mock_get_manifest.assert_not_called() 57 | 58 | 59 | def test_apply_base_manifest__extends__merges_base_classes(tmp_path): 60 | """ 61 | TestApp extends first.py::TestApp, which extends second.py::TestApp 62 | """ 63 | 64 | class TestApp(BaseApp): 65 | extends = "../data/extends_base_first.py" 66 | 67 | # Apply ``extends`` 68 | print( 69 | "TESTAPP_DIR", 70 | TestApp._file, 71 | TestApp._dir, 72 | (TestApp._dir / "../data/extends_base_first.py").resolve(), 73 | ) 74 | TestApp.apply_base_manifest() 75 | 76 | # Should have first as the first base 77 | assert len(TestApp.__bases__) == 2 78 | assert TestApp.__bases__[0].test_id == "first" # type: ignore 79 | assert TestApp.__bases__[1] is BaseApp 80 | 81 | # First should have second as first base 82 | assert len(TestApp.__bases__[0].__bases__) == 2 83 | assert TestApp.__bases__[0].__bases__[0].test_id == "second" # type: ignore 84 | assert TestApp.__bases__[0].__bases__[1] is BaseApp 85 | 86 | # And inheritance order should give these values 87 | assert TestApp.compose == "first" 88 | assert TestApp.env_file == "second" 89 | 90 | 91 | def test_env__dict_only__returned(): 92 | env_data: dict[str, str | int] = { 93 | "key1": "value1", 94 | "key2": "value2", 95 | } 96 | 97 | class TestApp(BaseApp): 98 | env = env_data 99 | set_project_name = False 100 | 101 | assert TestApp.get_env_data() == env_data 102 | 103 | 104 | def test_env__file_only__loaded(): 105 | class TestApp(BaseApp): 106 | env_file = "../data/first.env" 107 | set_project_name = False 108 | 109 | assert TestApp.get_env_data() == { 110 | "key1": "first1", 111 | "key2": "first2", 112 | "key3": "first3", 113 | } 114 | 115 | 116 | def test_env__two_files__merged_in_order(): 117 | class TestApp(BaseApp): 118 | 
env_file = ["../data/first.env", "../data/second.env"] 119 | set_project_name = False 120 | 121 | assert TestApp.get_env_data() == { 122 | "key1": "second1", 123 | "key2": "first2", 124 | "key3": "first3", 125 | "key4": "second4", 126 | } 127 | 128 | 129 | def test_env__two_files_and_dict__merged_in_order(): 130 | class TestApp(BaseApp): 131 | env_file = ["../data/first.env", "../data/second.env"] 132 | env = { 133 | "key3": "data3", 134 | "key5": "data5", 135 | } 136 | set_project_name = False 137 | 138 | assert TestApp.get_env_data() == { 139 | "key1": "second1", 140 | "key2": "first2", 141 | "key3": "data3", 142 | "key4": "second4", 143 | "key5": "data5", 144 | } 145 | 146 | 147 | def test_env__data_and_set_project_name__merged_in_order_with_project_name(): 148 | class TestApp(BaseApp): 149 | env_file = ["../data/first.env", "../data/second.env"] 150 | env = { 151 | "key3": "data3", 152 | "key5": "data5", 153 | } 154 | 155 | assert TestApp.get_env_data() == { 156 | "COMPOSE_PROJECT_NAME": "test_app", 157 | "key1": "second1", 158 | "key2": "first2", 159 | "key3": "data3", 160 | "key4": "second4", 161 | "key5": "data5", 162 | } 163 | 164 | 165 | def test_env__two_files_and_two_dicts_inherited_and_set_project_name__merged_in_order(): 166 | class ParentApp(BaseApp): 167 | env_file = "../data/first.env" 168 | env = { 169 | "key1": "parent1", 170 | "key3": "parent3", 171 | "key5": "parent5", 172 | } 173 | 174 | class ChildApp(ParentApp): 175 | env_file = "../data/second.env" 176 | env = {"key3": "child3"} 177 | 178 | # Value first comes from parent file, second comes from child file 179 | # Child env_file overrides parent env 180 | assert ChildApp.get_env_data() == { 181 | "COMPOSE_PROJECT_NAME": "child_app", 182 | "key1": "second1", 183 | "key2": "first2", 184 | "key3": "child3", 185 | "key4": "second4", 186 | "key5": "parent5", 187 | } 188 | -------------------------------------------------------------------------------- /tests/app/test_base_ops.py: -------------------------------------------------------------------------------- 1 | """ 2 | Test the operations of app definitions - the methods which deploy and call 3 | docker-compose on the host 4 | """ 5 | 6 | from pathlib import Path, PosixPath 7 | 8 | import pytest 9 | 10 | from docker0s.app.base import AppsTemplateContext, BaseApp, EnvTemplateContext 11 | 12 | from ..constants import HOST_NAME 13 | 14 | 15 | @pytest.fixture 16 | def base_app(host): 17 | """ 18 | A sample BaseApp instance 19 | 20 | Path is taken from module name: tests/app/ 21 | """ 22 | return BaseApp.from_dict( 23 | name="SampleApp", 24 | path=Path(__file__).parent, 25 | module="tests.app.test_base_ops", 26 | data=dict( 27 | compose="data/docker-compose.yml", 28 | ), 29 | )(host) 30 | 31 | 32 | def test_app__remote_path(base_app): 33 | assert base_app.remote_path == PosixPath("/home/user/apps/sample_app") 34 | 35 | 36 | def test_app__remote_compose(base_app): 37 | assert base_app.remote_compose == PosixPath( 38 | "/home/user/apps/sample_app/docker-compose.yml" 39 | ) 40 | 41 | 42 | def test_app__remote_env(base_app): 43 | assert base_app.remote_env == PosixPath("/home/user/apps/sample_app/env") 44 | 45 | 46 | def test_mocked_app__deploy(mock_fabric, base_app): 47 | with mock_fabric() as mocked: 48 | base_app.deploy() 49 | 50 | assert mocked.flat_stack == [ 51 | ("run", "mkdir -p /home/user/apps/sample_app", None), 52 | ( 53 | "put", 54 | mocked.StringIO( 55 | 'version: "3.8"\n' 56 | "services:\n" 57 | " service1:\n" 58 | " image: service1\n" 59 | " service2:\n" 60 | " 
image: service2\n" 61 | ), 62 | "/home/user/apps/sample_app/docker-compose.yml", 63 | ), 64 | ( 65 | "put", 66 | mocked.StringIO( 67 | 'COMPOSE_PROJECT_NAME="sample_app"\n' 68 | 'ENV_FILE="/home/user/apps/sample_app/env"\n' 69 | 'ASSETS_PATH="/home/user/apps/sample_app/assets"\n' 70 | 'STORE_PATH="/home/user/apps/sample_app/store"' 71 | ), 72 | "/home/user/apps/sample_app/env", 73 | ), 74 | ] 75 | 76 | 77 | def test_compose_content__template_with_context__renders(mk_manifest, tmp_path): 78 | template_path = tmp_path / "docker-compose.jinja2" 79 | template_path.write_text( 80 | """version: "3.8" 81 | services: 82 | {% if service1 %} 83 | service1: 84 | image: service1 85 | {% endif %} 86 | service2: 87 | image: service2 88 | """ 89 | ) 90 | 91 | class TestApp(BaseApp): 92 | compose = str(template_path) 93 | compose_context = {"service1": True} 94 | 95 | app = mk_manifest(TestApp).init_apps()[0] 96 | assert ( 97 | app.get_compose_content() 98 | == """version: "3.8" 99 | services: 100 | 101 | service1: 102 | image: service1 103 | 104 | service2: 105 | image: service2 106 | """ 107 | ) 108 | 109 | 110 | def test_compose_content__reserved_context__data_exists(mk_manifest): 111 | class TestApp(BaseApp): 112 | pass 113 | 114 | # Create app and manifest 115 | manifest = mk_manifest(TestApp) 116 | apps = manifest.init_apps() 117 | assert len(apps) == 1 118 | app = apps[0] 119 | 120 | # Collect reserved context 121 | context = app.get_compose_context() 122 | 123 | # Check host 124 | assert context["host"] == app.host 125 | 126 | # Check template context objects 127 | assert isinstance(context["apps"], AppsTemplateContext) 128 | assert isinstance(context["env"], EnvTemplateContext) 129 | 130 | # Check reserved words are there 131 | assert context["docker0s"] == NotImplemented 132 | assert context["globals"] == NotImplemented 133 | 134 | 135 | def test_compose_content__reserved_context__data_renders(mk_manifest, tmp_path): 136 | template_path = tmp_path / "docker-compose.jinja2" 137 | template_path.write_text( 138 | "host.name={{ host.name }}" 139 | " apps.TestApp2.foo={{ apps.TestApp2.foo }}" 140 | " apps.TestApp2.baz={{ apps.TestApp2.env.baz }}" 141 | ) 142 | 143 | class TestApp1(BaseApp): 144 | compose = str(template_path) 145 | 146 | class TestApp2(BaseApp): 147 | compose_context = {"foo": "bar"} 148 | env = {"baz": "qux"} 149 | 150 | # Create apps and sanity check 151 | manifest = mk_manifest(TestApp1, TestApp2) 152 | apps = manifest.init_apps() 153 | assert len(apps) == 2 154 | app1, app2 = apps 155 | assert isinstance(app1, TestApp1) 156 | assert isinstance(app2, TestApp2) 157 | 158 | assert app1.get_compose_content() == ( 159 | f"host.name={HOST_NAME} apps.TestApp2.foo=bar apps.TestApp2.baz=qux" 160 | ) 161 | 162 | 163 | @pytest.mark.parametrize( 164 | "cmd, cmd_args, cmd_out", 165 | [ 166 | ("up", {}, "up"), 167 | ("up {service}", {"service": "mycontainer"}, "up mycontainer"), 168 | ], 169 | ) 170 | def test_mocked_app__call_compose(cmd, cmd_args, cmd_out, mock_fabric, base_app): 171 | with mock_fabric() as mocked: 172 | base_app.call_compose(cmd, cmd_args) 173 | 174 | assert mocked.flat_stack == [ 175 | ( 176 | "run", 177 | ( 178 | "docker-compose " 179 | "--file /home/user/apps/sample_app/docker-compose.yml " 180 | "--env-file /home/user/apps/sample_app/env " 181 | ) 182 | + cmd_out, 183 | None, 184 | ), 185 | ] 186 | 187 | 188 | @pytest.mark.parametrize( 189 | "services, cmds_out", 190 | [ 191 | ([], ["up --build --detach"]), 192 | (["mycontainer"], ["up --build --detach mycontainer"]), 
193 | ( 194 | ["con1", "con2", "con3"], 195 | [ 196 | "up --build --detach con1", 197 | "up --build --detach con2", 198 | "up --build --detach con3", 199 | ], 200 | ), 201 | ], 202 | ) 203 | def test_mocked_app__call_up(services, cmds_out, mock_fabric, base_app): 204 | with mock_fabric() as mocked: 205 | base_app.up(*services) 206 | 207 | flat_stack = mocked.flat_stack 208 | assert len(flat_stack) == len(cmds_out) 209 | for actual, expected_cmd in zip(flat_stack, cmds_out): 210 | assert actual == ( 211 | "run", 212 | ( 213 | "docker-compose " 214 | "--file /home/user/apps/sample_app/docker-compose.yml " 215 | "--env-file /home/user/apps/sample_app/env " 216 | ) 217 | + expected_cmd, 218 | None, 219 | ) 220 | 221 | 222 | @pytest.mark.parametrize( 223 | "services, cmds_out", 224 | [ 225 | ([], ["down"]), 226 | (["mycontainer"], ["rm --force --stop -v mycontainer"]), 227 | ( 228 | ["con1", "con2", "con3"], 229 | [ 230 | "rm --force --stop -v con1", 231 | "rm --force --stop -v con2", 232 | "rm --force --stop -v con3", 233 | ], 234 | ), 235 | ], 236 | ) 237 | def test_mocked_app__call_down(services, cmds_out, mock_fabric, base_app): 238 | with mock_fabric() as mocked: 239 | base_app.down(*services) 240 | 241 | flat_stack = mocked.flat_stack 242 | assert len(flat_stack) == len(cmds_out) 243 | for actual, expected_cmd in zip(flat_stack, cmds_out): 244 | assert actual == ( 245 | "run", 246 | ( 247 | "docker-compose " 248 | "--file /home/user/apps/sample_app/docker-compose.yml " 249 | "--env-file /home/user/apps/sample_app/env " 250 | ) 251 | + expected_cmd, 252 | None, 253 | ) 254 | 255 | 256 | @pytest.mark.parametrize( 257 | "services, cmds_out", 258 | [ 259 | ([], ["restart"]), 260 | (["mycontainer"], ["restart mycontainer"]), 261 | ( 262 | ["con1", "con2", "con3"], 263 | [ 264 | "restart con1", 265 | "restart con2", 266 | "restart con3", 267 | ], 268 | ), 269 | ], 270 | ) 271 | def test_mocked_app__call_restart(services, cmds_out, mock_fabric, base_app): 272 | with mock_fabric() as mocked: 273 | base_app.restart(*services) 274 | 275 | flat_stack = mocked.flat_stack 276 | assert len(flat_stack) == len(cmds_out) 277 | for actual, expected_cmd in zip(flat_stack, cmds_out): 278 | assert actual == ( 279 | "run", 280 | ( 281 | "docker-compose " 282 | "--file /home/user/apps/sample_app/docker-compose.yml " 283 | "--env-file /home/user/apps/sample_app/env " 284 | ) 285 | + expected_cmd, 286 | None, 287 | ) 288 | 289 | 290 | @pytest.mark.parametrize( 291 | "service, cmd, cmd_out", 292 | [("mycontainer", "/bin/bash", "exec mycontainer /bin/bash")], 293 | ) 294 | def test_mocked_app__call_exec(service, cmd, cmd_out, mock_fabric, base_app): 295 | with mock_fabric() as mocked: 296 | base_app.exec(service, cmd) 297 | 298 | assert mocked.flat_stack == [ 299 | ( 300 | "run", 301 | ( 302 | "docker-compose " 303 | "--file /home/user/apps/sample_app/docker-compose.yml " 304 | "--env-file /home/user/apps/sample_app/env " 305 | ) 306 | + cmd_out, 307 | None, 308 | ) 309 | ] 310 | -------------------------------------------------------------------------------- /tests/app/test_names.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | from docker0s.app.names import normalise_name, pascal_to_snake 4 | 5 | 6 | @pytest.mark.parametrize( 7 | "src, out", 8 | [ 9 | ("One", "One"), 10 | ("TwoWords", "TwoWords"), 11 | ("HasThreeWords", "HasThreeWords"), 12 | ], 13 | ) 14 | def test_normalise_name__pascal_case_in__pascal_case_out(src, out): 15 | assert 
normalise_name(src) == out 16 | 17 | 18 | @pytest.mark.parametrize( 19 | "src, out", 20 | [ 21 | ("one", "One"), 22 | ("twoWords", "TwoWords"), 23 | ("hasThreeWords", "HasThreeWords"), 24 | ], 25 | ) 26 | def test_normalise_name__camel_case_in__pascal_case_out(src, out): 27 | assert normalise_name(src) == out 28 | 29 | 30 | @pytest.mark.parametrize( 31 | "src, out", 32 | [ 33 | ("one", "One"), 34 | ("two_words", "TwoWords"), 35 | ("has_three_words", "HasThreeWords"), 36 | ], 37 | ) 38 | def test_normalise_name__snake_case_in__pascal_case_out(src, out): 39 | assert normalise_name(src) == out 40 | 41 | 42 | @pytest.mark.parametrize( 43 | "src, out", 44 | [ 45 | ("one", "One"), 46 | ("two-words", "TwoWords"), 47 | ("has-three-words", "HasThreeWords"), 48 | ], 49 | ) 50 | def test_normalise_name__kebab_case_in__pascal_case_out(src, out): 51 | assert normalise_name(src) == out 52 | 53 | 54 | @pytest.mark.parametrize( 55 | "src, out", 56 | [ 57 | ("one ", "One"), 58 | ("one !", "One"), 59 | ("two words", "TwoWords"), 60 | ("two 123 words", "Two123Words"), 61 | ("has-three_words", "HasThreeWords"), 62 | ("this has-four_words", "ThisHasFourWords"), 63 | ], 64 | ) 65 | def test_normalise_name__rubbish_in__pascal_case_out(src, out): 66 | assert normalise_name(src) == out 67 | 68 | 69 | @pytest.mark.parametrize( 70 | "invalid", 71 | [ 72 | "123 words", 73 | ], 74 | ) 75 | def test_normalise_name__invalid_in__raises_exception(invalid): 76 | with pytest.raises(ValueError, match=f"Names must start with A-Z: {invalid}"): 77 | _ = normalise_name(invalid) 78 | 79 | 80 | @pytest.mark.parametrize( 81 | "src, out", 82 | [ 83 | ("One", "one"), 84 | ("TwoWords", "two_words"), 85 | ("HasThreeWords", "has_three_words"), 86 | ], 87 | ) 88 | def test_pascal_to_snake__pascal_in__snake_out(src, out): 89 | assert pascal_to_snake(src) == out 90 | 91 | 92 | @pytest.mark.parametrize( 93 | "src, out", 94 | [ 95 | ("one ", "one "), 96 | ("one !", "one !"), 97 | ("two words", "two words"), 98 | ("two 123 Words", "two 123 _words"), 99 | ("has-three_words", "has-three_words"), 100 | ("this has-Four_words", "this has-_four_words"), 101 | ], 102 | ) 103 | def test_pascal_to_snake__rubbish_in__rubbish_out(src, out): 104 | assert pascal_to_snake(src) == out 105 | -------------------------------------------------------------------------------- /tests/app/test_repo.py: -------------------------------------------------------------------------------- 1 | """ 2 | Test docker0s.app.repo.RepoApp 3 | """ 4 | from docker0s.app.repo import RepoApp 5 | 6 | 7 | def test_repoapp_is_abstract(): 8 | assert RepoApp.abstract is True 9 | 10 | 11 | def test_repoapp_subclass_is_concrete(): 12 | class TestApp(RepoApp): 13 | pass 14 | 15 | assert TestApp.abstract is False 16 | -------------------------------------------------------------------------------- /tests/conftest.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import hashlib 4 | import subprocess 5 | from contextlib import contextmanager 6 | from dataclasses import dataclass 7 | from io import StringIO 8 | from pathlib import Path 9 | from typing import Any 10 | 11 | import pytest 12 | from fabric.runners import Result 13 | 14 | from docker0s import git 15 | from docker0s import host as docker0s_host 16 | from docker0s.host import Host 17 | from docker0s.manifest import Manifest 18 | 19 | from .constants import GITHUB_EXISTS_CONTENT, GITHUB_EXISTS_PARTS, HOST_NAME 20 | 21 | 22 | @pytest.fixture 23 | def 
mock_call(monkeypatch): 24 | """ 25 | Mock call_or_die for local calls 26 | 27 | Usage:: 28 | 29 | def test_call(mock_call): 30 | with mock_call(stdout="stdout response") as mocked: 31 | call_or_die('ls') 32 | assert mocked.stack[0].cmd = ['ls'] 33 | """ 34 | 35 | @dataclass 36 | class MockLog: 37 | cmd: tuple[str, ...] 38 | cwd: Path | None 39 | expected: str | None 40 | 41 | class MockedCall: 42 | def __init__(self, stdout=None): 43 | self.stdout = None 44 | 45 | def __enter__(self): 46 | monkeypatch.setattr(git, "call_or_die", self) 47 | monkeypatch.setattr(git, "call", self) 48 | self.stack = [] 49 | return self 50 | 51 | def __exit__(self, *args): 52 | pass 53 | 54 | def __call__( 55 | self, 56 | *cmd: str, 57 | cwd: Path | None = None, 58 | expected: str | None = None, 59 | ) -> subprocess.CompletedProcess: 60 | self.stack.append(MockLog(cmd=cmd, cwd=cwd, expected=expected)) 61 | return subprocess.CompletedProcess(cmd, returncode=0, stdout=self.stdout) 62 | 63 | @property 64 | def cmd_cwd_stack(self): 65 | """ 66 | Helper for checking cmd and cwd pairs 67 | """ 68 | return [(log.cmd, log.cwd) for log in self.stack] 69 | 70 | return MockedCall 71 | 72 | 73 | @pytest.fixture() 74 | def assert_no_calls(mock_call): 75 | """ 76 | Assert the test makes no system call 77 | """ 78 | with mock_call(stdout="") as mocked: 79 | yield 80 | assert mocked.stack == [] 81 | 82 | 83 | @pytest.fixture 84 | def cache_path(tmp_path: Path, monkeypatch) -> Path: 85 | monkeypatch.setattr(git, "CACHE_PATH", tmp_path) 86 | return tmp_path 87 | 88 | 89 | @pytest.fixture 90 | def mock_file(cache_path): 91 | url, ref, path = GITHUB_EXISTS_PARTS 92 | repo_path = cache_path / hashlib.md5(url.encode()).hexdigest() 93 | file_path = repo_path / path 94 | file_path.parent.mkdir(parents=True) 95 | file_path.write_text(GITHUB_EXISTS_CONTENT) 96 | 97 | return (url, ref, path, file_path) 98 | 99 | 100 | @pytest.fixture 101 | def mock_fabric(monkeypatch): 102 | """ 103 | Patch the fabric Connection object and track calls to run and put 104 | """ 105 | 106 | @dataclass 107 | class RunLog: 108 | cmd: str 109 | env: dict[str, Any] | None 110 | 111 | def as_flat(self): 112 | return ("run", self.cmd, self.env) 113 | 114 | @dataclass 115 | class PutStringIO: 116 | data: str 117 | 118 | @dataclass 119 | class PutLog: 120 | source: str 121 | destination: str 122 | 123 | def as_flat(self): 124 | source = self.source 125 | if isinstance(self.source, StringIO): 126 | source = PutStringIO(data=self.source.read()) 127 | return ("put", source, self.destination) 128 | 129 | class MockContext: 130 | StringIO = PutStringIO 131 | 132 | def __enter__(self): 133 | log_stack = self.log_stack = [] 134 | 135 | class MockConnection: 136 | def __init__( 137 | self, 138 | host: str, 139 | port: str | int, 140 | user: str, 141 | connect_kwargs: dict[str, Any] | None = None, 142 | forward_agent: bool | None = None, 143 | ): 144 | self.host = host 145 | self.port = port 146 | self.user = user 147 | 148 | @contextmanager 149 | def cd(self, dir: str): 150 | yield 151 | 152 | def run(self, cmd: str, env: dict[str, Any] | None, **kwargs) -> Result: 153 | log_stack.append(RunLog(cmd=cmd, env=env)) 154 | return Result(connection=self) 155 | 156 | def put(self, source: str, destination: str): 157 | log_stack.append(PutLog(source=source, destination=destination)) 158 | 159 | # Something here is screwing things up 160 | 161 | monkeypatch.setattr(docker0s_host, "Connection", MockConnection) 162 | 163 | return self 164 | 165 | def __exit__(self, *args): 166 | 
pass 167 | 168 | @property 169 | def flat_stack(self): 170 | """ 171 | Helper for checking stack as a list of ('run', ...) and ('put', ...) tuples 172 | """ 173 | return [log.as_flat() for log in self.log_stack] 174 | 175 | return MockContext 176 | 177 | 178 | @pytest.fixture 179 | def host_cls(): 180 | """ 181 | A sample host class 182 | """ 183 | return Host.from_dict( 184 | name="FakeTestHost", 185 | path=Path(__file__).parent, 186 | module="docker0s.tests", 187 | data={"name": HOST_NAME, "port": 22, "user": "user"}, 188 | ) 189 | 190 | 191 | @pytest.fixture 192 | def host(host_cls): 193 | """ 194 | A sample host instance 195 | """ 196 | return host_cls() 197 | 198 | 199 | @pytest.fixture 200 | def mk_manifest(host_cls, tmp_path): 201 | """ 202 | Generate a manifest for a list of apps 203 | """ 204 | 205 | def factory(*app_classes) -> Manifest: 206 | manifest = Manifest(tmp_path / "manifest.yml") 207 | for app_cls in app_classes: 208 | manifest.add_app(app_cls) 209 | manifest.host = host_cls 210 | 211 | return manifest 212 | 213 | return factory 214 | -------------------------------------------------------------------------------- /tests/constants.py: -------------------------------------------------------------------------------- 1 | """ 2 | Test constants 3 | """ 4 | 5 | # 6 | # Live integration URLs 7 | # 8 | 9 | # This file exists in the repo 10 | GITHUB_EXISTS = "git+https://github.com/radiac/docker0s@main#tests/data/file.txt" 11 | GITHUB_EXISTS_PARTS = ( 12 | "https://github.com/radiac/docker0s", 13 | "main", 14 | "tests/data/file.txt", 15 | ) 16 | GITHUB_EXISTS_CONTENT = "All work and no play makes docker0s happy" 17 | 18 | 19 | # Fixture constants 20 | HOST_NAME = "localhost" 21 | -------------------------------------------------------------------------------- /tests/data/docker-compose.yml: -------------------------------------------------------------------------------- 1 | version: "3.8" 2 | services: 3 | service1: 4 | image: service1 5 | service2: 6 | image: service2 7 | -------------------------------------------------------------------------------- /tests/data/extends_base_first.py: -------------------------------------------------------------------------------- 1 | from docker0s.app import BaseApp 2 | 3 | 4 | class TestApp(BaseApp): 5 | # Set test ID for easy class identification 6 | test_id = "first" 7 | 8 | # Extend second 9 | extends = "extends_base_second.py" 10 | 11 | # Second defines compose, this should override 12 | compose = "first" 13 | -------------------------------------------------------------------------------- /tests/data/extends_base_second.py: -------------------------------------------------------------------------------- 1 | from docker0s.app import BaseApp 2 | 3 | 4 | class TestApp(BaseApp): 5 | # Set test ID for easy class identification 6 | test_id = "second" 7 | 8 | # First sets compose, it should override this 9 | compose = "second" 10 | 11 | # First does not set env_file, this should win 12 | env_file = "second" 13 | -------------------------------------------------------------------------------- /tests/data/file.txt: -------------------------------------------------------------------------------- 1 | All work and no play makes docker0s happy -------------------------------------------------------------------------------- /tests/data/first.env: -------------------------------------------------------------------------------- 1 | key1=first1 2 | key2=first2 3 | key3=first3 4 | 
-------------------------------------------------------------------------------- /tests/data/manifest.py: -------------------------------------------------------------------------------- 1 | from docker0s import App, Host 2 | 3 | 4 | class TestApp(App): 5 | # Set test ID for easy class identification 6 | test_id = "manifest" 7 | 8 | # Extend second 9 | extends = "extends_base_first.py" 10 | 11 | 12 | class OtherApp(App): 13 | compose = "other_app" 14 | 15 | 16 | class Vagrant(Host): 17 | name = "localhost" 18 | port = 2222 19 | user = "vagrant" 20 | -------------------------------------------------------------------------------- /tests/data/manifest.yml: -------------------------------------------------------------------------------- 1 | apps: 2 | test_app: 3 | type: BaseTestApp 4 | test_id: manifest 5 | extends: extends_base_first.py 6 | other_app: 7 | compose: other_app 8 | host: 9 | name: localhost 10 | port: 2222 11 | user: vagrant 12 | -------------------------------------------------------------------------------- /tests/data/second.env: -------------------------------------------------------------------------------- 1 | key1=second1 2 | # key2 from first 3 | # key3 from first 4 | key4=second4 5 | -------------------------------------------------------------------------------- /tests/path/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/radiac/docker0s/fbf0d94d942d0e821f4d37caaa8f7e603183d943/tests/path/__init__.py -------------------------------------------------------------------------------- /tests/path/test_extends_path.py: -------------------------------------------------------------------------------- 1 | from pathlib import Path 2 | 3 | import pytest 4 | 5 | from docker0s.exceptions import DefinitionError 6 | from docker0s.path import ExtendsPath 7 | 8 | 9 | @pytest.fixture 10 | def mock_fetch_repo(monkeypatch, tmp_path): 11 | """ 12 | Mock path.fetch_repo 13 | 14 | Usage:: 15 | 16 | def test_call(mock_fetch_repo): 17 | with mock_fetch_repo(stdout="stdout response") as mocked: 18 | trigger_fetch_repo('ls') 19 | assert mocked.stack[0].cmd = ['ls'] 20 | """ 21 | 22 | class MockedCall: 23 | def __enter__(self): 24 | monkeypatch.setattr("docker0s.path.fetch_repo", self) 25 | self.stack = [] 26 | return self 27 | 28 | def __exit__(self, *args): 29 | pass 30 | 31 | def __call__(self, url: str, ref: str) -> Path: 32 | path = tmp_path / "repo" 33 | path.mkdir() 34 | self.stack.append((url, ref)) 35 | return path 36 | 37 | return MockedCall 38 | 39 | 40 | @pytest.fixture() 41 | def assert_no_fetch_repo(mock_fetch_repo): 42 | """ 43 | Assert the test makes no system call 44 | """ 45 | with mock_fetch_repo() as mocked: 46 | yield 47 | assert mocked.stack == [] 48 | 49 | 50 | def test_local__constructor__no_calls(assert_no_fetch_repo, tmp_path): 51 | ExtendsPath("/foo/bar", cwd=tmp_path) 52 | 53 | 54 | @pytest.mark.parametrize( 55 | "path_str, expected", 56 | [ 57 | # Relative paths 58 | ( 59 | "foo/bar/baz.yml", 60 | "{tmp}/foo/bar/baz.yml", 61 | ), 62 | ( 63 | "foo/../bar.yml", 64 | "{tmp}/bar.yml", 65 | ), 66 | # Absolute 67 | ( 68 | "{tmp}/foo.yml", 69 | "{tmp}/foo.yml", 70 | ), 71 | ( 72 | "{tmp}/foo/bar.yml", 73 | "{tmp}/foo/bar.yml", 74 | ), 75 | ], 76 | ) 77 | def test_local__is_resolved( 78 | path_str, expected, monkeypatch, tmp_path, assert_no_fetch_repo 79 | ): 80 | # Add tmp_path to strings 81 | path_str = path_str.replace("{tmp}", str(tmp_path)) 82 | expected = expected.replace("{tmp}", 
str(tmp_path)) 83 | 84 | path = ExtendsPath(path_str, cwd=tmp_path) 85 | assert isinstance(path.original, str) 86 | assert isinstance(path.path, Path) 87 | assert path.original == path_str 88 | assert str(path.path) == expected 89 | assert path.name is None 90 | 91 | 92 | def test_local__path_with_name__name_extracted(monkeypatch, tmp_path): 93 | path_str = "app/foo.yml::bar" 94 | path = ExtendsPath(path_str, cwd=tmp_path) 95 | assert path.original == path_str 96 | assert path.path == tmp_path / "app/foo.yml" 97 | assert path.name == "bar" 98 | 99 | 100 | def test_local__truediv__resolves_slash(assert_no_fetch_repo, tmp_path): 101 | path = ExtendsPath("/foo/bar", cwd=tmp_path) 102 | assert path / "baz" == Path("/foo/bar/baz") 103 | assert path / "../baz" == Path("/foo/baz") 104 | assert path / "/baz" == Path("/baz") 105 | 106 | 107 | def test_local__get_manifest_file__returns_path(tmp_path, assert_no_fetch_repo): 108 | file_path = tmp_path / "manifest.yml" 109 | file_path.touch() 110 | path = ExtendsPath(str(file_path), cwd=tmp_path) 111 | assert path.get_manifest() == file_path 112 | 113 | 114 | def test_local__get_manifest_missing__raises_exception(tmp_path, assert_no_fetch_repo): 115 | dir_path = tmp_path / "missing" 116 | with pytest.raises(DefinitionError, match=f"Manifest not found at {dir_path}"): 117 | path = ExtendsPath(str(dir_path), cwd=tmp_path) 118 | path.get_manifest() 119 | 120 | 121 | def test_local__get_manifest_find_in_dir__finds_file(tmp_path, assert_no_fetch_repo): 122 | file_path = tmp_path / "manifest.yml" 123 | file_path.touch() 124 | path = ExtendsPath(str(file_path), cwd=tmp_path) 125 | assert path.get_manifest() == file_path 126 | 127 | 128 | def test_local__get_manifest_missing_in_dir__raises_exception( 129 | tmp_path, assert_no_fetch_repo 130 | ): 131 | dir_path = tmp_path / "missing" 132 | dir_path.mkdir() 133 | with pytest.raises(DefinitionError, match=f"Manifest not found in {dir_path}"): 134 | path = ExtendsPath(str(dir_path), cwd=tmp_path) 135 | path.get_manifest() 136 | 137 | 138 | @pytest.mark.parametrize( 139 | "path_str, data", 140 | [ 141 | ( 142 | "git+ssh://git@github.com:radiac/docker0s@main#apps/foo/manifest.yml", 143 | { 144 | "repo": "git@github.com:radiac/docker0s", 145 | "ref": "main", 146 | "path": "apps/foo/manifest.yml", 147 | "name": None, 148 | }, 149 | ), 150 | ( 151 | "git+https://github.com/radiac/docker0s@main#apps/foo/manifest.yml::bar", 152 | { 153 | "repo": "https://github.com/radiac/docker0s", 154 | "ref": "main", 155 | "path": "apps/foo/manifest.yml", 156 | "name": "bar", 157 | }, 158 | ), 159 | ( 160 | "git+https://github.com/radiac/docker0s@main", 161 | { 162 | "repo": "https://github.com/radiac/docker0s", 163 | "ref": "main", 164 | "path": "", 165 | "name": None, 166 | }, 167 | ), 168 | ( 169 | "git+https://github.com/radiac/docker0s#apps/foo/manifest.yml", 170 | { 171 | "repo": "https://github.com/radiac/docker0s", 172 | "ref": None, 173 | "path": "apps/foo/manifest.yml", 174 | "name": None, 175 | }, 176 | ), 177 | ( 178 | "git+https://github.com/radiac/docker0s::bar", 179 | { 180 | "repo": "https://github.com/radiac/docker0s", 181 | "ref": None, 182 | "path": "", 183 | "name": "bar", 184 | }, 185 | ), 186 | ], 187 | ) 188 | def test_git__constructor__calls_clone(path_str, data, tmp_path, mock_fetch_repo): 189 | with mock_fetch_repo() as mocked: 190 | path = ExtendsPath(path_str, tmp_path) 191 | assert path.original == path_str 192 | assert path.path == tmp_path / "repo" / data["path"] 193 | assert path.repo == data["repo"] 
194 | assert path.ref == (data["ref"] or None) 195 | assert path.name == (data["name"] or None) 196 | assert mocked.stack == [(data["repo"], data["ref"])] 197 | 198 | 199 | """ def xx_test_call(mock_call): 200 | with mock_call(stdout="stdout response") as mocked: 201 | call_or_die('ls') 202 | assert mocked.stack[0].cmd = ['ls'] """ 203 | -------------------------------------------------------------------------------- /tests/path/test_other.py: -------------------------------------------------------------------------------- 1 | from pathlib import Path 2 | 3 | import pytest 4 | 5 | from docker0s.exceptions import DefinitionError 6 | from docker0s.path import path_to_relative, path_to_uuid 7 | 8 | 9 | @pytest.mark.parametrize( 10 | "path_str, uuid", 11 | [ 12 | ("/foo.py", "_7a32759789e8efe7c60b4448755c2a9f"), 13 | ( 14 | "git+https://github.com/radiac/docker0s#foo.yml", 15 | "_773f52a79994eda777ad5db5be3960d8", 16 | ), 17 | ], 18 | ) 19 | def test_uuid(path_str, uuid): 20 | actual_uuid = path_to_uuid(Path(path_str)) 21 | assert actual_uuid == uuid 22 | 23 | 24 | @pytest.mark.parametrize( 25 | "root, path, expected", 26 | [ 27 | ("/foo", "/foo/bar", "bar"), 28 | ("/foo", "/foo/bar/../baz", "bar/../baz"), 29 | ], 30 | ) 31 | def test_path_to_relative__relative_path__resolves(root, path, expected): 32 | actual = path_to_relative(Path(root), Path(path)) 33 | assert actual == expected 34 | 35 | 36 | @pytest.mark.parametrize( 37 | "root, path", 38 | [ 39 | ("/foo/bar", "../baz"), 40 | ("/foo/bar", "baz/../../qux"), 41 | ("/foo/bar", "/baz/qux"), 42 | ], 43 | ) 44 | def test_path_to_relative__invalid_path__raises_exception(root, path): 45 | with pytest.raises( 46 | DefinitionError, match=f"Path {path} is not a sub-path of {root}" 47 | ): 48 | path_to_relative(Path(root), Path(path)) 49 | -------------------------------------------------------------------------------- /tests/requirements.in: -------------------------------------------------------------------------------- 1 | ########################## 2 | # Developer requirements # 3 | ########################## 4 | -r ../requirements.txt 5 | 6 | # 7 | # Testing 8 | # 9 | 10 | pytest 11 | pytest-asyncio 12 | pytest-black 13 | pytest-cov 14 | pytest-flake8 15 | pytest-isort 16 | pytest-mypy 17 | 18 | # Required by third party 19 | types-PyYAML 20 | 21 | 22 | # 23 | # Documentation 24 | # 25 | 26 | # Core build requirements 27 | doc8 28 | sphinx 29 | sphinx-gitref 30 | #-e git+https://github.com/radiac/sphinx_radiac_theme.git#egg=sphinx_radiac_theme 31 | 32 | # Optional 33 | sphinx-autobuild 34 | -------------------------------------------------------------------------------- /tests/requirements.txt: -------------------------------------------------------------------------------- 1 | # 2 | # This file is autogenerated by pip-compile with python 3.10 3 | # To update, run: 4 | # 5 | # pip-compile 6 | # 7 | alabaster==0.7.12 8 | # via sphinx 9 | attrs==22.2.0 10 | # via 11 | # pytest 12 | # pytest-mypy 13 | babel==2.11.0 14 | # via sphinx 15 | bcrypt==4.0.0 16 | # via 17 | # -r ../requirements.txt 18 | # paramiko 19 | black==22.12.0 20 | # via pytest-black 21 | build==0.8.0 22 | # via 23 | # -r ../requirements.txt 24 | # pip-tools 25 | certifi==2022.12.7 26 | # via requests 27 | cffi==1.15.1 28 | # via 29 | # -r ../requirements.txt 30 | # cryptography 31 | # pynacl 32 | charset-normalizer==2.1.1 33 | # via requests 34 | click==8.1.3 35 | # via 36 | # -r ../requirements.txt 37 | # black 38 | # pip-tools 39 | colorama==0.4.6 40 | # via sphinx-autobuild 41 
| coverage[toml]==7.0.4 42 | # via pytest-cov 43 | cryptography==38.0.0 44 | # via 45 | # -r ../requirements.txt 46 | # paramiko 47 | doc8==1.1.1 48 | # via -r requirements.in 49 | docutils==0.19 50 | # via 51 | # doc8 52 | # restructuredtext-lint 53 | # sphinx 54 | # sphinx-gitref 55 | exceptiongroup==1.1.0 56 | # via pytest 57 | fabric==2.7.1 58 | # via -r ../requirements.txt 59 | filelock==3.9.0 60 | # via pytest-mypy 61 | flake8==6.0.0 62 | # via pytest-flake8 63 | idna==3.4 64 | # via requests 65 | imagesize==1.4.1 66 | # via sphinx 67 | iniconfig==2.0.0 68 | # via pytest 69 | invoke==1.7.1 70 | # via 71 | # -r ../requirements.txt 72 | # fabric 73 | isort==5.11.4 74 | # via pytest-isort 75 | jinja2==3.1.2 76 | # via 77 | # -r ../requirements.txt 78 | # sphinx 79 | livereload==2.6.3 80 | # via sphinx-autobuild 81 | markupsafe==2.1.1 82 | # via 83 | # -r ../requirements.txt 84 | # jinja2 85 | mccabe==0.7.0 86 | # via flake8 87 | mypy==0.991 88 | # via pytest-mypy 89 | mypy-extensions==0.4.3 90 | # via 91 | # black 92 | # mypy 93 | packaging==21.3 94 | # via 95 | # -r ../requirements.txt 96 | # build 97 | # pytest 98 | # sphinx 99 | paramiko==2.11.0 100 | # via 101 | # -r ../requirements.txt 102 | # fabric 103 | pathlib2==2.3.7.post1 104 | # via 105 | # -r ../requirements.txt 106 | # fabric 107 | pathspec==0.10.3 108 | # via black 109 | pbr==5.11.0 110 | # via stevedore 111 | pep517==0.13.0 112 | # via 113 | # -r ../requirements.txt 114 | # build 115 | pip-tools==6.8.0 116 | # via -r ../requirements.txt 117 | platformdirs==2.6.2 118 | # via black 119 | pluggy==1.0.0 120 | # via pytest 121 | pycodestyle==2.10.0 122 | # via flake8 123 | pycparser==2.21 124 | # via 125 | # -r ../requirements.txt 126 | # cffi 127 | pyflakes==3.0.1 128 | # via flake8 129 | pygments==2.14.0 130 | # via 131 | # doc8 132 | # sphinx 133 | pynacl==1.5.0 134 | # via 135 | # -r ../requirements.txt 136 | # paramiko 137 | pyparsing==3.0.9 138 | # via 139 | # -r ../requirements.txt 140 | # packaging 141 | pytest==7.2.0 142 | # via 143 | # -r requirements.in 144 | # pytest-asyncio 145 | # pytest-black 146 | # pytest-cov 147 | # pytest-flake8 148 | # pytest-isort 149 | # pytest-mypy 150 | pytest-asyncio==0.20.3 151 | # via -r requirements.in 152 | pytest-black==0.3.12 153 | # via -r requirements.in 154 | pytest-cov==4.0.0 155 | # via -r requirements.in 156 | pytest-flake8==1.1.1 157 | # via -r requirements.in 158 | pytest-isort==3.1.0 159 | # via -r requirements.in 160 | pytest-mypy==0.10.3 161 | # via -r requirements.in 162 | python-dotenv==0.21.0 163 | # via -r ../requirements.txt 164 | pytz==2022.7 165 | # via babel 166 | pyyaml==6.0 167 | # via -r ../requirements.txt 168 | requests==2.28.1 169 | # via sphinx 170 | restructuredtext-lint==1.4.0 171 | # via doc8 172 | six==1.16.0 173 | # via 174 | # -r ../requirements.txt 175 | # livereload 176 | # paramiko 177 | # pathlib2 178 | snowballstemmer==2.2.0 179 | # via sphinx 180 | sphinx==6.1.2 181 | # via 182 | # -r requirements.in 183 | # sphinx-autobuild 184 | # sphinx-gitref 185 | sphinx-autobuild==2021.3.14 186 | # via -r requirements.in 187 | sphinx-gitref==0.2.1 188 | # via -r requirements.in 189 | sphinxcontrib-applehelp==1.0.3 190 | # via sphinx 191 | sphinxcontrib-devhelp==1.0.2 192 | # via sphinx 193 | sphinxcontrib-htmlhelp==2.0.0 194 | # via sphinx 195 | sphinxcontrib-jsmath==1.0.1 196 | # via sphinx 197 | sphinxcontrib-qthelp==1.0.3 198 | # via sphinx 199 | sphinxcontrib-serializinghtml==1.1.5 200 | # via sphinx 201 | stevedore==4.1.1 202 | # via doc8 203 | 
toml==0.10.2 204 | # via pytest-black 205 | tomli==2.0.1 206 | # via 207 | # -r ../requirements.txt 208 | # black 209 | # build 210 | # coverage 211 | # doc8 212 | # mypy 213 | # pep517 214 | # pytest 215 | tornado==6.2 216 | # via livereload 217 | types-pyyaml==6.0.12.2 218 | # via -r requirements.in 219 | typing-extensions==4.4.0 220 | # via mypy 221 | urllib3==1.26.13 222 | # via requests 223 | wheel==0.37.1 224 | # via 225 | # -r ../requirements.txt 226 | # pip-tools 227 | 228 | # The following packages are considered to be unsafe in a requirements file: 229 | # pip 230 | # setuptools 231 | -------------------------------------------------------------------------------- /tests/test_commands.py: -------------------------------------------------------------------------------- 1 | from pathlib import Path 2 | 3 | import pytest 4 | 5 | from docker0s.commands import Target, TargetManager 6 | from docker0s.manifest import Manifest 7 | 8 | 9 | @pytest.fixture 10 | def manifest(): 11 | path = Path(__file__).parent / "data/manifest.py" 12 | manifest = Manifest.load(path) 13 | return manifest 14 | 15 | 16 | def test_target_manager__no_targets__all_services_found(manifest): 17 | tm = TargetManager(manifest, ()) 18 | # Definition order 19 | assert list(tm.app_lookup.keys()) == ["TestApp", "OtherApp"] 20 | assert tm.apps[0].get_name() == "TestApp" 21 | assert tm.apps[1].get_name() == "OtherApp" 22 | assert tm.service_lookup == {} 23 | 24 | 25 | @pytest.mark.parametrize( 26 | "targets, app_names, service_lookup", 27 | ( 28 | ((), ["TestApp", "OtherApp"], {}), 29 | ((Target("TestApp"),), ["TestApp"], {"TestApp": []}), 30 | ( 31 | ( 32 | Target("TestApp", "one"), 33 | Target("TestApp", "two"), 34 | Target("OtherApp", "three"), 35 | ), 36 | ["TestApp", "OtherApp"], 37 | { 38 | "TestApp": ["one", "two"], 39 | "OtherApp": ["three"], 40 | }, 41 | ), 42 | ), 43 | ) 44 | def test_target_manager__caches_populated(targets, app_names, service_lookup, manifest): 45 | tm = TargetManager(manifest, targets) 46 | # Definition order 47 | assert sorted(list(tm.app_lookup.keys())) == sorted(app_names) 48 | 49 | # Remap service lookup to use names, easier to parameterize tests for 50 | tm_service_lookup = { 51 | app.get_name(): services for app, services in tm.service_lookup.items() 52 | } 53 | assert sorted(tm_service_lookup) == sorted(service_lookup) 54 | 55 | 56 | @pytest.mark.parametrize( 57 | "targets, app_services", 58 | ( 59 | ((), [("TestApp", []), ("OtherApp", [])]), 60 | ((Target("TestApp"),), [("TestApp", [])]), 61 | ( 62 | ( 63 | Target("TestApp", "one"), 64 | Target("TestApp", "two"), 65 | Target("OtherApp", "three"), 66 | ), 67 | [ 68 | ("TestApp", ["one", "two"]), 69 | ("OtherApp", ["three"]), 70 | ], 71 | ), 72 | ), 73 | ) 74 | def test_target_manager__get_app_services(targets, app_services, manifest): 75 | tm = TargetManager(manifest, targets) 76 | tm_app_services = [ 77 | (app.get_name(), services) for app, services in tm.get_app_services() 78 | ] 79 | assert sorted(tm_app_services) == sorted(app_services) 80 | -------------------------------------------------------------------------------- /tests/test_conftest.py: -------------------------------------------------------------------------------- 1 | """ 2 | Test the conftest fixtures 3 | """ 4 | from docker0s import git 5 | 6 | 7 | def test_mock_call__calls_logged(mock_call, tmp_path): 8 | dir_1 = tmp_path / "dir_1" 9 | dir_2 = tmp_path / "dir_2" 10 | 11 | with mock_call() as mocked: 12 | git.call_or_die("one", "foo", cwd=dir_1, expected="bar") 13 | 
git.call_or_die("two", "bar", cwd=dir_2, expected="foo") 14 | 15 | assert mocked.stack[0].cmd == ("one", "foo") 16 | assert mocked.stack[0].cwd == dir_1 17 | assert mocked.stack[0].expected == "bar" 18 | 19 | assert mocked.stack[1].cmd == ("two", "bar") 20 | assert mocked.stack[1].cwd == dir_2 21 | assert mocked.stack[1].expected == "foo" 22 | 23 | 24 | def test_mock_call__calls_logged__cmd_cwd_stack(mock_call, tmp_path): 25 | dir_1 = tmp_path / "dir_1" 26 | dir_2 = tmp_path / "dir_2" 27 | 28 | with mock_call() as mocked: 29 | git.call_or_die("one", "foo", cwd=dir_1, expected="bar") 30 | git.call_or_die("two", "bar", cwd=dir_2, expected="foo") 31 | 32 | assert mocked.cmd_cwd_stack == [ 33 | (("one", "foo"), dir_1), 34 | (("two", "bar"), dir_2), 35 | ] 36 | -------------------------------------------------------------------------------- /tests/test_env.py: -------------------------------------------------------------------------------- 1 | from docker0s.env import dump_env, read_env 2 | 3 | 4 | FILE1 = """# File 1 5 | ONE=1 6 | TWO="two" 7 | """ 8 | 9 | FILE2 = """# File 2 10 | ONE="one" 11 | THREE="three" 12 | FOUR=4 13 | """ 14 | 15 | 16 | def test_read__paths_values__merged(tmp_path): 17 | file1 = tmp_path / "file1.env" 18 | file1.write_text(FILE1) 19 | file2 = tmp_path / "file2.env" 20 | file2.write_text(FILE2) 21 | 22 | data = read_env( 23 | file1, 24 | file2, 25 | FOUR="four", 26 | ) 27 | assert data == { 28 | "ONE": "one", 29 | "TWO": "two", 30 | "THREE": "three", 31 | "FOUR": "four", 32 | } 33 | 34 | 35 | def test_dump__env_to_str(): 36 | dumped = dump_env( 37 | { 38 | "ONE": "one", 39 | "TWO": 2, 40 | "THREE": """three 41 | is 42 | multiline""", 43 | "FOUR": None, 44 | "FIVE": "five", 45 | } 46 | ) 47 | assert ( 48 | dumped 49 | == """ONE="one" 50 | TWO=2 51 | THREE="three 52 | is 53 | multiline" 54 | FOUR 55 | FIVE="five\"""" 56 | ) 57 | -------------------------------------------------------------------------------- /tests/test_git.py: -------------------------------------------------------------------------------- 1 | import hashlib 2 | 3 | import pytest 4 | 5 | from docker0s.git import ( 6 | GIT_HTTPS_PATTERN, 7 | GIT_SSH_PATTERN, 8 | CommandError, 9 | call_or_die, 10 | fetch_repo, 11 | ) 12 | 13 | from .constants import GITHUB_EXISTS_PARTS 14 | 15 | 16 | ssh_url = "git+ssh://git@github.com:username/repo@branch#path/to/file" 17 | https_url = "git+https://github.com/username/repo@branch#path/to/file" 18 | 19 | 20 | def test_git_ssh_pattern__ssh_pattern__match(): 21 | matches = GIT_SSH_PATTERN.match(ssh_url) 22 | assert matches 23 | data = matches.groupdict() 24 | assert data == { 25 | "repo": "git@github.com:username/repo", 26 | "ref": "branch", 27 | "path": "path/to/file", 28 | "name": None, 29 | } 30 | 31 | 32 | def test_git_ssh_pattern__not_ssh_pattern__no_match(): 33 | matches = GIT_SSH_PATTERN.match(https_url) 34 | assert not matches 35 | 36 | 37 | def test_git_https_pattern__https_pattern__match(): 38 | matches = GIT_HTTPS_PATTERN.match(https_url) 39 | assert matches 40 | data = matches.groupdict() 41 | assert data == { 42 | "repo": "https://github.com/username/repo", 43 | "ref": "branch", 44 | "path": "path/to/file", 45 | "name": None, 46 | } 47 | 48 | 49 | def test_git_https_pattern__not_https_pattern__no_match(): 50 | matches = GIT_HTTPS_PATTERN.match(ssh_url) 51 | assert not matches 52 | 53 | 54 | def test_call_or_die__success__returns_response(tmp_path): 55 | msg = "Hello" 56 | result = call_or_die("echo", msg, cwd=tmp_path) 57 | assert result.returncode == 0 58 | 
assert result.stdout.decode() == f"{msg}\n" 59 | 60 | 61 | def test_call_or_die__confirm_run_handles_spaces(tmp_path): 62 | # We know it does, just to sanity check safety 63 | filename = "Hello there" 64 | file = tmp_path / filename 65 | msg = "Hello" 66 | file.write_text(msg) 67 | 68 | result = call_or_die("cat", filename, cwd=tmp_path) 69 | assert result.returncode == 0 70 | assert result.stdout.decode() == msg 71 | 72 | 73 | def test_call_or_die__error__commanderror(tmp_path): 74 | with pytest.raises(CommandError, match="Command failed with exit code 1"): 75 | _ = call_or_die("cat", "missing", cwd=tmp_path) 76 | 77 | 78 | def test_call_or_die__content_error__commanderror(tmp_path): 79 | filename = "test" 80 | file = tmp_path / filename 81 | file.write_text("hello") 82 | with pytest.raises(CommandError, match="Command failed with unexpected output"): 83 | _ = call_or_die("cat", filename, cwd=tmp_path, expected="bye") 84 | 85 | 86 | def test_fetch_repo__not_in_cache__clones_and_pulls(mock_call, cache_path): 87 | fetch_repo.cache_clear() 88 | url, ref, path = GITHUB_EXISTS_PARTS 89 | repo_path = cache_path / hashlib.md5(url.encode()).hexdigest() 90 | 91 | with mock_call() as mocked: 92 | fetch_repo(url=url, ref=ref) 93 | 94 | assert mocked.cmd_cwd_stack == [ 95 | (("mkdir", "-p", str(repo_path)), None), 96 | (("git", "init"), repo_path), 97 | (("git", "remote", "add", "origin", url), repo_path), 98 | (("git", "fetch", "origin", ref, "--depth=1"), repo_path), 99 | (("git", "checkout", ref), repo_path), 100 | (("git", "rev-parse", "--abbrev-ref", "--verify", "main@{u}"), repo_path), 101 | (("git", "reset", "--hard", "origin/main"), repo_path), 102 | ] 103 | 104 | 105 | def test_fetch_repo__in_cache__just_pulls(mock_call, cache_path): 106 | fetch_repo.cache_clear() 107 | url, ref, path = GITHUB_EXISTS_PARTS 108 | repo_path = cache_path / hashlib.md5(url.encode()).hexdigest() 109 | 110 | repo_path.mkdir() 111 | with mock_call() as mocked: 112 | fetch_repo(url=url, ref=ref) 113 | 114 | assert mocked.cmd_cwd_stack == [ 115 | (("git", "fetch", "origin", ref, "--depth=1"), repo_path), 116 | (("git", "checkout", ref), repo_path), 117 | (("git", "rev-parse", "--abbrev-ref", "--verify", "main@{u}"), repo_path), 118 | (("git", "reset", "--hard", "origin/main"), repo_path), 119 | ] 120 | -------------------------------------------------------------------------------- /tests/test_host.py: -------------------------------------------------------------------------------- 1 | from pathlib import Path, PosixPath 2 | 3 | import pytest 4 | 5 | 6 | @pytest.mark.parametrize( 7 | "path, cmd_expected", 8 | [ 9 | ("foo", "mkdir -p foo"), 10 | ("foo/bar", "mkdir -p foo/bar"), 11 | ('foo"bar', "mkdir -p 'foo\"bar'"), 12 | ], 13 | ) 14 | def test_mocked_host__mkdir__expected(path, cmd_expected, mock_fabric, host): 15 | with mock_fabric() as mocked: 16 | host.mkdir(path) 17 | 18 | assert mocked.flat_stack == [ 19 | ("run", cmd_expected, None), 20 | ] 21 | 22 | 23 | @pytest.mark.parametrize( 24 | "path, cmd_expected", 25 | [ 26 | ("foo", "mkdir -p ."), 27 | ("foo/bar", "mkdir -p foo"), 28 | ("foo/bar/what.yml", "mkdir -p foo/bar"), 29 | ], 30 | ) 31 | def test_mocked_host__ensure_parent_path__expected( 32 | path, cmd_expected, mock_fabric, host 33 | ): 34 | with mock_fabric() as mocked: 35 | host.ensure_parent_path(PosixPath(path)) 36 | 37 | assert mocked.flat_stack == [ 38 | ("run", cmd_expected, None), 39 | ] 40 | 41 | 42 | @pytest.mark.parametrize( 43 | "cmd, args, cmd_expected", 44 | [ 45 | ("ls", None, "ls"), 46 | 
("ls {path}", {"path": "/foo/bar"}, "ls /foo/bar"), 47 | ], 48 | ) 49 | def test_mocked_host__exec__expected(cmd, args, cmd_expected, mock_fabric, host): 50 | with mock_fabric() as mocked: 51 | host.exec(cmd, args) 52 | 53 | assert mocked.flat_stack == [ 54 | ("run", cmd_expected, None), 55 | ] 56 | 57 | 58 | @pytest.mark.parametrize( 59 | "cmd, env", 60 | [ 61 | ("ls", None), 62 | ("ls", {"PATH": "/foo/bar"}), 63 | ], 64 | ) 65 | def test_mocked_host__exec_with_env__expected(cmd, env, mock_fabric, host): 66 | with mock_fabric() as mocked: 67 | host.exec(cmd, args=None, env=env) 68 | 69 | assert mocked.flat_stack == [ 70 | ("run", cmd, env), 71 | ] 72 | 73 | 74 | def test_mocked_host__call_compose(mock_fabric, host): 75 | with mock_fabric() as mocked: 76 | host.call_compose( 77 | compose=PosixPath("project/docker-compose.yml"), 78 | env=PosixPath("project/env"), 79 | cmd="up mycontainer", 80 | ) 81 | 82 | assert mocked.flat_stack == [ 83 | ( 84 | "run", 85 | ( 86 | "docker-compose " 87 | "--file project/docker-compose.yml " 88 | "--env-file project/env " 89 | "up mycontainer" 90 | ), 91 | None, 92 | ), 93 | ] 94 | 95 | 96 | def test_mocked_host__call_compose_with_args(mock_fabric, host): 97 | with mock_fabric() as mocked: 98 | host.call_compose( 99 | compose=PosixPath("project/docker-compose.yml"), 100 | env=PosixPath("project/env"), 101 | cmd="up {service}", 102 | cmd_args={"service": "mycontainer"}, 103 | ) 104 | 105 | assert mocked.flat_stack == [ 106 | ( 107 | "run", 108 | ( 109 | "docker-compose " 110 | "--file project/docker-compose.yml " 111 | "--env-file project/env " 112 | "up mycontainer" 113 | ), 114 | None, 115 | ), 116 | ] 117 | 118 | 119 | def test_mocked_host__push(mock_fabric, host): 120 | with mock_fabric() as mocked: 121 | host.push( 122 | source=Path("local/file"), 123 | destination=PosixPath("remote/file"), 124 | ) 125 | 126 | assert mocked.flat_stack == [ 127 | ("run", "mkdir -p remote", None), 128 | ("put", "local/file", "remote/file"), 129 | ] 130 | 131 | 132 | def test_mocked_host__write(mock_fabric, host): 133 | content = "example content" 134 | with mock_fabric() as mocked: 135 | host.write( 136 | filename=PosixPath("remote/file"), 137 | content=content, 138 | ) 139 | 140 | assert mocked.flat_stack == [ 141 | ("run", "mkdir -p remote", None), 142 | ("put", mocked.StringIO(content), "remote/file"), 143 | ] 144 | -------------------------------------------------------------------------------- /tests/test_manifest.py: -------------------------------------------------------------------------------- 1 | from pathlib import Path 2 | 3 | import pytest 4 | 5 | from docker0s import App, Host 6 | from docker0s.app import BaseApp, abstract_app_registry 7 | from docker0s.manifest import Manifest 8 | 9 | 10 | @pytest.fixture 11 | def BaseTestApp(): 12 | class BaseTestApp(App, abstract=True): 13 | """ 14 | Define abstract base class for manifest.yml 15 | """ 16 | 17 | test_id: str 18 | 19 | yield BaseTestApp 20 | del abstract_app_registry["BaseTestApp"] 21 | 22 | 23 | def test_manifest__load_py__loads_py(): 24 | """ 25 | Manifest TestApp extends first.py::TestApp, which extends second.py::TestApp 26 | """ 27 | path = Path(__file__).parent / "data/manifest.py" 28 | manifest = Manifest.load(path) 29 | 30 | # Should have two apps and one host 31 | assert len(manifest.apps) == 2 32 | TestApp: type[BaseApp] = manifest.get_app("TestApp") 33 | OtherApp: type[BaseApp] = manifest.get_app("OtherApp") 34 | assert manifest.host is not None 35 | VagrantHost: type[Host] = manifest.host 36 | 
37 | # TestApp Should have test ID of the manifest 38 | assert TestApp is not None 39 | assert issubclass(TestApp, App) 40 | assert TestApp.test_id == "manifest" # type: ignore 41 | assert TestApp._dir == Path(__file__).parent / "data" 42 | 43 | # Confirm extends works as per 44 | # tests/test_base_def.py:test_apply_base_manifest__extends__merges_base_classes 45 | 46 | # Should have first as the first base 47 | assert len(TestApp.__bases__) == 2 48 | assert TestApp.__bases__[0].test_id == "first" # type: ignore 49 | assert TestApp.__bases__[1] is App 50 | 51 | # First should have second as first base 52 | assert len(TestApp.__bases__[0].__bases__) == 2 53 | assert TestApp.__bases__[0].__bases__[0].test_id == "second" # type: ignore 54 | assert TestApp.__bases__[0].__bases__[1] is BaseApp 55 | 56 | # And inheritance order should give these values 57 | assert TestApp.compose == "first" 58 | assert TestApp.env_file == "second" 59 | 60 | # OtherApp should exist 61 | assert issubclass(OtherApp, App) 62 | assert OtherApp.compose == "other_app" 63 | 64 | # Host 65 | assert issubclass(VagrantHost, Host) 66 | assert VagrantHost.name == "localhost" 67 | assert VagrantHost.port == 2222 68 | assert VagrantHost.user == "vagrant" 69 | 70 | 71 | def test_manifest__load_yml__loads_yml(BaseTestApp): 72 | """ 73 | Manifest TestApp extends first.py::TestApp, which extends second.py::TestApp 74 | 75 | Uses custom base class for internal_id 76 | """ 77 | path = Path(__file__).parent / "data/manifest.yml" 78 | manifest = Manifest.load(path) 79 | 80 | # Should have two apps and one host 81 | assert len(manifest.apps) == 2 82 | TestApp: type[BaseApp] = manifest.get_app("TestApp") 83 | OtherApp: type[BaseApp] = manifest.get_app("OtherApp") 84 | assert manifest.host is not None 85 | VagrantHost: type[Host] = manifest.host 86 | 87 | # TestApp Should have test ID of the manifest 88 | assert issubclass(TestApp, BaseTestApp) 89 | assert TestApp.test_id == "manifest" # type: ignore 90 | 91 | # Confirm extends works as per 92 | # tests/test_base_def.py:test_apply_base_manifest__extends__merges_base_classes 93 | 94 | # Should have first as the first base 95 | assert len(TestApp.__bases__) == 2 96 | assert TestApp.__bases__[0].test_id == "first" # type: ignore 97 | assert TestApp.__bases__[1] is BaseTestApp 98 | 99 | # First should have second as first base 100 | assert len(TestApp.__bases__[0].__bases__) == 2 101 | assert TestApp.__bases__[0].__bases__[0].test_id == "second" # type: ignore 102 | assert TestApp.__bases__[0].__bases__[1] is BaseApp 103 | 104 | # And inheritance order should give these values 105 | assert TestApp.compose == "first" 106 | assert TestApp.env_file == "second" 107 | 108 | # OtherApp should exist 109 | assert issubclass(OtherApp, App) 110 | assert OtherApp.compose == "other_app" 111 | 112 | # Host 113 | assert issubclass(VagrantHost, Host) 114 | assert VagrantHost.name == "localhost" 115 | assert VagrantHost.port == 2222 116 | assert VagrantHost.user == "vagrant" 117 | --------------------------------------------------------------------------------
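
A minimal sketch of the env-merge ordering exercised in tests/app/test_base_def.py above — assuming docker0s is installed and importable; ParentApp/ChildApp and their key/value pairs are illustrative only, not part of the repository::

    from docker0s.app import BaseApp


    class ParentApp(BaseApp):
        # Parent supplies two env values
        env = {"key1": "parent1", "key3": "parent3"}


    class ChildApp(ParentApp):
        # Child overrides key3; key1 is inherited unchanged
        env = {"key3": "child3"}


    # Per the tests above: env dicts merge parent-first then child, and
    # COMPOSE_PROJECT_NAME is derived from the class name (ChildApp -> child_app)
    # unless the app sets set_project_name = False.
    assert ChildApp.get_env_data() == {
        "COMPOSE_PROJECT_NAME": "child_app",
        "key1": "parent1",
        "key3": "child3",
    }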