├── .gitignore ├── .vscode └── settings.json ├── LICENSE ├── README.md ├── bin ├── __init__.py └── docker-sdp ├── create_venv.sh ├── docker_stack_deploy ├── __init__.py ├── cli │ ├── __init__.py │ └── deployer.py ├── py.typed └── tests │ └── test_yaml_load.py ├── setup.py └── test.sh /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | build/ 12 | develop-eggs/ 13 | dist/ 14 | downloads/ 15 | eggs/ 16 | .eggs/ 17 | lib/ 18 | lib64/ 19 | parts/ 20 | sdist/ 21 | var/ 22 | wheels/ 23 | pip-wheel-metadata/ 24 | share/python-wheels/ 25 | *.egg-info/ 26 | .installed.cfg 27 | *.egg 28 | MANIFEST 29 | 30 | # PyInstaller 31 | # Usually these files are written by a python script from a template 32 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 33 | *.manifest 34 | *.spec 35 | 36 | # Installer logs 37 | pip-log.txt 38 | pip-delete-this-directory.txt 39 | 40 | # Unit test / coverage reports 41 | htmlcov/ 42 | .tox/ 43 | .nox/ 44 | .coverage 45 | .coverage.* 46 | .cache 47 | nosetests.xml 48 | coverage.xml 49 | *.cover 50 | *.py,cover 51 | .hypothesis/ 52 | .pytest_cache/ 53 | 54 | # Translations 55 | *.mo 56 | *.pot 57 | 58 | # Django stuff: 59 | *.log 60 | local_settings.py 61 | db.sqlite3 62 | db.sqlite3-journal 63 | 64 | # Flask stuff: 65 | instance/ 66 | .webassets-cache 67 | 68 | # Scrapy stuff: 69 | .scrapy 70 | 71 | # Sphinx documentation 72 | docs/_build/ 73 | 74 | # PyBuilder 75 | target/ 76 | 77 | # Jupyter Notebook 78 | .ipynb_checkpoints 79 | 80 | # IPython 81 | profile_default/ 82 | ipython_config.py 83 | 84 | # pyenv 85 | .python-version 86 | 87 | # pipenv 88 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 
89 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 90 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 91 | # install all needed dependencies. 92 | #Pipfile.lock 93 | 94 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow 95 | __pypackages__/ 96 | 97 | # Celery stuff 98 | celerybeat-schedule 99 | celerybeat.pid 100 | 101 | # SageMath parsed files 102 | *.sage.py 103 | 104 | # Environments 105 | .env 106 | .venv 107 | env/ 108 | venv/ 109 | ENV/ 110 | env.bak/ 111 | venv.bak/ 112 | 113 | # Spyder project settings 114 | .spyderproject 115 | .spyproject 116 | 117 | # Rope project settings 118 | .ropeproject 119 | 120 | # mkdocs documentation 121 | /site 122 | 123 | # mypy 124 | .mypy_cache/ 125 | .dmypy.json 126 | dmypy.json 127 | 128 | # Pyre type checker 129 | .pyre/ 130 | -------------------------------------------------------------------------------- /.vscode/settings.json: -------------------------------------------------------------------------------- 1 | { 2 | "python.envFile": "${workspaceRoot}/.env", 3 | 4 | "python.sortImports.args": ["-rc", "--atomic"], 5 | 6 | "python.formatting.provider": "black", 7 | "python.formatting.autopep8Path": "${workspaceFolder}/.venv/bin/autopep8", 8 | "python.formatting.autopep8Args": [ 9 | "--max-line-length", 10 | "120", 11 | "--experimental" 12 | ], 13 | 14 | "editor.formatOnPaste": false, 15 | 16 | "python.linting.enabled": true, 17 | "python.linting.lintOnSave": true, 18 | "python.linting.maxNumberOfProblems": 100, 19 | "python.linting.ignorePatterns": [".vscode/*.py", "**/site-packages/**/*.py"], 20 | "python.linting.flake8Enabled": true, 21 | "python.linting.pylintEnabled": false, 22 | "python.linting.pylintPath": "${workspaceFolder}/venv/bin/pylint", 23 | "python.linting.mypyEnabled": false, 24 | 25 | "python.linting.pylintArgs": ["--disable", "I0011"], 26 | "python.linting.flake8Args": ["--ignore=E24,W504,E501", 
"--verbose", "--max-line-length=120"], 27 | 28 | "python.testing.unittestEnabled": false, 29 | "python.testing.pytestEnabled": true, 30 | "python.testing.pytestArgs": [ 31 | "tests", 32 | "-v", 33 | "-s", 34 | "--no-cov" 35 | ], 36 | "python.testing.autoTestDiscoverOnSaveEnabled": true, 37 | "python.testing.cwd": "${workspaceFolder}", 38 | 39 | "[python]": { 40 | "editor.rulers": [ 41 | 79, 42 | 120 43 | ] 44 | } 45 | } -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2022 NeuroForge GmbH & Co. KG 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 
22 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | Proudly made by [NeuroForge](https://neuroforge.de/) in Bayreuth, Germany. 2 | 3 | docker-stack-deploy (docker-sdp) 4 | ================================ 5 | 6 | docker-stack-deploy (docker-sdp) is a utility that wraps around dockers to but adds the following features: 7 | 8 | - appends the first 12 characters of the SHA-1 hash of the contents of any config/secret to the name to ensure rolling updates always work 9 | 10 | Why? 11 | ---- 12 | 13 | Docker Stack files are a great way to organize your deployments. The problem is, though, that Docker Swarm does not allow changes in secrets and configs. 14 | This means that you have to manually rotate configs/secrets yourself. 15 | 16 | Another smart solution people have come up with is appending the hashcode of the secret/config via an env var in the stack file, like so: 17 | 18 | ``` 19 | secrets: 20 | my_secret: 21 | name: "my_secret_${HASH}" 22 | file: ./my/secret 23 | ``` 24 | 25 | This however requires manually wrapping the deployment script in a process that generates this hash. 26 | 27 | With docker-stack-deploy, this is not required anymore. docker-stack-deploy is a small python script that wraps around the actual `docker stack deploy` command and intercepts any stack files in the arguments. Then, it rewrites the stack files by appending the hash like so: 28 | 29 | ``` 30 | secrets: 31 | my_secret_: 32 | file: ./my/secret 33 | ``` 34 | 35 | Next, it searches for all occurences of the secret/config in service definitions and remaps them accordingly. 36 | 37 | Any manually generated secret names and config names (set via the name property) will be left untouched. 
38 | 39 | Installation 40 | ------------ 41 | 42 | ``` 43 | pip3 install https://github.com/neuroforgede/docker-stack-deploy/archive/refs/tags/0.2.13.zip 44 | ``` 45 | 46 | Usage 47 | ----- 48 | 49 | In your docker stack deploy commands simply replace `docker` with `docker-sdp`. E.g: 50 | 51 | ``` 52 | docker-sdp stack deploy -c my_stack.yml mystack 53 | ``` 54 | 55 | It also supports multiple stack files (inheritance) as long as secrets are not mixed between the files. 56 | 57 | ``` 58 | docker-sdp stack deploy -c my_stack.1.yml -c my_stack.2.yml mystack 59 | ``` 60 | -------------------------------------------------------------------------------- /bin/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/neuroforgede/docker-stack-deploy/fa8c23d60b22b829771f43caec9f9551e8de2dfb/bin/__init__.py -------------------------------------------------------------------------------- /bin/docker-sdp: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | from docker_stack_deploy.cli import deployer 3 | 4 | deployer.main() 5 | exit(0) -------------------------------------------------------------------------------- /create_venv.sh: -------------------------------------------------------------------------------- 1 | python3 -m venv venv -------------------------------------------------------------------------------- /docker_stack_deploy/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/neuroforgede/docker-stack-deploy/fa8c23d60b22b829771f43caec9f9551e8de2dfb/docker_stack_deploy/__init__.py -------------------------------------------------------------------------------- /docker_stack_deploy/cli/__init__.py: -------------------------------------------------------------------------------- 
# https://raw.githubusercontent.com/neuroforgede/docker-stack-deploy/fa8c23d60b22b829771f43caec9f9551e8de2dfb/docker_stack_deploy/cli/__init__.py
# --------------------------------------------------------------------------------
# /docker_stack_deploy/cli/deployer.py:
# --------------------------------------------------------------------------------
"""Wrapper around ``docker stack deploy`` that auto-rotates secrets/configs.

Stack files passed via ``-c``/``--compose-file`` are rewritten so that every
file-based secret/config name gets the first 12 hex characters of the SHA-1
of its file contents appended, and all service-level references are remapped
accordingly. The rewritten files are handed to the real docker binary.
"""
from tempfile import NamedTemporaryFile
from typing import Dict, Any, Tuple, List, Literal
import yaml
import sys
import os
import hashlib
import subprocess
from collections.abc import Mapping
from copy import deepcopy
from shutil import which

# Verbose logging is opt-in via either env var (the second name is kept for
# backwards compatibility with older releases).
VERBOSE: bool = (
    os.getenv("DOCKER_SWARM_DEPLOY_VERBOSE") == "1"
    or os.getenv("SWARM_DEPLOYER_VERBOSE") == "1"
)
# Captured once at import time; relative paths in stack files resolve here.
WORKING_DIRECTORY = os.getcwd()


def full_path(path: str) -> str:
    """Return the absolute path of *path*, raising if no file exists there."""
    ret_path = os.path.abspath(path)
    if not os.path.exists(ret_path):
        raise AssertionError(f"did not find file at path {ret_path}")
    return ret_path


def log(text: str) -> None:
    """Print *text* only when verbose mode is enabled."""
    if VERBOSE:
        print(text)


def augment_secrets_or_config(
    definitions: Dict[str, Any], key: Literal["secrets", "configs"]
) -> Tuple[Dict[str, Any], Dict[str, str]]:
    """Rewrite top-level ``secrets``/``configs`` definitions for auto-rotation.

    For every file-based definition (no ``external`` flag, no explicit
    ``name``), the file path is made absolute and ``_<sha1-prefix>`` is
    appended to the definition key so changed contents yield a new name.

    Returns a tuple ``(augmented_definitions, old_key -> new_key mapping)``.
    """
    if key not in ["configs", "secrets"]:
        raise AssertionError("augmentation only allowed for configs or secrets")

    # BUG FIX: was key[-1], which is the last *character* ("s"), not the
    # singular form. "secrets" -> "secret", "configs" -> "config".
    key_singular = key[:-1]

    augmented: Dict[str, Any] = dict()
    new_keys: Dict[str, str] = dict()

    # loop variable renamed so it no longer shadows the `key` parameter
    for def_key, definition in definitions.items():
        augmented_definition = deepcopy(definition)

        external = definition.get("external", False)
        if external:
            if VERBOSE:
                print(
                    f"external=true detected in definition with key {def_key}. Skipping auto-rotation"
                )

            augmented[def_key] = augmented_definition
            # leave the key as is, as we will not do any auto-rotation anyways
            new_keys[def_key] = def_key
            continue

        path = definition.get("file")
        if not path:
            raise AssertionError(
                f"file path not set in {key_singular}, not supported yet"
            )

        # relative paths in the stack file are resolved against the cwd
        if not path.startswith('/'):
            path = os.path.join(WORKING_DIRECTORY, path)

        if not os.path.exists(path):
            raise AssertionError(f"did not find file at path {path}")

        augmented_definition["file"] = os.path.normpath(path)

        if "name" in definition:
            if VERBOSE:
                print(
                    f"name detected in definition with key {def_key}. Skipping auto-rotation"
                )

            augmented[def_key] = augmented_definition
            # leave the key as is, as we will not do any auto-rotation anyways
            new_keys[def_key] = def_key
            continue

        with open(path, "rb") as secret_file:
            version = hashlib.sha1(secret_file.read()).hexdigest()[:12]

        new_key = def_key + "_" + version

        # docker object names are limited to 64 characters
        if len(new_key) > 64:
            print(
                f"hashed {key_singular} with key and version is longer than 64 characters ({new_key}), please shorten it"
            )

        augmented[new_key] = augmented_definition
        new_keys[def_key] = new_key

    return augmented, new_keys


def augment_services(
    services: Dict[str, Any],
    new_secret_keys: Dict[str, str],
    new_config_keys: Dict[str, str],
) -> Dict[str, Any]:
    """Remap service-level secret/config ``source`` keys and env_file paths.

    *new_secret_keys*/*new_config_keys* are the old->new mappings produced by
    :func:`augment_secrets_or_config`. env_file paths are made absolute
    because the rewritten stack file lives in a temp directory, not next to
    the original file.
    """
    augmented = deepcopy(services)

    for service_key, service_definition in augmented.items():
        augmented_service_definition = deepcopy(service_definition)

        if "secrets" in augmented_service_definition:
            augmented_secret_list = []
            for elem in augmented_service_definition["secrets"]:
                # only the long (mapping) syntax is supported; the short
                # string syntax has no explicit `source` field to rewrite
                if not isinstance(elem, Mapping):
                    raise AssertionError(
                        f"secret {elem} in service {service_key} was not defined as a mapping. This syntax is unsupported by docker-stack-deploy."
                    )
                augmented_secret_list.append(
                    {**elem, "source": new_secret_keys[elem["source"]]}
                )

            augmented_service_definition["secrets"] = augmented_secret_list

        if "configs" in augmented_service_definition:
            augmented_config_list = []
            for elem in augmented_service_definition["configs"]:
                if not isinstance(elem, Mapping):
                    raise AssertionError(
                        f"config {elem} in service {service_key} was not defined as a mapping. This syntax is unsupported by docker-stack-deploy"
                    )
                augmented_config_list.append(
                    {**elem, "source": new_config_keys[elem["source"]]}
                )

            augmented_service_definition["configs"] = augmented_config_list

        if "env_file" in augmented_service_definition:
            original_env_file = augmented_service_definition["env_file"]
            if not isinstance(original_env_file, (str, list)):
                raise AssertionError(
                    f"env_file in {service_key} was not defined as either a string or a list. This is invalid according to the compose spec."
                )
            if isinstance(original_env_file, list):
                augmented_service_definition["env_file"] = [full_path(elem) for elem in original_env_file]
            else:
                augmented_service_definition["env_file"] = full_path(original_env_file)

        augmented[service_key] = augmented_service_definition

    return augmented


def find_all_stack_files(argv: List[str]) -> List[Tuple[int, str]]:
    """Return ``(argv_index, value)`` for every argument that follows a
    ``-c``/``--compose-file`` flag."""
    ret: List[Tuple[int, str]] = []

    found_c = False

    for index, value in enumerate(argv):
        if value == "-c" or value == "--compose-file":
            found_c = True
            continue
        if found_c:
            ret.append((index, value))
            found_c = False

    return ret


def private_opener(path, flags):
    """``open``-compatible opener creating files with 0600 permissions.

    NOTE(review): currently unused in this module; presumably intended for
    the temporary stack files — confirm before removing.
    """
    return os.open(path, flags, 0o600)


def _find_docker_binary() -> str:
    """Locate the real docker binary, preferring well-known absolute paths.

    Raises AssertionError with a clear message when docker is not installed,
    instead of letting subprocess fail on a ``None`` executable.
    """
    for candidate in ("/bin/docker", "/usr/bin/docker"):
        if os.path.isfile(candidate):
            return candidate
    found = which("docker")
    if found is None:
        raise AssertionError("could not find the docker binary on PATH")
    return found


def docker_stack_deploy() -> None:
    """Rewrite all stack files named on the CLI, then run the real docker command.

    The rewritten stack files are written to temp files which are always
    cleaned up, even when parsing or deployment fails.
    """
    all_stack_files = find_all_stack_files(sys.argv)
    new_stack_files: Dict[str, str] = dict()
    try:
        for argv_idx, stack_file in all_stack_files:
            if stack_file in new_stack_files:
                raise AssertionError(f"repeated stackfile {stack_file}")

            merged_augmented_secrets = dict()
            merged_new_secret_keys = dict()
            merged_augmented_configs = dict()
            merged_new_config_keys = dict()

            _stackfile_path = stack_file
            if _stackfile_path == "-":
                # docker accepts "-" for stdin; mirror that behaviour
                _stackfile_path = "/dev/stdin"

            with open(_stackfile_path) as stack_yml:
                try:
                    # NOTE(review): FullLoader can construct non-trivial python
                    # objects from tagged YAML; stack files are assumed trusted.
                    parsed = yaml.load(stack_yml.read(), yaml.FullLoader)
                except Exception:
                    print(f"failed to parse stackfile {stack_file}", file=sys.stderr)
                    raise

            parsed_augmented = deepcopy(parsed)

            augmented_secrets, new_secret_keys = augment_secrets_or_config(
                parsed.get("secrets", dict()), "secrets"
            )
            merged_augmented_secrets = {
                **merged_augmented_secrets,
                **augmented_secrets,
            }
            merged_new_secret_keys = {**merged_new_secret_keys, **new_secret_keys}
            parsed_augmented["secrets"] = augmented_secrets

            augmented_configs, new_config_keys = augment_secrets_or_config(
                parsed.get("configs", dict()), "configs"
            )
            merged_augmented_configs = {
                **merged_augmented_configs,
                **augmented_configs,
            }
            merged_new_config_keys = {**merged_new_config_keys, **new_config_keys}
            parsed_augmented["configs"] = augmented_configs

            augmented_services = augment_services(
                parsed.get("services", dict()),
                new_secret_keys=merged_new_secret_keys,
                new_config_keys=merged_new_config_keys,
            )
            parsed_augmented["services"] = augmented_services

            with NamedTemporaryFile("w", delete=False) as file:
                new_stack_files[stack_file] = file.name
                yaml.dump(parsed_augmented, file)
            if VERBOSE:
                print(f"augmented stack file for {stack_file}:\n")
                print(yaml.dump(parsed_augmented))

        forwarded_params: List[str] = sys.argv[1:]
        # argv_idx indexes sys.argv; forwarded_params drops argv[0], hence -1
        for argv_idx, stack_file in all_stack_files:
            forwarded_params[argv_idx - 1] = new_stack_files[stack_file]

        docker_binary = _find_docker_binary()

        new_cmd = [docker_binary, *forwarded_params]
        if VERBOSE:
            print("running docker command:")
            print(" ".join(new_cmd))
            print("")

        subprocess.check_call(
            new_cmd,
            env={
                **os.environ,
            },
            cwd=os.getcwd(),
        )
        log("\nsuccess.")
    finally:
        log("cleaning up.")
        for argv_idx, stack_file in all_stack_files:
            # best effort: the temp file may never have been created if we
            # failed before reaching this stack file (KeyError), or may have
            # been removed already (OSError)
            try:
                os.unlink(new_stack_files[stack_file])
            except (KeyError, OSError):
                pass
        log("done.")


def usage() -> None:
    """Print docker-sdp's own banner followed by ``docker stack deploy --help``."""
    # NOTE(review): the version below disagrees with setup.py (0.2.10) and the
    # README install tag (0.2.13) — confirm the single source of truth.
    print(
        """
docker-stack-deploy (docker-sdp) v0.2.11
=======================================

docker-stack-deploy (docker-sdp) is a utility that wraps around the docker CLI and adds the following features:

- appends the first 12 characters of the SHA-1 hash of the contents of any config/secret to the name to ensure rolling updates always work

Usage: docker-sdp stack deploy [...]

Usage of docker stack deploy follows:"""
    )
    subprocess.check_call(
        [_find_docker_binary(), "stack", "deploy", "--help"],
        env={
            **os.environ,
        },
        cwd=os.getcwd(),
    )


def main() -> None:
    """CLI entry point: dispatch ``stack deploy`` invocations, else show usage.

    BUG FIX: previously an invocation with >= 3 arguments that was not
    ``stack deploy`` fell through both branches and silently did nothing.
    """
    if len(sys.argv) >= 3 and sys.argv[1] == "stack" and sys.argv[2] == "deploy":
        docker_stack_deploy()
    else:
        usage()
# --------------------------------------------------------------------------------
# /docker_stack_deploy/py.typed:
# https://raw.githubusercontent.com/neuroforgede/docker-stack-deploy/fa8c23d60b22b829771f43caec9f9551e8de2dfb/docker_stack_deploy/py.typed
# --------------------------------------------------------------------------------
# /docker_stack_deploy/tests/test_yaml_load.py:
# --------------------------------------------------------------------------------
from unittest import TestCase

# Regression fixture: a compose entrypoint using the $$ escape for a literal $.
DOUBLE_DOLLAR = """
prom2teams:
  image: idealista/prom2teams:3.3.0
  entrypoint: /bin/sh -c "python /opt/prom2teams/replace_config.py && exec prom2teams --loglevel $$PROM2TEAMS_LOGLEVEL"
"""


class YAMLDollarTest(TestCase):
    """Regression test: $$ escapes must survive yaml.load without error."""

    def test_double_dollar(self) -> None:
        parsed = yaml.load(DOUBLE_DOLLAR, yaml.FullLoader)
        # the document must parse; $$ is plain text as far as YAML is concerned
        self.assertIn("prom2teams", parsed)

# --------------------------------------------------------------------------------
# /setup.py:
# --------------------------------------------------------------------------------
import setuptools
import unittest


def cli_test_suite():
    """Discover the unittest suite (referenced by ``test_suite`` below)."""
    test_loader = unittest.TestLoader()
    test_suite = test_loader.discover("tests", pattern="test_*.py")
    return test_suite


# the README doubles as the PyPI long description
with open("README.md", "r", encoding="utf-8") as fh:
    long_description = fh.read()

setuptools.setup(
    name="docker-sdp",
    scripts=["bin/docker-sdp"],
    # NOTE(review): version lags the tag referenced in the README (0.2.13)
    # and the usage() banner (0.2.11) — confirm the single source of truth.
    version="0.2.10",
    author="NeuroForge GmbH & Co. KG",
    author_email="kontakt@neuroforge.de",
    description="docker-sdp",
    long_description=long_description,
    long_description_content_type="text/markdown",
    url="https://neuroforge.de/",
    package_data={"docker_stack_deploy": ["py.typed"]},
    packages=setuptools.find_packages(exclude=["*.tests", "*.tests.*", "tests.*", "tests"]),
    classifiers=[
        "Programming Language :: Python :: 3.8",
        "License :: OSI Approved :: MIT License",
        "Operating System :: POSIX :: Linux",
    ],
    # NOTE(review): the test_suite keyword is deprecated in modern setuptools.
    test_suite="setup.cli_test_suite",
    python_requires=">=3.8",
    # BUG FIX: mypy is a development-time type checker, not a runtime
    # dependency — moved from install_requires to the "dev" extra.
    install_requires=["pyyaml"],
    extras_require={"dev": ["mypy>=0.800"]},
)
# --------------------------------------------------------------------------------
# /test.sh:
# --------------------------------------------------------------------------------
#   python -m unittest discover docker_stack_deploy/tests
# --------------------------------------------------------------------------------