├── .gitignore
├── .idea
│   ├── .name
│   ├── misc.xml
│   ├── modules.xml
│   ├── python-docker-compose-templer.iml
│   └── vcs.xml
├── CHANGELOG.md
├── LICENSE.txt
├── README.md
├── bin
│   └── docker-compose-templer
├── docker_compose_templer
│   ├── __init__.py
│   ├── auto_renderer.py
│   ├── cached_file.py
│   ├── context.py
│   ├── definition.py
│   ├── event.py
│   ├── jinja_filter.py
│   ├── jinja_renderer.py
│   ├── log.py
│   ├── template.py
│   └── utils.py
├── examples
│   ├── stack-global.yml
│   ├── stack-user1.yml
│   ├── stack-user2.yml
│   ├── stacks
│   │   ├── global
│   │   │   ├── 00-mariadb.yml
│   │   │   └── 00-reverse-proxy.yml
│   │   ├── user1
│   │   │   ├── 00-redis.yml
│   │   │   └── 10-nextcloud.yml
│   │   └── user2
│   │       ├── 00-redis.yml
│   │       └── 10-nextcloud.yml
│   ├── templates
│   │   ├── mariadb.yml.j2
│   │   ├── nextcloud.yml.j2
│   │   ├── redis.yml.j2
│   │   └── reverse-proxy.yml.j2
│   └── vars-global.yml
├── requirements.txt
├── setup.py
└── tests
    ├── files
    │   └── read.txt
    ├── test_CachedFile.py
    ├── test_ContextChainElement.py
    ├── test_JinjaRenderer.py
    └── test_utils.py
/.gitignore:
--------------------------------------------------------------------------------
1 | # Byte-compiled / optimized / DLL files
2 | __pycache__/
3 | *.py[cod]
4 | *$py.class
5 |
6 | # C extensions
7 | *.so
8 |
9 | # Distribution / packaging
10 | .Python
11 | build/
12 | develop-eggs/
13 | dist/
14 | downloads/
15 | eggs/
16 | .eggs/
17 | lib/
18 | lib64/
19 | parts/
20 | sdist/
21 | var/
22 | wheels/
23 | *.egg-info/
24 | .installed.cfg
25 | *.egg
26 | MANIFEST
27 |
28 | # PyInstaller
29 | # Usually these files are written by a python script from a template
30 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
31 | *.manifest
32 | *.spec
33 |
34 | # Installer logs
35 | pip-log.txt
36 | pip-delete-this-directory.txt
37 |
38 | # Unit test / coverage reports
39 | htmlcov/
40 | .tox/
41 | .coverage
42 | .coverage.*
43 | .cache
44 | nosetests.xml
45 | coverage.xml
46 | *.cover
47 | .hypothesis/
48 | .pytest_cache/
49 |
50 | # Translations
51 | *.mo
52 | *.pot
53 |
54 | # Django stuff:
55 | *.log
56 | local_settings.py
57 | db.sqlite3
58 |
59 | # Flask stuff:
60 | instance/
61 | .webassets-cache
62 |
63 | # Scrapy stuff:
64 | .scrapy
65 |
66 | # Sphinx documentation
67 | docs/_build/
68 |
69 | # PyBuilder
70 | target/
71 |
72 | # Jupyter Notebook
73 | .ipynb_checkpoints
74 |
75 | # IPython
76 | profile_default/
77 | ipython_config.py
78 |
79 | # pyenv
80 | .python-version
81 |
82 | # celery beat schedule file
83 | celerybeat-schedule
84 |
85 | # SageMath parsed files
86 | *.sage.py
87 |
88 | # Environments
89 | .env
90 | .venv
91 | env/
92 | venv/
93 | ENV/
94 | env.bak/
95 | venv.bak/
96 |
97 | # Spyder project settings
98 | .spyderproject
99 | .spyproject
100 |
101 | # Rope project settings
102 | .ropeproject
103 |
104 | # mkdocs documentation
105 | /site
106 |
107 | # mypy
108 | .mypy_cache/
109 | .dmypy.json
110 | dmypy.json
111 |
112 |
113 | # Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio and WebStorm
114 | # Reference: https://intellij-support.jetbrains.com/hc/en-us/articles/206544839
115 |
116 | # User-specific stuff
117 | .idea/**/workspace.xml
118 | .idea/**/tasks.xml
119 | .idea/**/usage.statistics.xml
120 | .idea/**/dictionaries
121 | .idea/**/shelf
122 |
123 | # Generated files
124 | .idea/**/contentModel.xml
125 |
126 | # Sensitive or high-churn files
127 | .idea/**/dataSources/
128 | .idea/**/dataSources.ids
129 | .idea/**/dataSources.local.xml
130 | .idea/**/sqlDataSources.xml
131 | .idea/**/dynamic.xml
132 | .idea/**/uiDesigner.xml
133 | .idea/**/dbnavigator.xml
134 |
135 | # Gradle
136 | .idea/**/gradle.xml
137 | .idea/**/libraries
138 |
139 | # Gradle and Maven with auto-import
140 | # When using Gradle or Maven with auto-import, you should exclude module files,
141 | # since they will be recreated, and may cause churn. Uncomment if using
142 | # auto-import.
143 | # .idea/modules.xml
144 | # .idea/*.iml
145 | # .idea/modules
146 |
147 | # CMake
148 | cmake-build-*/
149 |
150 | # Mongo Explorer plugin
151 | .idea/**/mongoSettings.xml
152 |
153 | # File-based project format
154 | *.iws
155 |
156 | # IntelliJ
157 | out/
158 |
159 | # mpeltonen/sbt-idea plugin
160 | .idea_modules/
161 |
162 | # JIRA plugin
163 | atlassian-ide-plugin.xml
164 |
165 | # Cursive Clojure plugin
166 | .idea/replstate.xml
167 |
168 | # Crashlytics plugin (for Android Studio and IntelliJ)
169 | com_crashlytics_export_strings.xml
170 | crashlytics.properties
171 | crashlytics-build.properties
172 | fabric.properties
173 |
174 | # Editor-based Rest Client
175 | .idea/httpRequests
--------------------------------------------------------------------------------
/.idea/.name:
--------------------------------------------------------------------------------
1 | python-docker-compose-templer
--------------------------------------------------------------------------------
/.idea/misc.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
--------------------------------------------------------------------------------
/.idea/modules.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
--------------------------------------------------------------------------------
/.idea/python-docker-compose-templer.iml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
--------------------------------------------------------------------------------
/.idea/vcs.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
--------------------------------------------------------------------------------
/CHANGELOG.md:
--------------------------------------------------------------------------------
1 | # Changelog
2 | ## [Unreleased]
3 | ### Added
4 |
5 | ## 1.1.0 - 2019-04-06
6 | ### Added
7 | - This CHANGELOG file
8 | - Default message for `mandatory` filter
9 |
10 | ### Changed
11 | - Refactored code
12 | - Renamed executable to `docker-compose-templer`
13 | - Updated examples
14 |
15 | ### Fixed
16 | - Fixed `to_bool` filter
17 |
18 | ### Removed
19 | - Explicit start in YAML files
20 |
21 | ## 1.0.2 - 2018-11-10
22 | ### Changed
23 | - Indentation of rendered files
24 | - Split only very long lines
25 |
26 | ### Fixed
27 | - Perform a type check for 'templates'
28 |
29 | ## 1.0.1 - 2018-09-21
30 | ### Added
31 | - Github Release
--------------------------------------------------------------------------------
/LICENSE.txt:
--------------------------------------------------------------------------------
1 | GNU LESSER GENERAL PUBLIC LICENSE
2 | Version 3, 29 June 2007
3 |
4 | Copyright (C) 2007 Free Software Foundation, Inc.
5 | Everyone is permitted to copy and distribute verbatim copies
6 | of this license document, but changing it is not allowed.
7 |
8 |
9 | This version of the GNU Lesser General Public License incorporates
10 | the terms and conditions of version 3 of the GNU General Public
11 | License, supplemented by the additional permissions listed below.
12 |
13 | 0. Additional Definitions.
14 |
15 | As used herein, "this License" refers to version 3 of the GNU Lesser
16 | General Public License, and the "GNU GPL" refers to version 3 of the GNU
17 | General Public License.
18 |
19 | "The Library" refers to a covered work governed by this License,
20 | other than an Application or a Combined Work as defined below.
21 |
22 | An "Application" is any work that makes use of an interface provided
23 | by the Library, but which is not otherwise based on the Library.
24 | Defining a subclass of a class defined by the Library is deemed a mode
25 | of using an interface provided by the Library.
26 |
27 | A "Combined Work" is a work produced by combining or linking an
28 | Application with the Library. The particular version of the Library
29 | with which the Combined Work was made is also called the "Linked
30 | Version".
31 |
32 | The "Minimal Corresponding Source" for a Combined Work means the
33 | Corresponding Source for the Combined Work, excluding any source code
34 | for portions of the Combined Work that, considered in isolation, are
35 | based on the Application, and not on the Linked Version.
36 |
37 | The "Corresponding Application Code" for a Combined Work means the
38 | object code and/or source code for the Application, including any data
39 | and utility programs needed for reproducing the Combined Work from the
40 | Application, but excluding the System Libraries of the Combined Work.
41 |
42 | 1. Exception to Section 3 of the GNU GPL.
43 |
44 | You may convey a covered work under sections 3 and 4 of this License
45 | without being bound by section 3 of the GNU GPL.
46 |
47 | 2. Conveying Modified Versions.
48 |
49 | If you modify a copy of the Library, and, in your modifications, a
50 | facility refers to a function or data to be supplied by an Application
51 | that uses the facility (other than as an argument passed when the
52 | facility is invoked), then you may convey a copy of the modified
53 | version:
54 |
55 | a) under this License, provided that you make a good faith effort to
56 | ensure that, in the event an Application does not supply the
57 | function or data, the facility still operates, and performs
58 | whatever part of its purpose remains meaningful, or
59 |
60 | b) under the GNU GPL, with none of the additional permissions of
61 | this License applicable to that copy.
62 |
63 | 3. Object Code Incorporating Material from Library Header Files.
64 |
65 | The object code form of an Application may incorporate material from
66 | a header file that is part of the Library. You may convey such object
67 | code under terms of your choice, provided that, if the incorporated
68 | material is not limited to numerical parameters, data structure
69 | layouts and accessors, or small macros, inline functions and templates
70 | (ten or fewer lines in length), you do both of the following:
71 |
72 | a) Give prominent notice with each copy of the object code that the
73 | Library is used in it and that the Library and its use are
74 | covered by this License.
75 |
76 | b) Accompany the object code with a copy of the GNU GPL and this license
77 | document.
78 |
79 | 4. Combined Works.
80 |
81 | You may convey a Combined Work under terms of your choice that,
82 | taken together, effectively do not restrict modification of the
83 | portions of the Library contained in the Combined Work and reverse
84 | engineering for debugging such modifications, if you also do each of
85 | the following:
86 |
87 | a) Give prominent notice with each copy of the Combined Work that
88 | the Library is used in it and that the Library and its use are
89 | covered by this License.
90 |
91 | b) Accompany the Combined Work with a copy of the GNU GPL and this license
92 | document.
93 |
94 | c) For a Combined Work that displays copyright notices during
95 | execution, include the copyright notice for the Library among
96 | these notices, as well as a reference directing the user to the
97 | copies of the GNU GPL and this license document.
98 |
99 | d) Do one of the following:
100 |
101 | 0) Convey the Minimal Corresponding Source under the terms of this
102 | License, and the Corresponding Application Code in a form
103 | suitable for, and under terms that permit, the user to
104 | recombine or relink the Application with a modified version of
105 | the Linked Version to produce a modified Combined Work, in the
106 | manner specified by section 6 of the GNU GPL for conveying
107 | Corresponding Source.
108 |
109 | 1) Use a suitable shared library mechanism for linking with the
110 | Library. A suitable mechanism is one that (a) uses at run time
111 | a copy of the Library already present on the user's computer
112 | system, and (b) will operate properly with a modified version
113 | of the Library that is interface-compatible with the Linked
114 | Version.
115 |
116 | e) Provide Installation Information, but only if you would otherwise
117 | be required to provide such information under section 6 of the
118 | GNU GPL, and only to the extent that such information is
119 | necessary to install and execute a modified version of the
120 | Combined Work produced by recombining or relinking the
121 | Application with a modified version of the Linked Version. (If
122 | you use option 4d0, the Installation Information must accompany
123 | the Minimal Corresponding Source and Corresponding Application
124 | Code. If you use option 4d1, you must provide the Installation
125 | Information in the manner specified by section 6 of the GNU GPL
126 | for conveying Corresponding Source.)
127 |
128 | 5. Combined Libraries.
129 |
130 | You may place library facilities that are a work based on the
131 | Library side by side in a single library together with other library
132 | facilities that are not Applications and are not covered by this
133 | License, and convey such a combined library under terms of your
134 | choice, if you do both of the following:
135 |
136 | a) Accompany the combined library with a copy of the same work based
137 | on the Library, uncombined with any other library facilities,
138 | conveyed under the terms of this License.
139 |
140 | b) Give prominent notice with the combined library that part of it
141 | is a work based on the Library, and explaining where to find the
142 | accompanying uncombined form of the same work.
143 |
144 | 6. Revised Versions of the GNU Lesser General Public License.
145 |
146 | The Free Software Foundation may publish revised and/or new versions
147 | of the GNU Lesser General Public License from time to time. Such new
148 | versions will be similar in spirit to the present version, but may
149 | differ in detail to address new problems or concerns.
150 |
151 | Each version is given a distinguishing version number. If the
152 | Library as you received it specifies that a certain numbered version
153 | of the GNU Lesser General Public License "or any later version"
154 | applies to it, you have the option of following the terms and
155 | conditions either of that published version or of any later version
156 | published by the Free Software Foundation. If the Library as you
157 | received it does not specify a version number of the GNU Lesser
158 | General Public License, you may choose any version of the GNU Lesser
159 | General Public License ever published by the Free Software Foundation.
160 |
161 | If the Library as you received it specifies that a proxy can decide
162 | whether future versions of the GNU Lesser General Public License shall
163 | apply, that proxy's public statement of acceptance of any version is
164 | permanent authorization for you to choose that version for the
165 | Library.
166 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Docker Compose Templer
2 |
3 | This is a little Python3 utility that adds more dynamism to [Docker Compose or Docker Stack files](https://docs.docker.com/compose/compose-file/) by utilizing the [Jinja2 template engine](http://jinja.pocoo.org/).
4 |
5 | Docker Compose (DC) files are quite static in nature. It is possible to use [variable substitution](https://docs.docker.com/compose/compose-file/#variable-substitution) to run slightly different container configurations based on a single DC file. This, however, doesn't allow for complex variations in networks, volumes, etc., nor does it allow for proper code reuse. Therefore I decided to create this Python program to introduce Jinja2 templating to DC files. A _definition file_ says where to find the templates, what variables to use and where to put the rendered files.
6 |
7 | The documentation on the Jinja2 syntax can be found [here](http://jinja.pocoo.org/docs/dev/templates/).
8 |
9 | **Features:**
10 |
11 | * templating using Jinja2
12 | * using YAML syntax for definition and variable files
13 | * monitoring of file changes and automatic rendering of templates (especially useful during development)
14 | * using some [extra Jinja filters](#extra-jinja2-filters) (modeled after Ansible's filters)
15 |
16 | **Table of contents:**
17 |
18 | * [Installation](#installation)
19 | * [Usage](#usage)
20 | * [Command line arguments](#command-line-arguments)
21 | * [Definition File](#definition-file)
22 | * [Templates](#templates)
23 | * [Examples](#examples)
24 | * [Extra Jinja2 Filters](#extra-jinja2-filters)
25 | * [Todo](#todo)
26 | * [License](#license)
27 |
28 | ---
29 |
30 | ## Installation
31 |
32 | Install directly from Github:
33 |
34 | ```sh
35 | pip install git+https://github.com/Aisbergg/python-docker-compose-templer@v1.1.0
36 | ```
37 |
38 | Install from PyPi:
39 |
40 | ```sh
41 | pip install docker-compose-templer
42 | ```
43 |
44 | If you would like to use the optional _auto render_ function, you have to install the [Pyinotify](https://github.com/seb-m/pyinotify) package as well:
45 |
46 | ```sh
47 | pip install pyinotify
48 | ```
49 |
50 | ## Usage
51 |
52 | ### Command line arguments
53 |
54 | ```text
55 | usage: docker-compose-templer [-a] [-f] [-h] [-v] [--version]
56 | definition_file [definition_file ...]
57 |
58 | Render Docker Compose file templates with the power of Jinja2
59 |
60 | positional arguments:
61 | definition_file File that defines what to do.
62 |
63 | optional arguments:
64 | -a, --auto-render Monitor file changes and render templates automatically
65 | -f, --force Overwrite existing files
66 | -h, --help Show this help message and exit
67 | -v, --verbose Enable verbose mode
68 | --version Print the program version and quit
69 | ```
70 |
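For example, a one-shot run that renders all templates from a single definition file and overwrites existing output files could look like this (the file name is made up):

```sh
docker-compose-templer -f my-definition.yml
```

Adding `-a` instead keeps the process running and re-renders whenever a watched file changes (requires Pyinotify, see above).
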
71 | ### Definition File
72 |
73 | The definition file defines what to do. It lists the templates with the variables to be used for rendering and specifies where to put the resulting files. The definition file syntax is as follows:
74 |
75 | ```yaml
76 | # (optional) define global variables to be used in all templates - can contain Jinja syntax
77 | vars:
78 | some_global_var: foo
79 | another_global_var: "{{some_global_var}}bar" # will render to 'foobar'
80 |
81 | # (optional) load global variables from YAML file(s) (order matters) - can contain Jinja syntax
82 | include_vars:
83 | - path/to/file_1.yml
84 | - path/to/file_2.yml
85 |
86 | # template definitions
87 | templates:
88 | # first template
89 | - src: templates/my_template.yml.j2 # source file as Jinja2 template (Jinja syntax can be used on path)
90 | dest: stacks/s1/my_instance.yml # path for resulting file (Jinja syntax can be used on path)
91 | include_vars: variables/s1.yml # (optional) include local variables from YAML file(s)
92 | vars: # (optional) local variables for this template
93 | some_local_var: abc
94 |
95 | # second template
96 | - src: templates/my_template.yml.j2
97 | dest: stacks/s2/my_instance.yml
98 | vars:
99 | some_local_var: xyz
100 | ```
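
A file listed under `include_vars` is itself a plain YAML mapping of variable names to values. A minimal sketch of what `path/to/file_1.yml` from above might contain (hypothetical values):

```yaml
some_global_var: foo
db_name: nextcloud
db_user: "{{ db_name }}" # previously defined variables can be referenced
```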
101 |
102 | The variables can themselves contain Jinja syntax; you only have to make sure a variable is defined prior to its usage. The different sources of variables are merged together in the following order (see the sketch below):
103 |
104 | 1. global `include_vars`
105 | 2. global `vars`
106 | 3. template `include_vars`
107 | 4. template `vars`
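
Later sources override earlier ones: a key set in a template's `vars` wins over the same key from any global source. A minimal sketch (file names are made up):

```yaml
vars:
  some_var: foo # 2. overrides values from the global include_vars ...

templates:
  - src: templates/my_template.yml.j2
    dest: my_instance.yml
    vars:
      some_var: xyz # 4. ... and is overridden here, so the template sees 'xyz'
```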
108 |
109 | ### Templates
110 |
111 | The templates are rendered with Jinja2 using the global and local variables defined in the definition file. Any Jinja2-specific syntax can be used.
112 |
113 | In addition to the [extra filters](#extra-jinja2-filters), the variable `omit` can be used in the templates. This concept is borrowed from Ansible; its purpose is to omit options from the DC file when a variable is not defined. In the following example the env variable `VAR2` will be omitted from the rendered file if `my_var` was not defined in the definition file:
114 |
115 | ```yaml
116 | services:
117 | foo:
118 | environment:
119 | - "VAR1=abc"
120 | - "VAR2={{ my_var|default(omit) }}"
121 | ...
122 | ```
123 |
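If `my_var` is left undefined, the example would render to the following, with the whole `VAR2` entry dropped:

```yaml
services:
  foo:
    environment:
      - "VAR1=abc"
    ...
```
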
124 | Because of the omit functionality, the renderer only renders YAML files; generic file types do not work.
125 |
126 | ### Examples
127 |
128 | Examples can be found in the [`examples`](examples) directory. There are three stacks defined: one global stack and two user stacks. The user stacks each define a _Nextcloud_ and a _Redis_ service. Both user stacks depend on the global one, meaning they share a global _MariaDB_ and a reverse proxy. To run this example, execute the following command inside the `examples/` directory: `docker-compose-templer -f stack-global.yml stack-user1.yml stack-user2.yml`
129 |
130 | ## Extra Jinja2 Filters
131 |
132 | In addition to the [Jinja built-in filters](http://jinja.pocoo.org/docs/latest/templates/#builtin-filters) the following extra filters are implemented. The filters are based on the filters in Ansible:
133 |
134 | Filter | Description
135 | -------------------------------------------------------------------------|-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
136 | `mandatory(msg)` | If the variable is undefined, an error with the message `msg` will be raised.
137 | `regex_escape()` | Escape special characters to safely use a string in a regex search.
138 | `regex_findall(pattern, ignorecase=False, multiline=False)` | Find all occurrences of regex matches.
139 | `regex_replace(pattern, replacement, ignorecase=False, multiline=False)` | Perform a regex search and replace operation.
140 | `regex_search(pattern, groups, ignorecase=False, multiline=False)` | Search with regex. If one or more match `groups` are specified, the search result will be a list containing only those group matches. The groups are specified either by their position (e.g. `\1`) or by their name (e.g. foo: `\g<foo>`).
141 | `regex_contains(pattern, ignorecase=False, multiline=False)` | Yields `true` if the string contains the given regex pattern.
142 | `to_bool(default_value=None)` | Converts a string to a bool value. The `default_value` will be used if the string cannot be converted.
143 | `to_yaml(indent=2, [...])` | Converts a value to YAML.
144 | `to_json([...])` | Converts a value to JSON.
145 | `to_nice_json(indent=2, [...])` | Converts a value to human readable JSON.
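
For illustration, a template snippet combining a few of these filters (all variable names are made up):

```yaml
services:
  cache:
    image: "redis:{{ redis_version | mandatory('redis_version must be set') }}"
    environment:
      - "DEBUG={{ debug | default('no') | to_bool(default_value=False) }}"
      - "HOST_ALIAS={{ hostname | regex_replace('[^a-z0-9-]', '-') }}"
```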
146 |
147 | ## Todo
148 |
149 | * Add `pre_render` and `post_render` options
150 | * Write more tests
151 |
152 | ## License
153 |
154 | _Docker Compose Templer_ is released under the LGPL v3 License. See [LICENSE.txt](LICENSE.txt) for more information.
155 |
--------------------------------------------------------------------------------
/bin/docker-compose-templer:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 |
3 | import argparse
4 | import sys
5 |
6 | from jinja2 import __version__ as jinja_version
7 |
8 | from docker_compose_templer import __version__ as docker_compose_templer_version
9 | from docker_compose_templer.definition import Definition
10 | from docker_compose_templer.log import Log
11 |
12 | # raise error when running with Python2
13 | if sys.version_info[0] < 3:
14 | raise SystemExit('ERROR: Templer requires a Python3 runtime! Your current Python version is: %s' % ''.join(
15 | sys.version.splitlines()))
16 |
17 | if __name__ == '__main__':
18 | # parse arguments
19 | parser = argparse.ArgumentParser(
20 | prog='docker-compose-templer',
21 | description='Render Docker Compose file templates with the power of Jinja2',
22 | add_help=False)
23 | parser.add_argument('-a', '--auto-render', dest='auto_render',
24 | action='store_true', default=False,
25 | help="Monitor file changes and render templates automatically")
26 | parser.add_argument('-f', '--force', dest='force_overwrite',
27 | action='store_true', default=False, help="Overwrite existing files")
28 | parser.add_argument("-h", "--help", action="help",
29 | help="Show this help message and exit")
30 | parser.add_argument('-v', '--verbose', dest='verbose', action='count',
31 | default=0, help="Enable verbose mode")
32 | parser.add_argument('--version', action='version', version='Docker Compose Templer {0}, Jinja2 {1}'.format(
33 | docker_compose_templer_version, jinja_version), help="Print the program version and quit")
34 | parser.add_argument('definition_file', nargs='+',
35 | help="File that defines what to do.")
36 | args = parser.parse_args(sys.argv[1:])
37 |
38 | # initialize dumb logger
39 | levels = [Log.ERROR, Log.INFO, Log.DEBUG]
40 | Log.level = levels[min(len(levels) - 1, args.verbose + 1)]
41 |
42 | if args.auto_render:
43 | try:
44 | import pyinotify
45 | except ImportError:
46 | Log.error("In order to use the auto render functionality the package 'pyinotify' needs to be installed")
47 | exit(1)
48 |
49 | definitions = [
50 | Definition(
51 | path=path,
52 | force_overwrite=args.force_overwrite,
53 | watch_changes=args.auto_render
54 | ) for path in args.definition_file
55 | ]
56 | for d in definitions:
57 | if not d.file.exists():
58 | Log.error("Definition file does not exist: '{0}'".format(d.file.path))
59 | exit(1)
60 |
61 | if args.auto_render:
62 | from docker_compose_templer.auto_renderer import AutoRenderer
63 |
64 | ar = AutoRenderer(definitions)
65 | ar.start()
66 |
67 | else:
68 | some_renders_failed = False
69 | # process definition files
70 | for df in definitions:
71 | if not df.process():
72 | some_renders_failed = True
73 |
74 | if some_renders_failed:
75 | Log.error("\nSome renders failed")
76 | exit(1)
77 |
--------------------------------------------------------------------------------
/docker_compose_templer/__init__.py:
--------------------------------------------------------------------------------
1 | __author__ = "Andre Lehmann"
2 | __email__ = "aisberg@posteo.de"
3 | __version__ = '1.1.0'
4 |
5 |
--------------------------------------------------------------------------------
/docker_compose_templer/auto_renderer.py:
--------------------------------------------------------------------------------
1 | import time
2 |
3 | from docker_compose_templer.cached_file import CachedFile
4 | from docker_compose_templer.log import Log
5 |
6 | import pyinotify
7 |
8 |
9 | class AutoRenderer(object):
10 | """The Auto Renderer periodically checks for file changes and dispatches any subscribed events.
11 |
12 | Args:
13 | definitions (Definition): The definitions
14 | Attributes:
15 | definitions (Definition): The definitions
16 | """
17 |
18 | def __init__(self, definitions):
19 | self.definitions = definitions
20 |
21 | def start(self):
22 | """Starts the Auto Renderer."""
23 | Log.info("Auto renderer started")
24 |
25 | # render on start
26 | for d in self.definitions:
27 | d.process()
28 |
29 | Log.info("\nWaiting for changes...")
30 |
31 | # start file change listener
32 | while True:
33 | try:
34 | for notifier in [f.notifier for _, f in CachedFile.files.items()]:
35 | try:
36 | if notifier.check_events():
37 | notifier.read_events()
38 | notifier.process_events()
39 | except KeyboardInterrupt:
40 | raise
41 | except Exception:
42 | break
43 | time.sleep(0.5)
44 | except KeyboardInterrupt:
45 | break
46 |
47 | Log.info("\nAuto renderer stopped")
48 |
--------------------------------------------------------------------------------
/docker_compose_templer/cached_file.py:
--------------------------------------------------------------------------------
1 | import io
2 | import os
3 |
4 | from docker_compose_templer.event import Event
5 | from docker_compose_templer.log import Log
6 | from docker_compose_templer.utils import hash
7 |
8 |
9 | class CachedFile(object):
10 | """Represents a file.
11 |
12 | The class implements file caching and a file watching functionality. File changes will dispatch all events that are
13 | listed in on_change_event.
14 |
15 | Args:
16 | path (str): The path of the file.
17 | watch_changes (bool): If true the file shall be watched for relevant changes.
18 |
19 | Attributes:
20 | files (dict): All loaded files stored with their path as the key
21 | path (str): The path of the file
22 | on_change_event (Event): List of subscribed events
23 | notifier (pyinotify.Notifier): Underlying file change listener
24 |
25 | """
26 | files = {}
27 |
28 | def __init__(self, path, watch_changes=False):
29 | self.path = path
30 |
31 | self.cache = None
32 | self.on_change_event = Event()
33 | self.notifier = None
34 | if watch_changes:
35 | import pyinotify
36 | mask = pyinotify.IN_CREATE | pyinotify.IN_MODIFY
37 | wm = pyinotify.WatchManager()
38 | wm.add_watch(self.path, mask, rec=False)
39 | self.notifier = pyinotify.Notifier(wm, self._on_change, timeout=10)
40 |
41 | def __del__(self):
42 | self.remove()
43 |
44 | def remove(self):
45 | """Stop listening to file changes."""
46 | if self.notifier:
47 | self.notifier.stop()
48 |
49 | def exists(self):
50 | """Returns true if the file exists in the filesystem."""
51 | return os.path.exists(self.path)
52 |
53 | def read(self):
54 | """Reads the files content.
55 |
56 | The file content will be cached. Consecutive reads will yield the cached content so the file doesn't have to be
57 | read twice.
58 |
59 | Returns:
60 | str: The content of the file.
61 |
62 | Raises:
63 | FileNotFoundError: If the file could not be found under the path
64 | IOError: If the given path does not contain a file
65 | """
66 | if self.cache and self.cache['path'] == self.path:
67 | Log.debug("Return cached file '{0}'...".format(self.path))
68 | return self.cache['content']
69 |
70 | else:
71 | self.cache = {}
72 |
73 | if not self.exists():
74 | raise FileNotFoundError("File does not exist: '{0}'".format(self.path))
75 | if not os.path.isfile(self.path):
76 | raise IOError("Is not a file: '{0}".format(self.path))
77 |
78 | Log.debug("Loading file '{0}'...".format(self.path))
79 | with io.open(self.path, 'r', encoding='utf8') as f:
80 | file_content = f.read()
81 |
82 | self.cache['path'] = self.path
83 | self.cache['content'] = file_content
84 | self.cache['hash'] = hash(file_content)
85 | return self.cache['content']
86 |
87 | @staticmethod
88 | def write(content, path, force_overwrite=False):
89 | """Writes the content into a file with the given path.
90 |
91 | Args:
92 | content (str): Content to write into the file
93 | path (str): Path where the content shall be written to
94 | force_overwrite (bool): If true any existing file will be overwritten
95 |
96 | Raises:
97 | IOError: If desired output file exists or is not a file
98 | """
99 | if os.path.exists(path):
100 | if os.path.isfile(path):
101 | if not force_overwrite:
102 | raise IOError("Destination already exists. Use '-f' flag to overwrite the file: '{0}".format(path))
103 | else:
104 | raise IOError("Destination exists and is not a file: '{0}".format(path))
105 | else:
106 | # create dir
107 | if os.path.dirname(path):
108 | os.makedirs(os.path.dirname(path), exist_ok=True)
109 |
110 | # write content to file
111 | Log.debug("Writing file '{0}'...".format(path))
112 | with io.open(path, 'w', encoding='utf8') as f:
113 | f.write(content)
114 |
115 | def _on_change(self, *args, **kwargs):
116 | """Gets executed on change event."""
117 | old_hash = self.cache['hash']
118 | self.cache = None
119 | self.read()
120 | if old_hash != self.cache['hash']:
121 | Log.debug("File '{0}' changed".format(self.path))
122 | self.on_change_event()
123 |
124 | @classmethod
125 | def get_file(cls, path, watch_changes=False):
126 | """Returns a file with the given path.
127 |
128 | If the file with the given path is already loaded into memory it will be returned instead of creating a new
129 | instance.
130 |
131 | Args:
132 | path: Path of the file
133 | watch_changes: Tell the file to watch for changes
134 |
135 | Returns:
136 | CachedFile: An instance of File with the given path
137 | """
138 | if path not in cls.files:
139 | cls.files[path] = cls(path, watch_changes)
140 | return cls.files[path]
141 |
142 | @classmethod
143 | def cleanup_unused_files(cls):
144 | """Removes loaded files from memory that aren't used anymore."""
145 | for k in list(cls.files.keys()):
146 | if len(cls.files[k].on_change_event) == 0:
147 | cls.files[k].remove()
148 | del cls.files[k]
149 |
--------------------------------------------------------------------------------
/docker_compose_templer/context.py:
--------------------------------------------------------------------------------
1 | import os
2 |
3 | from docker_compose_templer.event import Event
4 | from docker_compose_templer.cached_file import CachedFile
5 | from docker_compose_templer.jinja_renderer import JinjaRenderer
6 | from docker_compose_templer.log import Log
7 | from docker_compose_templer.utils import hash
8 | from docker_compose_templer.utils import load_yaml
9 |
10 |
11 | class ContextChainElement(object):
12 | """Represents a context chain element that is part of a ContextChain.
13 |
14 | Args:
15 | source (dict or CachedFile): Source of the context can either be a File or a dict
16 | prev (ContextChainElement): Previous element in the chain
17 | Attributes:
18 | prev (ContextChainElement): Previous element in the chain
19 | next (ContextChainElement): Next element in the chain
20 | source (dict or File): Source of the context can either be a File or a dict
21 | cache (dict): Ready processed and cached context
22 | cache_hash (str): SHA1 hash of the cache to check for changes
23 | on_change_event (Event): Events that get dispatched on a change
24 | """
25 |
26 | def __init__(self, source, prev=None):
27 | self.prev = prev
28 | self.next = None
29 | self.source = source
30 |
31 | self.cache = None
32 | self.cache_hash = None
33 | self.on_change_event = Event()
34 | if type(source) == CachedFile:
35 | self.source.on_change_event += self._on_change
36 |
37 | def get_context(self):
38 | """Returns the composed context up to this chain element.
39 |
40 | If the context was already created earlier and cached, then the cache will be returned.
41 |
42 | Returns:
43 | dict: The composed context
44 | Raises:
45 | Exception: If the variables cannot be loaded for some reasons
46 | """
47 | if self.cache is not None:
48 | return self.cache
49 | else:
50 | return self._create_context()
51 |
52 | def _create_context(self):
53 | """Creates the context by rendering the context's source with Jinja and merging it with the contexts of previous
54 | elements in the chain.
55 |
56 | Returns:
57 | dict: The composed context
58 | Raises:
59 | Exception: If the variables cannot be loaded for some reasons
60 | """
61 | parent_context = self.prev.get_context() if self.prev else {}
62 | if type(self.source) == CachedFile:
63 | file_content = self.source.read()
64 | try:
65 | self.cache = JinjaRenderer.render_dict_and_add_to_context(
66 | load_yaml(file_content),
67 | parent_context
68 | )
69 | except Exception as e:
70 | raise Exception("Cannot load variables from '{0}': {1}".format(self.source.path, str(e)))
71 | elif type(self.source) == dict:
72 | try:
73 | self.cache = JinjaRenderer.render_dict_and_add_to_context(
74 | self.source['data'],
75 | parent_context
76 | )
77 | except Exception as e:
78 | raise Exception("Cannot load variables from '{0}': {1}".format(self.source['path'], str(e)))
79 |
80 | self.cache_hash = hash(self.cache)
81 | return self.cache
82 |
83 | def _on_change(self, *args, **kwargs):
84 | """Gets executed on a change event."""
85 | old_hash = self.cache_hash
86 | try:
87 | self._create_context()
88 | except Exception as e:
89 | Log.error("Faild to create context: {0}".format(str(e)))
90 | raise
91 | if self.cache_hash != old_hash:
92 | self.on_change_event()
93 |
94 | def remove(self):
95 | """Stops listening for file changes."""
96 | if type(self.source) == CachedFile:
97 | self.source.on_change_event -= self._on_change
98 |
99 |
100 | class ContextChain(object):
101 | """Represents a context that is composed of multiple ContextChainElements.
102 |
103 | Args:
104 | watch_changes (bool): Enable watching for file changes
105 | Attributes:
106 | chain_elements (list): The elements of the chain
107 | watch_changes (bool): Enable watching for file changes
108 | on_change_event (Event): Events that get dispatched on a file change
109 | """
110 |
111 | def __init__(self, watch_changes=False):
112 | self.chain_elements = []
113 | self.watch_changes = watch_changes
114 | self.on_change_event = Event()
115 |
116 | def add_context(self, context, origin_path):
117 | """Adds a context to the chain.
118 |
119 | Args:
120 | context (dict): The (unprocessed) context to add
121 | origin_path (str): The file path where the context originated (used for logging)
122 | """
123 | if context:
124 | tail = self.chain_elements[-1] if self.chain_elements else None
125 | elm = ContextChainElement(
126 | source={'path': origin_path, 'data': context},
127 | prev=tail
128 | )
129 | elm.on_change_event += self.on_change_event
130 | self.chain_elements.append(elm)
131 | if tail:
132 | tail.next = elm
133 |
134 | def add_files(self, files, relative_path):
135 | """Adds a list of YAML files to the context chain.
136 |
137 | Args:
138 | files (list): Paths of YAML files to add
139 | relative_path: Relative path to look for the files
140 | """
141 | for path in files:
142 | if not os.path.isabs(path):
143 | path = os.path.join(relative_path, path)
144 | tail = self.chain_elements[-1] if self.chain_elements else None
145 | elm = ContextChainElement(
146 | source=CachedFile.get_file(path, self.watch_changes),
147 | prev=tail
148 | )
149 | elm.on_change_event += self.on_change_event
150 | self.chain_elements.append(elm)
151 | if tail:
152 | tail.next = elm
153 |
154 | def get_context(self):
155 | """Returns the composed context."""
156 | return self.chain_elements[-1].get_context()
157 |
158 | def remove(self):
159 | """Stops listening for changes."""
160 | for ce in self.chain_elements:
161 | ce.remove()
162 | self.chain_elements = None
163 |
164 |
--------------------------------------------------------------------------------
/docker_compose_templer/definition.py:
--------------------------------------------------------------------------------
1 | import os
2 |
3 | from docker_compose_templer.cached_file import CachedFile
4 | from docker_compose_templer.context import ContextChain
5 | from docker_compose_templer.log import Log
6 | from docker_compose_templer.template import Template
7 | from docker_compose_templer.utils import hash
8 | from docker_compose_templer.utils import load_yaml
9 |
10 |
11 | class Definition(object):
12 | """Resembles a definition file.
13 |
14 | Args:
15 | path (str): Path of the definition file
16 | force_overwrite (bool): Force overwriting of existing files
17 | watch_changes (bool): Enable watching for file changes
18 | Attributes:
19 | force_overwrite (bool): Force overwriting of existing files
20 | watch_changes (bool): Enable watching for file changes
21 | file (CachedFile): The definition file
22 | templates (dict): The loaded templates (SHA1 hashes are used as keys)
23 | changed_templates (list): List of templates that changed (only in auto render mode)
24 | """
25 |
26 | def __init__(self, path, force_overwrite=True, watch_changes=False):
27 | self.force_overwrite = force_overwrite
28 | self.watch_changes = watch_changes
29 |
30 | self.file = CachedFile.get_file(path, watch_changes)
31 | self.file.on_change_event += self._on_change
32 | self.templates = {}
33 | self.changed_templates = []
34 |
35 | def process(self):
36 | """Process the definition.
37 |
38 | Parses the definition file, loads the external context from YAML files and renders the defined templates.
39 |
40 | Returns:
41 | bool: True if the processing finished without errors else False
42 | """
43 | Log.info("\nProcess Definition: '{0}'".format(self.file.path))
44 | try:
45 | self._parse()
46 | except Exception as e:
47 | Log.error("Error loading options from definition file: {0}".format(str(e)), 2)
48 | return False
49 |
50 | return self._render_templates()
51 |
52 | def _parse(self):
53 | """Parses the definition file.
54 |
55 | Raises:
56 | FileNotFoundError: If the file could not be found under the path
57 | IOError: If the given path does not contain a file
58 | yaml.YAMLError: If the YAML string is malformed
59 | ValueError: If the syntax of the definition file is wrong
60 | """
61 | templates = {}
62 | self.changed_templates = []
63 |
64 | file_content = self.file.read()
65 | file_content = load_yaml(file_content)
66 | file_path = self.file.path
67 |
68 | if 'templates' not in file_content:
69 | raise ValueError("Missing key 'templates' in template definition")
70 | if type(file_content['templates']) is not list:
71 | raise ValueError("Value of 'templates' must be of type list")
72 |
73 | global_options = self._parse_variable_options(file_content)
74 |
75 | for t in file_content['templates']:
76 | template_options = self._parse_variable_options(t)
77 |
78 | if 'src' in t:
79 | if type(t['src']) is str:
80 | template_options['src'] = t['src']
81 | else:
82 | raise ValueError("Value of 'src' must be of type string")
83 | else:
84 | raise ValueError("Missing key 'src' in template definition")
85 |
86 | if 'dest' in t:
87 | if type(t['dest']) is str:
88 | template_options['dest'] = t['dest']
89 | else:
90 | raise ValueError("Value of 'dest' must be of type string")
91 | else:
92 | raise ValueError("Missing key 'dest' in template definition")
93 |
94 | thash = hash(global_options['include_vars'], global_options['vars'], template_options['include_vars'],
95 | template_options['vars'], template_options['src'], template_options['dest'])
96 |
97 | # reuse previous parsed templates (only in Auto Renderer mode)
98 | if thash in self.templates:
99 | templates[thash] = self.templates[thash]
100 | continue
101 |
102 | # load local variables
103 | tcc = ContextChain(self.watch_changes)
104 | tcc.add_files(global_options['include_vars'], os.path.dirname(file_path))
105 | tcc.add_context(global_options['vars'], file_path)
106 | tcc.add_files(template_options['include_vars'], os.path.dirname(file_path))
107 | tcc.add_context(template_options['vars'], file_path)
108 |
109 | templates[thash] = Template(
110 | src=template_options['src'],
111 | dest=template_options['dest'],
112 | relative_path=os.path.dirname(file_path),
113 | context=tcc,
114 | force_overwrite=self.force_overwrite,
115 | watch_changes=self.watch_changes
116 | )
117 | self.changed_templates.append(thash)
118 |
119 | # cleanup undefined templates (only in Auto Renderer mode)
120 | for thash, t in self.templates.items():
121 | if thash not in templates:
122 | t.remove()
123 | CachedFile.cleanup_unused_files()
124 |
125 | self.templates = templates
126 |
127 | def _parse_variable_options(self, options):
128 | """Parses common options and sets defaults.
129 |
130 | Args:
131 | options: Options that need to be parsed
132 |
133 | Returns:
134 | dict: The parsed options
135 | """
136 | processed_options = {}
137 |
138 | if 'vars' in options:
139 | if type(options['vars']) is dict:
140 | processed_options['vars'] = options['vars']
141 | else:
142 | raise ValueError("Value of 'vars' must be of type dict")
143 | else:
144 | processed_options['vars'] = {}
145 |
146 | if 'include_vars' in options:
147 | if type(options['include_vars']) is list:
148 | processed_options['include_vars'] = options['include_vars']
149 | elif type(options['include_vars']) is str:
150 | processed_options['include_vars'] = [options['include_vars']]
151 | else:
152 | raise ValueError("Value of 'include_vars' must be of type list or string")
153 | else:
154 | processed_options['include_vars'] = []
155 |
156 | return processed_options
157 |
158 | def _render_templates(self):
159 | """Renders the loaded templates.
160 |
161 | Returns:
162 | bool: True if the processing finished without errors else False
163 | """
164 | all_renders_successful = True
165 | for thash in self.changed_templates:
166 | t = self.templates[thash]
167 | if not t.render():
168 | all_renders_successful = False
169 | return all_renders_successful
170 |
171 | def _on_change(self, *args, **kwargs):
172 | """Gets executed on change event."""
173 | self.process()
174 |
--------------------------------------------------------------------------------
/docker_compose_templer/event.py:
--------------------------------------------------------------------------------
1 | class Event(list):
2 | """Represent an subscribable event."""
3 |
4 | def __iadd__(self, handler):
5 | """Adds a handler to the subscribe list."""
6 | self.append(handler)
7 | return self
8 |
9 | def __isub__(self, handler):
10 | """Removes a handler from the subscribe list."""
11 | if handler in self:
12 | self.remove(handler)
13 | return self
14 |
15 | def __call__(self, *args, **kwargs):
16 | """Executes the stored handlers"""
17 | for f in self:
18 | f(*args, **kwargs)
19 |
--------------------------------------------------------------------------------
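
The `Event` class above is the tiny publish/subscribe primitive used throughout the code base (files and contexts dispatch it on changes). A minimal usage sketch (the handler is made up):

```python
from docker_compose_templer.event import Event

def handler(path):
    print("changed:", path)

on_change = Event()
on_change += handler   # subscribe
on_change("vars.yml")  # dispatch: calls every subscribed handler
on_change -= handler   # unsubscribe
```
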
/docker_compose_templer/jinja_filter.py:
--------------------------------------------------------------------------------
1 | """ Custom Jinja2 filters """
2 | import json
3 | import re
4 | from distutils.util import strtobool
5 |
6 | import ruamel.yaml as yaml
7 | from jinja2 import StrictUndefined, UndefinedError
8 |
9 |
10 | class MandatoryError(UndefinedError):
11 | def __init__(self, message):
12 | super().__init__(message)
13 |
14 |
15 | def mandatory(value, error_message=u''):
16 | """Raise an 'UndefinedError' with an custom error massage, when value is undefined"""
17 | if type(value) is StrictUndefined:
18 | error_message = str(error_message) or "The variable '{0}' is undefined".format(value._undefined_name)
19 | raise MandatoryError(error_message)
20 |
21 | return value
22 |
23 |
24 | def regex_escape(string):
25 | """Escape special characters in a string so it can be used in regular expressions"""
26 | return re.escape(string)
27 |
28 |
29 | def regex_findall(value, pattern, ignorecase=False, multiline=False):
30 | """Do a regex findall on 'value'"""
31 | flags = 0
32 | if ignorecase:
33 | flags |= re.I
34 | if multiline:
35 | flags |= re.M
36 | compiled_pattern = re.compile(pattern, flags=flags)
37 | return compiled_pattern.findall(str(value))
38 |
39 |
40 | def regex_replace(value, pattern, replacement, ignorecase=False, multiline=False):
41 | """Do a regex search and replace on 'value'"""
42 | flags = 0
43 | if ignorecase:
44 | flags |= re.I
45 | if multiline:
46 | flags |= re.M
47 | compiled_pattern = re.compile(pattern, flags=flags)
48 | return compiled_pattern.sub(replacement, str(value))
49 |
50 |
51 | def regex_search(value, pattern, *args, **kwargs):
52 | """Do a regex search on 'value'"""
53 | groups = []
54 | for arg in args:
55 | match = re.match(r'\\(\d+)', arg)
56 | if match:
57 | groups.append(int(match.group(1)))
58 | continue
59 |
60 | match = re.match(r'^\\g<(\S+)>', arg)
61 | if match:
62 | groups.append(match.group(1))
63 | continue
64 |
65 | raise Exception("Unknown argument: '{}'".format(str(arg)))
66 |
67 | flags = 0
68 | if kwargs.get('ignorecase'):
69 | flags |= re.I
70 | if kwargs.get('multiline'):
71 | flags |= re.M
72 | compiled_pattern = re.compile(pattern, flags=flags)
73 | match = re.search(compiled_pattern, str(value))
74 |
75 | if match:
76 | if not groups:
77 | return match.group()
78 | else:
79 | items = []
80 | for item in groups:
81 | items.append(match.group(item))
82 | return items
83 |
84 | def regex_contains(value, pattern, ignorecase=False, multiline=False):
85 | """Search the 'value' for 'pattern' and return True if at least one match was found"""
86 | match = regex_search(value, pattern, ignorecase=ignorecase, multiline=multiline)
87 | if match:
88 | return True
89 | else:
90 | return False
91 |
92 |
93 | def to_bool(string, default_value=None):
94 | """Convert a string representation of a boolean value to an actual bool
95 |
96 | Args:
97 | string (str): A string to be converted to bool
98 | default_value: Default value when 'string' is not an boolean value
99 |
100 | Returns:
101 | bool: Converted string
102 |
103 | """
104 | try:
105 | return bool(strtobool(string.strip()))
106 | except ValueError:
107 | if default_value is not None:
108 | return default_value
109 | else:
110 | raise ValueError("'{0}' is not a boolean value".format(string.strip()))
111 |
112 |
113 | def to_yaml(value, indent=2, *args, **kw):
114 | """Convert the value to human readable YAML"""
115 | return yaml.dump(
116 | value,
117 | block_seq_indent=indent,
118 | indent=indent,
119 | allow_unicode=True,
120 | default_flow_style=False,
121 | **kw
122 | )
123 |
124 | def to_json(value, *args, **kw):
125 | """Convert the value to JSON"""
126 | return json.dumps(value, *args, **kw)
127 |
128 |
129 | def to_nice_json(value, indent=4, *args, **kw):
130 | """Convert the value to human readable JSON"""
131 | return json.dumps(value, indent=indent, sort_keys=True, separators=(',', ': '), *args, **kw)
132 |
133 |
134 | # register the filters
135 | filters = {
136 | 'mandatory': mandatory,
137 | 'regex_escape': regex_escape,
138 | 'regex_findall': regex_findall,
139 | 'regex_replace': regex_replace,
140 | 'regex_search': regex_search,
141 | 'regex_contains': regex_contains,
142 | 'to_bool': to_bool,
143 | 'to_yaml': to_yaml,
144 | 'to_json': to_json,
145 | 'to_nice_json': to_nice_json,
146 | }
147 |
--------------------------------------------------------------------------------
/docker_compose_templer/jinja_renderer.py:
--------------------------------------------------------------------------------
1 | import os
2 | from ast import literal_eval
3 | from copy import deepcopy
4 | from distutils.util import strtobool
5 | from hashlib import sha1
6 |
7 | import jinja2
8 | import ruamel.yaml as yaml
9 |
10 | from docker_compose_templer import jinja_filter
11 | from docker_compose_templer.utils import merge_dicts
12 |
13 |
14 | class JinjaRenderer(object):
15 | """Supplies functions to render templates with Jinja.
16 |
17 | Attributes:
18 | omit_placeholder (str): The omit placeholder used for removing keys from a dict/yaml
19 | env: The jinja environment used to render strings
20 |
21 | """
22 |
23 | omit_placeholder = '__omit_place_holder__%s' % sha1(os.urandom(64)).hexdigest()
24 | env = jinja2.Environment(
25 | lstrip_blocks=True,
26 | trim_blocks=True,
27 | undefined=jinja2.StrictUndefined
28 | )
29 | env.filters = merge_dicts(env.filters, jinja_filter.filters)
30 |
31 | @staticmethod
32 | def _evaluate_string(string):
33 | """Evaluates a string containing a Python value.
34 |
35 | Args:
36 | string(str): A Python value represented as a string
37 |
38 | Returns:
39 | str, int, float, bool, list or dict: The value of the evaluated string
40 | """
41 | try:
42 | # evaluate to int, float, list, dict
43 | return literal_eval(string.strip())
44 | except (ValueError, SyntaxError) as e:
45 | try:
46 | # evaluate bool from different variations
47 | return bool(strtobool(string.strip()))
48 | except ValueError as e:
49 | # string cannot be evaluated -> return string
50 | return string
51 |
52 | class Omit(object):
53 | """Represents a omit object"""
54 | pass
55 |
56 | @classmethod
57 | def render_string(cls, template_string, context):
58 | """Renders a template string with Jinja.
59 |
60 | Args:
61 | template_string (str): The template string to be rendered
62 | context (dict): The context used for rendering
63 |
64 | Returns:
65 | str: The rendered string
66 |
67 | Raises:
68 | jinja_filter.MandatoryError: If a variable is undefined and the mandatory filter was used
69 | jinja2.UndefinedError: If a variable is undefined
70 | jinja2.TemplateError: If the template contains an invalid syntax
71 | """
72 | # add omit variable to context
73 | context['omit'] = JinjaRenderer.omit_placeholder
74 |
75 | try:
76 | return cls.env.from_string(template_string).render(context)
77 | except jinja_filter.MandatoryError as e:
78 | raise e
79 | except jinja2.UndefinedError as e:
80 | raise jinja2.UndefinedError('Undefined variable: {0}'.format(str(e.message)))
81 | except jinja2.TemplateError as e:
82 | raise jinja2.TemplateError('Jinja template error: {0}'.format(str(e.message)))
83 |
84 | @classmethod
85 | def render_dict_and_add_to_context(cls, the_dict, context):
86 | """Renders a dict and adds it to the context.
87 |
88 | Args:
89 | the_dict (dict): The dict to be rendered
90 | context (dict): The context that is used for rendering
91 |
92 | Returns:
93 | dict: The context that contains also the variables from the_dict
94 |
95 | Raises:
96 | jinja_filter.MandatoryError: If a variable is undefined and the mandatory filter was used
97 | jinja2.UndefinedError: If a variable is undefined
98 | jinja2.TemplateError: If the template contains an invalid syntax
99 | """
100 | new_context = deepcopy(context)
101 | for k, v in the_dict.items():
102 | processed_value = cls._render_recursively(v, new_context)
103 | if type(processed_value) is not JinjaRenderer.Omit:
104 | new_context = merge_dicts(new_context, {k: processed_value})
105 | return new_context
106 |
107 | @classmethod
108 | def _render_recursively(cls, value, context):
109 | """Renders a value recursively.
110 |
111 | Args:
112 | value: Value to be rendered
113 | context: The context used for rendering
114 |
115 | Returns:
116 | Value that has been rendered with Jinja
117 |
118 | Raises:
119 | jinja_filter.MandatoryError: If a variable is undefined and the mandatory filter was used
120 | jinja2.UndefinedError: If a variable is undefined
121 | jinja2.TemplateError: If the template contains an invalid syntax
122 | """
123 | if value is None:
124 | return None
125 |
126 | # str
127 | elif type(value) is str:
128 | rendered_value = cls.render_string(value, context)
129 | if rendered_value == value:
130 | return value
131 | else:
132 | if rendered_value.find(JinjaRenderer.omit_placeholder) != -1:
133 | return JinjaRenderer.Omit()
134 | else:
135 | return cls._evaluate_string(rendered_value)
136 |
137 | # lists
138 | elif type(value) is list:
139 | new_list = []
140 | for li in value:
141 | processed_item = cls._render_recursively(li, context)
142 | if type(processed_item) is not JinjaRenderer.Omit:
143 | new_list.append(processed_item)
144 | return new_list
145 |
146 | # dicts
147 | elif type(value) is dict:
148 | new_dict = {}
149 | for k, v in value.items():
150 | processed_value = cls._render_recursively(v, context)
151 | if type(processed_value) is not JinjaRenderer.Omit:
152 | new_dict[k] = processed_value
153 | return new_dict
154 |
155 | # other types
156 | else:
157 | return value
158 |
159 | @classmethod
160 | def remove_omit_from_dict(cls, value):
161 | """Parses a YAML string and produce the corresponding Python object.
162 |
163 | Args:
164 | value: The value from which all occurrences of omit shall be removed
165 |
166 | Returns:
167 | dict: The processed dict
168 | """
169 | if value is None:
170 | return None
171 |
172 | elif type(value) is str:
173 | if value.find(JinjaRenderer.omit_placeholder) != -1:
174 | return JinjaRenderer.Omit()
175 | else:
176 | return value
177 |
178 | # lists
179 | elif isinstance(value, (yaml.comments.CommentedSeq, list)):
180 | vlen = len(value)
181 | for i in range(vlen - 1, -1, -1):
182 | processed_item = cls.remove_omit_from_dict(value[i])
183 | if type(processed_item) is JinjaRenderer.Omit:
184 | del value[i]
185 | # no index adjustment needed: the loop already iterates backwards
186 | return value
187 |
188 | # dicts
189 | elif isinstance(value, (yaml.comments.CommentedMap, dict)):
190 | for key in list(value.keys()):
191 | processed_value = cls.remove_omit_from_dict(value[key])
192 | if type(processed_value) is JinjaRenderer.Omit:
193 | del value[key]
194 | return value
195 |
196 | else:
197 | return value
198 |
--------------------------------------------------------------------------------
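
A short sketch of how `JinjaRenderer` behaves, including the omit mechanism (the context values are made up):

```python
from docker_compose_templer.jinja_renderer import JinjaRenderer

# plain string rendering against a context (undefined variables raise errors)
print(JinjaRenderer.render_string("VALUE={{ my_var }}", {'my_var': 'hello'}))  # VALUE=hello

# a value rendering to the omit placeholder is dropped while building a context
context = JinjaRenderer.render_dict_and_add_to_context({'a': "{{ omit }}", 'b': 1}, {})
assert 'a' not in context and context['b'] == 1
```
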
/docker_compose_templer/log.py:
--------------------------------------------------------------------------------
1 | import sys
2 | import traceback
3 |
4 |
5 | class Log(object):
6 | """Stupid logger that writes messages to stdout or stderr accordingly."""
7 |
8 | ERROR = 30
9 | INFO = 20
10 | DEBUG = 10
11 | level = ERROR
12 |
13 | @staticmethod
14 | def debug(msg, indent=0):
15 | if Log.level <= 10:
16 | sys.stdout.write(Log.indent_string(msg, indent) + "\n")
17 |
18 | @staticmethod
19 | def info(msg, indent=0):
20 | if Log.level <= 20:
21 | sys.stdout.write(Log.indent_string(msg, indent) + "\n")
22 |
23 | @staticmethod
24 | def error(msg, indent=0):
25 | sys.stderr.write(Log.indent_string(msg, indent) + "\n")
26 | if Log.level <= 10:
27 | traceback.print_exc(5)
28 |
29 | @staticmethod
30 | def indent_string(string, indent):
31 | """Adds indentation to a string.
32 |
33 | Args:
34 | string (str): String to be indented
35 | indent (int): Number of spaces to indent the string
36 |
37 | Returns:
38 | str: The indented string.
39 | """
40 | if indent > 0:
41 |             lines = string.splitlines()
42 |             return '\n'.join([' ' * indent + l for l in lines])
43 | else:
44 | return string
45 |
--------------------------------------------------------------------------------
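
Usage of the logger above is intentionally simple: set the class-level level once and call the static methods. A short sketch:

    >>> from docker_compose_templer.log import Log
    >>> Log.level = Log.DEBUG
    >>> Log.info("rendering", indent=2)
      rendering
    >>> Log.indent_string("a\nb", indent=2)
    '  a\n  b'
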
/docker_compose_templer/template.py:
--------------------------------------------------------------------------------
1 | import os
2 |
3 | from docker_compose_templer.cached_file import CachedFile
4 | from docker_compose_templer.jinja_renderer import JinjaRenderer
5 | from docker_compose_templer.log import Log
6 | from docker_compose_templer.utils import dump_yaml
7 | from docker_compose_templer.utils import load_yaml
8 |
9 |
10 | class Template(object):
11 |     """Represents a template file to be rendered with Jinja2.
12 | 
13 |     Args:
14 |         src (str): Path to the template file
15 |         dest (str): Path for the rendered file
16 |         relative_path (str): Base directory for resolving relative src and dest paths
17 |         context (Context): Context supplying the Jinja2 variables
18 |         force_overwrite (bool): Force overwrite of an existing file
19 |         watch_changes (bool): Enable watching for file changes
20 | 
21 |     Attributes:
22 |         All constructor arguments are stored as attributes of the same name.
23 |         In addition:
24 | 
25 |         _file (CachedFile): The cached template file
26 | 
27 |     """
28 |
29 | def __init__(self, src, dest, relative_path, context, force_overwrite=False, watch_changes=False):
30 | self.src = src
31 | self.dest = dest
32 | self.relative_path = relative_path
33 | self.context = context
34 | self.force_overwrite = force_overwrite
35 | self.watch_changes = watch_changes
36 |
37 | self._file = CachedFile.get_file(self._create_path(self.src), self.watch_changes)
38 | self._file.on_change_event += self.render
39 | self.context.on_change_event += self.render
40 |
41 | def remove(self):
42 | """Stop listening for changes."""
43 | self.context.remove()
44 | self._file.on_change_event -= self.render
45 |
46 | @property
47 | def file(self):
48 | """Returns the template file as File object"""
49 |         # The path may change depending on the context, so render the src path and
50 |         # compare it with the previously used one. If it changed, a new file object is returned.
51 | path = self._create_path(self.src)
52 | if self._file.path == path:
53 | return self._file
54 | else:
55 |             self._file.on_change_event -= self.render
56 | self._file = CachedFile.get_file(path, self.watch_changes)
57 | self._file.on_change_event += self.render
58 | return self._file
59 |
60 | def _create_path(self, path, absolute=True):
61 | """Renders the given path with Jinja and returns the result.
62 |
63 | Args:
64 | path: The path to be rendered
65 | absolute: If true the returned path will be an absolute one
66 |
67 | Returns:
68 | str: The rendered path
69 | """
70 | path = JinjaRenderer.render_string(path, self.context.get_context())
71 | if absolute and not os.path.isabs(path):
72 | return os.path.join(self.relative_path, path)
73 | else:
74 | return path
75 |
76 | def render(self):
77 | """Renders the template file with Jinja and writes the output to the destination.
78 |
79 | Returns:
80 |             bool: True if rendering finished without errors, otherwise False
81 | """
82 | src_rel = self.src
83 | dest_rel = self.dest
84 |
85 | try:
86 | try:
87 | src_rel = self._create_path(self.src, False)
88 | dest_rel = self._create_path(self.dest, False)
89 | finally:
90 | Log.info("Render template: '{0}' --> '{1}'".format(src_rel, dest_rel))
91 |
92 | file_content = self.file.read()
93 |
94 | # render the template with Jinja
95 | rendered_file_content = JinjaRenderer.render_string(file_content, self.context.get_context())
96 |
97 | # remove values containing an omit placeholder
98 | processed_content = dump_yaml(JinjaRenderer.remove_omit_from_dict(
99 | load_yaml(rendered_file_content, safe=False)))
100 |
101 | # write the rendered content into a file
102 | dest_path = self._create_path(self.dest)
103 | self.file.write(
104 | content=processed_content,
105 | path=dest_path,
106 | force_overwrite=self.force_overwrite
107 | )
108 |
109 | return True
110 |
111 | except Exception as e:
112 | Log.error("Error while rendering template: {0}".format(str(e)), 2)
113 | return False
114 |
--------------------------------------------------------------------------------
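
Since src and dest may themselves contain Jinja expressions (see the example definition files below), Template._create_path renders them before joining with relative_path. A standalone sketch of the same idea using a bare jinja2 Template instead of JinjaRenderer's configured environment ('/project/examples' is a hypothetical base directory):

    >>> import os
    >>> import jinja2
    >>> rendered = jinja2.Template('stacks/{{ stack_name }}/00-redis.yml').render(stack_name='user1')
    >>> os.path.join('/project/examples', rendered)
    '/project/examples/stacks/user1/00-redis.yml'
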
/docker_compose_templer/utils.py:
--------------------------------------------------------------------------------
1 | def merge_dicts(x, y):
2 | """Recursively merges two dicts.
3 |
4 |     When a key exists in both dicts, the value from 'y' takes precedence.
5 |
6 | Args:
7 | x (dict): First dict
8 | y (dict): Second dict
9 |
10 | Returns:
11 | dict: The merged dict
12 | """
13 | if x is None and y is None:
14 | return {}
15 | if x is None:
16 | return y
17 | if y is None:
18 | return x
19 |
20 | merged = dict(x, **y)
21 | xkeys = x.keys()
22 |
23 | for key in xkeys:
24 | if type(x[key]) is dict and key in y:
25 | merged[key] = merge_dicts(x[key], y[key])
26 | return merged
27 |
28 |
29 | def load_yaml(string, safe=True):
30 |     """Parses a YAML string and produces the corresponding Python object.
31 |
32 | Args:
33 | string (str): The input string to be parsed
34 | safe (bool): If True the CSafeLoader is used otherwise the RoundTripLoader
35 |
36 | Returns:
37 | dict: The parsed YAML
38 |
39 | Raises:
40 | yaml.YAMLError: If the YAML string is malformed
41 | """
42 |
43 | import ruamel.yaml as yaml
44 | from docker_compose_templer.log import Log
45 |
46 | try:
47 | Log.debug("Parsing YAML...")
48 | if safe:
49 | yml = yaml.YAML(typ='safe')
50 | else:
51 | yml = yaml.YAML(typ='rt')
52 | return yml.load(string) or {}
53 | except yaml.YAMLError as e:
54 |         raise yaml.YAMLError("YAML parsing error: {0}".format(e))
55 | except Exception:
56 | raise
57 |
58 |
59 | def dump_yaml(data):
60 | """Dumps a Python object as a YAML string.
61 |
62 | Args:
63 | data (dict): The data to be dumped as YAML
64 |
65 | Returns:
66 | str: The dumped YAML
67 |
68 | Raises:
69 |         yaml.YAMLError: If the data cannot be dumped as YAML
70 | """
71 |
72 | import ruamel.yaml as yaml
73 | from io import StringIO
74 | from docker_compose_templer.log import Log
75 |
76 | yml = yaml.YAML()
77 | yml.indent(mapping=2, sequence=4, offset=2)
78 | yml.width = 1000
79 | try:
80 | Log.debug("Dumping YAML...")
81 | sio = StringIO()
82 | yml.dump(data, sio)
83 | return sio.getvalue()
84 |     except yaml.YAMLError as e:
85 |         raise yaml.YAMLError("YAML dump error: {0}".format(e))
86 | except Exception:
87 | raise
88 |
89 |
90 | def hash(*args):
91 | """Creates a single sha1 hash value of the given objects.
92 |
93 | Args:
94 | *args: The objects to be hashed
95 |
96 | Returns:
97 | str: sha1 hash of all objects given
98 | """
99 |
100 | import json
101 | from hashlib import sha1
102 |
103 | calculated_hash = ''
104 |     for obj in args:
105 |         if type(obj) is dict:
106 |             calculated_hash = sha1((calculated_hash + json.dumps(obj, sort_keys=True)).encode()).hexdigest()
107 |         else:
108 |             calculated_hash = sha1((calculated_hash + str(obj)).encode()).hexdigest()
109 | return calculated_hash
110 |
--------------------------------------------------------------------------------
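
A quick doctest for merge_dicts, showing that values from the second dict win and that nested dicts are merged recursively rather than replaced:

    >>> from docker_compose_templer.utils import merge_dicts
    >>> merge_dicts({'a': 1, 'd': {'x': 1, 'y': 2}}, {'d': {'x': 0}, 'e': 3})
    {'a': 1, 'd': {'x': 0, 'y': 2}, 'e': 3}
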
/examples/stack-global.yml:
--------------------------------------------------------------------------------
1 | ---
2 |
3 | include_vars:
4 | - vars-global.yml
5 | vars:
6 | stack_name: global
7 | stacks:
8 | - global
9 | - user1
10 | - user2
11 |
12 | templates:
13 | - src: templates/reverse-proxy.yml.j2
14 | dest: stacks/{{ stack_name }}/00-reverse-proxy.yml
15 | vars:
16 | deployment_name: reverse-proxy
17 |
18 | - src: templates/mariadb.yml.j2
19 | dest: stacks/{{ stack_name }}/00-mariadb.yml
20 | vars:
21 | deployment_name: mariadb
22 | MYSQL_ROOT_PASSWORD: crPWnQpQppiI1B8H0OKU
23 |
24 |
--------------------------------------------------------------------------------
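
The definition above layers variables from three sources: files listed under include_vars, the file-level vars, and the per-template vars, with later sources taking precedence. The actual chaining lives in context.py (not reproduced in this listing), but the effect can be sketched with utils.merge_dicts:

    >>> from docker_compose_templer.utils import merge_dicts
    >>> include_vars = {'base_volume_dir': '/srv'}
    >>> file_vars = {'stack_name': 'global', 'stacks': ['global', 'user1', 'user2']}
    >>> template_vars = {'deployment_name': 'mariadb'}
    >>> ctx = merge_dicts(merge_dicts(include_vars, file_vars), template_vars)
    >>> ctx['base_volume_dir'], ctx['stack_name'], ctx['deployment_name']
    ('/srv', 'global', 'mariadb')
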
/examples/stack-user1.yml:
--------------------------------------------------------------------------------
1 | ---
2 |
3 | include_vars:
4 | - vars-global.yml
5 | vars:
6 | stack_name: user1
7 |
8 | templates:
9 | - src: templates/redis.yml.j2
10 | dest: stacks/{{ stack_name }}/00-{{ deployment_name }}.yml
11 | vars:
12 | deployment_name: redis
13 | REDIS_DATABASES: 3
14 | REDIS_MAXCLIENTS: 500
15 | REDIS_MAXMEMORY: "300mb"
16 | REDIS_REQUIREPASS: lnFl4MzarOL5bTEbreld
17 |
18 | - src: templates/nextcloud.yml.j2
19 | dest: stacks/{{ stack_name }}/10-{{ deployment_name }}.yml
20 | vars:
21 | deployment_name: nextcloud
22 | domainnames: "cloud.user1.de"
23 | NGINX_REDIRECT_TO_FIRST_DOMAIN: true
24 | NEXTCLOUD_DATABASE_HOST: mariadb.global.docker
25 | NEXTCLOUD_DATABASE_NAME: user1_01
26 | NEXTCLOUD_DATABASE_USER: user1_01
27 | NEXTCLOUD_MEMCACHE_LOCKING_ENABLED: true
28 | NEXTCLOUD_MEMCACHE_DISTRIBUTED: Redis
29 | NEXTCLOUD_REDIS_HOST: "redis.{{ stack_name }}.docker"
30 | NEXTCLOUD_REDIS_PORT: 6379
31 | NEXTCLOUD_REDIS_DBINDEX: 0
32 | NEXTCLOUD_REDIS_PASSWORD: lnFl4MzarOL5bTEbreld
33 | NEXTCLOUD_DATABASE_PASSWORD: 7qvbCz7JcVS3OcSa9jE0
34 |
--------------------------------------------------------------------------------
/examples/stack-user2.yml:
--------------------------------------------------------------------------------
1 | ---
2 |
3 | include_vars:
4 | - vars-global.yml
5 | vars:
6 | stack_name: user2
7 |
8 | templates:
9 | - src: templates/redis.yml.j2
10 | dest: stacks/{{ stack_name }}/00-{{ deployment_name }}.yml
11 | vars:
12 | deployment_name: redis
13 | REDIS_DATABASES: 3
14 | REDIS_MAXCLIENTS: 500
15 | REDIS_MAXMEMORY: "300mb"
16 | REDIS_REQUIREPASS: YwrbSwFEzEu2iG0rwEpi
17 |
18 | - src: templates/nextcloud.yml.j2
19 | dest: stacks/{{ stack_name }}/10-{{ deployment_name }}.yml
20 | vars:
21 | deployment_name: nextcloud
22 | domainnames: "cloud.user2.de"
23 | NGINX_REDIRECT_TO_FIRST_DOMAIN: true
24 | NEXTCLOUD_DATABASE_HOST: mariadb.global.docker
25 | NEXTCLOUD_DATABASE_NAME: user2_01
26 | NEXTCLOUD_DATABASE_USER: user2_01
27 | NEXTCLOUD_MEMCACHE_LOCKING_ENABLED: true
28 | NEXTCLOUD_MEMCACHE_DISTRIBUTED: Redis
29 | NEXTCLOUD_REDIS_HOST: "redis.{{ stack_name }}.docker"
30 | NEXTCLOUD_REDIS_PORT: 6379
31 | NEXTCLOUD_REDIS_DBINDEX: 0
32 | NEXTCLOUD_REDIS_PASSWORD: YwrbSwFEzEu2iG0rwEpi
33 | NEXTCLOUD_DATABASE_PASSWORD: IKJDNBuoyU53rZEAIb4Z
34 |
--------------------------------------------------------------------------------
/examples/stacks/global/00-mariadb.yml:
--------------------------------------------------------------------------------
1 | version: '3.7'
2 |
3 | services:
4 | mariadb:
5 | # https://hub.docker.com/r/bitnami/mariadb/
6 | image: bitnami/mariadb:10.2.21
7 | environment:
8 | - MARIADB_PORT_NUMBER=3306
9 | - MARIADB_ROOT_PASSWORD=crPWnQpQppiI1B8H0OKU
10 | - MARIADB_ROOT_USER=root
11 | networks:
12 | global_mariadb:
13 | aliases:
14 | - mariadb
15 | - mariadb.global.docker
16 | - mysql
17 | - mysql.global.docker
18 | user1_mariadb:
19 | aliases:
20 | - mariadb
21 | - mariadb.global.docker
22 | - mysql
23 | - mysql.global.docker
24 | user2_mariadb:
25 | aliases:
26 | - mariadb
27 | - mariadb.global.docker
28 | - mysql
29 | - mysql.global.docker
30 | user: '999'
31 | volumes:
32 | - /etc/localtime:/etc/localtime:ro
33 | - /srv/global/mariadb:/bitnami/mariadb
34 | deploy:
35 | mode: global
36 | placement:
37 | constraints: [node.platform.os == linux]
38 | restart_policy:
39 | condition: on-failure
40 | delay: 5s
41 | resources:
42 | limits:
43 | cpus: '2.0'
44 | memory: 2000MB
45 | update_config:
46 | parallelism: 1
47 | delay: 10m
48 |
49 | networks:
50 | global_mariadb:
51 | name: global_mariadb
52 | driver: overlay
53 | attachable: true
54 | user1_mariadb:
55 | name: user1_mariadb
56 | driver: overlay
57 | attachable: true
58 | user2_mariadb:
59 | name: user2_mariadb
60 | driver: overlay
61 | attachable: true
62 |
--------------------------------------------------------------------------------
/examples/stacks/global/00-reverse-proxy.yml:
--------------------------------------------------------------------------------
1 | version: '3.7'
2 |
3 | services:
4 | reverse-proxy:
5 | image: traefik:1.7.9
6 | command: --configfile='/container/traefik.toml'
7 | healthcheck:
8 | test: [CMD, /traefik, healthcheck, --configfile=/container/traefik.toml]
9 | interval: 5m
10 | timeout: 10s
11 | retries: 3
12 | start_period: 15s
13 | networks:
14 | global_reverse-proxy:
15 | aliases:
16 | - reverse-proxy
17 | - reverse-proxy.docker
18 | - reverse-proxy.global.docker
19 | user1_reverse-proxy:
20 | aliases:
21 | - reverse-proxy
22 | - reverse-proxy.docker
23 | - reverse-proxy.global.docker
24 | user2_reverse-proxy:
25 | aliases:
26 | - reverse-proxy
27 | - reverse-proxy.docker
28 | - reverse-proxy.global.docker
29 | expose:
30 | # traefik dashboard
31 | - '8000'
32 | ports:
33 | # Ports must be exposed in 'host' mode instead of 'ingress', otherwise the
34 | # incoming requests IP will be lost. For more information see:
35 | # https://github.com/moby/moby/issues/25526
36 | - target: 80
37 | published: 80
38 | protocol: tcp
39 | mode: host
40 | - target: 443
41 | published: 443
42 | protocol: tcp
43 | mode: host
44 | volumes:
45 | - /etc/localtime:/etc/localtime:ro
46 | - /var/run/docker.sock:/var/run/docker.sock:ro
47 | - /srv/global/reverse-proxy:/container
48 | deploy:
49 | labels:
50 | - bindMountOwner=0:0
51 | placement:
52 | constraints: [node.role == manager]
53 | mode: global
54 | update_config:
55 | parallelism: 1
56 | delay: 10s
57 | resources:
58 | reservations:
59 | memory: 60M
60 | limits:
61 | memory: 400MB
62 |
63 | error-pages:
64 | image: nginx:1.15-alpine
65 | command: nginx -g 'daemon off;' -c /container/nginx.conf
66 | networks:
67 | global_reverse-proxy:
68 | aliases:
69 | - error-pages.global.docker
70 | expose:
71 | - '8080'
72 | volumes:
73 | - /etc/localtime:/etc/localtime:ro
74 | - /srv/global/reverse-proxy/error-pages:/container
75 | deploy:
76 | labels:
77 | - bindMountOwner=100:101
78 | - bindMountMode=0770
79 | - updateService=true
80 | - traefik.enable=true
81 | - traefik.docker.network=global_reverse-proxy
82 | - 'traefik.frontend.rule=HostRegexp:{catchall:.*};ReplacePath: /raise-503-error'
83 | - traefik.frontend.priority=1
84 | - traefik.port=8080
85 | - traefik.weight=10
86 | - traefik.backend=error
87 | - traefik.backend.loadbalancer.method=drr
88 | - traefik.backend.loadbalancer.stickiness=false
89 | - traefik.backend.loadbalancer.swarm=true
90 | mode: global
91 | restart_policy:
92 | condition: on-failure
93 | delay: 5s
94 | max_attempts: 5
95 | resources:
96 | limits:
97 | cpus: '0.1'
98 | memory: 20MB
99 | update_config:
100 | parallelism: 1
101 | delay: 10s
102 |
103 | networks:
104 | global_reverse-proxy:
105 | name: global_reverse-proxy
106 | driver: overlay
107 | attachable: true
108 | # use explicit subnet
109 | ipam:
110 | driver: default
111 | config:
112 | - subnet: 10.10.10.0/24
113 | user1_reverse-proxy:
114 | name: user1_reverse-proxy
115 | driver: overlay
116 | attachable: true
117 | user2_reverse-proxy:
118 | name: user2_reverse-proxy
119 | driver: overlay
120 | attachable: true
121 |
--------------------------------------------------------------------------------
/examples/stacks/user1/00-redis.yml:
--------------------------------------------------------------------------------
1 | version: '3.7'
2 |
3 | services:
4 | redis:
5 | image: aisberg/redis:latest
6 | environment:
7 | - SUPERVISOR_HTTP_SERVER=inet
8 | - REDIS_DATABASES=3
9 | - REDIS_REQUIREPASS=lnFl4MzarOL5bTEbreld
10 | - REDIS_MAXCLIENTS=500
11 | - REDIS_MAXMEMORY=300mb
12 | networks:
13 | redis:
14 | aliases:
15 | - redis
16 | - redis.user1.docker
17 | volumes:
18 | - /etc/localtime:/etc/localtime:ro
19 | - /srv/user1/redis/redis:/container/redis
20 | - /srv/user1/redis/log:/container/log
21 | deploy:
22 | mode: global
23 | restart_policy:
24 | condition: on-failure
25 | delay: 5s
26 | max_attempts: 5
27 | resources:
28 | limits:
29 | cpus: '2.0'
30 | memory: 300mb
31 | update_config:
32 | parallelism: 1
33 | delay: 10s
34 |
35 | networks:
36 | redis:
37 | name: user1_redis
38 | driver: overlay
39 | attachable: true
40 |
41 |
--------------------------------------------------------------------------------
/examples/stacks/user1/10-nextcloud.yml:
--------------------------------------------------------------------------------
1 | version: '3.7'
2 |
3 | services:
4 | nextcloud:
5 | image: aisberg/nextcloud:15.0.5
6 | environment:
7 | - DOMAINNAMES=cloud.user1.de
8 | - SUPERVISOR_HTTP_SERVER=inet
9 | - MAX_UPLOAD_FILESIZE=16384M
10 | - NGINX_BEHIND_PROXY=True
11 | - NGINX_TLS_TERMINATED=True
12 | - NGINX_REWRITE_HTTPS=False
13 | - NGINX_REDIRECT_TO_FIRST_DOMAIN=True
14 | - NGINX_FASTCGI_READ_TIMEOUT=3600
15 | - PHP_MAX_EXECUTION_TIME=3600
16 | - PHP_MAX_INPUT_TIME=3600
17 |
18 | - AUTO_UPDATE=True
19 | - NEXTCLOUD_DEFAULT_LANGUAGE=de_DE
20 | - NEXTCLOUD_DEFAULT_LOCALE=de_DE
21 | - NEXTCLOUD_DATABASE_TYPE=mysql
22 | - NEXTCLOUD_DATABASE_HOST=mariadb.global.docker
23 | - NEXTCLOUD_DATABASE_NAME=user1_01
24 | - NEXTCLOUD_DATABASE_USER=user1_01
25 | - NEXTCLOUD_DATABASE_PASSWORD=7qvbCz7JcVS3OcSa9jE0
26 | - NEXTCLOUD_TRASHBIN_RETENTION_OBLIGATION=auto, 20
27 | - NEXTCLOUD_VERSIONS_RETENTION_OBLIGATION=auto, 40
28 | - NEXTCLOUD_REDIS_HOST=redis.user1.docker
29 | - NEXTCLOUD_REDIS_PORT=6379
30 | - NEXTCLOUD_REDIS_PASSWORD=lnFl4MzarOL5bTEbreld
31 | - NEXTCLOUD_REDIS_DBINDEX=0
32 | - NEXTCLOUD_MEMCACHE_LOCKING_ENABLED=True
33 | networks:
34 | database:
35 | redis:
36 | reverse-proxy:
37 | aliases:
38 | - nextcloud.user1.docker
39 | volumes:
40 | - /etc/localtime:/etc/localtime:ro
41 | - /srv/user1/nextcloud/www:/container/www
42 | - /srv/user1/nextcloud/cfg/nginx:/etc/nginx/conf.d
43 | - /srv/user1/nextcloud/log:/container/log
44 | deploy:
45 | labels:
46 | - traefik.enable=true
47 | - traefik.docker.network=user1_reverse-proxy
48 | - traefik.port=8080
49 | - traefik.weight=10
50 | - traefik.frontend.headers.STSIncludeSubdomains=true
51 | - traefik.frontend.headers.STSPreload=true
52 | - traefik.frontend.headers.STSSeconds=315360000
53 | - traefik.frontend.passHostHeader=true
54 | - traefik.frontend.redirect.entryPoint=https
55 | - traefik.frontend.redirect.permanent=true
56 | - traefik.frontend.rule=Host:cloud.user1.de
57 | - traefik.backend.loadbalancer.method=drr
58 | - traefik.backend.loadbalancer.stickiness.cookieName=traefik_user1_nextcloud
59 | - traefik.backend.loadbalancer.stickiness=true
60 | - traefik.backend.loadbalancer.swarm=true
61 | mode: global
62 | restart_policy:
63 | condition: on-failure
64 | delay: 5s
65 | max_attempts: 5
66 | resources:
67 | limits:
68 | cpus: '2.0'
69 | memory: 1000MB
70 | update_config:
71 | parallelism: 1
72 | delay: 10s
73 |
74 | networks:
75 | reverse-proxy:
76 | external: true
77 | name: user1_reverse-proxy
78 | database:
79 | external: true
80 | name: user1_mariadb
81 | redis:
82 | external: true
83 | name: user1_redis
84 |
85 |
--------------------------------------------------------------------------------
/examples/stacks/user2/00-redis.yml:
--------------------------------------------------------------------------------
1 | version: '3.7'
2 |
3 | services:
4 | redis:
5 | image: aisberg/redis:latest
6 | environment:
7 | - SUPERVISOR_HTTP_SERVER=inet
8 | - REDIS_DATABASES=3
9 | - REDIS_REQUIREPASS=YwrbSwFEzEu2iG0rwEpi
10 | - REDIS_MAXCLIENTS=500
11 | - REDIS_MAXMEMORY=300mb
12 | networks:
13 | redis:
14 | aliases:
15 | - redis
16 | - redis.user2.docker
17 | volumes:
18 | - /etc/localtime:/etc/localtime:ro
19 | - /srv/user2/redis/redis:/container/redis
20 | - /srv/user2/redis/log:/container/log
21 | deploy:
22 | mode: global
23 | restart_policy:
24 | condition: on-failure
25 | delay: 5s
26 | max_attempts: 5
27 | resources:
28 | limits:
29 | cpus: '2.0'
30 | memory: 300mb
31 | update_config:
32 | parallelism: 1
33 | delay: 10s
34 |
35 | networks:
36 | redis:
37 | name: user2_redis
38 | driver: overlay
39 | attachable: true
40 |
41 |
--------------------------------------------------------------------------------
/examples/stacks/user2/10-nextcloud.yml:
--------------------------------------------------------------------------------
1 | version: '3.7'
2 |
3 | services:
4 | nextcloud:
5 | image: aisberg/nextcloud:15.0.5
6 | environment:
7 | - DOMAINNAMES=cloud.user2.de
8 | - SUPERVISOR_HTTP_SERVER=inet
9 | - MAX_UPLOAD_FILESIZE=16384M
10 | - NGINX_BEHIND_PROXY=True
11 | - NGINX_TLS_TERMINATED=True
12 | - NGINX_REWRITE_HTTPS=False
13 | - NGINX_REDIRECT_TO_FIRST_DOMAIN=True
14 | - NGINX_FASTCGI_READ_TIMEOUT=3600
15 | - PHP_MAX_EXECUTION_TIME=3600
16 | - PHP_MAX_INPUT_TIME=3600
17 |
18 | - AUTO_UPDATE=True
19 | - NEXTCLOUD_DEFAULT_LANGUAGE=de_DE
20 | - NEXTCLOUD_DEFAULT_LOCALE=de_DE
21 | - NEXTCLOUD_DATABASE_TYPE=mysql
22 | - NEXTCLOUD_DATABASE_HOST=mariadb.global.docker
23 | - NEXTCLOUD_DATABASE_NAME=user2_01
24 | - NEXTCLOUD_DATABASE_USER=user2_01
25 | - NEXTCLOUD_DATABASE_PASSWORD=IKJDNBuoyU53rZEAIb4Z
26 | - NEXTCLOUD_TRASHBIN_RETENTION_OBLIGATION=auto, 20
27 | - NEXTCLOUD_VERSIONS_RETENTION_OBLIGATION=auto, 40
28 | - NEXTCLOUD_REDIS_HOST=redis.user2.docker
29 | - NEXTCLOUD_REDIS_PORT=6379
30 | - NEXTCLOUD_REDIS_PASSWORD=YwrbSwFEzEu2iG0rwEpi
31 | - NEXTCLOUD_REDIS_DBINDEX=0
32 | - NEXTCLOUD_MEMCACHE_LOCKING_ENABLED=True
33 | networks:
34 | database:
35 | redis:
36 | reverse-proxy:
37 | aliases:
38 | - nextcloud.user2.docker
39 | volumes:
40 | - /etc/localtime:/etc/localtime:ro
41 | - /srv/user2/nextcloud/www:/container/www
42 | - /srv/user2/nextcloud/cfg/nginx:/etc/nginx/conf.d
43 | - /srv/user2/nextcloud/log:/container/log
44 | deploy:
45 | labels:
46 | - traefik.enable=true
47 | - traefik.docker.network=user2_reverse-proxy
48 | - traefik.port=8080
49 | - traefik.weight=10
50 | - traefik.frontend.headers.STSIncludeSubdomains=true
51 | - traefik.frontend.headers.STSPreload=true
52 | - traefik.frontend.headers.STSSeconds=315360000
53 | - traefik.frontend.passHostHeader=true
54 | - traefik.frontend.redirect.entryPoint=https
55 | - traefik.frontend.redirect.permanent=true
56 | - traefik.frontend.rule=Host:cloud.user2.de
57 | - traefik.backend.loadbalancer.method=drr
58 | - traefik.backend.loadbalancer.stickiness.cookieName=traefik_user2_nextcloud
59 | - traefik.backend.loadbalancer.stickiness=true
60 | - traefik.backend.loadbalancer.swarm=true
61 | mode: global
62 | restart_policy:
63 | condition: on-failure
64 | delay: 5s
65 | max_attempts: 5
66 | resources:
67 | limits:
68 | cpus: '2.0'
69 | memory: 1000MB
70 | update_config:
71 | parallelism: 1
72 | delay: 10s
73 |
74 | networks:
75 | reverse-proxy:
76 | external: true
77 | name: user2_reverse-proxy
78 | database:
79 | external: true
80 | name: user2_mariadb
81 | redis:
82 | external: true
83 | name: user2_redis
84 |
85 |
--------------------------------------------------------------------------------
/examples/templates/mariadb.yml.j2:
--------------------------------------------------------------------------------
1 | {% set development = development|default(false) %}
2 | {% set deployment_name = deployment_name|default('mariadb') %}
3 |
4 | version: '3.7'
5 |
6 | services:
7 | {{ deployment_name }}:
8 | # https://hub.docker.com/r/bitnami/mariadb/
9 | image: bitnami/mariadb:10.2.21
10 | environment:
11 | - "MARIADB_PORT_NUMBER=3306"
12 | - "MARIADB_ROOT_PASSWORD={{ MYSQL_ROOT_PASSWORD|mandatory('MYSQL_ROOT_PASSWORD must be defined') }}"
13 | - "MARIADB_ROOT_USER=root"
14 | networks:
15 | {% for network in stacks|default([]) %}
16 | {{ network }}_mariadb:
17 | aliases:
18 | - mariadb
19 | - mariadb.{{ stack_name }}.docker
20 | - mysql
21 | - mysql.{{ stack_name }}.docker
22 | {% endfor %}
23 | user: "999"
24 | volumes:
25 | - "/etc/localtime:/etc/localtime:ro"
26 | - "{{ base_volume_dir|mandatory }}/{{ stack_name }}/{{ deployment_name }}:/bitnami/mariadb"
27 | deploy:
28 | mode: global
29 | placement:
30 | constraints: [node.platform.os == linux]
31 | restart_policy:
32 | condition: on-failure
33 | delay: 5s
34 | resources:
35 | limits:
36 | cpus: "2.0"
37 | memory: 2000MB
38 | update_config:
39 | parallelism: 1
40 | delay: 10m
41 |
42 | networks:
43 | {% for network in stacks|default([]) %}
44 | {{ network }}_mariadb:
45 | name: {{ network }}_mariadb
46 | driver: overlay
47 | attachable: true
48 | {% endfor %}
49 |
--------------------------------------------------------------------------------
/examples/templates/nextcloud.yml.j2:
--------------------------------------------------------------------------------
1 | {% set development = development|default(false) %}
2 | {% set deployment_name = deployment_name|default('nextcloud') %}
3 |
4 | version: '3.7'
5 |
6 | services:
7 | {{ deployment_name }}:
8 | image: aisberg/nextcloud:{{ 'latest' if development else '15.0.5' }}
9 | environment:
10 | - "DOMAINNAMES={{ domainnames|default(omit) }}"
11 | - "SUPERVISOR_HTTP_SERVER=inet"
12 |
13 | - "NGINX_WORKER_PROCESSES={{ NGINX_WORKER_PROCESSES|default(omit) }}"
14 | - "NGINX_WORKER_CONNECTIONS={{ NGINX_WORKER_CONNECTIONS|default(omit) }}"
15 | - "NGINX_WORKER_OPENED_FILES={{ NGINX_WORKER_OPENED_FILES|default(omit) }}"
16 | - "NGINX_MULTI_ACCEPT={{ NGINX_MULTI_ACCEPT|default(omit) }}"
17 | - "MAX_UPLOAD_FILESIZE={{ MAX_UPLOAD_FILESIZE|default('16384M') }}"
18 | - "NGINX_BEHIND_PROXY=True"
19 | - "NGINX_TLS_TERMINATED=True"
20 | - "NGINX_REWRITE_HTTPS=False"
21 | - "NGINX_REDIRECT_TO_FIRST_DOMAIN={{ NGINX_REDIRECT_TO_FIRST_DOMAIN|default(False) }}"
22 | - "NGINX_FASTCGI_READ_TIMEOUT={{ NGINX_FASTCGI_READ_TIMEOUT|default(3600) }}"
23 |
24 | - "PHP_FPM_MAX_CHILDREN={{ PHP_FPM_MAX_CHILDREN|default(omit) }}"
25 | - "PHP_FPM_MIN_SPARE_SERVERS={{ PHP_FPM_MIN_SPARE_SERVERS|default(omit) }}"
26 | - "PHP_FPM_MAX_SPARE_SERVERS={{ PHP_FPM_MAX_SPARE_SERVERS|default(omit) }}"
27 | - "PHP_MAX_EXECUTION_TIME={{ PHP_MAX_EXECUTION_TIME|default(3600) }}"
28 | - "PHP_MAX_INPUT_TIME={{ PHP_MAX_INPUT_TIME|default(3600) }}"
29 | - "PHP_MEMORY_LIMIT={{ PHP_MEMORY_LIMIT|default(omit) }}"
30 | - "PHP_MAX_FILE_UPLOADS={{ PHP_MAX_FILE_UPLOADS|default(omit) }}"
31 |
32 | - "AUTO_UPDATE={{ AUTO_UPDATE|default(true) }}"
33 | - "NEXTCLOUD_DEFAULT_LANGUAGE=de_DE"
34 | - "NEXTCLOUD_DEFAULT_LOCALE=de_DE"
35 | - "NEXTCLOUD_DATABASE_TYPE={{ NEXTCLOUD_DATABASE_TYPE|default('mysql') }}"
36 | - "NEXTCLOUD_DATABASE_HOST={{ NEXTCLOUD_DATABASE_HOST|mandatory('NEXTCLOUD_DATABASE_HOST must be defined') }}"
37 | - "NEXTCLOUD_DATABASE_NAME={{ NEXTCLOUD_DATABASE_NAME|mandatory('NEXTCLOUD_DATABASE_NAME must be defined') }}"
38 | - "NEXTCLOUD_DATABASE_USER={{ NEXTCLOUD_DATABASE_USER|mandatory('NEXTCLOUD_DATABASE_USER must be defined') }}"
39 | - "NEXTCLOUD_DATABASE_PASSWORD={{ NEXTCLOUD_DATABASE_PASSWORD|default(omit) }}"
40 | - "NEXTCLOUD_TRASHBIN_RETENTION_OBLIGATION={{ NEXTCLOUD_TRASHBIN_RETENTION_OBLIGATION|default('auto, 20') }}"
41 | - "NEXTCLOUD_VERSIONS_RETENTION_OBLIGATION={{ NEXTCLOUD_VERSIONS_RETENTION_OBLIGATION|default('auto, 40') }}"
42 | - "NEXTCLOUD_MEMCACHE_LOCAL={{ NEXTCLOUD_MEMCACHE_LOCAL|default(omit) }}"
43 |       - "NEXTCLOUD_MEMCACHE_DISTRIBUTED={{ NEXTCLOUD_MEMCACHE_DISTRIBUTED|default(omit) }}"
44 | - "NEXTCLOUD_REDIS_HOST={{ NEXTCLOUD_REDIS_HOST|default(omit) }}"
45 | - "NEXTCLOUD_REDIS_PORT={{ NEXTCLOUD_REDIS_PORT|default(omit) }}"
46 | - "NEXTCLOUD_REDIS_TIMEOUT={{ NEXTCLOUD_REDIS_TIMEOUT|default(omit) }}"
47 | - "NEXTCLOUD_REDIS_PASSWORD={{ NEXTCLOUD_REDIS_PASSWORD|default(omit) }}"
48 | - "NEXTCLOUD_REDIS_DBINDEX={{ NEXTCLOUD_REDIS_DBINDEX|default(omit) }}"
49 | - "NEXTCLOUD_MEMCACHE_LOCKING_ENABLED={{ NEXTCLOUD_MEMCACHE_LOCKING_ENABLED|default(omit) }}"
50 | {# - "NEXTCLOUD_DBDRIVEROPTIONS=PDO::MYSQL_ATTR_INIT_COMMAND => 'SET wait_timeout = 28800'" #}
51 | {% if secrets is defined %}
52 | secrets:
53 | {% for secret in secrets %}
54 | - source: {{ secret.source }}
55 | target: {{ secret.target }}
56 | uid: '999'
57 | gid: '0'
58 | mode: 0400
59 | {% endfor %}
60 | {% endif %}
61 | networks:
62 | database:
63 | redis:
64 | reverse-proxy:
65 | aliases:
66 | - {{ deployment_name }}.{{ stack_name }}.docker
67 | volumes:
68 | - "/etc/localtime:/etc/localtime:ro"
69 | - "{{ base_volume_dir|mandatory }}/{{ stack_name }}/{{ deployment_name }}/www:/container/www"
70 | - "{{ base_volume_dir|mandatory }}/{{ stack_name }}/{{ deployment_name }}/cfg/nginx:/etc/nginx/conf.d"
71 | - "{{ base_volume_dir|mandatory }}/{{ stack_name }}/{{ deployment_name }}/log:/container/log"
72 | {% if extra_volumes is defined %}
73 | {% for ev in extra_volumes %}
74 | - "{{ ev.source }}:{{ ev.target }}{{ ':ro' if ev.read_only|default(false) else '' }}"
75 | {% endfor %}
76 | {% endif %}
77 | deploy:
78 | labels:
79 | - "traefik.enable=true"
80 | - "traefik.docker.network={{ stack_name }}_reverse-proxy"
81 | - "traefik.port=8080"
82 | - "traefik.weight=10"
83 | - "traefik.frontend.auth.basic.users={{ basic_auth|join(',')|replace('$','$$') if basic_auth is defined else omit }}"
84 | - "traefik.frontend.auth.basic.removeHeader={{ 'true' if basic_auth is defined else omit }}"
85 | - "traefik.frontend.headers.STSIncludeSubdomains=true"
86 | - "traefik.frontend.headers.STSPreload=true"
87 | - "traefik.frontend.headers.STSSeconds=315360000"
88 | - "traefik.frontend.passHostHeader=true"
89 | - "traefik.frontend.redirect.entryPoint=https"
90 | - "traefik.frontend.redirect.permanent=true"
91 | - "traefik.frontend.rule=Host:{{ domainnames }}"
92 | - "traefik.frontend.whiteList.sourceRange={{ allowed_src|default(omit) }}"
93 | - "traefik.backend.loadbalancer.method=drr"
94 | - "traefik.backend.loadbalancer.stickiness.cookieName=traefik_{{ stack_name }}_{{ deployment_name }}"
95 |         - "traefik.backend.loadbalancer.stickiness={{ traefik_stickiness|default(True)|lower }}"
96 | - "traefik.backend.loadbalancer.swarm=true"
97 | mode: global
98 | restart_policy:
99 | condition: on-failure
100 | delay: 5s
101 | max_attempts: 5
102 | resources:
103 | limits:
104 | cpus: '2.0'
105 | memory: "{{ memory|default('1000MB') }}"
106 | update_config:
107 | parallelism: 1
108 | delay: 10s
109 |
110 | networks:
111 | reverse-proxy:
112 | external: true
113 | name: {{ stack_name }}_reverse-proxy
114 | database:
115 | external: true
116 | name: {{ stack_name }}_mariadb
117 | redis:
118 | external: true
119 | name: {{ stack_name }}_redis
120 |
121 | {% if secrets is defined %}
122 | secrets:
123 | {% for secret in secrets %}
124 | {{ secret.source }}:
125 | external: true
126 | {% endfor %}
127 | {% endif %}
128 |
--------------------------------------------------------------------------------
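
Many of the environment entries above use |default(omit); after rendering, any list item still carrying the omit placeholder is removed entirely, which is why the rendered user1/user2 files contain no empty VAR= lines. A sketch of that pruning:

    >>> from docker_compose_templer.jinja_renderer import JinjaRenderer
    >>> env = ['MAX_UPLOAD_FILESIZE=16384M',
    ...        'PHP_MEMORY_LIMIT=' + JinjaRenderer.omit_placeholder]
    >>> JinjaRenderer.remove_omit_from_dict({'environment': env})
    {'environment': ['MAX_UPLOAD_FILESIZE=16384M']}
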
/examples/templates/redis.yml.j2:
--------------------------------------------------------------------------------
1 | {% set development = development|default(false) %}
2 | {% set deployment_name = deployment_name|default('redis') %}
3 |
4 | version: '3.7'
5 |
6 | services:
7 | {{ deployment_name }}:
8 | image: aisberg/redis:latest
9 | environment:
10 | - "SUPERVISOR_HTTP_SERVER=inet"
11 |
12 | - "REDIS_LOGLEVEL={{ REDIS_LOGLEVEL|default(omit) }}"
13 | - "REDIS_DATABASES={{ REDIS_DATABASES|default(4) }}"
14 | - "REDIS_REQUIREPASS={{ REDIS_REQUIREPASS|default(omit) }}"
15 | - "REDIS_MAXCLIENTS={{ REDIS_MAXCLIENTS|default(1000) }}"
16 | - "REDIS_MAXMEMORY={{ REDIS_MAXMEMORY|default('300MB') }}"
17 | {% if secrets is defined %}
18 | secrets:
19 | {% for secret in secrets %}
20 | - source: {{ secret.source }}
21 | target: {{ secret.target }}
22 | uid: '999'
23 | gid: '0'
24 | mode: 0400
25 | {% endfor %}
26 | {% endif %}
27 | networks:
28 | redis:
29 | aliases:
30 | - redis
31 | - {{ deployment_name }}.{{ stack_name }}.docker
32 | volumes:
33 | - "/etc/localtime:/etc/localtime:ro"
34 | - "{{ base_volume_dir|mandatory }}/{{ stack_name }}/{{ deployment_name }}/redis:/container/redis"
35 | - "{{ base_volume_dir|mandatory }}/{{ stack_name }}/{{ deployment_name }}/log:/container/log"
36 | deploy:
37 | mode: global
38 | restart_policy:
39 | condition: on-failure
40 | delay: 5s
41 | max_attempts: 5
42 | resources:
43 | limits:
44 | cpus: '2.0'
45 | memory: "{{ REDIS_MAXMEMORY|default('300MB') }}"
46 | update_config:
47 | parallelism: 1
48 | delay: 10s
49 |
50 | networks:
51 | redis:
52 | name: {{ stack_name }}_{{ deployment_name }}
53 | driver: overlay
54 | attachable: true
55 |
56 | {% if secrets is defined %}
57 | secrets:
58 | {% for secret in secrets %}
59 | {{ secret.source }}:
60 | external: true
61 | {% endfor %}
62 | {% endif %}
63 |
--------------------------------------------------------------------------------
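
The base_volume_dir|mandatory expressions above make rendering fail fast when a required variable is missing instead of silently producing a broken compose file; the behaviour is pinned down in tests/test_JinjaRenderer.py. A sketch:

    >>> from docker_compose_templer.jinja_renderer import JinjaRenderer
    >>> from docker_compose_templer.jinja_filter import MandatoryError
    >>> try:
    ...     JinjaRenderer.render_string('{{ base_volume_dir|mandatory }}', {})
    ... except MandatoryError:
    ...     print('rendering aborted: base_volume_dir is undefined')
    rendering aborted: base_volume_dir is undefined
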
/examples/templates/reverse-proxy.yml.j2:
--------------------------------------------------------------------------------
1 | {% set development = development|default(false) %}
2 | {% set deployment_name = deployment_name|default('reverse-proxy') %}
3 |
4 | version: '3.7'
5 |
6 | services:
7 | {{ deployment_name }}:
8 | image: traefik:1.7.9
9 | command: --configfile='/container/traefik.toml'
10 | healthcheck:
11 | test: ["CMD", "/traefik", "healthcheck", "--configfile=/container/traefik.toml"]
12 | interval: 5m
13 | timeout: 10s
14 | retries: 3
15 | start_period: 15s
16 | networks:
17 | {% for network in stacks|default([]) %}
18 | {{ network }}_reverse-proxy:
19 | aliases:
20 | - reverse-proxy
21 | - reverse-proxy.docker
22 | - reverse-proxy.{{ stack_name }}.docker
23 | {% endfor %}
24 | {% if not development|default(false) %}
25 | expose:
26 | # traefik dashboard
27 | - "8000"
28 | {% endif %}
29 | ports:
30 | # Ports must be exposed in 'host' mode instead of 'ingress', otherwise the
31 | # incoming requests IP will be lost. For more information see:
32 | # https://github.com/moby/moby/issues/25526
33 | - target: 80
34 | published: 80
35 | protocol: tcp
36 | mode: host
37 | - target: 443
38 | published: 443
39 | protocol: tcp
40 | mode: host
41 | {% if development|default(false) %}
42 | - target: 8000
43 | published: 8000
44 | protocol: tcp
45 | mode: host
46 | {% endif %}
47 | volumes:
48 | - "/etc/localtime:/etc/localtime:ro"
49 | - "/var/run/docker.sock:/var/run/docker.sock:ro"
50 | - "{{ base_volume_dir|mandatory }}/{{ stack_name }}/{{ deployment_name }}:/container"
51 | deploy:
52 | labels:
53 | - "bindMountOwner=0:0"
54 | placement:
55 | constraints: [node.role == manager]
56 | mode: global
57 | update_config:
58 | parallelism: 1
59 | delay: 10s
60 | resources:
61 | reservations:
62 | memory: 60M
63 | limits:
64 | memory: "{{ memory|default('400MB') }}"
65 |
66 | error-pages:
67 | image: nginx:1.15-alpine
68 | command: "nginx -g 'daemon off;' -c /container/nginx.conf"
69 | networks:
70 | {{ stack_name }}_reverse-proxy:
71 | aliases:
72 | - error-pages.{{ stack_name }}.docker
73 | expose:
74 | - "8080"
75 | volumes:
76 | - "/etc/localtime:/etc/localtime:ro"
77 | - "{{ base_volume_dir|mandatory }}/{{ stack_name }}/{{ deployment_name }}/error-pages:/container"
78 | deploy:
79 | labels:
80 | - "bindMountOwner=100:101"
81 | - "bindMountMode=0770"
82 | - "updateService=true"
83 | - "traefik.enable=true"
84 | - "traefik.docker.network={{ stack_name }}_reverse-proxy"
85 | - "traefik.frontend.rule=HostRegexp:{catchall:.*};ReplacePath: /raise-503-error"
86 | - "traefik.frontend.priority=1"
87 | - "traefik.port=8080"
88 | - "traefik.weight=10"
89 | - "traefik.backend=error"
90 | - "traefik.backend.loadbalancer.method=drr"
91 | - "traefik.backend.loadbalancer.stickiness=false"
92 | - "traefik.backend.loadbalancer.swarm=true"
93 | mode: global
94 | restart_policy:
95 | condition: on-failure
96 | delay: 5s
97 | max_attempts: 5
98 | resources:
99 | limits:
100 | cpus: '0.1'
101 | memory: "{{ memory|default('20MB') }}"
102 | update_config:
103 | parallelism: 1
104 | delay: 10s
105 |
106 | networks:
107 | {% for network in stacks|default([]) %}
108 | {{ network }}_reverse-proxy:
109 | name: {{ network }}_reverse-proxy
110 | driver: overlay
111 | attachable: true
112 | {% if network == stack_name %}
113 | # use explicit subnet
114 | ipam:
115 | driver: default
116 | config:
117 | - subnet: 10.10.10.0/24
118 | {% endif %}
119 | {% endfor %}
120 |
--------------------------------------------------------------------------------
/examples/vars-global.yml:
--------------------------------------------------------------------------------
1 | ---
2 |
3 | base_volume_dir: /srv
4 |
--------------------------------------------------------------------------------
/requirements.txt:
--------------------------------------------------------------------------------
1 | jinja2
2 | pyinotify  # optional: only needed for the auto renderer (the 'AutoRenderer' extra in setup.py)
3 | ruamel.yaml
4 | setuptools
5 |
--------------------------------------------------------------------------------
/setup.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 |
3 | from os import path
4 |
5 | from setuptools import setup, find_packages
6 |
7 | with open(path.join(path.abspath(path.dirname(__file__)), 'README.md')) as f:
8 | long_description = f.read()
9 |
10 | setup(
11 | name='Docker Compose Templer',
12 | version='1.1.0',
13 | author='Andre Lehmann',
14 | author_email='aisberg@posteo.de',
15 | url='https://github.com/Aisbergg/python-docker-compose-templer',
16 | license='LGPL',
17 | description='Render Docker Compose file templates with the power of Jinja2',
18 | long_description=long_description,
19 | long_description_content_type='text/markdown',
20 | keywords='Jinja2 templating command-line CLI "Docker-Compose"',
21 | classifiers=[
22 | 'Development Status :: 4 - Beta',
23 | 'Environment :: Console',
24 | 'Intended Audience :: Developers',
25 | 'Intended Audience :: System Administrators',
26 | 'License :: OSI Approved :: GNU Lesser General Public License v3 (LGPLv3)',
27 | 'Operating System :: POSIX',
28 | 'Programming Language :: Python :: 3 :: Only',
29 | 'Topic :: System :: Systems Administration',
30 | 'Topic :: Utilities',
31 | ],
32 | project_urls={
33 | 'Bug Reports': 'https://github.com/Aisbergg/python-docker-compose-templer/issues',
34 | 'Source': 'https://github.com/Aisbergg/python-docker-compose-templer',
35 | },
36 | packages=find_packages(exclude=['examples', 'tests']),
37 | scripts=[
38 | 'bin/docker-compose-templer'
39 | ],
40 | install_requires=[
41 | 'jinja2',
42 | 'ruamel.yaml'
43 | ],
44 | extras_require={
45 | 'AutoRenderer': ["pyinotify"],
46 | },
47 | include_package_data=True,
48 | zip_safe=False,
49 | platforms=['POSIX'],
50 | )
51 |
--------------------------------------------------------------------------------
/tests/files/read.txt:
--------------------------------------------------------------------------------
1 | foobar
--------------------------------------------------------------------------------
/tests/test_CachedFile.py:
--------------------------------------------------------------------------------
1 | from unittest import TestCase
2 |
3 | from docker_compose_templer.cached_file import CachedFile
4 |
5 |
6 | class TestFile(TestCase):
7 | def test_exists(self):
8 | self.assertTrue(CachedFile('./vars/vars1.yml').exists())
9 | self.assertFalse(CachedFile('./foo').exists())
10 |
11 | def test_read(self):
12 | fp = './files/read.txt'
13 | f = CachedFile(fp)
14 | fcontent = 'foobar'
15 | self.assertEqual(f.read(), fcontent)
16 | self.assertEqual(f.cache['content'], fcontent)
17 |
18 | # read cached content
19 | self.assertEqual(f.read(), fcontent)
20 |
21 | # file does not exist
22 | self.assertRaises(FileNotFoundError, CachedFile('./foo').read)
23 | # not a file
24 | self.assertRaises(IOError, CachedFile('./vars').read)
25 |
26 | def test_write(self):
27 | # path is not a file
28 | self.assertRaises(OSError, CachedFile.write, '', './vars', False)
29 | # file already exists
30 | self.assertRaises(OSError, CachedFile.write, '', './files/read.txt', False)
31 |
32 | # write
33 | fp = './files/write.txt'
34 | import os
35 | if os.path.exists(fp):
36 | os.remove(fp)
37 |
38 | write_content = 'foo'
39 | CachedFile.write(write_content, fp, False)
40 | with open(fp, 'r') as f:
41 | self.assertEqual(f.read(), write_content)
42 |
43 | if os.path.exists(fp):
44 | os.remove(fp)
45 |
--------------------------------------------------------------------------------
/tests/test_ContextChainElement.py:
--------------------------------------------------------------------------------
1 | from unittest import TestCase
2 | from docker_compose_templer.context import ContextChainElement
3 |
4 |
5 | class TestContextChainElement(TestCase):
6 | def test_create_context(self):
7 | self.fail()
8 |
9 | def test_get_context(self):
10 | self.fail()
11 |
12 | def test__on_change(self):
13 | self.fail()
14 |
--------------------------------------------------------------------------------
/tests/test_JinjaRenderer.py:
--------------------------------------------------------------------------------
1 | from unittest import TestCase
2 |
3 | import jinja2
4 |
5 | from docker_compose_templer.jinja_renderer import JinjaRenderer
6 |
7 |
8 | class TestJinjaRenderer(TestCase):
9 | sample_context = {'a': 'foo', 'b': 123.456, 'c': True, 'd': {'x': 1, 'y': 2, 'z': 3},
10 | 'e': ['my', 'very', 'own', 'context']}
11 |
12 | def test_render_string(self):
13 | # simple string without Jinja
14 | si = "Lorem ipsum dolor sit amet, ... "
15 | so = si
16 | self.assertEqual(JinjaRenderer.render_string(si, self.sample_context), so)
17 |
18 | # simple string with Jinja
19 | si = "The height of {{ a }} is {{ b - d.x }}"
20 | so = "The height of foo is 122.456"
21 | self.assertEqual(JinjaRenderer.render_string(si, self.sample_context), so)
22 |
23 | # undefined variable
24 | self.assertRaises(jinja2.exceptions.UndefinedError, JinjaRenderer.render_string, si, {})
25 |
26 | # template error
27 | si = "{{ bar"
28 | self.assertRaises(jinja2.exceptions.TemplateError, JinjaRenderer.render_string, si, self.sample_context)
29 |
30 | def test_jinja_filter(self):
31 | # filter: mandatory
32 | from docker_compose_templer.jinja_filter import MandatoryError
33 | self.assertRaises(MandatoryError, JinjaRenderer.render_string, "{{ bar|mandatory() }}", {})
34 |
35 | # filter: regex_escape
36 | self.assertEqual(
37 | JinjaRenderer.render_string("{{ '[foo](bar)'|regex_escape() }}", {}),
38 |             r"\[foo\]\(bar\)"
39 | )
40 |
41 | # filter: regex_findall
42 | self.assertEqual(
43 | JinjaRenderer.render_string("{{ 'Lorem ipsum dolor sit amet'|regex_findall('[ae]m') }}", {}),
44 | "['em', 'am']"
45 | )
46 |
47 | # filter: regex_replace
48 | self.assertEqual(
49 | JinjaRenderer.render_string("{{ 'foobar'|regex_replace('^foo', 'Cocktail') }}", {}),
50 | 'Cocktailbar'
51 | )
52 |
53 | # filter: regex_search
54 | self.assertEqual(
55 |             JinjaRenderer.render_string(r"{{ 'Lorem ipsum dolor sit amet'|regex_search('ip(\S+)') }}", {}),
56 | "ipsum"
57 | )
58 | self.assertEqual(
59 | JinjaRenderer.render_string(r"{{ 'Lorem ipsum dolor sit amet'|regex_search('ip(\S+)', '\\1') }}", {}),
60 | "['sum']"
61 | )
62 |
63 | # filter: regex_contains
64 | self.assertEqual(
65 | JinjaRenderer.render_string("{{ 'foobar'|regex_contains('^foo[bB]ar$') }}", {}),
66 | 'True'
67 | )
68 | self.assertEqual(
69 | JinjaRenderer.render_string("{{ 'foobar'|regex_contains('barfoo') }}", {}),
70 | 'False'
71 | )
72 |
73 | # filter: to_bool
74 | self.assertEqual(
75 | JinjaRenderer.render_string("{{ 'yes'|to_bool() }}", {}),
76 | 'True'
77 | )
78 | self.assertEqual(
79 | JinjaRenderer.render_string("{{ 'xyz'|to_bool(default_value=False) }}", {}),
80 | 'False'
81 | )
82 |
83 | # filter: to_yaml
84 | #print("|{}|".format(JinjaRenderer.render_string("{{ c|to_yaml }}", {'c': self.sample_context})))
85 | self.assertEqual(
86 | JinjaRenderer.render_string("{{ c|to_yaml }}", {'c': self.sample_context}),
87 | 'a: foo\nb: 123.456\nc: true\nd:\n x: 1\n y: 2\n z: 3\ne:\n - my\n - very\n - own\n - context\n'
88 | )
89 |
90 | # filter: to_json
91 | #print("|{}|".format(JinjaRenderer.render_string("{{ c|to_json }}", {'c': self.sample_context})))
92 | self.assertEqual(
93 | JinjaRenderer.render_string("{{ c|to_json }}", {'c': self.sample_context}),
94 | '{"a": "foo", "b": 123.456, "c": true, "d": {"x": 1, "y": 2, "z": 3}, "e": ["my", "very", "own", "context"]}'
95 | )
96 |
97 | # filter: to_nice_json
98 | #print("|{}|".format(JinjaRenderer.render_string("{{ c|to_nice_json }}", {'c': self.sample_context})))
99 | self.assertEqual(
100 | JinjaRenderer.render_string("{{ c|to_nice_json }}", {'c': self.sample_context}),
101 | '{\n "a": "foo",\n "b": 123.456,\n "c": true,\n "d": {\n "x": 1,\n "y": 2,\n "z": 3\n },\n "e": [\n "my",\n "very",\n "own",\n "context"\n ]\n}'
102 | )
103 |
104 | def test_evaluate_string(self):
105 | # str
106 | self.assertIsInstance(JinjaRenderer._evaluate_string(' abc '), str)
107 |
108 | # bool
109 | self.assertIsInstance(JinjaRenderer._evaluate_string(' n '), bool)
110 | self.assertIsInstance(JinjaRenderer._evaluate_string(' yes '), bool)
111 | self.assertIsInstance(JinjaRenderer._evaluate_string(' True '), bool)
112 |
113 | # int
114 | self.assertIsInstance(JinjaRenderer._evaluate_string(' 99 '), int)
115 |
116 | # float
117 | self.assertIsInstance(JinjaRenderer._evaluate_string(' 1.2 '), float)
118 |
119 | # list
120 | self.assertIsInstance(JinjaRenderer._evaluate_string(' [1,2,3] '), list)
121 |
122 | # dict
123 | self.assertIsInstance(JinjaRenderer._evaluate_string(' {"a": 1} '), dict)
124 |
125 | def test_render_dict_and_add_to_context(self):
126 | d = {'a': '{{ e[0] }}', 'c': '{{ 2 == 1 }}', 'f': '{{ "1.2" }}'}
127 | self.assertEqual(
128 | JinjaRenderer.render_dict_and_add_to_context(d, self.sample_context),
129 | {'a': 'my', 'b': 123.456, 'c': False, 'd': {'x': 1, 'y': 2, 'z': 3},
130 | 'e': ['my', 'very', 'own', 'context'], 'f': 1.2, 'omit': JinjaRenderer.omit_placeholder}
131 | )
132 |
133 | def test_remove_omit_from_dict(self):
134 | d = { 'a': 'x', 'b': JinjaRenderer.omit_placeholder, 'c': {'d': 'y', 'e': JinjaRenderer.omit_placeholder}, 'f': [1, JinjaRenderer.omit_placeholder, 3, 4]}
135 | #d = { 'f': [1, JinjaRenderer.omit_placeholder, 3, 4]}
136 | self.assertEqual(
137 | JinjaRenderer.remove_omit_from_dict(d),
138 | {'a': 'x', 'c': {'d': 'y'}, 'f': [1, 3, 4]}
139 | )
140 |
--------------------------------------------------------------------------------
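
The type coercion asserted in test_evaluate_string above can also be observed directly; the coercion rules themselves live in the part of jinja_renderer.py not shown in this listing, so only the result types asserted by the tests are relied on here:

    >>> from docker_compose_templer.jinja_renderer import JinjaRenderer
    >>> type(JinjaRenderer._evaluate_string(' 99 '))
    <class 'int'>
    >>> type(JinjaRenderer._evaluate_string(' 1.2 '))
    <class 'float'>
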
/tests/test_utils.py:
--------------------------------------------------------------------------------
1 | from unittest import TestCase
2 |
3 | from ruamel.yaml.error import YAMLError
4 |
5 | from docker_compose_templer import utils
6 |
7 | class TestUtils(TestCase):
8 | def test_merge_dicts(self):
9 | d1 = {'a': 'foo', 'b': 123.456, 'c': True, 'd': {'x': 1, 'y': 2, 'z': 3},
10 | 'e': ['my', 'very', 'own', 'context']}
11 | d2 = {'a': 'bar', 'd': {'x': 0.99, 'zz': {}},
12 | 'e': ['new', 'list']}
13 | r = {'a': 'bar', 'b': 123.456, 'c': True, 'd': {'x': 0.99, 'y': 2, 'z': 3, 'zz': {}}, 'e': ['new', 'list']}
14 |         self.assertEqual(utils.merge_dicts(d1, d2), r)
15 |
16 | def test_load_yaml(self):
17 | self.assertEqual(
18 |             utils.load_yaml(
19 | 'a: foo\nb: 123.456\nc: true\nd:\n x: 1\n y: 2\n z: 3\ne:\n - my\n - very\n - own\n - context\n'
20 | ),
21 | {'a': 'foo', 'b': 123.456, 'c': True, 'd': {'x': 1, 'y': 2, 'z': 3},
22 | 'e': ['my', 'very', 'own', 'context']}
23 | )
24 |         self.assertRaises(YAMLError, utils.load_yaml, ' :wrong\na: foo\n')
--------------------------------------------------------------------------------