├── .editorconfig
├── .github
└── workflows
│ ├── publish-docker-image.yml
│ └── test.yml
├── .gitignore
├── .pre-commit-config.yaml
├── Dockerfile
├── LICENSE.md
├── Makefile
├── README.md
├── bootstrap
├── __init__.py
├── collector.py
├── constants.py
├── exceptions.py
├── helpers.py
└── runner.py
├── cookiecutter.json
├── pyproject.toml
├── requirements
├── common.in
├── common.txt
├── local.in
├── local.txt
├── test.in
└── test.txt
├── start.py
├── terraform
├── gitlab
│ ├── main.tf
│ └── variables.tf
├── terraform-cloud
│ ├── main.tf
│ └── variables.tf
└── vault
│ ├── main.tf
│ └── variables.tf
├── tests
├── __init__.py
├── test_collector.py
├── test_helpers.py
└── utils.py
└── {{cookiecutter.project_dirname}}
├── .dockerignore
├── .editorconfig
├── .env_template
├── .gitattributes
├── .gitignore
├── .gitlab-ci.yml
├── .pre-commit-config.yaml
├── Dockerfile
├── LICENSE.md
├── Makefile
├── README.md
├── docker-compose.yaml
├── features
├── empty.feature
└── steps
│ └── __init__.py
├── gunicorn.conf.py
├── manage.py
├── pacts
├── __init__.py
├── handler.py
└── verify_pacts.py
├── pyproject.toml
├── requirements
├── base.in
├── common.in
├── local.in
├── remote.in
└── test.in
├── scripts
├── behave.sh
├── check.sh
├── ci_pact.sh
├── ci_sentry.sh
├── coverage.sh
├── deploy.sh
├── deploy
│ ├── gitlab.sh
│ ├── init.sh
│ ├── terraform-cloud.sh
│ ├── terraform.sh
│ └── vault.sh
├── entrypoint.sh
├── pact_verify.sh
├── report.sh
└── test.sh
├── terraform
├── digitalocean-k8s
│ ├── main.tf
│ ├── variables.tf
│ ├── {% if cookiecutter.terraform_backend == "gitlab" %}backend.tf{% endif %}
│ └── {% if cookiecutter.terraform_backend == "terraform-cloud" %}cloud.tf{% endif %}
├── modules
│ └── kubernetes
│ │ ├── cronjob
│ │ ├── main.tf
│ │ └── variables.tf
│ │ └── deployment
│ │ ├── main.tf
│ │ ├── outputs.tf
│ │ └── variables.tf
├── other-k8s
│ ├── main.tf
│ ├── variables.tf
│ ├── {% if cookiecutter.terraform_backend == "gitlab" %}backend.tf{% endif %}
│ └── {% if cookiecutter.terraform_backend == "terraform-cloud" %}cloud.tf{% endif %}
└── vars
│ ├── .tfvars
│ ├── {% if "environment_dev" in cookiecutter.tfvars %}dev.tfvars{% endif %}
│ ├── {% if "environment_prod" in cookiecutter.tfvars %}prod.tfvars{% endif %}
│ └── {% if "environment_stage" in cookiecutter.tfvars %}stage.tfvars{% endif %}
└── {{cookiecutter.django_settings_dirname}}
├── __init__.py
├── asgi.py
├── settings.py
├── tests
├── __init__.py
└── test_views.py
├── urls.py
├── views.py
├── workers.py
└── wsgi.py
/.editorconfig:
--------------------------------------------------------------------------------
1 | # EditorConfig is awesome: https://EditorConfig.org
2 |
3 | # top-most EditorConfig file
4 | root = true
5 |
6 | # Unix-style newlines with a newline ending every file
7 | [*]
8 | end_of_line = lf
9 | insert_final_newline = true
10 | trim_trailing_whitespace = true
11 |
12 | # Matches multiple files with brace expansion notation
13 | # Set default charset
14 | [*.{css,html,js,json,jsx,md,py,scss,yml}]
15 | charset = utf-8
16 | indent_style = space
17 |
18 | # 4 space indentation
19 | [*.py]
20 | indent_size = 4
21 | max_line_length = 88
22 |
23 | [*.md]
24 | indent_size = 4
25 |
26 | # 2 space indentation
27 | [*.{html,js,json,jsx,css,scss,yml}]
28 | indent_size = 2
29 |
30 | # Tab indentation (no size specified)
31 | [Makefile]
32 | indent_style = tab
33 |
--------------------------------------------------------------------------------
/.github/workflows/publish-docker-image.yml:
--------------------------------------------------------------------------------
1 | name: Publish Docker image
2 |
3 | on:
4 | release:
5 | types: [published]
6 |
7 | jobs:
8 | docker:
9 | runs-on: ubuntu-latest
10 | steps:
11 | - name: Checkout
12 | uses: actions/checkout@v4
13 | - name: Docker meta
14 | id: meta
15 | uses: docker/metadata-action@v5
16 | with:
17 | images: 20tab/talos-django
18 | - name: Login to DockerHub
19 | if: github.event_name != 'pull_request'
20 | uses: docker/login-action@v3
21 | with:
22 | username: ${{ secrets.DOCKERHUB_USERNAME }}
23 | password: ${{ secrets.DOCKERHUB_TOKEN }}
24 | - name: Build and push
25 | uses: docker/build-push-action@v5
26 | with:
27 | context: .
28 | push: true
29 | tags: ${{ steps.meta.outputs.tags }},20tab/talos-django:latest
30 | labels: ${{ steps.meta.outputs.labels }}
31 |
--------------------------------------------------------------------------------
/.github/workflows/test.yml:
--------------------------------------------------------------------------------
1 | # This workflow will install Python dependencies, run tests and lint with a single version of Python
2 | # For more information see: https://help.github.com/actions/language-and-framework-guides/using-python-with-github-actions
3 |
4 | name: Python application
5 |
6 | on:
7 | push:
8 | branches: [main]
9 | pull_request:
10 | branches: [main]
11 |
12 | permissions:
13 | contents: read
14 |
15 | jobs:
16 | build:
17 | runs-on: ubuntu-latest
18 |
19 | steps:
20 | - uses: actions/checkout@v4
21 | - name: Set up Python 3.12
22 | uses: actions/setup-python@v5
23 | with:
24 | python-version: "3.12"
25 | - name: Install dependencies
26 | run: |
27 | python -m pip install --upgrade pip setuptools
28 | python -m pip install -r requirements/test.txt
29 | - name: Run Test
30 | run: |
31 | python3 -m unittest
32 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # START https://github.com/github/gitignore/blob/master/Python.gitignore
2 |
3 | # Byte-compiled / optimized / DLL files
4 | __pycache__/
5 | *.py[cod]
6 | *$py.class
7 |
8 | # C extensions
9 | *.so
10 |
11 | # Distribution / packaging
12 | .Python
13 | build/
14 | develop-eggs/
15 | dist/
16 | downloads/
17 | eggs/
18 | .eggs/
19 | lib/
20 | lib64/
21 | parts/
22 | sdist/
23 | var/
24 | wheels/
25 | share/python-wheels/
26 | *.egg-info/
27 | .installed.cfg
28 | *.egg
29 | MANIFEST
30 |
31 | # PyInstaller
32 | # Usually these files are written by a python script from a template
33 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
34 | *.manifest
35 | *.spec
36 |
37 | # Installer logs
38 | pip-log.txt
39 | pip-delete-this-directory.txt
40 |
41 | # Unit test / coverage reports
42 | htmlcov/
43 | .tox/
44 | .nox/
45 | .coverage
46 | .coverage.*
47 | .cache
48 | nosetests.xml
49 | coverage.xml
50 | *.cover
51 | *.py,cover
52 | .hypothesis/
53 | .pytest_cache/
54 | cover/
55 |
56 | # Translations
57 | *.mo
58 | *.pot
59 |
60 | # Django stuff:
61 | *.log
62 | local_settings.py
63 | db.sqlite3
64 | db.sqlite3-journal
65 |
66 | # Flask stuff:
67 | instance/
68 | .webassets-cache
69 |
70 | # Scrapy stuff:
71 | .scrapy
72 |
73 | # Sphinx documentation
74 | docs/_build/
75 |
76 | # PyBuilder
77 | .pybuilder/
78 | target/
79 |
80 | # Jupyter Notebook
81 | .ipynb_checkpoints
82 |
83 | # IPython
84 | profile_default/
85 | ipython_config.py
86 |
87 | # pyenv
88 | # For a library or package, you might want to ignore these files since the code is
89 | # intended to run in multiple environments; otherwise, check them in:
90 | # .python-version
91 |
92 | # pipenv
93 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
94 | # However, in case of collaboration, if having platform-specific dependencies or dependencies
95 | # having no cross-platform support, pipenv may install dependencies that don't work, or not
96 | # install all needed dependencies.
97 | #Pipfile.lock
98 |
99 | # poetry
100 | # Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
101 | # This is especially recommended for binary packages to ensure reproducibility, and is more
102 | # commonly ignored for libraries.
103 | # https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
104 | #poetry.lock
105 |
106 | # pdm
107 | # Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
108 | #pdm.lock
109 | # pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
110 | # in version control.
111 | # https://pdm.fming.dev/#use-with-ide
112 | .pdm.toml
113 |
114 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
115 | __pypackages__/
116 |
117 | # Celery stuff
118 | celerybeat-schedule
119 | celerybeat.pid
120 |
121 | # SageMath parsed files
122 | *.sage.py
123 |
124 | # Environments
125 | .env
126 | .venv
127 | env/
128 | venv/
129 | ENV/
130 | env.bak/
131 | venv.bak/
132 |
133 | # Spyder project settings
134 | .spyderproject
135 | .spyproject
136 |
137 | # Rope project settings
138 | .ropeproject
139 |
140 | # mkdocs documentation
141 | /site
142 |
143 | # mypy
144 | .mypy_cache/
145 | .dmypy.json
146 | dmypy.json
147 |
148 | # Pyre type checker
149 | .pyre/
150 |
151 | # pytype static type analyzer
152 | .pytype/
153 |
154 | # Cython debug symbols
155 | cython_debug/
156 |
157 | # PyCharm
158 | # JetBrains specific template is maintained in a separate JetBrains.gitignore that can
159 | # be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
160 | # and can be added to the global gitignore or merged into this file. For a more nuclear
161 | # option (not recommended) you can uncomment the following to ignore the entire idea folder.
162 | .idea/
163 |
164 | # END https://github.com/github/gitignore/blob/master/Python.gitignore
165 |
166 | # START local
167 |
168 | # SublimeText
169 | *.sublime-project
170 | *.sublime-workspace
171 |
172 | # Vim
173 | [._]*.un~
174 |
175 | # VisualStudioCode
176 | .devcontainer*
177 | .vscode/
178 |
179 | # macOS
180 | .DS_Store
181 |
182 | # requirements
183 | */requirements/*.txt
184 |
185 | # Ruff
186 | .ruff_cache/
187 |
188 | # Terraform
189 | .terraform.lock*
190 | .terraform/
191 | *.tfstate*
192 | terraform.tfvars
193 |
194 | # Artifacts
195 | backend/
196 | .coverages/
197 | .logs/
198 |
199 | # END local
200 |
--------------------------------------------------------------------------------
/.pre-commit-config.yaml:
--------------------------------------------------------------------------------
1 | default_language_version:
2 | python: python3.12
3 | repos:
4 | - repo: https://github.com/pre-commit/pre-commit-hooks
5 | rev: "v4.5.0"
6 | hooks:
7 | - id: check-added-large-files
8 | args: ["--maxkb=1024"]
9 | - id: check-case-conflict
10 | - id: check-docstring-first
11 | - id: check-json
12 | - id: check-merge-conflict
13 | - id: check-toml
14 | - id: check-yaml
15 | args: ["--allow-multiple-documents"]
16 | exclude: ^\{\{cookiecutter\.project_dirname\}\}.*$
17 | - id: debug-statements
18 | - id: detect-private-key
19 | - id: end-of-file-fixer
20 | - id: file-contents-sorter
21 | files: ^(requirements/\w*.in)$
22 | args: ["--ignore-case", "--unique"]
23 | - id: fix-byte-order-marker
24 | - id: fix-encoding-pragma
25 | args: ["--remove"]
26 | - id: mixed-line-ending
27 | - id: trailing-whitespace
28 | - repo: https://github.com/charliermarsh/ruff-pre-commit
29 | rev: v0.1.11
30 | hooks:
31 | - id: ruff
32 | exclude: ^\{\{cookiecutter\.project_dirname\}\}.*$
33 | args:
34 | - --fix
35 | - repo: https://github.com/psf/black
36 | rev: "23.12.1"
37 | hooks:
38 | - id: black
39 | - repo: https://github.com/tox-dev/pyproject-fmt
40 | rev: "1.5.3"
41 | hooks:
42 | - id: pyproject-fmt
43 | - repo: https://github.com/pre-commit/mirrors-prettier
44 | rev: "v3.1.0"
45 | hooks:
46 | - id: prettier
47 | exclude: ^\{\{cookiecutter\.project_dirname\}\}.*$
48 | - repo: https://github.com/pre-commit/mirrors-mypy
49 | rev: "v1.8.0"
50 | hooks:
51 | - id: mypy
52 | args: ["--no-site-packages"]
53 | exclude: ^\{\{cookiecutter\.project_dirname\}\}.*$
54 | - repo: https://github.com/pycqa/bandit
55 | rev: "1.7.6"
56 | hooks:
57 | - id: bandit
58 | additional_dependencies: ["bandit[toml]"]
59 | args: ["--configfile", "pyproject.toml", "--quiet", "--recursive"]
60 | - repo: https://github.com/trailofbits/pip-audit
61 | rev: v2.6.3
62 | hooks:
63 | - id: pip-audit
64 | args:
65 | [
66 | "--require-hashes",
67 | "--disable-pip",
68 | "--requirement",
69 | "requirements/local.txt",
70 | ]
71 |
--------------------------------------------------------------------------------
/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM python:3.12-slim-bookworm
2 |
3 | ARG DEBIAN_FRONTEND=noninteractive
4 | ARG OUTPUT_BASE_DIR=/data
5 | ENV OUTPUT_BASE_DIR=${OUTPUT_BASE_DIR}
6 | WORKDIR /app
7 | RUN apt-get update \
8 | && apt-get install --assume-yes --no-install-recommends \
9 | curl \
10 | git \
11 | gnupg \
12 | libpq-dev \
13 | software-properties-common \
14 | && curl https://apt.releases.hashicorp.com/gpg | gpg --dearmor > /usr/share/keyrings/hashicorp-archive-keyring.gpg \
15 | && gpg --no-default-keyring --keyring /usr/share/keyrings/hashicorp-archive-keyring.gpg --fingerprint \
16 | && echo "deb [signed-by=/usr/share/keyrings/hashicorp-archive-keyring.gpg] https://apt.releases.hashicorp.com $(lsb_release -cs) main" | tee /etc/apt/sources.list.d/hashicorp.list \
17 | && apt-get update \
18 | && apt-get install --assume-yes --no-install-recommends \
19 | terraform \
20 | && rm -rf /var/lib/apt/lists/*
21 | COPY ./requirements/common.txt requirements/common.txt
22 | RUN python3 -m pip install --no-cache-dir --upgrade pip setuptools \
23 | && python3 -m pip install --no-cache-dir -r requirements/common.txt
24 | COPY . .
25 | RUN mkdir ${OUTPUT_BASE_DIR}
26 | ENTRYPOINT [ "python", "/app/start.py" ]
27 |
--------------------------------------------------------------------------------
/LICENSE.md:
--------------------------------------------------------------------------------
1 | # MIT License
2 |
3 | Copyright (c) 2014-2024 20tab srl
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/Makefile:
--------------------------------------------------------------------------------
1 | .DEFAULT_GOAL := help
2 |
3 | .PHONY: check
4 | check: ## Check code formatting and import sorting
5 | python3 -m black --check .
6 | python3 -m ruff check .
7 |
8 | .PHONY: fix
9 | fix: ## Fix code formatting, linting and sorting imports
10 | python3 -m black .
11 | python3 -m ruff --fix .
12 |
13 | .PHONY: local
14 | local: pip_update ## Install local requirements and dependencies
15 | python3 -m piptools sync requirements/local.txt
16 |
17 | .PHONY: outdated
18 | outdated: ## Check outdated requirements and dependencies
19 | python3 -m pip list --outdated
20 |
21 | .PHONY: pip
22 | pip: pip_update ## Compile requirements
23 | python3 -m piptools compile --generate-hashes --no-header --quiet --resolver=backtracking --strip-extras --upgrade --output-file requirements/common.txt requirements/common.in
24 | python3 -m piptools compile --generate-hashes --no-header --quiet --resolver=backtracking --strip-extras --upgrade --output-file requirements/local.txt requirements/local.in
25 | python3 -m piptools compile --generate-hashes --no-header --quiet --resolver=backtracking --strip-extras --upgrade --output-file requirements/test.txt requirements/test.in
26 |
27 | .PHONY: pip_update
28 | pip_update: ## Update requirements and dependencies
29 | python3 -m pip install --quiet --upgrade pip~=23.3.0 pip-tools~=7.3.0 setuptools~=69.0.0 wheel~=0.42.0
30 |
31 | .PHONY: precommit
32 | precommit: ## Fix code formatting, linting and sorting imports
33 | python3 -m pre_commit run --all-files
34 |
35 | .PHONY: precommit_update
36 | precommit_update: ## Update pre_commit
37 | python3 -m pre_commit autoupdate
38 |
39 | ifeq (simpletest,$(firstword $(MAKECMDGOALS)))
40 | simpletestargs := $(wordlist 2, $(words $(MAKECMDGOALS)), $(MAKECMDGOALS))
41 | $(eval $(simpletestargs):;@true)
42 | endif
43 |
44 | .PHONY: simpletest
45 | simpletest: ## Run debug tests
46 | python3 -m unittest $(simpletestargs)
47 |
48 | .PHONY: test
49 | test: ## Run full test and coverage
50 | python3 -m coverage run -m unittest
51 | python3 -m coverage html
52 | python3 -m coverage report
53 |
54 | .PHONY: update
55 | update: pip precommit_update ## Run update
56 |
57 | .PHONY: help
58 | help:
59 | @echo "[Help] Makefile list commands:"
60 | @grep -E '^[a-zA-Z_-]+:.*?## .*$$' $(MAKEFILE_LIST) | sort | awk 'BEGIN {FS = ":.*?## "}; {printf "\033[36m%-30s\033[0m %s\n", $$1, $$2}'
61 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Talos Submodule - Django Continuous Delivery
2 |
3 | [](https://github.com/python/black)
4 |
5 | > A [Django](https://docs.djangoproject.com) project template ready for continuous delivery.
6 |
7 | ## 🧩 Requirements
8 |
9 | The Talos script can be run either using Docker or a Python virtual environment.
10 |
11 | ### 🐋 Docker
12 |
13 | In order to run Talos via Docker, a working [Docker installation](https://docs.docker.com/get-docker/) is the only requirement.
14 |
15 | ### 🐍 Virtual environment
16 |
17 | In order to run Talos in a virtual environment, first clone the repository in a local projects directory and ensure it is your current directory:
18 |
19 | ```console
20 | cd ~/projects
21 | git clone git@github.com:20tab/django-continuous-delivery.git talos-django
22 | cd talos-django
23 | ```
24 |
25 | Then, create and activate a virtual environment and install the requirements:
26 |
27 | ```console
28 | python3.12 -m venv .venv
29 | source .venv/bin/activate
30 | python3 -m pip install --upgrade pip setuptools
31 | python3 -m pip install -r requirements/common.txt
32 | ```
33 |
34 | The `terraform` cli package is required, unless you want to generate a project only locally. To install it we suggest to use the official [install guide](https://learn.hashicorp.com/tutorials/terraform/install-cli).
35 |
36 | ## 🔑 Credentials (optional)
37 |
38 | ### 🦊 GitLab
39 |
40 | If the GitLab integration is enabled, a Personal Access Token with _api_ scope is required.
41 | It can be generated in the Access Tokens section of the GitLab User Settings panel.
42 |
43 | **Note:** ⚠️ Beware that the token is shown only once after creation.
45 |
46 | ## 🚀️ Quickstart
47 |
48 | Change to the projects directory, for example:
49 |
50 | ```console
51 | cd ~/projects
52 | ```
53 |
54 | ### 🐋 Docker
55 |
56 | ```console
57 | docker run --interactive --tty --rm --volume $PWD/.dumps:/app/.dumps --volume $PWD/.logs:/app/.logs --volume $PWD:/data 20tab/talos-django:latest
58 | ```
59 |
60 | ### 🐍 Virtual environment
61 |
62 | ```console
63 | source talos-django/.venv/bin/activate
64 | ./talos-django/start.py
65 | ```
66 |
67 | ### Example
68 |
69 | ```console
70 | Project name: My Project Name
71 | Project slug [my-project-name]:
72 | Service slug [backend]:
73 | Project dirname (backend, myprojectname) [backend]: myprojectname
74 | Deploy type (digitalocean-k8s, other-k8s) [digitalocean-k8s]:
75 | Terraform backend (gitlab, terraform-cloud) [terraform-cloud]:
76 | Terraform host name [app.terraform.io]:
77 | Terraform Cloud User token:
78 | Terraform Organization: my-organization-name
79 | Do you want to create Terraform Cloud Organization 'my-organization-name'? [y/N]:
80 | Choose the environments distribution:
81 | 1 - All environments share the same stack (Default)
82 | 2 - Dev and Stage environments share the same stack, Prod has its own
83 | 3 - Each environment has its own stack
84 | (1, 2, 3) [1]:
85 | Development environment complete URL [https://dev.my-project-name.com]:
86 | Staging environment complete URL [https://stage.my-project-name.com]:
87 | Production environment complete URL [https://www.my-project-name.com]:
88 | Media storage (digitalocean-s3, aws-s3, local, none) [digitalocean-s3]:
89 | Do you want to configure Redis? [y/N]:
90 | Do you want to use GitLab? [Y/n]:
91 | GitLab group slug [my-project-name]:
92 | Make sure the GitLab "my-project-name" group exists before proceeding. Continue? [y/N]: y
93 | GitLab private token (with API scope enabled):
94 | Sentry DSN (leave blank if unused) []:
95 | Initializing the backend service:
96 | ...cookiecutting the service
97 | ...generating the .env file
98 | ...formatting the cookiecut python code
99 | ...compiling the requirements files
100 | - common.txt
101 | - test.txt
102 | - local.txt
103 | - remote.txt
104 | - base.txt
105 | ...creating the '/static' directory
106 | ...creating the GitLab repository and associated resources
107 | ...creating the Terraform Cloud resources
108 | ```
109 |
110 | ## 🗒️ Arguments
111 |
112 | The following arguments can be appended to the Docker and shell commands
113 |
114 | #### User id
115 |
116 | `--uid=$UID`
117 |
118 | #### Group id
119 |
120 | `--gid=1000`
121 |
122 | #### Output directory
123 |
124 | `--output-dir="~/projects"`
125 |
126 | #### Project name
127 |
128 | `--project-name="My project name"`
129 |
130 | #### Project slug
131 |
132 | `--project-slug="my-project-name"`
133 |
134 | #### Project dirname
135 |
136 | `--project-dirname="myprojectname"`
137 |
138 | ### 🎖️ Service
139 |
140 | #### Service slug
141 |
142 | `--service-slug=backend`
143 |
144 | #### Service port
145 |
146 | `--internal-service-port=8000`
147 |
148 | ### 📐 Architecture
149 |
150 | #### Deploy type
151 |
152 | | Description | Argument |
153 | | ----------------------- | ------------------------------------ |
154 | | DigitalOcean Kubernetes | `--deployment-type=digitalocean-k8s` |
155 | | Other Kubernetes | `--deployment-type=other-k8s` |
156 |
157 | #### Terraform backend
158 |
159 | | Name | Argument |
160 | | --------------- | ------------------------------------- |
161 | | Terraform Cloud | `--terraform-backend=terraform-cloud` |
162 | | GitLab | `--terraform-backend=gitlab` |
163 |
165 | ##### Terraform Cloud required arguments
165 |
166 | `--terraform-cloud-hostname=app.terraform.io`
167 | `--terraform-cloud-token={{terraform-cloud-token}}`
168 | `--terraform-cloud-organization`
169 |
170 | ##### Terraform Cloud create organization
171 |
172 | `--terraform-cloud-organization-create`
173 | `--terraform-cloud-admin-email={{terraform-cloud-admin-email}}`
174 |
175 | Disabled args
176 | `--terraform-cloud-organization-create-skip`
177 |
178 | #### Environment distribution
179 |
180 | Choose the environments distribution:
181 |
182 | | Value | Description | Argument |
183 | | ----- | ----------------------------------------------------------------- | ------------------------------ |
184 | | 1 | All environments share the same stack (Default) | `--environment-distribution=1` |
185 | | 2 | Dev and Stage environments share the same stack, Prod has its own | `--environment-distribution=2` |
186 | | 3 | Each environment has its own stack | `--environment-distribution=3` |
187 |
188 | #### Project Domain
189 |
190 | If you don't want the DigitalOcean DNS configuration, the following arguments are required:
191 |
192 | `--project-url-dev=https://dev.project-domain.com`
193 | `--project-url-stage=https://stage.project-domain.com`
194 | `--project-url-prod=https://www.project-domain.com`
195 |
196 | #### Media storage
197 |
198 | | Value | Description | Argument |
199 | | --------------- | ------------------------------------------- | --------------------------------- |
200 | | digitalocean-s3 | DigitalOcean Spaces are used to store media | `--media-storage=digitalocean-s3` |
201 | | aws-s3          | AWS S3 is used to store media               | `--media-storage=aws-s3`          |
202 | | local           | A Docker volume is used to store media      | `--media-storage=local`           |
203 | | none            | The project has no media                    | `--media-storage=none`            |
204 |
205 | #### Redis
206 |
207 | To enable the Redis integration, the following argument is needed:
208 |
209 | `--use-redis`
210 |
211 | Disabled args
212 | `--no-redis`
213 |
214 | ### 🦊 GitLab
215 |
216 | > **⚠️ Important: Make sure the GitLab group exists before creating.** See https://gitlab.com/gitlab-org/gitlab/-/issues/244345
217 |
218 | To enable the GitLab integration, the following arguments are needed:
219 |
220 | `--gitlab-private-token={{gitlab-private-token}}`
221 | `--gitlab-group-path={{gitlab-group-path}}`
222 |
223 | #### 🪖 Sentry
224 |
225 | To enable the Sentry integration, the following argument is needed:
226 |
227 | `--sentry-dsn={{frontend-sentry-dsn}}`
228 |
229 | #### 🔇 Quiet
230 |
231 | No confirmations shown.
232 |
233 | `--quiet`
234 |
--------------------------------------------------------------------------------
/bootstrap/__init__.py:
--------------------------------------------------------------------------------
1 | """Bootstrap module."""
2 |
--------------------------------------------------------------------------------
/bootstrap/collector.py:
--------------------------------------------------------------------------------
1 | """Initialize a web project Django service based on a template."""
2 |
3 | from dataclasses import dataclass
4 | from pathlib import Path
5 | from shutil import rmtree
6 |
7 | import click
8 | from pydantic import validate_arguments
9 | from slugify import slugify
10 |
11 | from bootstrap.constants import (
12 | DEPLOYMENT_TYPE_CHOICES,
13 | DEPLOYMENT_TYPE_DIGITALOCEAN,
14 | DEPLOYMENT_TYPE_OTHER,
15 | ENVIRONMENTS_DISTRIBUTION_CHOICES,
16 | ENVIRONMENTS_DISTRIBUTION_DEFAULT,
17 | ENVIRONMENTS_DISTRIBUTION_PROMPT,
18 | GITLAB_URL_DEFAULT,
19 | MEDIA_STORAGE_CHOICES,
20 | MEDIA_STORAGE_DIGITALOCEAN_S3,
21 | TERRAFORM_BACKEND_CHOICES,
22 | TERRAFORM_BACKEND_TFC,
23 | )
24 | from bootstrap.helpers import (
25 | validate_or_prompt_domain,
26 | validate_or_prompt_email,
27 | validate_or_prompt_path,
28 | validate_or_prompt_secret,
29 | validate_or_prompt_url,
30 | warning,
31 | )
32 | from bootstrap.runner import Runner
33 |
34 |
@validate_arguments
@dataclass(kw_only=True)
class Collector:
    """The bootstrap CLI options collector."""

    # Each option defaults to None so `collect` can distinguish "not provided
    # on the command line" from an explicit value, prompting only for the
    # missing ones.

    # Directory where the project is generated.
    output_dir: Path = Path(".")
    # Project identity.
    project_name: str | None = None
    project_slug: str | None = None
    project_dirname: str | None = None
    # Service settings.
    service_slug: str | None = None
    internal_service_port: int | None = None
    # Deployment and Terraform settings.
    deployment_type: str | None = None
    terraform_backend: str | None = None
    terraform_cloud_hostname: str | None = None
    terraform_cloud_token: str | None = None
    terraform_cloud_organization: str | None = None
    terraform_cloud_organization_create: bool | None = None
    terraform_cloud_admin_email: str | None = None
    # Vault secrets-management settings.
    vault_token: str | None = None
    vault_url: str | None = None
    # Environments distribution and per-environment URLs.
    environments_distribution: str | None = None
    project_url_dev: str | None = None
    project_url_stage: str | None = None
    project_url_prod: str | None = None
    use_redis: bool | None = None
    # Sentry settings.
    sentry_dsn: str | None = None
    sentry_org: str | None = None
    sentry_url: str | None = None
    media_storage: str | None = None
    # GitLab settings.
    gitlab_url: str | None = None
    gitlab_token: str | None = None
    gitlab_namespace_path: str | None = None
    # Ownership ids for generated files (e.g. when running inside Docker).
    uid: int | None = None
    gid: int | None = None
    # Auxiliary directories.
    terraform_dir: Path | None = None
    logs_dir: Path | None = None
    # When True, skip confirmation prompts where the code checks it.
    quiet: bool = False

    def __post_init__(self):
        """Finalize initialization."""
        # Resolved target directory for the service; set by `set_service_dir`.
        self._service_dir = None
76 |
    def collect(self):
        """Collect options.

        Order matters: later setters rely on options set by earlier ones
        (e.g. `set_service_dir` uses the project dirname, and
        `set_environments_distribution` checks the deployment type).
        """
        self.set_project_slug()
        self.set_service_slug()
        self.set_project_dirname()
        self.set_service_dir()
        self.set_use_redis()
        self.set_terraform()
        self.set_vault()
        self.set_deployment_type()
        self.set_environments_distribution()
        self.set_project_urls()
        self.set_sentry()
        self.set_gitlab()
        self.set_media_storage()
92 |
93 | def set_project_slug(self):
94 | """Set the project slug option."""
95 | self.project_slug = slugify(
96 | self.project_slug
97 | or click.prompt("Project slug", default=slugify(self.project_name))
98 | )
99 |
100 | def set_service_slug(self):
101 | """Set the service slug option."""
102 | self.service_slug = slugify(
103 | self.service_slug or click.prompt("Service slug", default="backend")
104 | )
105 |
106 | def set_project_dirname(self):
107 | """Set the project dirname option."""
108 | self.project_dirname = self.project_dirname or click.prompt(
109 | "Project dirname",
110 | default=self.service_slug,
111 | type=click.Choice(
112 | [self.service_slug, slugify(self.project_slug, separator="")]
113 | ),
114 | )
115 |
116 | def set_service_dir(self):
117 | """Set the service dir option."""
118 | service_dir = self.output_dir / self.project_dirname
119 | if service_dir.is_dir() and click.confirm(
120 | warning(
121 | f'A directory "{service_dir.resolve()}" already exists and '
122 | "must be deleted. Continue?",
123 | ),
124 | abort=True,
125 | ):
126 | rmtree(service_dir)
127 | self._service_dir = service_dir
128 |
129 | def set_use_redis(self):
130 | """Set the use Redis option."""
131 | if self.use_redis is None:
132 | self.use_redis = click.confirm(
133 | warning("Do you want to use Redis?"), default=False
134 | )
135 |
136 | def set_terraform(self):
137 | """Set the Terraform options."""
138 | if self.terraform_backend not in TERRAFORM_BACKEND_CHOICES:
139 | self.terraform_backend = click.prompt(
140 | "Terraform backend",
141 | default=TERRAFORM_BACKEND_TFC,
142 | type=click.Choice(TERRAFORM_BACKEND_CHOICES, case_sensitive=False),
143 | ).lower()
144 | if self.terraform_backend == TERRAFORM_BACKEND_TFC:
145 | self.set_terraform_cloud()
146 |
147 | def set_terraform_cloud(self):
148 | """Set the Terraform Cloud options."""
149 | self.terraform_cloud_hostname = validate_or_prompt_domain(
150 | "Terraform host name",
151 | self.terraform_cloud_hostname,
152 | default="app.terraform.io",
153 | )
154 | self.terraform_cloud_token = validate_or_prompt_secret(
155 | "Terraform Cloud User token", self.terraform_cloud_token
156 | )
157 | self.terraform_cloud_organization = (
158 | self.terraform_cloud_organization or click.prompt("Terraform Organization")
159 | )
160 | if self.terraform_cloud_organization_create is None:
161 | self.terraform_cloud_organization_create = click.confirm(
162 | "Do you want to create Terraform Cloud Organization "
163 | f"'{self.terraform_cloud_organization}'?",
164 | )
165 | if self.terraform_cloud_organization_create:
166 | self.terraform_cloud_admin_email = validate_or_prompt_email(
167 | "Terraform Cloud Organization admin email (e.g. tech@20tab.com)",
168 | self.terraform_cloud_admin_email,
169 | )
170 | else:
171 | self.terraform_cloud_admin_email = ""
172 |
173 | def set_vault(self):
174 | """Set the Vault options."""
175 | if self.vault_url or (
176 | self.vault_url is None
177 | and click.confirm("Do you want to use Vault for secrets management?")
178 | ):
179 | self.vault_token = validate_or_prompt_secret(
180 | "Vault token "
181 | "(leave blank to perform a browser-based OIDC authentication)",
182 | self.vault_token,
183 | default="",
184 | required=False,
185 | )
186 | self.quiet or click.confirm(
187 | warning(
188 | "Make sure your Vault permissions allow to enable the "
189 | "project secrets backends and manage the project secrets. Continue?"
190 | ),
191 | abort=True,
192 | )
193 | self.vault_url = validate_or_prompt_url("Vault address", self.vault_url)
194 |
195 | def set_deployment_type(self):
196 | """Set the deployment type option."""
197 | if self.deployment_type not in DEPLOYMENT_TYPE_CHOICES:
198 | self.deployment_type = click.prompt(
199 | "Deploy type",
200 | default=DEPLOYMENT_TYPE_DIGITALOCEAN,
201 | type=click.Choice(DEPLOYMENT_TYPE_CHOICES, case_sensitive=False),
202 | ).lower()
203 |
204 | def set_environments_distribution(self):
205 | """Set the environments distribution option."""
206 | # TODO: forcing a single stack when deployment is `k8s-other` should be removed,
207 | # and `set_deployment_type` merged with `set_deployment`
208 | if self.deployment_type == DEPLOYMENT_TYPE_OTHER:
209 | self.environments_distribution = "1"
210 | elif self.environments_distribution not in ENVIRONMENTS_DISTRIBUTION_CHOICES:
211 | self.environments_distribution = click.prompt(
212 | ENVIRONMENTS_DISTRIBUTION_PROMPT,
213 | default=ENVIRONMENTS_DISTRIBUTION_DEFAULT,
214 | type=click.Choice(ENVIRONMENTS_DISTRIBUTION_CHOICES),
215 | )
216 |
217 | def set_project_urls(self):
218 | """Set the project urls options."""
219 | self.project_url_dev = validate_or_prompt_url(
220 | "Development environment complete URL",
221 | self.project_url_dev or None,
222 | default=f"https://dev.{self.project_slug}.com",
223 | )
224 | self.project_url_stage = validate_or_prompt_url(
225 | "Staging environment complete URL",
226 | self.project_url_stage or None,
227 | default=f"https://stage.{self.project_slug}.com",
228 | )
229 | self.project_url_prod = validate_or_prompt_url(
230 | "Production environment complete URL",
231 | self.project_url_prod or None,
232 | default=f"https://www.{self.project_slug}.com",
233 | )
234 |
235 | def set_sentry(self):
236 | """Set the Sentry options."""
237 | if self.sentry_org or (
238 | self.sentry_org is None
239 | and click.confirm(warning("Do you want to use Sentry?"), default=False)
240 | ):
241 | self.sentry_org = self.sentry_org or click.prompt("Sentry organization")
242 | self.sentry_url = validate_or_prompt_url(
243 | "Sentry URL", self.sentry_url, default="https://sentry.io/"
244 | )
245 | self.sentry_dsn = validate_or_prompt_url(
246 | "Sentry DSN (leave blank if unused)",
247 | self.sentry_dsn,
248 | default="",
249 | required=False,
250 | )
251 |
    def set_gitlab(self):
        """Set the GitLab options."""
        # A truthy URL means GitLab was requested explicitly; `None` means the user
        # has not been asked yet; any other falsy value disables GitLab.
        if self.gitlab_url or (
            self.gitlab_url is None
            and click.confirm(warning("Do you want to use GitLab?"), default=True)
        ):
            self.gitlab_url = validate_or_prompt_url(
                "GitLab URL", self.gitlab_url, default=GITLAB_URL_DEFAULT
            )
            self.gitlab_token = self.gitlab_token or click.prompt(
                "GitLab access token (with API scope enabled)", hide_input=True
            )
            # TODO: extend support for root level projects (empty namespace)
            self.gitlab_namespace_path = validate_or_prompt_path(
                "GitLab parent group path", self.gitlab_namespace_path
            )
            # Unless in quiet mode, ask for confirmation that the parent group
            # exists (click aborts the program on a negative answer).
            # NOTE(review): the guard tests `gitlab_namespace_path == ""` while the
            # message interpolates the (then empty) path — this looks inverted;
            # confirm whether `!= ""` was intended before relying on this prompt.
            self.quiet or (
                self.gitlab_namespace_path == ""
                and self.gitlab_url == GITLAB_URL_DEFAULT
                and click.confirm(
                    warning(
                        f'Make sure the GitLab "{self.gitlab_namespace_path}" group '
                        "exists before proceeding. Continue?"
                    ),
                    abort=True,
                )
            )
279 |
280 | def set_media_storage(self):
281 | """Set the media storage options."""
282 | if self.media_storage is None:
283 | self.media_storage = click.prompt(
284 | "Media storage",
285 | default=MEDIA_STORAGE_DIGITALOCEAN_S3,
286 | type=click.Choice(MEDIA_STORAGE_CHOICES, case_sensitive=False),
287 | ).lower()
288 |
289 | def get_runner(self):
290 | """Get the bootstrap runner instance."""
291 | return Runner(
292 | uid=self.uid,
293 | gid=self.gid,
294 | output_dir=self.output_dir,
295 | project_name=self.project_name,
296 | project_slug=self.project_slug,
297 | project_dirname=self.project_dirname,
298 | service_dir=self._service_dir,
299 | service_slug=self.service_slug,
300 | internal_service_port=self.internal_service_port,
301 | deployment_type=self.deployment_type,
302 | terraform_backend=self.terraform_backend,
303 | terraform_cloud_hostname=self.terraform_cloud_hostname,
304 | terraform_cloud_token=self.terraform_cloud_token,
305 | terraform_cloud_organization=self.terraform_cloud_organization,
306 | terraform_cloud_organization_create=self.terraform_cloud_organization_create,
307 | terraform_cloud_admin_email=self.terraform_cloud_admin_email,
308 | vault_token=self.vault_token,
309 | vault_url=self.vault_url,
310 | environments_distribution=self.environments_distribution,
311 | project_url_dev=self.project_url_dev,
312 | project_url_stage=self.project_url_stage,
313 | project_url_prod=self.project_url_prod,
314 | sentry_dsn=self.sentry_dsn,
315 | sentry_org=self.sentry_org,
316 | sentry_url=self.sentry_url,
317 | media_storage=self.media_storage,
318 | use_redis=self.use_redis,
319 | gitlab_url=self.gitlab_url,
320 | gitlab_token=self.gitlab_token,
321 | gitlab_namespace_path=self.gitlab_namespace_path,
322 | terraform_dir=self.terraform_dir,
323 | logs_dir=self.logs_dir,
324 | )
325 |
326 | def launch_runner(self):
327 | """Launch a bootstrap runner with the collected options."""
328 | self.get_runner().run()
329 |
--------------------------------------------------------------------------------
/bootstrap/constants.py:
--------------------------------------------------------------------------------
1 | """Web project initialization CLI constants."""
2 |
3 | # Stacks
4 |
5 | # BEWARE: stack names must be suitable for inclusion in Vault paths
6 |
7 | DEV_STACK_NAME = "development"
8 |
9 | DEV_STACK_SLUG = "dev"
10 |
11 | STAGE_STACK_NAME = "staging"
12 |
13 | STAGE_STACK_SLUG = "stage"
14 |
15 | MAIN_STACK_NAME = "main"
16 |
17 | MAIN_STACK_SLUG = "main"
18 |
19 | STACKS_CHOICES = {
20 | "1": [{"name": MAIN_STACK_NAME, "slug": MAIN_STACK_SLUG}],
21 | "2": [
22 | {"name": DEV_STACK_NAME, "slug": DEV_STACK_SLUG},
23 | {"name": MAIN_STACK_NAME, "slug": MAIN_STACK_SLUG},
24 | ],
25 | "3": [
26 | {"name": DEV_STACK_NAME, "slug": DEV_STACK_SLUG},
27 | {"name": STAGE_STACK_NAME, "slug": STAGE_STACK_SLUG},
28 | {"name": MAIN_STACK_NAME, "slug": MAIN_STACK_SLUG},
29 | ],
30 | }
31 |
32 | # Environments
33 |
34 | # BEWARE: environment names must be suitable for inclusion in Vault paths
35 |
36 | DEV_ENV_NAME = "development"
37 |
38 | DEV_ENV_SLUG = "dev"
39 |
40 | DEV_ENV_STACK_CHOICES: dict[str, str] = {
41 | "1": MAIN_STACK_SLUG,
42 | }
43 |
44 | STAGE_ENV_NAME = "staging"
45 |
46 | STAGE_ENV_SLUG = "stage"
47 |
48 | STAGE_ENV_STACK_CHOICES: dict[str, str] = {
49 | "1": MAIN_STACK_SLUG,
50 | "2": DEV_STACK_SLUG,
51 | }
52 |
53 | PROD_ENV_NAME = "production"
54 |
55 | PROD_ENV_SLUG = "prod"
56 |
57 | PROD_ENV_STACK_CHOICES: dict[str, str] = {}
58 |
59 | # Env vars
60 |
61 | GITLAB_TOKEN_ENV_VAR = "GITLAB_PRIVATE_TOKEN" # nosec B105
62 |
63 | VAULT_TOKEN_ENV_VAR = "VAULT_TOKEN" # nosec B105
64 |
65 | # Deployment type
66 |
67 | DEPLOYMENT_TYPE_DIGITALOCEAN = "digitalocean-k8s"
68 |
69 | DEPLOYMENT_TYPE_OTHER = "other-k8s"
70 |
71 | DEPLOYMENT_TYPE_CHOICES = [DEPLOYMENT_TYPE_DIGITALOCEAN, DEPLOYMENT_TYPE_OTHER]
72 |
73 | # Environments distribution
74 |
75 | ENVIRONMENTS_DISTRIBUTION_DEFAULT = "1"
76 |
77 | ENVIRONMENTS_DISTRIBUTION_CHOICES = [ENVIRONMENTS_DISTRIBUTION_DEFAULT, "2", "3"]
78 |
79 | ENVIRONMENTS_DISTRIBUTION_PROMPT = """Choose the environments distribution:
80 | 1 - All environments share the same stack (Default)
81 | 2 - Dev and Stage environments share the same stack, Prod has its own
82 | 3 - Each environment has its own stack
83 | """
84 |
85 | # Media storage
86 |
87 | MEDIA_STORAGE_DIGITALOCEAN_S3 = "digitalocean-s3"
88 |
89 | MEDIA_STORAGE_AWS_S3 = "aws-s3"
90 |
91 | MEDIA_STORAGE_CHOICES = [
92 | MEDIA_STORAGE_DIGITALOCEAN_S3,
93 | MEDIA_STORAGE_AWS_S3,
94 | "local",
95 | "none",
96 | ]
97 |
98 |
99 | # Terraform backend
100 |
101 | TERRAFORM_BACKEND_GITLAB = "gitlab"
102 |
103 | TERRAFORM_BACKEND_TFC = "terraform-cloud"
104 |
105 | TERRAFORM_BACKEND_CHOICES = [TERRAFORM_BACKEND_GITLAB, TERRAFORM_BACKEND_TFC]
106 |
107 | # GitLab
108 |
109 | GITLAB_URL_DEFAULT = "https://gitlab.com"
110 |
--------------------------------------------------------------------------------
/bootstrap/exceptions.py:
--------------------------------------------------------------------------------
1 | """Custom bootstrap exceptions."""
2 |
3 |
class BootstrapError(Exception):
    """Raised when the bootstrap process cannot proceed."""
6 |
--------------------------------------------------------------------------------
/bootstrap/helpers.py:
--------------------------------------------------------------------------------
1 | """Web project initialization helpers."""
2 |
3 | import re
4 | from functools import partial
5 |
6 | import click
7 | import validators
8 | from slugify import slugify
9 |
# CLI text styling helpers: red for errors, yellow for warnings.
error = partial(click.style, fg="red")

warning = partial(click.style, fg="yellow")
13 |
14 |
def format_gitlab_variable(value, masked=False, protected=True):
    """Format the given value to be used as a GitLab variable.

    Return an HCL-like object string, e.g. '{ value = "x", masked = true }'.
    Flags are rendered only when they differ from the GitLab defaults
    (``masked`` defaults to false, ``protected`` to true).
    """
    # Build the attribute list explicitly instead of the `cond and x or ""`
    # idiom, which is error-prone and discouraged.
    parts = [f'value = "{value}"']
    if masked:
        parts.append("masked = true")
    if not protected:
        parts.append("protected = false")
    return "{ " + ", ".join(parts) + " }"
23 |
24 |
def format_tfvar(value, value_type=None):
    """Format the given value to be used as a Terraform variable.

    ``value_type`` selects the HCL rendering: "list" (elements formatted
    recursively as strings), "bool", "num", or a quoted string (the default).
    """
    if value_type == "list":
        return "[" + ", ".join(format_tfvar(i) for i in value) + "]"
    if value_type == "bool":
        # Any truthy value renders as the HCL literal "true"; replaces the
        # discouraged `value and "true" or "false"` idiom.
        return "true" if value else "false"
    if value_type == "num":
        return str(value)
    return f'"{value}"'
35 |
36 |
def slugify_option(ctx, param, value):
    """Slugify a click option value, passing falsy values through unchanged."""
    if not value:
        return value
    return slugify(value)
40 |
41 |
def validate_or_prompt_domain(message, value=None, default=None, required=True):
    """Validate the given domain or prompt until a valid value is provided."""
    while True:
        if value is None:
            value = click.prompt(message, default=default)
        # An empty string is accepted only for optional values.
        if (not required and value == "") or validators.domain(value):
            return value
        click.echo(error("Please type a valid domain!"))
        value = None
50 |
51 |
def validate_or_prompt_email(message, value=None, default=None, required=True):
    """Validate the given email address or prompt until a valid value is provided."""
    while True:
        if value is None:
            value = click.prompt(message, default=default)
        # An empty string is accepted only for optional values.
        if (not required and value == "") or validators.email(value):
            return value
        click.echo(error("Please type a valid email!"))
        value = None
60 |
61 |
def validate_or_prompt_secret(message, value=None, default=None, required=True):
    """Validate the given secret or prompt until a valid value is provided."""
    while True:
        if value is None:
            value = click.prompt(message, default=default, hide_input=True)
        # Optional secrets may be left empty; otherwise enforce a minimum length.
        if (not required and value == "") or validators.length(value, min=8):
            return value
        click.echo(error("Please type at least 8 chars!"))
        value = None
71 |
72 |
def validate_or_prompt_path(message, value=None, default=None, required=True):
    """Validate the given path or prompt until a valid path is provided."""
    while True:
        if value is None:
            value = click.prompt(message, default=default)
        # Optional paths may be left empty; otherwise match slash-separated
        # segments of word characters and dashes, with optional edge slashes.
        if (not required and value == "") or re.match(
            r"^(?:/?[\w_\-]+)(?:\/[\w_\-]+)*\/?$", value
        ):
            return value
        click.echo(
            error(
                "Please type a valid slash-separated path containing letters, digits, "
                "dashes and underscores!"
            )
        )
        value = None
90 |
91 |
def validate_or_prompt_url(message, value=None, default=None, required=True):
    """Validate the given URL or prompt until a valid value is provided."""
    while True:
        if value is None:
            value = click.prompt(message, default=default)
        # Optional URLs may be left empty; valid values are returned without a
        # trailing slash.
        if (not required and value == "") or validators.url(value):
            return value.rstrip("/")
        click.echo(error("Please type a valid URL!"))
        value = None
101 |
--------------------------------------------------------------------------------
/bootstrap/runner.py:
--------------------------------------------------------------------------------
1 | """Initialize a web project Django service based on a template."""
2 |
3 | import json
4 | import os
5 | import secrets
6 | import subprocess # nosec B404
7 | from dataclasses import dataclass, field
8 | from functools import partial
9 | from operator import itemgetter
10 | from pathlib import Path
11 | from time import time
12 |
13 | import click
14 | from cookiecutter.main import cookiecutter
15 | from pydantic import validate_arguments
16 |
17 | from bootstrap.constants import (
18 | DEV_ENV_NAME,
19 | DEV_ENV_SLUG,
20 | DEV_ENV_STACK_CHOICES,
21 | DEV_STACK_SLUG,
22 | GITLAB_URL_DEFAULT,
23 | MAIN_STACK_SLUG,
24 | PROD_ENV_NAME,
25 | PROD_ENV_SLUG,
26 | PROD_ENV_STACK_CHOICES,
27 | STACKS_CHOICES,
28 | STAGE_ENV_NAME,
29 | STAGE_ENV_SLUG,
30 | STAGE_ENV_STACK_CHOICES,
31 | STAGE_STACK_SLUG,
32 | TERRAFORM_BACKEND_TFC,
33 | )
34 | from bootstrap.exceptions import BootstrapError
35 | from bootstrap.helpers import format_gitlab_variable, format_tfvar
36 |
# CLI text styling helpers: red errors, cyan highlights, dim info, yellow warnings.
error = partial(click.style, fg="red")

highlight = partial(click.style, fg="cyan")

info = partial(click.style, dim=True)

warning = partial(click.style, fg="yellow")
44 |
45 |
# NOTE(review): `validate_arguments` is the pydantic v1 API (renamed
# `validate_call` in v2) — confirm the pinned pydantic version before upgrading.
@validate_arguments
@dataclass(kw_only=True)
class Runner:
    """The bootstrap runner.

    Cookiecuts the Django service from this repository's template, prepares its
    local files (.env, requirements, static/media dirs), then provisions the
    Terraform Cloud / GitLab / Vault resources by shelling out to `terraform`.
    """

    # `kw_only=True` allows required fields (e.g. `terraform_backend`) to appear
    # after fields with defaults.
    output_dir: Path
    project_name: str
    project_slug: str
    project_dirname: str
    service_dir: Path
    service_slug: str
    internal_service_port: int
    deployment_type: str
    environments_distribution: str
    project_url_dev: str = ""
    project_url_stage: str = ""
    project_url_prod: str = ""
    terraform_backend: str
    terraform_cloud_hostname: str | None = None
    terraform_cloud_token: str | None = None
    terraform_cloud_organization: str | None = None
    terraform_cloud_organization_create: bool | None = None
    terraform_cloud_admin_email: str | None = None
    vault_token: str | None = None
    vault_url: str | None = None
    sentry_dsn: str | None = None
    sentry_org: str | None = None
    sentry_url: str | None = None
    media_storage: str
    use_redis: bool = False
    gitlab_url: str | None = None
    gitlab_namespace_path: str | None = None
    gitlab_token: str | None = None
    uid: int | None = None
    gid: int | None = None
    terraform_dir: Path | None = None
    logs_dir: Path | None = None
    # Derived state, populated in __post_init__ and during the run.
    run_id: str = field(init=False)
    stacks: list = field(init=False, default_factory=list)
    envs: list = field(init=False, default_factory=list)
    gitlab_variables: dict = field(init=False, default_factory=dict)
    tfvars: dict = field(init=False, default_factory=dict)
    vault_secrets: dict = field(init=False, default_factory=dict)
    terraform_run_modules: list = field(init=False, default_factory=list)
    terraform_outputs: dict = field(init=False, default_factory=dict)

    def __post_init__(self):
        """Finalize initialization."""
        # Normalize the GitLab URL (drop trailing slash) while keeping None as-is.
        self.gitlab_url = self.gitlab_url and self.gitlab_url.rstrip("/")
        # Epoch seconds, used to keep per-run terraform/log directories unique.
        self.run_id = f"{time():.0f}"
        self.terraform_dir = self.terraform_dir or Path(f".terraform/{self.run_id}")
        self.logs_dir = self.logs_dir or Path(f".logs/{self.run_id}")
        self.set_stacks()
        self.set_envs()
        self.collect_tfvars()
        self.collect_gitlab_variables()

    def set_stacks(self):
        """Set the stacks."""
        self.stacks = STACKS_CHOICES[self.environments_distribution]

    def set_envs(self):
        """Set the envs."""
        # Each environment maps to a stack according to the chosen distribution;
        # missing keys fall back to the environment's dedicated stack slug.
        self.envs = [
            {
                "basic_auth_enabled": True,
                "name": DEV_ENV_NAME,
                "slug": DEV_ENV_SLUG,
                "stack_slug": DEV_ENV_STACK_CHOICES.get(
                    self.environments_distribution, DEV_STACK_SLUG
                ),
                "url": self.project_url_dev,
            },
            {
                "basic_auth_enabled": True,
                "name": STAGE_ENV_NAME,
                "slug": STAGE_ENV_SLUG,
                "stack_slug": STAGE_ENV_STACK_CHOICES.get(
                    self.environments_distribution, STAGE_STACK_SLUG
                ),
                "url": self.project_url_stage,
            },
            {
                # Production is the only environment without basic auth.
                "basic_auth_enabled": False,
                "name": PROD_ENV_NAME,
                "slug": PROD_ENV_SLUG,
                "stack_slug": PROD_ENV_STACK_CHOICES.get(
                    self.environments_distribution, MAIN_STACK_SLUG
                ),
                "url": self.project_url_prod,
            },
        ]

    def register_gitlab_variable(
        self, level, var_name, var_value=None, masked=False, protected=True
    ):
        """Register a GitLab variable at the given level."""
        vars_dict = self.gitlab_variables.setdefault(level, {})
        # When no explicit value is given, read the attribute of the same name.
        if var_value is None:
            var_value = getattr(self, var_name)
        vars_dict[var_name] = format_gitlab_variable(var_value, masked, protected)

    def register_gitlab_variables(self, level, *args):
        """Register one or more GitLab variable at the given level."""
        # Each arg is either a bare name or a tuple of register_gitlab_variable args.
        [
            self.register_gitlab_variable(level, *((i,) if isinstance(i, str) else i))
            for i in args
        ]

    def register_gitlab_group_variables(self, *args):
        """Register one or more GitLab group variable."""
        self.register_gitlab_variables("group", *args)

    def register_gitlab_project_variables(self, *args):
        """Register one or more GitLab project variable."""
        self.register_gitlab_variables("project", *args)

    def collect_gitlab_variables(self):
        """Collect the GitLab group and project variables."""
        if self.sentry_dsn:
            self.register_gitlab_project_variables(
                ("SENTRY_ORG", self.sentry_org),
                ("SENTRY_URL", self.sentry_url),
                ("SENTRY_ENABLED", "true"),
            )
        # Secrets go to GitLab CI/CD variables only when Vault is not used.
        if not self.vault_url:
            self.collect_gitlab_variables_secrets()

    def collect_gitlab_variables_secrets(self):
        """Collect secrets as GitLab group and project variables."""
        if self.sentry_dsn:
            self.register_gitlab_project_variables(
                ("SENTRY_DSN", self.sentry_dsn, True)
            )

    def render_gitlab_variables_to_string(self, level):
        """Return the given level GitLab variables rendered to string."""
        # %-formatting avoids escaping the literal braces of the HCL map syntax.
        return "{%s}" % ", ".join(
            f"{k} = {v}" for k, v in self.gitlab_variables.get(level, {}).items()
        )

    def register_tfvar(self, tf_stage, var_name, var_value=None, var_type=None):
        """Register a Terraform variable value for the given stage."""
        vars_list = self.tfvars.setdefault(tf_stage, [])
        # When no explicit value is given, read the attribute of the same name.
        if var_value is None:
            var_value = getattr(self, var_name)
        vars_list.append("=".join((var_name, format_tfvar(var_value, var_type))))

    def register_tfvars(self, tf_stage, *args):
        """Register one or more Terraform variables to the given stage."""
        # Each arg is either a bare name or a tuple of register_tfvar args.
        [
            self.register_tfvar(tf_stage, *((i,) if isinstance(i, str) else i))
            for i in args
        ]

    def register_environment_tfvars(self, *args, env_slug=None):
        """Register one or more environment Terraform variables."""
        # Stage key is "environment" for shared vars, "environment_<slug>" otherwise.
        tf_stage = "environment" + (env_slug and f"_{env_slug}" or "")
        self.register_tfvars(tf_stage, *args)

    def collect_tfvars(self):
        """Collect Terraform variables."""
        self.register_environment_tfvars(
            ("media_storage", self.media_storage),
            ("service_slug", self.service_slug),
            ("use_redis", self.use_redis, "bool"),
        )
        for env in self.envs:
            self.register_environment_tfvars(
                ("environment", env["name"]),
                ("project_url", env["url"]),
                ("stack_slug", env["stack_slug"]),
                env_slug=env["slug"],
            )

    def register_vault_environment_secret(self, env_name, secret_name, secret_data):
        """Register a Vault environment secret locally."""
        self.vault_secrets[f"envs/{env_name}/{secret_name}"] = secret_data

    def collect_vault_environment_secrets(self, env_name):
        """Collect the Vault secrets for the given environment."""
        # Sentry vars are used by the GitLab CI/CD
        self.sentry_dsn and self.register_vault_environment_secret(
            env_name, f"{self.service_slug}/sentry", {"sentry_dsn": self.sentry_dsn}
        )

    def collect_vault_secrets(self):
        """Collect Vault secrets."""
        [self.collect_vault_environment_secrets(env["name"]) for env in self.envs]

    def init_service(self):
        """Initialize the service."""
        click.echo(info("...cookiecutting the service"))
        cookiecutter(
            # The template is the root of this repository (parent of bootstrap/).
            os.path.dirname(os.path.dirname(__file__)),
            extra_context={
                "deployment_type": self.deployment_type,
                "internal_service_port": self.internal_service_port,
                "media_storage": self.media_storage,
                "project_dirname": self.project_dirname,
                "project_name": self.project_name,
                "project_slug": self.project_slug,
                "resources": {"envs": self.envs, "stacks": self.stacks},
                "service_slug": self.service_slug,
                "terraform_backend": self.terraform_backend,
                "terraform_cloud_organization": self.terraform_cloud_organization,
                "tfvars": self.tfvars,
                "use_redis": self.use_redis and "true" or "false",
                "use_vault": self.vault_url and "true" or "false",
            },
            output_dir=self.output_dir,
            no_input=True,
        )

    def create_env_file(self):
        """Create the final env file from its template."""
        click.echo(info("...generating the .env file"))
        env_path = self.service_dir / ".env_template"
        env_text = (
            env_path.read_text()
            .replace("__SECRETKEY__", secrets.token_urlsafe(40))
            .replace("__PASSWORD__", secrets.token_urlsafe(8))
        )
        (self.service_dir / ".env").write_text(env_text)

    def format_files(self):
        """Format python code generated by cookiecutter."""
        click.echo(info("...formatting the cookiecut python code"))
        subprocess.run(  # nosec B603 B607
            [
                "python3",
                "-m",
                "ruff",
                "format",
                f"{self.service_dir.resolve()}",
            ]
        )

    def compile_requirements(self):
        """Compile the requirements files."""
        click.echo(info("...compiling the requirements files"))
        requirements_path = self.service_dir / "requirements"
        PIP_COMPILE = [
            "python3",
            "-m",
            "piptools",
            "compile",
            "--generate-hashes",
            "--no-header",
            "--quiet",
            "--resolver=backtracking",
            "--strip-extras",
            "--upgrade",
            "--output-file",
        ]
        # Compile each *.in spec into its same-named *.txt lock file.
        for in_file in requirements_path.glob("*.in"):
            output_filename = f"{in_file.stem}.txt"
            output_file = requirements_path / output_filename
            subprocess.run(PIP_COMPILE + [output_file, in_file])  # nosec B603 B607
            click.echo(info(f"\t- {output_filename}"))

    def create_static_directory(self):
        """Create the static directory."""
        click.echo(info("...creating the '/static' directory"))
        (self.service_dir / "static").mkdir(exist_ok=True)

    def create_media_directory(self):
        """Create the media directory."""
        click.echo(info("...creating the '/media' directory"))
        (self.service_dir / "media").mkdir(exist_ok=True)

    def init_terraform_cloud(self):
        """Initialize the Terraform Cloud resources."""
        click.echo(info("...creating the Terraform Cloud resources"))
        env = {
            "TF_VAR_admin_email": self.terraform_cloud_admin_email,
            "TF_VAR_create_organization": self.terraform_cloud_organization_create
            and "true"
            or "false",
            "TF_VAR_environments": json.dumps(list(map(itemgetter("slug"), self.envs))),
            "TF_VAR_hostname": self.terraform_cloud_hostname,
            "TF_VAR_organization_name": self.terraform_cloud_organization,
            "TF_VAR_project_name": self.project_name,
            "TF_VAR_project_slug": self.project_slug,
            "TF_VAR_service_slug": self.service_slug,
            "TF_VAR_terraform_cloud_token": self.terraform_cloud_token,
        }
        self.run_terraform("terraform-cloud", env)

    def init_gitlab(self):
        """Initialize the GitLab resources."""
        click.echo(info("...creating the GitLab resources"))
        env = {
            "TF_VAR_gitlab_token": self.gitlab_token,
            "TF_VAR_gitlab_url": self.gitlab_url,
            "TF_VAR_group_variables": self.render_gitlab_variables_to_string("group"),
            "TF_VAR_namespace_path": self.gitlab_namespace_path,
            "TF_VAR_project_name": self.project_name,
            "TF_VAR_project_slug": self.project_slug,
            "TF_VAR_project_variables": self.render_gitlab_variables_to_string(
                "project"
            ),
            "TF_VAR_service_dir": self.service_dir,
            "TF_VAR_service_slug": self.service_slug,
        }
        # Point the GitLab provider at the self-managed instance, when not gitlab.com.
        self.gitlab_url != GITLAB_URL_DEFAULT and env.update(
            GITLAB_BASE_URL=f"{self.gitlab_url}/api/v4/"
        )
        self.run_terraform("gitlab", env)

    def init_vault(self):
        """Initialize the Vault resources."""
        click.echo(info("...creating the Vault resources with Terraform"))
        self.collect_vault_secrets()
        env = {
            "TF_VAR_project_slug": self.project_slug,
            "TF_VAR_secrets": json.dumps(self.vault_secrets),
            "TF_VAR_vault_address": self.vault_url,
            "TF_VAR_vault_token": self.vault_token,
        }
        self.run_terraform("vault", env)

    def get_terraform_module_params(self, module_name, env):
        """Return Terraform parameters for the given module.

        Returns a 4-tuple: (module source dir, logs dir, terraform data dir,
        process environment).
        """
        return (
            Path(__file__).parent.parent / "terraform" / module_name,
            self.logs_dir / self.service_slug / "terraform" / module_name,
            # The walrus names the dir so it can be reused in TF_DATA_DIR below.
            terraform_dir := self.terraform_dir / self.service_slug / module_name,
            {
                **env,
                "PATH": os.environ.get("PATH"),
                "TF_DATA_DIR": str((terraform_dir / "data").resolve()),
                "TF_LOG": "INFO",
            },
        )

    def run_terraform_init(self, cwd, env, logs_dir, state_path):
        """Run Terraform init.

        Raises BootstrapError when `terraform init` exits non-zero.
        """
        init_log_path = logs_dir / "init.log"
        init_stdout_path = logs_dir / "init-stdout.log"
        init_stderr_path = logs_dir / "init-stderr.log"
        init_process = subprocess.run(  # nosec B603 B607
            [
                "terraform",
                "init",
                "-backend-config",
                # Local backend: keep the state file under the run's terraform dir.
                f"path={state_path.resolve()}",
                "-input=false",
                "-no-color",
            ],
            capture_output=True,
            cwd=cwd,
            env=dict(**env, TF_LOG_PATH=str(init_log_path.resolve())),
            text=True,
        )
        init_stdout_path.write_text(init_process.stdout)
        if init_process.returncode != 0:
            init_stderr_path.write_text(init_process.stderr)
            click.echo(
                error(
                    "Terraform init failed "
                    f"(check {init_stderr_path} and {init_log_path})"
                )
            )
            raise BootstrapError

    def run_terraform_apply(self, cwd, env, logs_dir):
        """Run Terraform apply.

        On failure, destroys the resources of all modules applied so far,
        then raises BootstrapError.
        """
        apply_log_path = logs_dir / "apply.log"
        apply_stdout_path = logs_dir / "apply-stdout.log"
        apply_stderr_path = logs_dir / "apply-stderr.log"
        apply_process = subprocess.run(  # nosec B603 B607
            ["terraform", "apply", "-auto-approve", "-input=false", "-no-color"],
            capture_output=True,
            cwd=cwd,
            env=dict(**env, TF_LOG_PATH=str(apply_log_path.resolve())),
            text=True,
        )
        apply_stdout_path.write_text(apply_process.stdout)
        if apply_process.returncode != 0:
            apply_stderr_path.write_text(apply_process.stderr)
            click.echo(
                error(
                    "Terraform apply failed "
                    f"(check {apply_stderr_path} and {apply_log_path})"
                )
            )
            # Roll back everything provisioned by previous run_terraform calls.
            self.reset_terraform()
            raise BootstrapError

    def run_terraform_destroy(self, cwd, env, logs_dir):
        """Run Terraform destroy.

        Raises BootstrapError when `terraform destroy` exits non-zero.
        """
        destroy_log_path = logs_dir / "destroy.log"
        destroy_stdout_path = logs_dir / "destroy-stdout.log"
        destroy_stderr_path = logs_dir / "destroy-stderr.log"
        destroy_process = subprocess.run(  # nosec B603 B607
            [
                "terraform",
                "destroy",
                "-auto-approve",
                "-input=false",
                "-no-color",
            ],
            capture_output=True,
            cwd=cwd,
            env=dict(**env, TF_LOG_PATH=str(destroy_log_path.resolve())),
            text=True,
        )
        destroy_stdout_path.write_text(destroy_process.stdout)
        if destroy_process.returncode != 0:
            destroy_stderr_path.write_text(destroy_process.stderr)
            click.echo(
                error(
                    "Terraform destroy failed "
                    f"(check {destroy_stderr_path} and {destroy_log_path})"
                )
            )
            raise BootstrapError

    def get_terraform_outputs(self, cwd, env, outputs):
        """Get Terraform outputs.

        Returns a dict mapping each requested output name to the raw stdout of
        `terraform output -raw <name>` (exit status is not checked here).
        """
        return {
            output_name: subprocess.run(  # nosec B603 B607
                ["terraform", "output", "-raw", output_name],
                capture_output=True,
                cwd=cwd,
                env=env,
                text=True,
            ).stdout
            for output_name in outputs
        }

    def reset_terraform(self):
        """Destroy all Terraform modules resources."""
        # Destroy in the order the modules were applied (see run_terraform).
        for module_name, env in self.terraform_run_modules:
            click.echo(warning(f"Destroying Terraform {module_name} resources."))
            cwd, logs_dir, _terraform_dir, env = self.get_terraform_module_params(
                module_name, env
            )
            self.run_terraform_destroy(cwd, env, logs_dir)

    def run_terraform(self, module_name, env, outputs=None):
        """Initialize the Terraform controlled resources."""
        # Record the module first so reset_terraform can roll it back on failure.
        self.terraform_run_modules.append((module_name, env))
        cwd, logs_dir, terraform_dir, env = self.get_terraform_module_params(
            module_name, env
        )
        os.makedirs(terraform_dir, exist_ok=True)
        # NOTE(review): no exist_ok here — relies on the run_id-based logs dir
        # being fresh for each (service, module) pair; confirm if reruns occur.
        os.makedirs(logs_dir)
        self.run_terraform_init(cwd, env, logs_dir, terraform_dir / "terraform.tfstate")
        self.run_terraform_apply(cwd, env, logs_dir)
        outputs and self.terraform_outputs.update(
            {module_name: self.get_terraform_outputs(cwd, env, outputs)}
        )

    def make_sed(self, file_path, placeholder, replace_value):
        """Replace a placeholder value with a given one in a given file."""
        # NOTE(review): `sed -i` without a suffix is GNU syntax (fails on BSD/macOS),
        # and placeholder/replace_value are interpolated unescaped into the sed
        # expression — confirm inputs never contain "/" or sed metacharacters.
        subprocess.run(  # nosec B603 B607
            [
                "sed",
                "-i",
                f"s/{placeholder}/{replace_value}/",
                str(self.output_dir / self.project_dirname / file_path),
            ]
        )

    def change_output_owner(self):
        """Change the owner of the output directory recursively."""
        # Only chown when a uid was collected; gid is optional ("uid" or "uid:gid").
        if self.uid:
            subprocess.run(  # nosec B603 B607
                [
                    "chown",
                    "-R",
                    ":".join(map(str, filter(None, (self.uid, self.gid)))),
                    self.service_dir,
                ]
            )

    def run(self):
        """Run the bootstrap."""
        click.echo(highlight(f"Initializing the {self.service_slug} service:"))
        self.init_service()
        self.create_env_file()
        self.format_files()
        self.compile_requirements()
        self.create_static_directory()
        # The media dir is only needed for local media storage.
        self.media_storage == "local" and self.create_media_directory()
        if self.terraform_backend == TERRAFORM_BACKEND_TFC:
            self.init_terraform_cloud()
        if self.gitlab_namespace_path:
            self.init_gitlab()
        if self.vault_url:
            self.init_vault()
        self.change_output_owner()
540 |
--------------------------------------------------------------------------------
/cookiecutter.json:
--------------------------------------------------------------------------------
1 | {
2 | "project_name": null,
3 | "project_slug": "{{ cookiecutter.project_name | slugify(separator='') }}",
4 | "service_slug": "backend",
5 | "project_dirname": "backend",
6 | "django_settings_dirname": "{{ cookiecutter.project_slug | slugify(separator='') }}",
7 | "internal_service_port": "8000",
8 | "deployment_type": ["digitalocean-k8s", "other-k8s"],
9 | "terraform_backend": "gitlab",
10 | "terraform_cloud_organization": "",
11 | "media_storage": ["digitalocean-s3", "other-s3", "local", "none"],
12 | "use_redis": "false",
13 | "use_vault": "false",
14 | "environments_distribution": "1",
15 | "resources": {
16 | "stacks": [
17 | [
18 | {
19 | "name": "main",
20 | "slug": "main"
21 | }
22 | ]
23 | ],
24 | "envs": [
25 | {
26 | "name": "development",
27 | "slug": "dev",
28 | "stack_slug": "main"
29 | },
30 | {
31 | "name": "staging",
32 | "slug": "stage",
33 | "stack_slug": "main"
34 | },
35 | {
36 | "name": "production",
37 | "slug": "prod",
38 | "stack_slug": "main"
39 | }
40 | ]
41 | },
42 | "tfvars": {},
43 | "_extensions": ["cookiecutter.extensions.SlugifyExtension"]
44 | }
45 |
--------------------------------------------------------------------------------
/pyproject.toml:
--------------------------------------------------------------------------------
1 | [tool.black]
2 | target-version = ["py312"]
3 |
4 | [tool.coverage.html]
5 | title = "Django Continuous Delivery - Coverage"
6 |
7 | [tool.coverage.report]
8 | fail_under = 79
9 | show_missing = true
10 |
11 | [tool.coverage.run]
12 | branch = true
13 | dynamic_context = "test_function"
14 | omit = [
15 | ".venv/*",
16 | "venv/*",
17 | ]
18 | source = ["."]
19 |
20 | [tool.mypy]
21 | enable_error_code = [
22 | "truthy-bool",
23 | ]
24 | ignore_missing_imports = true
25 | python_version = "3.12"
26 |
27 | [tool.bandit]
28 | exclude_dirs = [
29 | ".venv",
30 | "tests",
31 | "features"
32 | ]
33 |
34 | [tool.ruff]
35 | extend-exclude = [
36 | "__pycache__",
37 | ".vscode*",
38 | ]
39 | ignore = [
40 | "D203",
41 | "D212",
42 | "D213",
43 | "D214",
44 | "D215",
45 | "D404",
46 | "D405",
47 | "D406",
48 | "D407",
49 | "D408",
50 | "D409",
51 | "D410",
52 | "D411",
53 | "D413",
54 | "D415",
55 | "D416",
56 | "D417",
57 | ]
58 | select = [
59 | "B",
60 | "C",
61 | "D",
62 | "E",
63 | "F",
64 | "I",
65 | "W",
66 | "B9"
67 | ]
68 | target-version = "py312"
69 |
70 | [tool.ruff.isort]
71 | known-first-party = [
72 | "bootstrap",
73 | ]
74 |
--------------------------------------------------------------------------------
/requirements/common.in:
--------------------------------------------------------------------------------
1 | click~=8.1.0
2 | cookiecutter~=2.5.0
3 | pip-tools~=7.3.0
4 | pydantic~=1.10.0
5 | python-slugify~=8.0.0
6 | ruff~=0.1.0
7 | validators~=0.20.0
8 |
--------------------------------------------------------------------------------
/requirements/local.in:
--------------------------------------------------------------------------------
1 | -r test.in
2 | black~=23.12.0
3 | ipython~=8.20.0
4 | pre-commit~=3.6.0
5 | types-python-slugify~=8.0.0
6 |
--------------------------------------------------------------------------------
/requirements/test.in:
--------------------------------------------------------------------------------
1 | -r common.in
2 | coverage[toml]~=7.4.0
3 | mypy~=1.8.0
4 |
--------------------------------------------------------------------------------
/start.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | """Initialize a web project Django service based on a template."""
3 |
4 | from pathlib import Path
5 |
6 | import click
7 |
8 | from bootstrap.collector import Collector
9 | from bootstrap.constants import (
10 | DEPLOYMENT_TYPE_CHOICES,
11 | ENVIRONMENTS_DISTRIBUTION_CHOICES,
12 | GITLAB_TOKEN_ENV_VAR,
13 | MEDIA_STORAGE_CHOICES,
14 | VAULT_TOKEN_ENV_VAR,
15 | )
16 | from bootstrap.exceptions import BootstrapError
17 | from bootstrap.helpers import slugify_option
18 |
19 |
# CLI entry point. Only --project-name is prompted for by click itself
# (prompt=True); any other missing values are handled by the Collector
# (collector.collect()).
@click.command()
@click.option("--uid", type=int)
@click.option("--gid", type=int)
@click.option(
    "--output-dir",
    default=".",
    envvar="OUTPUT_BASE_DIR",
    type=click.Path(
        exists=True, path_type=Path, file_okay=False, readable=True, writable=True
    ),
)
@click.option("--project-name", prompt=True)
@click.option("--project-slug", callback=slugify_option)
@click.option("--project-dirname")
@click.option("--service-slug", callback=slugify_option)
@click.option("--internal-service-port", default=8000, type=int)
@click.option(
    "--deployment-type",
    type=click.Choice(DEPLOYMENT_TYPE_CHOICES, case_sensitive=False),
)
@click.option("--terraform-backend")
@click.option("--terraform-cloud-hostname")
@click.option("--terraform-cloud-token")
@click.option("--terraform-cloud-organization")
@click.option(
    "--terraform-cloud-organization-create/--terraform-cloud-organization-create-skip",
    is_flag=True,
    default=None,
)
@click.option("--terraform-cloud-admin-email")
# Tokens can also be supplied via environment variables so they can be kept
# off the command line.
@click.option("--vault-token", envvar=VAULT_TOKEN_ENV_VAR)
@click.option("--vault-url")
@click.option(
    "--environments-distribution", type=click.Choice(ENVIRONMENTS_DISTRIBUTION_CHOICES)
)
@click.option("--project-url-dev")
@click.option("--project-url-stage")
@click.option("--project-url-prod")
@click.option("--sentry-dsn")
@click.option("--sentry-org")
@click.option("--sentry-url")
@click.option(
    "--media-storage",
    type=click.Choice(MEDIA_STORAGE_CHOICES, case_sensitive=False),
)
@click.option("--use-redis/--no-redis", is_flag=True, default=None)
@click.option("--gitlab-url")
@click.option("--gitlab-token", envvar=GITLAB_TOKEN_ENV_VAR)
@click.option("--gitlab-namespace-path")
@click.option("--terraform-dir")
@click.option("--logs-dir")
@click.option("--quiet", is_flag=True)
def main(**options):
    """Run the setup."""
    try:
        collector = Collector(**options)
        collector.collect()
        collector.launch_runner()
    # Convert bootstrap failures into a clean exit (click.Abort) while
    # preserving the original error as __cause__.
    except BootstrapError as e:
        raise click.Abort() from e
80 |
81 |
82 | if __name__ == "__main__":
83 | main()
84 |
--------------------------------------------------------------------------------
/terraform/gitlab/main.tf:
--------------------------------------------------------------------------------
locals {
  # Data returned by the GitLab /user endpoint for the token owner.
  user_data = jsondecode(data.http.user_info.response_body)

  # Git author flags so the provisioner's commits are attributed to the token owner.
  git_config_args = "-c user.email=${local.user_data.email} -c user.name=\"${local.user_data.name}\""

  # A root group has no parent (parent_id == 0).
  is_main_group_root = data.gitlab_group.main.parent_id == 0

  # GitLab Pages base URL can only be derived for groups nested at most one
  # level deep; otherwise it is empty and the coverage badge is skipped.
  pages_base_url = local.is_main_group_root ? "https://${data.gitlab_group.main.path}.gitlab.io" : data.gitlab_group.main_parent[0].parent_id == 0 ? "https://${data.gitlab_group.main_parent[0].path}.gitlab.io/${data.gitlab_group.main.path}" : ""
}
10 |
11 | terraform {
12 | backend "local" {
13 | }
14 |
15 | required_providers {
16 | gitlab = {
17 | source = "gitlabhq/gitlab"
18 | version = "~> 16.10.0"
19 | }
20 | }
21 | }
22 |
23 | /* Providers */
24 |
25 | provider "gitlab" {
26 | token = var.gitlab_token
27 | }
28 |
29 | /* User Info */
30 |
31 | data "http" "user_info" {
32 | url = "${var.gitlab_url}/api/v4/user"
33 |
34 | request_headers = {
35 | Accept = "application/json"
36 | Authorization = "Bearer ${var.gitlab_token}"
37 | }
38 | }
39 |
40 | /* Group */
41 |
42 | data "gitlab_group" "main" {
43 | full_path = var.namespace_path
44 | }
45 |
46 | data "gitlab_group" "main_parent" {
47 | count = local.is_main_group_root ? 0 : 1
48 |
49 | group_id = data.gitlab_group.main.parent_id
50 | }
51 |
52 | /* Project */
53 |
54 | resource "gitlab_project" "main" {
55 | name = title(var.service_slug)
56 | path = var.service_slug
57 | description = "The \"${var.project_name}\" project ${var.service_slug} service."
58 | namespace_id = data.gitlab_group.main.id
59 | initialize_with_readme = false
60 | shared_runners_enabled = true
61 | }
62 |
resource "null_resource" "init_repo" {
  # Ensure the develop branch protection exists before the initial pushes.
  depends_on = [gitlab_branch_protection.develop]

  # Re-run the initialization whenever the project is (re)created.
  triggers = {
    service_project_id = gitlab_project.main.id
  }

  provisioner "local-exec" {
    # Push the generated service to the new repository. The first %s is the
    # HTTPS remote URL with the OAuth token embedded (used for the initial
    # pushes); the second %s swaps the remote back to the token-free SSH URL.
    command = join(" && ", [
      "cd ${var.service_dir}",
      format(
        join(" && ", [
          "git init --initial-branch=develop",
          "git remote add origin %s",
          "git add .",
          "git ${local.git_config_args} commit -m 'Initial commit'",
          "git push -u origin develop -o ci.skip",
          "git checkout -b main",
          "git push -u origin main -o ci.skip",
          "git remote set-url origin %s",
          "git checkout develop"
        ]),
        replace(
          gitlab_project.main.http_url_to_repo,
          "/^https://(.*)$/",
          "https://oauth2:${var.gitlab_token}@$1"
        ),
        gitlab_project.main.ssh_url_to_repo,
      )
    ])
  }
}
95 |
96 | /* Branch Protections */
97 |
98 | resource "gitlab_branch_protection" "develop" {
99 | project = gitlab_project.main.id
100 | branch = "develop"
101 | push_access_level = "maintainer"
102 | merge_access_level = "developer"
103 | }
104 |
105 | resource "gitlab_branch_protection" "main" {
106 | depends_on = [null_resource.init_repo]
107 |
108 | project = gitlab_project.main.id
109 | branch = "main"
110 | push_access_level = "no one"
111 | merge_access_level = "maintainer"
112 | }
113 |
114 | resource "gitlab_tag_protection" "tags" {
115 | project = gitlab_project.main.id
116 | tag = "*"
117 | create_access_level = "maintainer"
118 | }
119 |
120 | /* Badges */
121 |
122 | resource "gitlab_project_badge" "coverage" {
123 | count = local.pages_base_url != "" ? 1 : 0
124 |
125 | project = gitlab_project.main.id
126 | link_url = "${local.pages_base_url}/${gitlab_project.main.path}/htmlcov"
127 | image_url = "https://gitlab.com/%%{project_path}/badges/%%{default_branch}/coverage.svg"
128 | }
129 |
130 | /* Group Variables */
131 |
132 | resource "gitlab_group_variable" "vars" {
133 | for_each = var.group_variables
134 |
135 | group = data.gitlab_group.main.id
136 | key = each.key
137 | value = each.value.value
138 | protected = lookup(each.value, "protected", true)
139 | masked = lookup(each.value, "masked", false)
140 | }
141 |
142 | /* Project Variables */
143 |
144 | resource "gitlab_project_variable" "vars" {
145 | for_each = var.project_variables
146 |
147 | project = gitlab_project.main.id
148 | key = each.key
149 | value = each.value.value
150 | protected = lookup(each.value, "protected", true)
151 | masked = lookup(each.value, "masked", false)
152 | environment_scope = lookup(each.value, "environment_scope", "*")
153 | }
154 |
--------------------------------------------------------------------------------
/terraform/gitlab/variables.tf:
--------------------------------------------------------------------------------
1 | variable "gitlab_token" {
2 | description = "The GitLab token."
3 | type = string
4 | sensitive = true
5 | }
6 |
variable "gitlab_url" {
  # Description normalized to match the sibling variables ("The GitLab token." etc.).
  description = "The GitLab URL."
  type        = string
}
11 |
12 | variable "group_variables" {
13 | description = "A map of GitLab group variables to create."
14 | type = map(map(any))
15 | default = {}
16 | }
17 |
18 | variable "namespace_path" {
19 | description = "The GitLab namespace path."
20 | type = string
21 | }
22 |
23 | variable "project_name" {
24 | description = "The project name."
25 | type = string
26 | }
27 |
28 | variable "project_slug" {
29 | description = "The project slug."
30 | type = string
31 | }
32 |
33 | variable "project_variables" {
34 | description = "A map of GitLab project variables to create."
35 | type = map(map(any))
36 | default = {}
37 | }
38 |
39 | variable "service_dir" {
40 | description = "The service directory."
41 | type = string
42 | }
43 |
44 | variable "service_slug" {
45 | description = "The service slug."
46 | type = string
47 | }
48 |
--------------------------------------------------------------------------------
/terraform/terraform-cloud/main.tf:
--------------------------------------------------------------------------------
locals {
  # Use the freshly created organization when requested, otherwise the
  # pre-existing one looked up via the data source.
  organization = var.create_organization ? tfe_organization.main[0] : data.tfe_organization.main[0]

  # One workspace per (stage, stack) pair plus one per environment; names are
  # underscore-joined while tags keep the individual dimensions filterable.
  workspaces = concat(
    flatten(
      [
        for stage in ["base", "cluster"] :
        [
          for stack in var.stacks :
          {
            name = "${var.project_slug}_${var.service_slug}_${stage}_${stack}"
            description = "${var.project_name} project, ${var.service_slug} service, ${stack} stack, ${stage} stage"
            tags = [
              "project:${var.project_slug}",
              "service:${var.service_slug}",
              "stage:${stage}",
              "stack:${stack}",
            ]
          }
        ]
      ]
    ),
    [
      for env in var.environments :
      {
        name = "${var.project_slug}_${var.service_slug}_environment_${env}"
        description = "${var.project_name} project, ${var.service_slug} service, ${env} environment"
        tags = [
          "project:${var.project_slug}",
          "service:${var.service_slug}",
          "stage:environment",
          "env:${env}",
        ]
      }
    ]
  )
}
38 |
39 | terraform {
40 | backend "local" {
41 | }
42 |
43 | required_providers {
44 | tfe = {
45 | source = "hashicorp/tfe"
46 | version = "~> 0.53"
47 | }
48 | }
49 | }
50 |
51 | provider "tfe" {
52 | hostname = var.hostname
53 | token = var.terraform_cloud_token
54 | }
55 |
56 | /* Organization */
57 |
58 | data "tfe_organization" "main" {
59 | count = var.create_organization ? 0 : 1
60 |
61 | name = var.organization_name
62 | }
63 |
64 | resource "tfe_organization" "main" {
65 | count = var.create_organization ? 1 : 0
66 |
67 | name = var.organization_name
68 | email = var.admin_email
69 | }
70 |
71 | /* Workspaces */
72 |
73 | resource "tfe_workspace" "main" {
74 | for_each = { for i in local.workspaces : i.name => i }
75 |
76 | name = each.value.name
77 | description = each.value.description
78 | organization = local.organization.name
79 | tag_names = each.value.tags
80 | }
81 |
82 | resource "tfe_workspace_settings" "main-settings" {
83 | for_each = tfe_workspace.main
84 |
85 | workspace_id = each.value.id
86 | execution_mode = "local"
87 | }
88 |
--------------------------------------------------------------------------------
/terraform/terraform-cloud/variables.tf:
--------------------------------------------------------------------------------
1 | variable "admin_email" {
2 | description = "The Terraform Cloud Organization admin email."
3 | type = string
4 | default = ""
5 | }
6 |
7 | variable "create_organization" {
8 | description = "Tell if the Terraform Cloud organization should be created."
9 | type = bool
10 | default = false
11 | }
12 |
13 | variable "environments" {
14 | description = "The list of environment slugs."
15 | type = list(string)
16 | default = []
17 | }
18 |
19 | variable "hostname" {
20 | description = "The Terraform Cloud hostname."
21 | type = string
22 | default = "app.terraform.io"
23 | }
24 |
25 | variable "organization_name" {
26 | description = "The Terraform Cloud Organization name."
27 | type = string
28 | }
29 |
30 | variable "project_name" {
31 | description = "The project name."
32 | type = string
33 | }
34 |
35 | variable "project_slug" {
36 | description = "The project slug."
37 | type = string
38 | }
39 |
40 | variable "service_slug" {
41 | description = "The service slug."
42 | type = string
43 | }
44 |
45 | variable "stacks" {
46 | description = "The list of stack slugs."
47 | type = list(string)
48 | default = []
49 | }
50 |
51 | variable "terraform_cloud_token" {
52 | description = "The Terraform Cloud token."
53 | type = string
54 | sensitive = true
55 | default = ""
56 | }
57 |
--------------------------------------------------------------------------------
/terraform/vault/main.tf:
--------------------------------------------------------------------------------
1 | terraform {
2 | backend "local" {
3 | }
4 |
5 | required_providers {
6 | vault = {
7 | source = "hashicorp/vault"
8 | version = "~> 4.2.0"
9 | }
10 | }
11 | }
12 |
provider "vault" {
  address = var.vault_address

  token = var.vault_token

  # When no token is supplied, fall back to an OIDC login: the dynamic block
  # renders exactly once in that case and zero times otherwise.
  dynamic "auth_login_oidc" {
    for_each = toset(var.vault_token == "" ? ["default"] : [])

    content {
      role = auth_login_oidc.value
    }
  }
}
26 |
27 | /* Secrets */
28 |
29 | resource "vault_generic_secret" "main" {
30 | for_each = var.secrets
31 |
32 | path = "${var.project_slug}/${each.key}"
33 |
34 | data_json = jsonencode(each.value)
35 | }
36 |
--------------------------------------------------------------------------------
/terraform/vault/variables.tf:
--------------------------------------------------------------------------------
1 | variable "project_slug" {
2 | description = "The project slug."
3 | type = string
4 | }
5 |
6 | variable "secrets" {
7 | description = "The service secrets."
8 | type = map(map(string))
9 | default = {}
10 | }
11 |
12 | variable "vault_address" {
13 | description = "The Vault address."
14 | type = string
15 | }
16 |
17 | variable "vault_token" {
18 | description = "The Vault token."
19 | type = string
20 | sensitive = true
21 | default = ""
22 | }
23 |
--------------------------------------------------------------------------------
/tests/__init__.py:
--------------------------------------------------------------------------------
1 | """Talos tests."""
2 |
--------------------------------------------------------------------------------
/tests/test_helpers.py:
--------------------------------------------------------------------------------
1 | """Bootstrap helpers tests."""
2 |
3 | from unittest import TestCase
4 |
5 | from bootstrap.helpers import (
6 | format_gitlab_variable,
7 | format_tfvar,
8 | slugify_option,
9 | validate_or_prompt_domain,
10 | validate_or_prompt_path,
11 | validate_or_prompt_secret,
12 | validate_or_prompt_url,
13 | )
14 | from tests.utils import mock_input
15 |
16 |
class GitlabVariableTestcase(TestCase):
    """Test the 'format_gitlab_variable' function."""

    def test_gitlab_variable_unmasked_unprotected(self):
        """Test the formatting of a gitlab unmasked, unprotected variable."""
        self.assertEqual(
            format_gitlab_variable("value", False, False),
            '{ value = "value", protected = false }',
        )

    def test_gitlab_variable_masked_unprotected(self):
        """Test the formatting of a gitlab masked, unprotected variable."""
        self.assertEqual(
            format_gitlab_variable("value", True, False),
            '{ value = "value", masked = true, protected = false }',
        )

    def test_gitlab_variable_unmasked_protected(self):
        """Test the formatting of a gitlab unmasked, protected variable."""
        self.assertEqual(
            format_gitlab_variable("value", False, True), '{ value = "value" }'
        )

    def test_gitlab_variable_masked_protected(self):
        """Test the formatting of a gitlab masked, protected variable."""
        self.assertEqual(
            format_gitlab_variable("value", True, True),
            '{ value = "value", masked = true }',
        )
46 |
47 |
class FormatTFVarTestCase(TestCase):
    """Test the 'format_tfvar' function."""

    def test_format_list(self):
        """Test the function formats a list properly."""
        self.assertEqual(format_tfvar([1, 2, 3], "list"), '["1", "2", "3"]')

    def test_format_bool(self):
        """Test the function formats a boolean properly."""
        self.assertEqual(format_tfvar(True, "bool"), "true")

    def test_format_number(self):
        """Test the function formats a number properly."""
        self.assertEqual(format_tfvar(6, "num"), "6")

    def test_format_default(self):
        """Test the function formats a default (string) value properly."""
        self.assertEqual(format_tfvar("something else", "default"), '"something else"')
66 |
67 |
class OptionSlugifyTestCase(TestCase):
    """Test the 'slugify_option' function."""

    def test_slugify_with_value(self):
        """Test slugifying with a value."""
        self.assertEqual(
            slugify_option(None, None, "Text to slugify"), "text-to-slugify"
        )

    def test_slugify_no_value(self):
        """Test slugifying without a value."""
        self.assertEqual(slugify_option(None, None, None), None)
80 |
81 |
class ValidatePromptDomain(TestCase):
    """Test the 'validate_or_prompt_domain' function."""

    def test_validate_good(self):
        """Test validation of a good domain."""
        self.assertEqual(
            validate_or_prompt_domain("message", "www.google.com"), "www.google.com"
        )

    def test_validate_bad(self):
        """Test validation of a bad domain."""
        # The invalid initial value is rejected; the mocked user input is used.
        with mock_input("www.test.com"):
            self.assertEqual(
                validate_or_prompt_domain("message", "www. google .com"),
                "www.test.com",
            )

    def test_validate_no_value(self):
        """Test validation without a domain."""
        # The first (invalid) typed value is re-prompted; the second is accepted.
        with mock_input("www google com", "www.google.com"):
            self.assertEqual(
                validate_or_prompt_domain("message", None), "www.google.com"
            )
105 |
106 |
class ValidatePromptUrl(TestCase):
    """Test the 'validate_or_prompt_url' function."""

    def test_validate_good(self):
        """Test validation of a good URL."""
        self.assertEqual(
            validate_or_prompt_url("message", "https://www.google.com"),
            "https://www.google.com",
        )

    def test_validate_bad(self):
        """Test validation of a bad URL."""
        # The invalid initial value is rejected; the mocked user input is used.
        with mock_input("https://www.google.com"):
            self.assertEqual(
                validate_or_prompt_url("message", "https://www. google .com"),
                "https://www.google.com",
            )

    def test_validate_no_value(self):
        """Test validation with no starting value."""
        # The first two (invalid) typed values are re-prompted; the third is accepted.
        with mock_input("www google com", "www.google.com", "https://www.google.com"):
            self.assertEqual(
                validate_or_prompt_url("message", None), "https://www.google.com"
            )
131 |
132 |
class ValidatePromptPath(TestCase):
    """Test the 'validate_or_prompt_path' function."""

    def test_validate_good(self):
        """Test that an already valid path is returned unchanged."""
        result = validate_or_prompt_path("message", "/app")
        self.assertEqual(result, "/app")

    def test_validate_bad(self):
        """Test that an invalid path makes the user enter a new value."""
        with mock_input("app"):
            result = validate_or_prompt_path("message", "// app / ")
        self.assertEqual(result, "app")

    def test_validate_no_value(self):
        """Test that a missing path is prompted for."""
        with mock_input("app"):
            result = validate_or_prompt_path("message", None)
        self.assertEqual(result, "app")
149 |
150 |
class ValidatePromptSecret(TestCase):
    """Test the 'validate_or_prompt_secret' function."""

    def test_validate_good(self):
        """Test that a valid secret is returned unchanged."""
        result = validate_or_prompt_secret("message", "P4ssWord!")
        self.assertEqual(result, "P4ssWord!")

    def test_validate_bad(self):
        """Test that an invalid secret makes the user enter a new value."""
        with mock_input({"hidden": "P4ssWord!"}):
            result = validate_or_prompt_secret("message", "pw")
        self.assertEqual(result, "P4ssWord!")

    def test_validate_no_value(self):
        """Test that a missing secret is prompted for via hidden input."""
        with mock_input({"hidden": "P4ssWord!"}):
            result = validate_or_prompt_secret("message", None)
        self.assertEqual(result, "P4ssWord!")
173 |
--------------------------------------------------------------------------------
/tests/utils.py:
--------------------------------------------------------------------------------
1 | """Test utils for the project."""
2 |
3 | from contextlib import contextmanager
4 | from io import StringIO
5 | from unittest import mock
6 |
7 |
@contextmanager
def mock_input(*cmds):
    """Mock the user input.

    Plain strings are fed to stdin (one per line); dicts with a "hidden" key
    are returned, in order, by getpass.getpass.
    """
    typed = "\n".join(cmd for cmd in cmds if isinstance(cmd, str))
    secrets = [cmd["hidden"] for cmd in cmds if isinstance(cmd, dict) and "hidden" in cmd]
    stdin_patch = mock.patch("sys.stdin", StringIO(f"{typed}\n"))
    getpass_patch = mock.patch("getpass.getpass", side_effect=secrets)
    with stdin_patch, getpass_patch:
        yield
17 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_dirname}}/.dockerignore:
--------------------------------------------------------------------------------
1 | **/*.po
2 | **/__pycache__
3 | **/lost+found
4 | *.md
5 | *.svg
6 | *.txt
7 | .*
8 | data
9 | docker-compose*
10 | Dockerfile*
11 | htmlcov
12 | k8s
13 | Makefile
14 | media
15 | media_test
16 | requirements/*.in
17 | static
18 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_dirname}}/.editorconfig:
--------------------------------------------------------------------------------
1 | # EditorConfig is awesome: https://EditorConfig.org
2 |
3 | # top-most EditorConfig file
4 | root = true
5 |
6 | # Unix-style newlines with a newline ending every file
7 | [*]
8 | end_of_line = lf
9 | insert_final_newline = true
10 | trim_trailing_whitespace = true
11 |
12 | # Matches multiple files with brace expansion notation
13 | # Set default charset
14 | [*.{css,html,js,json,jsx,md,py,scss,yml}]
15 | charset = utf-8
16 | indent_style = space
17 |
18 | # 4 space indentation
19 | [*.py]
20 | indent_size = 4
21 | max_line_length = 88
22 |
23 | [*.md]
24 | indent_size = 4
25 |
26 | # 2 space indentation
27 | [*.{html,js,json,jsx,css,scss,yml}]
28 | indent_size = 2
29 |
30 | # Tab indentation (no size specified)
31 | [Makefile]
32 | indent_style = tab
33 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_dirname}}/.env_template:
--------------------------------------------------------------------------------
1 | BACKEND_BUILD_TARGET=local
2 | CACHE_URL=locmem://
3 | COMPOSE_FILE=docker-compose.yaml
4 | DATABASE_URL=postgres://postgres:postgres@postgres:5432/{{ cookiecutter.project_slug }}
5 | DJANGO_ADMINS=admin,errors@example.org
6 | DJANGO_ALLOWED_HOSTS=localhost,{{ cookiecutter.service_slug }}
7 | DJANGO_CONFIGURATION=Local
8 | DJANGO_DEBUG=True
9 | DJANGO_DEFAULT_FROM_EMAIL=info@example.org
10 | DJANGO_SECRET_KEY=__SECRETKEY__
11 | DJANGO_SERVER_EMAIL=server@example.org
12 | DJANGO_SESSION_COOKIE_DOMAIN=localhost
13 | DJANGO_SUPERUSER_EMAIL=info@example.org
14 | DJANGO_SUPERUSER_PASSWORD={{ cookiecutter.project_slug }}
15 | DJANGO_SUPERUSER_USERNAME=${USER}
16 | EMAIL_URL=console:///
17 | PYTHONBREAKPOINT=IPython.core.debugger.set_trace
18 | PYTHONDEVMODE=1
19 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_dirname}}/.gitattributes:
--------------------------------------------------------------------------------
1 | *.sh text=auto eol=lf
2 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_dirname}}/.gitignore:
--------------------------------------------------------------------------------
1 | # START https://github.com/github/gitignore/blob/master/Python.gitignore
2 |
3 | # Byte-compiled / optimized / DLL files
4 | __pycache__/
5 | *.py[cod]
6 | *$py.class
7 |
8 | # C extensions
9 | *.so
10 |
11 | # Distribution / packaging
12 | .Python
13 | build/
14 | develop-eggs/
15 | dist/
16 | downloads/
17 | eggs/
18 | .eggs/
19 | lib/
20 | lib64/
21 | parts/
22 | sdist/
23 | var/
24 | wheels/
25 | share/python-wheels/
26 | *.egg-info/
27 | .installed.cfg
28 | *.egg
29 | MANIFEST
30 |
31 | # PyInstaller
32 | # Usually these files are written by a python script from a template
33 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
34 | *.manifest
35 | *.spec
36 |
37 | # Installer logs
38 | pip-log.txt
39 | pip-delete-this-directory.txt
40 |
41 | # Unit test / coverage reports
42 | htmlcov/
43 | .tox/
44 | .nox/
45 | .coverage
46 | .coverage.*
47 | .cache
48 | nosetests.xml
49 | coverage.xml
50 | *.cover
51 | *.py,cover
52 | .hypothesis/
53 | .pytest_cache/
54 | cover/
55 |
56 | # Translations
57 | # *.mo
58 | *.pot
59 |
60 | # Django stuff:
61 | *.log
62 | local_settings.py
63 | db.sqlite3
64 | db.sqlite3-journal
65 |
66 | # Flask stuff:
67 | instance/
68 | .webassets-cache
69 |
70 | # Scrapy stuff:
71 | .scrapy
72 |
73 | # Sphinx documentation
74 | docs/_build/
75 |
76 | # PyBuilder
77 | .pybuilder/
78 | target/
79 |
80 | # Jupyter Notebook
81 | .ipynb_checkpoints
82 |
83 | # IPython
84 | profile_default/
85 | ipython_config.py
86 |
87 | # pyenv
88 | # For a library or package, you might want to ignore these files since the code is
89 | # intended to run in multiple environments; otherwise, check them in:
90 | # .python-version
91 |
92 | # pipenv
93 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
94 | # However, in case of collaboration, if having platform-specific dependencies or dependencies
95 | # having no cross-platform support, pipenv may install dependencies that don't work, or not
96 | # install all needed dependencies.
97 | #Pipfile.lock
98 |
99 | # poetry
100 | # Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
101 | # This is especially recommended for binary packages to ensure reproducibility, and is more
102 | # commonly ignored for libraries.
103 | # https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
104 | #poetry.lock
105 |
106 | # pdm
107 | # Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
108 | #pdm.lock
109 | # pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
110 | # in version control.
111 | # https://pdm.fming.dev/#use-with-ide
112 | .pdm.toml
113 |
114 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
115 | __pypackages__/
116 |
117 | # Celery stuff
118 | celerybeat-schedule
119 | celerybeat.pid
120 |
121 | # SageMath parsed files
122 | *.sage.py
123 |
124 | # Environments
125 | .env
126 | .venv
127 | env/
128 | venv/
129 | ENV/
130 | env.bak/
131 | venv.bak/
132 |
133 | # Spyder project settings
134 | .spyderproject
135 | .spyproject
136 |
137 | # Rope project settings
138 | .ropeproject
139 |
140 | # mkdocs documentation
141 | /site
142 |
143 | # mypy
144 | .mypy_cache/
145 | .dmypy.json
146 | dmypy.json
147 |
148 | # Pyre type checker
149 | .pyre/
150 |
151 | # pytype static type analyzer
152 | .pytype/
153 |
154 | # Cython debug symbols
155 | cython_debug/
156 |
157 | # PyCharm
158 | # JetBrains specific template is maintained in a separate JetBrains.gitignore that can
159 | # be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
160 | # and can be added to the global gitignore or merged into this file. For a more nuclear
161 | # option (not recommended) you can uncomment the following to ignore the entire idea folder.
162 | .idea/
163 |
164 | # END https://github.com/github/gitignore/blob/master/Python.gitignore
165 |
166 | # START local
167 |
168 | # Coverages
169 | .coverages/
170 |
171 | # SublimeText
172 | *.sublime-project
173 | *.sublime-workspace
174 |
175 | # Vim
176 | [._]*.un~
177 |
178 | # VisualStudioCode
179 | .devcontainer*
180 | .vscode/
181 |
182 | # Redis
183 | *.rdb
184 |
185 | # macOS
186 | .DS_Store
187 |
188 | # Django
189 | /static/
190 | media/
191 |
192 | # Ruff
193 | .ruff_cache/
194 |
195 | # Others
196 | .bash_history
197 | .ipython
198 | .local
199 | *.bak
200 | *.local*
201 | /data/
202 | /manage_local.py
203 | /media_test
204 | /pacts/*.json
205 | report.xml
206 |
207 | # Terraform
208 | .terraform.lock*
209 | .terraform/
210 | *.tfstate*
211 | terraform.tfvars
212 |
213 | # END local
214 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_dirname}}/.gitlab-ci.yml:
--------------------------------------------------------------------------------
1 | stages:
2 | - Test
3 | - Pact-verify
4 | - Pact-check
5 | - Build
6 | - Deploy
7 | - Pact-tag
8 | - Report
9 | - Sentry
10 |
11 | variables:
12 | COMPOSE_DOCKER_CLI_BUILD: 1
13 | DOCKER_BUILDKIT: 1
14 | PACT_PROVIDER_NAME: {{ cookiecutter.project_slug }}-{{ cookiecutter.service_slug }}
15 | PROJECT_SLUG: {{ cookiecutter.project_slug }}
16 | SENTRY_PROJECT_NAME: {{ cookiecutter.project_slug }}-{{ cookiecutter.service_slug }}
17 | VERSION_BEFORE_REF: ${CI_COMMIT_BEFORE_SHA}
18 | VERSION_REF: ${CI_COMMIT_SHA}
19 | {% with env=cookiecutter.resources.envs[0] %}
20 | .development:
21 | rules: &development-rules
22 | - &pipeline-push-rule
23 | if: $CI_PIPELINE_SOURCE != "push"
24 | when: never
25 | - &development-rule
26 | if: $CI_COMMIT_BRANCH == "develop"
27 | variables:
28 | ENV_SLUG: {{ env.slug }}
29 | STACK_SLUG: {{ env.stack_slug }}
30 | VAULT_ROLE: {{ cookiecutter.service_slug }}-{{ env.slug }}
31 | environment:
32 | name: {{ env.name }}{% if env.url %}
33 | url: {{ env.url }}{% endif %}
34 | {% endwith %}{% with env=cookiecutter.resources.envs[1] %}
35 | .staging:
36 | rules: &staging-rules
37 | - <<: *pipeline-push-rule
38 | - &staging-rule
39 | if: $CI_COMMIT_BRANCH == "main"
40 | variables:
41 | ENV_SLUG: {{ env.slug }}
42 | STACK_SLUG: {{ env.stack_slug }}
43 | VAULT_ROLE: {{ cookiecutter.service_slug }}-{{ env.slug }}
44 | environment:
45 | name: {{ env.name }}{% if env.url %}
46 | url: {{ env.url }}{% endif %}
47 | {% endwith %}{% with env=cookiecutter.resources.envs[2] %}
48 | .production:
49 | rules: &production-rules
50 | - <<: *pipeline-push-rule
51 | - &production-rule
52 | if: $CI_COMMIT_TAG
53 | variables:
54 | ENV_SLUG: {{ env.slug }}
55 | STACK_SLUG: {{ env.stack_slug }}
56 | VAULT_ROLE: {{ cookiecutter.service_slug }}-{{ env.slug }}
57 | environment:
58 | name: {{ env.name }}{% if env.url %}
59 | url: {{ env.url }}{% endif %}
60 | {% endwith %}
61 | .sentry:
62 | stage: .pre
63 | image: docker:20
64 | services:
65 | - docker:20-dind{% if cookiecutter.use_vault == "true" %}
66 | id_tokens:
67 | VAULT_ID_TOKEN:
68 | aud: ${VAULT_ADDR}{% endif %}
69 | script:
70 | - >
71 | docker run --rm
72 | -v ${PWD}:${PWD}
73 | -w ${PWD}
74 | -e CI_ENVIRONMENT_NAME{% if cookiecutter.use_vault == "true" %}
75 | -e ENV_NAME=${CI_ENVIRONMENT_NAME}{% endif %}
76 | -e PROJECT_DIR=${CI_PROJECT_DIR}
77 | -e PROJECT_SLUG
78 | -e RELEASE_END
79 | -e RELEASE_START{% if cookiecutter.use_vault == "false" %}
80 | -e SENTRY_AUTH_TOKEN
81 | -e SENTRY_DSN{% endif %}
82 | -e SENTRY_ORG
83 | -e SENTRY_PROJECT_NAME
84 | -e SENTRY_URL{% if cookiecutter.use_vault == "true" %}
85 | -e SERVICE_SLUG={{ cookiecutter.service_slug }}
86 | -e VAULT_ADDR
87 | -e VAULT_ID_TOKEN
88 | -e VAULT_ROLE{% endif %}
89 | -e VERSION_REF
90 | --entrypoint=""
91 | getsentry/sentry-cli:latest ./scripts/ci_sentry.sh ${SENTRY_CMD}
92 |
93 | .sentry_release:
94 | extends:
95 | - .sentry
96 | variables:
97 | SENTRY_CMD: release
98 | before_script:
99 | - RELEASE_START=$(date +%s)
100 |
101 | sentry_release_development:
102 | extends:
103 | - .development
104 | - .sentry_release
105 | rules:
106 | - &sentry-rule
107 | if: $SENTRY_ENABLED != "true"
108 | when: never
109 | - *development-rules
110 |
111 | sentry_release_staging:
112 | extends:
113 | - .staging
114 | - .sentry_release
115 | rules:
116 | - <<: *sentry-rule
117 | - *staging-rules
118 |
119 | sentry_release_production:
120 | extends:
121 | - .production
122 | - .sentry_release
123 | rules:
124 | - <<: *sentry-rule
125 | - *production-rules
126 |
127 | test:
128 | stage: Test
129 | image: docker:20
130 | services:
131 | - docker:20-dind
132 | needs: []
133 | rules:
134 | - if: $CI_PIPELINE_SOURCE == "push"
135 | variables:
136 | {{ cookiecutter.service_slug|upper }}_CONTAINER_NAME: "${CI_PROJECT_PATH_SLUG}-${CI_JOB_NAME}-${CI_JOB_ID}_{{ cookiecutter.service_slug }}"
137 | {{ cookiecutter.service_slug|upper }}_BUILD_TARGET: "test"
138 | {{ cookiecutter.service_slug|upper }}_IMAGE_NAME: "gitlabci_{{ cookiecutter.project_slug }}_{{ cookiecutter.service_slug }}"
139 | {{ cookiecutter.service_slug|upper }}_IMAGE_TAG: "${CI_JOB_NAME}-${CI_JOB_ID}"
140 | COMPOSE_PROJECT_NAME: "${CI_PROJECT_PATH_SLUG}-${CI_JOB_NAME}-${CI_JOB_ID}"
141 | script:
142 | - docker-compose build
143 | - docker-compose run --name ${{ "{" }}{{ cookiecutter.service_slug|upper }}_CONTAINER_NAME} {{ cookiecutter.service_slug }}
144 | - docker cp ${{ "{" }}{{ cookiecutter.service_slug|upper }}_CONTAINER_NAME}:/app/htmlcov htmlcov
145 | after_script:
146 | - docker-compose down -v
147 | coverage: '/^TOTAL.*\s+(\d+\%)$/'
148 | artifacts:
149 | expire_in: 1 day
150 | paths:
151 | - htmlcov
152 | when: always
153 |
154 | pact-verify-test:
155 | stage: Pact-verify
156 | image: docker:20
157 | services:
158 | - docker:20-dind{% if cookiecutter.use_vault == "true" %}
159 | id_tokens:
160 | VAULT_ID_TOKEN:
161 | aud: ${VAULT_ADDR}{% endif %}
162 | needs: []
163 | variables:
164 | {{ cookiecutter.service_slug|upper }}_BUILD_TARGET: "test"
165 | {{ cookiecutter.service_slug|upper }}_IMAGE_NAME: "gitlabci_{{ cookiecutter.project_slug }}_{{ cookiecutter.service_slug }}"
166 | {{ cookiecutter.service_slug|upper }}_IMAGE_TAG: "${CI_JOB_NAME}-${CI_JOB_ID}"
167 | COMPOSE_PROJECT_NAME: "${CI_PROJECT_PATH_SLUG}-${CI_JOB_NAME}-${CI_JOB_ID}"
168 | rules:
169 | - <<: *pipeline-push-rule
170 | - if: $PACT_ENABLED == "true"
171 | allow_failure: true
172 | before_script: &pact-verify-before-script{% if cookiecutter.use_vault == "true" %}
173 | - >
174 | vault_token=$(wget --quiet --post-data="role=pact&jwt=${VAULT_ID_TOKEN}"
175 | "${VAULT_ADDR%/}"/v1/auth/gitlab-jwt/login -O - |
176 | sed -n 's/^.*"client_token":"\([^"]*\)".*$/\1/p')
177 | - >
178 | PACT_BROKER_AUTH_URL=$(wget --quiet --header="X-Vault-Token: ${vault_token}"
179 | "${VAULT_ADDR%/}"/v1/"${PROJECT_SLUG}"/pact -O - |
180 | sed -n 's/^.*"pact_broker_auth_url":"\([^"]*\)".*$/\1/p'){% endif %}
181 | - export PACT_BROKER_URL="${PACT_BROKER_AUTH_URL}"
182 | script:
183 | - >
184 | if [ "${CI_COMMIT_BRANCH}" ];
185 | then PACT_CONSUMER_TAG="branch:${CI_COMMIT_BRANCH}";
186 | else PACT_CONSUMER_TAG="tag:${CI_COMMIT_TAG}";
187 | fi
188 | - docker-compose build
189 | - >
190 | docker-compose run --rm {{ cookiecutter.service_slug }} ./scripts/pact_verify.sh
191 | --pact-provider-version=${VERSION_REF}
192 | --pact-verify-consumer-tag=${PACT_CONSUMER_TAG}
193 | --pact-verify-consumer-tag="env:dev"
194 | --pact-verify-consumer-tag="env:stage"
195 | --pact-verify-consumer-tag="env:prod"
196 | --pact-publish-results
197 | after_script:
198 | - docker-compose down -v
199 |
200 | pact-verify-webhook:
201 | stage: Pact-verify
202 | image: docker:20
203 | services:
204 | - docker:20-dind
205 | variables:
206 | COMPOSE_PROJECT_NAME: "${CI_PROJECT_PATH_SLUG}-${CI_JOB_NAME}-${CI_JOB_ID}"
207 | {{ cookiecutter.service_slug|upper }}_IMAGE_NAME: "gitlabci_{{ cookiecutter.project_slug }}_{{ cookiecutter.service_slug }}"
208 | {{ cookiecutter.service_slug|upper }}_BUILD_TARGET: "test"
209 | rules:
210 | - if: $CI_PIPELINE_SOURCE == "trigger" && $PACT_ENABLED == "true"
211 | allow_failure: true
212 | before_script: *pact-verify-before-script
213 | script:
214 | - docker-compose build
215 | - >
216 | docker-compose run --rm {{ cookiecutter.service_slug }} ./scripts/pact_verify.sh
217 | --pact-provider-version=${VERSION_REF}
218 | --pact-verify-consumer-tag=${PACT_CONSUMER_TAG}
219 | --pact-publish-results
220 | after_script:
221 | - docker-compose down -v
222 |
223 | pages:
224 | stage: Report
225 | image: busybox
226 | needs: ["test"]
227 | rules:
228 | - <<: *pipeline-push-rule
229 | - if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH
230 | script:
231 | - mkdir public
232 | - mv htmlcov public/htmlcov
233 | artifacts:
234 | paths:
235 | - public
236 |
237 | .pact:
238 | image:
239 | name: docker:20
240 | services:
241 | - docker:20-dind{% if cookiecutter.use_vault == "true" %}
242 | id_tokens:
243 | VAULT_ID_TOKEN:
244 | aud: ${VAULT_ADDR}{% endif %}
245 | script:
246 | - >
247 | docker run --rm
248 | -v ${PWD}:${PWD}
249 | -w ${PWD}{% if cookiecutter.use_vault == "true" %}
250 | -e ENV_SLUG{% else %}
251 | -e PACT_BROKER_BASE_URL
252 | -e PACT_BROKER_PASSWORD
253 | -e PACT_BROKER_USERNAME{% endif %}
254 | -e PROJECT_SLUG{% if cookiecutter.use_vault == "true" %}
255 | -e VAULT_ADDR
256 | -e VAULT_ID_TOKEN{% endif %}
257 | --entrypoint=""
258 | pactfoundation/pact-cli:latest-node14 ./scripts/ci_pact.sh ${PACT_CMD}
259 |
260 | .can-i-deploy:
261 | extends:
262 | - .pact
263 | stage: Pact-check
264 | needs: ["pact-verify-test"]
265 | before_script:
266 | - >
267 | export PACT_CMD="can-i-deploy
268 | --pacticipant ${PACT_PROVIDER_NAME}
269 | --version ${VERSION_REF}
270 | --to env:${ENV_SLUG}"
271 |
272 | can-i-deploy_development:
273 | extends:
274 | - .development
275 | - .can-i-deploy
276 | rules:
277 | - &skip-pact-rule
278 | if: $PACT_ENABLED != "true"
279 | when: never
280 | - *development-rules
281 |
282 | can-i-deploy_staging:
283 | extends:
284 | - .staging
285 | - .can-i-deploy
286 | rules:
287 | - <<: *skip-pact-rule
288 | - *staging-rules
289 |
290 | can-i-deploy_production:
291 | extends:
292 | - .production
293 | - .can-i-deploy
294 | rules:
295 | - <<: *skip-pact-rule
296 | - *production-rules
297 |
298 | .build:
299 | stage: Build
300 | image: docker:20
301 | services:
302 | - docker:20-dind
303 | before_script:
304 | - export DOCKER_CONFIG=${PWD}/.dockerconfig
305 | - docker login --username "${CI_REGISTRY_USER}" --password "${CI_REGISTRY_PASSWORD}" "${CI_REGISTRY}"
306 | script:
307 | - docker build -t ${CI_REGISTRY}/${CI_PROJECT_PATH}:${VERSION_REF} --target remote --pull .
308 | - docker push ${CI_REGISTRY}/${CI_PROJECT_PATH}:${VERSION_REF}
309 | after_script:
310 | - docker logout ${CI_REGISTRY}
311 |
312 | build_development:
313 | extends:
314 | - .development
315 | - .build
316 | needs:
317 | - job: can-i-deploy_development
318 | optional: true
319 | - job: test
320 |
321 | build_staging:
322 | extends:
323 | - .staging
324 | - .build
325 | needs:
326 | - job: can-i-deploy_staging
327 | optional: true
328 | - job: test
329 |
330 | build_production:
331 | extends:
332 | - .production
333 | - .build
334 | needs:
335 | - job: can-i-deploy_production
336 | optional: true
337 | - job: test
338 |
339 | .deploy:
340 | stage: Deploy
341 | image:
342 | name: docker:20
343 | services:
344 | - docker:20-dind{% if cookiecutter.use_vault == "true" %}
345 | id_tokens:
346 | VAULT_ID_TOKEN:
347 | aud: ${VAULT_ADDR}{% endif %}
348 | variables:
349 | TF_ROOT: ${CI_PROJECT_DIR}/terraform/{{ cookiecutter.deployment_type }}
350 | before_script:
351 | - export TF_VAR_service_container_image=${CI_REGISTRY_IMAGE}:${VERSION_REF}
352 | script:
353 | - >
354 | docker run --rm
355 | -u `id -u`
356 | -v ${PWD}:${PWD}
357 | -w ${PWD}{% if cookiecutter.terraform_backend == "gitlab" %}
358 | -e CI_API_V4_URL
359 | -e CI_COMMIT_SHA
360 | -e CI_JOB_ID
361 | -e CI_JOB_STAGE
362 | -e CI_JOB_TOKEN
363 | -e CI_PROJECT_ID
364 | -e CI_PROJECT_NAME
365 | -e CI_PROJECT_NAMESPACE
366 | -e CI_PROJECT_PATH
367 | -e CI_PROJECT_URL{% endif %}
368 | -e ENV_SLUG
369 | -e PROJECT_DIR=${CI_PROJECT_DIR}
370 | -e PROJECT_SLUG
371 | -e STACK_SLUG
372 | -e TERRAFORM_BACKEND={{ cookiecutter.terraform_backend }}
373 | -e TERRAFORM_EXTRA_VAR_FILE=${ENV_SLUG}.tfvars
374 | -e TERRAFORM_VARS_DIR=${CI_PROJECT_DIR}/terraform/vars
375 | -e TF_ROOT{% if cookiecutter.terraform_backend == "gitlab" %}
376 | -e TF_STATE_NAME="env_${ENV_SLUG}"{% endif %}{% if cookiecutter.use_vault == "false" %}{% if cookiecutter.deployment_type == "digitalocean-k8s" %}
377 | -e TF_VAR_digitalocean_token="${DIGITALOCEAN_TOKEN}"{% endif %}
378 | -e TF_VAR_email_url="${EMAIL_URL}"
379 | -e TF_VAR_service_slug="{{ cookiecutter.service_slug }}"{% if cookiecutter.deployment_type == "other-k8s" %}
380 | -e TF_VAR_kubernetes_cluster_ca_certificate="${KUBERNETES_CLUSTER_CA_CERTIFICATE}"
381 | -e TF_VAR_kubernetes_host="${KUBERNETES_HOST}"
382 | -e TF_VAR_kubernetes_token="${KUBERNETES_TOKEN}"{% endif %}{% if "s3" in cookiecutter.media_storage %}
383 | -e TF_VAR_s3_access_id="${S3_ACCESS_ID}"
384 | -e TF_VAR_s3_secret_key="${S3_SECRET_KEY}"
385 | -e TF_VAR_s3_region="${S3_REGION}"
386 | -e TF_VAR_s3_host="${S3_HOST}"
387 | -e TF_VAR_s3_bucket_name="${S3_BUCKET_NAME}"{% endif %}
388 | -e TF_VAR_sentry_dsn="${SENTRY_DSN}"{% endif %}
389 | -e TF_VAR_service_container_image{% if cookiecutter.terraform_backend != "gitlab" %}
390 | -e TF_WORKSPACE="{{ cookiecutter.project_slug }}_backend_environment_${ENV_SLUG}"{% endif %}{% if cookiecutter.terraform_backend == "terraform-cloud" and cookiecutter.use_vault == "false" %}
391 | -e TFC_TOKEN{% endif %}{% if cookiecutter.use_vault == "true" %}
392 | -e VAULT_ADDR
393 | -e VAULT_ID_TOKEN
394 | -e VAULT_ROLE
395 | -e VAULT_SECRETS="digitalocean email k8s s3 {{ cookiecutter.service_slug }}/extra {{ cookiecutter.service_slug }}/sentry"
396 | -e VAULT_SECRETS_PREFIX="envs/${CI_ENVIRONMENT_NAME}"
397 | -e VAULT_VERSION{% endif %}
398 | registry.gitlab.com/gitlab-org/terraform-images/stable:latest ./scripts/deploy.sh
399 | artifacts:
400 | name: plan
401 | reports:
402 | terraform: ${TF_ROOT}/plan.json
403 |
404 | deploy_development:
405 | extends:
406 | - .development
407 | - .deploy
408 | needs: ["build_development"]
409 |
410 | deploy_staging:
411 | extends:
412 | - .staging
413 | - .deploy
414 | needs: ["build_staging"]
415 |
416 | deploy_production:
417 | extends:
418 | - .production
419 | - .deploy
420 | needs: ["build_production"]
421 |
422 | .rollback:
423 | extends: .deploy
424 | before_script:
425 | - export TF_VAR_service_container_image=${CI_REGISTRY_IMAGE}:${VERSION_BEFORE_REF}
426 |
427 | rollback_development:
428 | extends:
429 | - .development
430 | - .rollback
431 | needs: ["deploy_development"]
432 | rules:
433 | - <<: *pipeline-push-rule
434 | - <<: *development-rule
435 | when: manual
436 | allow_failure: true
437 |
438 | rollback_staging:
439 | extends:
440 | - .staging
441 | - .rollback
442 | needs: ["deploy_staging"]
443 | rules:
444 | - <<: *pipeline-push-rule
445 | - <<: *staging-rule
446 | when: manual
447 | allow_failure: true
448 |
449 | rollback_production:
450 | extends:
451 | - .production
452 | - .rollback
453 | needs: ["deploy_production"]
454 | rules:
455 | - <<: *pipeline-push-rule
456 | - <<: *production-rule
457 | when: manual
458 | allow_failure: true
459 |
460 | .create-version-tag:
461 | extends:
462 | - .pact
463 | stage: Pact-tag
464 | before_script:
465 | - >
466 | export PACT_CMD="create-version-tag
467 | --pacticipant ${PACT_PROVIDER_NAME}
468 | --version ${VERSION_REF}
469 | --tag env:${ENV_SLUG}"
470 |
471 | create-version-tag_development:
472 | extends:
473 | - .create-version-tag
474 | needs: ["deploy_development"]
475 | rules:
476 | - <<: *skip-pact-rule
477 | - *development-rules
478 |
479 | create-version-tag_staging:
480 | extends:
481 | - .create-version-tag
482 | needs: ["deploy_staging"]
483 | rules:
484 | - <<: *skip-pact-rule
485 | - *staging-rules
486 |
487 | create-version-tag_production:
488 | extends:
489 | - .create-version-tag
490 | needs: ["deploy_production"]
491 | rules:
492 | - <<: *skip-pact-rule
493 | - *production-rules
494 |
495 | .sentry_deploy_success:
496 | extends:
497 | - .sentry
498 | variables:
499 | SENTRY_CMD: success
500 | stage: Sentry
501 | before_script:
502 | - RELEASE_END=$(date +%s)
503 |
504 | sentry_success_development:
505 | extends:
506 | - .development
507 | - .sentry_deploy_success
508 | needs: ["deploy_development"]
509 | rules:
510 | - <<: *sentry-rule
511 | - <<: *pipeline-push-rule
512 | - <<: *development-rule
513 | when: on_success
514 |
515 | sentry_success_staging:
516 | extends:
517 | - .staging
518 | - .sentry_deploy_success
519 | needs: ["deploy_staging"]
520 | rules:
521 | - <<: *sentry-rule
522 | - <<: *pipeline-push-rule
523 | - <<: *staging-rule
524 | when: on_success
525 |
526 | sentry_success_production:
527 | extends:
528 | - .production
529 | - .sentry_deploy_success
530 | needs: ["deploy_production"]
531 | rules:
532 | - <<: *sentry-rule
533 | - <<: *pipeline-push-rule
534 | - <<: *production-rule
535 | when: on_success
536 |
537 | .sentry_deploy_failure:
538 | extends:
539 | - .sentry
540 | variables:
541 | SENTRY_CMD: failure
542 | stage: Sentry
543 |
544 | sentry_failure_development:
545 | extends:
546 | - .development
547 | - .sentry_deploy_failure
548 | rules:
549 | - <<: *sentry-rule
550 | - <<: *pipeline-push-rule
551 | - <<: *development-rule
552 | when: on_failure
553 |
554 | sentry_failure_staging:
555 | extends:
556 | - .staging
557 | - .sentry_deploy_failure
558 | rules:
559 | - <<: *sentry-rule
560 | - <<: *pipeline-push-rule
561 | - <<: *staging-rule
562 | when: on_failure
563 |
564 | sentry_failure_production:
565 | extends:
566 | - .production
567 | - .sentry_deploy_failure
568 | rules:
569 | - <<: *sentry-rule
570 | - <<: *pipeline-push-rule
571 | - <<: *production-rule
572 | when: on_failure
573 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_dirname}}/.pre-commit-config.yaml:
--------------------------------------------------------------------------------
1 | default_language_version:
2 | python: python3.12
3 | repos:
4 | - repo: https://github.com/pre-commit/pre-commit-hooks
5 | rev: "v4.5.0"
6 | hooks:
7 | - id: check-added-large-files
8 | args: ["--maxkb=1024"]
9 | - id: check-case-conflict
10 | - id: check-docstring-first
11 | - id: check-json
12 | - id: check-merge-conflict
13 | - id: check-toml
14 | - id: check-xml
15 | - id: check-yaml
16 | args: ["--allow-multiple-documents"]
17 | - id: debug-statements
18 | - id: detect-private-key
19 | - id: end-of-file-fixer
20 | - id: file-contents-sorter
21 | files: ^(\.env_template|\.dockerignore|requirements/\w*.in)$
22 | args: ["--ignore-case", "--unique"]
23 | - id: fix-byte-order-marker
24 | - id: fix-encoding-pragma
25 | args: ["--remove"]
26 | - id: mixed-line-ending
27 | - id: trailing-whitespace
28 | - repo: https://github.com/adamchainz/django-upgrade
29 | rev: "1.15.0"
30 | hooks:
31 | - id: django-upgrade
32 | args: [--target-version, "5.0"]
33 | - repo: https://github.com/charliermarsh/ruff-pre-commit
34 | rev: v0.1.11
35 | hooks:
36 | - id: ruff
37 | args:
38 | - --fix
39 | - id: ruff-format
40 | - repo: https://github.com/tox-dev/pyproject-fmt
41 | rev: "1.5.3"
42 | hooks:
43 | - id: pyproject-fmt
44 | - repo: https://github.com/pre-commit/mirrors-prettier
45 | rev: "v3.1.0"
46 | hooks:
47 | - id: prettier
48 | exclude_types: [html]
49 | - repo: https://github.com/ducminh-phan/reformat-gherkin
50 | rev: v3.0.1
51 | hooks:
52 | - id: reformat-gherkin
53 | - repo: https://github.com/pre-commit/mirrors-mypy
54 | rev: "v1.8.0"
55 | hooks:
56 | - id: mypy
57 | args: ["--no-site-packages"]
58 | - repo: https://github.com/pycqa/bandit
59 | rev: "1.7.6"
60 | hooks:
61 | - id: bandit
62 | additional_dependencies: ["bandit[toml]"]
63 | args: ["--configfile", "pyproject.toml", "--quiet", "--recursive"]
64 | - repo: https://github.com/trailofbits/pip-audit
65 | rev: v2.6.3
66 | hooks:
67 | - id: pip-audit
68 | args:
69 | [
70 | "--require-hashes",
71 | "--disable-pip",
72 | "--requirement",
73 | "requirements/remote.txt",
74 | ]
75 | - repo: https://github.com/shellcheck-py/shellcheck-py
76 | rev: v0.9.0.6
77 | hooks:
78 | - id: shellcheck
79 | args: ["--external-sources"]
80 | exclude: "(^scripts/ci_.*$|^scripts/deploy/vault.sh|^scripts/deploy/terraform.sh$)"
81 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_dirname}}/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM python:3.12-slim-bookworm AS base
2 |
3 | LABEL company="20tab" project="{{ cookiecutter.project_slug }}" service="backend" stage="base"
4 | ARG DEBIAN_FRONTEND=noninteractive
5 | ARG USER=appuser
6 | ENV APPUSER=$USER LANG=C.UTF-8 LC_ALL=C.UTF-8 PYTHONUNBUFFERED=1 PYTHONDONTWRITEBYTECODE=1 WORKDIR=/app
7 | WORKDIR $WORKDIR
8 | RUN useradd --skel /dev/null --create-home $APPUSER
9 | RUN chown $APPUSER:$APPUSER $WORKDIR
10 | ENV PATH="/home/${APPUSER}/.local/bin:${PATH}"
11 | ARG PACKAGES_PATH=/home/${APPUSER}/.local/lib/python3.12/site-packages
12 | RUN apt-get update \
13 | && apt-get install --assume-yes --no-install-recommends \
14 | libpq5 \
15 | && rm -rf /var/lib/apt/lists/*
16 | COPY --chown=$APPUSER ./requirements/base.txt requirements/base.txt
17 | RUN apt-get update \
18 | && apt-get install --assume-yes --no-install-recommends \
19 | gcc \
20 | libc6-dev \
21 | libpq-dev \
22 | && su $APPUSER -c "python3 -m pip install --user --no-cache-dir -r requirements/base.txt" \
23 | && find ${PACKAGES_PATH} -regex '^.*/locale/.*/*.\(mo\|po\)$' -not -path '*/en*' -not -path '*/it*' -delete || true \
24 | && apt-get purge --assume-yes --auto-remove \
25 | gcc \
26 | libc6-dev \
27 | libpq-dev \
28 | && rm -rf /var/lib/apt/lists/*
29 | COPY --chown=$APPUSER ./requirements/common.txt requirements/common.txt
30 | RUN su $APPUSER -c "python3 -m pip install --user --no-cache-dir -r requirements/common.txt" \
31 | && find ${PACKAGES_PATH} -regex '^.*/locale/.*/*.\(mo\|po\)$' -not -path '*/en*' -not -path '*/it*' -delete || true
32 |
33 | FROM base AS test
34 |
35 | LABEL company="20tab" project="{{ cookiecutter.project_slug }}" service="backend" stage="test"
36 | ENV DJANGO_CONFIGURATION=Testing
37 | USER $APPUSER
38 | COPY --chown=$APPUSER ./requirements/test.txt requirements/test.txt
39 | RUN python3 -m pip install --user --no-cache-dir -r requirements/test.txt
40 | COPY --chown=$APPUSER . .
41 | CMD ./scripts/test.sh
42 |
43 | FROM base AS remote
44 |
45 | LABEL company="20tab" project="{{ cookiecutter.project_slug }}" service="backend" stage="remote"
46 | ENV DJANGO_CONFIGURATION=Remote INTERNAL_SERVICE_PORT={{ cookiecutter.internal_service_port }}
47 | USER $APPUSER
48 | ARG PACKAGES_PATH=/home/${APPUSER}/.local/lib/python3.12/site-packages
49 | COPY --chown=$APPUSER ./requirements/remote.txt requirements/remote.txt
50 | RUN python3 -m pip install --user --no-cache-dir -r requirements/remote.txt \
51 | && find ${PACKAGES_PATH}/boto*/data/* -maxdepth 0 -type d -not -name s3* -exec rm -rf {} \; || true
52 | COPY --chown=$APPUSER . .
53 | RUN DJANGO_SECRET_KEY=build python3 -m manage collectstatic --clear --link --noinput
54 | ENTRYPOINT ["./scripts/entrypoint.sh"]
55 | CMD ["python3", "-m", "gunicorn", "{{ cookiecutter.django_settings_dirname }}.asgi"]
56 |
57 | FROM base AS local
58 |
59 | LABEL company="20tab" project="{{ cookiecutter.project_slug }}" service="backend" stage="local"
60 | ENV DJANGO_CONFIGURATION=Local INTERNAL_SERVICE_PORT={{ cookiecutter.internal_service_port }}
61 | RUN apt-get update \
62 | && apt-get install --assume-yes --no-install-recommends \
63 | curl \
64 | gcc \
65 | gettext \
66 | git \
67 | graphviz \
68 | libpq-dev \
69 | make \
70 | openssh-client \
71 | postgresql-client
72 | USER $APPUSER
73 | COPY --chown=$APPUSER ./requirements/local.txt requirements/local.txt
74 | RUN python3 -m pip install --user --no-cache-dir -r requirements/local.txt
75 | COPY --chown=$APPUSER . .
76 | RUN DJANGO_SECRET_KEY=build python3 -m manage collectstatic --clear --link --noinput
77 | ENTRYPOINT ["./scripts/entrypoint.sh"]
78 | CMD python3 -m manage runserver 0.0.0.0:${INTERNAL_SERVICE_PORT}
79 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_dirname}}/LICENSE.md:
--------------------------------------------------------------------------------
1 | # The MIT License (MIT)
2 |
3 | Copyright (c) 2014-2023 20tab srl
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy of
6 | this software and associated documentation files (the "Software"), to deal in
7 | the Software without restriction, including without limitation the rights to
8 | use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
9 | the Software, and to permit persons to whom the Software is furnished to do so,
10 | subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
17 | FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
18 | COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
19 | IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
20 | CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
21 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_dirname}}/Makefile:
--------------------------------------------------------------------------------
1 | .DEFAULT_GOAL := help
2 |
3 | .PHONY: behave
4 | behave: ## Run behave test
5 | ./scripts/behave.sh
6 |
7 | .PHONY: check
8 | check: ## Check code formatting and import sorting
9 | ./scripts/check.sh
10 |
11 | .PHONY: collectstatic
12 | collectstatic: ## Django collectstatic
13 | python3 -m manage collectstatic --clear --link --noinput
14 |
15 | .PHONY: compilemessages
16 | compilemessages: ## Django compilemessages
17 | python3 -m manage compilemessages
18 |
19 | .PHONY: coverage
20 | coverage: ## Run coverage
21 | ./scripts/coverage.sh
22 |
23 | .PHONY: createsuperuser
24 | createsuperuser: ## Django createsuperuser
25 | python3 -m manage createsuperuser --noinput
26 |
27 | .PHONY: dumpgroups
28 | dumpgroups: ## Django dump auth.Group data
29 | python3 -m manage dumpdata auth.Group --natural-foreign --natural-primary --output fixtures/auth_groups.json
30 |
31 | .PHONY: fix
32 | fix: ## Fix Python code formatting, linting and sorting imports
33 | python3 -m ruff format .
34 | python3 -m ruff check --fix .
35 | python3 -m mypy --no-site-packages .
36 |
37 | .PHONY: flush
38 | flush: ## Django flush
39 | python3 -m manage flush --noinput
40 |
41 | .PHONY: graph_models
42 | graph_models: ## Django generate graph models
43 | python3 -m manage graph_models --output models.svg
44 |
45 | .PHONY: loadgroups
46 | loadgroups: ## Django load auth.Group data
47 | python3 -m manage loaddata fixtures/auth_groups.json
48 |
49 | .PHONY: local
50 | local: pip_update ## Install local requirements and dependencies
51 | python3 -m piptools sync requirements/local.txt
52 |
53 | .PHONY: messages
54 | messages: ## Django makemessages
55 | python3 -m manage makemessages --add-location file --ignore requirements --ignore htmlcov --ignore features --ignore gunicorn.conf.py --locale it
56 |
57 | .PHONY: migrate
58 | migrate: ## Django migrate
59 | python3 -m manage migrate --noinput
60 |
61 | .PHONY: migrations
62 | ifeq ($(name),)
63 | migrations: ## Django makemigrations with optional `name="migration_name app_name"`
64 | python3 -m manage makemigrations --no-header
65 | else
66 | migrations:
67 | python3 -m manage makemigrations --no-header --name $(name)
68 | endif
69 |
70 | .PHONY: outdated
71 | outdated: ## Check outdated requirements and dependencies
72 | python3 -m pip list --outdated
73 |
74 | .PHONY: pip
75 | pip: pip_update ## Compile requirements
76 | python3 -m piptools compile --generate-hashes --no-header --quiet --resolver=backtracking --strip-extras --upgrade --output-file requirements/base.txt requirements/base.in
77 | python3 -m piptools compile --generate-hashes --no-header --quiet --resolver=backtracking --strip-extras --upgrade --output-file requirements/common.txt requirements/common.in
78 | python3 -m piptools compile --generate-hashes --no-header --quiet --resolver=backtracking --strip-extras --upgrade --output-file requirements/local.txt requirements/local.in
79 | python3 -m piptools compile --generate-hashes --no-header --quiet --resolver=backtracking --strip-extras --upgrade --output-file requirements/remote.txt requirements/remote.in
80 | python3 -m piptools compile --generate-hashes --no-header --quiet --resolver=backtracking --strip-extras --upgrade --output-file requirements/test.txt requirements/test.in
81 |
82 | .PHONY: pip_update
83 | pip_update: ## Update requirements and dependencies
84 | python3 -m pip install --quiet --upgrade pip~=23.3.0 pip-tools~=7.3.0 setuptools~=69.0.0 wheel~=0.42.0
85 |
86 | .PHONY: precommit
87 | precommit: ## Fix code formatting, linting and sorting imports
88 | python3 -m pre_commit run --all-files
89 |
90 | .PHONY: precommit_install
91 | precommit_install: ## Install pre_commit
92 | python3 -m pre_commit install
93 |
94 | .PHONY: precommit_update
95 | precommit_update: ## Update pre_commit
96 | python3 -m pre_commit autoupdate
97 |
98 | .PHONY: pytest
99 | pytest: ## Run debugging test with pytest
100 | python3 -m pytest --capture=no --dc=Testing --durations 10
101 |
102 | .PHONY: remote
103 | remote: pip_update ## Install remote requirements and dependencies
104 | python3 -m piptools sync requirements/remote.txt
105 |
106 | .PHONY: report
107 | report: ## Run coverage report
108 | ./scripts/report.sh
109 |
110 | .PHONY: runserver
111 | runserver: ## Django run
112 | python3 -m manage runserver 0:8000
113 |
114 | .PHONY: shellplus
115 | shellplus: ## Run shell_plus
116 | python3 -m manage shell_plus
117 |
118 | ifeq (simpletest,$(firstword $(MAKECMDGOALS)))
119 | simpletestargs := $(wordlist 2, $(words $(MAKECMDGOALS)), $(MAKECMDGOALS))
120 | $(eval $(simpletestargs):;@true)
121 | endif
122 |
123 | .PHONY: simpletest
124 | simpletest: ## Run debugging test
125 | # You can pass more arguments as follows:
126 | # make simpletest -- --debug-sql --failfast --keepdb --pdb --verbosity 2 path.to.TestClass
127 | python3 -m manage test --configuration=Testing --shuffle --timing $(simpletestargs)
128 |
129 | .PHONY: test
130 | test: ## Run test
131 | ./scripts/test.sh
132 |
133 | .PHONY: update
134 | update: pip precommit_update ## Run update
135 |
136 | CURRENT_BRANCH=`git rev-parse --abbrev-ref HEAD`
137 |
138 | .PHONY: verifybranchpacts
139 | verifybranchpacts: ## Verify pact for the current branch consumer tag
140 | ./scripts/pact_verify.sh --verbose --capture=no --pact-verify-consumer-tag="branch:"$(CURRENT_BRANCH)
141 |
142 | .PHONY: verifypacts
143 | verifypacts: ## Verify pact for all environments consumer tags
144 | ./scripts/pact_verify.sh --verbose --capture=no --pact-verify-consumer-tag="branch:"$(CURRENT_BRANCH)
145 | ./scripts/pact_verify.sh --verbose --capture=no --pact-verify-consumer-tag="env:dev"
146 | ./scripts/pact_verify.sh --verbose --capture=no --pact-verify-consumer-tag="env:stage"
147 | ./scripts/pact_verify.sh --verbose --capture=no --pact-verify-consumer-tag="env:prod"
148 |
149 | .PHONY: help
150 | help:
151 | @echo "[Help] Makefile list commands:"
152 | @grep -E '^[a-zA-Z_-]+:.*?## .*$$' $(MAKEFILE_LIST) | sort | awk 'BEGIN {FS = ":.*?## "}; {printf "\033[36m%-30s\033[0m %s\n", $$1, $$2}'
153 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_dirname}}/README.md:
--------------------------------------------------------------------------------
1 | # {{ cookiecutter.project_name }}
2 |
3 | [](https://github.com/astral-sh/ruff)
4 |
5 | A [Django](https://docs.djangoproject.com) project using [uvicorn](https://www.uvicorn.org/#running-with-gunicorn) ASGI server.
6 |
7 | ## Index
8 |
9 | - [Conventions](#conventions)
10 | - [Initialization](#initialization)
11 | - [Virtual environment](#virtual-environment)
12 | - [Requirements](#requirements)
13 | - [Git](#git)
14 | - [Git clone](#git-clone)
15 | - [Git hooks](#git-hooks)
16 | - [Libraries](#libraries)
17 | - [List outdated libraries](#list-outdated-libraries)
18 | - [Update libraries](#update-libraries)
19 | - [Install libraries](#install-libraries)
20 | - [Testing](#testing)
21 | - [Static files](#static-files)
22 | - [Continuous Integration](#continuous-integration)
23 | - [GitLab CI](#gitlab-ci)
24 |
25 | ## Conventions
26 |
27 | - replace `projects` with your actual projects directory
28 | - replace `git_repository_url` with your actual git repository URL
29 |
30 | ## Initialization
31 |
32 | We suggest updating pip to the latest version and using a virtual environment to wrap all your libraries.
33 |
34 | ### Virtual environment
35 |
36 | **IMPORTANT**: Please, create an empty virtual environment, with the right Python version, and activate it.
37 | To install and use a virtual environment, please visit the official [Python tutorial](https://docs.python.org/3/tutorial/venv.html).
38 |
39 | ## Git
40 |
41 | ### Git clone
42 |
43 | To get the existing project, change the directory, clone the project repository and enter the newly created `{{ cookiecutter.project_slug }}` directory.
44 |
45 | ### Git hooks
46 |
47 | To install pre-commit into your git hooks, run the command below. Pre-commit will then run on every commit. Every time you clone a project that uses pre-commit, running `pre-commit install` should always be the first thing you do.
48 |
49 | ```shell
50 | $ make precommit_install
51 | ```
52 |
53 | ## Libraries
54 |
55 | ### Self-documentation of Makefile commands
56 |
57 | To show the Makefile self-documentation help:
58 |
59 | ```shell
60 | $ make
61 | ```
62 |
63 | ### List outdated libraries
64 |
65 | To list all outdated installed libraries:
66 |
67 | ```shell
68 | $ make outdated
69 | ```
70 |
71 | ### Update libraries
72 |
73 | Edit the appropriate requirements file `*.in`, to add/remove pinned libraries or modify their versions.
74 |
75 | To update the compiled requirements files (`requirements/*.txt`), execute:
76 |
77 | ```shell
78 | $ make pip
79 | ```
80 |
81 | ### Install libraries
82 |
83 | To install the just updated requirements (e.g. `requirements/local.txt`), execute:
84 |
85 | ```shell
86 | $ make local
87 | ```
88 |
89 | ## Testing
90 |
91 | To run the full test suite, with coverage calculation, execute:
92 |
93 | ```shell
94 | $ make test
95 | ```
96 |
97 | To run the full test suite, without coverage calculation, execute:
98 |
99 | ```shell
100 | $ make simpletest
101 | ```
102 |
103 | To run a single test suite, without coverage calculation, execute:
104 |
105 | ```shell
106 | $ make simpletest app.tests.single.Test.to_execute
107 | ```
108 |
109 | The `simpletest` command accepts dashed arguments with a particular syntax, such as:
110 |
111 | ```shell
112 | $ make simpletest app.tests.single.Test.to_execute -- --keepdb
113 | ```
114 |
115 | ## Static files
116 |
117 | To collect static files, execute:
118 |
119 | ```shell
120 | $ make collectstatic
121 | ```
122 |
123 | ## Continuous Integration
124 |
125 | Depending on the CI tool, you might need to configure Django environment variables.
126 |
127 | ### GitLab CI
128 |
129 | The configuration file `.gitlab-ci.yml` should work as it is, needing no further customization.
130 |
131 | ### The Kubernetes resource limits
132 |
133 | The Kubernetes deployment service limits should be adapted to the expected load of the other services and the size of the available nodes.
134 |
135 | By default, the `s-1vcpu-1gb-amd` DigitalOcean droplet is used (https://slugs.do-api.dev/), which allocates 900.00m of CPU capacity and 1.54Gi of memory capacity.
136 |
137 | The following default values are calculated assuming 2 deployments and 2 stacks on a single node.
138 |
139 | | tfvars name | default value |
140 | | ----------------------- | ------------- |
141 | | service_limits_cpu | 550m |
142 | | service_limits_memory | 512Mi |
143 | | service_requests_cpu | 25m |
144 | | service_requests_memory | 115Mi |
145 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_dirname}}/docker-compose.yaml:
--------------------------------------------------------------------------------
services:
  # Application service; the build target defaults to the "test" stage so local
  # `docker compose` runs use the image with test dependencies installed.
  {{ cookiecutter.service_slug }}:
    build:
      args:
        USER: ${USER:-appuser}
      context: .
      target: ${{ "{" }}{{ cookiecutter.service_slug|upper }}_BUILD_TARGET:-test}
    image: ${{ "{" }}{{ cookiecutter.service_slug|upper }}_IMAGE_NAME:-{{ cookiecutter.project_slug }}_{{ cookiecutter.service_slug }}}:${{ "{" }}{{ cookiecutter.service_slug|upper }}_IMAGE_TAG:-latest}
    depends_on:
      postgres:
        condition: service_healthy
    # Variables without a value are forwarded from the host/.env environment.
    environment:
      - CACHE_URL
      - DATABASE_URL=${DATABASE_URL:-postgres://postgres:postgres@postgres:5432/{{ cookiecutter.project_slug }}}
      - DJANGO_ADMINS
      - DJANGO_ALLOWED_HOSTS
      - DJANGO_CONFIGURATION=${DJANGO_CONFIGURATION:-Testing}
      - DJANGO_DEBUG
      - DJANGO_DEFAULT_FROM_EMAIL
      - DJANGO_SECRET_KEY
      - DJANGO_SERVER_EMAIL
      - DJANGO_SESSION_COOKIE_DOMAIN
      - DJANGO_SUPERUSER_EMAIL
      - DJANGO_SUPERUSER_PASSWORD
      - DJANGO_SUPERUSER_USERNAME
      - EMAIL_URL
      - INTERNAL_SERVICE_PORT=${INTERNAL_SERVICE_PORT:-8000}
      - PACT_BROKER_URL
      - PACT_PROVIDER_NAME
      - PYTHONBREAKPOINT
    ports:
      - "${{ '{' }}{{ cookiecutter.service_slug|upper }}_PORT:-{{ cookiecutter.internal_service_port }}{{ '}' }}:${INTERNAL_SERVICE_PORT:-{{ cookiecutter.internal_service_port }}{{ '}' }}"
    user: ${USER:-appuser}

  # Database service with a persistent named volume and a readiness healthcheck
  # the application service waits on.
  postgres:
    environment:
      - POSTGRES_DB={{ cookiecutter.project_slug }}
      - POSTGRES_INITDB_ARGS=--no-sync
      - POSTGRES_PASSWORD=postgres
    healthcheck:
      test: ["CMD", "pg_isready", "-U", "postgres"]
      interval: 3s
      timeout: 3s
      retries: 30
    image: postgres:14-bullseye
    volumes:
      - pg_data:/var/lib/postgresql/data

volumes:
  pg_data: {}
51 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_dirname}}/features/empty.feature:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/20tab/django-continuous-delivery/1ed8a7da86efa8c0799867b9331f27078d022e01/{{cookiecutter.project_dirname}}/features/empty.feature
--------------------------------------------------------------------------------
/{{cookiecutter.project_dirname}}/features/steps/__init__.py:
--------------------------------------------------------------------------------
1 | """Features steps modules."""
2 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_dirname}}/gunicorn.conf.py:
--------------------------------------------------------------------------------
"""Gunicorn configuration file."""

import os

# Logging
# https://docs.gunicorn.org/en/stable/settings.html#logging

# Apache-combined-style access log, emitted on stdout/stderr ("-").
access_log_format = '%(h)s %(l)s %(u)s %(t)s "%(r)s" %(s)s %(b)s "%(f)s" "%(a)s"'
accesslog = "-"
errorlog = "-"
loglevel = "info"

# Server Socket
# https://docs.gunicorn.org/en/stable/settings.html#server-socket

# Bind on all interfaces; the port is read from the environment so the
# container orchestrator can override it at runtime.
bind = "0.0.0.0:" + os.getenv(
    "INTERNAL_SERVICE_PORT", "{{ cookiecutter.internal_service_port }}"
)

# Worker Processes
# https://docs.gunicorn.org/en/stable/settings.html#worker-processes

# Project-local Uvicorn worker class serving the Django ASGI application.
worker_class = "{{ cookiecutter.django_settings_dirname }}.workers.UvicornDjangoWorker"

# Temporary Directory
# https://docs.gunicorn.org/en/stable/settings.html#worker-tmp-dir

# In-memory tmpfs keeps the worker heartbeat file off slow/absent disk I/O.
worker_tmp_dir = "/dev/shm"  # nosec B108
29 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_dirname}}/manage.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | """Django's command-line utility for administrative tasks."""
3 | import os
4 | import sys
5 |
6 |
def _load_environment():
    """Load variables from a .env file when python-dotenv is available."""
    try:
        from dotenv import find_dotenv, load_dotenv
    except ModuleNotFoundError:
        # python-dotenv is a local-only dependency; skip silently elsewhere.
        return
    # Already-set environment variables are never overridden.
    load_dotenv(find_dotenv())


def main():
    """Run administrative tasks."""
    _load_environment()
    os.environ.setdefault(
        "DJANGO_SETTINGS_MODULE", "{{ cookiecutter.django_settings_dirname }}.settings"
    )
    try:
        # django-configurations replaces Django's own command-line entry point.
        from configurations.management import execute_from_command_line
    except ImportError as exc:
        raise ImportError(
            "Couldn't import Django. Are you sure it's installed and "
            "available on your PYTHONPATH environment variable? Did you "
            "forget to activate a virtual environment?"
        ) from exc
    execute_from_command_line(sys.argv)


if __name__ == "__main__":
    main()
31 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_dirname}}/pacts/__init__.py:
--------------------------------------------------------------------------------
1 | """Pact contract testing resources."""
2 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_dirname}}/pacts/handler.py:
--------------------------------------------------------------------------------
1 | """Pact provider state handler."""
2 |
3 | import re
4 | import shutil
5 | from datetime import datetime
6 |
7 | import time_machine
8 | from django.conf import settings
9 | from django.utils.text import slugify
10 | from pactman.verifier.verify import ProviderStateMissing
11 |
# Fixed "now" that time_machine freezes to while provider states run.
DEFAULT_DATETIME = datetime(2021, 5, 17, 8, 30, 00)
13 |
14 |
def make_key(*args):  # pragma: no cover
    """Build a slugified key by dash-joining the truthy arguments."""
    parts = [str(arg) for arg in args if arg]
    return slugify("-".join(parts))
18 |
19 |
class ProviderStatesContext(dict):
    """A context for Provider states initialization."""

    def __init__(self, *args, **kwargs):
        """Initialize the instance.

        Accepts the same arguments as ``dict``.
        """
        # BUG FIX: the previous implementation returned
        # ``super().__init_subclass__()`` instead of calling ``dict.__init__``,
        # so any initial mapping passed to the constructor was discarded.
        super().__init__(*args, **kwargs)
        self.live_server = None  # live server set via the handler, if any
        self.freezer = None  # active time_machine traveller, if any
        self.patchers = {}  # mock patchers stopped on cleanup
        self.requests_mockers = {}  # requests mockers stopped on cleanup

    def set_default_freezer(self):
        """Freeze time at DEFAULT_DATETIME until cleanup() is called."""
        freezer = time_machine.travel(DEFAULT_DATETIME, tick=False)
        freezer.start()
        self.freezer = freezer

    def cleanup(self):
        """Stop the freezer and all mockers, and remove media files."""
        if self.freezer:
            self.freezer.stop()
        shutil.rmtree(settings.MEDIA_ROOT, ignore_errors=True)
        # Plain loops instead of side-effect list comprehensions.
        for patcher in self.patchers.values():
            patcher.stop()
        for mocker in self.requests_mockers.values():
            mocker.stop()
43 |
44 |
class ProviderStatesHandler:
    """A Provider states handler."""

    def __init__(self):
        """Initialize the instance."""
        self.handlers = []
        self.context = ProviderStatesContext()

    def register(self, state_matcher):
        """Register the given function as a handler."""

        def outer_wrapper(function):
            """Register and return the function."""
            try:
                pattern = re.compile(state_matcher, re.IGNORECASE)
            except re.error as e:
                raise ValueError(f"Invalid pattern provided: {state_matcher}.") from e
            self.handlers.append((pattern, function))
            return function

        return outer_wrapper

    def set_live_server(self, live_server):
        """Set the live server in context."""
        self.context.live_server = live_server

    def handle(self, state, context, **params):
        """Handle the given provider state."""
        for pattern, function in self.handlers:
            try:
                # Named groups from the pattern are forwarded as keyword args.
                if match := pattern.match(state):
                    function(context=context, **params, **match.groupdict())
                    return
            except (StopIteration, TypeError):
                # Give up on handler errors and report the state as missing.
                break
        raise ProviderStateMissing(state)

    def tear_down(self):
        """Clean up after handling states."""
        self.context.cleanup()

    def run(self, provider_state_name, **params):
        """Set up the given provider state."""
        self.context.set_default_freezer()
        # A compound state such as "a exists / b exists" is handled in order.
        for state_name in provider_state_name.split("/"):
            self.handle(state_name.strip(), self.context, **params)
93 |
94 |
# Module-level singleton shared by the pact state modules and the verifier.
handler = ProviderStatesHandler()
96 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_dirname}}/pacts/verify_pacts.py:
--------------------------------------------------------------------------------
"""Pact verification tests."""

from django.utils.module_loading import autodiscover_modules

from pacts.handler import handler

# Import every app's "tests.pact_states" module so that their provider states
# are registered on the shared handler before verification starts.
autodiscover_modules("tests.pact_states")


def test_pacts(live_server, pact_verifier):
    """Test pacts."""
    # `live_server` and `pact_verifier` are pytest fixtures; each pact
    # interaction is replayed against the live server, with `handler.run`
    # preparing the provider state before each one.
    pact_verifier.verify(live_server.url, handler.run)
    handler.tear_down()
14 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_dirname}}/pyproject.toml:
--------------------------------------------------------------------------------
[tool.ruff]
target-version = "py312"

[tool.ruff.lint]
# D203/D212 conflict with the D211/D213 conventions enforced by the D ruleset.
ignore = [
    "D203",
    "D212",
]
select = [
    "A", # flake8-builtins
    "B", # flake8-bugbear
    "C4", # flake8-comprehensions
    "C90", # McCabe cyclomatic complexity
    "D", # pydocstyle
    "DJ", # flake8-django
    "E", # pycodestyle errors
    "F", # Pyflakes
    "I", # isort
    "Q", # flake8-quotes
    "UP", # pyupgrade
    "W", # pycodestyle warnings
]

[tool.ruff.per-file-ignores]
# asgi.py/wsgi.py need imports after django setup calls (E402).
"{{ cookiecutter.django_settings_dirname }}/*sgi.py" = [
    "E402",
]
# Auto-generated migrations are exempt from docstring requirements.
"*/migrations/*.py" = [
    "D100",
    "D101",
    "D102",
    "D104",
]

[tool.ruff.format]
docstring-code-format = true

[tool.ruff.isort]
known-first-party = [
    "{{ cookiecutter.django_settings_dirname }}",
]
known-third-party = [
    "django",
]

[tool.pytest.ini_options]
DJANGO_CONFIGURATION = "Testing"
DJANGO_SETTINGS_MODULE = "{{ cookiecutter.django_settings_dirname }}.settings"

[tool.coverage.html]
title = "{{cookiecutter.project_name}} - Coverage"

[tool.coverage.report]
# Full coverage is required for the test suite to pass.
fail_under = 100
show_missing = true

[tool.coverage.run]
branch = true
# Tests run with --parallel, so coverage data is combined from subprocesses.
concurrency = ["multiprocessing"]
data_file = ".coverages/.coverage"
disable_warnings = ["no-data-collected"]
omit = [
    ".venv/*",
    "{{cookiecutter.django_settings_dirname}}/asgi.py",
    "{{cookiecutter.django_settings_dirname}}/workers.py",
    "{{cookiecutter.django_settings_dirname}}/wsgi.py",
    "manage.py",
    "pacts/*",
    "venv/*",
    "*/pact_states.py",
]
source = ["."]

[tool.mypy]
enable_error_code = [
    "truthy-bool",
]
ignore_missing_imports = true
python_version = "3.12"

[tool.bandit]
exclude_dirs = [
    ".venv",
    "tests",
    "features"
]
87 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_dirname}}/requirements/base.in:
--------------------------------------------------------------------------------
# Requirements shared by every environment (database driver).
psycopg[c]~=3.1.0
2 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_dirname}}/requirements/common.in:
--------------------------------------------------------------------------------
# Requirements shared by local, remote and test environments.
-r base.in
django-configurations[cache,database,email]~=2.5.0
django~=5.0.0
4 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_dirname}}/requirements/local.in:
--------------------------------------------------------------------------------
# Development-only requirements (debugging, tooling, pre-commit).
-r test.in
django-debug-toolbar~=4.2.0
django-extensions~=3.2.0
graphviz~=0.20.0
ipython~=8.20.0
pip-tools~=7.3.0
pre-commit~=3.6.0
pydot~=2.0.0
python-dotenv~=1.0.0
rope~=1.11.0
11 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_dirname}}/requirements/remote.in:
--------------------------------------------------------------------------------
# Requirements for deployed (remote) environments.
-r common.in
argon2-cffi~=23.1.0
{% if "s3" in cookiecutter.media_storage %}django-storages[boto3]~=1.14.0
{% endif %}gunicorn~=22.0.0
{% if cookiecutter.use_redis == "true" %}redis~=5.0.0
{% endif %}sentry-sdk~=1.39.0
uvicorn[standard]~=0.25.0
whitenoise[brotli]~=6.6.0
9 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_dirname}}/requirements/test.in:
--------------------------------------------------------------------------------
# Requirements for running the test, lint and contract-verification suites.
-r common.in
bandit[toml]~=1.7.0
behave-django~=1.4.0
coverage[toml]~=7.4.0
mypy~=1.8.0
pactman~=2.30.0
pip-audit~=2.6.0
pytest-django~=4.7.0
pytest-dotenv~=0.5.0
ruff~=0.1.0
tblib~=3.0.0
time-machine~=2.13.0
13 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_dirname}}/scripts/behave.sh:
--------------------------------------------------------------------------------
#!/usr/bin/env bash

set -euo pipefail

# Run the BDD feature suite (behave-django) with the Testing configuration.
python3 -m manage behave --configuration=Testing --format=progress --noinput --simple
6 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_dirname}}/scripts/check.sh:
--------------------------------------------------------------------------------
#!/usr/bin/env bash

set -euo pipefail

# Static-analysis suite: Django system checks, pending-migration check,
# formatting, linting, typing, security scan and dependency audit.
# Stops at the first failing tool (set -e).
python3 -m manage check
python3 -m manage makemigrations --dry-run --check
python3 -m ruff format --check .
python3 -m ruff check .
python3 -m mypy --no-site-packages .
python3 -m bandit --configfile pyproject.toml --quiet --recursive .
python3 -m pip_audit --require-hashes --disable-pip --requirement requirements/remote.txt
12 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_dirname}}/scripts/ci_pact.sh:
--------------------------------------------------------------------------------
#!/usr/bin/env sh

set -e

# When Vault is configured, exchange the CI JWT for a Vault token and fetch
# the Pact broker credentials before invoking the pact-broker CLI.
if [ "${VAULT_ADDR}" != "" ]; then
    apk update && apk add curl jq

    # Authenticate with the GitLab JWT auth method using the "pact" role.
    vault_token=$(curl --silent --request POST --data "role=pact" --data "jwt=${VAULT_ID_TOKEN}" "${VAULT_ADDR%/}"/v1/auth/gitlab-jwt/login | jq -r .auth.client_token)

    pact_secrets=$(curl --silent --header "X-Vault-Token: ${vault_token}" "${VAULT_ADDR%/}"/v1/"${PROJECT_SLUG}"/pact | jq -r .data)

    PACT_BROKER_BASE_URL=$(echo "${pact_secrets}" | jq -r .pact_broker_base_url)
    PACT_BROKER_PASSWORD=$(echo "${pact_secrets}" | jq -r .pact_broker_password)
    PACT_BROKER_USERNAME=$(echo "${pact_secrets}" | jq -r .pact_broker_username)

    export PACT_BROKER_BASE_URL
    export PACT_BROKER_PASSWORD
    export PACT_BROKER_USERNAME
fi

# Delegate to the pact-broker image entrypoint with the original arguments.
docker-entrypoint.sh pact-broker "${@}"
22 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_dirname}}/scripts/ci_sentry.sh:
--------------------------------------------------------------------------------
#!/usr/bin/env sh

set -e

apk update && apk add git

# The CI checkout is owned by a different uid than the job user; mark it safe
# so git commands (used by sentry-cli set-commits) do not refuse to run.
git config --global --add safe.directory "${PROJECT_DIR}"

# When Vault is configured, fetch the Sentry credentials with the CI JWT.
if [ "${VAULT_ADDR}" != "" ]; then
    apk add curl jq

    vault_token=$(curl --silent --request POST --data "role=${VAULT_ROLE}" --data "jwt=${VAULT_ID_TOKEN}" "${VAULT_ADDR%/}"/v1/auth/gitlab-jwt/login | jq -r .auth.client_token)

    SENTRY_AUTH_TOKEN=$(curl --silent --header "X-Vault-Token: ${vault_token}" "${VAULT_ADDR%/}"/v1/"${PROJECT_SLUG}"/envs/"${ENV_NAME}"/sentry | jq -r .data.sentry_auth_token)
    SENTRY_DSN=$(curl --silent --header "X-Vault-Token: ${vault_token}" "${VAULT_ADDR%/}"/v1/"${PROJECT_SLUG}"/envs/"${ENV_NAME}"/"${SERVICE_SLUG}"/sentry | jq -r .data.sentry_dsn)
    export SENTRY_AUTH_TOKEN
    export SENTRY_DSN
fi

# release: create and finalize a Sentry release for VERSION_REF
# success: record a deploy with its duration (RELEASE_END - RELEASE_START)
# failure: send an event reporting the failed deploy
case "${1}" in
  "release")
    sentry-cli releases new "${VERSION_REF}" -p "${SENTRY_PROJECT_NAME}" --log-level=debug;
    sentry-cli releases set-commits "${VERSION_REF}" --auto --ignore-missing;
    sentry-cli releases finalize "${VERSION_REF}";
    ;;
  "success")
    sentry-cli releases deploys "${VERSION_REF}" new -e "${CI_ENVIRONMENT_NAME}" -t $((RELEASE_END-RELEASE_START));
    ;;
  "failure")
    sentry-cli send-event -m "Deploy to ${CI_ENVIRONMENT_NAME} failed.";
    ;;
esac
33 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_dirname}}/scripts/coverage.sh:
--------------------------------------------------------------------------------
#!/usr/bin/env bash

set -euo pipefail

# Run the Django test suite under coverage; --parallel spawns subprocesses,
# whose partial data files are combined later by report.sh.
python3 -m coverage run manage.py test --configuration=Testing --noinput --parallel --shuffle --buffer
6 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_dirname}}/scripts/deploy.sh:
--------------------------------------------------------------------------------
#!/usr/bin/env sh

set -e

# init.sh must be sourced to let it export env vars
. "${PROJECT_DIR}"/scripts/deploy/init.sh

# Validate, plan (also producing a JSON summary), then apply unattended.
sh "${PROJECT_DIR}"/scripts/deploy/terraform.sh validate

sh "${PROJECT_DIR}"/scripts/deploy/terraform.sh plan-json

sh "${PROJECT_DIR}"/scripts/deploy/terraform.sh apply -auto-approve
13 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_dirname}}/scripts/deploy/gitlab.sh:
--------------------------------------------------------------------------------
#!/usr/bin/env sh

# Configure the Terraform HTTP backend against GitLab-managed state.
# Must be sourced: it only exports environment variables.

set -e

# If TF_USERNAME is unset then default to GITLAB_USER_LOGIN
TF_USERNAME="${TF_USERNAME:-${GITLAB_USER_LOGIN}}"
# If TF_PASSWORD is unset then default to gitlab-ci-token/CI_JOB_TOKEN
if [ -z "${TF_PASSWORD}" ]; then
  TF_USERNAME="gitlab-ci-token"
  TF_PASSWORD="${CI_JOB_TOKEN}"
fi
# If TF_ADDRESS is unset but TF_STATE_NAME is provided, then default to GitLab backend in current project
if [ -n "${TF_STATE_NAME}" ]; then
  TF_ADDRESS="${TF_ADDRESS:-${CI_API_V4_URL}/projects/${CI_PROJECT_ID}/terraform/state/${TF_STATE_NAME}}"
fi
# Set variables for the HTTP backend to default to TF_* values
export TF_HTTP_ADDRESS="${TF_HTTP_ADDRESS:-${TF_ADDRESS}}"
export TF_HTTP_LOCK_ADDRESS="${TF_HTTP_LOCK_ADDRESS:-${TF_ADDRESS}/lock}"
export TF_HTTP_LOCK_METHOD="${TF_HTTP_LOCK_METHOD:-POST}"
export TF_HTTP_UNLOCK_ADDRESS="${TF_HTTP_UNLOCK_ADDRESS:-${TF_ADDRESS}/lock}"
export TF_HTTP_UNLOCK_METHOD="${TF_HTTP_UNLOCK_METHOD:-DELETE}"
export TF_HTTP_USERNAME="${TF_HTTP_USERNAME:-${TF_USERNAME}}"
export TF_HTTP_PASSWORD="${TF_HTTP_PASSWORD:-${TF_PASSWORD}}"
export TF_HTTP_RETRY_WAIT_MIN="${TF_HTTP_RETRY_WAIT_MIN:-5}"
# Expose Gitlab specific variables to terraform since no -tf-var is available
# Usable in the .tf file as variable "CI_JOB_ID" { type = string } etc
export TF_VAR_CI_JOB_ID="${TF_VAR_CI_JOB_ID:-${CI_JOB_ID}}"
export TF_VAR_CI_COMMIT_SHA="${TF_VAR_CI_COMMIT_SHA:-${CI_COMMIT_SHA}}"
export TF_VAR_CI_JOB_STAGE="${TF_VAR_CI_JOB_STAGE:-${CI_JOB_STAGE}}"
export TF_VAR_CI_PROJECT_ID="${TF_VAR_CI_PROJECT_ID:-${CI_PROJECT_ID}}"
export TF_VAR_CI_PROJECT_NAME="${TF_VAR_CI_PROJECT_NAME:-${CI_PROJECT_NAME}}"
export TF_VAR_CI_PROJECT_NAMESPACE="${TF_VAR_CI_PROJECT_NAMESPACE:-${CI_PROJECT_NAMESPACE}}"
export TF_VAR_CI_PROJECT_PATH="${TF_VAR_CI_PROJECT_PATH:-${CI_PROJECT_PATH}}"
export TF_VAR_CI_PROJECT_URL="${TF_VAR_CI_PROJECT_URL:-${CI_PROJECT_URL}}"
35 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_dirname}}/scripts/deploy/init.sh:
--------------------------------------------------------------------------------
#!/usr/bin/env sh

# Prepare Terraform CLI variables and backend credentials for a deploy.
# Must be sourced: it only exports environment variables.

set -e

export TF_VAR_env_slug="${ENV_SLUG}"
export TF_VAR_project_slug="${PROJECT_SLUG}"
export TF_VAR_stack_slug="${STACK_SLUG}"

# Base var-file always passed to plan/destroy.
terraform_cli_args="-var-file=${TERRAFORM_VARS_DIR%/}/.tfvars"

# Optional extra var-file; created empty if missing so terraform never fails
# on a non-existent path.
if [ "${TERRAFORM_EXTRA_VAR_FILE}" != "" ]; then
  extra_var_file="${TERRAFORM_VARS_DIR%/}/${TERRAFORM_EXTRA_VAR_FILE}"
  touch "${extra_var_file}"
  terraform_cli_args="${terraform_cli_args} -var-file=${extra_var_file}"
fi

# With Vault enabled, vault.sh writes the secrets to a tfvars.json var-file.
if [ "${VAULT_ADDR}" != "" ]; then
  . "${PROJECT_DIR}"/scripts/deploy/vault.sh
  terraform_cli_args="${terraform_cli_args} -var-file=${TERRAFORM_VARS_DIR%/}/vault-secrets.tfvars.json"
fi

export TF_CLI_ARGS_destroy="${terraform_cli_args}"
export TF_CLI_ARGS_plan="${terraform_cli_args}"

# Source the backend-specific credential setup.
case "${TERRAFORM_BACKEND}" in
  "gitlab")
    . "${PROJECT_DIR}"/scripts/deploy/gitlab.sh
    ;;
  "terraform-cloud")
    . "${PROJECT_DIR}"/scripts/deploy/terraform-cloud.sh
    ;;
esac
33 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_dirname}}/scripts/deploy/terraform-cloud.sh:
--------------------------------------------------------------------------------
#!/usr/bin/env sh

# Write a Terraform CLI config file with the Terraform Cloud API token.
# Must be sourced: it exports TF_CLI_CONFIG_FILE.

set -e

export TF_CLI_CONFIG_FILE="${TF_ROOT}/cloud.tfc"
cat << EOF > "${TF_CLI_CONFIG_FILE}"
{
  "credentials": {
    "app.terraform.io": {
      "token": "${TFC_TOKEN}"
    }
  }
}
EOF
15 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_dirname}}/scripts/deploy/terraform.sh:
--------------------------------------------------------------------------------
#!/usr/bin/env sh

# Thin wrapper around the terraform CLI: runs `terraform init` as needed and
# adds non-interactive flags; extra arguments are passed through.

set -e

if [ "${DEBUG_OUTPUT}" = "true" ]; then
  set -x
fi

plan_cache="plan.cache"
plan_json="plan.json"

# jq program summarizing a plan as create/update/delete resource counts
# (the format consumed by GitLab's terraform report widget).
JQ_PLAN='
  (
    [.resource_changes[]?.change.actions?] | flatten
  ) | {
    "create":(map(select(.=="create")) | length),
    "update":(map(select(.=="update")) | length),
    "delete":(map(select(.=="delete")) | length)
  }
'

# Use terraform automation mode (will remove some verbose unneeded messages)
export TF_IN_AUTOMATION=true

# Initialize the working directory; Terraform Cloud rejects -reconfigure.
init() {
  cd "${TF_ROOT}"
  if [ "${TERRAFORM_BACKEND}" = "terraform-cloud" ]; then
    terraform init "${@}" -input=false
  else
    terraform init "${@}" -input=false -reconfigure
  fi
}

case "${1}" in
  "apply")
    init
    terraform "${@}" -input=false "${plan_cache}"
    ;;
  "destroy")
    init
    terraform "${@}" -auto-approve
    ;;
  "fmt")
    terraform "${@}" -check -diff -recursive
    ;;
  "init")
    # shift argument list „one to the left“ to not call 'terraform init init'
    shift
    init "${@}"
    ;;
  "plan")
    init
    terraform "${@}" -input=false -out="${plan_cache}"
    ;;
  "plan-json")
    # Plan, then emit the JSON create/update/delete summary.
    init
    terraform plan -input=false -out="${plan_cache}"
    terraform show -json "${plan_cache}" | \
      jq -r "${JQ_PLAN}" \
      > "${plan_json}"
    ;;
  "validate")
    # Validation does not need backend state.
    init -backend=false
    terraform "${@}"
    ;;
  *)
    terraform "${@}"
    ;;
esac
70 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_dirname}}/scripts/deploy/vault.sh:
--------------------------------------------------------------------------------
#!/usr/bin/env sh

# Fetch deploy secrets from Vault and write them to a tfvars.json var-file.
# Must be sourced when using Terraform Cloud: it exports TFC_TOKEN.

set -e

# Authenticate with the GitLab JWT auth method.
vault_token=$(curl --silent --request POST --data "role=${VAULT_ROLE}" --data "jwt=${VAULT_ID_TOKEN}" "${VAULT_ADDR%/}"/v1/auth/gitlab-jwt/login | jq -r .auth.client_token)

secrets_data="{}"

# Merge every secret path into a single JSON object; a missing/unreadable
# path contributes an empty object instead of failing the deploy.
for secret_path in ${VAULT_SECRETS}
do
  secret_data=$(curl --silent --header "X-Vault-Token: ${vault_token}" "${VAULT_ADDR%/}"/v1/"${PROJECT_SLUG}"/"${VAULT_SECRETS_PREFIX}"/"${secret_path}" | jq -r '.data // {}') || secret_data="{}"
  secrets_data=$(echo "${secrets_data}" | jq --argjson new_data "${secret_data}" '. * $new_data')
done

echo "${secrets_data}" > "${TERRAFORM_VARS_DIR%/}"/vault-secrets.tfvars.json

# With the Terraform Cloud backend, also mint a short-lived TFC token.
if [ "${TERRAFORM_BACKEND}" = "terraform-cloud" ]; then
  TFC_TOKEN=$(curl --silent --header "X-Vault-Token: ${vault_token}" "${VAULT_ADDR%/}"/v1/"${PROJECT_SLUG}"-tfc/creds/default | jq -r .data.token)
  export TFC_TOKEN
fi
21 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_dirname}}/scripts/entrypoint.sh:
--------------------------------------------------------------------------------
#!/usr/bin/env bash

set -euo pipefail

# Apply pending migrations, then replace this shell with the given command
# (exec keeps the command as PID 1 so it receives container signals).
python3 -m manage migrate --noinput
exec "${@}"
7 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_dirname}}/scripts/pact_verify.sh:
--------------------------------------------------------------------------------
#!/usr/bin/env bash

set -euo pipefail

# Verify the provider against consumer pacts via pytest/pactman; extra
# arguments (e.g. --pact-verify-consumer-tag=...) are passed through.
python3 -m pytest --dc=Testing --disable-warnings \
  --pact-provider-name="${PACT_PROVIDER_NAME}" \
  "${@}" pacts/verify_pacts.py
8 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_dirname}}/scripts/report.sh:
--------------------------------------------------------------------------------
#!/usr/bin/env bash

# NOTE(review): `-e` looks intentionally omitted so the html/report steps run
# even if `combine` has nothing to combine — confirm before adding it.
set -uo pipefail

# Merge the parallel coverage data files, then emit HTML and console reports.
python3 -m coverage combine
python3 -m coverage html
python3 -m coverage report
8 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_dirname}}/scripts/test.sh:
--------------------------------------------------------------------------------
#!/usr/bin/env bash

set -euo pipefail

# Full test pipeline: static checks, unit tests with coverage, BDD features,
# then the combined coverage report. Aborts on the first failing step.
./scripts/check.sh
./scripts/coverage.sh
./scripts/behave.sh
./scripts/report.sh
9 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_dirname}}/terraform/digitalocean-k8s/main.tf:
--------------------------------------------------------------------------------
/* Locals */

locals {
  # Short environment slug derived from the full environment name.
  environment_slug = { development = "dev", staging = "stage", production = "prod" }[lower(var.environment)]

  namespace = "${var.project_slug}-${local.environment_slug}"
}

terraform {
  required_providers {
    digitalocean = {
      source  = "digitalocean/digitalocean"
      version = "~> 2.36"
    }
    kubernetes = {
      source  = "hashicorp/kubernetes"
      version = "~> 2.27"
    }
    random = {
      source  = "hashicorp/random"
      version = "~> 3.6"
    }
  }
}

/* Providers */

provider "digitalocean" {
  token = var.digitalocean_token
}

# The kubernetes provider is configured from the DigitalOcean cluster's
# kubeconfig data source below.
provider "kubernetes" {
  host  = data.digitalocean_kubernetes_cluster.main.endpoint
  token = data.digitalocean_kubernetes_cluster.main.kube_config[0].token
  cluster_ca_certificate = base64decode(
    data.digitalocean_kubernetes_cluster.main.kube_config[0].cluster_ca_certificate
  )
}

/* Data Sources */

# The "main" stack uses the bare project cluster name; other stacks include
# the stack slug in the cluster name.
data "digitalocean_kubernetes_cluster" "main" {
  name = var.stack_slug == "main" ? "${var.project_slug}-k8s-cluster" : "${var.project_slug}-${var.stack_slug}-k8s-cluster"
}

/* Deployment */

module "deployment" {
  source = "../modules/kubernetes/deployment"

  environment      = var.environment
  environment_slug = local.environment_slug

  namespace = local.namespace

  project_slug = var.project_slug
  project_url  = var.project_url

  service_container_image = var.service_container_image
  service_container_port  = var.service_container_port
  service_limits_cpu      = var.service_limits_cpu
  service_limits_memory   = var.service_limits_memory
  service_replicas        = var.service_replicas
  service_requests_cpu    = var.service_requests_cpu
  service_requests_memory = var.service_requests_memory
  service_slug            = var.service_slug

  media_storage = var.media_storage

  cache_url                          = var.cache_url
  django_additional_allowed_hosts    = var.django_additional_allowed_hosts
  django_admins                      = var.django_admins
  django_default_from_email          = var.django_default_from_email
  django_disable_server_side_cursors = var.django_disable_server_side_cursors
  django_server_email                = var.django_server_email
  email_url                          = var.email_url
  s3_access_id                       = var.s3_access_id
  s3_bucket_name                     = var.s3_bucket_name
  s3_file_overwrite                  = var.s3_file_overwrite
  s3_host                            = var.s3_host
  s3_region                          = var.s3_region
  s3_secret_key                      = var.s3_secret_key
  sentry_dsn                         = var.sentry_dsn
  use_redis                          = var.use_redis
  web_concurrency                    = var.web_concurrency

  extra_config_values = var.extra_config_values
  extra_secret_values = var.extra_secret_values
}
88 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_dirname}}/terraform/digitalocean-k8s/variables.tf:
--------------------------------------------------------------------------------
# Input variables for the DigitalOcean Kubernetes stack.

variable "cache_url" {
  type = string
  description = "A Django cache URL override."
  default = ""
  sensitive = true
}

variable "digitalocean_token" {
  description = "The Digital Ocean access token."
  type = string
  sensitive = true
}

variable "django_additional_allowed_hosts" {
  type = string
  description = "Additional entries of the DJANGO_ALLOWED_HOSTS environment variable ('127.0.0.1', 'localhost', the service slug and the project host are included by default)."
  default = ""
}

variable "django_admins" {
  type = string
  description = "The value of the DJANGO_ADMINS environment variable."
  default = ""
}

# NOTE(review): not referenced by main.tf in this stack — the deployment
# module hard-codes DJANGO_CONFIGURATION to "Remote". Confirm before removing.
variable "django_configuration" {
  type = string
  description = "The value of the DJANGO_CONFIGURATION environment variable."
  default = "Remote"
}

variable "django_default_from_email" {
  type = string
  description = "The value of the DJANGO_DEFAULT_FROM_EMAIL environment variable."
  default = ""
}

variable "django_disable_server_side_cursors" {
  type = string
  description = "The value of the DJANGO_DISABLE_SERVER_SIDE_CURSORS environment variable."
  default = "False"
}

variable "django_server_email" {
  type = string
  description = "The value of the DJANGO_SERVER_EMAIL environment variable."
  default = ""
}

variable "email_url" {
  type = string
  description = "The email server connection url."
  default = ""
  sensitive = true
}

variable "environment" {
  type = string
  description = "The name of the deploy environment, e.g. \"Production\"."
}

variable "extra_config_values" {
  type = map(string)
  description = "Additional config map environment variables."
  default = {}
}

variable "extra_secret_values" {
  type = map(string)
  description = "Additional secret environment variables."
  default = {}
  sensitive = true
}

variable "media_storage" {
  description = "The media storage solution."
  type = string
}

variable "project_slug" {
  description = "The project slug."
  type = string
}

variable "project_url" {
  description = "The project url."
  type = string
}

variable "s3_access_id" {
  description = "The S3 bucket access key ID."
  type = string
  default = ""
  sensitive = true
}

variable "s3_bucket_name" {
  description = "The S3 bucket name."
  type = string
  default = ""
}

variable "s3_file_overwrite" {
  description = "The S3 bucket file overwriting setting."
  type = string
  default = "False"
}

variable "s3_host" {
  description = "The S3 bucket host."
  type = string
  default = ""
}

variable "s3_region" {
  description = "The S3 bucket region."
  type = string
  default = ""
}

variable "s3_secret_key" {
  description = "The S3 bucket secret access key."
  type = string
  default = ""
  sensitive = true
}

variable "sentry_dsn" {
  description = "The Sentry project DSN."
  type = string
  default = ""
  sensitive = true
}

variable "service_container_image" {
  description = "The service container image."
  type = string
}

variable "service_container_port" {
  description = "The service container port."
  type = string
  default = "{{ cookiecutter.internal_service_port }}"
}

variable "service_limits_cpu" {
  description = "The service limits cpu value."
  type = string
}

variable "service_limits_memory" {
  description = "The service limits memory value."
  type = string
}

variable "service_replicas" {
  description = "The desired numbers of replicas to deploy."
  type = number
  default = 1
}

variable "service_requests_cpu" {
  description = "The service requests cpu value."
  type = string
}

variable "service_requests_memory" {
  description = "The service requests memory value."
  type = string
}

variable "service_slug" {
  description = "The service slug."
  type = string
}

variable "stack_slug" {
  description = "The slug of the stack where the service is deployed."
  type = string
}

variable "use_redis" {
  description = "Tell if a Redis service is used."
  type = bool
  default = false
}

variable "web_concurrency" {
  description = "The desired number of gunicorn workers."
  type = string
  default = ""
}
193 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_dirname}}/terraform/digitalocean-k8s/{% if cookiecutter.terraform_backend == "gitlab" %}backend.tf{% endif %}:
--------------------------------------------------------------------------------
# GitLab-managed Terraform state: the HTTP backend is intentionally empty —
# its connection settings are supplied at `terraform init` time (e.g. via
# -backend-config arguments in CI).
terraform {
  backend "http" {
  }
}
5 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_dirname}}/terraform/digitalocean-k8s/{% if cookiecutter.terraform_backend == "terraform-cloud" %}cloud.tf{% endif %}:
--------------------------------------------------------------------------------
# Terraform Cloud backend: workspaces are selected by the project tag.
terraform {
  cloud {
    organization = "{{ cookiecutter.terraform_cloud_organization }}"

    workspaces {
      tags = ["project:{{ cookiecutter.project_slug }}"]
    }
  }
}
10 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_dirname}}/terraform/modules/kubernetes/cronjob/main.tf:
--------------------------------------------------------------------------------
# Provider version requirements for the CronJob module.
terraform {
  required_providers {
    kubernetes = {
      source = "hashicorp/kubernetes"
      version = "~> 2.27"
    }
  }
}
9 |
/* Cron Job */

# Kubernetes CronJob running the given container command on a schedule.
# Environment is injected from the referenced ConfigMaps/Secrets; the media
# volume is mounted only when a persistent volume claim name is provided.
resource "kubernetes_cron_job_v1" "main" {
  metadata {
    name = var.name
    namespace = var.namespace
  }

  spec {
    schedule = var.schedule
    job_template {
      metadata {}
      spec {
        template {
          metadata {}
          spec {
            # Optional media volume, created only when a claim name is set.
            dynamic "volume" {
              for_each = toset(var.media_persistent_volume_claim_name != "" ? [1] : [])

              content {
                name = "media"
                persistent_volume_claim {
                  claim_name = var.media_persistent_volume_claim_name
                }
              }
            }
            # Pull credentials for the private container registry.
            image_pull_secrets {
              name = "regcred"
            }
            container {
              name = "main"
              image = var.container_image
              command = var.container_command
              # Mount mirroring the optional media volume above.
              dynamic "volume_mount" {
                for_each = toset(var.media_persistent_volume_claim_name != "" ? [1] : [])

                content {
                  name = "media"
                  mount_path = var.media_mount_path
                }
              }
              # Environment variables from each referenced ConfigMap.
              dynamic "env_from" {
                for_each = toset(var.config_maps)

                content {
                  config_map_ref {
                    name = env_from.key
                  }
                }
              }
              # Environment variables from each referenced Secret.
              dynamic "env_from" {
                for_each = toset(var.secrets)

                content {
                  secret_ref {
                    name = env_from.key
                  }
                }
              }
            }
            restart_policy = "OnFailure"
          }
        }
      }
    }
  }
}
77 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_dirname}}/terraform/modules/kubernetes/cronjob/variables.tf:
--------------------------------------------------------------------------------
# Input variables for the Kubernetes CronJob module.

variable "config_maps" {
  description = "The CronJob ConfigMap names."
  type = list(string)
  default = []
}

variable "container_command" {
  description = "The CronJob container command."
  type = list(string)
}

variable "container_image" {
  description = "The CronJob container image."
  type = string
}

variable "media_mount_path" {
  description = "The mount path of the media directory inside the container."
  type = string
  default = "/app/media"
}

variable "media_persistent_volume_claim_name" {
  description = "The media persistent volume claim name."
  type = string
  default = ""
}

variable "name" {
  type = string
  description = "The CronJob name."
}

variable "namespace" {
  description = "The Kubernetes namespace."
  type = string
}

variable "schedule" {
  description = "The CronJob schedule."
  type = string
}

variable "secrets" {
  description = "The CronJob Secret names."
  type = list(string)
  default = []
}
49 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_dirname}}/terraform/modules/kubernetes/deployment/main.tf:
--------------------------------------------------------------------------------
locals {
  # Labels shared by the Deployment selector and the pod template.
  service_labels = {
    component = var.service_slug
    environment = var.environment
    project = var.project_slug
    terraform = "true"
  }

  # Host part of the project URL (first capture of the first match).
  project_host = regexall("https?://([^/]+)", var.project_url)[0][0]

  # DJANGO_ALLOWED_HOSTS: the documented defaults ("127.0.0.1", "localhost",
  # the project host and the service slug) plus any comma-separated extras.
  # Fix: the previous coalesce() dropped "127.0.0.1" and "localhost" whenever
  # additional hosts were supplied, contradicting the variable's description
  # ("... included by default"); compact() strips the empty entry produced by
  # splitting an empty override.
  django_allowed_hosts = join(
    ",",
    setunion(
      compact(split(",", var.django_additional_allowed_hosts)),
      ["127.0.0.1", "localhost", local.project_host, var.service_slug]
    )
  )

  # Externally-created Secrets every pod mounts (Redis only when enabled).
  additional_secrets = var.use_redis ? ["database-url", "redis-url"] : ["database-url"]

  # CACHE_URL: explicit override wins; otherwise derive a per-environment
  # Redis cache URL when Redis is enabled; empty means "no CACHE_URL env var".
  cache_url = var.cache_url != "" ? var.cache_url : var.use_redis ? "$(REDIS_URL)?key_prefix=${var.environment_slug}" : ""

  # True for any media_storage value containing "s3".
  use_s3 = length(regexall("s3", var.media_storage)) > 0
}
25 |
# Provider version requirements for the deployment module.
terraform {
  required_providers {
    kubernetes = {
      source = "hashicorp/kubernetes"
      version = "~> 2.27"
    }
    random = {
      source = "hashicorp/random"
      version = "~> 3.6"
    }
  }
}
38 |
/* Passwords */

# 50-character Django secret key, generated once and persisted in state.
resource "random_password" "django_secret_key" {
  length = 50
}
44 |
/* Secrets */

# Secret environment variables for the service: caller-supplied extras merged
# with the generated secret key and connection URLs; empty values are dropped,
# and the S3 credentials are added only for s3-backed media storage.
resource "kubernetes_secret_v1" "main" {

  metadata {
    name = "${var.service_slug}-env-vars"
    namespace = var.namespace
  }

  data = { for k, v in merge(
    var.extra_secret_values,
    {
      DJANGO_SECRET_KEY = random_password.django_secret_key.result
      EMAIL_URL = var.email_url
      SENTRY_DSN = var.sentry_dsn
    },
    local.use_s3 ? {
      AWS_ACCESS_KEY_ID = var.s3_access_id
      AWS_SECRET_ACCESS_KEY = var.s3_secret_key
    } : {}
  ) : k => v if v != "" }
}
67 |
/* Config Map */

# Non-secret environment variables for the service; empty values are dropped
# and the S3 settings are added only for s3-backed media storage.
# NOTE(review): DJANGO_CONFIGURATION is hard-coded to "Remote" although the
# root modules declare a django_configuration variable — confirm intent.
resource "kubernetes_config_map_v1" "main" {
  metadata {
    name = "${var.service_slug}-env-vars"
    namespace = var.namespace
  }

  data = { for k, v in merge(
    var.extra_config_values,
    {
      DJANGO_ADMINS = var.django_admins
      DJANGO_ALLOWED_HOSTS = local.django_allowed_hosts
      DJANGO_CONFIGURATION = "Remote"
      DJANGO_CSRF_TRUSTED_ORIGINS = var.project_url
      DJANGO_DEFAULT_FROM_EMAIL = var.django_default_from_email
      DJANGO_DISABLE_SERVER_SIDE_CURSORS = var.django_disable_server_side_cursors
      DJANGO_SERVER_EMAIL = var.django_server_email
      DJANGO_SESSION_COOKIE_DOMAIN = local.project_host
      INTERNAL_SERVICE_PORT = var.service_container_port
      SENTRY_ENVIRONMENT = var.environment
      WEB_CONCURRENCY = var.web_concurrency
    },
    local.use_s3 ? {
      AWS_S3_REGION_NAME = var.s3_region
      DJANGO_AWS_LOCATION = "${var.environment_slug}/media"
      DJANGO_AWS_S3_ENDPOINT_URL = var.media_storage == "digitalocean-s3" ? "https://${var.s3_region}.${var.s3_host}" : ""
      DJANGO_AWS_S3_FILE_OVERWRITE = var.s3_file_overwrite
      DJANGO_AWS_STORAGE_BUCKET_NAME = var.s3_bucket_name
    } : {}
  ) : k => v if v != "" }
}
100 |
/* Deployment */

# Django service Deployment: pods get env vars from the module's ConfigMap and
# Secret plus the externally-created "database-url" (and optionally
# "redis-url") Secrets, and mount a media volume when a claim name is given.
resource "kubernetes_deployment_v1" "main" {
  metadata {
    name = var.service_slug
    namespace = var.namespace
    annotations = {
      # Rolls the Deployment when referenced ConfigMaps/Secrets change.
      # NOTE(review): presumably handled by a Stakater Reloader controller
      # installed in the cluster — confirm it is deployed.
      "reloader.stakater.com/auto" = "true"
    }
  }
  spec {
    replicas = var.service_replicas
    selector {
      match_labels = local.service_labels
    }
    template {
      metadata {
        labels = local.service_labels
      }
      spec {
        # Optional media volume, created only when a claim name is set.
        dynamic "volume" {
          for_each = toset(var.media_persistent_volume_claim_name != "" ? [1] : [])

          content {
            name = "media"
            persistent_volume_claim {
              claim_name = var.media_persistent_volume_claim_name
            }
          }
        }
        # Pull credentials for the private container registry.
        image_pull_secrets {
          name = "regcred"
        }
        container {
          image = var.service_container_image
          name = var.service_slug
          resources {
            limits = {
              cpu = var.service_limits_cpu
              memory = var.service_limits_memory
            }
            requests = {
              cpu = var.service_requests_cpu
              memory = var.service_requests_memory
            }
          }
          port {
            container_port = var.service_container_port
          }
          # Mount mirroring the optional media volume above.
          dynamic "volume_mount" {
            for_each = toset(var.media_persistent_volume_claim_name != "" ? [1] : [])

            content {
              name = "media"
              mount_path = var.media_mount_path
            }
          }
          # Env vars from this module's ConfigMap and Secret.
          env_from {
            config_map_ref {
              name = kubernetes_config_map_v1.main.metadata[0].name
            }
          }
          env_from {
            secret_ref {
              name = kubernetes_secret_v1.main.metadata[0].name
            }
          }
          # Env vars from the externally-managed connection Secrets.
          dynamic "env_from" {
            for_each = toset(local.additional_secrets)
            content {
              secret_ref {
                name = env_from.key
              }
            }
          }
          # CACHE_URL only when a cache URL is configured/derived.
          dynamic "env" {
            for_each = toset(local.cache_url != "" ? [1] : [])

            content {
              name = "CACHE_URL"
              value = local.cache_url
            }
          }
        }
      }
    }
  }
}
189 |
/* Cluster IP Service */

# ClusterIP Service exposing the Deployment's pods inside the cluster on the
# container port.
resource "kubernetes_service_v1" "cluster_ip" {
  metadata {
    name = var.service_slug
    namespace = var.namespace
  }
  spec {
    type = "ClusterIP"
    selector = {
      component = var.service_slug
    }
    port {
      port = var.service_container_port
      target_port = var.service_container_port
    }
  }
}
208 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_dirname}}/terraform/modules/kubernetes/deployment/outputs.tf:
--------------------------------------------------------------------------------
# Name of the ConfigMap holding the service's non-secret environment variables.
output "config_map_name" {
  description = "The name of the Kubernetes ConfigMap associated with the Deployment."
  value = kubernetes_config_map_v1.main.metadata[0].name
}

# Name of the Secret holding the service's secret environment variables.
output "secret_name" {
  description = "The name of the Kubernetes Secret associated with the Deployment."
  value = kubernetes_secret_v1.main.metadata[0].name
}
10 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_dirname}}/terraform/modules/kubernetes/deployment/variables.tf:
--------------------------------------------------------------------------------
# Input variables for the Kubernetes deployment module.

variable "cache_url" {
  type = string
  description = "A Django cache URL override."
  default = ""
  sensitive = true
}

variable "django_additional_allowed_hosts" {
  type = string
  description = "Additional entries of the DJANGO_ALLOWED_HOSTS environment variable ('127.0.0.1', 'localhost', the service slug and the project host are included by default)."
  default = ""
}

variable "django_admins" {
  type = string
  description = "The value of the DJANGO_ADMINS environment variable."
  default = ""
}

variable "django_default_from_email" {
  type = string
  description = "The value of the DJANGO_DEFAULT_FROM_EMAIL environment variable."
  default = ""
}
25 |
# Fix: add the "False" default used by every other declaration of this
# variable (the root modules' variables.tf files), so callers may omit it —
# previously this input was effectively required. Backward-compatible.
variable "django_disable_server_side_cursors" {
  type = string
  description = "The value of the DJANGO_DISABLE_SERVER_SIDE_CURSORS environment variable."
  default = "False"
}
30 |
# Remaining deployment-module inputs (mail, environment, media, S3, Sentry
# and service sizing settings).

variable "django_server_email" {
  type = string
  description = "The value of the DJANGO_SERVER_EMAIL environment variable."
  default = ""
}

variable "email_url" {
  type = string
  description = "The email server connection url."
  default = ""
  sensitive = true
}

variable "environment" {
  type = string
  description = "The deploy environment name, e.g. \"Production\"."
}

variable "environment_slug" {
  type = string
  description = "The deploy environment slug, e.g. \"stage\"."
}

variable "extra_config_values" {
  type = map(string)
  description = "Additional config map environment variables."
  default = {}
}

variable "extra_secret_values" {
  type = map(string)
  description = "Additional secret environment variables."
  default = {}
  sensitive = true
}

variable "media_mount_path" {
  description = "The mount path of the media directory inside the container."
  type = string
  default = "/app/media"
}

variable "media_persistent_volume_claim_name" {
  description = "The media persistent volume claim name."
  type = string
  default = ""
}

variable "media_storage" {
  description = "The media storage solution."
  type = string
}

variable "namespace" {
  description = "The Kubernetes namespace."
  type = string
}

variable "project_slug" {
  description = "The project slug."
  type = string
}

variable "project_url" {
  description = "The project url."
  type = string
}

variable "s3_access_id" {
  description = "The S3 bucket access key ID."
  type = string
  default = ""
  sensitive = true
}

variable "s3_bucket_name" {
  description = "The S3 bucket name."
  type = string
  default = ""
}

variable "s3_file_overwrite" {
  description = "The S3 bucket file overwriting setting."
  type = string
  default = "False"
}

variable "s3_host" {
  description = "The S3 bucket host."
  type = string
  default = ""
}

variable "s3_region" {
  description = "The S3 bucket region."
  type = string
  default = ""
}

variable "s3_secret_key" {
  description = "The S3 bucket secret access key."
  type = string
  default = ""
  sensitive = true
}

variable "sentry_dsn" {
  description = "The Sentry project DSN."
  type = string
  default = ""
  sensitive = true
}

variable "service_container_image" {
  description = "The service container image."
  type = string
}

variable "service_container_port" {
  description = "The service container port."
  type = string
  default = "{{ cookiecutter.internal_service_port }}"
}

variable "service_limits_cpu" {
  description = "The service limits cpu value."
  type = string
}

variable "service_limits_memory" {
  description = "The service limits memory value."
  type = string
}

variable "service_replicas" {
  description = "The desired numbers of replicas to deploy."
  type = number
  default = 1
}

variable "service_requests_cpu" {
  description = "The service requests cpu value."
  type = string
}

variable "service_requests_memory" {
  description = "The service requests memory value."
  type = string
}

variable "service_slug" {
  description = "The service slug."
  type = string
}

variable "use_redis" {
  description = "Tell if a Redis service is used."
  type = bool
  default = false
}

variable "web_concurrency" {
  description = "The desired number of gunicorn workers."
  type = string
  default = ""
}
--------------------------------------------------------------------------------
/{{cookiecutter.project_dirname}}/terraform/other-k8s/main.tf:
--------------------------------------------------------------------------------
# Maps the environment name to its slug and derives the Kubernetes namespace.
locals {
  environment_slug = { development = "dev", staging = "stage", production = "prod" }[lower(var.environment)]

  namespace = "${var.project_slug}-${local.environment_slug}"
}

# Provider version requirements for the generic ("other") Kubernetes stack.
terraform {
  required_providers {
    kubernetes = {
      source = "hashicorp/kubernetes"
      version = "~> 2.27"
    }
    random = {
      source = "hashicorp/random"
      version = "~> 3.6"
    }
  }
}
19 |
/* Providers */

# Kubernetes provider, configured from caller-supplied connection settings
# (unlike the digitalocean-k8s stack, which reads them from a data source).
provider "kubernetes" {
  host = var.kubernetes_host
  token = var.kubernetes_token
  cluster_ca_certificate = base64decode(var.kubernetes_cluster_ca_certificate)
}
27 |
/* Volumes */

# Host-path persistent volume backing media files; created only for the
# "local" media storage solution.
resource "kubernetes_persistent_volume_v1" "media" {
  count = var.media_storage == "local" ? 1 : 0

  metadata {
    name = "${local.namespace}-${var.service_slug}-media"
  }
  spec {
    capacity = {
      storage = var.media_persistent_volume_capacity
    }
    access_modes = ["ReadWriteOnce"]
    persistent_volume_source {
      host_path {
        path = var.media_persistent_volume_host_path
      }
    }
  }
}

# Claim bound to the media volume above; the claim capacity falls back to the
# volume capacity when not set explicitly.
resource "kubernetes_persistent_volume_claim_v1" "media" {
  count = var.media_storage == "local" ? 1 : 0

  metadata {
    name = "${var.service_slug}-media"
    namespace = local.namespace
  }
  spec {
    access_modes = ["ReadWriteOnce"]
    resources {
      requests = {
        storage = coalesce(
          var.media_persistent_volume_claim_capacity,
          var.media_persistent_volume_capacity
        )
      }
    }
    volume_name = kubernetes_persistent_volume_v1.media[0].metadata[0].name
  }
}
69 |
/* Deployment */

# Deploys the Django service through the shared Kubernetes deployment module,
# forwarding the root module inputs; the media claim is passed only for
# "local" media storage.
module "deployment" {
  source = "../modules/kubernetes/deployment"

  environment = var.environment
  environment_slug = local.environment_slug

  namespace = local.namespace

  project_slug = var.project_slug
  project_url = var.project_url

  service_container_image = var.service_container_image
  service_container_port = var.service_container_port
  service_limits_cpu = var.service_limits_cpu
  service_limits_memory = var.service_limits_memory
  service_replicas = var.service_replicas
  service_requests_cpu = var.service_requests_cpu
  service_requests_memory = var.service_requests_memory
  service_slug = var.service_slug

  media_storage = var.media_storage

  media_persistent_volume_claim_name = var.media_storage == "local" ? kubernetes_persistent_volume_claim_v1.media[0].metadata[0].name : ""

  cache_url = var.cache_url
  django_additional_allowed_hosts = var.django_additional_allowed_hosts
  django_admins = var.django_admins
  django_default_from_email = var.django_default_from_email
  django_disable_server_side_cursors = var.django_disable_server_side_cursors
  django_server_email = var.django_server_email
  email_url = var.email_url
  s3_access_id = var.s3_access_id
  s3_bucket_name = var.s3_bucket_name
  s3_file_overwrite = var.s3_file_overwrite
  s3_host = var.s3_host
  s3_region = var.s3_region
  s3_secret_key = var.s3_secret_key
  sentry_dsn = var.sentry_dsn
  use_redis = var.use_redis
  web_concurrency = var.web_concurrency

  extra_config_values = var.extra_config_values
  extra_secret_values = var.extra_secret_values
}
116 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_dirname}}/terraform/other-k8s/variables.tf:
--------------------------------------------------------------------------------
# Input variables for the generic ("other") Kubernetes stack.

variable "cache_url" {
  type = string
  description = "A Django cache URL override."
  default = ""
  sensitive = true
}

variable "django_additional_allowed_hosts" {
  type = string
  description = "Additional entries of the DJANGO_ALLOWED_HOSTS environment variable ('127.0.0.1', 'localhost', the service slug and the project host are included by default)."
  default = ""
}

variable "django_admins" {
  type = string
  description = "The value of the DJANGO_ADMINS environment variable."
  default = ""
}

# NOTE(review): not referenced by main.tf in this stack — the deployment
# module hard-codes DJANGO_CONFIGURATION to "Remote". Confirm before removing.
variable "django_configuration" {
  type = string
  description = "The value of the DJANGO_CONFIGURATION environment variable."
  default = "Remote"
}

variable "django_default_from_email" {
  type = string
  description = "The value of the DJANGO_DEFAULT_FROM_EMAIL environment variable."
  default = ""
}

variable "django_disable_server_side_cursors" {
  type = string
  description = "The value of the DJANGO_DISABLE_SERVER_SIDE_CURSORS environment variable."
  default = "False"
}

variable "django_server_email" {
  type = string
  description = "The value of the DJANGO_SERVER_EMAIL environment variable."
  default = ""
}

variable "email_url" {
  type = string
  description = "The email server connection url."
  default = ""
  sensitive = true
}

variable "environment" {
  type = string
  description = "The name of the deploy environment, e.g. \"Production\"."
}

variable "extra_config_values" {
  type = map(string)
  description = "Additional config map environment variables."
  default = {}
}

variable "extra_secret_values" {
  type = map(string)
  description = "Additional secret environment variables."
  default = {}
  sensitive = true
}

variable "kubernetes_cluster_ca_certificate" {
  description = "The base64 encoded Kubernetes CA certificate."
  type = string
  sensitive = true
}

variable "kubernetes_host" {
  description = "The Kubernetes host."
  type = string
}

variable "kubernetes_token" {
  description = "A Kubernetes admin token."
  type = string
  sensitive = true
}

variable "media_persistent_volume_capacity" {
  description = "The media persistent volume capacity (e.g. 1Gi)."
  type = string
  default = "10Gi"
}

variable "media_persistent_volume_claim_capacity" {
  description = "The media persistent volume claim capacity (e.g. 1Gi)."
  type = string
  default = ""
}

variable "media_persistent_volume_host_path" {
  description = "The media persistent volume host path."
  type = string
  default = ""
}

variable "media_storage" {
  description = "The media storage solution."
  type = string
}

variable "project_slug" {
  description = "The project slug."
  type = string
}

variable "project_url" {
  description = "The project url."
  type = string
}

variable "s3_access_id" {
  description = "The S3 bucket access key ID."
  type = string
  default = ""
  sensitive = true
}

variable "s3_bucket_name" {
  description = "The S3 bucket name."
  type = string
  default = ""
}

variable "s3_file_overwrite" {
  description = "The S3 bucket file overwriting setting."
  type = string
  default = "False"
}

variable "s3_host" {
  description = "The S3 bucket host."
  type = string
  default = ""
}

variable "s3_region" {
  description = "The S3 bucket region."
  type = string
  default = ""
}

variable "s3_secret_key" {
  description = "The S3 bucket secret access key."
  type = string
  default = ""
  sensitive = true
}

variable "sentry_dsn" {
  description = "The Sentry project DSN."
  type = string
  default = ""
  sensitive = true
}

variable "service_container_image" {
  description = "The service container image."
  type = string
}
168 |
# Fix: default was "" while the digitalocean-k8s stack and the deployment
# module both default to the cookiecutter internal service port; main.tf
# forwards this variable unconditionally, so "" overrode the module default
# and left container_port empty. Backward-compatible (explicit values win).
variable "service_container_port" {
  description = "The service container port."
  type = string
  default = "{{ cookiecutter.internal_service_port }}"
}
174 |
# Service sizing and feature-flag inputs.

variable "service_limits_cpu" {
  description = "The service limits cpu value."
  type = string
}

variable "service_limits_memory" {
  description = "The service limits memory value."
  type = string
}

variable "service_replicas" {
  description = "The desired numbers of replicas to deploy."
  type = number
  default = 1
}

variable "service_requests_cpu" {
  description = "The service requests cpu value."
  type = string
}

variable "service_requests_memory" {
  description = "The service requests memory value."
  type = string
}

variable "service_slug" {
  description = "The service slug."
  type = string
}

# NOTE(review): not referenced by this stack's main.tf (only the
# digitalocean-k8s stack uses it to name the cluster) — possibly kept so both
# stacks accept the same tfvars; confirm before removing.
variable "stack_slug" {
  description = "The slug of the stack where the service is deployed."
  type = string
}

variable "use_redis" {
  description = "Tell if a Redis service is used."
  type = bool
  default = false
}

variable "web_concurrency" {
  description = "The desired number of gunicorn workers."
  type = string
  default = ""
}
222 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_dirname}}/terraform/other-k8s/{% if cookiecutter.terraform_backend == "gitlab" %}backend.tf{% endif %}:
--------------------------------------------------------------------------------
# GitLab-managed Terraform state: the HTTP backend is intentionally empty —
# its connection settings are supplied at `terraform init` time (e.g. via
# -backend-config arguments in CI).
terraform {
  backend "http" {
  }
}
5 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_dirname}}/terraform/other-k8s/{% if cookiecutter.terraform_backend == "terraform-cloud" %}cloud.tf{% endif %}:
--------------------------------------------------------------------------------
# Terraform Cloud backend: workspaces are selected by the project tag.
terraform {
  cloud {
    organization = "{{ cookiecutter.terraform_cloud_organization }}"

    workspaces {
      tags = ["project:{{ cookiecutter.project_slug }}"]
    }
  }
}
10 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_dirname}}/terraform/vars/.tfvars:
--------------------------------------------------------------------------------
1 | {% if "environment" in cookiecutter.tfvars %}{% for item in cookiecutter.tfvars.environment|sort %}{{ item }}
2 | {% endfor %}{% endif %}# django_admins=""
3 | # django_additional_allowed_hosts=""
4 | # django_configuration="Remote"
5 | # django_default_from_email=""
6 | # django_disable_server_side_cursors="False"
7 | # django_server_email=""
8 | # s3_file_overwrite="False"
9 | # service_container_port="{{ cookiecutter.internal_service_port }}"
10 | service_limits_cpu="550m" # container CPU cap (Kubernetes limit)
11 | service_limits_memory="512Mi" # container memory cap (Kubernetes limit)
12 | # service_replicas=1
13 | service_requests_cpu="25m" # CPU reserved for scheduling (Kubernetes request)
14 | service_requests_memory="115Mi" # memory reserved for scheduling (Kubernetes request)
15 | # web_concurrency=""
16 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_dirname}}/terraform/vars/{% if "environment_dev" in cookiecutter.tfvars %}dev.tfvars{% endif %}:
--------------------------------------------------------------------------------
1 | {% for item in cookiecutter.tfvars.environment_dev|sort %}{{ item }}
2 | {% endfor %}
3 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_dirname}}/terraform/vars/{% if "environment_prod" in cookiecutter.tfvars %}prod.tfvars{% endif %}:
--------------------------------------------------------------------------------
1 | {% for item in cookiecutter.tfvars.environment_prod|sort %}{{ item }}
2 | {% endfor %}
3 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_dirname}}/terraform/vars/{% if "environment_stage" in cookiecutter.tfvars %}stage.tfvars{% endif %}:
--------------------------------------------------------------------------------
1 | {% for item in cookiecutter.tfvars.environment_stage|sort %}{{ item }}
2 | {% endfor %}
3 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_dirname}}/{{cookiecutter.django_settings_dirname}}/__init__.py:
--------------------------------------------------------------------------------
1 | """{{ cookiecutter.project_name }} project."""
2 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_dirname}}/{{cookiecutter.django_settings_dirname}}/asgi.py:
--------------------------------------------------------------------------------
1 | """
2 | ASGI config for {{ cookiecutter.project_name }} project.
3 |
4 | It exposes the ASGI callable as a module-level variable named ``application``.
5 |
6 | For more information on this file, see
7 | https://docs.djangoproject.com/en/stable/howto/deployment/asgi/
8 | """
9 |
10 | import os
11 | 
12 | os.environ.setdefault("DJANGO_CONFIGURATION", "Remote")  # django-configurations class to load
13 | os.environ.setdefault(
14 |     "DJANGO_SETTINGS_MODULE", "{{ cookiecutter.django_settings_dirname }}.settings"
15 | )
16 | # Imported only after the environment is configured, hence not at top of file.
17 | from configurations.asgi import get_asgi_application  # noqa: E402
18 | 
19 | application = get_asgi_application()
20 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_dirname}}/{{cookiecutter.django_settings_dirname}}/settings.py:
--------------------------------------------------------------------------------
1 | """
2 | Django settings for {{ cookiecutter.project_name }} project.
3 |
4 | Generated by 'django-admin startproject' using Django.
5 |
6 | For more information on this file, see
7 | https://docs.djangoproject.com/en/stable/topics/settings/
8 |
9 | For the full list of settings and their values, see
10 | https://docs.djangoproject.com/en/stable/ref/settings/
11 | """
12 |
13 | import string
14 | from copy import deepcopy
15 | from pathlib import Path
16 |
17 | import dj_database_url
18 | from configurations import Configuration, values
19 |
20 |
21 | class ProjectDefault(Configuration):
22 |     """
23 |     The default settings from the Django project template.
24 | 
25 |     Django Configurations
26 |     https://django-configurations.readthedocs.io
27 |     """
28 | 
29 |     # Build paths inside the project like this: BASE_DIR / "subdir".
30 |     BASE_DIR = Path(__file__).resolve().parent.parent
31 | 
32 |     # Quick-start development settings - unsuitable for production
33 |     # See https://docs.djangoproject.com/en/stable/howto/deployment/checklist/
34 | 
35 |     # SECURITY WARNING: keep the secret key used in production secret!
36 |     SECRET_KEY = values.SecretValue()  # no default: must come from the environment
37 | 
38 |     # SECURITY WARNING: don't run with debug turned on in production!
39 |     DEBUG = values.BooleanValue(True)  # overridden to False in Testing and Remote below
40 | 
41 |     ALLOWED_HOSTS = values.ListValue([])
42 | 
43 |     # Application definition
44 | 
45 |     INSTALLED_APPS = [
46 |         "django.contrib.admin",
47 |         "django.contrib.auth",
48 |         "django.contrib.contenttypes",
49 |         "django.contrib.sessions",
50 |         "django.contrib.messages",
51 |         "django.contrib.staticfiles",
52 |     ]
53 | 
54 |     MIDDLEWARE = [
55 |         "django.middleware.security.SecurityMiddleware",
56 |         "django.contrib.sessions.middleware.SessionMiddleware",
57 |         "django.middleware.common.CommonMiddleware",
58 |         "django.middleware.csrf.CsrfViewMiddleware",
59 |         "django.contrib.auth.middleware.AuthenticationMiddleware",
60 |         "django.contrib.messages.middleware.MessageMiddleware",
61 |         "django.middleware.clickjacking.XFrameOptionsMiddleware",
62 |     ]
63 | 
64 |     ROOT_URLCONF = "{{ cookiecutter.django_settings_dirname }}.urls"
65 | 
66 |     TEMPLATES = [
67 |         {
68 |             "BACKEND": "django.template.backends.django.DjangoTemplates",
69 |             "DIRS": [],
70 |             "APP_DIRS": True,
71 |             "OPTIONS": {
72 |                 "context_processors": [
73 |                     "django.template.context_processors.debug",
74 |                     "django.template.context_processors.request",
75 |                     "django.contrib.auth.context_processors.auth",
76 |                     "django.contrib.messages.context_processors.messages",
77 |                 ],
78 |             },
79 |         },
80 |     ]
81 | 
82 |     WSGI_APPLICATION = "{{ cookiecutter.django_settings_dirname }}.wsgi.application"
83 | 
84 |     # Database
85 |     # https://docs.djangoproject.com/en/stable/ref/settings/#databases
86 | 
87 |     DATABASES = {
88 |         "default": dj_database_url.config(),  # parsed from the DATABASE_URL env var
89 |     }
90 | 
91 |     # Password validation
92 |     # https://docs.djangoproject.com/en/stable/ref/settings/#auth-password-validators
93 | 
94 |     AUTH_PASSWORD_VALIDATORS = [
95 |         {
96 |             "NAME": "django.contrib.auth.password_validation.UserAttributeSimilarityValidator",  # noqa: E501
97 |         },
98 |         {
99 |             "NAME": "django.contrib.auth.password_validation.MinimumLengthValidator",
100 |         },
101 |         {
102 |             "NAME": "django.contrib.auth.password_validation.CommonPasswordValidator",
103 |         },
104 |         {
105 |             "NAME": "django.contrib.auth.password_validation.NumericPasswordValidator",
106 |         },
107 |     ]
108 | 
109 |     # Internationalization
110 |     # https://docs.djangoproject.com/en/stable/topics/i18n/
111 | 
112 |     LANGUAGE_CODE = "en-us"
113 | 
114 |     TIME_ZONE = "UTC"
115 | 
116 |     USE_I18N = True
117 | 
118 |     USE_TZ = True
119 | 
120 |     # Static files (CSS, JavaScript, Images)
121 |     # https://docs.djangoproject.com/en/stable/howto/static-files/
122 | 
123 |     STATIC_URL = "static/"
124 | 
125 |     STATIC_ROOT = BASE_DIR / "static"
126 | 
127 |     # Default primary key field type
128 |     # https://docs.djangoproject.com/en/stable/ref/settings/#default-auto-field
129 | 
130 |     DEFAULT_AUTO_FIELD = "django.db.models.BigAutoField"
131 | 
132 |     # Stored files
133 |     # https://docs.djangoproject.com/en/stable/topics/files/{% if cookiecutter.media_storage == "local" %} # noqa
134 | 
135 |     MEDIA_URL = "/media/"
136 | 
137 |     MEDIA_ROOT = BASE_DIR / "media" # noqa{% else %}
138 | 
139 |     # Uncomment when using local media
140 | 
141 |     # MEDIA_URL = "/media/"
142 | 
143 |     # MEDIA_ROOT = BASE_DIR / "media"{% endif %}
144 | 
145 |     # Email Settings
146 |     # https://docs.djangoproject.com/en/stable/topics/email/
147 | 
148 |     ADMINS = values.SingleNestedTupleValue(
149 |         (("admin", "errors@{{ cookiecutter.project_slug }}.com"),)
150 |     )
151 | 
152 |     DEFAULT_FROM_EMAIL = values.EmailValue("info@{{ cookiecutter.project_slug }}.com")
153 | 
154 |     EMAIL_SUBJECT_PREFIX = "[{{ cookiecutter.project_name }}] "
155 | 
156 |     EMAIL_USE_LOCALTIME = True
157 | 
158 |     SERVER_EMAIL = values.EmailValue("server@{{ cookiecutter.project_slug }}.com")
159 | 
160 |     # Email URL
161 |     # https://django-configurations.readthedocs.io/en/stable/values.html
162 | 
163 |     EMAIL = values.EmailURLValue("console://")  # default backend prints emails to stdout
164 | 
165 |     # Cache URL
166 |     # https://django-configurations.readthedocs.io/en/stable/values.html
167 | 
168 |     CACHES = values.CacheURLValue("locmem://")  # default: per-process in-memory cache
169 | 
170 |     # Translation
171 |     # https://docs.djangoproject.com/en/stable/topics/i18n/translation/
172 | 
173 |     # LANGUAGES = (("en", "English"), ("it", "Italiano"))
174 | 
175 |     # Clickjacking Protection
176 |     # https://docs.djangoproject.com/en/stable/ref/clickjacking/
177 | 
178 |     X_FRAME_OPTIONS = "SAMEORIGIN"  # Default: 'SAMEORIGIN'
179 | 
180 |     # Session auth
181 |     # https://docs.djangoproject.com/en/stable/ref/settings/#sessions
182 | 
183 |     SESSION_COOKIE_DOMAIN = values.Value()  # presumably defaults to None (current host) — confirm
184 | 
185 |     SESSION_COOKIE_SECURE = True
186 | 
187 |     # Secure Proxy SSL Header
188 |     # https://docs.djangoproject.com/en/stable/ref/settings/#secure-proxy-ssl-header
189 | 
190 |     SECURE_PROXY_SSL_HEADER = ("HTTP_X_FORWARDED_PROTO", "https")
191 | 
192 |     # CSRF Trusted Origins
193 |     # https://docs.djangoproject.com/en/stable/ref/settings/#csrf-trusted-origins
194 | 
195 |     CSRF_TRUSTED_ORIGINS = values.ListValue([])
196 |
197 |
198 | class Local(ProjectDefault):
199 |     """The local settings."""
200 | 
201 |     # Application definition
202 | 
203 |     INSTALLED_APPS = ProjectDefault.INSTALLED_APPS.copy()  # copied: dev apps appended below
204 | 
205 |     MIDDLEWARE = ProjectDefault.MIDDLEWARE.copy()  # copied: dev middleware inserted below
206 | 
207 |     # Django Debug Toolbar
208 |     # https://django-debug-toolbar.readthedocs.io/en/stable/configuration.html
209 | 
210 |     try:
211 |         import debug_toolbar
212 |     except ModuleNotFoundError:  # pragma: no cover
213 |         pass
214 |     else:  # pragma: no cover
215 |         INTERNAL_IPS = values.ListValue(["127.0.0.1"])
216 |         INSTALLED_APPS.append("debug_toolbar")
217 |         MIDDLEWARE.insert(0, "debug_toolbar.middleware.DebugToolbarMiddleware")
218 |         DEBUG_TOOLBAR_CONFIG = {"SHOW_TOOLBAR_CALLBACK": lambda x: True}  # always show locally
219 | 
220 |     # Django Extensions
221 |     # https://django-extensions.readthedocs.io/en/stable/graph_models.html
222 | 
223 |     try:
224 |         import django_extensions
225 |     except ModuleNotFoundError:  # pragma: no cover
226 |         pass
227 |     else:  # pragma: no cover
228 |         INSTALLED_APPS.append("django_extensions")
229 |         SHELL_PLUS_PRINT_SQL = True
230 |         SHELL_PLUS_PRINT_SQL_TRUNCATE = None  # None: never truncate the logged SQL
231 |         GRAPH_MODELS = {
232 |             "all_applications": True,
233 |             "arrow_shape": "diamond",
234 |             "disable_abstract_fields": False,
235 |             "disable_fields": False,
236 |             "exclude_columns": [
237 |                 "id",
238 |             ],
239 |             "exclude_models": ",".join(
240 |                 (
241 |                     "AbstractBaseSession",
242 |                     "AbstractBaseUser",
243 |                     "AbstractUser",
244 |                     "ContentType",
245 |                     "LogEntry",
246 |                     "PermissionsMixin",
247 |                     "Session",
248 |                     "UserGroup",
249 |                 )
250 |             ),
251 |             "group_models": True,
252 |             "hide_edge_labels": True,
253 |             "inheritance": True,
254 |             "language": "en",
255 |             "layout": "dot",
256 |             "relations_as_fields": True,
257 |             "theme": "django2018",
258 |             "verbose_names": False,
259 |         }
260 |
261 |
262 | class Testing(ProjectDefault):
263 |     """The testing settings."""
264 | 
265 |     SECRET_KEY = string.ascii_letters  # fixed throwaway key; tests need no real secret
266 | 
267 |     # Debug
268 |     # https://docs.djangoproject.com/en/stable/ref/settings/#debug
269 | 
270 |     DEBUG = False
271 | 
272 |     # Application definition
273 | 
274 |     INSTALLED_APPS = ProjectDefault.INSTALLED_APPS.copy()  # copied: test apps appended below
275 | 
276 |     # Email URL
277 |     # https://django-configurations.readthedocs.io/en/stable/values/
278 | 
279 |     EMAIL = "dummy://"  # discard all outgoing mail during tests
280 | 
281 |     # Cache URL
282 |     # https://django-configurations.readthedocs.io/en/stable/values/
283 | 
284 |     CACHES = {"default": {"BACKEND": "django.core.cache.backends.locmem.LocMemCache"}}  # plain dict: never read from env
285 | 
286 |     # Storages
287 |     # https://docs.djangoproject.com/en/stable/ref/settings/#std-setting-STORAGES
288 | 
289 |     STORAGES = {
290 |         "default": {
291 |             "BACKEND": "django.core.files.storage.InMemoryStorage",
292 |         },
293 |         "staticfiles": {
294 |             "BACKEND": "django.contrib.staticfiles.storage.StaticFilesStorage",
295 |         },
296 |     }
297 | 
298 |     # The MD5 based password hasher is much less secure but faster
299 |     # https://docs.djangoproject.com/en/stable/topics/auth/passwords/
300 | 
301 |     PASSWORD_HASHERS = [
302 |         "django.contrib.auth.hashers.MD5PasswordHasher",
303 |     ]
304 | 
305 |     # Behave
306 |     # https://behave-django.readthedocs.io/en/latest/installation.html
307 | 
308 |     try:
309 |         import behave_django
310 |     except ModuleNotFoundError:  # pragma: no cover
311 |         pass
312 |     else:  # pragma: no cover
313 |         INSTALLED_APPS.append("behave_django")
314 | 
315 |     # Stored files
316 |     # https://docs.djangoproject.com/en/stable/topics/files/{% if cookiecutter.media_storage != "none" %} # noqa
317 | 
318 |     MEDIA_ROOT = ProjectDefault.BASE_DIR / "media_test" # noqa{% else %}
319 | 
320 |     # Uncomment when using media
321 | 
322 |     # MEDIA_ROOT = ProjectDefault.BASE_DIR / "media_test"{% endif %}
323 |
324 |
325 | class Remote(ProjectDefault):
326 |     """The remote settings."""
327 | 
328 |     # Debug
329 |     # https://docs.djangoproject.com/en/stable/ref/settings/#debug
330 | 
331 |     DEBUG = False
332 | 
333 |     @property
334 |     def MIDDLEWARE(self):  # pragma: no cover
335 |         """Return the middleware settings."""
336 |         middleware = deepcopy(ProjectDefault.MIDDLEWARE)
337 |         try:
338 |             # WhiteNoise
339 |             # http://whitenoise.evans.io/en/stable/django.html
340 | 
341 |             import whitenoise  # noqa: F401
342 |         except ModuleNotFoundError:  # pragma: no cover
343 |             pass
344 |         else:  # pragma: no cover
345 |             middleware.insert(1, "whitenoise.middleware.WhiteNoiseMiddleware")  # right after SecurityMiddleware
346 |         return middleware
347 | 
348 |     # DB Transaction pooling and server-side cursors
349 |     # https://docs.djangoproject.com/en/stable/ref/databases/#transaction-pooling-and-server-side-cursors
350 | 
351 |     DISABLE_SERVER_SIDE_CURSORS = values.BooleanValue(False)
352 | 
353 |     @property
354 |     def DATABASES(self):  # pragma: no cover
355 |         """Return the databases."""
356 |         databases = deepcopy(ProjectDefault.DATABASES)
357 |         databases["default"][
358 |             "DISABLE_SERVER_SIDE_CURSORS"
359 |         ] = self.DISABLE_SERVER_SIDE_CURSORS
360 |         return databases
361 | 
362 |     # Email URL
363 |     # https://django-configurations.readthedocs.io/en/stable/values/
364 | 
365 |     EMAIL = values.EmailURLValue()  # no default: must be provided via the environment
366 | 
367 |     # Argon2 is the winner of the 2015 Password Hashing Competition
368 |     # https://docs.djangoproject.com/en/stable/topics/auth/passwords/
369 | 
370 |     PASSWORD_HASHERS = [
371 |         "django.contrib.auth.hashers.Argon2PasswordHasher",
372 |         "django.contrib.auth.hashers.PBKDF2PasswordHasher",
373 |         "django.contrib.auth.hashers.PBKDF2SHA1PasswordHasher",
374 |         "django.contrib.auth.hashers.BCryptSHA256PasswordHasher",
375 |         "django.contrib.auth.hashers.ScryptPasswordHasher",
376 |     ]
377 | 
378 |     # Security
379 |     # https://docs.djangoproject.com/en/stable/topics/security/
380 | 
381 |     CSRF_COOKIE_SECURE = True
382 | 
383 |     SECURE_BROWSER_XSS_FILTER = True
384 | 
385 |     SECURE_CONTENT_TYPE_NOSNIFF = True
386 | 
387 |     SECURE_HSTS_INCLUDE_SUBDOMAINS = True
388 | 
389 |     SECURE_HSTS_PRELOAD = True
390 | 
391 |     SECURE_HSTS_SECONDS = 3_600  # 1 hour
392 | 
393 |     X_FRAME_OPTIONS = "DENY"
394 | 
395 |     # Persistent connections
396 |     # https://docs.djangoproject.com/en/stable/ref/databases/#general-notes
397 | 
398 |     CONN_MAX_AGE = None  # None keeps database connections open indefinitely
399 | 
400 |     # Storages
401 |     # https://docs.djangoproject.com/en/stable/ref/settings/#std-setting-STORAGES
402 | 
403 |     @property
404 |     def STORAGES(self):  # pragma: no cover
405 |         """Return the storage settings."""
406 |         storages = deepcopy(
407 |             ProjectDefault.STORAGES
408 |         )  # noqa{% if "s3" in cookiecutter.media_storage %}
409 |         storages["default"][
410 |             "BACKEND"
411 |         ] = "storages.backends.s3boto3.S3Boto3Storage"  # noqa{% endif %}
412 |         try:
413 |             # WhiteNoise
414 |             # http://whitenoise.evans.io/en/stable/django.html
415 | 
416 |             import whitenoise  # noqa: F401
417 |         except ModuleNotFoundError:  # pragma: no cover
418 |             pass
419 |         else:  # pragma: no cover
420 |             storages["staticfiles"][
421 |                 "BACKEND"
422 |             ] = "whitenoise.storage.CompressedManifestStaticFilesStorage"
423 |         return storages
424 | 
425 |     # Sentry
426 |     # https://sentry.io/for/django/
427 | 
428 |     try:
429 |         import sentry_sdk
430 |     except ModuleNotFoundError:  # pragma: no cover
431 |         pass
432 |     else:  # pragma: no cover{% if cookiecutter.use_redis == "true" %}
433 |         from sentry_sdk.integrations.django import DjangoIntegration
434 |         from sentry_sdk.integrations.redis import RedisIntegration
435 | 
436 |         sentry_sdk.init(
437 |             integrations=[DjangoIntegration(), RedisIntegration()],
438 |             send_default_pii=True,
439 |         )  # noqa{% else %}
440 |         from sentry_sdk.integrations.django import DjangoIntegration
441 | 
442 |         sentry_sdk.init(
443 |             integrations=[DjangoIntegration()],
444 |             send_default_pii=True,
445 |         )  # noqa{% endif %}{% if "s3" in cookiecutter.media_storage %}
446 | 
447 |     # Django Storages
448 |     # https://django-storages.readthedocs.io/en/stable/
449 | 
450 |     AWS_LOCATION = values.Value("")
451 | 
452 |     AWS_S3_ENDPOINT_URL = values.Value()
453 | 
454 |     AWS_S3_FILE_OVERWRITE = values.BooleanValue(False)
455 | 
456 |     AWS_STORAGE_BUCKET_NAME = values.Value() # noqa{% endif %}
457 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_dirname}}/{{cookiecutter.django_settings_dirname}}/tests/__init__.py:
--------------------------------------------------------------------------------
1 | """Initialize tests for the main app."""
2 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_dirname}}/{{cookiecutter.django_settings_dirname}}/tests/test_views.py:
--------------------------------------------------------------------------------
1 | """The main app views tests."""
2 |
3 | from django.test import Client, TestCase
4 |
5 |
6 | class ApiHealthTest(TestCase):
7 | """The health view tests."""
8 |
9 | url = "/{{ cookiecutter.service_slug }}/health/"
10 | client = Client()
11 |
12 | def test_health(self):
13 | """Test api health endpoint."""
14 | with self.subTest("GET"):
15 | response = self.client.get(self.url)
16 | self.assertEqual(response.status_code, 204)
17 | self.assertEqual(response.content, b"")
18 | self.assertEqual(
19 | response.headers,
20 | {
21 | "Content-Type": "text/html; charset=utf-8",
22 | "X-Frame-Options": "SAMEORIGIN",
23 | "Content-Length": "0",
24 | "X-Content-Type-Options": "nosniff",
25 | "Referrer-Policy": "same-origin",
26 | "Cross-Origin-Opener-Policy": "same-origin",
27 | },
28 | )
29 | with self.subTest("OPTIONS"):
30 | response = self.client.options(self.url)
31 | self.assertEqual(response.status_code, 200)
32 | self.assertEqual(response.content, b"")
33 | self.assertEqual(
34 | response.headers,
35 | {
36 | "Content-Type": "text/html; charset=utf-8",
37 | "X-Frame-Options": "SAMEORIGIN",
38 | "Allow": "GET, HEAD, OPTIONS",
39 | "Content-Length": "0",
40 | "X-Content-Type-Options": "nosniff",
41 | "Referrer-Policy": "same-origin",
42 | "Cross-Origin-Opener-Policy": "same-origin",
43 | },
44 | )
45 | with self.subTest("HEAD"):
46 | response = self.client.head(self.url)
47 | self.assertEqual(response.status_code, 204)
48 | self.assertEqual(response.content, b"")
49 | self.assertEqual(
50 | response.headers,
51 | {
52 | "Content-Type": "text/html; charset=utf-8",
53 | "X-Frame-Options": "SAMEORIGIN",
54 | "Content-Length": "0",
55 | "X-Content-Type-Options": "nosniff",
56 | "Referrer-Policy": "same-origin",
57 | "Cross-Origin-Opener-Policy": "same-origin",
58 | },
59 | )
60 | with self.subTest("POST"):
61 | response = self.client.post(self.url)
62 | self.assertEqual(response.status_code, 405)
63 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_dirname}}/{{cookiecutter.django_settings_dirname}}/urls.py:
--------------------------------------------------------------------------------
1 | """
2 | {{ cookiecutter.project_name }} URL Configuration.
3 |
4 | The `urlpatterns` list routes URLs to views. For more information please see:
5 | https://docs.djangoproject.com/en/stable/topics/http/urls/
6 |
7 | Examples
8 | --------
9 | Function views
10 | 1. Add an import: from my_app import views
11 | 2. Add a URL to urlpatterns: path('', views.home, name='home')
12 | Class-based views
13 | 1. Add an import: from other_app.views import Home
14 | 2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
15 | Including another URLconf
16 | 1. Import the include() function: from django.urls import include, path
17 | 2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
18 |
19 |
20 | """
21 | from django.conf import settings
22 | from django.contrib import admin
23 | from django.urls import include, path, re_path
24 | from django.views.static import serve
25 |
26 | from .views import HealthView
27 |
28 | admin.site.site_header = admin.site.site_title = "{{ cookiecutter.project_name }}"
29 |
30 | urlpatterns = [
31 | path("admin/", admin.site.urls),
32 | path(
33 | "{{ cookiecutter.service_slug }}/health/",
34 | HealthView.as_view(),
35 | name="health-check",
36 | ),
37 | ]
38 |
39 | if settings.DEBUG: # pragma: no cover
40 | urlpatterns += [
41 | re_path(r"^media/(?P.*)$", serve, {"document_root": settings.MEDIA_ROOT}),
42 | ]
43 |
44 | try:
45 | import debug_toolbar
46 | except ModuleNotFoundError: # pragma: no cover
47 | pass
48 | else:
49 | urlpatterns.append(
50 | path("__debug__/", include(debug_toolbar.urls))
51 | ) # pragma: no cover
52 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_dirname}}/{{cookiecutter.django_settings_dirname}}/views.py:
--------------------------------------------------------------------------------
1 | """The main app views."""
2 |
3 | from django.http import HttpResponse
4 | from django.views.generic import View
5 |
6 |
7 | class HealthView(View):
8 |     """The health endpoint view."""
9 | 
10 |     http_method_names = ("get", "head", "options")  # any other verb gets HTTP 405
11 | 
12 |     def get(self, request, *args, **kwargs):
13 |         """Return health endpoint GET response."""
14 |         return HttpResponse(status=204)  # 204: alive, deliberately empty body
15 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_dirname}}/{{cookiecutter.django_settings_dirname}}/workers.py:
--------------------------------------------------------------------------------
1 | """Custom uvicorn supported worker."""
2 |
3 | from uvicorn.workers import UvicornWorker
4 |
5 |
6 | class UvicornDjangoWorker(UvicornWorker):
7 |     """A Uvicorn worker having lifespan option disabled."""
8 | 
9 |     CONFIG_KWARGS = {**UvicornWorker.CONFIG_KWARGS, "lifespan": "off"}  # off: Django's ASGI handler does not speak the lifespan protocol
10 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_dirname}}/{{cookiecutter.django_settings_dirname}}/wsgi.py:
--------------------------------------------------------------------------------
1 | """
2 | WSGI config for {{ cookiecutter.project_name }} project.
3 |
4 | It exposes the WSGI callable as a module-level variable named ``application``.
5 |
6 | For more information on this file, see
7 | https://docs.djangoproject.com/en/stable/howto/deployment/wsgi/
8 | """
9 |
10 | import os
11 | 
12 | os.environ.setdefault("DJANGO_CONFIGURATION", "Remote")  # django-configurations class to load
13 | os.environ.setdefault(
14 |     "DJANGO_SETTINGS_MODULE", "{{ cookiecutter.django_settings_dirname }}.settings"
15 | )
16 | # Imported only after the environment is configured, hence not at top of file.
17 | from configurations.wsgi import get_wsgi_application  # noqa: E402
18 | 
19 | application = get_wsgi_application()
20 |
--------------------------------------------------------------------------------