{% blocktranslate %}We have sent an e-mail to you for verification. Follow the link provided to finalize the signup process. Please contact us if you do not receive it within a few minutes.{% endblocktranslate %}

{% blocktranslate %}This part of the site requires us to verify that
you are who you claim to be. For this purpose, we require that you
verify ownership of your e-mail address. {% endblocktranslate %}

{% blocktranslate %}We have sent an e-mail to you for
verification. Please click on the link inside this e-mail. Please
contact us if you do not receive it within a few minutes.{% endblocktranslate %}

{% blocktranslate with confirmation.email_address.email as email %}Please confirm that {{ email }} is an e-mail address for user {{ user_display }}.{% endblocktranslate %}

{% if token_fail %}
  {% url 'account_reset_password' as passwd_reset_url %}
  {% blocktranslate %}The password reset link was invalid, possibly because it has already been used. Please request a new password reset.{% endblocktranslate %}
{% else %}
  {% if form %}
  {% else %}
    {% translate 'Your password is now changed.' %}
  {% endif %}
{% endif %}
{% endblock %}
{%- endraw %}

--------------------------------------------------------------------------------
/tests/test_bare.sh:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 | # this is a very simple script that tests the docker configuration for cookiecutter-django
3 | # it is meant to be run from the root directory of the repository, eg:
4 | # sh tests/test_bare.sh
5 |
6 | set -o errexit
7 | set -x
8 |
9 | # create a cache directory
10 | mkdir -p .cache/bare
11 | cd .cache/bare
12 |
13 | # create the project using the default settings in cookiecutter.json
14 | cookiecutter ../../ --no-input --overwrite-if-exists use_docker=n "$@"
15 | cd my_awesome_project
16 |
17 | # Install OS deps
18 | sudo utility/install_os_dependencies.sh install
19 |
20 | # Install Python deps
21 | pip install -r requirements/local.txt
22 |
23 | # Lint by running pre-commit on all files
24 | # Needs a git repo to find the project root
25 | git init
26 | git add .
27 | pre-commit run --show-diff-on-failure -a
28 |
29 | # run the project's tests
30 | pytest
31 |
32 | # Make sure the check doesn't raise any warnings
33 | python manage.py check --fail-level WARNING
34 |
35 | if [ -f "package.json" ]
36 | then
37 | npm install
38 | if [ -f "gulpfile.js" ]
39 | then
40 | npm run build
41 | fi
42 | fi
43 |
44 | # Generate the HTML for the documentation
45 | cd docs && make html
46 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/.idea/runConfigurations/merge_production_dotenvs_in_dotenv.xml:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/setup.cfg:
--------------------------------------------------------------------------------
1 | [flake8]
2 | max-line-length = 120
3 | exclude = .tox,.git,*/migrations/*,*/static/CACHE/*,docs,node_modules,venv
4 |
5 | [pycodestyle]
6 | max-line-length = 120
7 | exclude = .tox,.git,*/migrations/*,*/static/CACHE/*,docs,node_modules,venv
8 |
9 | [isort]
10 | line_length = 88
11 | known_first_party = {{cookiecutter.project_slug}},config
12 | multi_line_output = 3
13 | default_section = THIRDPARTY
14 | skip = venv/
15 | skip_glob = **/migrations/*.py
16 | include_trailing_comma = true
17 | force_grid_wrap = 0
18 | use_parentheses = true
19 |
20 | [mypy]
21 | python_version = 3.9
22 | check_untyped_defs = True
23 | ignore_missing_imports = True
24 | warn_unused_ignores = True
25 | warn_redundant_casts = True
26 | warn_unused_configs = True
27 | plugins = mypy_django_plugin.main{% if cookiecutter.use_drf == "y" %}, mypy_drf_plugin.main{% endif %}
28 |
29 | [mypy.plugins.django-stubs]
30 | django_settings_module = config.settings.test
31 |
32 | [mypy-*.migrations.*]
33 | # Django migrations should not produce any errors:
34 | ignore_errors = True
35 |
36 | [coverage:run]
37 | include = {{cookiecutter.project_slug}}/*
38 | omit = *migrations*, *tests*
39 | plugins =
40 | django_coverage_plugin
41 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/compose/production/postgres/maintenance/backup:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 |
4 | ### Create a database backup.
5 | ###
6 | ### Usage:
7 | ### $ docker-compose -f .yml (exec |run --rm) postgres backup
8 |
9 |
10 | set -o errexit
11 | set -o pipefail
12 | set -o nounset
13 |
14 |
15 | working_dir="$(dirname ${0})"
16 | source "${working_dir}/_sourced/constants.sh"
17 | source "${working_dir}/_sourced/messages.sh"
18 |
19 |
20 | message_welcome "Backing up the '${POSTGRES_DB}' database..."
21 |
22 |
23 | if [[ "${POSTGRES_USER}" == "postgres" ]]; then
24 | message_error "Backing up as 'postgres' user is not supported. Assign 'POSTGRES_USER' env with another one and try again."
25 | exit 1
26 | fi
27 |
28 | export PGHOST="${POSTGRES_HOST}"
29 | export PGPORT="${POSTGRES_PORT}"
30 | export PGUSER="${POSTGRES_USER}"
31 | export PGPASSWORD="${POSTGRES_PASSWORD}"
32 | export PGDATABASE="${POSTGRES_DB}"
33 |
34 | backup_filename="${BACKUP_FILE_PREFIX}_$(date +'%Y_%m_%dT%H_%M_%S').sql.gz"
35 | pg_dump | gzip > "${BACKUP_DIR_PATH}/${backup_filename}"
36 |
37 |
38 | message_success "'${POSTGRES_DB}' database backup '${backup_filename}' has been created and placed in '${BACKUP_DIR_PATH}'."
39 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/manage.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | import os
3 | import sys
4 | from pathlib import Path
5 |
6 | if __name__ == "__main__":
7 | os.environ.setdefault("DJANGO_SETTINGS_MODULE", "config.settings.local")
8 |
9 | try:
10 | from django.core.management import execute_from_command_line
11 | except ImportError:
12 | # The above import may fail for some other reason. Ensure that the
13 | # issue is really that Django is missing to avoid masking other
14 | # exceptions on Python 2.
15 | try:
16 | import django # noqa
17 | except ImportError:
18 | raise ImportError(
19 | "Couldn't import Django. Are you sure it's installed and "
20 | "available on your PYTHONPATH environment variable? Did you "
21 | "forget to activate a virtual environment?"
22 | )
23 |
24 | raise
25 |
26 | # This allows easy placement of apps within the interior
27 | # {{ cookiecutter.project_slug }} directory.
28 | current_path = Path(__file__).parent.resolve()
29 | sys.path.append(str(current_path / "{{ cookiecutter.project_slug }}"))
30 |
31 | execute_from_command_line(sys.argv)
32 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/{{cookiecutter.project_slug}}/users/admin.py:
--------------------------------------------------------------------------------
1 | from django.contrib import admin
2 | from django.contrib.auth import admin as auth_admin
3 | from django.contrib.auth import get_user_model
4 | from django.utils.translation import gettext_lazy as _
5 |
6 | from {{ cookiecutter.project_slug }}.users.forms import UserAdminChangeForm, UserAdminCreationForm
7 |
8 | User = get_user_model()
9 |
10 |
11 | @admin.register(User)
12 | class UserAdmin(auth_admin.UserAdmin):
13 |
14 | form = UserAdminChangeForm
15 | add_form = UserAdminCreationForm
16 | fieldsets = (
17 | (None, {"fields": ("username", "password")}),
18 | (_("Personal info"), {"fields": ("name", "email")}),
19 | (
20 | _("Permissions"),
21 | {
22 | "fields": (
23 | "is_active",
24 | "is_staff",
25 | "is_superuser",
26 | "groups",
27 | "user_permissions",
28 | ),
29 | },
30 | ),
31 | (_("Important dates"), {"fields": ("last_login", "date_joined")}),
32 | )
33 | list_display = ["username", "name", "is_superuser"]
34 | search_fields = ["name"]
35 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/docs/make.bat:
--------------------------------------------------------------------------------
1 | @ECHO OFF
2 |
3 | pushd %~dp0
4 |
5 | REM Command file for Sphinx documentation
6 |
7 |
8 | if "%SPHINXBUILD%" == "" (
9 | set SPHINXBUILD=sphinx-build -c .
10 | )
11 | set SOURCEDIR=_source
12 | set BUILDDIR=_build
13 | set APP=..\{{cookiecutter.project_slug}}
14 |
15 | if "%1" == "" goto help
16 |
17 | %SPHINXBUILD% >NUL 2>NUL
18 | if errorlevel 9009 (
19 | echo.
20 | echo.The 'sphinx-build' command was not found. Make sure you have Sphinx
21 | echo.installed, then set the SPHINXBUILD environment variable to point
22 | echo.to the full path of the 'sphinx-build' executable. Alternatively you
23 | echo.may add the Sphinx directory to PATH.
24 | echo.
25 | echo.Install sphinx-autobuild for live serving.
26 | echo.If you don't have Sphinx installed, grab it from
27 | echo.http://sphinx-doc.org/
28 | exit /b 1
29 | )
30 |
31 | %SPHINXBUILD% -b %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%
32 | goto end
33 |
34 | :livehtml
35 | sphinx-autobuild -b html --open-browser -p 9000 --watch %APP% -c . %SOURCEDIR% %BUILDDIR%/html
36 | GOTO :EOF
37 |
38 | :apidocs
39 | sphinx-apidoc -o %SOURCEDIR%/api %APP%
40 | GOTO :EOF
41 |
42 | :help
43 | %SPHINXBUILD% -b help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%
44 |
45 | :end
46 | popd
47 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/compose/production/mysql/init.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 |
4 | initialize() {
5 | {
6 | mysql_note "Giving user ${MYSQL_USER} access to schema test_${MYSQL_DATABASE}"
7 | docker_process_sql --database=mysql <<<"GRANT ALL ON \`test_${MYSQL_DATABASE//_/\\_}\`.* TO '$MYSQL_USER'@'%' ;"
8 |
9 | # exporting dummy MYSQL_ONETIME_PASSWORD to avoid -> MYSQL_ONETIME_PASSWORD: unbound variable
10 | export DUMMY_ONETIME_PASSWORD="$MYSQL_ROOT_PASSWORD"
11 | } || {
12 | exit 1
13 | }
14 | }
15 |
16 | docker_process_sql() {
17 | if [ -n "$MYSQL_DATABASE" ]; then
18 | set -- --database="$MYSQL_DATABASE" "$@"
19 | fi
20 |
21 | mysql --protocol=socket -uroot --password="${MYSQL_ROOT_PASSWORD}" -hlocalhost --socket="${SOCKET}" --comments "$@"
22 | }
23 |
24 | # logging functions
25 | mysql_log() {
26 | local type="$1"; shift
27 | # accept argument string or stdin
28 | local text="$*"; if [ "$#" -eq 0 ]; then text="$(cat)"; fi
29 | local dt; dt="$(date --rfc-3339=seconds)"
30 | printf '%s [%s] [Entrypoint]: %s\n' "$dt" "$type" "$text"
31 | }
32 |
33 | mysql_note() {
34 | mysql_log Note "$@"
35 | }
36 |
37 | until (initialize); do
38 | >&2 echo 'Waiting for MYSQL to execute init'
39 | sleep 1
40 | done
41 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/compose/production/mysql/maintenance/backup:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 |
4 | ### Create a database backup.
5 | ###
6 | ### Usage:
7 | ### $ docker-compose -f .yml (exec |run --rm) mysql backup
8 |
9 |
10 | set -o errexit
11 | set -o pipefail
12 | set -o nounset
13 |
14 |
15 | working_dir="$(dirname ${0})"
16 | source "${working_dir}/_sourced/constants.sh"
17 | source "${working_dir}/_sourced/messages.sh"
18 |
19 |
20 | message_welcome "Backing up the '${MYSQL_DATABASE}' database..."
21 |
22 |
23 | if [[ "${MYSQL_USER}" == "root" ]]; then
24 | message_error "Backing up as 'root' user is not supported. Assign 'MYSQL_USER' env with another one and try again."
25 | exit 1
26 | fi
27 |
28 | export MYSQL_TCP_PORT="${MYSQL_PORT}"
29 | export MYSQL_HOST="${MYSQL_HOST}"
30 |
31 | backup_filename="${BACKUP_FILE_PREFIX}_$(date +'%Y_%m_%dT%H_%M_%S').sql.gz"
32 | backup_file_path="${BACKUP_DIR_PATH}/${backup_filename}"
33 |
34 |
35 | mysqldump --no-tablespaces --user=${MYSQL_USER} --password=${MYSQL_PASSWORD} --port=${MYSQL_PORT} ${MYSQL_DATABASE} | gzip > "${backup_file_path}"
36 |
37 |
38 |
39 | message_success "'${MYSQL_DATABASE}' database backup '${backup_filename}' has been created and placed in '${BACKUP_DIR_PATH}'."
40 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/.idea/runConfigurations/pytest___.xml:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/docs/Makefile:
--------------------------------------------------------------------------------
1 | # Minimal makefile for Sphinx documentation
2 | #
3 |
4 | # You can set these variables from the command line, and also
5 | # from the environment for the first two.
6 | SPHINXOPTS ?=
7 | SPHINXBUILD ?= sphinx-build
8 | SOURCEDIR = .
9 | BUILDDIR = ./_build
10 | {%- if cookiecutter.use_docker == 'y' %}
11 | APP = /app
12 | {%- else %}
13 | APP = ../{{cookiecutter.project_slug}}
14 | {% endif %}
15 |
16 | .PHONY: help livehtml apidocs Makefile
17 |
18 | # Put it first so that "make" without argument is like "make help".
19 | help:
20 | @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) -c .
21 |
22 | # Build, watch and serve docs with live reload
23 | livehtml:
24 | sphinx-autobuild -b html
25 | {%- if cookiecutter.use_docker == 'y' %} --host 0.0.0.0
26 | {%- else %} --open-browser
27 | {%- endif %} --port 9000 --watch $(APP) -c . $(SOURCEDIR) $(BUILDDIR)/html
28 |
29 | # Outputs rst files from django application code
30 | apidocs:
31 | sphinx-apidoc -o $(SOURCEDIR)/api $(APP)
32 |
33 | # Catch-all target: route all unknown targets to Sphinx using the new
34 | # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS).
35 | %: Makefile
36 | @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) -c .
37 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/.idea/runConfigurations/pytest__users.xml:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/config/settings/test.py:
--------------------------------------------------------------------------------
1 | """
2 | With these settings, tests run faster.
3 | """
4 |
5 | from .base import * # noqa
6 | from .base import env
7 |
8 | # GENERAL
9 | # ------------------------------------------------------------------------------
10 | # https://docs.djangoproject.com/en/dev/ref/settings/#secret-key
11 | SECRET_KEY = env(
12 | "DJANGO_SECRET_KEY",
13 | default="!!!SET DJANGO_SECRET_KEY!!!",
14 | )
15 | # https://docs.djangoproject.com/en/dev/ref/settings/#test-runner
16 | TEST_RUNNER = "django.test.runner.DiscoverRunner"
17 |
18 | # PASSWORDS
19 | # ------------------------------------------------------------------------------
20 | # https://docs.djangoproject.com/en/dev/ref/settings/#password-hashers
21 | PASSWORD_HASHERS = ["django.contrib.auth.hashers.MD5PasswordHasher"]
22 |
23 | # EMAIL
24 | # ------------------------------------------------------------------------------
25 | # https://docs.djangoproject.com/en/dev/ref/settings/#email-backend
26 | EMAIL_BACKEND = "django.core.mail.backends.locmem.EmailBackend"
27 |
28 | # DEBUGGING FOR TEMPLATES
29 | # ------------------------------------------------------------------------------
30 | TEMPLATES[0]["OPTIONS"]["debug"] = True # type: ignore # noqa F405
31 |
32 | # Your stuff...
33 | # ------------------------------------------------------------------------------
34 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/{{cookiecutter.project_slug}}/users/tests/test_forms.py:
--------------------------------------------------------------------------------
1 | """
2 | Module for all Form Tests.
3 | """
4 | from django.utils.translation import gettext_lazy as _
5 |
6 | from {{ cookiecutter.project_slug }}.users.forms import UserAdminCreationForm
7 | from {{ cookiecutter.project_slug }}.users.models import User
8 |
9 |
10 | class TestUserAdminCreationForm:
11 | """
12 | Test class for all tests related to the UserAdminCreationForm
13 | """
14 |
15 | def test_username_validation_error_msg(self, user: User):
16 | """
17 | Tests UserAdminCreation Form's unique validator functions correctly by testing:
18 | 1) A new user with an existing username cannot be added.
19 | 2) Only 1 error is raised by the UserCreation Form
20 | 3) The desired error message is raised
21 | """
22 |
23 | # The user already exists,
24 | # hence cannot be created.
25 | form = UserAdminCreationForm(
26 | {
27 | "username": user.username,
28 | "password1": user.password,
29 | "password2": user.password,
30 | }
31 | )
32 |
33 | assert not form.is_valid()
34 | assert len(form.errors) == 1
35 | assert "username" in form.errors
36 | assert form.errors["username"][0] == _("This username has already been taken.")
37 |
--------------------------------------------------------------------------------
/.github/workflows/issue-manager.yml:
--------------------------------------------------------------------------------
1 | # Automatically close issues or pull requests that have a label, after a custom delay, if no one replies.
2 | # https://github.com/tiangolo/issue-manager
3 | name: Issue Manager
4 |
5 | on:
6 | schedule:
7 | - cron: "12 0 * * *"
8 | issue_comment:
9 | types:
10 | - created
11 | issues:
12 | types:
13 | - labeled
14 | pull_request_target:
15 | types:
16 | - labeled
17 | workflow_dispatch:
18 |
19 | jobs:
20 | issue-manager:
21 | # Disables this workflow from running in a repository that is not part of the indicated organization/user
22 | if: github.repository_owner == 'cookiecutter'
23 |
24 | runs-on: ubuntu-latest
25 | steps:
26 | - uses: tiangolo/issue-manager@0.4.0
27 | with:
28 | token: ${{ secrets.GITHUB_TOKEN }}
29 | config: >
30 | {
31 | "answered": {
32 | "message": "Assuming the question was answered, this will be automatically closed now."
33 | },
34 | "solved": {
35 | "message": "Assuming the original issue was solved, it will be automatically closed now."
36 | },
37 | "waiting": {
38 | "message": "Automatically closing after waiting for additional info. To re-open, please provide the additional information requested."
39 | }
40 | }
41 |
--------------------------------------------------------------------------------
/.github/CONTRIBUTORS-template.md:
--------------------------------------------------------------------------------
1 | # Contributors
2 |
3 | ## Core Developers
4 |
5 | These contributors have commit flags for the repository, and are able to
6 | accept and merge pull requests.
7 |
8 | (HTML table removed during extraction: columns Name, Github and Twitter, one row per entry in `core_contributors`.)
25 | *Audrey is also the creator of Cookiecutter. Audrey and Daniel are on
26 | the Cookiecutter core team.*
27 |
28 | ## Other Contributors
29 |
30 | Listed in alphabetical order.
31 |
32 | (HTML table removed during extraction: columns Name, Github and Twitter, one row per entry in `other_contributors`.)
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/config/asgi.py:
--------------------------------------------------------------------------------
1 | """
2 | ASGI config for {{ cookiecutter.project_name }} project.
3 |
4 | It exposes the ASGI callable as a module-level variable named ``application``.
5 |
6 | For more information on this file, see
7 | https://docs.djangoproject.com/en/dev/howto/deployment/asgi/
8 |
9 | """
10 | import os
11 | import sys
12 | from pathlib import Path
13 |
14 | from django.core.asgi import get_asgi_application
15 |
16 | # This allows easy placement of apps within the interior
17 | # {{ cookiecutter.project_slug }} directory.
18 | ROOT_DIR = Path(__file__).resolve(strict=True).parent.parent
19 | sys.path.append(str(ROOT_DIR / "{{ cookiecutter.project_slug }}"))
20 |
21 | # If DJANGO_SETTINGS_MODULE is unset, default to the local settings
22 | os.environ.setdefault("DJANGO_SETTINGS_MODULE", "config.settings.local")
23 |
24 | # This application object is used by any ASGI server configured to use this file.
25 | django_application = get_asgi_application()
26 | # Apply ASGI middleware here.
27 | # from helloworld.asgi import HelloWorldApplication
28 | # application = HelloWorldApplication(application)
29 |
30 | # Import websocket application here, so apps from django_application are loaded first
31 | from config.websocket import websocket_application # noqa isort:skip
32 |
33 |
34 | async def application(scope, receive, send):
35 | if scope["type"] == "http":
36 | await django_application(scope, receive, send)
37 | elif scope["type"] == "websocket":
38 | await websocket_application(scope, receive, send)
39 | else:
40 | raise NotImplementedError(f"Unknown scope type {scope['type']}")
41 |
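A minimal sketch of the ``config/websocket.py`` handler imported above (illustrative only, assuming a plain ASGI coroutine; the generated project ships its own version):

    # config/websocket.py (illustrative sketch, not the template's actual file)
    async def websocket_application(scope, receive, send):
        # Echo-style handler: accept the connection, then answer "ping" with "pong!".
        while True:
            event = await receive()
            if event["type"] == "websocket.connect":
                await send({"type": "websocket.accept"})
            if event["type"] == "websocket.disconnect":
                break
            if event["type"] == "websocket.receive" and event.get("text") == "ping":
                await send({"type": "websocket.send", "text": "pong!"})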
--------------------------------------------------------------------------------
/.github/workflows/pre-commit-autoupdate.yml:
--------------------------------------------------------------------------------
1 | # Run pre-commit autoupdate every day at midnight
2 | # and create a pull request if any changes
3 |
4 | name: Pre-commit auto-update
5 |
6 | on:
7 | schedule:
8 | - cron: "15 2 * * *"
9 | workflow_dispatch: # to trigger manually
10 |
11 | permissions:
12 | contents: read
13 |
14 | jobs:
15 | auto-update:
16 | # Disables this workflow from running in a repository that is not part of the indicated organization/user
17 | if: github.repository_owner == 'cookiecutter'
18 | permissions:
19 | contents: write # for peter-evans/create-pull-request to create branch
20 | pull-requests: write # for peter-evans/create-pull-request to create a PR
21 |
22 | runs-on: ubuntu-latest
23 | steps:
24 | - uses: actions/checkout@v3
25 | - uses: actions/setup-python@v4
26 | with:
27 | python-version: "3.9"
28 |
29 | - name: Install pre-commit
30 | run: pip install pre-commit
31 |
32 | - name: Autoupdate template
33 | run: pre-commit autoupdate
34 |
35 | - name: Autoupdate generated projects
36 | working-directory: "{{cookiecutter.project_slug}}"
37 | run: pre-commit autoupdate
38 |
39 | - name: Create Pull Request
40 | uses: peter-evans/create-pull-request@v4
41 | with:
42 | token: ${{ secrets.GITHUB_TOKEN }}
43 | branch: update/pre-commit-autoupdate
44 | title: Auto-update pre-commit hooks
45 | commit-message: Auto-update pre-commit hooks
46 | body: Update versions of tools in pre-commit configs to latest version
47 | labels: update
48 |
--------------------------------------------------------------------------------
/CONTRIBUTING.md:
--------------------------------------------------------------------------------
1 | # How to Contribute
2 |
3 | Always happy to get issues identified and pull requests!
4 |
5 | ## Getting your pull request merged in
6 |
7 | 1. Keep it small. The smaller the pull request, the more likely we are to accept.
8 | 2. Pull requests that fix a current issue get priority for review.
9 |
10 | ## Testing
11 |
12 | ### Installation
13 |
14 | Please install [tox](https://tox.readthedocs.io/en/latest/), which is a generic virtualenv management and test command line tool.
15 |
16 | [tox](https://tox.readthedocs.io/en/latest/) is available for download from [PyPI](https://pypi.python.org/pypi) via [pip](https://pypi.python.org/pypi/pip/):
17 |
18 | $ pip install tox
19 |
20 | It will automatically create a fresh virtual environment and install our test dependencies,
21 | such as [pytest-cookies](https://pypi.python.org/pypi/pytest-cookies/) and [flake8](https://pypi.python.org/pypi/flake8/).
22 |
23 | ### Run the Tests
24 |
25 | Tox uses pytest under the hood, hence it supports the same syntax for selecting tests.
26 |
27 | For further information please consult the [pytest usage docs](https://pytest.org/latest/usage.html#specifying-tests-selecting-tests).
28 |
29 | To run all tests using the various versions of Python in the virtualenvs defined in tox.ini, just run tox:
30 |
31 | $ tox
32 |
33 | It is possible to test with a specific version of python. To do this, the command
34 | is:
35 |
36 | $ tox -e py39
37 |
38 | This will run pytest with the python3.9 interpreter, for example.
39 |
40 | To run a particular test with tox against your current Python version:
41 |
42 | $ tox -e py -- -k test_default_configuration
43 |
--------------------------------------------------------------------------------
/docs/faq.rst:
--------------------------------------------------------------------------------
1 | FAQ
2 | ===
3 |
4 | .. index:: FAQ, 12-Factor App
5 |
6 | Why is there a django.contrib.sites directory in Cookiecutter Django?
7 | ---------------------------------------------------------------------
8 |
9 | It is there to add a migration so you don't have to manually change the ``sites.Site`` record from ``example.com`` to whatever your domain is. Instead, your ``{{cookiecutter.domain_name}}`` and ``{{cookiecutter.project_name}}`` values are placed by **Cookiecutter** in the domain and name fields, respectively.
10 |
11 | See `0003_set_site_domain_and_name.py`_.
12 |
13 | .. _`0003_set_site_domain_and_name.py`: https://github.com/cookiecutter/cookiecutter-django/blob/master/%7B%7Bcookiecutter.project_slug%7D%7D/%7B%7Bcookiecutter.project_slug%7D%7D/contrib/sites/migrations/0003_set_site_domain_and_name.py
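For illustration, a data migration along these lines (a minimal sketch assuming ``SITE_ID = 1`` and placeholder domain/name values, not the project's actual file) would update the record::

    from django.db import migrations


    def _update_site(apps, schema_editor):
        """Point the default Site at the generated domain and project name."""
        Site = apps.get_model("sites", "Site")
        Site.objects.update_or_create(
            id=1,  # assumes SITE_ID = 1
            defaults={"domain": "example.com", "name": "My Awesome Project"},
        )


    class Migration(migrations.Migration):
        dependencies = [("sites", "0002_alter_domain_unique")]
        operations = [migrations.RunPython(_update_site, migrations.RunPython.noop)]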
14 |
15 |
16 | Why aren't you using just one configuration file (12-Factor App)
17 | ----------------------------------------------------------------------
18 |
19 | TODO
20 | .. TODO
21 |
22 | Why doesn't this follow the layout from Two Scoops of Django?
23 | -------------------------------------------------------------
24 |
25 | You may notice that some elements of this project do not exactly match what we describe in chapter 3 of `Two Scoops of Django 1.11`_. The reason for that is this project, amongst other things, serves as a test bed for trying out new ideas and concepts. Sometimes they work, sometimes they don't, but the end result is that it won't necessarily match precisely what is described in the book I co-authored.
26 |
27 | .. _Two Scoops of Django 1.11: https://www.feldroy.com/collections/django/products/two-scoops-of-django-1-11
28 |
--------------------------------------------------------------------------------
/tests/test_docker.sh:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 | # this is a very simple script that tests the docker configuration for cookiecutter-django
3 | # it is meant to be run from the root directory of the repository, eg:
4 | # sh tests/test_docker.sh
5 |
6 | set -o errexit
7 | set -x
8 |
9 | # create a cache directory
10 | mkdir -p .cache/docker
11 | cd .cache/docker
12 |
13 | # create the project using the default settings in cookiecutter.json
14 | cookiecutter ../../ --no-input --overwrite-if-exists use_docker=y "$@"
15 | cd my_awesome_project
16 |
17 | # Lint by running pre-commit on all files
18 | # Needs a git repo to find the project root
19 | # We don't have git inside Docker, so run it outside
20 | git init
21 | git add .
22 | pre-commit run --show-diff-on-failure -a
23 |
24 | # make sure all images build
25 | docker-compose -f local.yml build
26 |
27 | # run the project's type checks
28 | docker-compose -f local.yml run django mypy my_awesome_project
29 |
30 | # run the project's tests
31 | docker-compose -f local.yml run django pytest
32 |
33 | # return non-zero status code if there are migrations that have not been created
34 | docker-compose -f local.yml run django python manage.py makemigrations --dry-run --check || { echo "ERROR: there were changes in the models, but migrations listed above have not been created and are not saved in version control"; exit 1; }
35 |
36 | # Test support for translations
37 | docker-compose -f local.yml run django python manage.py makemessages --all
38 |
39 | # Make sure the check doesn't raise any warnings
40 | docker-compose -f local.yml run django python manage.py check --fail-level WARNING
41 |
42 | # Generate the HTML for the documentation
43 | docker-compose -f local.yml run docs make html
44 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/.travis.yml:
--------------------------------------------------------------------------------
1 | dist: focal
2 |
3 | language: python
4 | python:
5 | - "3.9"
6 |
7 | services:
8 | - {% if cookiecutter.use_docker == 'y' %}docker{% else %}postgresql{% endif %}
9 | jobs:
10 | include:
11 | - name: "Linter"
12 | before_script:
13 | - pip install -q flake8
14 | script:
15 | - "flake8"
16 |
17 | - name: "Django Test"
18 | {%- if cookiecutter.use_docker == 'y' %}
19 | before_script:
20 | - docker-compose -v
21 | - docker -v
22 | - docker-compose -f local.yml build
23 | # Ensure celerybeat does not crash due to non-existent tables
24 | - docker-compose -f local.yml run --rm django python manage.py migrate
25 | - docker-compose -f local.yml up -d
26 | script:
27 | - "docker-compose -f local.yml run django pytest"
28 | after_failure:
29 | - docker-compose -f local.yml logs
30 | {%- else %}
31 | before_install:
32 | - sudo apt-get update -qq
33 | - sudo apt-get install -qq build-essential gettext python-dev zlib1g-dev libpq-dev xvfb
34 | - sudo apt-get install -qq libjpeg8-dev libfreetype6-dev libwebp-dev
35 | - sudo apt-get install -qq graphviz-dev python-setuptools python3-dev python-virtualenv python-pip
36 | - sudo apt-get install -qq firefox automake libtool libreadline6 libreadline6-dev libreadline-dev
37 | - sudo apt-get install -qq libsqlite3-dev libxml2 libxml2-dev libssl-dev libbz2-dev wget curl llvm
38 | language: python
39 | python:
40 | - "3.9"
41 | install:
42 | - pip install -r requirements/local.txt
43 | script:
44 | - "pytest"
45 | {%- endif %}
46 |
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE/bug.md:
--------------------------------------------------------------------------------
1 | ---
2 | name: Bug Report
3 | about: Report a bug
4 | labels: bug
5 | ---
6 |
7 | ## What happened?
8 |
9 | ## What should've happened instead?
10 |
11 | ## Additional details
12 |
13 |
14 |
15 | * Host system configuration:
16 | * Version of cookiecutter CLI (get it with `cookiecutter --version`):
17 | * OS name and version:
18 |
19 | On Linux, run
20 | ```bash
21 | lsb_release -a 2> /dev/null || cat /etc/redhat-release 2> /dev/null || cat /etc/*-release 2> /dev/null || cat /etc/issue 2> /dev/null
22 | ```
23 |
24 | On MacOs, run
25 | ```bash
26 | sw_vers
27 | ```
28 |
29 | On Windows, via CMD, run
30 | ```
31 | systeminfo | findstr /B /C:"OS Name" /C:"OS Version"
32 | ```
33 |
34 |
35 | ```bash
36 | # Insert here the OS name and version
37 |
38 | ```
39 |
40 | * Python version, run `python3 -V`:
41 | * Docker version (if using Docker), run `docker --version`:
42 | * docker-compose version (if using Docker), run `docker-compose --version`:
43 | * ...
44 | * Options selected and/or [replay file](https://cookiecutter.readthedocs.io/en/latest/advanced/replay.html):
45 | On Linux and MacOS: `cat ${HOME}/.cookiecutter_replay/cookiecutter-django.json`
46 | (Please, take care to remove sensitive information)
47 | ```json
48 | # Insert here the replay file content
49 | ```
50 |
51 | Logs:
52 |
53 |
57 |
58 |
59 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/docs/howto.rst:
--------------------------------------------------------------------------------
1 | How To - Project Documentation
2 | ======================================================================
3 |
4 | Get Started
5 | ----------------------------------------------------------------------
6 |
7 | Documentation can be written as rst files in `{{cookiecutter.project_slug}}/docs`.
8 |
9 | {% if cookiecutter.use_docker == 'n' %}
10 | To build and serve docs, use the command::
11 |
12 | make livehtml
13 |
14 | from inside the `{{cookiecutter.project_slug}}/docs` directory.
15 | {% else %}
16 | To build and serve docs, use the commands::
17 |
18 | docker-compose -f local.yml up docs
19 |
20 | {% endif %}
21 |
22 | Changes to files in `docs/_source` will be picked up and reloaded automatically.
23 |
24 | `Sphinx `_ is the tool used to build documentation.
25 |
26 | Docstrings to Documentation
27 | ----------------------------------------------------------------------
28 |
29 | The sphinx extension `apidoc `_ is used to automatically document code using signatures and docstrings.
30 |
31 | Numpy or Google style docstrings will be picked up from project files and available for documentation. See the `Napoleon `_ extension for details.
32 |
33 | For an in-use example, see the `page source <_sources/users.rst.txt>`_ for :ref:`users`.
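As an illustration, a Google style docstring like the one below (a hypothetical helper, not part of the generated project) is picked up by Napoleon and rendered with formatted argument and return sections::

    def full_name(first: str, last: str) -> str:
        """Build a display name from its parts.

        Args:
            first: Given name.
            last: Family name.

        Returns:
            The two parts joined by a single space.
        """
        return f"{first} {last}"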
34 |
35 | To compile all docstrings automatically into documentation source files, use the command:
36 | ::
37 |
38 | make apidocs
39 |
40 | {% if cookiecutter.use_docker == 'y' %}
41 | This can be done in the docker container:
42 | ::
43 |
44 | docker run --rm docs make apidocs
45 | {% endif -%}
46 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/config/wsgi.py:
--------------------------------------------------------------------------------
1 | """
2 | WSGI config for {{ cookiecutter.project_name }} project.
3 |
4 | This module contains the WSGI application used by Django's development server
5 | and any production WSGI deployments. It should expose a module-level variable
6 | named ``application``. Django's ``runserver`` and ``runfcgi`` commands discover
7 | this application via the ``WSGI_APPLICATION`` setting.
8 |
9 | Usually you will have the standard Django WSGI application here, but it also
10 | might make sense to replace the whole Django WSGI application with a custom one
11 | that later delegates to the Django one. For example, you could introduce WSGI
12 | middleware here, or combine a Django application with an application of another
13 | framework.
14 |
15 | """
16 | import os
17 | import sys
18 | from pathlib import Path
19 |
20 | from django.core.wsgi import get_wsgi_application
21 |
22 | # This allows easy placement of apps within the interior
23 | # {{ cookiecutter.project_slug }} directory.
24 | ROOT_DIR = Path(__file__).resolve(strict=True).parent.parent
25 | sys.path.append(str(ROOT_DIR / "{{ cookiecutter.project_slug }}"))
26 | # We defer to a DJANGO_SETTINGS_MODULE already in the environment. This breaks
27 | # if running multiple sites in the same mod_wsgi process. To fix this, use
28 | # mod_wsgi daemon mode with each site in its own daemon process, or use
29 | # os.environ["DJANGO_SETTINGS_MODULE"] = "config.settings.production"
30 | os.environ.setdefault("DJANGO_SETTINGS_MODULE", "config.settings.production")
31 |
32 | # This application object is used by any WSGI server configured to use this
33 | # file. This includes Django's development server, if the WSGI_APPLICATION
34 | # setting points here.
35 | application = get_wsgi_application()
36 | # Apply WSGI middleware here.
37 | # from helloworld.wsgi import HelloWorldApplication
38 | # application = HelloWorldApplication(application)
39 |
--------------------------------------------------------------------------------
/docs/document.rst:
--------------------------------------------------------------------------------
1 | .. _document:
2 |
3 | Document
4 | =========
5 |
6 | This project uses the Sphinx_ documentation generator.
7 |
8 | After you have set up to `develop locally`_, run the following command from the project directory to build and serve HTML documentation: ::
9 |
10 | $ make -C docs livehtml
11 |
12 | If you set up your project to `develop locally with docker`_, run the following command: ::
13 |
14 | $ docker-compose -f local.yml up docs
15 |
16 | Navigate to port 9000 on your host to see the documentation. This will be opened automatically at `localhost`_ for local, non-docker development.
17 |
18 | Note: using Docker for documentation sets up a temporary SQLite file by setting the environment variable ``DATABASE_URL=sqlite:///readthedocs.db`` in ``docs/conf.py`` to avoid a dependency on PostgreSQL.
19 |
20 | Generate API documentation
21 | ----------------------------
22 |
23 | Edit the ``docs`` files and project application docstrings to create your documentation.
24 |
25 | Sphinx can automatically include class and function signatures and docstrings in generated documentation.
26 | See the generated project documentation for more examples.
27 |
28 | Setting up ReadTheDocs
29 | ----------------------
30 |
31 | To set up your documentation on `ReadTheDocs`_, you must:
32 |
33 | 1. Go to `ReadTheDocs`_ and login/create an account
34 | 2. Add your GitHub repository
35 | 3. Trigger a build
36 |
37 | Additionally, you can auto-build Pull Request previews, but `you must enable it`_.
38 |
39 | .. _localhost: http://localhost:9000/
40 | .. _Sphinx: https://www.sphinx-doc.org/en/master/index.html
41 | .. _develop locally: ./developing-locally.html
42 | .. _develop locally with docker: ./developing-locally-docker.html
43 | .. _ReadTheDocs: https://readthedocs.org/
44 | .. _you must enable it: https://docs.readthedocs.io/en/latest/guides/autobuild-docs-for-pull-requests.html#autobuild-documentation-for-pull-requests
45 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/compose/production/mysql/maintenance/restore:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 |
4 | ### Restore database from a backup.
5 | ###
6 | ### Parameters:
7 | ### <1> filename of an existing backup.
8 | ###
9 | ### Usage:
10 | ### $ docker-compose -f .yml (exec |run --rm) mysql restore <1>
11 |
12 |
13 | set -o errexit
14 | set -o pipefail
15 | set -o nounset
16 |
17 |
18 | working_dir="$(dirname ${0})"
19 | source "${working_dir}/_sourced/constants.sh"
20 | source "${working_dir}/_sourced/messages.sh"
21 |
22 |
23 | if [[ -z ${1+x} ]]; then
24 | message_error "Backup filename is not specified yet it is a required parameter. Make sure you provide one and try again."
25 | exit 1
26 | fi
27 | backup_filename="${BACKUP_DIR_PATH}/${1}"
28 | if [[ ! -f "${backup_filename}" ]]; then
29 | message_error "No backup with the specified filename found. Check out the 'backups' maintenance script output to see if there is one and try again."
30 | exit 1
31 | fi
32 |
33 | message_welcome "Restoring the '${MYSQL_DATABASE}' database from the '${backup_filename}' backup..."
34 |
35 | if [[ "${MYSQL_USER}" == "root" ]]; then
36 | message_error "Restoring as 'root' user is not supported. Assign 'MYSQL_USER' env with another one and try again."
37 | exit 1
38 | fi
39 |
40 | message_info "Dropping the database..."
41 | echo "DROP DATABASE IF EXISTS ${MYSQL_DATABASE};" | mysql --user=${MYSQL_USER} --password=${MYSQL_PASSWORD}
42 |
43 | message_info "Creating a new database..."
44 | echo "CREATE DATABASE IF NOT EXISTS ${MYSQL_DATABASE};" | mysql --user=${MYSQL_USER} --password=${MYSQL_PASSWORD}
45 |
46 | message_info "Applying the backup to the new database..."
47 | gunzip -c "${backup_filename}" | mysql --user=${MYSQL_USER} --password=${MYSQL_PASSWORD} ${MYSQL_DATABASE}
48 |
49 | message_success "The '${MYSQL_DATABASE}' database has been restored from the '${backup_filename}' backup."
50 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/compose/production/postgres/maintenance/restore:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 |
4 | ### Restore database from a backup.
5 | ###
6 | ### Parameters:
7 | ### <1> filename of an existing backup.
8 | ###
9 | ### Usage:
10 | ### $ docker-compose -f .yml (exec |run --rm) postgres restore <1>
11 |
12 |
13 | set -o errexit
14 | set -o pipefail
15 | set -o nounset
16 |
17 |
18 | working_dir="$(dirname ${0})"
19 | source "${working_dir}/_sourced/constants.sh"
20 | source "${working_dir}/_sourced/messages.sh"
21 |
22 |
23 | if [[ -z ${1+x} ]]; then
24 | message_error "Backup filename is not specified yet it is a required parameter. Make sure you provide one and try again."
25 | exit 1
26 | fi
27 | backup_filename="${BACKUP_DIR_PATH}/${1}"
28 | if [[ ! -f "${backup_filename}" ]]; then
29 | message_error "No backup with the specified filename found. Check out the 'backups' maintenance script output to see if there is one and try again."
30 | exit 1
31 | fi
32 |
33 | message_welcome "Restoring the '${POSTGRES_DB}' database from the '${backup_filename}' backup..."
34 |
35 | if [[ "${POSTGRES_USER}" == "postgres" ]]; then
36 | message_error "Restoring as 'postgres' user is not supported. Assign 'POSTGRES_USER' env with another one and try again."
37 | exit 1
38 | fi
39 |
40 | export PGHOST="${POSTGRES_HOST}"
41 | export PGPORT="${POSTGRES_PORT}"
42 | export PGUSER="${POSTGRES_USER}"
43 | export PGPASSWORD="${POSTGRES_PASSWORD}"
44 | export PGDATABASE="${POSTGRES_DB}"
45 |
46 | message_info "Dropping the database..."
47 | dropdb "${PGDATABASE}"
48 |
49 | message_info "Creating a new database..."
50 | createdb --owner="${POSTGRES_USER}"
51 |
52 | message_info "Applying the backup to the new database..."
53 | gunzip -c "${backup_filename}" | psql "${POSTGRES_DB}"
54 |
55 | message_success "The '${POSTGRES_DB}' database has been restored from the '${backup_filename}' backup."
56 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/utility/install_python_dependencies.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | WORK_DIR="$(dirname "$0")"
4 | PROJECT_DIR="$(dirname "$WORK_DIR")"
5 |
6 | pip --version >/dev/null 2>&1 || {
7 | echo >&2 -e "\npip is required but it's not installed."
8 | echo >&2 -e "You can install it by running the following command:\n"
9 | echo >&2 "wget https://bootstrap.pypa.io/get-pip.py --output-document=get-pip.py; chmod +x get-pip.py; sudo -H python3 get-pip.py"
10 | echo >&2 -e "\n"
11 | echo >&2 -e "\nFor more information, see pip documentation: https://pip.pypa.io/en/latest/"
12 | exit 1;
13 | }
14 |
15 | virtualenv --version >/dev/null 2>&1 || {
16 | echo >&2 -e "\nvirtualenv is required but it's not installed."
17 | echo >&2 -e "You can install it by running the following command:\n"
18 | echo >&2 "sudo -H pip3 install virtualenv"
19 | echo >&2 -e "\n"
20 | echo >&2 -e "\nFor more information, see virtualenv documentation: https://virtualenv.pypa.io/en/latest/"
21 | exit 1;
22 | }
23 |
24 | if [ -z "$VIRTUAL_ENV" ]; then
25 | echo >&2 -e "\nYou need to activate a virtualenv first"
26 | echo >&2 -e 'If you do not have a virtualenv created, run the following command to create and automatically activate a new virtualenv named "venv" in the current folder:\n'
27 | echo >&2 -e "virtualenv venv --python=\`which python3\`"
28 | echo >&2 -e "\nTo leave/disable the currently active virtualenv, run the following command:\n"
29 | echo >&2 "deactivate"
30 | echo >&2 -e "\nTo activate the virtualenv again, run the following command:\n"
31 | echo >&2 "source venv/bin/activate"
32 | echo >&2 -e "\nFor more information, see virtualenv documentation: https://virtualenv.pypa.io/en/latest/"
33 | echo >&2 -e "\n"
34 | exit 1;
35 | else
36 | pip install -r $PROJECT_DIR/requirements/local.txt
37 | {%- if cookiecutter.use_heroku == "y" -%}
38 | pip install -r $PROJECT_DIR/requirements.txt
39 | {%- endif %}
40 | fi
41 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/{{cookiecutter.project_slug}}/templates/account/login.html:
--------------------------------------------------------------------------------
1 | {% raw %}{% extends "account/base.html" %}
2 |
3 | {% load i18n %}
4 | {% load account socialaccount %}
5 | {% load crispy_forms_tags %}
6 |
7 | {% block head_title %}{% translate "Sign In" %}{% endblock %}
8 |
9 | {% block inner %}
10 |
11 |
{% translate "Sign In" %}
12 |
13 | {% get_providers as socialaccount_providers %}
14 |
15 | {% if socialaccount_providers %}
16 |
17 | {% translate "Please sign in with one of your existing third party accounts:" %}
18 | {% if ACCOUNT_ALLOW_REGISTRATION %}
19 | {% blocktranslate trimmed %}
20 | Or, sign up
21 | for a {{ site_name }} account and sign in below:
22 | {% endblocktranslate %}
23 | {% endif %}
24 |
25 |
26 |
27 |
28 |
29 | {% include "socialaccount/snippets/provider_list.html" with process="login" %}
30 |
46 |
47 |
48 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/.gitlab-ci.yml:
--------------------------------------------------------------------------------
1 | stages:
2 | - lint
3 | - test
4 |
5 | variables:
6 | {% if cookiecutter.database_engine == 'postgresql' -%}
7 | POSTGRES_USER: '{{ cookiecutter.project_slug }}'
8 | POSTGRES_PASSWORD: ''
9 | POSTGRES_DB: 'test_{{ cookiecutter.project_slug }}'
10 | POSTGRES_HOST_AUTH_METHOD: trust
11 | {% elif cookiecutter.database_engine == 'mysql' -%}
12 | MYSQL_USER: '{{ cookiecutter.project_slug }}'
13 | MYSQL_PASSWORD: ''
14 | MYSQL_DATABASE: 'test_{{ cookiecutter.project_slug }}'
15 | MYSQL_ROOT_PASSWORD: ''
16 | MYSQL_ALLOW_EMPTY_PASSWORD: 'yes'
17 | {% endif -%}
18 | {% if cookiecutter.use_celery == 'y' -%}
19 | CELERY_BROKER_URL: 'redis://redis:6379/0'
20 | {%- endif %}
21 |
22 | flake8:
23 | stage: lint
24 | image: python:3.9-alpine
25 | before_script:
26 | - pip install -q flake8
27 | script:
28 | - flake8
29 |
30 | pytest:
31 | stage: test
32 | {% if cookiecutter.use_docker == 'y' -%}
33 | image: docker/compose:1.29.2
34 | tags:
35 | - docker
36 | services:
37 | - docker:dind
38 | before_script:
39 | - docker-compose -f local.yml build
40 | # Ensure celerybeat does not crash due to non-existent tables
41 | - docker-compose -f local.yml run --rm django python manage.py migrate
42 | - docker-compose -f local.yml up -d
43 | script:
44 | - docker-compose -f local.yml run django pytest
45 | {%- else %}
46 | image: python:3.9
47 | tags:
48 | - python
49 | services:
50 | {%- if cookiecutter.database_engine == 'postgresql' %}
51 | - postgres:{{ cookiecutter.database_version.split('@')[1] }}
52 | {%- elif cookiecutter.database_engine == 'mysql' %}
53 | - mysql:{{ cookiecutter.database_version.split('@')[1] }}
54 | {%- endif %}
55 | variables:
56 | {%- if cookiecutter.database_engine == 'postgresql' %}
57 | DATABASE_URL: pgsql://$POSTGRES_USER:$POSTGRES_PASSWORD@postgres/$POSTGRES_DB
58 | {%- elif cookiecutter.database_engine == 'mysql' %}
59 | DATABASE_URL: mysql://$MYSQL_USER:$MYSQL_PASSWORD@mysql/$MYSQL_DATABASE
60 | {%- endif %}
61 |
62 | before_script:
63 | - pip install -r requirements/local.txt
64 |
65 | script:
66 | - pytest
67 | {%- endif %}
68 |
69 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/.idea/workspace.xml:
--------------------------------------------------------------------------------
3 | {%- if cookiecutter.use_docker == 'n' %}
8 | {%- elif cookiecutter.use_celery == 'y' %}
13 | {%- else %}
18 | {%- endif %}
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/merge_production_dotenvs_in_dotenv.py:
--------------------------------------------------------------------------------
1 | import os
2 | from collections.abc import Sequence
3 | from pathlib import Path
4 |
5 | import pytest
6 |
7 | ROOT_DIR_PATH = Path(__file__).parent.resolve()
8 | PRODUCTION_DOTENVS_DIR_PATH = ROOT_DIR_PATH / ".envs" / ".production"
9 | PRODUCTION_DOTENV_FILE_PATHS = [
10 | PRODUCTION_DOTENVS_DIR_PATH / ".django",
11 | PRODUCTION_DOTENVS_DIR_PATH / ".postgres",
12 | ]
13 | DOTENV_FILE_PATH = ROOT_DIR_PATH / ".env"
14 |
15 |
16 | def merge(
17 | output_file_path: str, merged_file_paths: Sequence[str], append_linesep: bool = True
18 | ) -> None:
19 | with open(output_file_path, "w") as output_file:
20 | for merged_file_path in merged_file_paths:
21 | with open(merged_file_path) as merged_file:
22 | merged_file_content = merged_file.read()
23 | output_file.write(merged_file_content)
24 | if append_linesep:
25 | output_file.write(os.linesep)
26 |
27 |
28 | def main():
29 | merge(DOTENV_FILE_PATH, PRODUCTION_DOTENV_FILE_PATHS)
30 |
31 |
32 | @pytest.mark.parametrize("merged_file_count", range(3))
33 | @pytest.mark.parametrize("append_linesep", [True, False])
34 | def test_merge(tmpdir_factory, merged_file_count: int, append_linesep: bool):
35 | tmp_dir_path = Path(str(tmpdir_factory.getbasetemp()))
36 |
37 | output_file_path = tmp_dir_path / ".env"
38 |
39 | expected_output_file_content = ""
40 | merged_file_paths = []
41 | for i in range(merged_file_count):
42 | merged_file_ord = i + 1
43 |
44 | merged_filename = f".service{merged_file_ord}"
45 | merged_file_path = tmp_dir_path / merged_filename
46 |
47 | merged_file_content = merged_filename * merged_file_ord
48 |
49 | with open(merged_file_path, "w+") as file:
50 | file.write(merged_file_content)
51 |
52 | expected_output_file_content += merged_file_content
53 | if append_linesep:
54 | expected_output_file_content += os.linesep
55 |
56 | merged_file_paths.append(merged_file_path)
57 |
58 | merge(output_file_path, merged_file_paths, append_linesep)
59 |
60 | with open(output_file_path) as output_file:
61 | actual_output_file_content = output_file.read()
62 |
63 | assert actual_output_file_content == expected_output_file_content
64 |
65 |
66 | if __name__ == "__main__":
67 | main()
68 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/compose/production/traefik/traefik.yml:
--------------------------------------------------------------------------------
1 | log:
2 | level: INFO
3 |
4 | entryPoints:
5 | web:
6 | # http
7 | address: ":80"
8 | http:
9 | # https://docs.traefik.io/routing/entrypoints/#entrypoint
10 | redirections:
11 | entryPoint:
12 | to: web-secure
13 |
14 | web-secure:
15 | # https
16 | address: ":443"
17 | {%- if cookiecutter.use_celery == 'y' %}
18 |
19 | flower:
20 | address: ":5555"
21 | {%- endif %}
22 |
23 | certificatesResolvers:
24 | letsencrypt:
25 | # https://docs.traefik.io/master/https/acme/#lets-encrypt
26 | acme:
27 | email: "{{ cookiecutter.email }}"
28 | storage: /etc/traefik/acme/acme.json
29 | # https://docs.traefik.io/master/https/acme/#httpchallenge
30 | httpChallenge:
31 | entryPoint: web
32 |
33 | http:
34 | routers:
35 | web-secure-router:
36 | {%- if cookiecutter.domain_name.count('.') == 1 %}
37 | rule: "Host(`{{ cookiecutter.domain_name }}`) || Host(`www.{{ cookiecutter.domain_name }}`)"
38 | {%- else %}
39 | rule: "Host(`{{ cookiecutter.domain_name }}`)"
40 | {%- endif %}
41 | entryPoints:
42 | - web-secure
43 | middlewares:
44 | - csrf
45 | service: django
46 | tls:
47 | # https://docs.traefik.io/master/routing/routers/#certresolver
48 | certResolver: letsencrypt
49 | {%- if cookiecutter.use_celery == 'y' %}
50 |
51 | flower-secure-router:
52 | rule: "Host(`{{ cookiecutter.domain_name }}`)"
53 | entryPoints:
54 | - flower
55 | service: flower
56 | tls:
57 | # https://docs.traefik.io/master/routing/routers/#certresolver
58 | certResolver: letsencrypt
59 | {%- endif %}
60 |
61 | middlewares:
62 | csrf:
63 | # https://docs.traefik.io/master/middlewares/headers/#hostsproxyheaders
64 | # https://docs.djangoproject.com/en/dev/ref/csrf/#ajax
65 | headers:
66 | hostsProxyHeaders: ["X-CSRFToken"]
67 |
68 | services:
69 | django:
70 | loadBalancer:
71 | servers:
72 | - url: http://django:5000
73 | {%- if cookiecutter.use_celery == 'y' %}
74 |
75 | flower:
76 | loadBalancer:
77 | servers:
78 | - url: http://flower:5555
79 | {%- endif %}
80 |
81 | providers:
82 | # https://docs.traefik.io/master/providers/file/
83 | file:
84 | filename: /etc/traefik/traefik.yml
85 | watch: true
86 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/requirements/production.txt:
--------------------------------------------------------------------------------
1 | # PRECAUTION: avoid production dependencies that aren't in development
2 |
3 | -r base.txt
4 |
5 | gunicorn==20.1.0 # https://github.com/benoitc/gunicorn
6 | {%- if cookiecutter.database_engine == "postgresql" %}
7 | psycopg2==2.9.3 # https://github.com/psycopg/psycopg2
8 | {%- elif cookiecutter.database_engine == "mysql" %}
9 | mysqlclient==2.1.0 # https://github.com/PyMySQL/mysqlclient
10 | {%- endif %}
11 | {%- if cookiecutter.use_whitenoise == 'n' %}
12 | Collectfast==2.2.0 # https://github.com/antonagestam/collectfast
13 | {%- endif %}
14 | {%- if cookiecutter.use_sentry == "y" %}
15 | sentry-sdk==1.9.9 # https://github.com/getsentry/sentry-python
16 | {%- endif %}
17 | {%- if cookiecutter.use_docker == "n" and cookiecutter.windows == "y" %}
18 | hiredis==2.0.0 # https://github.com/redis/hiredis-py
19 | {%- endif %}
20 |
21 | # Django
22 | # ------------------------------------------------------------------------------
23 | {%- if cookiecutter.cloud_provider == 'AWS' %}
24 | django-storages[boto3]==1.13.1 # https://github.com/jschneier/django-storages
25 | {%- elif cookiecutter.cloud_provider == 'GCP' %}
26 | django-storages[google]==1.13.1 # https://github.com/jschneier/django-storages
27 | {%- endif %}
28 | {%- if cookiecutter.mail_service == 'Mailgun' %}
29 | django-anymail[mailgun]==8.6 # https://github.com/anymail/django-anymail
30 | {%- elif cookiecutter.mail_service == 'Amazon SES' %}
31 | django-anymail[amazon_ses]==8.6 # https://github.com/anymail/django-anymail
32 | {%- elif cookiecutter.mail_service == 'Mailjet' %}
33 | django-anymail[mailjet]==8.6 # https://github.com/anymail/django-anymail
34 | {%- elif cookiecutter.mail_service == 'Mandrill' %}
35 | django-anymail[mandrill]==8.6 # https://github.com/anymail/django-anymail
36 | {%- elif cookiecutter.mail_service == 'Postmark' %}
37 | django-anymail[postmark]==8.6 # https://github.com/anymail/django-anymail
38 | {%- elif cookiecutter.mail_service == 'Sendgrid' %}
39 | django-anymail[sendgrid]==8.6 # https://github.com/anymail/django-anymail
40 | {%- elif cookiecutter.mail_service == 'SendinBlue' %}
41 | django-anymail[sendinblue]==8.6 # https://github.com/anymail/django-anymail
42 | {%- elif cookiecutter.mail_service == 'SparkPost' %}
43 | django-anymail[sparkpost]==8.6 # https://github.com/anymail/django-anymail
44 | {%- elif cookiecutter.mail_service == 'Other SMTP' %}
45 | django-anymail==8.6 # https://github.com/anymail/django-anymail
46 | {%- endif %}
47 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/compose/local/docs/Dockerfile:
--------------------------------------------------------------------------------
1 | ARG PYTHON_VERSION=3.9-slim-bullseye
2 |
3 | # define an alias for the specific Python version used in this file.
4 | FROM python:${PYTHON_VERSION} as python
5 |
6 |
7 | # Python build stage
8 | FROM python as python-build-stage
9 |
10 | ENV PYTHONDONTWRITEBYTECODE 1
11 |
12 | RUN apt-get update && apt-get install --no-install-recommends -y \
13 | # dependencies for building Python packages
14 | build-essential \
15 | {%- if cookiecutter.database_engine == "postgresql" %}
16 | # psycopg2 dependencies
17 | libpq-dev \
18 | {%- elif cookiecutter.database_engine == "mysql" %}
19 | # mysql dependency
20 | default-libmysqlclient-dev \
21 | {%- endif %}
22 | # cleaning up unused files
23 | && apt-get purge -y --auto-remove -o APT::AutoRemove::RecommendsImportant=false \
24 | && rm -rf /var/lib/apt/lists/*
25 |
26 | # Requirements are installed here to ensure they will be cached.
27 | COPY ./requirements /requirements
28 |
29 | # create python dependency wheels
30 | RUN pip wheel --no-cache-dir --wheel-dir /usr/src/app/wheels \
31 | -r /requirements/local.txt -r /requirements/production.txt \
32 | && rm -rf /requirements
33 |
34 |
35 | # Python 'run' stage
36 | FROM python as python-run-stage
37 |
38 | ARG BUILD_ENVIRONMENT
39 | ENV PYTHONUNBUFFERED 1
40 | ENV PYTHONDONTWRITEBYTECODE 1
41 |
42 | RUN apt-get update && apt-get install --no-install-recommends -y \
43 | # To run the Makefile
44 | make \
45 | {%- if cookiecutter.database_engine == "postgresql" %}
46 | # psycopg2 dependencies
47 | libpq-dev \
48 | {%- elif cookiecutter.database_engine == "mysql" %}
49 | # mysql dependency
50 | default-libmysqlclient-dev \
51 | {%- endif %}
52 | # Translations dependencies
53 | gettext \
54 | # Uncomment below lines to enable Sphinx output to latex and pdf
55 | # texlive-latex-recommended \
56 | # texlive-fonts-recommended \
57 | # texlive-latex-extra \
58 | # latexmk \
59 | # cleaning up unused files
60 | && apt-get purge -y --auto-remove -o APT::AutoRemove::RecommendsImportant=false \
61 | && rm -rf /var/lib/apt/lists/*
62 |
63 | # copy python dependency wheels from python-build-stage
64 | COPY --from=python-build-stage /usr/src/app/wheels /wheels
65 |
66 | # use wheels to install python dependencies
67 | RUN pip install --no-cache /wheels/* \
68 | && rm -rf /wheels
69 |
70 | COPY ./compose/local/docs/start /start-docs
71 | RUN sed -i 's/\r$//g' /start-docs
72 | RUN chmod +x /start-docs
73 |
74 | WORKDIR /docs
75 |
--------------------------------------------------------------------------------
/docs/testing.rst:
--------------------------------------------------------------------------------
1 | .. _testing:
2 |
3 | Testing
4 | ========
5 |
6 | We encourage users to build application tests. As a best practice, write them immediately after documenting the application being built and before starting on any coding.
7 |
8 | Pytest
9 | ------
10 |
11 | This project uses Pytest_, a framework for easily building simple and scalable tests.
12 | Once you have set up the project to `develop locally`_, run the following command to make sure the testing environment is ready: ::
13 |
14 | $ pytest
15 |
16 | You will get a readout of the `users` app that has already been set up with tests. If you do not want to run `pytest` against the entire project, you can target a particular app by passing its location: ::
17 |
18 | $ pytest <path-to-app>
19 |
20 | If you set up your project to `develop locally with docker`_, run the following command: ::
21 |
22 | $ docker-compose -f local.yml run --rm django pytest
23 |
24 | Targeting particular apps for testing in ``docker`` follows a similar pattern to the one shown above.
25 |
26 | Coverage
27 | --------
28 |
29 | You should build your tests to provide the highest level of **code coverage**. You can run ``pytest`` with code ``coverage`` by typing the following command: ::
30 |
31 | $ docker-compose -f local.yml run --rm django coverage run -m pytest
32 |
33 | Once the tests are complete, in order to see the code coverage, run the following command: ::
34 |
35 | $ docker-compose -f local.yml run --rm django coverage report
36 |
37 | .. note::
38 |
39 | At the root of the project folder, you will find the `pytest.ini` file. You can use it to customize_ ``pytest`` to your liking.
40 |
41 | There is also the `.coveragerc` file, which configures the ``coverage`` tool. You can find out more about `configuring`_ ``coverage``.
42 |
43 | .. seealso::
44 |
45 | For unit tests, run: ::
46 |
47 | $ python manage.py test
48 |
49 | Since this is a fresh install and there are no tests built using the Python `unittest`_ library yet, you should get feedback saying that no tests were carried out.
50 |
51 | .. _Pytest: https://docs.pytest.org/en/latest/example/simple.html
52 | .. _develop locally: ./developing-locally.html
53 | .. _develop locally with docker: ./developing-locally-docker.html
54 | .. _customize: https://docs.pytest.org/en/latest/customize.html
55 | .. _unittest: https://docs.python.org/3/library/unittest.html#module-unittest
56 | .. _configuring: https://coverage.readthedocs.io/en/v4.5.x/config.html
57 |
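
As an illustration of what such a test can look like, here is a minimal ``pytest-django`` sketch. It only assumes the custom user model generated by the template and the ``pytest-django`` plugin listed in ``requirements/local.txt``; the file name and assertions are illustrative, not part of the generated project: ::

    # tests/test_user_creation.py -- illustrative only, not a generated file.
    import pytest
    from django.contrib.auth import get_user_model


    @pytest.mark.django_db  # grant the test access to the test database
    def test_create_user_sets_usable_password():
        user = get_user_model().objects.create_user(
            username="alice", password="a-long-password"
        )
        assert user.pk is not None
        assert user.check_password("a-long-password")

Run it the same way as above, e.g. ``pytest tests/test_user_creation.py`` locally, or via the ``docker-compose`` invocation shown earlier.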
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/requirements/base.txt:
--------------------------------------------------------------------------------
1 | pytz==2022.2.1 # https://github.com/stub42/pytz
2 | python-slugify==6.1.2 # https://github.com/un33k/python-slugify
3 | Pillow==9.2.0 # https://github.com/python-pillow/Pillow
4 | {%- if cookiecutter.frontend_pipeline == 'Django Compressor' %}
5 | {%- if cookiecutter.windows == 'y' and cookiecutter.use_docker == 'n' %}
6 | rcssmin==1.1.0 --install-option="--without-c-extensions" # https://github.com/ndparker/rcssmin
7 | {%- else %}
8 | rcssmin==1.1.0 # https://github.com/ndparker/rcssmin
9 | {%- endif %}
10 | {%- endif %}
11 | argon2-cffi==21.3.0 # https://github.com/hynek/argon2_cffi
12 | {%- if cookiecutter.use_whitenoise == 'y' %}
13 | whitenoise==6.2.0 # https://github.com/evansd/whitenoise
14 | {%- endif %}
15 | redis==4.3.4 # https://github.com/redis/redis-py
16 | {%- if cookiecutter.use_docker == "y" or cookiecutter.windows == "n" %}
17 | hiredis==2.0.0 # https://github.com/redis/hiredis-py
18 | {%- endif %}
19 | {%- if cookiecutter.use_celery == "y" %}
20 | celery==5.2.7 # pyup: < 6.0 # https://github.com/celery/celery
21 | django-celery-beat==2.3.0 # https://github.com/celery/django-celery-beat
22 | {%- if cookiecutter.use_docker == 'y' %}
23 | flower==1.2.0 # https://github.com/mher/flower
24 | {%- endif %}
25 | {%- endif %}
26 | {%- if cookiecutter.use_async == 'y' %}
27 | uvicorn[standard]==0.18.3 # https://github.com/encode/uvicorn
28 | {%- endif %}
29 |
30 | # Django
31 | # ------------------------------------------------------------------------------
32 | django==3.2.15 # pyup: < 4.0 # https://www.djangoproject.com/
33 | django-environ==0.9.0 # https://github.com/joke2k/django-environ
34 | django-model-utils==4.2.0 # https://github.com/jazzband/django-model-utils
35 | django-allauth==0.51.0 # https://github.com/pennersr/django-allauth
36 | django-crispy-forms==1.14.0 # https://github.com/django-crispy-forms/django-crispy-forms
37 | crispy-bootstrap5==0.6 # https://github.com/django-crispy-forms/crispy-bootstrap5
38 | {%- if cookiecutter.frontend_pipeline == 'Django Compressor' %}
39 | django-compressor==4.1 # https://github.com/django-compressor/django-compressor
40 | {%- endif %}
41 | django-redis==5.2.0 # https://github.com/jazzband/django-redis
42 | {%- if cookiecutter.use_drf == 'y' %}
43 | # Django REST Framework
44 | djangorestframework==3.14.0 # https://github.com/encode/django-rest-framework
45 | django-cors-headers==3.13.0 # https://github.com/adamchainz/django-cors-headers
46 | # DRF-spectacular for api documentation
47 | drf-spectacular==0.24.2 # https://github.com/tfranzel/drf-spectacular
48 | {%- endif %}
49 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/requirements/local.txt:
--------------------------------------------------------------------------------
1 | -r base.txt
2 |
3 | Werkzeug[watchdog]==2.2.2 # https://github.com/pallets/werkzeug
4 | ipdb==0.13.9 # https://github.com/gotcha/ipdb
5 | {%- if cookiecutter.database_engine == "postgresql" %}
6 | {%- if cookiecutter.use_docker == 'y' %}
7 | psycopg2==2.9.3 # https://github.com/psycopg/psycopg2
8 | {%- else %}
9 | psycopg2-binary==2.9.3 # https://github.com/psycopg/psycopg2
10 | {%- endif %}
11 | {%- endif %}
12 | {%- if cookiecutter.database_engine == "mysql" %}
13 | mysqlclient==2.1.0 # https://github.com/PyMySQL/mysqlclient
14 | {%- endif %}
15 | {%- if cookiecutter.use_async == 'y' or cookiecutter.use_celery == 'y' %}
16 | watchfiles==0.17.0 # https://github.com/samuelcolvin/watchfiles
17 | {%- endif %}
18 |
19 | # Testing
20 | # ------------------------------------------------------------------------------
21 | mypy==0.981 # https://github.com/python/mypy
22 | django-stubs==1.12.0 # https://github.com/typeddjango/django-stubs
23 | pytest==7.1.3 # https://github.com/pytest-dev/pytest
24 | pytest-sugar==0.9.5 # https://github.com/Frozenball/pytest-sugar
25 | {%- if cookiecutter.use_drf == "y" %}
26 | djangorestframework-stubs==1.7.0 # https://github.com/typeddjango/djangorestframework-stubs
27 | {%- endif %}
28 |
29 | # Documentation
30 | # ------------------------------------------------------------------------------
31 | sphinx==5.2.3 # https://github.com/sphinx-doc/sphinx
32 | sphinx-autobuild==2021.3.14 # https://github.com/GaretJax/sphinx-autobuild
33 |
34 | # Code quality
35 | # ------------------------------------------------------------------------------
36 | flake8==5.0.4 # https://github.com/PyCQA/flake8
37 | flake8-isort==4.2.0 # https://github.com/gforcada/flake8-isort
38 | coverage==6.5.0 # https://github.com/nedbat/coveragepy
39 | black==22.8.0 # https://github.com/psf/black
40 | pylint-django==2.5.3 # https://github.com/PyCQA/pylint-django
41 | {%- if cookiecutter.use_celery == 'y' %}
42 | pylint-celery==0.3 # https://github.com/PyCQA/pylint-celery
43 | {%- endif %}
44 | pre-commit==2.20.0 # https://github.com/pre-commit/pre-commit
45 |
46 | # Django
47 | # ------------------------------------------------------------------------------
48 | factory-boy==3.2.1 # https://github.com/FactoryBoy/factory_boy
49 |
50 | django-debug-toolbar==3.7.0 # https://github.com/jazzband/django-debug-toolbar
51 | django-extensions==3.2.1 # https://github.com/django-extensions/django-extensions
52 | django-coverage-plugin==2.0.3 # https://github.com/nedbat/django_coverage_plugin
53 | pytest-django==4.5.2 # https://github.com/pytest-dev/pytest-django
54 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/.envs/.production/.django:
--------------------------------------------------------------------------------
1 | # General
2 | # ------------------------------------------------------------------------------
3 | # DJANGO_READ_DOT_ENV_FILE=True
4 | DJANGO_SETTINGS_MODULE=config.settings.production
5 | DJANGO_SECRET_KEY=!!!SET DJANGO_SECRET_KEY!!!
6 | DJANGO_ADMIN_URL=!!!SET DJANGO_ADMIN_URL!!!
7 | DJANGO_ALLOWED_HOSTS=.{{ cookiecutter.domain_name }}
8 |
9 | # Security
10 | # ------------------------------------------------------------------------------
11 | # TIP: better off using DNS, however, redirect is OK too
12 | DJANGO_SECURE_SSL_REDIRECT=False
13 |
14 | # Email
15 | # ------------------------------------------------------------------------------
16 | DJANGO_SERVER_EMAIL=
17 | {% if cookiecutter.mail_service == 'Mailgun' %}
18 | MAILGUN_API_KEY=
19 | MAILGUN_DOMAIN=
20 | {% elif cookiecutter.mail_service == 'Mailjet' %}
21 | MAILJET_API_KEY=
22 | MAILJET_SECRET_KEY=
23 | {% elif cookiecutter.mail_service == 'Mandrill' %}
24 | MANDRILL_API_KEY=
25 | {% elif cookiecutter.mail_service == 'Postmark' %}
26 | POSTMARK_SERVER_TOKEN=
27 | {% elif cookiecutter.mail_service == 'Sendgrid' %}
28 | SENDGRID_API_KEY=
29 | SENDGRID_GENERATE_MESSAGE_ID=True
30 | SENDGRID_MERGE_FIELD_FORMAT=None
31 | {% elif cookiecutter.mail_service == 'SendinBlue' %}
32 | SENDINBLUE_API_KEY=
33 | {% elif cookiecutter.mail_service == 'SparkPost' %}
34 | SPARKPOST_API_KEY=
35 | {% endif %}
36 | {% if cookiecutter.cloud_provider == 'AWS' %}
37 | # AWS
38 | # ------------------------------------------------------------------------------
39 | DJANGO_AWS_ACCESS_KEY_ID=
40 | DJANGO_AWS_SECRET_ACCESS_KEY=
41 | DJANGO_AWS_STORAGE_BUCKET_NAME=
42 | {% elif cookiecutter.cloud_provider == 'GCP' %}
43 | # GCP
44 | # ------------------------------------------------------------------------------
45 | GOOGLE_APPLICATION_CREDENTIALS=
46 | DJANGO_GCP_STORAGE_BUCKET_NAME=
47 | {% endif %}
48 | # django-allauth
49 | # ------------------------------------------------------------------------------
50 | DJANGO_ACCOUNT_ALLOW_REGISTRATION=True
51 |
52 | # Gunicorn
53 | # ------------------------------------------------------------------------------
54 | WEB_CONCURRENCY=4
55 | {% if cookiecutter.use_sentry == 'y' %}
56 | # Sentry
57 | # ------------------------------------------------------------------------------
58 | SENTRY_DSN=
59 | {% endif %}
60 |
61 | # Redis
62 | # ------------------------------------------------------------------------------
63 | REDIS_URL=redis://redis:6379/0
64 | {% if cookiecutter.use_celery == 'y' %}
65 | # Celery
66 | # ------------------------------------------------------------------------------
67 |
68 | # Flower
69 | CELERY_FLOWER_USER=!!!SET CELERY_FLOWER_USER!!!
70 | CELERY_FLOWER_PASSWORD=!!!SET CELERY_FLOWER_PASSWORD!!!
71 | {% endif %}
72 |
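
These variables are consumed at runtime through ``django-environ`` in the production settings. Below is a minimal sketch of the reading side; the setting names are illustrative and the actual settings module in the generated project may differ, but the environment variable names match this file: ::

    # Illustrative sketch: reading the variables above with django-environ.
    import environ

    env = environ.Env()

    SECRET_KEY = env("DJANGO_SECRET_KEY")
    ALLOWED_HOSTS = env.list("DJANGO_ALLOWED_HOSTS", default=[])
    SECURE_SSL_REDIRECT = env.bool("DJANGO_SECURE_SSL_REDIRECT", default=True)
    ACCOUNT_ALLOW_REGISTRATION = env.bool("DJANGO_ACCOUNT_ALLOW_REGISTRATION", True)
    REDIS_URL = env("REDIS_URL", default="redis://redis:6379/0")

Note that ``DJANGO_READ_DOT_ENV_FILE`` is commented out above because, in the Docker setup, these values are typically injected as real environment variables by ``docker-compose`` rather than read from a ``.env`` file by Django itself.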
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/compose/production/django/entrypoint:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | set -o errexit
4 | set -o pipefail
5 | set -o nounset
6 |
7 |
8 | {% if cookiecutter.use_celery == 'y' %}
9 | # N.B. If only .env files supported variable expansion...
10 | export CELERY_BROKER_URL="${REDIS_URL}"
11 | {% endif %}
12 |
13 | {%- if cookiecutter.database_engine == 'postgresql' %}
14 | if [ -z "${POSTGRES_USER}" ]; then
15 | base_postgres_image_default_user='postgres'
16 | export POSTGRES_USER="${base_postgres_image_default_user}"
17 | fi
18 | export DATABASE_URL="postgres://${POSTGRES_USER}:${POSTGRES_PASSWORD}@${POSTGRES_HOST}:${POSTGRES_PORT}/${POSTGRES_DB}"
19 | {%- endif %}
20 | {%- if cookiecutter.database_engine == 'mysql' %}
21 | if [ -z "${MYSQL_USER}" ]; then
22 | base_mysql_image_default_user='root'
23 | export MYSQL_USER="${base_mysql_image_default_user}"
24 | fi
25 | export DATABASE_URL="mysql://${MYSQL_USER}:${MYSQL_PASSWORD}@${MYSQL_HOST}:${MYSQL_PORT}/${MYSQL_DATABASE}"
26 | {%- endif %}
27 |
28 |
29 | python << END
30 | import sys
31 | import time
32 |
33 | suggest_unrecoverable_after = 30
34 | start = time.time()
35 |
36 | {%- if cookiecutter.database_engine == 'postgresql' %}
37 | import psycopg2
38 | while True:
39 | try:
40 | psycopg2.connect(
41 | dbname="${POSTGRES_DB}",
42 | user="${POSTGRES_USER}",
43 | password="${POSTGRES_PASSWORD}",
44 | host="${POSTGRES_HOST}",
45 | port="${POSTGRES_PORT}",
46 | )
47 | break
48 | except psycopg2.OperationalError as error:
49 | sys.stderr.write("Waiting for PostgreSQL to become available...\n")
50 | if time.time() - start > suggest_unrecoverable_after:
51 | sys.stderr.write(" This is taking longer than expected. The following exception may be indicative of an unrecoverable error: '{}'\n".format(error))
52 | time.sleep(1)
53 | {%- endif %}
54 |
55 | {%- if cookiecutter.database_engine == 'mysql' %}
56 | import MySQLdb
57 |
58 | while True:
59 | try:
60 | _db = MySQLdb._mysql.connect(
61 | host="${MYSQL_HOST}",
62 | user="${MYSQL_USER}",
63 | password="${MYSQL_PASSWORD}",
64 | database="${MYSQL_DATABASE}",
65 | port=int("${MYSQL_PORT}")
66 | )
67 | break
68 | except MySQLdb._exceptions.OperationalError as error:
69 | sys.stderr.write("Waiting for MySQL to become available...\n")
70 | if time.time() - start > suggest_unrecoverable_after:
71 | sys.stderr.write(" This is taking longer than expected. The following exception may be indicative of an unrecoverable error: '{}'\n".format(error))
72 | time.sleep(1)
73 | {%- endif %}
74 |
75 | END
76 |
77 | exec "$@"
78 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/docs/conf.py:
--------------------------------------------------------------------------------
1 | # Configuration file for the Sphinx documentation builder.
2 | #
3 | # This file only contains a selection of the most common options. For a full
4 | # list see the documentation:
5 | # https://www.sphinx-doc.org/en/master/usage/configuration.html
6 |
7 | # -- Path setup --------------------------------------------------------------
8 |
9 | # If extensions (or modules to document with autodoc) are in another directory,
10 | # add these directories to sys.path here. If the directory is relative to the
11 | # documentation root, use os.path.abspath to make it absolute, like shown here.
12 |
13 | import os
14 | import sys
15 | import django
16 |
17 | if os.getenv("READTHEDOCS", default=False) == "True":
18 | sys.path.insert(0, os.path.abspath(".."))
19 | os.environ["DJANGO_READ_DOT_ENV_FILE"] = "True"
20 | os.environ["USE_DOCKER"] = "no"
21 | else:
22 | {%- if cookiecutter.use_docker == 'y' %}
23 | sys.path.insert(0, os.path.abspath("/app"))
24 | {%- else %}
25 | sys.path.insert(0, os.path.abspath(".."))
26 | {%- endif %}
27 | os.environ["DATABASE_URL"] = "sqlite:///readthedocs.db"
28 | {%- if cookiecutter.use_celery == 'y' %}
29 | os.environ["CELERY_BROKER_URL"] = os.getenv("REDIS_URL", "redis://redis:6379")
30 | {%- endif %}
31 | os.environ.setdefault("DJANGO_SETTINGS_MODULE", "config.settings.local")
32 | django.setup()
33 |
34 | # -- Project information -----------------------------------------------------
35 |
36 | project = "{{ cookiecutter.project_name }}"
37 | copyright = """{% now 'utc', '%Y' %}, {{ cookiecutter.author_name }}"""
38 | author = "{{ cookiecutter.author_name }}"
39 |
40 |
41 | # -- General configuration ---------------------------------------------------
42 |
43 | # Add any Sphinx extension module names here, as strings. They can be
44 | # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
45 | # ones.
46 | extensions = [
47 | "sphinx.ext.autodoc",
48 | "sphinx.ext.napoleon",
49 | ]
50 |
51 | # Add any paths that contain templates here, relative to this directory.
52 | # templates_path = ["_templates"]
53 |
54 | # List of patterns, relative to source directory, that match files and
55 | # directories to ignore when looking for source files.
56 | # This pattern also affects html_static_path and html_extra_path.
57 | exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"]
58 |
59 | # -- Options for HTML output -------------------------------------------------
60 |
61 | # The theme to use for HTML and HTML Help pages. See the documentation for
62 | # a list of builtin themes.
63 | #
64 | html_theme = "alabaster"
65 |
66 | # Add any paths that contain custom static files (such as style sheets) here,
67 | # relative to this directory. They are copied after the builtin static files,
68 | # so a file named "default.css" will overwrite the builtin "default.css".
69 | # html_static_path = ["_static"]
70 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/docs/pycharm/configuration.rst:
--------------------------------------------------------------------------------
1 | Docker Remote Debugging
2 | =======================
3 |
4 | To connect to the remote Python interpreter inside Docker, you first have to make sure that PyCharm is aware of your Docker setup.
5 |
6 | Go to *Settings > Build, Execution, Deployment > Docker*. If you are on Linux, you can use Docker directly through its socket `unix:///var/run/docker.sock`. If you are on Windows or Mac, make sure that you have docker-machine installed; then you can simply *Import credentials from Docker Machine*.
7 |
8 | .. image:: images/1.png
9 |
10 | Configure Remote Python Interpreter
11 | -----------------------------------
12 |
13 | This repository comes with already prepared "Run/Debug Configurations" for Docker.
14 |
15 | .. image:: images/2.png
16 |
17 | But as you can see, at the beginning there is something wrong with them. They have a red X on the Django icon, and they cannot be used without configuring a remote Python interpreter. To do that, you first have to go to *Settings > Build, Execution, Deployment*.
18 |
19 |
20 | Next, you have to add a new remote Python interpreter, based on the already tested deployment settings. Go to *Settings > Project > Project Interpreter*. Click the cog icon, then click *Add Remote*.
21 |
22 | .. image:: images/3.png
23 |
24 | Switch to *Docker Compose*, select the `local.yml` file from your project directory, and set *Service name* to `django`.
25 |
26 | .. image:: images/4.png
27 |
28 | Having done that, click *OK*. Close the *Settings* panel and wait a few seconds...
29 |
30 | .. image:: images/7.png
31 |
32 | After a few seconds, all *Run/Debug Configurations* should be ready to use.
33 |
34 | .. image:: images/8.png
35 |
36 | **Things you can do with provided configuration**:
37 |
38 | * run and debug python code
39 |
40 | .. image:: images/f1.png
41 |
42 | * run and debug tests
43 |
44 | .. image:: images/f2.png
45 | .. image:: images/f3.png
46 |
47 | * run and debug migrations or other django management commands
48 |
49 | .. image:: images/f4.png
50 |
51 | * and many others..
52 |
53 | Known issues
54 | ------------
55 |
56 | * PyCharm hangs on "Connecting to Debugger"
57 |
58 | .. image:: images/issue1.png
59 |
60 | This might be the fault of your firewall. Take a look at this ticket: https://youtrack.jetbrains.com/issue/PY-18913
61 |
62 | * Modified files in `.idea` directory
63 |
64 | Most of the files from `.idea/` were added to `.gitignore`, with a few exceptions made to provide a "ready to go" configuration. After adding the remote interpreter, some of these files are altered by PyCharm:
65 |
66 | .. image:: images/issue2.png
67 |
68 | In theory you can remove them from the repository, but then other people will lose the ability to initialize a project from the provided configurations as you did. To get rid of this annoying state, you can run the command::
69 |
70 | $ git update-index --assume-unchanged {{cookiecutter.project_slug}}.iml
71 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/utility/install_os_dependencies.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | WORK_DIR="$(dirname "$0")"
4 | DISTRO_NAME=$(lsb_release -sc)
5 | OS_REQUIREMENTS_FILENAME="requirements-$DISTRO_NAME.apt"
6 |
7 | cd $WORK_DIR
8 |
9 | # Check if a requirements file exists for the current distribution.
10 | if [ ! -r "$OS_REQUIREMENTS_FILENAME" ]; then
11 | cat <<-EOF >&2
12 | There is no requirements file for your distribution.
13 | You can check one of the files listed below to help find the equivalent packages for your system:
14 | $(find ./ -name "requirements-*.apt" -printf " - %f\n")
15 | EOF
16 | exit 1;
17 | fi
18 |
19 | # Handle call with wrong command
20 | function wrong_command()
21 | {
22 | echo "${0##*/} - unknown command: '${1}'" >&2
23 | usage_message
24 | }
25 |
26 | # Print help / script usage
27 | function usage_message()
28 | {
29 | cat <<-EOF
30 | Usage: $WORK_DIR/${0##*/}
31 | Available commands are:
32 | list Print a list of all packages defined in the ${OS_REQUIREMENTS_FILENAME} file
33 | help Print this help
34 |
35 | Commands that require superuser permission:
36 | install Install packages defined in the ${OS_REQUIREMENTS_FILENAME} file. Note: this
37 | does not upgrade already installed packages to new versions, even if a
38 | new version is available in the repository.
39 | upgrade Same as install, but upgrades already installed packages if a new
40 | version is available.
41 | EOF
42 | }
43 |
44 | # Read the requirements.apt file, and remove comments and blank lines
45 | function list_packages(){
46 | grep -v "#" "${OS_REQUIREMENTS_FILENAME}" | grep -v "^$";
47 | }
48 |
49 | function install_packages()
50 | {
51 | list_packages | xargs apt-get --no-upgrade install -y;
52 | }
53 |
54 | function upgrade_packages()
55 | {
56 | list_packages | xargs apt-get install -y;
57 | }
58 |
59 | function install_or_upgrade()
60 | {
61 | P=${1}
62 | PARAN=${P:-"install"}
63 |
64 | if [[ $EUID -ne 0 ]]; then
65 | cat <<-EOF >&2
66 | You must run this script with root privileges
67 | Please do:
68 | sudo $WORK_DIR/${0##*/} $PARAN
69 | EOF
70 | exit 1
71 | else
72 |
73 | apt-get update
74 |
75 | # Install the basic compilation dependencies and other required libraries of this project
76 | if [ "$PARAN" == "install" ]; then
77 | install_packages;
78 | else
79 | upgrade_packages;
80 | fi
81 |
82 | # cleaning downloaded packages from apt-get cache
83 | apt-get clean
84 |
85 | exit 0
86 | fi
87 | }
88 |
89 | # Handle command argument
90 | case "$1" in
91 | install) install_or_upgrade;;
92 | upgrade) install_or_upgrade "upgrade";;
93 | list) list_packages;;
94 | help|"") usage_message;;
95 | *) wrong_command "$1";;
96 | esac
97 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/config/urls.py:
--------------------------------------------------------------------------------
1 | from django.conf import settings
2 | from django.conf.urls.static import static
3 | from django.contrib import admin
4 | {%- if cookiecutter.use_async == 'y' %}
5 | from django.contrib.staticfiles.urls import staticfiles_urlpatterns
6 | {%- endif %}
7 | from django.urls import include, path
8 | from django.views import defaults as default_views
9 | from django.views.generic import TemplateView
10 | {%- if cookiecutter.use_drf == 'y' %}
11 | from drf_spectacular.views import SpectacularAPIView, SpectacularSwaggerView
12 | from rest_framework.authtoken.views import obtain_auth_token
13 | {%- endif %}
14 |
15 | urlpatterns = [
16 | path("", TemplateView.as_view(template_name="pages/home.html"), name="home"),
17 | path(
18 | "about/", TemplateView.as_view(template_name="pages/about.html"), name="about"
19 | ),
20 | # Django Admin, use {% raw %}{% url 'admin:index' %}{% endraw %}
21 | path(settings.ADMIN_URL, admin.site.urls),
22 | # User management
23 | path("users/", include("{{ cookiecutter.project_slug }}.users.urls", namespace="users")),
24 | path("accounts/", include("allauth.urls")),
25 | # Your stuff: custom urls includes go here
26 | ] + static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
27 | {%- if cookiecutter.use_async == 'y' %}
28 | if settings.DEBUG:
29 | # Static file serving when using Gunicorn + Uvicorn for local web socket development
30 | urlpatterns += staticfiles_urlpatterns()
31 | {%- endif %}
32 | {% if cookiecutter.use_drf == 'y' %}
33 | # API URLS
34 | urlpatterns += [
35 | # API base url
36 | path("api/", include("config.api_router")),
37 | # DRF auth token
38 | path("auth-token/", obtain_auth_token),
39 | path("api/schema/", SpectacularAPIView.as_view(), name="api-schema"),
40 | path(
41 | "api/docs/",
42 | SpectacularSwaggerView.as_view(url_name="api-schema"),
43 | name="api-docs",
44 | ),
45 | ]
46 | {%- endif %}
47 |
48 | if settings.DEBUG:
49 | # This allows the error pages to be debugged during development, just visit
50 | # these URLs in the browser to see what these error pages look like.
51 | urlpatterns += [
52 | path(
53 | "400/",
54 | default_views.bad_request,
55 | kwargs={"exception": Exception("Bad Request!")},
56 | ),
57 | path(
58 | "403/",
59 | default_views.permission_denied,
60 | kwargs={"exception": Exception("Permission Denied")},
61 | ),
62 | path(
63 | "404/",
64 | default_views.page_not_found,
65 | kwargs={"exception": Exception("Page not Found")},
66 | ),
67 | path("500/", default_views.server_error),
68 | ]
69 | if "debug_toolbar" in settings.INSTALLED_APPS:
70 | import debug_toolbar
71 |
72 | urlpatterns = [path("__debug__/", include(debug_toolbar.urls))] + urlpatterns
73 |
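
The ``config.api_router`` module included under ``api/`` above is where DRF viewsets get registered. A minimal sketch of such a router module is shown below; the ``UserViewSet`` import is hypothetical and stands in for whatever viewsets your project defines: ::

    # Illustrative sketch of a DRF router module (not the generated file verbatim).
    from django.conf import settings
    from rest_framework.routers import DefaultRouter, SimpleRouter

    # Hypothetical import -- point this at your project's actual viewset.
    from my_awesome_project.users.api.views import UserViewSet

    # Browsable DefaultRouter during development, lighter SimpleRouter otherwise.
    router = DefaultRouter() if settings.DEBUG else SimpleRouter()
    router.register("users", UserViewSet)

    app_name = "api"
    urlpatterns = router.urls

With a module like this in place, ``path("api/", include("config.api_router"))`` exposes the registered routes, and the schema and Swagger views above document them automatically.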
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/{{cookiecutter.project_slug}}/contrib/sites/migrations/0003_set_site_domain_and_name.py:
--------------------------------------------------------------------------------
1 | """
2 | To understand why this file is here, please read:
3 |
4 | http://cookiecutter-django.readthedocs.io/en/latest/faq.html#why-is-there-a-django-contrib-sites-directory-in-cookiecutter-django
5 | """
6 | from django.conf import settings
7 | from django.db import migrations
8 |
9 |
10 | def _update_or_create_site_with_sequence(site_model, connection, domain, name):
11 | """Update or create the site with default ID and keep the DB sequence in sync."""
12 | site, created = site_model.objects.update_or_create(
13 | id=settings.SITE_ID,
14 | defaults={
15 | "domain": domain,
16 | "name": name,
17 | },
18 | )
19 | if created:
20 | # We provided the ID explicitly when creating the Site entry, therefore the DB
21 | # sequence to auto-generate them wasn't used and is now out of sync. If we
22 | # don't do anything, we'll get a unique constraint violation the next time a
23 | # site is created.
24 | # To avoid this, we need to manually update DB sequence and make sure it's
25 | # greater than the maximum value.
26 | max_id = site_model.objects.order_by('-id').first().id
27 | with connection.cursor() as cursor:
28 | {%- if cookiecutter.database_engine == 'postgresql' %}
29 | cursor.execute("SELECT last_value from django_site_id_seq")
30 | (current_id,) = cursor.fetchone()
31 | if current_id <= max_id:
32 | cursor.execute(
33 | "alter sequence django_site_id_seq restart with %s",
34 | [max_id + 1],
35 | )
36 | {%- elif cookiecutter.database_engine == 'mysql' %}
37 | cursor.execute("SELECT MAX(id) FROM django_site")
38 | (current_id,) = cursor.fetchone()
39 | if current_id <= max_id:
40 | cursor.execute(
41 | "ALTER TABLE django_site AUTO_INCREMENT=%s",
42 | [max_id + 1],
43 | )
44 | {%- endif %}
45 |
46 |
47 | def update_site_forward(apps, schema_editor):
48 | """Set site domain and name."""
49 | Site = apps.get_model("sites", "Site")
50 | _update_or_create_site_with_sequence(
51 | Site,
52 | schema_editor.connection,
53 | "{{cookiecutter.domain_name}}",
54 | "{{cookiecutter.project_name}}",
55 | )
56 |
57 |
58 | def update_site_backward(apps, schema_editor):
59 | """Revert site domain and name to default."""
60 | Site = apps.get_model("sites", "Site")
61 | _update_or_create_site_with_sequence(
62 | Site,
63 | schema_editor.connection,
64 | "example.com",
65 | "example.com",
66 | )
67 |
68 |
69 | class Migration(migrations.Migration):
70 |
71 | dependencies = [("sites", "0002_alter_domain_unique")]
72 |
73 | operations = [migrations.RunPython(update_site_forward, update_site_backward)]
74 |
--------------------------------------------------------------------------------
/docs/troubleshooting.rst:
--------------------------------------------------------------------------------
1 | Troubleshooting
2 | =====================================
3 |
4 | This page contains some advice about errors and problems commonly encountered during the development of Cookiecutter Django applications.
5 |
6 | Server Error on sign-up/log-in
7 | ------------------------------
8 |
9 | Make sure you have configured the mail backend (e.g. Mailgun) by adding the API key and sender domain.
10 |
11 | .. include:: mailgun.rst
12 |
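
For Mailgun specifically, the settings involved boil down to a django-anymail backend plus two credentials. Here is a rough sketch, using django-anymail's documented setting names and the same environment variable names as the production ``.django`` env file; the exact settings layout in the generated project may differ: ::

    # Illustrative sketch of the Anymail/Mailgun settings behind sign-up e-mails.
    import environ

    env = environ.Env()

    EMAIL_BACKEND = "anymail.backends.mailgun.EmailBackend"
    ANYMAIL = {
        "MAILGUN_API_KEY": env("MAILGUN_API_KEY"),
        "MAILGUN_SENDER_DOMAIN": env("MAILGUN_DOMAIN"),
    }

If either value is missing in production, allauth's confirmation e-mail cannot be sent and the sign-up/log-in views return a server error.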
13 | .. _docker-postgres-auth-failed:
14 |
15 | Docker: Postgres authentication failed
16 | --------------------------------------
17 |
18 | Examples of logs::
19 |
20 | postgres_1 | 2018-06-07 19:11:23.963 UTC [81] FATAL: password authentication failed for user "pydanny"
21 | postgres_1 | 2018-06-07 19:11:23.963 UTC [81] DETAIL: Password does not match for user "pydanny".
22 | postgres_1 | Connection matched pg_hba.conf line 95: "host all all all md5"
23 |
24 | If you recreate the project multiple times with the same name, Docker preserves the volumes for the postgres container between projects. Here is what happens:
25 |
26 | #. You generate the project the first time. The .env postgres file is populated with a random password.
27 | #. You run docker-compose and the containers are created. The postgres container creates the database based on the .env file credentials.
28 | #. You "regenerate" the project with the same name, so the postgres .env file is populated with a new random password.
29 | #. You run docker-compose. Since the names of the containers are the same, Docker will try to start them (not create them from scratch, i.e. it won't execute the Dockerfile to recreate the database). When this happens, it tries to start the database with the new credentials, which do not match the ones the database was created with, and you get the error message above.
30 |
31 | To fix this, you can either:
32 |
33 | - Clear your project-related Docker cache with ``docker-compose -f local.yml down --volumes --rmi all``.
34 | - Use the Docker volume sub-commands to find volumes (`ls`_) and remove them (`rm`_).
35 | - Use the `prune`_ command to clear the system-wide Docker cache (use with care!).
36 |
37 | .. _ls: https://docs.docker.com/engine/reference/commandline/volume_ls/
38 | .. _rm: https://docs.docker.com/engine/reference/commandline/volume_rm/
39 | .. _prune: https://docs.docker.com/v17.09/engine/reference/commandline/system_prune/
40 |
41 | Others
42 | ------
43 |
44 | #. ``project_slug`` must be a valid Python module name or you will have issues with imports.
45 |
46 | #. ``jinja2.exceptions.TemplateSyntaxError: Encountered unknown tag 'now'.``: please upgrade your cookiecutter version to >= 1.4 (see `#528`_)
47 |
48 | #. New apps not getting created in the project root: this is the expected behavior, because cookiecutter-django does not change the way Django's ``startapp`` command works; you'll have to fix this manually (see `#1725`_)
49 |
50 | .. _#528: https://github.com/cookiecutter/cookiecutter-django/issues/528#issuecomment-212650373
51 | .. _#1725: https://github.com/cookiecutter/cookiecutter-django/issues/1725#issuecomment-407493176
52 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/compose/local/django/Dockerfile:
--------------------------------------------------------------------------------
1 | ARG PYTHON_VERSION=3.9-slim-bullseye
2 |
3 | # define an alias for the specific python version used in this file.
4 | FROM python:${PYTHON_VERSION} as python
5 |
6 | # Python build stage
7 | FROM python as python-build-stage
8 |
9 | ARG BUILD_ENVIRONMENT=local
10 |
11 | # Install apt packages
12 | RUN apt-get update && apt-get install --no-install-recommends -y \
13 | # dependencies for building Python packages
14 | build-essential \
15 | {%- if cookiecutter.database_engine == "postgresql" %}
16 | # psycopg2 dependencies
17 | libpq-dev
18 | {%- elif cookiecutter.database_engine == "mysql" %}
19 | # mysql dependency
20 | default-libmysqlclient-dev
21 | {%- endif %}
22 |
23 | # Requirements are installed here to ensure they will be cached.
24 | COPY ./requirements .
25 |
26 | # Create Python Dependency and Sub-Dependency Wheels.
27 | RUN pip wheel --wheel-dir /usr/src/app/wheels \
28 | -r ${BUILD_ENVIRONMENT}.txt
29 |
30 |
31 | # Python 'run' stage
32 | FROM python as python-run-stage
33 |
34 | ARG BUILD_ENVIRONMENT=local
35 | ARG APP_HOME=/app
36 |
37 | ENV PYTHONUNBUFFERED 1
38 | ENV PYTHONDONTWRITEBYTECODE 1
39 | ENV BUILD_ENV ${BUILD_ENVIRONMENT}
40 |
41 | WORKDIR ${APP_HOME}
42 |
43 | # Install required system dependencies
44 | RUN apt-get update && apt-get install --no-install-recommends -y \
45 | {%- if cookiecutter.database_engine == "postgresql" %}
46 | # psycopg2 dependencies
47 | libpq-dev \
48 | {%- elif cookiecutter.database_engine == "mysql" %}
49 | # mysql dependency
50 | default-libmysqlclient-dev \
51 | {%- endif %}
52 | # Translations dependencies
53 | gettext \
54 | # cleaning up unused files
55 | && apt-get purge -y --auto-remove -o APT::AutoRemove::RecommendsImportant=false \
56 | && rm -rf /var/lib/apt/lists/*
57 |
58 | # All absolute dir copies ignore the workdir instruction. All relative dir copies are relative to the workdir instruction.
59 | # copy python dependency wheels from python-build-stage
60 | COPY --from=python-build-stage /usr/src/app/wheels /wheels/
61 |
62 | # use wheels to install python dependencies
63 | RUN pip install --no-cache-dir --no-index --find-links=/wheels/ /wheels/* \
64 | && rm -rf /wheels/
65 |
66 | COPY ./compose/production/django/entrypoint /entrypoint
67 | RUN sed -i 's/\r$//g' /entrypoint
68 | RUN chmod +x /entrypoint
69 |
70 | COPY ./compose/local/django/start /start
71 | RUN sed -i 's/\r$//g' /start
72 | RUN chmod +x /start
73 |
74 | {% if cookiecutter.use_celery == "y" %}
75 | COPY ./compose/local/django/celery/worker/start /start-celeryworker
76 | RUN sed -i 's/\r$//g' /start-celeryworker
77 | RUN chmod +x /start-celeryworker
78 |
79 | COPY ./compose/local/django/celery/beat/start /start-celerybeat
80 | RUN sed -i 's/\r$//g' /start-celerybeat
81 | RUN chmod +x /start-celerybeat
82 |
83 | COPY ./compose/local/django/celery/flower/start /start-flower
84 | RUN sed -i 's/\r$//g' /start-flower
85 | RUN chmod +x /start-flower
86 | {% endif %}
87 |
88 | # copy application code to WORKDIR
89 | COPY . ${APP_HOME}
90 |
91 | ENTRYPOINT ["/entrypoint"]
92 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/{{cookiecutter.project_slug}}/templates/account/email.html:
--------------------------------------------------------------------------------
1 | {% raw %}
2 | {% extends "account/base.html" %}
3 |
4 | {% load i18n %}
5 | {% load crispy_forms_tags %}
6 |
7 | {% block head_title %}{% translate "Account" %}{% endblock %}
8 |
9 | {% block inner %}
10 |
{% translate "E-mail Addresses" %}
11 |
12 | {% if user.emailaddress_set.all %}
13 |
{% translate 'The following e-mail addresses are associated with your account:' %}
14 |
15 |
44 |
45 | {% else %}
46 |
{% translate 'Warning:'%} {% translate "You currently do not have any e-mail address set up. You should really add an e-mail address so you can receive notifications, reset your password, etc." %}