{% blocktrans %}We have sent an e-mail to you for verification. Follow the link provided to finalize the signup process. Please contact us if you do not receive it within a few minutes.{% endblocktrans %}
{% blocktrans %}This part of the site requires us to verify that
13 | you are who you claim to be. For this purpose, we require that you
14 | verify ownership of your e-mail address. {% endblocktrans %}
15 |
16 |
{% blocktrans %}We have sent an e-mail to you for
17 | verification. Please click on the link inside this e-mail. Please
18 | contact us if you do not receive it within a few minutes.{% endblocktrans %}
{% blocktrans with confirmation.email_address.email as email %}Please confirm that {{ email }} is an e-mail address for user {{ user_display }}.{% endblocktrans %}
{% if token_fail %}{% trans "Bad Token" %}{% else %}{% trans "Change Password" %}{% endif %}
9 |
10 | {% if token_fail %}
11 | {% url 'account_reset_password' as passwd_reset_url %}
12 |
{% blocktrans %}The password reset link was invalid, possibly because it has already been used. Please request a new password reset.{% endblocktrans %}
13 | {% else %}
14 | {% if form %}
15 |
20 | {% else %}
21 |
{% trans 'Your password is now changed.' %}
22 | {% endif %}
23 | {% endif %}
24 | {% endblock %}
25 | {% endraw %}
26 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/compose/production/django/entrypoint:
--------------------------------------------------------------------------------
#!/bin/bash

# Production Django entrypoint: exports DATABASE_URL built from POSTGRES_*
# env vars, blocks until PostgreSQL accepts connections, then execs the
# command passed by the Dockerfile/compose service.

set -o errexit
set -o pipefail
set -o nounset


{% if cookiecutter.use_celery == 'y' %}
# N.B. If only .env files supported variable expansion...
export CELERY_BROKER_URL="${REDIS_URL}"
{% endif %}

# Under 'set -o nounset' an unset POSTGRES_USER would abort the script before
# the emptiness test runs, so use the ':-' default expansion here.
if [ -z "${POSTGRES_USER:-}" ]; then
    base_postgres_image_default_user='postgres'
    export POSTGRES_USER="${base_postgres_image_default_user}"
fi
export DATABASE_URL="postgres://${POSTGRES_USER}:${POSTGRES_PASSWORD}@${POSTGRES_HOST}:${POSTGRES_PORT}/${POSTGRES_DB}"

# Returns 0 once PostgreSQL accepts a connection, non-zero otherwise.
# The heredoc is unquoted on purpose: the shell expands the POSTGRES_* values
# into the Python source before it runs.
postgres_ready() {
python << END
import sys

import psycopg2

try:
    psycopg2.connect(
        dbname="${POSTGRES_DB}",
        user="${POSTGRES_USER}",
        password="${POSTGRES_PASSWORD}",
        host="${POSTGRES_HOST}",
        port="${POSTGRES_PORT}",
    )
except psycopg2.OperationalError:
    sys.exit(-1)
sys.exit(0)

END
}
until postgres_ready; do
  >&2 echo 'Waiting for PostgreSQL to become available...'
  sleep 1
done
>&2 echo 'PostgreSQL is available'

# Replace this shell with the container's actual command.
exec "$@"
46 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/manage.py:
--------------------------------------------------------------------------------
#!/usr/bin/env python
"""Django's command-line utility for administrative tasks."""
import os
import sys

if __name__ == "__main__":
    # Fall back to the local settings unless the caller already picked a
    # settings module via the environment.
    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "config.settings.local")

    try:
        from django.core.management import execute_from_command_line
    except ImportError:
        # The above import may fail for some other reason. Ensure that the
        # issue is really that Django is missing to avoid masking other
        # exceptions on Python 2.
        try:
            import django  # noqa
        except ImportError:
            raise ImportError(
                "Couldn't import Django. Are you sure it's installed and "
                "available on your PYTHONPATH environment variable? Did you "
                "forget to activate a virtual environment?"
            )

        # Django itself imported fine, so re-raise the original error
        # (something inside django.core.management failed).
        raise

    # This allows easy placement of apps within the interior
    # {{ cookiecutter.project_slug }} directory.
    current_path = os.path.dirname(os.path.abspath(__file__))
    sys.path.append(os.path.join(current_path, "{{ cookiecutter.project_slug }}"))

    execute_from_command_line(sys.argv)
31 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/.idea/runConfigurations/merge_production_dotenvs_in_dotenv.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/compose/production/postgres/maintenance/backup:
--------------------------------------------------------------------------------
#!/usr/bin/env bash


### Create a database backup.
###
### Usage:
###     $ docker-compose -f production.yml (exec |run --rm) postgres backup


set -o errexit
set -o pipefail
set -o nounset


# Quote "${0}" inside the command substitution so paths containing
# whitespace survive word-splitting (ShellCheck SC2086).
working_dir="$(dirname "${0}")"
source "${working_dir}/_sourced/constants.sh"
source "${working_dir}/_sourced/messages.sh"


message_welcome "Backing up the '${POSTGRES_DB}' database..."


# pg_dump as the 'postgres' superuser is intentionally refused.
if [[ "${POSTGRES_USER}" == "postgres" ]]; then
    message_error "Backing up as 'postgres' user is not supported. Assign 'POSTGRES_USER' env with another one and try again."
    exit 1
fi

# Standard libpq env vars so pg_dump needs no explicit connection flags.
export PGHOST="${POSTGRES_HOST}"
export PGPORT="${POSTGRES_PORT}"
export PGUSER="${POSTGRES_USER}"
export PGPASSWORD="${POSTGRES_PASSWORD}"
export PGDATABASE="${POSTGRES_DB}"

# Timestamped, gzip-compressed SQL dump; prefix/dir come from constants.sh.
backup_filename="${BACKUP_FILE_PREFIX}_$(date +'%Y_%m_%dT%H_%M_%S').sql.gz"
pg_dump | gzip > "${BACKUP_DIR_PATH}/${backup_filename}"


message_success "'${POSTGRES_DB}' database backup '${backup_filename}' has been created and placed in '${BACKUP_DIR_PATH}'."
39 |
--------------------------------------------------------------------------------
/docs/linters.rst:
--------------------------------------------------------------------------------
1 | Linters
2 | =======
3 |
4 | .. index:: linters
5 |
6 |
7 | flake8
8 | ------
9 |
10 | To run flake8: ::
11 |
12 | $ flake8
13 |
14 | The config for flake8 is located in setup.cfg. It specifies:
15 |
16 | * Set max line length to 120 chars
17 | * Exclude ``.tox,.git,*/migrations/*,*/static/CACHE/*,docs,node_modules``
18 |
19 | pylint
20 | ------
21 |
22 | Unlike pycodestyle, pylint is not bundled into flake8's checks, so run it separately to get its more detailed report: ::
23 |
24 | $ pylint <python files that you wish to lint>
25 |
26 | The config for pylint is located in .pylintrc. It specifies:
27 |
28 | * Use the pylint_django plugin. If using Celery, also use pylint_celery.
29 | * Set max line length to 120 chars
30 | * Disable linting messages for missing docstring and invalid name
31 | * max-parents=13
32 |
33 | pycodestyle
34 | -----------
35 |
36 | This is included in flake8's checks, but you can also run it separately to see a more detailed report: ::
37 |
38 | $ pycodestyle
39 |
40 | The config for pycodestyle is located in setup.cfg. It specifies:
41 |
42 | * Set max line length to 120 chars
43 | * Exclude ``.tox,.git,*/migrations/*,*/static/CACHE/*,docs,node_modules``
44 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/frontend/src/apollo/client.js:
--------------------------------------------------------------------------------
import { ApolloClient } from 'apollo-client'
import { ApolloLink } from 'apollo-link'

import { authLink, restLink, stateLink, uploadLink } from './links'
import cache from './cache'
import { mockLink } from './mocks'

// Default fetch/error policies applied to every query and mutation.
const defaultOptions = {
  watchQuery: {
    fetchPolicy: 'cache-and-network',
    errorPolicy: 'ignore',
  },
  query: {
    fetchPolicy: 'cache-and-network',
    errorPolicy: 'all',
  },
  mutate: {
    // NOTE: Using 'none' will allow Apollo to recognize errors even if the response
    // includes {data: null} in it (graphene does this with each unsuccessful mutation!)
    errorPolicy: 'none',
  },
}

// Local state first, then REST, then auth headers wrapping the upload/HTTP link.
const link = ApolloLink.from([stateLink, restLink, authLink.concat(uploadLink)])

const client = new ApolloClient({
  link:
    process.env.NODE_ENV === 'production'
      ? link // never use mock for production
      : ApolloLink.split(operation => operation.getContext().mock, mockLink, link),
  cache,
  // Direct comparison instead of the redundant `cond ? false : true` ternary.
  connectToDevTools: process.env.NODE_ENV !== 'production',
  defaultOptions,
})

export default client
37 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/frontend/src/apollo/test-helpers.js:
--------------------------------------------------------------------------------
import { assign } from 'lodash'
import { baseMocks } from './mocks'
import { graphql } from 'graphql'
import { addMockFunctionsToSchema } from 'graphql-tools'
import { mockSchema } from './schema-parser'
import { print as gqlToString } from 'graphql/language'


// Execute a query against the mocked schema and resolve with its data.
export const mockQuery = ({query, mocks, variables = { id: 'id' }, log = false}) => {
  // Arguments:
  // (required) QUERY
  // (optional) Mock object : to override base mocks
  // (optional) Variables : If not passed, it will use a fake id only (most frequently used)
  // (optional) Log : In case you want the query result to be shown with test results.

  /// MOCKING SCHEMA:
  const schema = mockSchema()
  const combinedMocks = mocks ? assign({}, baseMocks, mocks) : baseMocks
  addMockFunctionsToSchema({ schema, mocks: combinedMocks })

  // need the first 'return' so that the final output is the promise result
  return graphql(schema, gqlToString(query), null, null, variables).then(result => {
    if (log) console.log('mockQuery result', result)
    const { data, errors } = result
    // Use the explicit matcher: `toBe()` with no argument only worked by
    // accident (implicitly comparing against undefined).
    expect(errors).toBeUndefined()
    return data
  })
}
29 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/.idea/runConfigurations/pytest___.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/{{cookiecutter.project_slug}}/contrib/sites/migrations/0003_set_site_domain_and_name.py:
--------------------------------------------------------------------------------
"""
To understand why this file is here, please read:

http://cookiecutter-django.readthedocs.io/en/latest/faq.html#why-is-there-a-django-contrib-sites-directory-in-cookiecutter-django
"""
from django.conf import settings
from django.db import migrations


def update_site_forward(apps, schema_editor):
    """Point the default Site at the project's domain and display name."""
    site_model = apps.get_model("sites", "Site")
    site_defaults = {
        "domain": "{{cookiecutter.domain_name}}",
        "name": "{{cookiecutter.project_name}}",
    }
    site_model.objects.update_or_create(id=settings.SITE_ID, defaults=site_defaults)


def update_site_backward(apps, schema_editor):
    """Restore the stock 'example.com' Site record on rollback."""
    site_model = apps.get_model("sites", "Site")
    site_defaults = {"domain": "example.com", "name": "example.com"}
    site_model.objects.update_or_create(id=settings.SITE_ID, defaults=site_defaults)


class Migration(migrations.Migration):

    dependencies = [("sites", "0002_alter_domain_unique")]

    operations = [migrations.RunPython(update_site_forward, update_site_backward)]
35 |
--------------------------------------------------------------------------------
/cookiecutter.json:
--------------------------------------------------------------------------------
1 | {
2 | "project_name": "My Awesome Project",
3 | "project_slug": "{{ cookiecutter.project_name.lower()|replace(' ', '_')|replace('-', '_')|replace('.', '_')|trim() }}",
4 | "description": "Behold My Awesome Project!",
5 | "author_name": "Daniel Roy Greenfeld",
6 | "domain_name": "example.com",
7 | "email": "{{ cookiecutter.author_name.lower()|replace(' ', '-') }}@example.com",
8 | "version": "0.1.0",
9 | "open_source_license": [
10 | "MIT",
11 | "BSD",
12 | "GPLv3",
13 | "Apache Software License 2.0",
14 | "Not open source"
15 | ],
16 | "timezone": "UTC",
17 | "windows": "n",
18 | "use_pycharm": "n",
19 | "use_docker": "n",
20 | "postgresql_version": [
21 | "11.3",
22 | "10.8",
23 | "9.6",
24 | "9.5",
25 | "9.4"
26 | ],
27 | "js_task_runner": "react",
28 | "cloud_provider": [
29 | "AWS",
30 | "GCP",
31 | "None"
32 | ],
33 | "use_drf": "n",
34 | "custom_bootstrap_compilation": "n",
35 | "use_compressor": "n",
36 | "use_celery": "n",
37 | "use_mailhog": "n",
38 | "use_sentry": "n",
39 | "use_whitenoise": "n",
40 | "use_heroku": "n",
41 | "ci_tool": [
42 | "None",
43 | "Travis",
44 | "Gitlab"
45 | ],
46 | "keep_local_envs_in_vcs": "y",
47 |
48 | "debug": "n"
49 | }
50 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/.idea/runConfigurations/pytest__users.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/frontend/src/App.js:
--------------------------------------------------------------------------------
1 | import React, { Component } from 'react'
2 | import logo from './logo.svg'
3 | import './App.css'
4 |
5 | import axios from 'axios'
6 | import DocumentTitle from 'react-document-title'
7 |
8 | class App extends Component {
9 | onClick = e => {
10 | console.log('Sending a GET API Call !!!')
11 | axios
12 | .get('/api/')
13 | .then(res => {
14 | console.log(res)
15 | })
16 | .then(response => {
17 | console.log(JSON.stringify(response))
18 | })
19 | }
20 |
21 | render() {
22 | return (
23 |
43 | )
44 | }
45 | }
46 |
47 | export default App
48 |
--------------------------------------------------------------------------------
/tests/test_docker.sh:
--------------------------------------------------------------------------------
#!/bin/sh
# this is a very simple script that tests the docker configuration for cookiecutter-django
# it is meant to be run from the root directory of the repository, eg:
# sh tests/test_docker.sh

set -o errexit

# install test requirements
pip install -r requirements.txt

# create a cache directory
mkdir -p .cache/docker
cd .cache/docker

# create the project using the default settings in cookiecutter.json;
# "$@" (quoted) forwards extra cookiecutter overrides intact even if they
# contain spaces (unquoted $@ would word-split them).
cookiecutter ../../ --no-input --overwrite-if-exists use_docker=y "$@"
cd my_awesome_project

# run the project's type checks
docker-compose -f local.yml run django mypy my_awesome_project

# Run black with --check option
docker-compose -f local.yml run django black --check --diff --exclude 'migrations' ./

# run the project's tests
docker-compose -f local.yml run django pytest

# return non-zero status code if there are migrations that have not been created
docker-compose -f local.yml run django python manage.py makemigrations --dry-run --check || { echo "ERROR: there were changes in the models, but the migrations listed above have not been created and are not saved in version control"; exit 1; }

# Test support for translations
docker-compose -f local.yml run django python manage.py makemessages
33 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/frontend/src/apollo/links.js:
--------------------------------------------------------------------------------
import cookie from 'react-cookies'
import { setContext } from 'apollo-link-context'
import { createHttpLink } from 'apollo-link-http'
import { RestLink } from 'apollo-link-rest'
import { withClientState } from 'apollo-link-state'
import { createUploadLink } from 'apollo-upload-client'
import { merge } from 'lodash'

import cache from './cache'

// docs: https://www.apollographql.com/docs/link/links/http.html
export const httpLink = createHttpLink({
  uri: '/graphql/',
  credentials: 'same-origin',
})

// docs: https://github.com/jaydenseric/apollo-upload-client
export const uploadLink = createUploadLink({
  uri: '/graphql/',
  credentials: 'same-origin',
})

// docs: https://www.apollographql.com/docs/link/links/rest.html
export const restLink = new RestLink({
  uri: '/api/',
  endpoints: {},
})

// docs: https://www.apollographql.com/docs/link/links/state.html
export const stateLink = withClientState({
  cache,
  // NOTE(review): merge({}) spreads an empty object here — presumably a
  // placeholder where client-state resolvers/defaults get merged in later;
  // confirm before removing.
  ...merge({}),
})

// Attach the CSRF token and JWT from cookies to every request's headers.
export const authLink = setContext((_, { headers }) => {
  return {
    headers: {
      ...headers,
      'X-CSRFToken': cookie.load('csrftoken'),
      Authorization: `JWT ${cookie.load('jwt')}`,
    },
  }
})
44 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/{{cookiecutter.project_slug}}/users/tests/test_forms.py:
--------------------------------------------------------------------------------
1 | import pytest
2 |
3 | from {{ cookiecutter.project_slug }}.users.forms import UserCreationForm
4 | from {{ cookiecutter.project_slug }}.users.tests.factories import UserFactory
5 |
6 | pytestmark = pytest.mark.django_db
7 |
8 |
9 | class TestUserCreationForm:
10 | def test_clean_username(self):
11 | # A user with proto_user params does not exist yet.
12 | proto_user = UserFactory.build()
13 |
14 | form = UserCreationForm(
15 | {
16 | "username": proto_user.username,
17 | "password1": proto_user._password,
18 | "password2": proto_user._password,
19 | }
20 | )
21 |
22 | assert form.is_valid()
23 | assert form.clean_username() == proto_user.username
24 |
25 | # Creating a user.
26 | form.save()
27 |
28 | # The user with proto_user params already exists,
29 | # hence cannot be created.
30 | form = UserCreationForm(
31 | {
32 | "username": proto_user.username,
33 | "password1": proto_user._password,
34 | "password2": proto_user._password,
35 | }
36 | )
37 |
38 | assert not form.is_valid()
39 | assert len(form.errors) == 1
40 | assert "username" in form.errors
41 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "{{cookiecutter.project_slug}}",
3 | "version": "{{ cookiecutter.version }}",
4 | "dependencies": {},
5 | "devDependencies": {
6 | {% if cookiecutter.js_task_runner == 'Gulp' -%}
7 | {% if cookiecutter.custom_bootstrap_compilation == 'y' -%}
8 | "bootstrap": "4.3.1",
9 | "gulp-concat": "^2.6.1",
10 | "jquery": "3.3.1",
11 | "popper.js": "1.14.3",
12 | {% endif -%}
13 | "autoprefixer": "^9.4.7",
14 | "browser-sync": "^2.14.0",
15 | "cssnano": "^4.1.10",
16 | "gulp": "^4.0.0",
17 | "gulp-imagemin": "^5.0.3",
18 | "gulp-plumber": "^1.2.1",
19 | "gulp-postcss": "^8.0.0",
20 | "gulp-rename": "^1.2.2",
21 | "gulp-sass": "^4.0.2",
22 | "gulp-uglify-es": "^1.0.4",
23 | "pixrem": "^5.0.0"
24 | {%- endif %}
25 | },
26 | "engines": {
27 | {% if cookiecutter.js_task_runner == "react" %}
28 | "node": ">=9.11.1",
29 | "npm": ">=5.6.0",
30 | "yarn": ">=1.6.0"
31 | {% else %}
32 | "node": ">=8"
33 | {% endif %}
34 | },
35 | "browserslist": [
36 | "last 2 versions"
37 | ],
38 | "scripts": {
39 | {% if cookiecutter.js_task_runner == 'Gulp' -%}
40 | "dev": "gulp",
41 | "build": "gulp generate-assets"
42 | {% endif %}
43 | {% if cookiecutter.js_task_runner == "react" %}
44 | "heroku-prebuild": "cd frontend/ && yarn install && yarn build",
45 | "heroku-postbuild": "cd ../"
46 | {% endif %}
47 | }
48 | }
49 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/compose/local/django/Dockerfile:
--------------------------------------------------------------------------------
FROM python:3.7-slim-buster

# Force stdout/stderr to be unbuffered so log output appears immediately.
ENV PYTHONUNBUFFERED 1

RUN apt-get update \
  # dependencies for building Python packages
  && apt-get install -y build-essential \
  # psycopg2 dependencies
  && apt-get install -y libpq-dev \
  # Translations dependencies
  && apt-get install -y gettext \
  # cleaning up unused files
  && apt-get purge -y --auto-remove -o APT::AutoRemove::RecommendsImportant=false \
  && rm -rf /var/lib/apt/lists/*

# Requirements are installed here to ensure they will be cached.
COPY ./requirements /requirements
RUN pip install -r /requirements/local.txt

# sed strips carriage returns so the script still runs if it was
# checked out with Windows (CRLF) line endings.
COPY ./compose/production/django/entrypoint /entrypoint
RUN sed -i 's/\r$//g' /entrypoint
RUN chmod +x /entrypoint

COPY ./compose/local/django/start /start
RUN sed -i 's/\r$//g' /start
RUN chmod +x /start
{% if cookiecutter.use_celery == "y" %}
# Celery worker/beat/flower start scripts, same CRLF-strip + chmod treatment.
COPY ./compose/local/django/celery/worker/start /start-celeryworker
RUN sed -i 's/\r$//g' /start-celeryworker
RUN chmod +x /start-celeryworker

COPY ./compose/local/django/celery/beat/start /start-celerybeat
RUN sed -i 's/\r$//g' /start-celerybeat
RUN chmod +x /start-celerybeat

COPY ./compose/local/django/celery/flower/start /start-flower
RUN sed -i 's/\r$//g' /start-flower
RUN chmod +x /start-flower
{% endif %}
WORKDIR /app

ENTRYPOINT ["/entrypoint"]
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/.idea/runConfigurations/migrate.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
21 |
22 |
23 |
24 |
25 |
26 |
27 |
28 |
29 |
30 |
31 |
32 |
33 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/{{cookiecutter.project_slug}}/users/views.py:
--------------------------------------------------------------------------------
1 | from django.contrib.auth import get_user_model
2 | from django.contrib.auth.mixins import LoginRequiredMixin
3 | from django.urls import reverse
4 | from django.views.generic import DetailView, RedirectView, UpdateView
5 | from django.contrib import messages
6 | from django.utils.translation import ugettext_lazy as _
7 |
8 | User = get_user_model()
9 |
10 |
class UserDetailView(LoginRequiredMixin, DetailView):
    """Display a single user, looked up by username rather than pk."""

    model = User
    # Resolve the object from the 'username' URL kwarg against User.username.
    slug_field = "username"
    slug_url_kwarg = "username"


user_detail_view = UserDetailView.as_view()
19 |
20 |
class UserUpdateView(LoginRequiredMixin, UpdateView):
    """Let the logged-in user edit their own ``name`` field."""

    model = User
    fields = ["name"]

    def get_success_url(self):
        # Send the user back to their own detail page after saving.
        username = self.request.user.username
        return reverse("users:detail", kwargs={"username": username})

    def get_object(self):
        # Always edit the requesting user's record, ignoring any URL kwargs.
        return User.objects.get(username=self.request.user.username)

    def form_valid(self, form):
        # Flash a confirmation message before the normal save/redirect.
        message = _("Infos successfully updated")
        messages.add_message(self.request, messages.INFO, message)
        return super().form_valid(form)


user_update_view = UserUpdateView.as_view()
40 |
41 |
class UserRedirectView(LoginRequiredMixin, RedirectView):
    """Redirect the logged-in user to their own detail page."""

    # Issue a temporary (302) redirect, not a permanent (301) one.
    permanent = False

    def get_redirect_url(self):
        return reverse("users:detail", kwargs={"username": self.request.user.username})


user_redirect_view = UserRedirectView.as_view()
51 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/{{cookiecutter.project_slug}}/contrib/sites/migrations/0001_initial.py:
--------------------------------------------------------------------------------
import django.contrib.sites.models
from django.contrib.sites.models import _simple_domain_name_validator
from django.db import migrations, models


class Migration(migrations.Migration):
    """Initial schema for the vendored django.contrib.sites Site model."""

    # No dependencies: this replaces the contrib app's own initial migration.
    dependencies = []

    operations = [
        migrations.CreateModel(
            name="Site",
            fields=[
                (
                    "id",
                    models.AutoField(
                        verbose_name="ID",
                        serialize=False,
                        auto_created=True,
                        primary_key=True,
                    ),
                ),
                (
                    "domain",
                    models.CharField(
                        max_length=100,
                        verbose_name="domain name",
                        validators=[_simple_domain_name_validator],
                    ),
                ),
                ("name", models.CharField(max_length=50, verbose_name="display name")),
            ],
            options={
                # Keep the table name the contrib app expects.
                "ordering": ("domain",),
                "db_table": "django_site",
                "verbose_name": "site",
                "verbose_name_plural": "sites",
            },
            bases=(models.Model,),
            managers=[("objects", django.contrib.sites.models.SiteManager())],
        )
    ]
43 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | Copyright (c) 2013-2018, Daniel Roy Greenfeld
2 | All rights reserved.
3 |
4 | Redistribution and use in source and binary forms, with or without modification,
5 | are permitted provided that the following conditions are met:
6 |
7 | * Redistributions of source code must retain the above copyright notice, this
8 | list of conditions and the following disclaimer.
9 |
10 | * Redistributions in binary form must reproduce the above copyright notice, this
11 | list of conditions and the following disclaimer in the documentation and/or
12 | other materials provided with the distribution.
13 |
14 | * Neither the name of Cookiecutter Django nor the names of its contributors may
15 | be used to endorse or promote products derived from this software without
16 | specific prior written permission.
17 |
18 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
19 | ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
20 | WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
21 | IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
22 | INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
23 | BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
24 | DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
25 | LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
26 | OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
27 | OF THE POSSIBILITY OF SUCH DAMAGE.
28 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/.idea/runConfigurations/runserver.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
21 |
22 |
23 |
24 |
25 |
26 |
27 |
28 |
29 |
30 |
31 |
32 |
33 |
34 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/.idea/runConfigurations/runserver_plus.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
21 |
22 |
23 |
24 |
25 |
26 |
27 |
28 |
29 |
30 |
31 |
32 |
33 |
34 |
--------------------------------------------------------------------------------
/docs/faq.rst:
--------------------------------------------------------------------------------
1 | FAQ
2 | ===
3 |
4 | .. index:: FAQ, 12-Factor App
5 |
6 | Why is there a django.contrib.sites directory in Cookiecutter Django?
7 | ---------------------------------------------------------------------
8 |
9 | It is there to add a migration so you don't have to manually change the ``sites.Site`` record from ``example.com`` to whatever your domain is. Instead, your ``{{cookiecutter.domain_name}}`` and ``{{cookiecutter.project_name}}`` values are placed by **Cookiecutter** in the domain and name fields respectively.
10 |
11 | See `0003_set_site_domain_and_name.py`_.
12 |
13 | .. _`0003_set_site_domain_and_name.py`: https://github.com/pydanny/cookiecutter-django/blob/master/%7B%7Bcookiecutter.project_slug%7D%7D/%7B%7Bcookiecutter.project_slug%7D%7D/contrib/sites/migrations/0003_set_site_domain_and_name.py
14 |
15 |
16 | Why aren't you using just one configuration file (12-Factor App)
17 | ----------------------------------------------------------------------
18 |
19 | TODO
20 | .. TODO
21 |
22 | Why doesn't this follow the layout from Two Scoops of Django?
23 | -------------------------------------------------------------
24 |
25 | You may notice that some elements of this project do not exactly match what we describe in chapter 3 of `Two Scoops of Django 1.11`_. The reason for that is this project, amongst other things, serves as a test bed for trying out new ideas and concepts. Sometimes they work, sometimes they don't, but the end result is that it won't necessarily match precisely what is described in the book I co-authored.
26 |
27 | .. _Two Scoops of Django 1.11: https://www.twoscoopspress.com/collections/django/products/two-scoops-of-django-1-11
28 |
--------------------------------------------------------------------------------
/docs/document.rst:
--------------------------------------------------------------------------------
1 | .. _document:
2 |
3 | Document
4 | =========
5 |
6 | This project uses Sphinx_ documentation generator.
7 | After you have set up to `develop locally`_, run the following commands to generate the HTML documentation: ::
8 |
9 | $ sphinx-build docs/ docs/_build/html/
10 |
11 | If you set up your project to `develop locally with docker`_, run the following command: ::
12 |
13 | $ docker-compose -f local.yml run --rm django sphinx-build docs/ docs/_build/html/
14 |
15 | Generate API documentation
16 | ----------------------------
17 |
18 | Sphinx can automatically generate documentation from docstrings. To enable this feature, follow these steps:
19 |
20 | 1. Add Sphinx extension in ``docs/conf.py`` file, like below: ::
21 |
22 | extensions = [
23 | 'sphinx.ext.autodoc',
24 | ]
25 |
26 | 2. Uncomment the following lines in the ``docs/conf.py`` file: ::
27 |
28 | # import django
29 | # sys.path.insert(0, os.path.abspath('..'))
30 | # os.environ.setdefault("DJANGO_SETTINGS_MODULE", "config.settings.local")
31 | # django.setup()
32 |
33 | 3. Run the following command: ::
34 |
35 | $ sphinx-apidoc -f -o ./docs/modules/ ./tpub/ migrations/*
36 |
37 | If you set up your project to `develop locally with docker`_, run the following command: ::
38 |
39 | $ docker-compose -f local.yml run --rm django sphinx-apidoc -f -o ./docs/modules ./tpub/ migrations/*
40 |
41 | 4. Regenerate HTML documentation as written above.
42 |
43 | .. _Sphinx: https://www.sphinx-doc.org/en/master/index.html
44 | .. _develop locally: ./developing-locally.html
45 | .. _develop locally with docker: ./developing-locally-docker.html
46 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/{{cookiecutter.project_slug}}/users/tests/test_views.py:
--------------------------------------------------------------------------------
1 | import pytest
2 | from django.test import RequestFactory
3 |
4 | from {{ cookiecutter.project_slug }}.users.models import User
5 | from {{ cookiecutter.project_slug }}.users.views import UserRedirectView, UserUpdateView
6 |
7 | pytestmark = pytest.mark.django_db
8 |
9 |
class TestUserUpdateView:
    """
    TODO:
        extracting view initialization code as class-scoped fixture
        would be great if only pytest-django supported non-function-scoped
        fixture db access -- this is a work-in-progress for now:
        https://github.com/pytest-dev/pytest-django/pull/258
    """

    # NOTE(review): `user` and `request_factory` are pytest fixtures,
    # presumably provided by a conftest.py — confirm their definitions there.
    def test_get_success_url(self, user: User, request_factory: RequestFactory):
        # Attach an authenticated request directly; the view is never dispatched.
        view = UserUpdateView()
        request = request_factory.get("/fake-url/")
        request.user = user

        view.request = request

        assert view.get_success_url() == f"/users/{user.username}/"

    def test_get_object(self, user: User, request_factory: RequestFactory):
        view = UserUpdateView()
        request = request_factory.get("/fake-url/")
        request.user = user

        view.request = request

        # get_object() should resolve to the requesting user themselves.
        assert view.get_object() == user
36 |
37 |
38 | class TestUserRedirectView:
39 |     def test_get_redirect_url(self, user: User, request_factory: RequestFactory):
40 |         view = UserRedirectView()  # instantiated directly; no URL routing involved
41 |         request = request_factory.get("/fake-url")
42 |         request.user = user  # the redirect target is derived from request.user
43 | 
44 |         view.request = request  # attach manually since we bypass the normal view dispatch
45 | 
46 |         assert view.get_redirect_url() == f"/users/{user.username}/"
47 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/frontend/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "{{cookiecutter.project_slug}}",
3 | "version": "0.1.0",
4 | "private": true,
5 | "proxy": "http://localhost:8000",
6 | "scripts": {
7 | "start": "react-scripts start",
8 | "build": "react-scripts build",
9 | "test": "react-scripts test",
10 | "eject": "react-scripts eject"
11 | },
12 | "eslintConfig": {
13 | "extends": "react-app"
14 | },
15 | "dependencies": {
16 | "react": "16.12.0",
17 | "react-dom": "16.12.0",
18 | "react-scripts": "3.3.1"
19 | },
20 | "devDependencies": {
21 | "@apollo/react-common": "3.1.3",
22 | "@apollo/react-components": "3.1.3",
23 | "@apollo/react-hooks": "3.1.3",
24 | "@apollo/react-testing": "3.1.3",
25 | "apollo-boost": "0.4.7",
26 | "apollo-cache-persist": "0.1.1",
27 | "apollo-link-context": "1.0.19",
28 | "apollo-link-rest": "0.7.3",
29 | "apollo-link-schema": "1.2.4",
30 | "apollo-link-state": "0.4.2",
31 | "apollo-upload-client": "12.1.0",
32 | "axios": "0.19.2",
33 | "graphql": "14.6.0",
34 | "graphql-tag": "2.10.3",
35 | "graphql-tools": "4.0.6",
36 | "localforage": "1.7.3",
37 | "node-sass": "4.13.1",
38 | "raven-js": "3.27.2",
39 | "react-alert": "6.0.0",
40 | "react-alert-template-basic": "1.0.0",
41 | "react-cookies": "0.1.1",
42 | "react-document-title": "2.0.3",
43 | "react-router-dom": "5.1.2",
44 | "react-transition-group": "4.3.0"
45 | },
46 | "engines": {
47 | "node": ">=9.11.1",
48 | "npm": ">=5.6.0",
49 | "yarn": ">=1.6.0"
50 | },
51 | "browserslist": [
52 | ">0.2%",
53 | "not dead",
54 | "not ie <= 11",
55 | "not op_mini all"
56 | ]
57 | }
58 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/{{cookiecutter.project_slug}}/templates/account/login.html:
--------------------------------------------------------------------------------
1 | {% raw %}{% extends "account/base.html" %}
2 |
3 | {% load i18n %}
4 | {% load account socialaccount %}
5 | {% load crispy_forms_tags %}
6 |
7 | {% block head_title %}{% trans "Sign In" %}{% endblock %}
8 |
9 | {% block inner %}
10 |
11 |
{% trans "Sign In" %}
12 |
13 | {% get_providers as socialaccount_providers %}
14 |
15 | {% if socialaccount_providers %}
16 |
{% blocktrans with site.name as site_name %}Please sign in with one
17 | of your existing third party accounts. Or, sign up
18 | for a {{ site_name }} account and sign in below:{% endblocktrans %}
19 |
20 |
21 |
22 |
23 | {% include "socialaccount/snippets/provider_list.html" with process="login" %}
24 |
{% blocktrans %}If you have not created an account yet, then please
34 | sign up first.{% endblocktrans %}
35 | {% endif %}
36 |
37 |
46 |
47 | {% endblock %}
48 | {% endraw %}
49 |
--------------------------------------------------------------------------------
/setup.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | 
3 | import os
4 | import sys
5 | 
6 | try:
7 |     from setuptools import setup
8 | except ImportError:
9 |     from distutils.core import setup  # fallback for environments without setuptools
10 | 
11 | # Our version ALWAYS matches the version of Django we support
12 | # If Django has a new release, we branch, tag, then update this setting after the tag.
13 | version = "2.2.1"
14 | 
15 | if sys.argv[-1] == "tag":  # `python setup.py tag` creates+pushes a git tag, then exits
16 |     os.system(f'git tag -a {version} -m "version {version}"')
17 |     os.system("git push --tags")
18 |     sys.exit()
19 | 
20 | with open("README.rst") as readme_file:  # README doubles as the PyPI long description
21 |     long_description = readme_file.read()
22 | 
23 | setup(
24 |     name="cookiecutter-django",
25 |     version=version,
26 |     description="A Cookiecutter template for creating production-ready Django projects quickly.",
27 |     long_description=long_description,
28 |     author="Daniel Roy Greenfeld",
29 |     author_email="pydanny@gmail.com",
30 |     url="https://github.com/pydanny/cookiecutter-django",
31 |     packages=[],  # intentionally empty: this package ships only the template, no importable code
32 |     license="BSD",
33 |     zip_safe=False,
34 |     classifiers=[
35 |         "Development Status :: 4 - Beta",
36 |         "Environment :: Console",
37 |         "Framework :: Django :: 2.2",
38 |         "Intended Audience :: Developers",
39 |         "Natural Language :: English",
40 |         "License :: OSI Approved :: BSD License",
41 |         "Programming Language :: Python",
42 |         "Programming Language :: Python :: 3",
43 |         "Programming Language :: Python :: 3.7",
44 |         "Programming Language :: Python :: Implementation :: CPython",
45 |         "Topic :: Software Development",
46 |     ],
47 |     keywords=(
48 |         "cookiecutter, Python, projects, project templates, django, "
49 |         "skeleton, scaffolding, project directory, setup.py"
50 |     ),
51 | )
52 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/requirements/local.txt:
--------------------------------------------------------------------------------
1 | -r ./base.txt
2 |
3 | Werkzeug==0.16.1 # https://github.com/pallets/werkzeug
4 | ipdb==0.12.3 # https://github.com/gotcha/ipdb
5 | Sphinx==2.3.1 # https://github.com/sphinx-doc/sphinx
6 | {%- if cookiecutter.use_docker == 'y' %}
7 | psycopg2==2.8.4 --no-binary psycopg2 # https://github.com/psycopg/psycopg2
8 | {%- else %}
9 | psycopg2-binary==2.8.4 # https://github.com/psycopg/psycopg2
10 | {%- endif %}
11 |
12 | # Testing
13 | # ------------------------------------------------------------------------------
14 | mypy==0.761 # https://github.com/python/mypy
15 | django-stubs==1.4.0 # https://github.com/typeddjango/django-stubs
16 | pytest==5.3.5 # https://github.com/pytest-dev/pytest
17 | pytest-sugar==0.9.2 # https://github.com/Frozenball/pytest-sugar
18 |
19 | # Code quality
20 | # ------------------------------------------------------------------------------
21 | flake8==3.7.9 # https://github.com/PyCQA/flake8
22 | coverage==5.0.3 # https://github.com/nedbat/coveragepy
23 | black==19.10b0 # https://github.com/ambv/black
24 | pylint-django==2.0.13 # https://github.com/PyCQA/pylint-django
25 | {%- if cookiecutter.use_celery == 'y' %}
26 | pylint-celery==0.3 # https://github.com/PyCQA/pylint-celery
27 | {%- endif %}
28 | pre-commit==2.0.1 # https://github.com/pre-commit/pre-commit
29 |
30 | # Django
31 | # ------------------------------------------------------------------------------
32 | factory-boy==2.12.0 # https://github.com/FactoryBoy/factory_boy
33 |
34 | django-debug-toolbar==2.2 # https://github.com/jazzband/django-debug-toolbar
35 | django-extensions==2.2.6 # https://github.com/django-extensions/django-extensions
36 | django-coverage-plugin==1.8.0 # https://github.com/nedbat/django_coverage_plugin
37 | pytest-django==3.8.0 # https://github.com/pytest-dev/pytest-django
38 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/compose/production/traefik/traefik.yml:
--------------------------------------------------------------------------------
1 | log:
2 | level: INFO
3 |
4 | entryPoints:
5 | web:
6 | # http
7 | address: ":80"
8 |
9 | web-secure:
10 | # https
11 | address: ":443"
12 |
13 | certificatesResolvers:
14 | letsencrypt:
15 | # https://docs.traefik.io/master/https/acme/#lets-encrypt
16 | acme:
17 | email: "{{ cookiecutter.email }}"
18 | storage: /etc/traefik/acme/acme.json
19 | # https://docs.traefik.io/master/https/acme/#httpchallenge
20 | httpChallenge:
21 | entryPoint: web
22 |
23 | http:
24 | routers:
25 | web-router:
26 | rule: "Host(`{{ cookiecutter.domain_name }}`)"
27 | entryPoints:
28 | - web
29 | middlewares:
30 | - redirect
31 | - csrf
32 | service: django
33 |
34 | web-secure-router:
35 | rule: "Host(`{{ cookiecutter.domain_name }}`)"
36 | entryPoints:
37 | - web-secure
38 | middlewares:
39 | - csrf
40 | service: django
41 | tls:
42 | # https://docs.traefik.io/master/routing/routers/#certresolver
43 | certResolver: letsencrypt
44 |
45 | middlewares:
46 | redirect:
47 | # https://docs.traefik.io/master/middlewares/redirectscheme/
48 | redirectScheme:
49 | scheme: https
50 | permanent: true
51 | csrf:
52 | # https://docs.traefik.io/master/middlewares/headers/#hostsproxyheaders
53 | # https://docs.djangoproject.com/en/dev/ref/csrf/#ajax
54 | headers:
55 | hostsProxyHeaders: ['X-CSRFToken']
56 |
57 | services:
58 | django:
59 | loadBalancer:
60 | servers:
61 | - url: http://django:5000
62 |
63 | providers:
64 | # https://docs.traefik.io/master/providers/file/
65 | file:
66 | filename: /etc/traefik/traefik.yml
67 | watch: true
68 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/config/settings/test.py:
--------------------------------------------------------------------------------
1 | """
2 | With these settings, tests run faster.
3 | """
4 | 
5 | from .base import *  # noqa
6 | from .base import env
7 | 
8 | # GENERAL
9 | # ------------------------------------------------------------------------------
10 | # https://docs.djangoproject.com/en/dev/ref/settings/#secret-key
11 | SECRET_KEY = env(
12 |     "DJANGO_SECRET_KEY",
13 |     default="!!!SET DJANGO_SECRET_KEY!!!",
14 | )
15 | # https://docs.djangoproject.com/en/dev/ref/settings/#test-runner
16 | TEST_RUNNER = "django.test.runner.DiscoverRunner"
17 | 
18 | # CACHES
19 | # ------------------------------------------------------------------------------
20 | # https://docs.djangoproject.com/en/dev/ref/settings/#caches
21 | CACHES = {
22 |     "default": {
23 |         "BACKEND": "django.core.cache.backends.locmem.LocMemCache",  # in-memory: fast, isolated per process
24 |         "LOCATION": "",
25 |     }
26 | }
27 | 
28 | # PASSWORDS
29 | # ------------------------------------------------------------------------------
30 | # https://docs.djangoproject.com/en/dev/ref/settings/#password-hashers
31 | PASSWORD_HASHERS = ["django.contrib.auth.hashers.MD5PasswordHasher"]  # weak but fast; tests only
32 | 
33 | # TEMPLATES
34 | # ------------------------------------------------------------------------------
35 | TEMPLATES[-1]["OPTIONS"]["loaders"] = [  # type: ignore[index] # noqa F405
36 |     (
37 |         "django.template.loaders.cached.Loader",  # avoids re-reading templates from disk on every render
38 |         [
39 |             "django.template.loaders.filesystem.Loader",
40 |             "django.template.loaders.app_directories.Loader",
41 |         ],
42 |     )
43 | ]
44 | 
45 | # EMAIL
46 | # ------------------------------------------------------------------------------
47 | # https://docs.djangoproject.com/en/dev/ref/settings/#email-backend
48 | EMAIL_BACKEND = "django.core.mail.backends.locmem.EmailBackend"  # sent mail is captured in django.core.mail.outbox
49 | 
50 | # Your stuff...
51 | # ------------------------------------------------------------------------------
52 |
--------------------------------------------------------------------------------
/CONTRIBUTING.rst:
--------------------------------------------------------------------------------
1 | How to Contribute
2 | =================
3 |
4 | Always happy to get issues identified and pull requests!
5 |
6 | Getting your pull request merged in
7 | ------------------------------------
8 |
9 | #. Keep it small. The smaller the pull request the more likely I'll pull it in.
10 | #. Pull requests that fix a current issue get priority for review.
11 | #. If you're not already in the `CONTRIBUTORS.rst` file, add yourself!
12 |
13 | Testing
14 | -------
15 |
16 | Installation
17 | ~~~~~~~~~~~~
18 |
19 | Please install `tox`_, which is a generic virtualenv management and test command line tool.
20 |
21 | `tox`_ is available for download from `PyPI`_ via `pip`_::
22 |
23 | $ pip install tox
24 |
25 | It will automatically create a fresh virtual environment and install our test dependencies,
26 | such as `pytest-cookies`_ and `flake8`_.
27 |
28 | Run the Tests
29 | ~~~~~~~~~~~~~
30 |
31 | Tox uses py.test under the hood, hence it supports the same syntax for selecting tests.
32 |
33 | For further information please consult the `pytest usage docs`_.
34 |
35 | To run all tests using various versions of Python in virtualenvs defined in tox.ini, just run tox::
36 |
37 | $ tox
38 |
39 | It is possible to test with a specific version of python. To do this, the command
40 | is::
41 |
42 | $ tox -e py37
43 |
44 | This will run py.test with the python3.7 interpreter, for example.
45 |
46 | To run a particular test with tox against your current Python version::
47 |
48 | $ tox -e py -- -k test_default_configuration
49 |
50 | .. _`pytest usage docs`: https://pytest.org/latest/usage.html#specifying-tests-selecting-tests
51 | .. _`tox`: https://tox.readthedocs.io/en/latest/
52 | .. _`pip`: https://pypi.python.org/pypi/pip/
53 | .. _`pytest-cookies`: https://pypi.python.org/pypi/pytest-cookies/
54 | .. _`flake8`: https://pypi.python.org/pypi/flake8/
55 | .. _`PyPI`: https://pypi.python.org/pypi
56 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/frontend/public/index.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
12 |
13 |
17 |
18 |
27 | React App
28 |
29 |
30 |
31 |
32 |
42 |
43 |
44 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/compose/production/postgres/maintenance/restore:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 | 
3 | 
4 | ### Restore database from a backup.
5 | ###
6 | ### Parameters:
7 | ###     <1> filename of an existing backup.
8 | ###
9 | ### Usage:
10 | ###     $ docker-compose -f <environment>.yml (exec |run --rm) postgres restore <1>
11 | 
12 | 
13 | set -o errexit
14 | set -o pipefail
15 | set -o nounset
16 | 
17 | 
18 | working_dir="$(dirname "${0}")"  # quoted so paths containing spaces resolve correctly
19 | source "${working_dir}/_sourced/constants.sh"
20 | source "${working_dir}/_sourced/messages.sh"
21 | 
22 | 
23 | if [[ -z ${1+x} ]]; then
24 |     message_error "Backup filename is not specified yet it is a required parameter. Make sure you provide one and try again."
25 |     exit 1
26 | fi
27 | backup_filename="${BACKUP_DIR_PATH}/${1}"
28 | if [[ ! -f "${backup_filename}" ]]; then
29 |     message_error "No backup with the specified filename found. Check out the 'backups' maintenance script output to see if there is one and try again."
30 |     exit 1
31 | fi
32 | 
33 | message_welcome "Restoring the '${POSTGRES_DB}' database from the '${backup_filename}' backup..."
34 | 
35 | if [[ "${POSTGRES_USER}" == "postgres" ]]; then
36 |     message_error "Restoring as 'postgres' user is not supported. Assign 'POSTGRES_USER' env with another one and try again."
37 |     exit 1
38 | fi
39 | 
40 | export PGHOST="${POSTGRES_HOST}"
41 | export PGPORT="${POSTGRES_PORT}"
42 | export PGUSER="${POSTGRES_USER}"
43 | export PGPASSWORD="${POSTGRES_PASSWORD}"
44 | export PGDATABASE="${POSTGRES_DB}"
45 | 
46 | message_info "Dropping the database..."
47 | dropdb "${PGDATABASE}"
48 | 
49 | message_info "Creating a new database..."
50 | createdb --owner="${POSTGRES_USER}"  # database name is taken from the PGDATABASE env var
51 | 
52 | message_info "Applying the backup to the new database..."
53 | gunzip -c "${backup_filename}" | psql "${POSTGRES_DB}"
54 | 
55 | message_success "The '${POSTGRES_DB}' database has been restored from the '${backup_filename}' backup."
56 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/config/wsgi.py:
--------------------------------------------------------------------------------
1 | """
2 | WSGI config for {{ cookiecutter.project_name }} project.
3 | 
4 | This module contains the WSGI application used by Django's development server
5 | and any production WSGI deployments. It should expose a module-level variable
6 | named ``application``. Django's ``runserver`` and ``runfcgi`` commands discover
7 | this application via the ``WSGI_APPLICATION`` setting.
8 | 
9 | Usually you will have the standard Django WSGI application here, but it also
10 | might make sense to replace the whole Django WSGI application with a custom one
11 | that later delegates to the Django one. For example, you could introduce WSGI
12 | middleware here, or combine a Django application with an application of another
13 | framework.
14 | 
15 | """
16 | import os
17 | import sys
18 | 
19 | from django.core.wsgi import get_wsgi_application
20 | 
21 | # This allows easy placement of apps within the interior
22 | # {{ cookiecutter.project_slug }} directory.
23 | app_path = os.path.abspath(
24 |     os.path.join(os.path.dirname(os.path.abspath(__file__)), os.pardir)
25 | )  # app_path is the repository root (one level above this config/ package)
26 | sys.path.append(os.path.join(app_path, "{{ cookiecutter.project_slug }}"))
27 | # We defer to a DJANGO_SETTINGS_MODULE already in the environment. This breaks
28 | # if running multiple sites in the same mod_wsgi process. To fix this, use
29 | # mod_wsgi daemon mode with each site in its own daemon process, or use
30 | # os.environ["DJANGO_SETTINGS_MODULE"] = "config.settings.production"
31 | os.environ.setdefault("DJANGO_SETTINGS_MODULE", "config.settings.production")
32 | 
33 | # This application object is used by any WSGI server configured to use this
34 | # file. This includes Django's development server, if the WSGI_APPLICATION
35 | # setting points here.
36 | application = get_wsgi_application()
37 | # Apply WSGI middleware here.
38 | # from helloworld.wsgi import HelloWorldApplication
39 | # application = HelloWorldApplication(application)
40 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/utility/install_python_dependencies.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | 
3 | WORK_DIR="$(dirname "$0")"
4 | PROJECT_DIR="$(dirname "$WORK_DIR")"
5 | 
6 | pip --version >/dev/null 2>&1 || {
7 |     echo >&2 -e "\npip is required but it's not installed."
8 |     echo >&2 -e "You can install it by running the following command:\n"
9 |     echo >&2 "wget https://bootstrap.pypa.io/get-pip.py --output-document=get-pip.py; chmod +x get-pip.py; sudo -H python3 get-pip.py"
10 |     echo >&2 -e "\n"
11 |     echo >&2 -e "\nFor more information, see pip documentation: https://pip.pypa.io/en/latest/"
12 |     exit 1;
13 | }
14 | 
15 | virtualenv --version >/dev/null 2>&1 || {
16 |     echo >&2 -e "\nvirtualenv is required but it's not installed."
17 |     echo >&2 -e "You can install it by running the following command:\n"
18 |     echo >&2 "sudo -H pip3 install virtualenv"
19 |     echo >&2 -e "\n"
20 |     echo >&2 -e "\nFor more information, see virtualenv documentation: https://virtualenv.pypa.io/en/latest/"
21 |     exit 1;
22 | }
23 | 
24 | if [ -z "$VIRTUAL_ENV" ]; then
25 |     echo >&2 -e "\nYou need activate a virtualenv first"
26 |     echo >&2 -e 'If you do not have a virtualenv created, run the following command to create and automatically activate a new virtualenv named "venv" on current folder:\n'
27 |     echo >&2 -e "virtualenv venv --python=\`which python3\`"
28 |     echo >&2 -e "\nTo leave/disable the currently active virtualenv, run the following command:\n"
29 |     echo >&2 "deactivate"
30 |     echo >&2 -e "\nTo activate the virtualenv again, run the following command:\n"
31 |     echo >&2 "source venv/bin/activate"
32 |     echo >&2 -e "\nFor more information, see virtualenv documentation: https://virtualenv.pypa.io/en/latest/"
33 |     echo >&2 -e "\n"
34 |     exit 1;
35 | else
36 | 
37 |     pip install -r "$PROJECT_DIR/requirements/local.txt"  # quoted: project path may contain spaces
38 | {% if cookiecutter.use_heroku == "y" -%}
39 |     pip install -r "$PROJECT_DIR/requirements.txt"
40 | {%- endif %}
41 | fi
42 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/.idea/{{cookiecutter.project_slug}}.iml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 | {% if cookiecutter.js_task_runner != 'None' %}
17 |
18 |
19 |
20 | {% else %}
21 |
22 | {% endif %}
23 |
24 |
25 |
26 |
27 |
28 |
29 |
30 |
31 |
32 |
33 |
34 |
35 |
36 |
37 |
42 |
43 |
44 |
45 |
46 |
47 |
48 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/frontend/README.md:
--------------------------------------------------------------------------------
1 | This project was bootstrapped with [Create React App](https://github.com/facebook/create-react-app).
2 |
3 | ## Available Scripts
4 |
5 | In the project directory, you can run:
6 |
7 | ### `npm start`
8 |
9 | Runs the app in the development mode.
10 | Open [http://localhost:3000](http://localhost:3000) to view it in the browser.
11 |
12 | The page will reload if you make edits.
13 | You will also see any lint errors in the console.
14 |
15 | ### `npm test`
16 |
17 | Launches the test runner in the interactive watch mode.
18 | See the section about [running tests](https://facebook.github.io/create-react-app/docs/running-tests) for more information.
19 |
20 | ### `npm run build`
21 |
22 | Builds the app for production to the `build` folder.
23 | It correctly bundles React in production mode and optimizes the build for the best performance.
24 |
25 | The build is minified and the filenames include the hashes.
26 | Your app is ready to be deployed!
27 |
28 | See the section about [deployment](https://facebook.github.io/create-react-app/docs/deployment) for more information.
29 |
30 | ### `npm run eject`
31 |
32 | **Note: this is a one-way operation. Once you `eject`, you can’t go back!**
33 |
34 | If you aren’t satisfied with the build tool and configuration choices, you can `eject` at any time. This command will remove the single build dependency from your project.
35 |
36 | Instead, it will copy all the configuration files and the transitive dependencies (Webpack, Babel, ESLint, etc) right into your project so you have full control over them. All of the commands except `eject` will still work, but they will point to the copied scripts so you can tweak them. At this point you’re on your own.
37 |
38 | You don’t have to ever use `eject`. The curated feature set is suitable for small and middle deployments, and you shouldn’t feel obligated to use this feature. However we understand that this tool wouldn’t be useful if you couldn’t customize it when you are ready for it.
39 |
40 | ## Learn More
41 |
42 | You can learn more in the [Create React App documentation](https://facebook.github.io/create-react-app/docs/getting-started).
43 |
44 | To learn React, check out the [React documentation](https://reactjs.org/).
45 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/production.yml:
--------------------------------------------------------------------------------
1 | version: '3'
2 |
3 | volumes:
4 | production_postgres_data: {}
5 | production_postgres_data_backups: {}
6 | production_traefik: {}
7 |
8 | services:
9 | django:{% if cookiecutter.use_celery == 'y' %} &django{% endif %}
10 | build:
11 | context: .
12 | dockerfile: ./compose/production/django/Dockerfile
13 | image: {{ cookiecutter.project_slug }}_production_django
14 | depends_on:
15 | - postgres
16 | - redis
17 | env_file:
18 | - ./.envs/.production/.django
19 | - ./.envs/.production/.postgres
20 | command: /start
21 |
22 | postgres:
23 | build:
24 | context: .
25 | dockerfile: ./compose/production/postgres/Dockerfile
26 | image: {{ cookiecutter.project_slug }}_production_postgres
27 | volumes:
28 | - production_postgres_data:/var/lib/postgresql/data
29 | - production_postgres_data_backups:/backups
30 | env_file:
31 | - ./.envs/.production/.postgres
32 |
33 | traefik:
34 | build:
35 | context: .
36 | dockerfile: ./compose/production/traefik/Dockerfile
37 | image: {{ cookiecutter.project_slug }}_production_traefik
38 | depends_on:
39 | - django
40 | volumes:
41 | - production_traefik:/etc/traefik/acme
42 | ports:
43 | - "0.0.0.0:80:80"
44 | - "0.0.0.0:443:443"
45 |
46 | redis:
47 | image: redis:5.0
48 | {%- if cookiecutter.use_celery == 'y' %}
49 |
50 | celeryworker:
51 | <<: *django
52 | image: {{ cookiecutter.project_slug }}_production_celeryworker
53 | command: /start-celeryworker
54 |
55 | celerybeat:
56 | <<: *django
57 | image: {{ cookiecutter.project_slug }}_production_celerybeat
58 | command: /start-celerybeat
59 |
60 | flower:
61 | <<: *django
62 | image: {{ cookiecutter.project_slug }}_production_flower
63 | ports:
64 | - "5555:5555"
65 | command: /start-flower
66 |
67 | {%- endif %}
68 |
69 | {% if cookiecutter.cloud_provider == 'AWS' %}
70 | awscli:
71 | build:
72 | context: .
73 | dockerfile: ./compose/production/aws/Dockerfile
74 | env_file:
75 | - ./.envs/.production/.django
76 | volumes:
77 | - production_postgres_data_backups:/backups
78 | {%- endif %}
79 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/docs/conf.py:
--------------------------------------------------------------------------------
1 | # Configuration file for the Sphinx documentation builder.
2 | #
3 | # This file only contains a selection of the most common options. For a full
4 | # list see the documentation:
5 | # https://www.sphinx-doc.org/en/master/usage/configuration.html
6 | 
7 | # -- Path setup --------------------------------------------------------------
8 | 
9 | # If extensions (or modules to document with autodoc) are in another directory,
10 | # add these directories to sys.path here. If the directory is relative to the
11 | # documentation root, use os.path.abspath to make it absolute, like shown here.
12 | #
13 | import os
14 | import sys
15 | 
16 | # import django
17 | # sys.path.insert(0, os.path.abspath('..'))
18 | # os.environ.setdefault("DJANGO_SETTINGS_MODULE", "config.settings.local")
19 | # django.setup()
20 | 
21 | 
22 | # -- Project information -----------------------------------------------------
23 | 
24 | project = "{{ cookiecutter.project_name }}"
25 | copyright = """{% now 'utc', '%Y' %}, {{ cookiecutter.author_name }}"""
26 | author = "{{ cookiecutter.author_name }}"
27 | 
28 | 
29 | # -- General configuration ---------------------------------------------------
30 | 
31 | # Add any Sphinx extension module names here, as strings. They can be
32 | # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
33 | # ones.
34 | extensions = []  # e.g. add "sphinx.ext.autodoc" here to build docs from docstrings
35 | 
36 | # Add any paths that contain templates here, relative to this directory.
37 | templates_path = ["_templates"]
38 | 
39 | # List of patterns, relative to source directory, that match files and
40 | # directories to ignore when looking for source files.
41 | # This pattern also affects html_static_path and html_extra_path.
42 | exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"]
43 | 
44 | 
45 | # -- Options for HTML output -------------------------------------------------
46 | 
47 | # The theme to use for HTML and HTML Help pages. See the documentation for
48 | # a list of builtin themes.
49 | #
50 | html_theme = "alabaster"
51 | 
52 | # Add any paths that contain custom static files (such as style sheets) here,
53 | # relative to this directory. They are copied after the builtin static files,
54 | # so a file named "default.css" will overwrite the builtin "default.css".
55 | html_static_path = ["_static"]
56 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/.envs/.production/.django:
--------------------------------------------------------------------------------
1 | # General
2 | # ------------------------------------------------------------------------------
3 | # DJANGO_READ_DOT_ENV_FILE=True
4 | DJANGO_SETTINGS_MODULE=config.settings.production
5 | DJANGO_SECRET_KEY=!!!SET DJANGO_SECRET_KEY!!!
6 | DJANGO_ADMIN_URL=!!!SET DJANGO_ADMIN_URL!!!
7 | DJANGO_ALLOWED_HOSTS=.{{ cookiecutter.domain_name }}
8 |
9 | # Security
10 | # ------------------------------------------------------------------------------
11 | # TIP: better off using DNS, however, redirect is OK too
12 | DJANGO_SECURE_SSL_REDIRECT=False
13 |
14 | # Email
15 | # ------------------------------------------------------------------------------
16 | MAILGUN_API_KEY=
17 | DJANGO_SERVER_EMAIL=
18 | MAILGUN_DOMAIN=
19 | {% if cookiecutter.cloud_provider == 'AWS' %}
20 | # AWS
21 | # ------------------------------------------------------------------------------
22 | DJANGO_AWS_ACCESS_KEY_ID=
23 | DJANGO_AWS_SECRET_ACCESS_KEY=
24 | DJANGO_AWS_STORAGE_BUCKET_NAME=
25 | {% elif cookiecutter.cloud_provider == 'GCP' %}
26 | # GCP
27 | # ------------------------------------------------------------------------------
28 | GOOGLE_APPLICATION_CREDENTIALS=
29 | DJANGO_GCP_STORAGE_BUCKET_NAME=
30 | {% endif %}
31 | # django-allauth
32 | # ------------------------------------------------------------------------------
33 | DJANGO_ACCOUNT_ALLOW_REGISTRATION=True
34 | {% if cookiecutter.use_compressor == 'y' %}
35 | # django-compressor
36 | # ------------------------------------------------------------------------------
37 | COMPRESS_ENABLED=
38 | {% endif %}
39 | # Gunicorn
40 | # ------------------------------------------------------------------------------
41 | WEB_CONCURRENCY=4
42 | {% if cookiecutter.use_sentry == 'y' %}
43 | # Sentry
44 | # ------------------------------------------------------------------------------
45 | SENTRY_DSN=
46 | {% endif %}
47 |
48 | # Redis
49 | # ------------------------------------------------------------------------------
50 | REDIS_URL=redis://redis:6379/0
51 | {% if cookiecutter.use_celery == 'y' %}
52 | # Celery
53 | # ------------------------------------------------------------------------------
54 |
55 | # Flower
56 | CELERY_FLOWER_USER=!!!SET CELERY_FLOWER_USER!!!
57 | CELERY_FLOWER_PASSWORD=!!!SET CELERY_FLOWER_PASSWORD!!!
58 | {% endif %}
59 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/compose/production/django/Dockerfile:
--------------------------------------------------------------------------------
1 | {% if cookiecutter.js_task_runner == 'Gulp' -%}
2 | FROM node:10-stretch-slim as client-builder
3 | 
4 | WORKDIR /app
5 | COPY ./package.json /app
6 | RUN npm install && npm cache clean --force
7 | COPY . /app
8 | RUN npm run build
9 | 
10 | # Python build stage
11 | {%- endif %}
12 | FROM python:3.7-slim-buster
13 | 
14 | ENV PYTHONUNBUFFERED 1
15 | 
16 | RUN apt-get update \
17 |   # dependencies for building Python packages
18 |   && apt-get install -y build-essential \
19 |   # psycopg2 dependencies
20 |   && apt-get install -y libpq-dev \
21 |   # Translations dependencies
22 |   && apt-get install -y gettext \
23 |   # cleaning up unused files
24 |   && apt-get purge -y --auto-remove -o APT::AutoRemove::RecommendsImportant=false \
25 |   && rm -rf /var/lib/apt/lists/*
26 | 
27 | # Run the app as an unprivileged "django" user
28 | RUN addgroup --system django \
29 |     && adduser --system --ingroup django django
30 | 
31 | # Requirements are installed here to ensure they will be cached.
32 | COPY ./requirements /requirements
33 | RUN pip install --no-cache-dir -r /requirements/production.txt \
34 |     && rm -rf /requirements
35 | 
36 | COPY ./compose/production/django/entrypoint /entrypoint
37 | RUN sed -i 's/\r$//g' /entrypoint  # strip Windows carriage returns (CRLF -> LF)
38 | RUN chmod +x /entrypoint
39 | RUN chown django /entrypoint
40 | 
41 | COPY ./compose/production/django/start /start
42 | RUN sed -i 's/\r$//g' /start  # same CRLF normalization for the start script
43 | RUN chmod +x /start
44 | RUN chown django /start
45 | 
46 | {%- if cookiecutter.use_celery == "y" %}
47 | COPY ./compose/production/django/celery/worker/start /start-celeryworker
48 | RUN sed -i 's/\r$//g' /start-celeryworker
49 | RUN chmod +x /start-celeryworker
50 | RUN chown django /start-celeryworker
51 | 
52 | COPY ./compose/production/django/celery/beat/start /start-celerybeat
53 | RUN sed -i 's/\r$//g' /start-celerybeat
54 | RUN chmod +x /start-celerybeat
55 | RUN chown django /start-celerybeat
56 | 
57 | COPY ./compose/production/django/celery/flower/start /start-flower
58 | RUN sed -i 's/\r$//g' /start-flower
59 | RUN chmod +x /start-flower
60 | {%- endif %}
61 | 
62 | {%- if cookiecutter.js_task_runner == 'Gulp' %}
63 | COPY --from=client-builder --chown=django:django /app /app
64 | {% else %}
65 | COPY --chown=django:django . /app
66 | {%- endif %}
67 | 
68 | USER django
69 | 
70 | WORKDIR /app
71 | 
72 | ENTRYPOINT ["/entrypoint"]
72 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/.idea/workspace.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 | {%- if cookiecutter.use_docker == 'n' %}
4 |
7 |
8 | {%- elif cookiecutter.use_celery == 'y' %}
9 |
12 |
13 | {%- else %}
14 |
17 |
18 | {%- endif %}
19 |
20 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/merge_production_dotenvs_in_dotenv.py:
--------------------------------------------------------------------------------
1 | import os
2 | from typing import Sequence
3 |
4 | import pytest
5 |
# Directory containing this script; all paths below are resolved against it.
ROOT_DIR_PATH = os.path.dirname(os.path.realpath(__file__))
# Production env-fragment files, merged in this order (.django first).
PRODUCTION_DOTENVS_DIR_PATH = os.path.join(ROOT_DIR_PATH, ".envs", ".production")
PRODUCTION_DOTENV_FILE_PATHS = [
    os.path.join(PRODUCTION_DOTENVS_DIR_PATH, ".django"),
    os.path.join(PRODUCTION_DOTENVS_DIR_PATH, ".postgres"),
]
# Destination: single consolidated .env at the project root.
DOTENV_FILE_PATH = os.path.join(ROOT_DIR_PATH, ".env")
13 |
14 |
def merge(
    output_file_path: str, merged_file_paths: Sequence[str], append_linesep: bool = True
) -> None:
    """Concatenate the given files into ``output_file_path``.

    Args:
        output_file_path: Destination file; overwritten if it exists.
        merged_file_paths: Source files, concatenated in order.
        append_linesep: When True, write ``os.linesep`` after each source
            file's content so fragments never run together.
    """
    with open(output_file_path, "w") as destination:
        for source_path in merged_file_paths:
            with open(source_path, "r") as source:
                destination.write(source.read())
            if append_linesep:
                destination.write(os.linesep)
25 |
26 |
def main():
    # Merge the production dotenv fragments into the single top-level .env.
    merge(DOTENV_FILE_PATH, PRODUCTION_DOTENV_FILE_PATHS)
29 |
30 |
@pytest.mark.parametrize("merged_file_count", range(3))
@pytest.mark.parametrize("append_linesep", [True, False])
def test_merge(tmpdir_factory, merged_file_count: int, append_linesep: bool):
    """merge() concatenates 0-2 fixture files, optionally separator-terminated."""
    base_dir = str(tmpdir_factory.getbasetemp())
    output_file_path = os.path.join(base_dir, ".env")

    expected = ""
    source_paths = []
    for ordinal in range(1, merged_file_count + 1):
        # Content is the filename repeated, so each fragment is distinct.
        filename = ".service{}".format(ordinal)
        source_path = os.path.join(base_dir, filename)
        content = filename * ordinal

        with open(source_path, "w+") as source_file:
            source_file.write(content)

        expected += content
        if append_linesep:
            expected += os.linesep
        source_paths.append(source_path)

    merge(output_file_path, source_paths, append_linesep)

    with open(output_file_path, "r") as output_file:
        assert output_file.read() == expected
63 |
64 |
# Script entry point: regenerate .env when invoked directly.
if __name__ == "__main__":
    main()
67 |
--------------------------------------------------------------------------------
/docs/testing.rst:
--------------------------------------------------------------------------------
1 | .. _testing:
2 |
3 | Testing
4 | ========
5 |
6 | We encourage users to build application tests. As best practice, this should be done immediately after documentation of the application being built, before starting on any coding.
7 |
8 | Pytest
9 | ------
10 |
11 | This project uses the Pytest_, a framework for easily building simple and scalable tests.
12 | After you have set up to `develop locally`_, run the following commands to make sure the testing environment is ready: ::
13 |
14 | $ pytest
15 |
16 | You will get a readout of the `users` app that has already been set up with tests. If you do not want to run the `pytest` on the entire project, you can target a particular app by typing in its location: ::
17 |
18 | $ pytest {{ cookiecutter.project_slug }}/users
19 |
20 | If you set up your project to `develop locally with docker`_, run the following command: ::
21 |
22 | $ docker-compose -f local.yml run --rm django pytest
23 |
24 | Targeting particular apps for testing in ``docker`` follows a similar pattern as previously shown above.
25 |
26 | Coverage
27 | --------
28 |
29 | You should build your tests to provide the highest level of **code coverage**. You can run the ``pytest`` with code ``coverage`` by typing in the following command: ::
30 |
31 | $ docker-compose -f local.yml run --rm django coverage run -m pytest
32 |
33 | Once the tests are complete, in order to see the code coverage, run the following command: ::
34 |
35 | $ docker-compose -f local.yml run --rm django coverage report
36 |
37 | .. note::
38 |
39 | At the root of the project folder, you will find the `pytest.ini` file. You can use this to customize_ the ``pytest`` to your liking.
40 |
41 | There is also the `.coveragerc`. This is the configuration file for the ``coverage`` tool. You can find out more about `configuring`_ ``coverage``.
42 |
43 | .. seealso::
44 |
45 | For unit tests, run: ::
46 |
47 | $ python manage.py test
48 |
49 | Since this is a fresh install, and there are no tests built using the Python `unittest`_ library yet, you should get feedback that says there were no tests carried out.
50 |
51 | .. _Pytest: https://docs.pytest.org/en/latest/example/simple.html
52 | .. _develop locally: ./developing-locally.html
53 | .. _develop locally with docker: ./developing-locally-docker.html
54 | .. _customize: https://docs.pytest.org/en/latest/customize.html
55 | .. _unittest: https://docs.python.org/3/library/unittest.html#module-unittest
56 | .. _configuring: https://coverage.readthedocs.io/en/v4.5.x/config.html
57 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/requirements/base.txt:
--------------------------------------------------------------------------------
1 | pytz==2019.3 # https://github.com/stub42/pytz
2 | python-slugify==4.0.0 # https://github.com/un33k/python-slugify
3 | Pillow==7.0.0 # https://github.com/python-pillow/Pillow
4 | {%- if cookiecutter.use_compressor == "y" %}
5 | rcssmin==1.0.6{% if cookiecutter.windows == 'y' and cookiecutter.use_docker == 'n' %} --install-option="--without-c-extensions"{% endif %} # https://github.com/ndparker/rcssmin
6 | {%- endif %}
7 | argon2-cffi==19.2.0 # https://github.com/hynek/argon2_cffi
8 | {%- if cookiecutter.use_whitenoise == 'y' %}
9 | whitenoise==5.0.1 # https://github.com/evansd/whitenoise
10 | {%- endif %}
11 | redis==3.4.1 # https://github.com/andymccurdy/redis-py
12 | {%- if cookiecutter.use_celery == "y" %}
13 | celery==4.4.0 # pyup: < 5.0 # https://github.com/celery/celery
14 | django-celery-beat==1.6.0 # https://github.com/celery/django-celery-beat
15 | {%- if cookiecutter.use_docker == 'y' %}
16 | flower==0.9.3 # https://github.com/mher/flower
17 | {%- endif %}
18 | {%- endif %}
19 |
20 | # Django
21 | # ------------------------------------------------------------------------------
22 | django==2.2.10 # pyup: < 3.0 # https://www.djangoproject.com/
23 | django-environ==0.4.5 # https://github.com/joke2k/django-environ
24 | django-model-utils==4.0.0 # https://github.com/jazzband/django-model-utils
25 | django-allauth==0.41.0 # https://github.com/pennersr/django-allauth
26 | django-crispy-forms==1.8.1 # https://github.com/django-crispy-forms/django-crispy-forms
27 | {%- if cookiecutter.use_compressor == "y" %}
28 | django-compressor==2.4 # https://github.com/django-compressor/django-compressor
29 | {%- endif %}
30 | django-redis==4.11.0 # https://github.com/niwinz/django-redis
31 |
32 | {%- if cookiecutter.use_drf == "y" %}
33 | # Django REST Framework
34 | djangorestframework==3.11.0 # https://github.com/encode/django-rest-framework
35 | coreapi==2.3.3 # https://github.com/core-api/python-client
36 | {%- endif %}
37 |
38 | {% if cookiecutter.js_task_runner == "react" %}
39 | # Django/ReactJS integration
40 | # ------------------------------------------------------------------------------
41 | django-cors-headers==3.2.1 # https://github.com/ottoyiu/django-cors-headers
42 | graphene-django==2.8.1 # http://docs.graphene-python.org/projects/django/en/latest/
43 | graphene-django-optimizer==0.6.1 # https://github.com/tfoxy/graphene-django-optimizer
44 | graphene-file-upload==1.2.2 # https://github.com/lmcgartland/graphene-file-upload
45 | django-filter==2.2.0 # https://github.com/carltongibson/django-filter
46 | {%- endif %}
47 |
--------------------------------------------------------------------------------
/hooks/pre_gen_project.py:
--------------------------------------------------------------------------------
"""
NOTE:
the below code is to be maintained Python 2.x-compatible
as the whole Cookiecutter Django project initialization
can potentially be run in Python 2.x environment.

TODO: ? restrict Cookiecutter Django project initialization to Python 3.x environments only
"""
from __future__ import print_function

import sys

# ANSI escape sequences used to colour console feedback.
TERMINATOR = "\x1b[0m"
WARNING = "\x1b[1;33m [WARNING]: "
INFO = "\x1b[1;33m [INFO]: "
HINT = "\x1b[3;33m"
SUCCESS = "\x1b[1;32m [SUCCESS]: "

# The slug is rendered by cookiecutter before this hook runs.
project_slug = "{{ cookiecutter.project_slug }}"
# str.isidentifier only exists on Python 3; the check is skipped under Python 2.
if hasattr(project_slug, "isidentifier"):
    assert (
        project_slug.isidentifier()
    ), "'{}' project slug is not a valid Python identifier.".format(project_slug)

assert (
    project_slug == project_slug.lower()
), "'{}' project slug should be all lowercase".format(project_slug)

assert (
    "\\" not in "{{ cookiecutter.author_name }}"
), "Don't include backslashes in author name."

if "{{ cookiecutter.use_docker }}".lower() == "n":
    python_major_version = sys.version_info[0]
    if python_major_version == 2:
        # Generated projects need Python 3.7+; ask whether to continue anyway.
        print(
            WARNING + "You're running cookiecutter under Python 2, but the generated "
            "project requires Python 3.7+. Do you want to proceed (y/n)? " + TERMINATOR
        )
        yes_options, no_options = frozenset(["y"]), frozenset(["n"])
        while True:
            # raw_input is only reachable under Python 2 (guarded above).
            choice = raw_input().lower()
            if choice in yes_options:
                break

            elif choice in no_options:
                print(INFO + "Generation process stopped as requested." + TERMINATOR)
                sys.exit(1)
            else:
                # Re-prompt until the answer is one of the accepted options.
                print(
                    HINT
                    + "Please respond with {} or {}: ".format(
                        ", ".join(
                            ["'{}'".format(o) for o in yes_options if not o == ""]
                        ),
                        ", ".join(
                            ["'{}'".format(o) for o in no_options if not o == ""]
                        ),
                    )
                    + TERMINATOR
                )

# Static files must be served somehow: either Whitenoise or a cloud provider.
if (
    "{{ cookiecutter.use_whitenoise }}".lower() == "n"
    and "{{ cookiecutter.cloud_provider }}" == "None"
):
    print(
        "You should either use Whitenoise or select a Cloud Provider to serve static files"
    )
    sys.exit(1)
71 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/{{cookiecutter.project_slug}}/graphql/conversions.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | from django.contrib.postgres.fields import ArrayField, JSONField
3 | from django.db.models import FileField
4 | from django.forms import Field
5 |
6 | from django_filters import Filter
7 | from graphene import Float, Int, JSONString, List, String
8 | from graphene_django.converter import convert_django_field
9 | from graphene_django.forms.converter import convert_form_field
10 |
11 |
12 | # NOTE: This needs to be done before importing from queries
13 | # SEE: https://github.com/graphql-python/graphene-django/issues/18
@convert_django_field.register(ArrayField)
def convert_array_to_list(field, registry=None):
    """Expose a Postgres ArrayField as a GraphQL List of String."""
    is_required = not field.null
    return List(of_type=String, description=field.help_text, required=is_required)
17 |
18 |
@convert_django_field.register(JSONField)
def convert_jsonb_to_string(field, registry=None):
    """Expose a Postgres JSONField as a GraphQL JSONString scalar."""
    is_required = not field.null
    return JSONString(description=field.help_text, required=is_required)
22 |
23 |
@convert_django_field.register(FileField)
def convert_file_to_string(field, registry=None):
    """Expose a Django FileField as a plain GraphQL String."""
    is_required = not field.null
    return String(description=field.help_text, required=is_required)
27 |
28 |
def generate_list_filter_class(inner_type):
    """
    Build and return a Filter subclass that resolves into a graphene
    List(`inner_type`).

    This allows us to do things like use `__in` filters that accept graphene
    lists instead of a comma delimited value string that's interpolated into
    a list by django_filters.BaseCSVFilter (which is used to define
    django_filters.BaseInFilter)
    """
    type_name = inner_type.__name__

    # Bare django form field; exists only so it can be registered with
    # graphene-django's converter below.
    list_form_field = type(
        "List{}FormField".format(type_name),
        (Field,),
        {},
    )

    list_filter_class = type(
        "{}ListFilter".format(type_name),
        (Filter,),
        {
            "field_class": list_form_field,
            "__doc__": (
                "{0}ListFilter is a small extension of a raw django_filters.Filter "
                "that allows us to express graphql List({0}) arguments using FilterSets."
                "Note that the given values are passed directly into queryset filters."
            ).format(type_name),
        },
    )

    # Teach graphene-django to render the form field as List(inner_type).
    convert_form_field.register(list_form_field)(
        lambda bound_field: List(inner_type, required=bound_field.required)
    )

    return list_filter_class
62 |
63 |
# Ready-made list filters for the common scalar types.
FloatListFilter = generate_list_filter_class(Float)
IntListFilter = generate_list_filter_class(Int)
StringListFilter = generate_list_filter_class(String)
67 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/local.yml:
--------------------------------------------------------------------------------
1 | version: '3'
2 |
3 | volumes:
4 | local_postgres_data: {}
5 | local_postgres_data_backups: {}
6 |
7 | services:
8 | django:{% if cookiecutter.use_celery == 'y' %} &django{% endif %}
9 | build:
10 | context: .
11 | dockerfile: ./compose/local/django/Dockerfile
12 | image: {{ cookiecutter.project_slug }}_local_django
13 | depends_on:
14 | - postgres
15 | {%- if cookiecutter.use_mailhog == 'y' %}
16 | - mailhog
17 | {%- endif %}
18 | volumes:
19 | - .:/app
20 | env_file:
21 | - ./.envs/.local/.django
22 | - ./.envs/.local/.postgres
23 | ports:
24 | - "8000:8000"
25 | command: /start
26 |
27 | postgres:
28 | build:
29 | context: .
30 | dockerfile: ./compose/production/postgres/Dockerfile
31 | image: {{ cookiecutter.project_slug }}_production_postgres
32 | volumes:
33 | - local_postgres_data:/var/lib/postgresql/data
34 | - local_postgres_data_backups:/backups
35 | env_file:
36 | - ./.envs/.local/.postgres
37 | {%- if cookiecutter.use_mailhog == 'y' %}
38 |
39 | mailhog:
40 | image: mailhog/mailhog:v1.0.0
41 | ports:
42 | - "8025:8025"
43 |
44 | {%- endif %}
45 | {%- if cookiecutter.use_celery == 'y' %}
46 |
47 | redis:
48 | image: redis:5.0
49 |
50 | celeryworker:
51 | <<: *django
52 | image: {{ cookiecutter.project_slug }}_local_celeryworker
53 | depends_on:
54 | - redis
55 | - postgres
56 | {% if cookiecutter.use_mailhog == 'y' -%}
57 | - mailhog
58 | {%- endif %}
59 | ports: []
60 | command: /start-celeryworker
61 |
62 | celerybeat:
63 | <<: *django
64 | image: {{ cookiecutter.project_slug }}_local_celerybeat
65 | depends_on:
66 | - redis
67 | - postgres
68 | {% if cookiecutter.use_mailhog == 'y' -%}
69 | - mailhog
70 | {%- endif %}
71 | ports: []
72 | command: /start-celerybeat
73 |
74 | flower:
75 | <<: *django
76 | image: {{ cookiecutter.project_slug }}_local_flower
77 | ports:
78 | - "5555:5555"
79 | command: /start-flower
80 |
81 | {%- endif %}
82 | {%- if cookiecutter.js_task_runner == 'Gulp' %}
83 |
84 | node:
85 | build:
86 | context: .
87 | dockerfile: ./compose/local/node/Dockerfile
88 | image: {{ cookiecutter.project_slug }}_local_node
89 | depends_on:
90 | - django
91 | volumes:
92 | - .:/app
93 | # http://jdlm.info/articles/2016/03/06/lessons-building-node-app-docker.html
94 | - /app/node_modules
95 | command: npm run dev
96 | ports:
97 | - "3000:3000"
98 | # Expose browsersync UI: https://www.browsersync.io/docs/options/#option-ui
99 | - "3001:3001"
100 |
101 | {%- endif %}
102 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/docs/pycharm/configuration.rst:
--------------------------------------------------------------------------------
1 | Docker Remote Debugging
2 | =======================
3 |
4 | To connect to python remote interpreter inside docker, you have to make sure first, that Pycharm is aware of your docker.
5 |
6 | Go to *Settings > Build, Execution, Deployment > Docker*. If you are on linux, you can use docker directly using its socket `unix:///var/run/docker.sock`, if you are on Windows or Mac, make sure that you have docker-machine installed, then you can simply *Import credentials from Docker Machine*.
7 |
8 | .. image:: images/1.png
9 |
10 | Configure Remote Python Interpreter
11 | -----------------------------------
12 |
13 | This repository comes with already prepared "Run/Debug Configurations" for docker.
14 |
15 | .. image:: images/2.png
16 |
17 | But as you can see, at the beginning there is something wrong with them. They have a red X on the django icon, and they cannot be used without configuring a remote Python interpreter. To do that, you have to go to *Settings > Build, Execution, Deployment* first.
18 |
19 |
20 | Next, you have to add new remote python interpreter, based on already tested deployment settings. Go to *Settings > Project > Project Interpreter*. Click on the cog icon, and click *Add Remote*.
21 |
22 | .. image:: images/3.png
23 |
24 | Switch to *Docker Compose* and select `local.yml` file from directory of your project, next set *Service name* to `django`
25 |
26 | .. image:: images/4.png
27 |
28 | Having that, click *OK*. Close the *Settings* panel, and wait a few seconds...
29 |
30 | .. image:: images/7.png
31 |
32 | After few seconds, all *Run/Debug Configurations* should be ready to use.
33 |
34 | .. image:: images/8.png
35 |
36 | **Things you can do with provided configuration**:
37 |
38 | * run and debug python code
39 |
40 | .. image:: images/f1.png
41 |
42 | * run and debug tests
43 |
44 | .. image:: images/f2.png
45 | .. image:: images/f3.png
46 |
47 | * run and debug migrations or different django management commands
48 |
49 | .. image:: images/f4.png
50 |
51 | * and many others..
52 |
53 | Known issues
54 | ------------
55 |
56 | * Pycharm hangs on "Connecting to Debugger"
57 |
58 | .. image:: images/issue1.png
59 |
60 | This might be fault of your firewall. Take a look on this ticket - https://youtrack.jetbrains.com/issue/PY-18913
61 |
62 | * Modified files in `.idea` directory
63 |
64 | Most of the files from `.idea/` were added to `.gitignore` with a few exceptions, which were made, to provide "ready to go" configuration. After adding remote interpreter some of these files are altered by PyCharm:
65 |
66 | .. image:: images/issue2.png
67 |
68 | In theory you can remove them from the repository, but then other people will lose the ability to initialize a project from the provided configurations as you did. To get rid of this annoying state, you can run the command::
69 |
70 | $ git update-index --assume-unchanged {{cookiecutter.project_slug}}.iml
71 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/utility/install_os_dependencies.sh:
--------------------------------------------------------------------------------
#!/bin/bash

# Install or list the OS (apt) packages this project needs, as declared in a
# per-distribution requirements file living next to this script.

WORK_DIR="$(dirname "$0")"
DISTRO_NAME=$(lsb_release -sc)
OS_REQUIREMENTS_FILENAME="requirements-$DISTRO_NAME.apt"

# Quote the path and bail out if cd fails, instead of silently running the
# rest of the script against the wrong directory.
cd "$WORK_DIR" || exit 1

# Check if a requirements file exists for the current distribution.
if [ ! -r "$OS_REQUIREMENTS_FILENAME" ]; then
    cat <<-EOF >&2
There is no requirements file for your distribution.
You can see one of the files listed below to help search the equivalent package in your system:
$(find ./ -name "requirements-*.apt" -printf " - %f\n")
EOF
    exit 1
fi
19 | # Handle call with wrong command
20 | function wrong_command()
21 | {
22 | echo "${0##*/} - unknown command: '${1}'" >&2
23 | usage_message
24 | }
25 |
26 | # Print help / script usage
27 | function usage_message()
28 | {
29 | cat <<-EOF
30 | Usage: $WORK_DIR/${0##*/}
31 | Available commands are:
32 | list Print a list of all packages defined on ${OS_REQUIREMENTS_FILENAME} file
33 | help Print this help
34 |
35 | Commands that require superuser permission:
36 | install Install packages defined on ${OS_REQUIREMENTS_FILENAME} file. Note: This
37 | does not upgrade the packages already installed for new versions, even if
38 | new version is available in the repository.
39 | upgrade Same that install, but upgrade the already installed packages, if new
40 | version is available.
41 | EOF
42 | }
43 |
44 | # Read the requirements.apt file, and remove comments and blank lines
45 | function list_packages(){
46 | grep -v "#" "${OS_REQUIREMENTS_FILENAME}" | grep -v "^$";
47 | }
48 |
# Install only the packages that are missing; --no-upgrade leaves already
# installed packages at their current version.
function install_packages()
{
    list_packages | xargs apt-get --no-upgrade install -y;
}
53 |
# Install the packages, also upgrading any that are already installed when a
# newer version is available (no --no-upgrade flag here).
function upgrade_packages()
{
    list_packages | xargs apt-get install -y;
}
58 |
# Install (default) or upgrade the packages from the requirements file.
# Requires root, since it drives apt-get; exits the script either way.
function install_or_upgrade()
{
    # Fix: the original used P / PARAN (a typo of PARAM) as globals; fold
    # them into a single local with the same "install" default.
    local action=${1:-"install"}

    if [[ $EUID -ne 0 ]]; then
        cat <<-EOF >&2
You must run this script with root privilege
Please do:
sudo $WORK_DIR/${0##*/} $action
EOF
        exit 1
    else

        apt-get update

        # Install the basic compilation dependencies and other required libraries of this project
        if [ "$action" == "install" ]; then
            install_packages;
        else
            upgrade_packages;
        fi

        # cleaning downloaded packages from apt-get cache
        apt-get clean

        exit 0
    fi
}
88 |
# Handle command argument: dispatch on the first CLI word; no argument or
# "help" prints usage, anything unknown goes to wrong_command.
case "$1" in
    install) install_or_upgrade;;
    upgrade) install_or_upgrade "upgrade";;
    list) list_packages;;
    help|"") usage_message;;
    *) wrong_command "$1";;
esac
97 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/{{cookiecutter.project_slug}}/templates/account/email.html:
--------------------------------------------------------------------------------
1 | {% raw %}
2 | {% extends "account/base.html" %}
3 |
4 | {% load i18n %}
5 | {% load crispy_forms_tags %}
6 |
7 | {% block head_title %}{% trans "Account" %}{% endblock %}
8 |
9 | {% block inner %}
10 |
{% trans "E-mail Addresses" %}
11 |
12 | {% if user.emailaddress_set.all %}
13 |
{% trans 'The following e-mail addresses are associated with your account:' %}
14 |
15 |
44 |
45 | {% else %}
46 |
{% trans 'Warning:'%} {% trans "You currently do not have any e-mail address set up. You should really add an e-mail address so you can receive notifications, reset your password, etc." %}
47 |
48 | {% endif %}
49 |
50 |
51 |
{% trans "Add E-mail Address" %}
52 |
53 |
58 |
59 | {% endblock %}
60 |
61 |
62 | {% block javascript %}
63 | {{ block.super }}
64 |
79 | {% endblock %}
80 | {% endraw %}
81 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/config/urls.py:
--------------------------------------------------------------------------------
1 | from django.conf import settings
2 | from django.urls import include, path, re_path
3 | from django.conf.urls.static import static
4 | from django.contrib import admin
5 | from django.views.decorators.csrf import csrf_exempt
6 | from django.views.generic import TemplateView
7 | from django.views import defaults as default_views
8 | {% if cookiecutter.use_drf == 'y' -%}
9 | from graphene_file_upload.django import FileUploadGraphQLView
10 | {%- endif %}
11 | {% if cookiecutter.use_drf == 'y' -%}
12 | from rest_framework.authtoken.views import obtain_auth_token
13 | from rest_framework.documentation import include_docs_urls
14 | {%- endif %}
15 |
16 |
17 |
18 |
19 | urlpatterns = [
20 | path("", TemplateView.as_view(template_name="pages/home.html"), name="home"),
21 | re_path(r'^app/(?P.*)$', TemplateView.as_view(template_name="index.html"), name='app'),
22 |
23 | # User management from django-all-auth
24 | path("about/", TemplateView.as_view(template_name="pages/about.html"), name="about"),
25 | path("users/", include("{{ cookiecutter.project_slug }}.users.urls", namespace="users")),
26 | path("accounts/", include("allauth.urls")),
27 |
28 | # Django Admin, use {% raw %}{% url 'admin:index' %}{% endraw %}
29 | path(settings.ADMIN_URL, admin.site.urls),
30 |
31 | # Your stuff: custom urls includes go here
32 |
33 | ] + static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
34 |
35 | {% if cookiecutter.use_drf == 'y' -%}
36 | # API URLS
37 | urlpatterns += [
38 | # API base url
39 | path("api/", include("config.api_router")),
40 | # DRF auth token
41 | path("auth-token/", obtain_auth_token),
42 | # DRF API docs
43 | path("api-docs/", include_docs_urls(title="{{ cookiecutter.project_name }} REST API", public=False)),
44 | ]
45 | {%- endif %}
46 |
47 | {% if cookiecutter.js_task_runner == 'react' -%}
48 | # API URLS
49 | urlpatterns += [
50 | # GraphQL
51 | path("graphql/", csrf_exempt(FileUploadGraphQLView.as_view(graphiql=True, pretty=True))),
52 | ]
53 | {%- endif %}
54 |
55 | if settings.DEBUG:
56 | # This allows the error pages to be debugged during development, just visit
57 | # these url in browser to see how these error pages look like.
58 | urlpatterns += [
59 | path(
60 | "400/",
61 | default_views.bad_request,
62 | kwargs={"exception": Exception("Bad Request!")},
63 | ),
64 | path(
65 | "403/",
66 | default_views.permission_denied,
67 | kwargs={"exception": Exception("Permission Denied")},
68 | ),
69 | path(
70 | "404/",
71 | default_views.page_not_found,
72 | kwargs={"exception": Exception("Page not Found")},
73 | ),
74 | path("500/", default_views.server_error),
75 | ]
76 | if "debug_toolbar" in settings.INSTALLED_APPS:
77 | import debug_toolbar
78 |
79 | urlpatterns = [path("__debug__/", include(debug_toolbar.urls))] + urlpatterns
80 |
--------------------------------------------------------------------------------
/docs/troubleshooting.rst:
--------------------------------------------------------------------------------
1 | Troubleshooting
2 | =====================================
3 |
4 | This page contains some advice about errors and problems commonly encountered during the development of Cookiecutter Django applications.
5 |
6 | Server Error on sign-up/log-in
7 | ------------------------------
8 |
9 | Make sure you have configured the mail backend (e.g. Mailgun) by adding the API key and sender domain
10 |
11 | .. include:: mailgun.rst
12 |
13 | .. _docker-postgres-auth-failed:
14 |
15 | Docker: Postgres authentication failed
16 | --------------------------------------
17 |
18 | Examples of logs::
19 |
20 | postgres_1 | 2018-06-07 19:11:23.963 UTC [81] FATAL: password authentication failed for user "pydanny"
21 | postgres_1 | 2018-06-07 19:11:23.963 UTC [81] DETAIL: Password does not match for user "pydanny".
22 | postgres_1 | Connection matched pg_hba.conf line 95: "host all all all md5"
23 |
24 | If you recreate the project multiple times with the same name, Docker would preserve the volumes for the postgres container between projects. Here is what happens:
25 |
26 | #. You generate the project the first time. The .env postgres file is populated with the random password
27 | #. You run the docker-compose and the containers are created. The postgres container creates the database based on the .env file credentials
28 | #. You "regenerate" the project with the same name, so the postgres .env file is populated with a new random password
29 | #. You run docker-compose. Since the names of the containers are the same, docker will try to start them (not create them from scratch i.e. it won't execute the Dockerfile to recreate the database). When this happens, it tries to start the database based on the new credentials which do not match the ones that the database was created with, and you get the error message above.
30 |
31 | To fix this, you can either:
32 |
33 | - Clear your project-related Docker cache with ``docker-compose -f local.yml down --volumes --rmi all``.
34 | - Use the Docker volume sub-commands to find volumes (`ls`_) and remove them (`rm`_).
35 | - Use the `prune`_ command to clear system-wide (use with care!).
36 |
37 | .. _ls: https://docs.docker.com/engine/reference/commandline/volume_ls/
38 | .. _rm: https://docs.docker.com/engine/reference/commandline/volume_rm/
39 | .. _prune: https://docs.docker.com/v17.09/engine/reference/commandline/system_prune/
40 |
41 | Others
42 | ------
43 |
44 | #. ``project_slug`` must be a valid Python module name or you will have issues on imports.
45 |
46 | #. ``jinja2.exceptions.TemplateSyntaxError: Encountered unknown tag 'now'.``: please upgrade your cookiecutter version to >= 1.4 (see `#528`_)
47 |
48 | #. New apps not getting created in project root: This is the expected behavior, because cookiecutter-django does not change the way that django startapp works, you'll have to fix this manually (see `#1725`_)
49 |
50 | .. _#528: https://github.com/pydanny/cookiecutter-django/issues/528#issuecomment-212650373
51 | .. _#1725: https://github.com/pydanny/cookiecutter-django/issues/1725#issuecomment-407493176
52 |
--------------------------------------------------------------------------------
/docs/docker-postgres-backups.rst:
--------------------------------------------------------------------------------
1 | PostgreSQL Backups with Docker
2 | ==============================
3 |
4 | .. note:: For brevity it is assumed that you will be running the below commands against local environment, however, this is by no means mandatory so feel free to switch to ``production.yml`` when needed.
5 |
6 |
7 | Prerequisites
8 | -------------
9 |
10 | #. the project was generated with ``use_docker`` set to ``y``;
11 | #. the stack is up and running: ``docker-compose -f local.yml up -d postgres``.
12 |
13 |
14 | Creating a Backup
15 | -----------------
16 |
17 | To create a backup, run::
18 |
19 | $ docker-compose -f local.yml exec postgres backup
20 |
21 | Assuming your project's database is named ``my_project`` here is what you will see: ::
22 |
23 | Backing up the 'my_project' database...
24 | SUCCESS: 'my_project' database backup 'backup_2018_03_13T09_05_07.sql.gz' has been created and placed in '/backups'.
25 |
26 | Keep in mind that ``/backups`` is the ``postgres`` container directory.
27 |
28 |
29 | Viewing the Existing Backups
30 | ----------------------------
31 |
32 | To list existing backups, ::
33 |
34 | $ docker-compose -f local.yml exec postgres backups
35 |
36 | These are the sample contents of ``/backups``: ::
37 |
38 | These are the backups you have got:
39 | total 24K
40 | -rw-r--r-- 1 root root 5.2K Mar 13 09:05 backup_2018_03_13T09_05_07.sql.gz
41 | -rw-r--r-- 1 root root 5.2K Mar 12 21:13 backup_2018_03_12T21_13_03.sql.gz
42 | -rw-r--r-- 1 root root 5.2K Mar 12 21:12 backup_2018_03_12T21_12_58.sql.gz
43 |
44 |
45 | Copying Backups Locally
46 | -----------------------
47 |
48 | If you want to copy backups from your ``postgres`` container locally, ``docker cp`` command_ will help you on that.
49 |
50 | For example, given ``9c5c3f055843`` is the container ID copying all the backups over to a local directory is as simple as ::
51 |
52 | $ docker cp 9c5c3f055843:/backups ./backups
53 |
54 | With a single backup file copied to ``.`` that would be ::
55 |
56 | $ docker cp 9c5c3f055843:/backups/backup_2018_03_13T09_05_07.sql.gz .
57 |
58 | .. _`command`: https://docs.docker.com/engine/reference/commandline/cp/
59 |
60 |
61 | Restoring from the Existing Backup
62 | ----------------------------------
63 |
64 | To restore from one of the backups you have already got (take the ``backup_2018_03_13T09_05_07.sql.gz`` for example), ::
65 |
66 | $ docker-compose -f local.yml exec postgres restore backup_2018_03_13T09_05_07.sql.gz
67 |
68 | You will see something like ::
69 |
70 | Restoring the 'my_project' database from the '/backups/backup_2018_03_13T09_05_07.sql.gz' backup...
71 | INFO: Dropping the database...
72 | INFO: Creating a new database...
73 | INFO: Applying the backup to the new database...
74 | SET
75 | SET
76 | SET
77 | SET
78 | SET
79 | set_config
80 | ------------
81 |
82 | (1 row)
83 |
84 | SET
85 | # ...
86 | ALTER TABLE
87 | SUCCESS: The 'my_project' database has been restored from the '/backups/backup_2018_03_13T09_05_07.sql.gz' backup.
88 |
89 |
90 | Backup to Amazon S3
91 | ----------------------------------
92 | For uploading your backups to Amazon S3 you can use the aws cli container. There is an upload command for uploading the postgres /backups directory recursively and there is a download command for downloading a specific backup. The default S3 environment variables are used. ::
93 |
94 | $ docker-compose -f production.yml run --rm awscli upload
95 | $ docker-compose -f production.yml run --rm awscli download backup_2018_03_13T09_05_07.sql.gz
96 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/LICENSE:
--------------------------------------------------------------------------------
1 | {% if cookiecutter.open_source_license == 'MIT' %}
2 | The MIT License (MIT)
3 | Copyright (c) {% now 'utc', '%Y' %}, {{ cookiecutter.author_name }}
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
6 |
7 | The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
8 |
9 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
10 | {% elif cookiecutter.open_source_license == 'BSD' %}
11 | Copyright (c) {% now 'utc', '%Y' %}, {{ cookiecutter.author_name }}
12 | All rights reserved.
13 |
14 | Redistribution and use in source and binary forms, with or without modification,
15 | are permitted provided that the following conditions are met:
16 |
17 | * Redistributions of source code must retain the above copyright notice, this
18 | list of conditions and the following disclaimer.
19 |
20 | * Redistributions in binary form must reproduce the above copyright notice, this
21 | list of conditions and the following disclaimer in the documentation and/or
22 | other materials provided with the distribution.
23 |
24 | * Neither the name of {{ cookiecutter.project_name }} nor the names of its
25 | contributors may be used to endorse or promote products derived from this
26 | software without specific prior written permission.
27 |
28 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
29 | ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
30 | WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
31 | IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
32 | INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
33 | BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
34 | DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
35 | OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
36 | OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
37 | OF THE POSSIBILITY OF SUCH DAMAGE.
38 | {% elif cookiecutter.open_source_license == 'GPLv3' %}
39 | Copyright (c) {% now 'utc', '%Y' %}, {{ cookiecutter.author_name }}
40 |
41 | This program is free software: you can redistribute it and/or modify
42 | it under the terms of the GNU General Public License as published by
43 | the Free Software Foundation, either version 3 of the License, or
44 | (at your option) any later version.
45 |
46 | This program is distributed in the hope that it will be useful,
47 | but WITHOUT ANY WARRANTY; without even the implied warranty of
48 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
49 | GNU General Public License for more details.
50 |
51 | You should have received a copy of the GNU General Public License
52 | along with this program. If not, see <https://www.gnu.org/licenses/>.
53 | {% endif %}
54 |
--------------------------------------------------------------------------------
/docs/deployment-on-heroku.rst:
--------------------------------------------------------------------------------
1 | Deployment on Heroku
2 | ====================
3 |
4 | .. index:: Heroku
5 |
6 | Commands to run
7 | ---------------
8 |
9 | Run these commands to deploy the project to Heroku:
10 |
11 | .. code-block:: bash
12 |
13 | heroku create --buildpack https://github.com/heroku/heroku-buildpack-python
14 |
15 | heroku addons:create heroku-postgresql:hobby-dev
16 | # On Windows use double quotes for the time zone, e.g.
17 | # heroku pg:backups schedule --at "02:00 America/Los_Angeles" DATABASE_URL
18 | heroku pg:backups schedule --at '02:00 America/Los_Angeles' DATABASE_URL
19 | heroku pg:promote DATABASE_URL
20 |
21 | heroku addons:create heroku-redis:hobby-dev
22 |
23 | heroku addons:create mailgun:starter
24 |
25 | heroku config:set PYTHONHASHSEED=random
26 |
27 | heroku config:set WEB_CONCURRENCY=4
28 |
29 | heroku config:set DJANGO_DEBUG=False
30 | heroku config:set DJANGO_SETTINGS_MODULE=config.settings.production
31 | heroku config:set DJANGO_SECRET_KEY="$(openssl rand -base64 64)"
32 |
33 | # Generating a 32 character-long random string without any of the visually similar characters "IOl01":
34 | heroku config:set DJANGO_ADMIN_URL="$(openssl rand -base64 4096 | tr -dc 'A-HJ-NP-Za-km-z2-9' | head -c 32)/"
35 |
36 | # Set this to your Heroku app url, e.g. 'bionic-beaver-28392.herokuapp.com'
37 | heroku config:set DJANGO_ALLOWED_HOSTS=
38 |
39 | # Assign with AWS_ACCESS_KEY_ID
40 | heroku config:set DJANGO_AWS_ACCESS_KEY_ID=
41 |
42 | # Assign with AWS_SECRET_ACCESS_KEY
43 | heroku config:set DJANGO_AWS_SECRET_ACCESS_KEY=
44 |
45 | # Assign with AWS_STORAGE_BUCKET_NAME
46 | heroku config:set DJANGO_AWS_STORAGE_BUCKET_NAME=
47 |
48 | git push heroku master
49 |
50 | heroku run python manage.py createsuperuser
51 |
52 | heroku run python manage.py check --deploy
53 |
54 | heroku open
55 |
56 |
57 | .. warning::
58 |
59 | .. include:: mailgun.rst
60 |
61 |
62 | Optional actions
63 | ----------------
64 |
65 | Celery
66 | ++++++
67 |
68 | Celery requires a few extra environment variables to be fully operational. Also, the worker is created,
69 | it's in the ``Procfile``, but is turned off by default:
70 |
71 | .. code-block:: bash
72 |
73 | # Set the broker URL to Redis
74 | heroku config:set CELERY_BROKER_URL=`heroku config:get REDIS_URL`
75 | # Scale dyno to 1 instance
76 | heroku ps:scale worker=1
77 |
78 | Sentry
79 | ++++++
80 |
81 | If you've opted for Sentry error tracking, you can either install it through the `Sentry add-on`_:
82 |
83 | .. code-block:: bash
84 |
85 | heroku addons:create sentry:f1
86 |
87 |
88 | Or add the DSN for your account, if you already have one:
89 |
90 | .. code-block:: bash
91 |
92 | heroku config:set SENTRY_DSN=https://xxxx@sentry.io/12345
93 |
94 | .. _Sentry add-on: https://elements.heroku.com/addons/sentry
95 |
96 |
97 | Gulp & Bootstrap compilation
98 | ++++++++++++++++++++++++++++
99 |
100 | If you've opted for a custom bootstrap build, you'll most likely need to setup
101 | your app to use `multiple buildpacks`_: one for Python & one for Node.js:
102 |
103 | .. code-block:: bash
104 |
105 | heroku buildpacks:add --index 1 heroku/nodejs
106 |
107 | At time of writing, this should do the trick: during deployment,
108 | the Heroku should run ``npm install`` and then ``npm build``,
109 | which runs Gulp in cookiecutter-django.
110 |
111 | If things don't work, please refer to the Heroku docs.
112 |
113 | .. _multiple buildpacks: https://devcenter.heroku.com/articles/using-multiple-buildpacks-for-an-app
114 |
115 | About Heroku & Docker
116 | ---------------------
117 |
118 | Although Heroku has some sort of `Docker support`_, it's not supported by cookiecutter-django.
119 | We invite you to follow Heroku documentation about it.
120 |
121 | .. _Docker support: https://devcenter.heroku.com/articles/build-docker-images-heroku-yml
122 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_slug}}/config/settings/local.py:
--------------------------------------------------------------------------------
1 | from .base import * # noqa
2 | from .base import env
3 |
4 | # GENERAL
5 | # ------------------------------------------------------------------------------
6 | # https://docs.djangoproject.com/en/dev/ref/settings/#debug
7 | DEBUG = True
8 | # https://docs.djangoproject.com/en/dev/ref/settings/#secret-key
9 | SECRET_KEY = env(
10 | "DJANGO_SECRET_KEY",
11 | default="!!!SET DJANGO_SECRET_KEY!!!",
12 | )
13 | # https://docs.djangoproject.com/en/dev/ref/settings/#allowed-hosts
14 | ALLOWED_HOSTS = ["localhost", "0.0.0.0", "127.0.0.1"]
15 |
16 | # CACHES
17 | # ------------------------------------------------------------------------------
18 | # https://docs.djangoproject.com/en/dev/ref/settings/#caches
19 | CACHES = {
20 | "default": {
21 | "BACKEND": "django.core.cache.backends.locmem.LocMemCache",
22 | "LOCATION": "",
23 | }
24 | }
25 |
26 | # EMAIL
27 | # ------------------------------------------------------------------------------
28 | {% if cookiecutter.use_mailhog == 'y' and cookiecutter.use_docker == 'y' -%}
29 | # https://docs.djangoproject.com/en/dev/ref/settings/#email-host
30 | EMAIL_HOST = env("EMAIL_HOST", default="mailhog")
31 | # https://docs.djangoproject.com/en/dev/ref/settings/#email-port
32 | EMAIL_PORT = 1025
33 | {%- elif cookiecutter.use_mailhog == 'y' and cookiecutter.use_docker == 'n' -%}
34 | # https://docs.djangoproject.com/en/dev/ref/settings/#email-host
35 | EMAIL_HOST = "localhost"
36 | # https://docs.djangoproject.com/en/dev/ref/settings/#email-port
37 | EMAIL_PORT = 1025
38 | {%- else -%}
39 | # https://docs.djangoproject.com/en/dev/ref/settings/#email-backend
40 | EMAIL_BACKEND = env(
41 | "DJANGO_EMAIL_BACKEND", default="django.core.mail.backends.console.EmailBackend"
42 | )
43 | {%- endif %}
44 |
45 | {%- if cookiecutter.use_whitenoise == 'y' %}
46 |
47 | # WhiteNoise
48 | # ------------------------------------------------------------------------------
49 | # http://whitenoise.evans.io/en/latest/django.html#using-whitenoise-in-development
50 | INSTALLED_APPS = ["whitenoise.runserver_nostatic"] + INSTALLED_APPS # noqa F405
51 | {% endif %}
52 |
53 | # django-debug-toolbar
54 | # ------------------------------------------------------------------------------
55 | # https://django-debug-toolbar.readthedocs.io/en/latest/installation.html#prerequisites
56 | INSTALLED_APPS += ["debug_toolbar"] # noqa F405
57 | # https://django-debug-toolbar.readthedocs.io/en/latest/installation.html#middleware
58 | MIDDLEWARE += ["debug_toolbar.middleware.DebugToolbarMiddleware"] # noqa F405
59 | # https://django-debug-toolbar.readthedocs.io/en/latest/configuration.html#debug-toolbar-config
60 | DEBUG_TOOLBAR_CONFIG = {
61 | "DISABLE_PANELS": ["debug_toolbar.panels.redirects.RedirectsPanel"],
62 | "SHOW_TEMPLATE_CONTEXT": True,
63 | }
64 | # https://django-debug-toolbar.readthedocs.io/en/latest/installation.html#internal-ips
65 | INTERNAL_IPS = ["127.0.0.1", "10.0.2.2"]
66 | {% if cookiecutter.use_docker == 'y' -%}
67 | if env("USE_DOCKER") == "yes":
68 | import socket
69 |
70 | hostname, _, ips = socket.gethostbyname_ex(socket.gethostname())
71 | INTERNAL_IPS += [ip[:-1] + "1" for ip in ips]
72 | {%- endif %}
73 |
74 | # django-extensions
75 | # ------------------------------------------------------------------------------
76 | # https://django-extensions.readthedocs.io/en/latest/installation_instructions.html#configuration
77 | INSTALLED_APPS += ["django_extensions"] # noqa F405
78 | {% if cookiecutter.use_celery == 'y' -%}
79 |
80 | # Celery
81 | # ------------------------------------------------------------------------------
82 | {% if cookiecutter.use_docker == 'n' -%}
83 | # http://docs.celeryproject.org/en/latest/userguide/configuration.html#task-always-eager
84 | CELERY_TASK_ALWAYS_EAGER = True
85 | {%- endif %}
86 | # http://docs.celeryproject.org/en/latest/userguide/configuration.html#task-eager-propagates
87 | CELERY_TASK_EAGER_PROPAGATES = True
88 |
89 | {%- endif %}
90 | # Your stuff...
91 | # ------------------------------------------------------------------------------
92 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | ### Python template
2 | # Byte-compiled / optimized / DLL files
3 | __pycache__/
4 | *.py[cod]
5 | *$py.class
6 |
7 | # C extensions
8 | *.so
9 |
10 | # Distribution / packaging
11 | .Python
12 | build/
13 | develop-eggs/
14 | dist/
15 | downloads/
16 | eggs/
17 | .eggs/
18 | lib/
19 | lib64/
20 | parts/
21 | sdist/
22 | var/
23 | wheels/
24 | *.egg-info/
25 | .installed.cfg
26 | *.egg
27 |
28 | # PyInstaller
29 | # Usually these files are written by a python script from a template
30 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
31 | *.manifest
32 | *.spec
33 |
34 | # Installer logs
35 | pip-log.txt
36 | pip-delete-this-directory.txt
37 |
38 | # Unit test / coverage reports
39 | htmlcov/
40 | .tox/
41 | .coverage.*
42 | .cache/
43 | nosetests.xml
44 | coverage.xml
45 | *.cover
46 | .hypothesis/
47 |
48 | # Translations
49 | *.mo
50 | *.pot
51 |
52 | # Django stuff:
53 | *.log
54 |
55 | # Sphinx documentation
56 | docs/_build/
57 |
58 | # PyBuilder
59 | target/
60 |
61 | # pyenv
62 | .python-version
63 |
64 | # celery beat schedule file
65 | celerybeat-schedule
66 |
67 | # Environments
68 | .env
69 | .venv
70 | env/
71 | venv/
72 | ENV/
73 |
74 | # Rope project settings
75 | .ropeproject
76 |
77 | # mkdocs documentation
78 | /site
79 |
80 | # mypy
81 | .mypy_cache/
82 |
83 |
84 | ### Linux template
85 | *~
86 |
87 | # temporary files which can be created if a process still has a handle open of a deleted file
88 | .fuse_hidden*
89 |
90 | # KDE directory preferences
91 | .directory
92 |
93 | # Linux trash folder which might appear on any partition or disk
94 | .Trash-*
95 |
96 | # .nfs files are created when an open file is removed but is still being accessed
97 | .nfs*
98 |
99 |
100 | ### VisualStudioCode template
101 | .vscode/*
102 | !.vscode/settings.json
103 | !.vscode/tasks.json
104 | !.vscode/launch.json
105 | !.vscode/extensions.json
106 |
107 |
108 | ### Windows template
109 | # Windows thumbnail cache files
110 | Thumbs.db
111 | ehthumbs.db
112 | ehthumbs_vista.db
113 |
114 | # Dump file
115 | *.stackdump
116 |
117 | # Folder config file
118 | Desktop.ini
119 |
120 | # Recycle Bin used on file shares
121 | $RECYCLE.BIN/
122 |
123 | # Windows Installer files
124 | *.cab
125 | *.msi
126 | *.msm
127 | *.msp
128 |
129 | # Windows shortcuts
130 | *.lnk
131 |
132 |
133 | ### SublimeText template
134 | # Cache files for Sublime Text
135 | *.tmlanguage.cache
136 | *.tmPreferences.cache
137 | *.stTheme.cache
138 |
139 | # Workspace files are user-specific
140 | *.sublime-workspace
141 |
142 | # Project files should be checked into the repository, unless a significant
143 | # proportion of contributors will probably not be using Sublime Text
144 | # *.sublime-project
145 |
146 | # SFTP configuration file
147 | sftp-config.json
148 |
149 | # Package control specific files
150 | Package Control.last-run
151 | Package Control.ca-list
152 | Package Control.ca-bundle
153 | Package Control.system-ca-bundle
154 | Package Control.cache/
155 | Package Control.ca-certs/
156 | Package Control.merged-ca-bundle
157 | Package Control.user-ca-bundle
158 | oscrypto-ca-bundle.crt
159 | bh_unicode_properties.cache
160 |
161 | # Sublime-github package stores a github token in this file
162 | # https://packagecontrol.io/packages/sublime-github
163 | GitHub.sublime-settings
164 |
165 |
166 | ### macOS template
167 | # General
168 | *.DS_Store
169 | .AppleDouble
170 | .LSOverride
171 |
172 | # Icon must end with two \r
173 | Icon
174 |
175 | # Thumbnails
176 | ._*
177 |
178 | # Files that might appear in the root of a volume
179 | .DocumentRevisions-V100
180 | .fseventsd
181 | .Spotlight-V100
182 | .TemporaryItems
183 | .Trashes
184 | .VolumeIcon.icns
185 | .com.apple.timemachine.donotpresent
186 |
187 | # Directories potentially created on remote AFP share
188 | .AppleDB
189 | .AppleDesktop
190 | Network Trash Folder
191 | Temporary Items
192 | .apdisk
193 |
194 |
195 | ### Vim template
196 | # Swap
197 | [._]*.s[a-v][a-z]
198 | [._]*.sw[a-p]
199 | [._]s[a-v][a-z]
200 | [._]sw[a-p]
201 |
202 | # Session
203 | Session.vim
204 |
205 | # Temporary
206 | .netrwhist
207 | # Auto-generated tag files
208 | tags
209 |
210 |
211 | ### VirtualEnv template
212 | # Virtualenv
213 | # http://iamzed.com/2009/05/07/a-primer-on-virtualenv/
214 | [Bb]in
215 | [Ii]nclude
216 | [Ll]ib
217 | [Ll]ib64
218 | [Ss]cripts
219 | pyvenv.cfg
220 | pip-selfcheck.json
221 |
222 |
223 | # Even though the project might be opened and edited
224 | # in any of the JetBrains IDEs, it makes no sense whatsoever
225 | # to 'run' anything within it since any particular cookiecutter
226 | # is declarative by nature.
227 | .idea/
228 |
229 | .pytest_cache/
230 |
--------------------------------------------------------------------------------
/docs/project-generation-options.rst:
--------------------------------------------------------------------------------
1 | Project Generation Options
2 | ==========================
3 |
4 | project_name:
5 | Your project's human-readable name, capitals and spaces allowed.
6 |
7 | project_slug:
8 | Your project's slug without dashes or spaces. Used to name your repo
9 | and in other places where a Python-importable version of your project name
10 | is needed.
11 |
12 | description:
13 | Describes your project and gets used in places like ``README.rst`` and such.
14 |
15 | author_name:
16 | This is you! The value goes into places like ``LICENSE`` and such.
17 |
18 | email:
19 | The email address you want to identify yourself in the project.
20 |
21 | domain_name:
22 | The domain name you plan to use for your project once it goes live.
23 | Note that it can be safely changed later on whenever you need to.
24 |
25 | version:
26 | The version of the project at its inception.
27 |
28 | open_source_license:
29 | A software license for the project. The choices are:
30 |
31 | 1. MIT_
32 | 2. BSD_
33 | 3. GPLv3_
34 | 4. `Apache Software License 2.0`_
35 | 5. Not open source
36 |
37 | timezone:
38 | The value to be used for the ``TIME_ZONE`` setting of the project.
39 |
40 | windows:
41 | Indicates whether the project should be configured for development on Windows.
42 |
43 | use_pycharm:
44 | Indicates whether the project should be configured for development with PyCharm_.
45 |
46 | use_docker:
47 | Indicates whether the project should be configured to use Docker_ and `Docker Compose`_.
48 |
49 | postgresql_version:
50 | Select a PostgreSQL_ version to use. The choices are:
51 |
52 | 1. 11.3
53 | 2. 10.8
54 | 3. 9.6
55 | 4. 9.5
56 | 5. 9.4
57 |
58 | js_task_runner:
59 | Select a JavaScript task runner. The choices are:
60 |
61 | 1. None
62 | 2. Gulp_
63 |
64 | cloud_provider:
65 | Select a cloud provider for static & media files. The choices are:
66 |
67 | 1. AWS_
68 | 2. GCP_
69 | 3. None
70 |
71 | Note that if you choose no cloud provider, media files won't work.
72 |
73 | use_drf:
74 | Indicates whether the project should be configured to use `Django Rest Framework`_.
75 |
76 | custom_bootstrap_compilation:
77 | Indicates whether the project should support Bootstrap recompilation
78 | via the selected JavaScript task runner's task. This can be useful
79 | for real-time Bootstrap variable alteration.
80 |
81 | use_compressor:
82 | Indicates whether the project should be configured to use `Django Compressor`_.
83 |
84 | use_celery:
85 | Indicates whether the project should be configured to use Celery_.
86 |
87 | use_mailhog:
88 | Indicates whether the project should be configured to use MailHog_.
89 |
90 | use_sentry:
91 | Indicates whether the project should be configured to use Sentry_.
92 |
93 | use_whitenoise:
94 | Indicates whether the project should be configured to use WhiteNoise_.
95 |
96 | use_heroku:
97 | Indicates whether the project should be configured so as to be deployable
98 | to Heroku_.
99 |
100 | ci_tool:
101 | Select a CI tool for running tests. The choices are:
102 |
103 | 1. None
104 | 2. `Travis CI`_
105 | 3. `GitLab CI`_
106 |
107 | keep_local_envs_in_vcs:
108 | Indicates whether the project's ``.envs/.local/`` should be kept in VCS
109 | (comes in handy when working in teams where local environment reproducibility
110 | is strongly encouraged).
111 | Note: .env(s) are only utilized when Docker Compose and/or Heroku support is enabled.
112 |
113 | debug:
114 | Indicates whether the project should be configured for debugging.
115 | This option is relevant for Cookiecutter Django developers only.
116 |
117 |
118 | .. _MIT: https://opensource.org/licenses/MIT
119 | .. _BSD: https://opensource.org/licenses/BSD-3-Clause
120 | .. _GPLv3: https://www.gnu.org/licenses/gpl.html
121 | .. _Apache Software License 2.0: http://www.apache.org/licenses/LICENSE-2.0
122 |
123 | .. _PyCharm: https://www.jetbrains.com/pycharm/
124 |
125 | .. _Docker: https://github.com/docker/docker
126 | .. _Docker Compose: https://docs.docker.com/compose/
127 |
128 | .. _PostgreSQL: https://www.postgresql.org/docs/
129 |
130 | .. _Gulp: https://github.com/gulpjs/gulp
131 |
132 | .. _AWS: https://aws.amazon.com/s3/
133 | .. _GCP: https://cloud.google.com/storage/
134 |
135 | .. _Django Rest Framework: https://github.com/encode/django-rest-framework/
136 |
137 | .. _Django Compressor: https://github.com/django-compressor/django-compressor
138 |
139 | .. _Celery: https://github.com/celery/celery
140 |
141 | .. _MailHog: https://github.com/mailhog/MailHog
142 |
143 | .. _Sentry: https://github.com/getsentry/sentry
144 |
145 | .. _WhiteNoise: https://github.com/evansd/whitenoise
146 |
147 | .. _Heroku: https://github.com/heroku/heroku-buildpack-python
148 |
149 | .. _Travis CI: https://travis-ci.org/
150 |
151 | .. _GitLab CI: https://docs.gitlab.com/ee/ci/
152 |
153 |
--------------------------------------------------------------------------------