├── .cloudbuild
│   ├── build-migrate-deploy.yaml
│   └── django_migrate.sh
├── .dockerignore
├── .env-local
├── .gcloud
│   ├── README.md
│   ├── djangomigrate.yaml
│   ├── postbuild.sh
│   ├── postcreate.sh
│   ├── prebuild.sh
│   └── precreate.sh
├── .gcloudignore
├── .github
│   └── workflows
│       └── django-test.yml
├── .gitignore
├── .util
│   ├── README.md
│   ├── bash_helpers.sh
│   ├── cliformatting.py
│   ├── deployment_checks.py
│   ├── helper
│   ├── markdown.py
│   └── requirements.txt
├── CONTRIBUTING.md
├── Dockerfile
├── LICENSE
├── README.md
├── app.json
├── docker-compose.yml
├── docs
│   ├── 00-test-local.md
│   ├── 10-setup-gcp.md
│   ├── 20-setup-sql.md
│   ├── 30-setup-bucket.md
│   ├── 40-setup-secrets.md
│   ├── 50-first-deployment.md
│   ├── 60-ongoing-deployments.md
│   ├── 70-manual-deployments.md
│   ├── 80-automation.md
│   ├── 90-cleanup.md
│   ├── yy_styleguide.md
│   └── zz_debugging.md
├── experimental
│   ├── README.md
│   ├── button_test.yaml
│   ├── cleanup.yaml
│   ├── cloudbuild.yaml
│   ├── gen_test.yaml
│   ├── nested.yaml
│   ├── project_setup.sh
│   ├── setup.sh
│   └── terraform_test.yaml
├── manage.py
├── requirements.txt
├── terraform
│   ├── .gitignore
│   ├── bucket.tf
│   ├── database.tf
│   ├── etc
│   │   ├── env.tpl
│   │   └── get_image_digest.sh
│   ├── main.tf
│   ├── output.tf
│   ├── project_services.tf
│   ├── secrets.tf
│   ├── service.tf
│   └── variables.tf
└── unicodex
    ├── __init__.py
    ├── admin.py
    ├── fixtures
    │   └── sampledata.yaml
    ├── management
    │   └── commands
    │       ├── import_emoji.py
    │       └── import_from_vendor.py
    ├── migrations
    │   ├── 0001_initial.py
    │   ├── 0002_create_superuser.py
    │   ├── 0003_codepoint_order.py
    │   ├── 0004_auto_20200922_2216.py
    │   └── __init__.py
    ├── models.py
    ├── settings.py
    ├── static
    │   └── css
    │       ├── normalize.css
    │       ├── skeleton.css
    │       └── unicodex.css
    ├── templates
    │   ├── base.html
    │   ├── codepoint.html
    │   └── index.html
    ├── tests.py
    ├── urls.py
    ├── views.py
    └── wsgi.py
/.cloudbuild/build-migrate-deploy.yaml:
--------------------------------------------------------------------------------
1 | # Copyright 2019 Google LLC
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License");
4 | # you may not use this file except in compliance with the License.
5 | # You may obtain a copy of the License at
6 | #
7 | # http://www.apache.org/licenses/LICENSE-2.0
8 | #
9 | # Unless required by applicable law or agreed to in writing, software
10 | # distributed under the License is distributed on an "AS IS" BASIS,
11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | # See the License for the specific language governing permissions and
13 | # limitations under the License.
14 | steps:
15 | - id: "build "
16 | name: "gcr.io/cloud-builders/docker"
17 | args: ["build", "-t", "${_IMAGE_NAME}", "."]
18 |
19 | - id: "push "
20 | name: "gcr.io/cloud-builders/docker"
21 | args: ["push", "${_IMAGE_NAME}"]
22 |
23 | - id: "layer "
24 | name: "gcr.io/cloud-builders/docker"
25 | entrypoint: /bin/bash
26 | args:
27 | - "-c"
28 | - |
29 | echo "FROM $_IMAGE_NAME
30 | COPY --from=gcr.io/cloudsql-docker/gce-proxy /cloud_sql_proxy /cloudsql/cloud_sql_proxy" > Dockerfile-proxy;
31 |
32 | docker build -f Dockerfile-proxy -t ${_IMAGE_NAME}-proxy .
33 |
34 | - id: "migrate"
35 | name: "${_IMAGE_NAME}-proxy"
36 | entrypoint: /bin/bash
37 | env:
38 | - "PROJECT_ID=$PROJECT_ID"
39 | - "USE_CLOUD_SQL_AUTH_PROXY=true"
40 | args:
41 | - '-c'
42 | - |
43 | /cloudsql/cloud_sql_proxy -instances=${_INSTANCE_CONNECTION_NAME}=tcp:${_DATABASE_PORT} & sleep 2;
44 |
45 | sh .cloudbuild/django_migrate.sh
46 |
47 | - id: "deploy "
48 | name: "gcr.io/google.com/cloudsdktool/cloud-sdk"
49 | entrypoint: "gcloud"
50 | args:
51 | [
52 | "run",
53 | "deploy",
54 | "${_SERVICE}",
55 | "--platform",
56 | "managed",
57 | "--region",
58 | "${_REGION}",
59 | "--image",
60 | "${_IMAGE_NAME}",
61 | ]
62 |
63 | images:
64 | - $_IMAGE_NAME
65 |
66 | options:
67 | dynamic_substitutions: true
68 |
69 | substitutions:
70 | _SERVICE: unicodex
71 | _REGION: us-central1
72 | _INSTANCE_NAME: psql
73 | _DATABASE_PORT: '5432'
74 | _IMAGE_NAME: gcr.io/${PROJECT_ID}/${_SERVICE}
75 | _INSTANCE_CONNECTION_NAME: ${PROJECT_ID}:${_REGION}:${_INSTANCE_NAME}
76 |
--------------------------------------------------------------------------------
/.cloudbuild/django_migrate.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash -eu
2 | #
3 | # Copyright 2019 Google LLC
4 | #
5 | # Licensed under the Apache License, Version 2.0 (the "License");
6 | # you may not use this file except in compliance with the License.
7 | # You may obtain a copy of the License at
8 | #
9 | # http://www.apache.org/licenses/LICENSE-2.0
10 | #
11 | # Unless required by applicable law or agreed to in writing, software
12 | # distributed under the License is distributed on an "AS IS" BASIS,
13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 | # See the License for the specific language governing permissions and
15 | # limitations under the License.
16 |
17 | 
18 | set -e
19 |
20 | echo "🎸 migrate"
21 | python manage.py migrate
22 |
23 | echo "🦄 loaddata"
24 | python manage.py loaddata sampledata
25 |
26 | echo "📦 collect static"
27 | python manage.py collectstatic --noinput
28 |
--------------------------------------------------------------------------------
/.dockerignore:
--------------------------------------------------------------------------------
1 | unicodex/__pycache__
2 | venv/
3 | .git
4 |
5 | **/.dockerignore
6 | **/*Dockerfile
7 | **/.*.sw[po]
8 |
--------------------------------------------------------------------------------
/.env-local:
--------------------------------------------------------------------------------
1 |
2 | # Settings for a local docker-compose driven test
3 |
4 | DEBUG="true"
5 | DATABASE_URL=postgres://postgres:mysecretpassword@db:5432/unicodex
6 | SECRET_KEY=FP0hA3L14P1OeJg-youshouldChangeThis-Pr2gCEuNhH9dfjHJSY8cRlfen5jApzMan6E
7 | SUPERUSER=admin
8 | SUPERPASS=localUserPasswordHere
--------------------------------------------------------------------------------
/.gcloud/README.md:
--------------------------------------------------------------------------------
1 | These files are configured in `../app.json`, and are the steps required to power the "Run on Google Cloud" button.
2 |
--------------------------------------------------------------------------------
/.gcloud/djangomigrate.yaml:
--------------------------------------------------------------------------------
1 | # Image should already be in GCR, so pull it down then use it.
2 | steps:
3 | - id: 'pull '
4 | name: 'gcr.io/cloud-builders/docker'
5 | args: ['pull', 'gcr.io/${PROJECT_ID}/${_SERVICE}']
6 | - id: 'migrate'
7 | name: 'gcr.io/google-appengine/exec-wrapper'
8 | args: ['-i', 'gcr.io/${PROJECT_ID}/${_SERVICE}',
9 | '-s', '${PROJECT_ID}:${_REGION}:${_INSTANCE_NAME}',
10 | '-e', 'PROJECT_ID=${PROJECT_ID}',
11 | '--', 'sh', '.cloudbuild/django_migrate.sh']
12 |
--------------------------------------------------------------------------------
/.gcloud/postbuild.sh:
--------------------------------------------------------------------------------
1 | echo "No prebuild steps defined"
2 |
--------------------------------------------------------------------------------
/.gcloud/postcreate.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | source .util/bash_helpers.sh
4 |
5 | echo "🚀 Final service configuration changes"
6 | export SERVICE_URL=$(gcloud run services describe $K_SERVICE --format "value(status.url)" --platform managed --region ${GOOGLE_CLOUD_REGION})
7 |
8 | echo "→ Setting CURRENT_HOST to deployed URL"
9 | echo " ${SERVICE_URL}"
10 |
11 | echo "→ Connecting to SQL Instance"
12 | echo " ${GOOGLE_CLOUD_PROJECT}:${GOOGLE_CLOUD_REGION}:psql"
13 |
14 | echo "→ Deploying service with service account"
15 | echo " ${K_SERVICE}@${GOOGLE_CLOUD_PROJECT}.iam.gserviceaccount.com"
16 |
17 | gcloud run services update $K_SERVICE --platform managed --region ${GOOGLE_CLOUD_REGION} \
18 | --update-env-vars "CURRENT_HOST=${SERVICE_URL}" \
19 | --add-cloudsql-instances ${GOOGLE_CLOUD_PROJECT}:${GOOGLE_CLOUD_REGION}:psql \
20 | --service-account ${K_SERVICE}@${GOOGLE_CLOUD_PROJECT}.iam.gserviceaccount.com
21 |
22 | echo "→ Removing Compute Service Account secret access"
23 | export PROJECTNUM=$(gcloud projects describe ${GOOGLE_CLOUD_PROJECT} --format 'value(projectNumber)')
24 | export COMPUTE_SA=${PROJECTNUM}-compute@developer.gserviceaccount.com
25 | quiet gcloud secrets remove-iam-policy-binding django_settings \
26 | --member serviceAccount:$COMPUTE_SA \
27 | --role roles/secretmanager.secretAccessor
28 |
29 | echo "Post-create configuration complete ✨"
30 |
31 | echo ""
32 | echo ""
33 | echo "Unicodex is now deployed to ${SERVICE_URL}"
34 | echo ""
35 | echo "To login into the Django admin: "
36 | echo " * go to ${SERVICE_URL}/admin"
37 | echo " * login with the username and password that are stored in Secret Manager"
38 | echo ""
39 | echo "gcloud secrets versions access latest --secret SUPERUSER"
40 | echo "gcloud secrets versions access latest --secret SUPERPASS"
41 | echo ""
42 |
--------------------------------------------------------------------------------
/.gcloud/prebuild.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | shopt -s expand_aliases
3 |
4 | source .util/bash_helpers.sh
5 |
6 | echo "🚀 Deploying $K_SERVICE to $GOOGLE_CLOUD_PROJECT in $GOOGLE_CLOUD_REGION"
7 | export PROJECT_ID=$GOOGLE_CLOUD_PROJECT
8 | gcloud config set project $PROJECT_ID
9 | gcloud config set run/platform managed
10 | export REGION=$GOOGLE_CLOUD_REGION
11 | gcloud config set run/region $REGION
12 | export SERVICE_NAME=$K_SERVICE
13 | export INSTANCE_NAME=psql
14 |
15 | stepdo "Enabling Google API services"
16 | gcloud services enable \
17 | run.googleapis.com \
18 | iam.googleapis.com \
19 | compute.googleapis.com \
20 | sql-component.googleapis.com \
21 | sqladmin.googleapis.com \
22 | cloudbuild.googleapis.com \
23 | cloudkms.googleapis.com \
24 | cloudresourcemanager.googleapis.com \
25 | secretmanager.googleapis.com
26 | stepdone
27 |
28 | stepdo "Creating dedicated service account for $SERVICE_NAME"
29 | gcloud iam service-accounts create $SERVICE_NAME \
30 | --display-name "$SERVICE_NAME service account"
31 | stepdone
32 |
33 | export CLOUDRUN_SA=${SERVICE_NAME}@${PROJECT_ID}.iam.gserviceaccount.com
34 | export PROJECTNUM=$(gcloud projects describe ${PROJECT_ID} --format 'value(projectNumber)')
35 | export CLOUDBUILD_SA=${PROJECTNUM}@cloudbuild.gserviceaccount.com
36 | export COMPUTE_SA=${PROJECTNUM}-compute@developer.gserviceaccount.com
37 |
38 | stepdo "Grant IAM permissions to service accounts"
39 | for role in cloudsql.client run.admin; do
40 | quiet gcloud projects add-iam-policy-binding $PROJECT_ID \
41 | --member serviceAccount:$CLOUDRUN_SA \
42 | --role roles/${role}
43 | quiet gcloud projects add-iam-policy-binding ${PROJECT_ID} \
44 | --member serviceAccount:${CLOUDBUILD_SA} \
45 | --role roles/${role}
46 | done
47 | quiet gcloud iam service-accounts add-iam-policy-binding ${CLOUDRUN_SA} \
48 | --member "serviceAccount:${CLOUDBUILD_SA}" \
49 | --role "roles/iam.serviceAccountUser"
50 | stepdone
51 |
52 | stepdo "Create SQL Instance (may take some time)"
53 | export ROOT_PASSWORD=$(cat /dev/urandom | tr -dc 'a-zA-Z0-9' | fold -w 64 | head -n 1)
54 | export DATABASE_INSTANCE=$PROJECT_ID:$REGION:$INSTANCE_NAME
55 | operation_id=$(gcloud sql instances create $INSTANCE_NAME \
56 | --database-version POSTGRES_13 --cpu 2 --memory 4GB \
57 | --region $REGION \
58 | --project $PROJECT_ID \
59 | --root-password $ROOT_PASSWORD \
60 | --async --format="value(name)")
61 | gcloud sql operations wait $operation_id --timeout=unlimited
62 | stepdone
63 |
64 | stepdo "Create SQL Database and User"
65 | export DATABASE_NAME=unicodex
66 | gcloud sql databases create $DATABASE_NAME \
67 | --instance=$INSTANCE_NAME
68 | export DBUSERNAME=unicodex-django
69 | export DBPASSWORD=$(cat /dev/urandom | tr -dc 'a-zA-Z0-9' | fold -w 40 | head -n 1)
70 | export DATABASE_URL=postgres://$DBUSERNAME:${DBPASSWORD}@//cloudsql/$PROJECT_ID:$REGION:$INSTANCE_NAME/$DATABASE_NAME
71 | gcloud sql users create $DBUSERNAME \
72 | --password $DBPASSWORD \
73 | --instance $INSTANCE_NAME
74 | stepdone
75 |
76 | stepdo "Create Storage bucket"
77 | export GS_BUCKET_NAME=${PROJECT_ID}-media
78 | gsutil mb -l ${REGION} gs://${GS_BUCKET_NAME}
79 | gsutil iam ch \
80 | serviceAccount:${CLOUDRUN_SA}:roles/storage.objectAdmin \
81 | gs://${GS_BUCKET_NAME}
82 | stepdone
83 |
84 | stepdo "Creating Django settings secret, and allowing service access"
85 | echo DATABASE_URL=\"${DATABASE_URL}\" > temp_env
86 | echo GS_BUCKET_NAME=\"${GS_BUCKET_NAME}\" >> temp_env
87 | echo SECRET_KEY=\"$(cat /dev/urandom | tr -dc 'a-zA-Z0-9' | fold -w 50 | head -n 1)\" >> temp_env
88 | gcloud secrets create django_settings --data-file temp_env
89 | rm temp_env
90 |
91 | for MEMBER in $CLOUDRUN_SA $CLOUDBUILD_SA $COMPUTE_SA; do
92 | echo "... Adding $MEMBER..."
93 | quiet gcloud secrets add-iam-policy-binding django_settings \
94 | --member serviceAccount:$MEMBER \
95 | --role roles/secretmanager.secretAccessor
96 | done
97 | stepdone
98 |
99 | stepdo "Creating Django admin user secrets, and allowing limited access"
100 | export SUPERUSER="admin"
101 | export SUPERPASS=$(cat /dev/urandom | tr -dc 'a-zA-Z0-9' | fold -w 30 | head -n 1)
102 | for SECRET in SUPERUSER SUPERPASS; do
103 | gcloud secrets create $SECRET --replication-policy automatic
104 | echo -n "${!SECRET}" | gcloud secrets versions add $SECRET --data-file=-
105 | quiet gcloud secrets add-iam-policy-binding $SECRET \
106 | --member serviceAccount:$CLOUDBUILD_SA \
107 | --role roles/secretmanager.secretAccessor
108 | done
109 | stepdone
110 |
111 | echo "Pre-build provisioning complete ✨"
112 |
--------------------------------------------------------------------------------
/.gcloud/precreate.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | # migrate
3 | echo "🚀 Running initial Django migration (this will take a few minutes)..."
4 | echo " Configurations: service ${K_SERVICE}, region ${GOOGLE_CLOUD_REGION}, instance psql"
5 |
6 | gcloud builds submit --config .gcloud/djangomigrate.yaml \
7 | --project $GOOGLE_CLOUD_PROJECT \
8 | --substitutions _SERVICE=${K_SERVICE},_REGION=${GOOGLE_CLOUD_REGION},_INSTANCE_NAME=psql
9 |
10 | echo "Pre-create data migration complete ✨"
11 |
--------------------------------------------------------------------------------
/.gcloudignore:
--------------------------------------------------------------------------------
1 | #!include:.gitignore
2 |
3 | .env
4 |
5 | .git
6 |
7 | .DS_Store
8 | media/
9 |
10 | venv/
11 | htmlcov/
12 | .coverage/
13 |
--------------------------------------------------------------------------------
/.github/workflows/django-test.yml:
--------------------------------------------------------------------------------
1 | name: django-test
2 |
3 | on:
4 | push:
5 | branches:
6 | - latest
7 |
8 | pull_request:
9 |
10 | schedule:
11 | - cron: "0 16 * * 1" # Weekly on Mondays
12 |
13 | jobs:
14 | run_tests:
15 | name: Run Django Tests
16 | runs-on: ubuntu-latest
17 | services:
18 | db:
19 | image: postgres:alpine
20 | env:
21 | POSTGRES_PASSWORD: postgres
22 | POSTGRES_USER: postgres
23 | POSTGRES_DB: unicodex
24 | ports:
25 | - 5432:5432
26 | options: --mount type=tmpfs,destination=/var/lib/postgresql/data --health-cmd
27 | pg_isready --health-interval 10s --health-timeout 5s --health-retries 5
28 | steps:
29 | - name: Checkout
30 | uses: actions/checkout@v1
31 |
32 | - name: Set up Python
33 | uses: actions/setup-python@v1
34 | with:
35 | python-version: 3.x
36 |
37 | - name: Create local settings
38 | run: |
39 | # Ensure database port is dynamic
40 | echo "DATABASE_URL='postgres://postgres:postgres@localhost:${{ job.services.db.ports[5432] }}/unicodex'" > .env
41 | # Enter some useful defaults for other environment variables
42 | echo -en "SECRET_KEY=asdfasdfasdf\nSUPERUSER=admin\nSUPERPASS=admin\nDEBUG=True" >> .env
43 | - name: Install dependencies
44 | run: pip install -r requirements.txt
45 |
46 | - name: Run tests
47 | run: python manage.py test
48 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # ignore local uploaded dev media
2 | media/
3 |
4 | # ignore python cache
5 | __pycache__/
6 | venv/
7 | 
8 | *.py[cod]
9 | *$py.class
10 |
11 | # ignore vim caches
12 | *.swp
13 |
14 | # ignore terraform
15 | terraform/.terraform/*
16 | .terraform/*
17 | *.tfstate
18 | *.tfstate.*
19 | crash.log
20 | *.tfvars
21 |
22 | # ignore .env
23 | .env
24 |
25 | # ignore generated file
26 | deploy.sh
27 |
--------------------------------------------------------------------------------
/.util/README.md:
--------------------------------------------------------------------------------
1 | This directory holds `helper`, a CLI to help with Unicodex.
2 |
3 | This helper does require some setup:
4 |
5 | ```
6 | python -m venv venv
7 | source venv/bin/activate
8 | python -m pip install -r .util/requirements.txt
9 | .util/helper --help
10 | ```
11 |
12 | Additionally, `googleapiclient.discovery` requires authentication, so set up a dedicated service
13 | account:
14 |
15 | ```
16 | gcloud iam service-accounts create robot-account \
17 | --display-name "Robot account"
18 | gcloud projects add-iam-policy-binding ${PROJECT_ID} \
19 | --member serviceAccount:robot-account@${PROJECT_ID}.iam.gserviceaccount.com \
20 | --role roles/owner
21 | gcloud iam service-accounts keys create ~/robot-account-key.json \
22 | --iam-account robot-account@${PROJECT_ID}.iam.gserviceaccount.com
23 | export GOOGLE_APPLICATION_CREDENTIALS=~/robot-account-key.json
24 | ```
25 |
26 | ### `gen` - generate a scriptable deployment
27 |
28 | If you look closely at `docs/`, you'll see a few "Here's what you could do" sections. These exist for scriptability: using the `shell` markers in `docs/`, it's possible to extract all the code into a single file, at least somewhat automating the setup.
29 |
30 | We can also make placeholder substitutions at parse time, producing a script that's ready for you to run.
31 |
32 | To generate:
33 |
34 | ```
35 | .util/helper gen > deploy.sh
36 | ```
37 |
38 | Then, in either a Cloud Shell or your local machine:
39 |
40 | ```
41 | time bash -ex deploy.sh
42 | ```
43 |
44 | Notes:
45 |
46 | * Your passwords will be echoed in the output.
47 | * The entire process will take ~5-10 minutes
48 | * You *can* run this on your local machine, assuming:
49 | * You're running macOS or a Linux variant
50 | * You have `gcloud` installed and configured.
51 |
52 | ℹ️ This script serves as a test of the tutorial, and 'works', but is not really a complete replacement for the terraform Infrastructure as Code approach.
53 |
54 | ---
55 |
56 | ### `check-env` - environment variable debugging
57 |
58 | Humans running the tutorial may run into issues if they don't execute all the
59 | variable declarations. This script uses the functionality from `gen`
60 | to parse all environment variables declared in the tutorial, and report their values.
61 |
62 | In theory, this should make it obvious when part of the tutorial was missed.
63 |
64 | To run:
65 | ```
66 | .util/helper check-env
67 | ```
68 |
69 | Note: this helper is only for manual deployments, and will not work with the
70 | scriptable deployment helper above. Why? Because the variables aren't defined in your
71 | local terminal, but in the bash subprocess, so there is nothing for this check to inspect.
72 |
73 | ### `check-deploy` - live introspection of a deployment
74 |
75 | Unicodex is complex, and even if you think you have remembered to toggle all the switches, you may end up missing something.
76 | This helper attempts to inspect a live deployment of unicodex for common misconfigurations.
77 |
78 | To use, follow the setup instructions in the top comment of `.util/deployment_checks.py`
79 | to set up a dedicated service account with access to perform all the required checks, then run:
80 |
81 | ```
82 | .util/helper check-deploy $PROJECT_ID
83 | ```
84 |
85 | It will assume the default values for region, service, and Django settings secret, but you can override these if required.
86 |
--------------------------------------------------------------------------------
/.util/bash_helpers.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | # A collection of helper functions for bash scripts
4 |
5 | function quiet {
6 | "$@" > /dev/null 2>&1
7 | }
8 |
9 | stepdo() {
10 | echo "→ ${1}..."
11 | }
12 |
13 | # this will only capture the most recent return code, sadly.
14 | stepdone(){
15 | statuscode=$?
16 | msg="... done"
17 | if [ $statuscode -ne 0 ]; then msg="❌ done, but non-zero return code ($statuscode)"; fi
18 | echo $msg
19 | echo " "
20 | }
21 |
--------------------------------------------------------------------------------
/.util/cliformatting.py:
--------------------------------------------------------------------------------
1 | import os
2 | import sys
3 | import click
4 | from math import ceil
5 | import shutil
6 |
7 | columns, _ = shutil.get_terminal_size()
8 | RESULTS = {"success": 0, "failure": 0}
9 |
10 |
11 | def header(msg):
12 | click.secho(f"\n# {msg}", bold=True)
13 |
14 |
15 | def s(n):
16 | if n == 1:
17 | return ""
18 | return "s"
19 |
20 |
21 | def error(s, details=None):
22 | lineart = "********************************"
23 | click.secho(f"{lineart}\nError {s}", bold=True, fg="red")
24 | if details:
25 | click.echo(details)
26 | click.secho(f"{lineart}", bold=True, fg="red")
27 |
28 |
29 | def echo(msg, indent=""):
30 | click.echo(f"{indent}{msg}")
31 |
32 |
33 | def summary():
34 | total = RESULTS["success"] + RESULTS["failure"]
35 | fails = RESULTS["failure"]
36 | if fails != 0:
37 | failcol = {"bold": True, "fg": "red"}
38 | else:
39 | failcol = {}
40 | click.echo(
41 | (
42 | click.style(
43 | f"\nResults: {total} check{s(total)}, ",
44 | bold=True,
45 | )
46 | + click.style(f"{fails} failure{s(fails)}", **failcol)
47 | + click.style(".", bold=True)
48 | )
49 | )
50 | if fails == 0:
51 | sys.exit(0)
52 | else:
53 | sys.exit(1)
54 |
55 |
56 | def result(msg, success=True, details=None):
57 | if success:
58 | success_message = "PASS"
59 | fg = "green"
60 | RESULTS["success"] += 1
61 | else:
62 | success_message = "FAIL"
63 | fg = "red"
64 | RESULTS["failure"] += 1
65 |
66 | # overflow math. 7 is the result length ("[FAIL] ")
67 | amsg = msg.ljust(ceil((len(msg) + 7) / columns) * columns - 7)
68 |
69 | click.echo(amsg + click.style(f"[{success_message}]", fg=fg, bold=True))
70 | if details and not success:
71 | click.echo(details)
72 |
73 |
74 | """
75 | Usage:
76 | header("Testing the things")
77 | result("I did a thing")
78 | result("I failed a thing", success=False, details="how to fix the issue")
79 | summary()
80 | """
81 |
--------------------------------------------------------------------------------
/.util/deployment_checks.py:
--------------------------------------------------------------------------------
1 | import json
2 | import subprocess
3 | from io import StringIO
4 | from urllib.parse import urlparse
5 |
6 | import click
7 | import googleapiclient
8 | import httpx
9 | from bs4 import BeautifulSoup as bs4
10 | from dotenv import dotenv_values
11 | from google.api_core import exceptions
12 | from google.cloud import secretmanager as sml
13 | from googleapiclient.discovery import build
14 |
15 | from cliformatting import echo, error, header, result, summary
16 |
17 | # TODO(glasnt): more checks as required
18 |
19 |
20 | def get_service(project, service_name, region):
21 | run = build("run", "v1")
22 | fqname = f"projects/{project}/locations/{region}/services/{service_name}"
23 | service = run.projects().locations().services().get(name=fqname).execute()
24 | return service
25 |
26 |
27 | def get_sa(service):
28 | return service["spec"]["template"]["spec"]["serviceAccountName"]
29 |
30 |
31 | def check_run(service):
32 | header(f"Service configuration checks")
33 |
34 | sn = service["metadata"]["name"]
35 | result(f"Service {sn} exists")
36 |
37 |
38 | def check_sa(service):
39 | sa = get_sa(service)
40 | echo(f"Associated service account: {sa}")
41 |
42 | result(
43 | f"Associated service account is not the default service account",
44 | details="Ensure a custom service account is associated to the service",
45 | success=("compute-" not in sa),
46 | )
47 |
48 |
49 | def check_bindings(service, project):
50 | sa = get_sa(service)
51 | echo(f"Associated service account (SA): {sa}")
52 |
53 | success = True
54 | crm = build("cloudresourcemanager", "v1")
55 | iam = crm.projects().getIamPolicy(resource=f"{project}").execute()
56 | for binding in iam["bindings"]:
57 | if binding["role"] == "roles/owner":
58 | for member in binding["members"]:
59 | if member == f"serviceAccount:{sa}":
60 | success = False
61 | result(
62 | "SA doesn't have Owner role",
63 | details="Remove service account from having Owner role",
64 | success=success,
65 | )
66 |
67 |
68 | def check_roles(service, project):
69 | sa = f"serviceAccount:{get_sa(service)}"
70 | crm = build("cloudresourcemanager", "v1")
71 | iam = crm.projects().getIamPolicy(resource=f"{project}").execute()
72 |
73 | required_roles = ["roles/run.admin", "roles/cloudsql.client"]
74 |
75 | member_roles = [b["role"] for b in iam["bindings"] if sa in b["members"]]
76 |
77 | for role in required_roles:
78 | result(
79 | f"SA has {role}",
80 | details=f"Ensure SA has {role}",
81 | success=(role in member_roles),
82 | )
83 |
84 |
85 | def cleanhtml(raw_html):
86 | soup = bs4(raw_html, "html.parser")
87 | for tag in soup():
88 | for attribute in ["class", "id", "name", "style"]:
89 | del tag[attribute]
90 |
91 | return "Page preview: " + " ".join(soup.find_all(text=True)).replace(
92 | " ", ""
93 | ).replace("\n", " ")
94 |
95 |
96 | def check_envvars(project, service):
97 | envvars = service["spec"]["template"]["spec"]["containers"][0]["env"]
98 | current_host = [x["value"] for x in envvars if x["name"] == "CURRENT_HOST"]
99 |
100 | if not current_host:
101 | result(
102 | "CURRENT_HOST envvar not found",
103 | details="Check the service environment variables.",
104 | success=False,
105 | )
106 | else:
107 | host = current_host[0]
108 | service_host = service["status"]["url"]
109 | if host == service_host:
110 | result(f"CURRENT_HOST set to service URL ({host}).")
111 | else:
112 | result(f"CURRENT_HOST ({host}) and service URL ({service_host}) don't match.", success=False)
113 |
114 | def check_unicodex(project, service):
115 | header("Deployed service checks")
116 |
117 | fixture_code = "1F44B"
118 | fixture_slug = f"/u/{fixture_code}"
119 | login_slug = "/admin/login/?next=/admin/"
120 | model_admin_slug = "/admin/unicodex/codepoint/"
121 |
122 | if "url" not in service["status"].keys():
123 | message = service["status"]["conditions"][0]["message"]
124 | result(f"Service does not have a deployed URL: {message}", success=False)
125 | else:
126 | url = service["status"]["url"]
127 | echo(f"Service deployment URL: {url}")
128 |
129 | try:
130 | response = httpx.get(url, timeout=30)
131 |
132 | except httpx.ReadTimeout as e:
133 | result(e, success=False)
134 | return
135 |
136 | print(cleanhtml(response.text))
137 | if response.status_code == 200:
138 | result("Index page loaded successfully")
139 | else:
140 | result(
141 | f"Index page returns an error: {response.status_code}", success=False
142 | )
143 |
144 | if "Unicodex" in response.text:
145 | result("Index page contains 'Unicodex'")
146 | else:
147 | result("Index page does not contain the string 'Unicodex'", success=False)
148 |
149 | fixture = httpx.get(url + fixture_slug)
150 | print(cleanhtml(fixture.text))
151 |
152 | admin = httpx.get(url + login_slug)
153 | if not admin.is_error:
154 | result(f"Django admin returns status okay ({admin.status_code})")
155 | else:
156 | result(f"Django admin returns an error: {admin.status_code}", success=False)
157 |
158 | if "Log in" in admin.text and "Django administration" in admin.text:
159 | result("Django admin login screen successfully loaded")
160 | else:
161 | result("Django admin login not found", success=False, details=admin.text)
162 |
163 | headers = {"Referer": url}
164 | with httpx.Client(headers=headers, follow_redirects=True, timeout=30) as client:
165 |
166 | # Login
167 | admin_username = get_secret(project, "SUPERUSER")
168 | admin_password = get_secret(project, "SUPERPASS")
169 |
170 | header("Test Django Admin")
171 | client.get(url + login_slug)
172 | response = client.post(
173 | url + login_slug,
174 | data={
175 | "username": admin_username,
176 | "password": admin_password,
177 | "csrfmiddlewaretoken": client.cookies["csrftoken"],
178 | },
179 | )
180 | assert not response.is_error
181 | assert "Site administration" in response.text
182 | assert "Codepoints" in response.text
183 | result(f"Django Admin logged in")
184 |
185 | # Try admin action
186 | response = client.post(
187 | url + model_admin_slug,
188 | data={
189 | "action": "generate_designs",
190 | "_selected_action": 1,
191 | "csrfmiddlewaretoken": client.cookies["csrftoken"],
192 | },
193 | )
194 | assert not response.is_error
195 | assert "Imported vendor versions" in response.text
196 | result(f"Django Admin action completed")
197 |
198 | # check updated feature
199 | response = client.get(url + f"/u/{fixture_code}")
200 | assert fixture_code in response.text
201 | assert "Android" in response.text
202 | result(f"Django Admin action verified")
203 |
204 | print(cleanhtml(response.text))
205 |
206 |
207 | def get_secret(project, secret_name):
208 | sm = sml.SecretManagerServiceClient() # using static library
209 | secret_path = f"projects/{project}/secrets/{secret_name}/versions/latest"
210 | try:
211 | payload = sm.access_secret_version(name=secret_path).payload.data.decode(
212 | "UTF-8"
213 | )
214 | return payload
215 | except exceptions.PermissionDenied as e:
216 | result(f"Secret error: {e}", success=False)
217 | return ""
218 |
219 |
220 | def parse_secrets(values):
221 | secrets = {}
222 | secrets["dburl"] = urlparse(values["DATABASE_URL"])
223 | secrets["dbuser"] = secrets["dburl"].netloc.split(":")[0]
224 | secrets["dbinstance"], secrets["dbname"] = secrets["dburl"].path.split("/")[3:]
225 | secrets["media_bucket"] = values["GS_BUCKET_NAME"]
226 | return secrets
227 |
228 |
229 | def check_secrets(values):
230 | header("Settings checks")
231 | for key in ["DATABASE_URL", "GS_BUCKET_NAME", "SECRET_KEY"]:
232 | result(f"{key} is defined", success=(key in values.keys()))
233 |
234 |
235 | def check_bucket(media_bucket):
236 | header("Object storage checks")
237 | sapi = build("storage", "v1")
238 | try:
239 | bucket = sapi.buckets().get(bucket=media_bucket).execute()
240 | result(f"Storage bucket {bucket['name']} exists in {bucket['location']}")
241 | except googleapiclient.errors.HttpError as e:
242 | result(f"Storage bucket error {e}", success=False)
243 | # TODO check bucket permissions.
244 |
245 |
246 | def check_database(project, service, secrets):
247 |
248 | header("Database checks")
249 | database_name = service["spec"]["template"]["metadata"]["annotations"][
250 | "run.googleapis.com/cloudsql-instances"
251 | ]
252 | echo(f"Associated database: {database_name}")
253 | _, dbregion, dbinstance = database_name.split(":")
254 |
255 | result(
256 | f"Associated database instance matches secret connection URL instance",
257 | success=(secrets["dbinstance"] == database_name),
258 | )
259 |
260 | dbapi = build("sqladmin", "v1beta4")
261 | instance = dbapi.instances().get(project=project, instance=dbinstance).execute()
262 | result(
263 | f"Instance exists: {instance['name']}, running {instance['databaseVersion']}"
264 | )
265 |
266 | database = (
267 | dbapi.databases()
268 | .get(project=project, instance=dbinstance, database=secrets["dbname"])
269 | .execute()
270 | )
271 | result(f"Database exists: {database['name']}, collation {database['collation']}")
272 |
273 | users = dbapi.users().list(project=project, instance=dbinstance).execute()
274 | result(
275 | f"User exists: {secrets['dbuser']}",
276 | details=users["items"],
277 | success=(secrets["dbuser"] in [user["name"] for user in users["items"]]),
278 | )
279 |
280 |
281 | def check_deploy(project, service_name, region, secret_name):
282 | click.secho(f"🛠 Checking {service_name} in {region} in {project}", bold=True)
283 |
284 | service = get_service(project, service_name, region)
285 |
286 | check_run(service)
287 | check_bindings(service, project)
288 |
289 | check_roles(service, project)
290 | check_envvars(project, service)
291 |
292 | check_unicodex(project, service)
293 |
294 | secret_env = get_secret(project, secret_name)
295 |
296 | if secret_env:
297 | # https://github.com/theskumar/python-dotenv#in-memory-filelikes
298 | values = dotenv_values(stream=StringIO(secret_env))
299 | check_secrets(values)
300 | secrets = parse_secrets(values)
301 |
302 | check_bucket(secrets["media_bucket"])
303 |
304 | check_database(project, service, secrets)
305 |
306 |
307 |
308 | summary()
309 |
310 |
311 | def gcloud(call):
312 | """
313 | WARNING: should only be used when no Python API exists.
314 | Calls out to gcloud, and returns a dict of the json result.
315 |
316 | Sample invocation:
317 | service = gcloud(f"run services describe {service}")
318 | sa = service["spec"]["template"]["spec"]["serviceAccountName"]
319 | """
320 | params = ["gcloud"] + call.split(" ") + ["--format", "json"]
321 | resp = subprocess.run(params, capture_output=True)
322 | if resp.returncode != 0:
323 | error(f"gcloud {call} returned {resp.returncode}", details=resp.stderr)
324 | return json.loads(resp.stdout)
325 |
--------------------------------------------------------------------------------
/.util/helper:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | import datetime
3 | import json
4 | import os
5 |
6 | import click
7 |
8 | try:
9 | import git
10 |
11 | has_git = True
12 | except ImportError:
13 | has_git = False
14 |
15 | import deployment_checks
16 | from markdown import get_all_codeblocks
17 |
18 |
19 | @click.group()
20 | def cli():
21 | pass
22 |
23 |
24 | @click.command()
25 | @click.argument("path", default="docs/")
26 | @click.option("--project", default="YOUR_PROJECT_ID", help="Replacement Project ID")
27 | @click.option("--instance", default="psql", help="Replacement SQL Instance name")
28 | @click.option("--region", default="us-central1", help="Replacement Google Cloud region")
29 | def gen(path, project, instance, region):
30 | """Generate a script to deploy unicodex"""
31 | if has_git:
32 | try:
33 | repo = git.Repo(search_parent_directories=True)
34 | sha = repo.head.object.hexsha
35 | branch = repo.active_branch.name
36 | gitinfo = f"# git: {branch}, sha {sha}"
37 | except git.InvalidGitRepositoryError as e:
38 | gitinfo = "# static files (not in an active git directory)"
39 | else:
40 | gitinfo = "# no local install of git found"
41 |
42 | r = [
43 | "#!/bin/bash",
44 | "shopt -s expand_aliases",
45 | "",
46 | f"# Generated from {path} on {str(datetime.datetime.now())}",
47 | gitinfo,
48 | "# execute with: bash -ex script.sh for echo output",
49 | "",
50 | ]
51 | r.extend(get_all_codeblocks(path))
52 |
53 | script = "\n".join(r)
54 | script = script.replace("YourProjectID", project)
55 | script = script.replace("YourInstanceName", instance)
56 | script = script.replace("us-central1", region)
57 | print(script)
58 |
59 |
60 | @click.command()
61 | @click.argument("path", default="docs/", type=click.Path(exists=True))
62 | def check_env(path):
63 | """Check the locally defined environment variables"""
64 | envvars = {}
65 | code = "\n".join(get_all_codeblocks(path)).split("\n")
66 | for line in code:
67 | if "export" in line:
68 | var = line.split(" ")[1].split("=")[0]
69 | envvars[var] = os.environ.get(var, "None")
70 | print(json.dumps(envvars, indent=4))
71 |
72 |
73 | @click.command()
74 | @click.argument("project", envvar="PROJECT_ID")
75 | @click.option("--service", default="unicodex", help="The service name")
76 | @click.option(
77 | "--region", default="us-central1", help="The region the service is deployed in"
78 | )
79 | @click.option(
80 | "--secret",
81 | default="django_settings",
82 | help="The name of the secret containing the django env",
83 | )
84 | def check_deploy(project, service, region, secret):
85 | """Check a unicodex deployment"""
86 | deployment_checks.check_deploy(project, service, region, secret)
87 |
88 |
89 | cli.add_command(gen)
90 | cli.add_command(check_env)
91 | cli.add_command(check_deploy)
92 |
93 | if __name__ == "__main__":
94 | cli()
95 |
--------------------------------------------------------------------------------
/.util/markdown.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | import re
3 | from pathlib import Path
4 |
5 | """
6 | For a markdown file, return all the triple-backtick code blocks
7 | """
8 |
9 |
10 | def extract(f, filter=None):
11 | code_blocks = []
12 | while True:
13 | line = f.readline()
14 | if not line:
15 | # EOF
16 | break
17 |
18 | out = re.match("[^`]*```(.*)$", line)
19 | if out:
20 | if filter and filter.strip() != out.group(1).strip():
21 | continue
22 | code_block = [f.readline()]
23 | while code_block[-1] and re.search("```", code_block[-1]) is None:
24 | code_block.append(f.readline())
25 | code_blocks.append("".join(code_block[:-1]))
26 | return code_blocks
27 |
28 |
29 | """
30 | For a glob of files, send them all off for processing in a logical order
31 | """
32 |
33 |
34 | def get_all_codeblocks(path):
35 | r = []
36 | targets = sorted(Path(path).glob("*.md"))
37 | for x in targets:
38 | r.append(f"# {x.name}")
39 | with open(x) as f:
40 | data = extract(f, "shell")
41 |
42 | r.append("\n".join(data))
43 | return r
44 |
--------------------------------------------------------------------------------
/.util/requirements.txt:
--------------------------------------------------------------------------------
1 | gitpython
2 | python-dotenv
3 | httpx
4 | click
5 | google-api-python-client
6 | google-cloud-secret-manager
7 | pytest
8 | requests
9 | beautifulsoup4
--------------------------------------------------------------------------------
/CONTRIBUTING.md:
--------------------------------------------------------------------------------
1 | # How to Contribute
2 |
3 | We'd love to accept your patches and contributions to this project. There are
4 | just a few small guidelines you need to follow.
5 |
6 | ## Contributor License Agreement
7 |
8 | Contributions to this project must be accompanied by a Contributor License
9 | Agreement. You (or your employer) retain the copyright to your contribution;
10 | this simply gives us permission to use and redistribute your contributions as
11 | part of the project. Head over to <https://cla.developers.google.com/> to see
12 | your current agreements on file or to sign a new one.
13 |
14 | You generally only need to submit a CLA once, so if you've already submitted one
15 | (even if it was for a different project), you probably don't need to do it
16 | again.
17 |
18 | ## Code reviews
19 |
20 | All submissions, including submissions by project members, require review. We
21 | use GitHub pull requests for this purpose. Consult
22 | [GitHub Help](https://help.github.com/articles/about-pull-requests/) for more
23 | information on using pull requests.
24 |
25 | ## Community Guidelines
26 |
27 | This project follows
28 | [Google's Open Source Community Guidelines](https://opensource.google.com/conduct/).
29 |
--------------------------------------------------------------------------------
/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM python:3.8-slim-buster
2 |
3 | ENV APP_HOME /app
4 | ENV PORT 8080
5 | ENV PYTHONUNBUFFERED 1
6 |
7 | WORKDIR $APP_HOME
8 | COPY requirements.txt .
9 |
10 | RUN pip install --no-cache-dir -r requirements.txt
11 |
12 | COPY . .
13 |
14 | CMD gunicorn --bind :$PORT --workers 1 --threads 8 --preload unicodex.wsgi:application
15 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 |
2 | Apache License
3 | Version 2.0, January 2004
4 | http://www.apache.org/licenses/
5 |
6 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
7 |
8 | 1. Definitions.
9 |
10 | "License" shall mean the terms and conditions for use, reproduction,
11 | and distribution as defined by Sections 1 through 9 of this document.
12 |
13 | "Licensor" shall mean the copyright owner or entity authorized by
14 | the copyright owner that is granting the License.
15 |
16 | "Legal Entity" shall mean the union of the acting entity and all
17 | other entities that control, are controlled by, or are under common
18 | control with that entity. For the purposes of this definition,
19 | "control" means (i) the power, direct or indirect, to cause the
20 | direction or management of such entity, whether by contract or
21 | otherwise, or (ii) ownership of fifty percent (50%) or more of the
22 | outstanding shares, or (iii) beneficial ownership of such entity.
23 |
24 | "You" (or "Your") shall mean an individual or Legal Entity
25 | exercising permissions granted by this License.
26 |
27 | "Source" form shall mean the preferred form for making modifications,
28 | including but not limited to software source code, documentation
29 | source, and configuration files.
30 |
31 | "Object" form shall mean any form resulting from mechanical
32 | transformation or translation of a Source form, including but
33 | not limited to compiled object code, generated documentation,
34 | and conversions to other media types.
35 |
36 | "Work" shall mean the work of authorship, whether in Source or
37 | Object form, made available under the License, as indicated by a
38 | copyright notice that is included in or attached to the work
39 | (an example is provided in the Appendix below).
40 |
41 | "Derivative Works" shall mean any work, whether in Source or Object
42 | form, that is based on (or derived from) the Work and for which the
43 | editorial revisions, annotations, elaborations, or other modifications
44 | represent, as a whole, an original work of authorship. For the purposes
45 | of this License, Derivative Works shall not include works that remain
46 | separable from, or merely link (or bind by name) to the interfaces of,
47 | the Work and Derivative Works thereof.
48 |
49 | "Contribution" shall mean any work of authorship, including
50 | the original version of the Work and any modifications or additions
51 | to that Work or Derivative Works thereof, that is intentionally
52 | submitted to Licensor for inclusion in the Work by the copyright owner
53 | or by an individual or Legal Entity authorized to submit on behalf of
54 | the copyright owner. For the purposes of this definition, "submitted"
55 | means any form of electronic, verbal, or written communication sent
56 | to the Licensor or its representatives, including but not limited to
57 | communication on electronic mailing lists, source code control systems,
58 | and issue tracking systems that are managed by, or on behalf of, the
59 | Licensor for the purpose of discussing and improving the Work, but
60 | excluding communication that is conspicuously marked or otherwise
61 | designated in writing by the copyright owner as "Not a Contribution."
62 |
63 | "Contributor" shall mean Licensor and any individual or Legal Entity
64 | on behalf of whom a Contribution has been received by Licensor and
65 | subsequently incorporated within the Work.
66 |
67 | 2. Grant of Copyright License. Subject to the terms and conditions of
68 | this License, each Contributor hereby grants to You a perpetual,
69 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
70 | copyright license to reproduce, prepare Derivative Works of,
71 | publicly display, publicly perform, sublicense, and distribute the
72 | Work and such Derivative Works in Source or Object form.
73 |
74 | 3. Grant of Patent License. Subject to the terms and conditions of
75 | this License, each Contributor hereby grants to You a perpetual,
76 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
77 | (except as stated in this section) patent license to make, have made,
78 | use, offer to sell, sell, import, and otherwise transfer the Work,
79 | where such license applies only to those patent claims licensable
80 | by such Contributor that are necessarily infringed by their
81 | Contribution(s) alone or by combination of their Contribution(s)
82 | with the Work to which such Contribution(s) was submitted. If You
83 | institute patent litigation against any entity (including a
84 | cross-claim or counterclaim in a lawsuit) alleging that the Work
85 | or a Contribution incorporated within the Work constitutes direct
86 | or contributory patent infringement, then any patent licenses
87 | granted to You under this License for that Work shall terminate
88 | as of the date such litigation is filed.
89 |
90 | 4. Redistribution. You may reproduce and distribute copies of the
91 | Work or Derivative Works thereof in any medium, with or without
92 | modifications, and in Source or Object form, provided that You
93 | meet the following conditions:
94 |
95 | (a) You must give any other recipients of the Work or
96 | Derivative Works a copy of this License; and
97 |
98 | (b) You must cause any modified files to carry prominent notices
99 | stating that You changed the files; and
100 |
101 | (c) You must retain, in the Source form of any Derivative Works
102 | that You distribute, all copyright, patent, trademark, and
103 | attribution notices from the Source form of the Work,
104 | excluding those notices that do not pertain to any part of
105 | the Derivative Works; and
106 |
107 | (d) If the Work includes a "NOTICE" text file as part of its
108 | distribution, then any Derivative Works that You distribute must
109 | include a readable copy of the attribution notices contained
110 | within such NOTICE file, excluding those notices that do not
111 | pertain to any part of the Derivative Works, in at least one
112 | of the following places: within a NOTICE text file distributed
113 | as part of the Derivative Works; within the Source form or
114 | documentation, if provided along with the Derivative Works; or,
115 | within a display generated by the Derivative Works, if and
116 | wherever such third-party notices normally appear. The contents
117 | of the NOTICE file are for informational purposes only and
118 | do not modify the License. You may add Your own attribution
119 | notices within Derivative Works that You distribute, alongside
120 | or as an addendum to the NOTICE text from the Work, provided
121 | that such additional attribution notices cannot be construed
122 | as modifying the License.
123 |
124 | You may add Your own copyright statement to Your modifications and
125 | may provide additional or different license terms and conditions
126 | for use, reproduction, or distribution of Your modifications, or
127 | for any such Derivative Works as a whole, provided Your use,
128 | reproduction, and distribution of the Work otherwise complies with
129 | the conditions stated in this License.
130 |
131 | 5. Submission of Contributions. Unless You explicitly state otherwise,
132 | any Contribution intentionally submitted for inclusion in the Work
133 | by You to the Licensor shall be under the terms and conditions of
134 | this License, without any additional terms or conditions.
135 | Notwithstanding the above, nothing herein shall supersede or modify
136 | the terms of any separate license agreement you may have executed
137 | with Licensor regarding such Contributions.
138 |
139 | 6. Trademarks. This License does not grant permission to use the trade
140 | names, trademarks, service marks, or product names of the Licensor,
141 | except as required for reasonable and customary use in describing the
142 | origin of the Work and reproducing the content of the NOTICE file.
143 |
144 | 7. Disclaimer of Warranty. Unless required by applicable law or
145 | agreed to in writing, Licensor provides the Work (and each
146 | Contributor provides its Contributions) on an "AS IS" BASIS,
147 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
148 | implied, including, without limitation, any warranties or conditions
149 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
150 | PARTICULAR PURPOSE. You are solely responsible for determining the
151 | appropriateness of using or redistributing the Work and assume any
152 | risks associated with Your exercise of permissions under this License.
153 |
154 | 8. Limitation of Liability. In no event and under no legal theory,
155 | whether in tort (including negligence), contract, or otherwise,
156 | unless required by applicable law (such as deliberate and grossly
157 | negligent acts) or agreed to in writing, shall any Contributor be
158 | liable to You for damages, including any direct, indirect, special,
159 | incidental, or consequential damages of any character arising as a
160 | result of this License or out of the use or inability to use the
161 | Work (including but not limited to damages for loss of goodwill,
162 | work stoppage, computer failure or malfunction, or any and all
163 | other commercial damages or losses), even if such Contributor
164 | has been advised of the possibility of such damages.
165 |
166 | 9. Accepting Warranty or Additional Liability. While redistributing
167 | the Work or Derivative Works thereof, You may choose to offer,
168 | and charge a fee for, acceptance of support, warranty, indemnity,
169 | or other liability obligations and/or rights consistent with this
170 | License. However, in accepting such obligations, You may act only
171 | on Your own behalf and on Your sole responsibility, not on behalf
172 | of any other Contributor, and only if You agree to indemnify,
173 | defend, and hold each Contributor harmless for any liability
174 | incurred by, or claims asserted against, such Contributor by reason
175 | of your accepting any such warranty or additional liability.
176 |
177 | END OF TERMS AND CONDITIONS
178 |
179 | APPENDIX: How to apply the Apache License to your work.
180 |
181 | To apply the Apache License to your work, attach the following
182 | boilerplate notice, with the fields enclosed by brackets "[]"
183 | replaced with your own identifying information. (Don't include
184 | the brackets!) The text should be enclosed in the appropriate
185 | comment syntax for the file format. We also recommend that a
186 | file or class name and description of purpose be included on the
187 | same "printed page" as the copyright notice for easier
188 | identification within third-party archives.
189 |
190 | Copyright 2019 The unicodex Authors
191 |
192 | Licensed under the Apache License, Version 2.0 (the "License");
193 | you may not use this file except in compliance with the License.
194 | You may obtain a copy of the License at
195 |
196 | http://www.apache.org/licenses/LICENSE-2.0
197 |
198 | Unless required by applicable law or agreed to in writing, software
199 | distributed under the License is distributed on an "AS IS" BASIS,
200 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
201 | See the License for the specific language governing permissions and
202 | limitations under the License.
203 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # ✨ [unicodex](https://unicodex.gl.asnt.app/) ✨
2 |
3 | Unicodex is a demo database-backed serverless Django application that uses:
4 |
5 | * [Django](https://djangoproject.com/) as the web framework,
6 | * [Google Cloud Run](https://cloud.google.com/run/) as the hosting platform,
7 | * [Google Cloud SQL](https://cloud.google.com/sql/) as the managed database (via [django-environ](https://django-environ.readthedocs.io/en/latest/)),
8 | * [Google Cloud Storage](https://cloud.google.com/storage/) as the media storage platform (via [django-storages](https://django-storages.readthedocs.io/en/latest/)),
9 | * [Google Cloud Build](https://cloud.google.com/cloud-build/) for build and deployment automation, and
10 | * [Google Secret Manager](https://cloud.google.com/secret-manager/) for managing encrypted values.
11 |
12 | ## Deployment
13 |
14 | This demo can be deployed by multiple different methods: via the Cloud Run button, through Terraform, or manually via a guided tutorial.
15 |
16 | ### Automated
17 |
18 | [](https://deploy.cloud.run)
19 |
20 | See `app.json` and the `.gcloud/` folder for implementation details.
21 |
22 | ### Terraform
23 |
24 | * Install [Terraform](https://learn.hashicorp.com/terraform/getting-started/install.html) and setup [authentication](docs/80-automation.md#install-terraform-and-setup-authentication)
25 | * Use Terraform to [provision infrastructure](docs/80-automation.md#provision-infrastructure)
26 | * Use Cloud Build to perform the initial [database migration](docs/80-automation.md#migrate-database)
27 |
28 | See `terraform/` for configuration details.
29 |
30 | ### Manual
31 |
32 | * [Try the application locally](docs/00-test-local.md) (*optional*)
33 | * Setup your [Google Cloud environment](docs/10-setup-gcp.md), then provision backing services:
34 | * a [Cloud SQL Instance](docs/20-setup-sql.md),
35 | * a [Cloud Storage Bucket](docs/30-setup-bucket.md), and
36 | * some [Secrets](docs/40-setup-secrets.md), then
37 | * Run your [first deployment](docs/50-first-deployment.md)
38 | * Automate [ongoing deployments](docs/60-ongoing-deployments.md) (*optional*)
39 |
40 | Don't forget to [cleanup your project resources](docs/90-cleanup.md) when you're done!
41 |
42 | ## Live deployments
43 |
44 | There are no current live deployments of this demo.
45 |
46 | ## Application Design
47 |
48 | ### Unicodex itself
49 |
50 | [Emojipedia](https://emojipedia.org/) curates information about emoji and how they are represented on different platforms. For example, the [Sparkles emoji](https://emojipedia.org/sparkles/) (✨) is mostly represented by three golden stars in a cluster, but this has changed over the years (click the sparkle image marked "Google" and you'll see how Sparkles has appeared in every version of Android; it used to look *very* different!).
51 |
52 | In Unicodex, these relationships are modelled as a **codepoint** (Sparkles) having multiple **designs** (images), where each image represents a **version** from a **vendor** (e.g. Google Android 9.0, Twitter Twemoji 1.0, ...). Four models represent this: `Codepoint`, `Design`, `VendorVersion` and `Vendor`, respectively. Each `Design` has a `FileField` that stores the image.
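As a rough illustration, the four models might hang together like this (a minimal sketch only; the canonical definitions live in `unicodex/models.py`, and the field names here are assumptions):

```python
from django.db import models


class Vendor(models.Model):
    name = models.CharField(max_length=100)  # e.g. "Google"


class VendorVersion(models.Model):
    name = models.CharField(max_length=100)  # e.g. "Android 9.0"
    vendor = models.ForeignKey(Vendor, on_delete=models.CASCADE)


class Codepoint(models.Model):
    name = models.CharField(max_length=100)  # e.g. "Sparkles"
    codepoint = models.CharField(max_length=10)  # e.g. "2728"


class Design(models.Model):
    codepoint = models.ForeignKey(Codepoint, on_delete=models.CASCADE)
    vendorversion = models.ForeignKey(VendorVersion, on_delete=models.CASCADE)
    image = models.FileField()  # the uploaded design image
```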
53 |
54 | In the Django admin, an admin action has been set up so that you can select a Codepoint and run the "Generate designs" action. This will -- for all configured vendors and vendor versions -- scrape Emojipedia for the relevant information, uploading the images along the way. Alternatively, you can enter this information manually from the Django admin.
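A sketch of what such an admin action can look like (hedged: the real implementation is in `unicodex/admin.py`, and the scraping helper here is a hypothetical stand-in for the logic under `unicodex/management/commands/`):

```python
from django.contrib import admin

from .models import Codepoint


def scrape_designs_for(codepoint):
    """Hypothetical placeholder for the Emojipedia scraping logic
    (the real logic lives under unicodex/management/commands/)."""


@admin.register(Codepoint)
class CodepointAdmin(admin.ModelAdmin):
    actions = ["generate_designs"]

    def generate_designs(self, request, queryset):
        # For each selected codepoint, fetch the design for every
        # configured vendor and vendor version, uploading the images.
        for codepoint in queryset:
            scrape_designs_for(codepoint)
        self.message_user(request, "Imported vendor versions")

    generate_designs.short_description = "Generate designs"
```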
55 |
56 | ### Service design - one deployment per Google Cloud project
57 |
58 | Unicodex runs as a Cloud Run service. Using the Python package `django-storages`, it takes a `GS_BUCKET_NAME` as the storage location for its media. Using the Python package `django-environ`, it takes a `DATABASE_URL` that points to a Cloud SQL PostgreSQL database. The `settings.py` is also designed to pull a specific secret into the environment. These components are all designed to live in the same Google Cloud project.
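The wiring described above might look roughly like the following in a `settings.py` (a sketch under those assumptions, not a copy of `unicodex/settings.py`; the secret name `django_settings` matches the one created in `.gcloud/prebuild.sh`):

```python
import io
import os

import environ
import google.auth
from google.cloud import secretmanager

env = environ.Env()

# If no local settings are present, pull the django_settings secret
# from Secret Manager into the environment.
if "DATABASE_URL" not in os.environ:
    _, project = google.auth.default()
    client = secretmanager.SecretManagerServiceClient()
    name = f"projects/{project}/secrets/django_settings/versions/latest"
    payload = client.access_secret_version(name=name).payload.data.decode("UTF-8")
    env.read_env(io.StringIO(payload))

# django-environ parses DATABASE_URL (including the Cloud SQL socket form)
# into Django's DATABASES setting.
DATABASES = {"default": env.db()}

# django-storages sends uploaded media to the named bucket.
GS_BUCKET_NAME = env("GS_BUCKET_NAME")
DEFAULT_FILE_STORAGE = "storages.backends.gcloud.GoogleCloudStorage"

SECRET_KEY = env("SECRET_KEY")
```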
59 |
60 | In this way, Unicodex runs 1:1:1 -- one Cloud Run Service, one Cloud SQL Database, one Google Storage bucket. It also assumes that there is *only* one service/database/bucket.
61 |
62 | ### Other service designs
63 |
64 | It is possible to host multiple instances of Unicodex in the one project (where the service name, bucket name, database name, and Django database username have different appended 'slugs', and all share one SQL instance), but this configuration is out of scope for this project.
65 |
66 | You can host multiple versions of Unicodex using project isolation (one Google Cloud account can have multiple projects) without any code changes, but this may not suit your own setup. [Read more about project organisation considerations](https://cloud.google.com/docs/enterprise/best-practices-for-enterprise-organizations#project-structure).
67 |
68 | ## Demo instances
69 |
70 | This repo was configured to automatically [build, migrate, and deploy](.cloudbuild/build-migrate-deploy.yaml) unicodex via Cloud Build in different demo projects.
71 |
72 | The different deployment methods (Terraform, Cloud Run Button, and manual) were tested weekly by creating [ephemeral projects](/experimental), deploying the application, testing it, then deleting the project. If the tests or deployment fails, the deletion doesn't happen and the issue can be investigated.
73 |
74 | Feel free to use these implementations as a basis for your own processes, as with all other code in this repo.
75 |
76 | ## Contributions
77 |
78 | Please see the [contributing guidelines](CONTRIBUTING.md).
79 |
80 | ## License
81 |
82 | This library is licensed under Apache 2.0. Full license text is available in [LICENSE](LICENSE).
83 |
84 |
--------------------------------------------------------------------------------
/app.json:
--------------------------------------------------------------------------------
1 | { "name": "unicodex",
2 | "hooks": {
3 | "prebuild": { "commands": [".gcloud/prebuild.sh"] },
4 | "postbuild": { "commands": [".gcloud/postbuild.sh"] },
5 | "precreate": { "commands": [".gcloud/precreate.sh"] },
6 | "postcreate": { "commands": [".gcloud/postcreate.sh"] }
7 | }
8 | }
9 |
--------------------------------------------------------------------------------
/docker-compose.yml:
--------------------------------------------------------------------------------
1 | version: '3'
2 |
3 | services:
4 | db:
5 | image: postgres:alpine
6 | environment:
7 | POSTGRES_PASSWORD: mysecretpassword
8 | POSTGRES_DB: unicodex
9 | volumes:
10 | - dev_data:/var/lib/postgresql/data
11 | web:
12 | build: .
13 | command: python manage.py runserver 0.0.0.0:8080
14 | volumes:
15 | - .:/app
16 | ports:
17 | - "8080:8080"
18 | depends_on:
19 | - db
20 |
21 | volumes:
22 | dev_data:
23 |
--------------------------------------------------------------------------------
/docs/00-test-local.md:
--------------------------------------------------------------------------------
1 |
2 | # Try the application locally
3 |
4 | *Before you get to deploying this application on Google Cloud, you can test the application locally with Docker and Docker Compose.*
5 |
6 | *In this section, we'll build the application on our local machine, and using the provided configuration file, we'll deploy the app locally.*
7 |
8 | ---
9 |
10 | You will need to install:
11 |
12 | * Docker Desktop
13 | * for Windows or macOS: use [Docker Desktop](https://www.docker.com/products/docker-desktop)
14 | * for Linux: use [Docker CE](https://docs.docker.com/install/) ([Ubuntu](https://docs.docker.com/install/linux/docker-ce/ubuntu/), [Debian](https://docs.docker.com/install/linux/docker-ce/debian/), [CentOS](https://docs.docker.com/install/linux/docker-ce/centos/), and [Fedora](https://docs.docker.com/install/linux/docker-ce/fedora/) have dedicated instructions)
15 | * [Docker Compose](https://docs.docker.com/compose/install/#install-compose)
16 |
17 | This local deployment will use the same image as our production deployment, but will make use of the included `docker-compose.yml` to connect the components together.
18 |
19 | ## Get a local copy of the code
20 |
21 | If you are familiar with `git`:
22 |
23 | ```
24 | git clone git@github.com:GoogleCloudPlatform/django-demo-app-unicodex
25 | cd django-demo-app-unicodex
26 | ```
27 |
28 | Otherwise, you can download and extract the latest [release](https://github.com/GoogleCloudPlatform/django-demo-app-unicodex/releases).
29 |
30 |
31 | ## Build the image
32 |
33 | Before we can use our image, we have to build it. The database image will be pulled down later, so we just need to manually build our web image:
34 |
35 | ```
36 | docker-compose build
37 | ```
38 |
39 | ## Configure the local environment
40 |
41 | Because you'll be running the image locally, you won't want to access Google Cloud services. The Django settings pull configurations
42 | from a service called Secret Manager if a `.env` file doesn't exist locally.
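43 |
44 | A sketch of that fallback, assuming the `google-cloud-secret-manager` client library and a secret named `django_settings` (the repo's `settings.py` has its own version of this logic; treat the names here as illustrative):
45 |
46 | ```python
47 | # settings.py (sketch): prefer a local .env, fall back to Secret Manager.
48 | import io, os
49 | import environ
50 |
51 | env = environ.Env()
52 | if os.path.isfile(".env"):
53 |     env.read_env(".env")
54 | else:
55 |     from google.cloud import secretmanager
56 |
57 |     client = secretmanager.SecretManagerServiceClient()
58 |     name = f"projects/{os.environ['PROJECT_ID']}/secrets/django_settings/versions/latest"
59 |     payload = client.access_secret_version(name=name).payload.data.decode("UTF-8")
60 |     env.read_env(io.StringIO(payload))
61 | ```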
43 |
44 | To bypass this, you will need to create a `.env` file populated with some default values. Use the `.env-local` as a base:
45 |
46 | ```
47 | cp .env-local .env
48 | ```
49 |
50 | This file uses configurations that match the expected values in `docker-compose.yml` for the database connection string, which is the most essential part of this setup.
51 |
52 | ## Initialise the database
53 |
54 | At the moment the database is empty. We can use standard Django commands to run our database migrations and add some default data; these instructions need to be run in the context of our web image:
55 |
56 | ```
57 | docker-compose run --rm web python manage.py migrate
58 | docker-compose run --rm web python manage.py loaddata sampledata
59 | ```
60 |
61 | **Tip**: `docker-compose run --rm` is quite long. You could create an alias for this command in your `.bashrc`. For example: `alias drc=docker-compose run --rm`. Adjust for your choice of terminal.
62 |
63 | ## Start the services
64 |
65 | Now that we have a database with data and a built web image, we can start them:
66 |
67 | ```
68 | docker-compose up
69 | ```
70 |
71 | You can now see unicodex running in your browser at [http://localhost:8080/](http://localhost:8080/)
72 |
73 |
74 | ## Testing your installation
75 |
76 | If you've loaded the sample data correctly, you'll have a display that shows the Waving emoji.
77 |
78 | [Clicking on the emoji][hand] shows the designs for that emoji. Of which, currently, there are none. That's okay, we'll add some.
79 |
80 | Go to the [django admin](http://localhost:8080/admin) and log in with the username and password from your `.env` file. From there, click on the ["Codepoint" model](http://localhost:8080/admin/unicodex/codepoint/). You should see one listing, `1F44B`. Now, select that listing by clicking on the checkbox on the far left side, and in the Action dropdown, select 'Generate designs for available vendor versions'.
81 |
82 | Your `docker-compose` window will show a lot of output. What this [admin action](https://docs.djangoproject.com/en/2.2/ref/contrib/admin/actions/) is doing is getting the Emojipedia page for ["waving hand sign"](https://emojipedia.org/waving-hand-sign/), and cross-referencing all the vendors it knows about; downloading and creating the Design objects as it goes.
83 |
84 | Reload the [waves page][hand], and there will now be some entries!
85 |
86 | [hand]: http://localhost:8080/u/1F44B
87 |
88 | ---
89 |
90 | Now that we have an understanding of our application, let's get it on the cloud.
91 |
92 | ---
93 |
94 | Next step: [Setup Google Cloud Platform environment](10-setup-gcp.md)
95 |
96 |
97 |
--------------------------------------------------------------------------------
/docs/10-setup-gcp.md:
--------------------------------------------------------------------------------
1 | # Setup Google Cloud Platform environment
2 |
3 | *The steps listed below are common to doing anything on Google Cloud. If you run into any issues, you can find many step-by-step tutorials on the topic.*
4 |
5 | ---
6 |
7 | In order to deploy on Google Cloud, you need to sign up (if you haven't already).
8 |
9 | To sign up, go to [cloud.google.com](https://cloud.google.com/) and click "Get started".
10 |
11 | Once you have signed up, you need to create a new project.
12 |
13 | Notes:
14 |
15 | * Project IDs are *globally unique* -- no one else can have the same project ID as you.
16 | * Project IDs cannot be changed after they have been created.
17 | * We're going to be referring to this ID as `PROJECT_ID`. A lot.
18 |
19 | You will also need to set up this project against a Billing Account. Some of the components we will provision will cost money, but new customers get [free credits](https://cloud.google.com/free).
20 |
21 | ---
22 |
23 | ### 🤔 Think about how long you want to keep this demo
24 |
25 | If you happen to already have a Google Cloud Platform account, create a new project for this tutorial demo. Don't use an existing project. That way, [cleanup](90-cleanup.md) will be much easier.
26 |
27 | ---
28 |
29 | We're also going to be using the command line utility (CLI) for Google Cloud, `gcloud`, wherever possible.
30 |
31 | Go to the [`gcloud` install website](https://cloud.google.com/sdk/docs/#install_the_latest_cloud_tools_version_cloudsdk_current_version) and download a version for your operating system. You'll then be guided through installing the tool and initialising it (which handles logging into Google Cloud, so that `gcloud` can perform operations as you).
32 |
33 | To test your `gcloud` CLI works and is up to date:
34 |
35 | ```shell,exclude
36 | gcloud --version
37 | ```
38 |
39 | If you see an "Updates are available" prompt, follow those update instructions.
40 |
41 | ---
42 |
43 | Next, we need to set our project ID in both the command-line and as an environment variable.
44 |
45 |
46 | Setting this as an environment variable means that when you copy and paste code from this tutorial, it will Just Work(tm). Note that this variable will only be set for your current terminal, so run it again if you open a new terminal window.
47 |
48 | ```shell
49 | export PROJECT_ID=YourProjectID
50 | gcloud config set project $PROJECT_ID
51 | ```
52 |
53 | You can check your current project settings by running:
54 |
55 | ```shell,exclude
56 | gcloud config list
57 | ```
58 |
59 | When we get to the Cloud Run sections, we'll be using the fully managed version of Cloud Run. To avoid having to define that platform setting each time (`--platform managed`), we can set a default:
60 |
61 | ```shell
62 | gcloud config set run/platform managed
63 | ```
64 |
65 | We'll also want to default to the `us-central1` region, which we can also tell `gcloud` about.
66 |
67 | ```shell
68 | export REGION=us-central1
69 | gcloud config set run/region $REGION
70 | ```
71 |
72 | ---
73 |
74 | Finally, we will be using a number of Google Cloud services in this tutorial. We can save time by enabling them ahead of time:
75 |
76 | ```shell
77 | gcloud services enable \
78 | run.googleapis.com \
79 | iam.googleapis.com \
80 | compute.googleapis.com \
81 | sql-component.googleapis.com \
82 | sqladmin.googleapis.com \
83 | cloudbuild.googleapis.com \
84 | cloudkms.googleapis.com \
85 | cloudresourcemanager.googleapis.com \
86 | secretmanager.googleapis.com
87 | ```
88 |
89 | This operation may take a few minutes to complete.
90 |
91 | ---
92 |
93 | While we're here making all these project configurations, we'll also take the time to set up our Service Account.
94 |
95 | ℹ️ While in theory this is not required, it is good practice when setting up complex systems like this to use explicitly named service accounts. This ensures we know what's going on at a glance, and prevents us from having to use the 'default service accounts', which might have more permissions than we want.
96 |
97 | Since this is the Unicodex project, we'll create a service account called unicodex.
98 |
99 | ```shell
100 | export SERVICE_NAME=unicodex
101 |
102 | gcloud iam service-accounts create $SERVICE_NAME \
103 | --display-name "$SERVICE_NAME service account"
104 | ```
105 |
106 |
107 | Now that this account exists, we'll be referring to it later by its email. We can take the time to store that now:
108 |
109 | ```shell
110 | export CLOUDRUN_SA=${SERVICE_NAME}@${PROJECT_ID}.iam.gserviceaccount.com
111 | ```
112 |
113 | (In this case "SA" refers to "Service Account".)
114 |
115 | We'll also need to tell this account what it's allowed to access. It needs to be allowed to [connect to our database](https://cloud.google.com/sql/docs/postgres/connect-run#configuring), and be our Cloud Run administrator:
116 |
117 | ```shell
118 | for role in cloudsql.client run.admin; do
119 | gcloud projects add-iam-policy-binding $PROJECT_ID \
120 | --member serviceAccount:$CLOUDRUN_SA \
121 | --role roles/${role}
122 | done
123 | ```
124 |
125 | We wouldn't normally have to be this explicit, but since we created a new service account from scratch, we need to give it a few specific roles.
126 |
127 | ---
128 |
129 | ℹ️ A note on permissions:
130 |
131 | This guided deployment makes liberal use of `gcloud`. When you set up `gcloud`, you configured it to run as you. That is, youremail@yourdomain.com. You are, by default, a member of the ["Owner" role](https://cloud.google.com/iam/docs/understanding-roles), so you have permission to do *anything* in `gcloud` for your project.
132 |
133 | But when we start getting into the [automation parts](50-first-deployment.md), 'we' won't be running these actions, our automation will. We really don't want our automation to be able to do just anything, so we're restricting it to just what it needs and nothing more.
134 |
135 | How do you reduce permissions? Work through your automation steps, identify exactly which actions you are invoking, and check whether any predefined [roles](https://console.cloud.google.com/iam-admin/roles) match your requirements. In our instance, we could create a custom role with specific permissions, limiting it to `run.services.get` and `run.services.update` instead of allowing `run.services.create` like the Cloud Run Admin role does. Starting with a service account that has no permissions and working through the PermissionDenied errors will gradually build up the minimum permissions required.
136 |
137 | If you are after ways in which to limit access across service accounts and IAM bindings in your own project, keep this method in mind.
138 |
139 | ---
140 |
141 | Now that we've set up our environment, it's time to set up some services. First up: databases!
142 |
143 | ---
144 |
145 | Next step: [Create a Cloud SQL Instance](20-setup-sql.md)
146 |
--------------------------------------------------------------------------------
/docs/20-setup-sql.md:
--------------------------------------------------------------------------------
1 | # Create a Cloud SQL Instance
2 |
3 | *In this section we will be creating a database instance, database, and user, and come away with our `DATABASE_URL`*
4 |
5 | To store our application data, we'll need to setup a [Cloud SQL](https://console.cloud.google.com/sql/instances) instance, and a specific database with a database user.
6 |
7 | ---
8 |
9 | **This section is one of the more complex parts of this tutorial.** But it's worth going to the effort.
10 |
11 | ---
12 |
13 | It's a *very good idea* to set up our database in a way that's going to be secure. You *could* run just the basic `gcloud sql` commands here to create an instance and user, but using those commands gives the user [too many permissions](https://cloud.google.com/sql/docs/postgres/users#default-users).
14 |
15 | The Cloud SQL API is designed to give the same functionality to multiple database implementations: (for the most part) the same commands will create databases and users in Postgres, MySQL, or in fact MSSQL instances. Since these databases are so different, there's no(t yet an) implementation for explicitly setting Postgres roles, so we have no option to set this in the API (which is used by both `gcloud` and the web Cloud Console).
16 |
17 | This is why we're taking the time to set things up explicitly. We'll create our instance and database, then take the time to create a low-access user that Django will use to login to the database.
18 |
19 | ---
20 |
21 | ### A note on generating passwords
22 |
23 | There are a number of secret strings for passwords and such that we will be generating in this tutorial. Ensuring a unique and complex password is paramount for security.
24 |
25 | For many of our examples, we'll be using this sequence to generate a random string:
26 |
27 | ```shell,exclude
28 | cat /dev/urandom | tr -dc 'a-zA-Z0-9' | fold -w 64 | head -n 1
29 | ```
30 |
31 | Breaking this command down: it takes input from [urandom](https://www.2uo.de/myths-about-urandom/), deletes any characters that aren't letters or numbers, folds the output into 64-character chunks, and returns the first line. As a mess of characters that is practically never going to return the same value twice, this is good enough for our purposes (and many others; a discussion of true randomness and the entire field of cryptography is outside the scope of this tutorial).
32 |
33 | There are other ways you can generate random strings, for example with pure python:
34 |
35 | ```shell,exclude
36 | python -c "import secrets; print(secrets.token_urlsafe(50))"
37 | ```
38 |
39 | This is a [Python standard library method](https://docs.python.org/3/library/secrets.html#secrets.token_urlsafe) that will generate a 50-byte string for us, or roughly 65 characters -- plenty long enough for a password.
40 |
41 | For our purposes, we'll stick to the `/dev/urandom` method.
42 |
43 | ---
44 |
45 | ### Database Instance
46 |
47 | The database instance creation process has many configuration options, as detailed by the [Create a Cloud SQL instance](https://cloud.google.com/sql/docs/postgres/quickstart#create-instance) section of the "Quickstart for Cloud SQL for PostgreSQL" tutorial.
48 |
49 | Some important notes:
50 |
51 | * The default configurations may work for you, but be sure to check if there's anything you want to change.
52 | * For instance, we've chosen a non-minimum instance size. [Learn more about Instance Settings](https://cloud.google.com/sql/docs/postgres/instance-settings).
53 | * Make sure you make note of the "Default User Password". We'll refer to this as `ROOT_PASSWORD`.
54 | * The instance creation will take **several minutes**. Do not worry.
55 |
56 | A sample version of what you'd end up running, choosing the latest Postgres version and generating a random password, would be the following:
57 |
58 | ```shell
59 | export INSTANCE_NAME=YourInstanceName
60 | export ROOT_PASSWORD=$(cat /dev/urandom | tr -dc 'a-zA-Z0-9' | fold -w 64 | head -n 1)
61 |
62 | gcloud sql instances create $INSTANCE_NAME \
63 | --database-version POSTGRES_13 --cpu 2 --memory 4GB \
64 | --region $REGION \
65 | --project $PROJECT_ID \
66 | --root-password $ROOT_PASSWORD
67 | ```
68 |
69 | We can confirm we've correctly setup this database for our project by checking for it in `gcloud`:
70 |
71 | ```shell,exclude
72 | gcloud sql instances list
73 | ```
74 |
75 | Google Cloud often uses a "fully-qualified" identifier to specify our database instance, which is a combination of our project ID, region, and the instance name itself. We can set this variable now to use it later on.
76 |
77 | ```shell
78 | export DATABASE_INSTANCE=$PROJECT_ID:$REGION:$INSTANCE_NAME
79 | ```
80 |
81 | This is a great time to note a disambiguation: we will talk about databases within database instances a lot, but when we use the three-segment version, we are talking about the Cloud SQL managed database instance.
82 |
83 | ### Our Database
84 |
85 | Our database **instance** can hold many **databases**. For our purposes, we're going to setup a `unicodex` database:
86 |
87 | ```shell
88 | export DATABASE_NAME=unicodex
89 | gcloud sql databases create $DATABASE_NAME \
90 | --instance=$INSTANCE_NAME
91 | ```
92 |
93 | ### A user that can access only our database.
94 |
95 | Finally, our user. This is where it gets complex.
96 |
97 | By default, [users created using Cloud SQL have the privileges associated with the `cloudsqlsuperuser` role](https://cloud.google.com/sql/docs/postgres/create-manage-users#creating), and we don't want our Django user to have such permissions. So, we'll have to create our user manually.
98 |
99 | To do this, we can connect directly to our new instance from the command line:
100 |
101 | ```shell,exclude
102 | gcloud sql connect $INSTANCE_NAME
103 | ```
104 |
105 | There will be a bit of output here. This is generated by [Cloud SQL Proxy](https://cloud.google.com/sql/docs/postgres/sql-proxy), which `gcloud` is using behind the scenes.
106 |
107 | But you will be prompted for the password for `SQL user [postgres]`. This is the `ROOT_PASSWORD` we set earlier.
108 |
109 | Once successfully connected, you'll be dropped into a postgres console. It will look something like this:
110 |
111 | ```shell,exclude
112 | psql (11.5)
113 | Type "help" for help.
114 |
115 | unicodex=>
116 | ```
117 |
118 | From here, the commands we need to execute are: creating our user, and giving it access to only our specific database:
119 |
120 | ```sql,exclude
121 | CREATE USER "<DBUSERNAME>" WITH PASSWORD '<DBPASSWORD>';
122 | GRANT ALL PRIVILEGES ON DATABASE "<DATABASE_NAME>" TO "<DBUSERNAME>";
123 | ```
124 |
125 | Some notes:
126 |
127 | * environment variables won't be expanded here; all the `<...>` placeholder terms will need to be replaced manually (note that the password is a string literal in single quotes).
128 | * Our `django` user needs `CREATE` and `ALTER` permissions to perform database migrations. It only needs these permissions on the database we created, not any other database in our instance. Hence, we're being explicit.
129 |
130 | ----
131 |
132 | You **could** create the user using just the `gcloud` command yourself, but there are some limitations to this method:
133 |
134 | * The `gcloud` command does not handle custom roles; the user's default role will be `cloudsqlsuperuser`, which is tremendously powerful for a Django database user.
135 | * You will have to go and change the role yourself afterwards.
136 |
137 | ```shell
138 | export DBUSERNAME=unicodex-django
139 | export DBPASSWORD=$(cat /dev/urandom | tr -dc 'a-zA-Z0-9' | fold -w 40 | head -n 1)
140 |
141 | gcloud sql users create $DBUSERNAME \
142 | --password $DBPASSWORD \
143 | --instance $INSTANCE_NAME
144 | ```
145 |
146 | ---
147 |
148 | ### Configuring our Database URL
149 |
150 | We now have all the elements we need to create our `DATABASE_URL`. It's a connection URL, a configuration format shared by many systems including [`django-environ`](https://django-environ.readthedocs.io/en/latest/).
151 |
152 | This string is **super secret**, so it's one of the secrets we'll be encrypting.
153 |
154 | To create the `DATABASE_URL`, we combine the `DBUSERNAME` and `DBPASSWORD` for the `DATABASE_NAME` on the `INSTANCE_NAME` we created, in whatever `REGION` it lives in:
155 |
156 | ```shell
157 | export DATABASE_URL=postgres://$DBUSERNAME:${DBPASSWORD}@//cloudsql/$PROJECT_ID:$REGION:$INSTANCE_NAME/$DATABASE_NAME
158 | ```
159 |
160 | We now have the environment variables we need! 🍪
161 |
162 | ---
163 |
164 | 🤔 Want to check your `DATABASE_URL`? You can use the "Database issues" section in the [debugging docs](zz_debugging.md)
165 |
166 | ---
167 |
168 | Next step: [Create a Cloud Storage Bucket](30-setup-bucket.md)
169 |
--------------------------------------------------------------------------------
/docs/30-setup-bucket.md:
--------------------------------------------------------------------------------
1 | # Create a Cloud Storage Bucket
2 |
3 | *In this section we will be creating a place to store our media assets.*
4 |
5 | ---
6 |
7 | We need to create a new bucket to store our media assets (django admin assets, design images, etc).
8 |
9 | Bucket names are **globally unique** across all of Google Cloud, so consider the scope of the bucket when you create it.
10 |
11 | We suggest that you name it something unique to the project, while still noting its purpose. Here's what we suggest:
12 |
13 | ```shell
14 | export GS_BUCKET_NAME=${PROJECT_ID}-media
15 | ```
16 |
17 | Now, create our bucket using the [Google Storage utility](https://cloud.google.com/storage/docs/gsutil) `gsutil` (which was installed for you when you installed `gcloud`):
18 |
19 | ```shell
20 | gsutil mb -l ${REGION} gs://${GS_BUCKET_NAME}
21 | ```
22 |
23 | We'll also need to give our service account permission to operate on this bucket (which it needs for the Django admin action based [storage object alteration](https://cloud.google.com/storage/docs/access-control/using-iam-permissions#bucket-add)):
24 |
25 | ```shell
26 | gsutil iam ch \
27 | serviceAccount:${CLOUDRUN_SA}:roles/storage.objectAdmin \
28 | gs://${GS_BUCKET_NAME}
29 | ```
30 |
31 | ---
32 |
33 | We now have somewhere to store our media! 🖼
34 |
35 | ---
36 |
37 | Next step: [Create some secrets](40-setup-secrets.md)
38 |
--------------------------------------------------------------------------------
/docs/40-setup-secrets.md:
--------------------------------------------------------------------------------
1 | # Create some Secrets
2 |
3 | *In this section, we will setup some secrets!*
4 |
5 | To store our secrets, we'll be using [Secret Manager](https://cloud.google.com/secret-manager/docs).
6 |
7 | ----
8 |
9 | > But why?
10 |
11 | It's an *exceedingly good idea* to ensure that only our application can access our database. To that end, we spent a whole lot of time setting up passwords. It's also a really good idea if only our application has access to these passwords.
12 |
13 | Plus, we'll be using `django-environ` later, which is directly influenced by [The Twelve Factor App](https://12factor.net/). You can read up how [Cloud Run complies with the Twelve Factor application](https://cloud.google.com/blog/products/serverless/a-dozen-reasons-why-cloud-run-complies-with-the-twelve-factor-app-methodology).
14 |
15 | This setup looks a bit long, but application security is no joke, and this is an important part of our app to set up.
16 |
17 | ---
18 |
19 | Back in [an earlier step](10-setup-gcp.md) we enabled the Secret Manager API, so we now have access to use it. Now we can create our secrets.
20 |
21 | There are five secrets we need to create.
22 |
23 | Three are our base django secrets:
24 |
25 | * `DATABASE_URL`, with the value of the `DATABASE_URL` we built [earlier](20-setup-sql.md),
26 | * `SECRET_KEY`, a minimum 50 character random string for django's `SECRET_KEY`,
27 | * `GS_BUCKET_NAME`, the media bucket we [created earlier](30-setup-bucket.md).
28 |
29 | We'll also need an additional two for our django admin login (`/admin`):
30 |
31 | * `SUPERUSER`, a superuser name (`admin`? your name?)
32 | * `SUPERPASS`, a secret password, using our generator from earlier.
33 |
34 | Also, for each of these secrets, we need to define *who* can access them.
35 |
36 | In our case, we want only Cloud Run and Cloud Build (for [automating deployments](60-ongoing-deployments.md) later) to be able to view our secrets. In order to do that, we need to get their service account names.
37 |
38 | We know our Cloud Run service account, because we explicitly created it earlier. So we just need our Cloud Build account. It was automatically created for us when we enabled the Cloud Build API, and is identified by an email address that uses our project number (rather than the project ID we've been using so far):
39 |
40 | ```shell
41 | export PROJECTNUM=$(gcloud projects describe ${PROJECT_ID} --format 'value(projectNumber)')
42 | export CLOUDBUILD_SA=${PROJECTNUM}@cloudbuild.gserviceaccount.com
43 | ```
44 |
45 | ---
46 |
47 | We can reduce the number of secrets that need to be stored by introducing some minor complexity: django-environ accepts a `.env` file of `key=value` pairs. We can create a file of the settings that Django will always require: the database connection string, media bucket, and secret key. The admin username and password can stay separate, with more restricted access.
48 |
49 | Create a `.env` file with the values defined earlier:
50 |
51 | ```shell
52 | echo DATABASE_URL=\"${DATABASE_URL}\" > .env
53 | echo GS_BUCKET_NAME=\"${GS_BUCKET_NAME}\" >> .env
54 | echo SECRET_KEY=\"$(cat /dev/urandom | tr -dc 'a-zA-Z0-9' | fold -w 50 | head -n 1)\" >> .env
55 | ```
56 |
57 | Then, create the secret and assign the services access:
58 |
59 | ```shell
60 | gcloud secrets create django_settings --replication-policy automatic --data-file .env
61 |
62 | gcloud secrets add-iam-policy-binding django_settings \
63 | --member serviceAccount:$CLOUDRUN_SA \
64 | --role roles/secretmanager.secretAccessor
65 |
66 | gcloud secrets add-iam-policy-binding django_settings \
67 | --member serviceAccount:$CLOUDBUILD_SA \
68 | --role roles/secretmanager.secretAccessor
69 | ```
70 |
71 | These commands will:
72 |
73 | * create the secret, with the initial version being the secret from the file, and
74 | * allow our service account to access the secret.
75 |
76 | As for the admin username and password secrets, they should only be accessed by Cloud Build:
77 |
78 | ```shell
79 | export SUPERUSER="admin"
80 | export SUPERPASS=$(cat /dev/urandom | tr -dc 'a-zA-Z0-9' | fold -w 30 | head -n 1)
81 |
82 | for SECRET in SUPERUSER SUPERPASS; do
83 | gcloud secrets create $SECRET --replication-policy automatic
84 |
85 | echo -n "${!SECRET}" | gcloud secrets versions add $SECRET --data-file=-
86 |
87 | gcloud secrets add-iam-policy-binding $SECRET \
88 | --member serviceAccount:$CLOUDBUILD_SA \
89 | --role roles/secretmanager.secretAccessor
90 | done
91 | ```
92 |
93 | Some of the bash tricks we're using here:
94 |
95 | * Many of the commands are very similar, so we're using `for` loops a lot.
96 | * The `${!var}` syntax expands the value of the variable whose name is stored in `var`, which lets us reference variables dynamically. This works in bash, but may not work in other shells. Running all these scripts in bash is a good idea, just in case this eccentricity doesn't work in your shell.
97 | * The `-n` in `echo` makes sure we don't accidentally save any trailing newline characters to our secret.
98 | * You can call `secrets create` with a `--data-file` once, or you can use `secrets create`/`secrets versions add`. When you need to update a secret, just repeat `secrets versions add`.
99 |
100 | ---
101 |
102 | If you *need* to get the **value** of these secrets, you can run the following:
103 |
104 | ```shell,exclude
105 | gcloud secrets versions access latest --secret $SECRET
106 | ```
107 |
108 | ---
109 |
110 | You now have all the secrets you need to deploy django securely! 🤫
111 |
112 | ---
113 |
114 | Next step: [First Deployment](50-first-deployment.md)
115 |
--------------------------------------------------------------------------------
/docs/50-first-deployment.md:
--------------------------------------------------------------------------------
1 | # First Deployment
2 |
3 | *In this section, we'll create our first deployment.*
4 |
5 | **We're nearly there!**
6 |
7 | ---
8 |
9 | We're up to deploying our project for the first time!
10 |
11 | This is going to be a bit complicated at first, but the work we do here will make every other deployment much simpler.
12 |
13 | You'll remember way back when we were [testing locally](00-test-local.md) we used two images: a `web` image and a `db` image. `db` will be replaced with the proper PostgreSQL database we've [already set up](20-setup-sql.md). So we just need to build the other image!
14 |
15 | For that, we'll be using Cloud Build. This is a similar process to `docker build .`, but this will build the image on Google Cloud itself, and publish the image to the Google Cloud image repository, `gcr.io`.
16 |
17 | If you haven't already done so, you'll need a copy of this source code:
18 |
19 | ```shell,exclude
20 | git clone https://github.com/GoogleCloudPlatform/django-demo-app-unicodex
21 | cd django-demo-app-unicodex
22 | ```
23 |
24 | Then, we'll build the image:
25 |
26 | ```shell
27 | gcloud builds submit --tag gcr.io/$PROJECT_ID/$SERVICE_NAME .
28 | ```
29 |
30 | Then, we can (finally!) create our Cloud Run service, telling it about the image we just created, the database we configured earlier, and the service account we set up. And just for good measure, we'll allow public access:
31 |
32 | ```shell
33 | gcloud run deploy $SERVICE_NAME \
34 | --allow-unauthenticated \
35 | --image gcr.io/$PROJECT_ID/$SERVICE_NAME \
36 | --add-cloudsql-instances $PROJECT_ID:$REGION:$INSTANCE_NAME \
37 | --service-account $CLOUDRUN_SA
38 | ```
39 |
40 | *Note:* We are using the fully-qualified database instance name here. Although not strictly required, as our database is in the same project and region, it helps with clarity.
41 |
42 | Sadly, we have a few more steps. Even though we have deployed our service, **Django won't work yet**. You could try and navigate to the `unicodex-HASH-region.a.run.app` URL, but it will show an error. This is because:
43 |
44 | * We need to tell Django where to expect our service to run, and
45 | * we need to initialise our database.
46 |
47 | #### Service URL, CSRF, and `ALLOWED_HOSTS`
48 |
49 | Django has a setting called `ALLOWED_HOSTS`, which is recommended to be defined for [security purposes](https://docs.djangoproject.com/en/3.0/ref/settings/#allowed-hosts). We want to set our `ALLOWED_HOSTS` to the service URL of the site we just deployed.
50 |
51 | New in Django 4.0, we will also want to set the [`CSRF_TRUSTED_ORIGINS` setting](https://docs.djangoproject.com/en/4.0/ref/csrf/) as well. [Read more about how not setting this can break Cloud Run deployments](https://cloud.google.com/blog/topics/developers-practitioners/follow-pink-pony-story-csrf-managed-services-and-unicorns).
52 |
53 | When we deployed our service, it told us the service URL our site can be accessed from. We can either copy the URL from the output of the last step, or we can get it from `gcloud`:
54 |
55 | ```shell,exclude
56 | gcloud run services list
57 | ```
58 |
59 | We could copy the URL from this output, or we can use a [`--format`](https://dev.to/googlecloud/giving-format-to-your-gcloud-output-57gm) parameter:
60 |
61 | ```shell
62 | export SERVICE_URL=$(gcloud run services describe $SERVICE_NAME \
63 | --format "value(status.url)")
64 |
65 | echo $SERVICE_URL
66 | ```
67 |
68 | Then, we can redeploy our service, updating *just* this new environment variable:
69 |
70 | ```shell
71 | gcloud run services update $SERVICE_NAME \
72 | --update-env-vars "CURRENT_HOST=${SERVICE_URL}"
73 | ```
74 |
75 | In this case, `CURRENT_HOST` is set up in our [settings.py](../unicodex/settings.py) to be parsed into the `ALLOWED_HOSTS` and `CSRF_TRUSTED_ORIGINS` settings.
76 |
77 | **Important**: Django's `ALLOWED_HOSTS` takes a hostname without a scheme (i.e. without the leading 'https'). Our `settings.py` handles this by removing it, if it appears.
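78 |
79 | A sketch of that parsing, based on the behaviour described above (the project's [settings.py](../unicodex/settings.py) is the source of truth):
80 |
81 | ```python
82 | # settings.py (sketch): derive host settings from CURRENT_HOST.
83 | import os
84 |
85 | ALLOWED_HOSTS = []
86 | CSRF_TRUSTED_ORIGINS = []
87 |
88 | current_host = os.environ.get("CURRENT_HOST")  # e.g. https://unicodex-xxxx-uc.a.run.app
89 | if current_host:
90 |     CSRF_TRUSTED_ORIGINS.append(current_host)                   # keeps the scheme
91 |     ALLOWED_HOSTS.append(current_host.replace("https://", ""))  # scheme removed
92 | ```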
78 |
79 | #### Initialising Database
80 |
81 | Our database currently has no schema or data, so we need to now set that up.
82 |
83 | Back in our [local testing](00-test-local.md), we did this by executing `migrate` and `loaddata` from the command line.
84 |
85 | The problem is, we don't have a command-line. 🤦♂️
86 |
87 | Well, we do, we have Cloud Shell; but that's not really *automatable*. We don't want to have to log into the console every time we have to run a migration. Well, okay, we *could*, and this is absolutely a valid option if your setup requires it, but for the scope of our application, we're going to take the time now to automate migrations as part of our deployment.
88 |
89 | We're going to use [Cloud Build](https://cloud.google.com/cloud-build/), and instead of just building the image like we did earlier, we'll make it build our image, apply our database migrations, and deploy our service, all at once.
90 |
91 | But to start, we need to give Cloud Build permission to do all these fancy things (like [deployment](https://cloud.google.com/run/docs/reference/iam/roles#additional-configuration)):
92 |
93 | ```shell
94 | for role in cloudsql.client run.admin; do
95 | gcloud projects add-iam-policy-binding ${PROJECT_ID} \
96 | --member serviceAccount:${CLOUDBUILD_SA} \
97 | --role roles/${role}
98 | done
99 | ```
100 |
101 | We'll also need to ensure that, for the final step in our deployment, Cloud Build has permission to deploy as our Cloud Run service account. For that, we'll configure Cloud Build to [act as our service account](https://cloud.google.com/run/docs/continuous-deployment-with-cloud-build#continuous-iam):
102 |
103 | ```shell
104 | gcloud iam service-accounts add-iam-policy-binding ${CLOUDRUN_SA} \
105 | --member "serviceAccount:${CLOUDBUILD_SA}" \
106 | --role "roles/iam.serviceAccountUser"
107 | ```
108 |
109 | You can check the current roles by:
110 |
111 | * running `gcloud projects get-iam-policy $PROJECT_ID`, or
112 | * going to the [IAM & Admin](https://console.cloud.google.com/iam-admin/iam) page in the console.
113 |
114 | From here, we can then run our `gcloud builds submit` command again, but this time specifying a configuration file:
115 |
116 | ```shell
117 | # migrate and deploy
118 | gcloud builds submit \
119 | --config .cloudbuild/build-migrate-deploy.yaml \
120 | --substitutions "_REGION=${REGION},_INSTANCE_NAME=${INSTANCE_NAME},_SERVICE=${SERVICE_NAME}"
121 | ```
122 |
123 | This command will take a few minutes to complete, but the output will show you what it's doing as it goes.
124 |
125 | As suggested by the `--config` filename, this will perform three tasks for us:
126 |
127 | * Build the image (like we were doing before),
128 | * 'migrate', and
129 | * deploy the service with the new image.
130 |
131 | By 'migrate', we mean:
132 |
133 | * Configuring an environment using the secrets we set up earlier, to then
134 | * run the Django management commands:
135 | * `./manage.py migrate`, which applies our database migrations
136 | * `./manage.py collectstatic`, which uploads our local static files to the media bucket
137 | * ⚠️ These commands need to be run at least once, but you can choose to remove this part of the script later (for instance, if you want to manually do database migrations).
138 |
139 | The full contents of the script are in [.cloudbuild/build-migrate-deploy.yaml](../.cloudbuild/build-migrate-deploy.yaml).
140 |
141 |
142 | Noted custom configurations:
143 |
144 | * We use `gcr.io/google-appengine/exec-wrapper` as an easier way to set up a Cloud SQL proxy to interface with our database.
145 | * We use the Secret Manager client library to import specific secrets in our `settings.py`
146 | * We are explicitly doing all these things as three steps in one configuration.
147 |
148 | We are also running this command with [substitutions](https://cloud.google.com/cloud-build/docs/configuring-builds/substitute-variable-values#using_user-defined_substitutions). These allow us to change the image, service, and database instance (which will be helpful later on when we define multiple environments). You can hardcode these yourself by commenting out the `substitutions:` stanza in the yaml file.
149 |
150 | ---
151 |
152 | And now.
153 |
154 | Finally.
155 |
156 | You can see the working website!
157 |
158 | Go to the `SERVICE_URL` in your browser, and gaze upon all you created.
159 |
160 | You did it! 🏆
161 |
162 | You can also log in with the `SUPERUSER`/`SUPERPASS` credentials, and run the admin action we did in [local testing](00-test-local.md).
163 |
164 | ---
165 |
166 | 🤔 But what if all this didn't work? Check the [Debugging Steps](zz_debugging.md).
167 |
168 | ---
169 |
170 | If this is as far as you want to take this project, think about [cleaning up](90-cleanup.md) your resources.
171 |
172 | ---
173 |
174 | After all this work, each future deployment is exceedingly less complex.
175 |
176 | ---
177 |
178 | Next step: [Ongoing Deployments](60-ongoing-deployments.md)
179 |
--------------------------------------------------------------------------------
/docs/60-ongoing-deployments.md:
--------------------------------------------------------------------------------
1 |
2 | # Ongoing Deployments
3 |
4 | *In this step, we're going to automate the deployment we manually executed in the last step*
5 |
6 | ---
7 |
8 | ### *Cloud Build Triggers is in beta*
9 |
10 | Please note that the following descriptions may change as functionality is updated in this beta.
11 |
12 | ---
13 |
14 | We're going to get our service setup with continuous deployment by adding a build trigger through Cloud Build.
15 |
16 | To start, you'll need to have your copy of this repo's code set up in its own repo. We'll be using GitHub for this tutorial, but you can also do this with a Cloud Source Repository.
17 |
18 | If you're unfamiliar with forking a repository, you can follow [GitHub's tutorial on the subject](https://help.github.com/en/github/getting-started-with-github/fork-a-repo).
19 |
20 | We're going to set up the `master` branch on our fork to deploy to our `unicodex` service on merge.
21 |
22 | ---
23 |
24 | Before we can setup our trigger, we're going to have to [connect to our source repository](https://cloud.google.com/cloud-build/docs/running-builds/create-manage-triggers#connecting_to_source_repositories). The full instructions for this are on the [Google Cloud docs page](https://cloud.google.com/cloud-build/docs/running-builds/create-manage-triggers)
25 |
26 | You'll need to sign into GitHub (or wherever you made a copy of your repo), then install Google Cloud Build as an application against your repository (or all your repositories, if you wish).
27 |
28 | **Note**: If you have already installed Cloud Build and did not allow access to all repositories, you'll have to add more from the "Edit repositories on GitHub ⬀" link.
29 |
30 | Then, select the repo you have this code in, noting the disclaimer on access rights.
31 |
32 | Finally, click 'Skip' to skip implementing the suggested default trigger. We'll be making our own.
33 |
34 | ---
35 |
36 | Now that we've connected our accounts, we can setup the trigger.
37 |
38 | This command is similar to the `gcloud builds submit --config` command we ran in the [last section](50-first-deployment.md), and for good reason. Instead of having to manually choose when we run this command, we're essentially setting up a listener to do this for us.
39 |
40 | For this command, you'll have to define your own GitHub username as the `REPO_OWNER`:
41 |
42 | ```shell,exclude
43 | REPO_OWNER=you
44 |
45 | gcloud beta builds triggers create github \
46 | --repo-name django-demo-app-unicodex \
47 | --repo-owner ${REPO_OWNER} \
48 | --branch-pattern master \
49 | --build-config .cloudbuild/build-migrate-deploy.yaml \
50 | --substitutions "_REGION=${REGION},_INSTANCE_NAME=${INSTANCE_NAME},_SERVICE=${SERVICE_NAME}"
51 | ```
52 |
53 | With this setup, any time we push code to the `master` branch, our service will be deployed.
54 |
55 | You can test that this works by making a pull request on your own repo, merging it, and seeing your changes automatically deployed.
56 |
57 | What could you change?
58 |
59 | * Want to change the home page from "✨ Unicodex ✨"?
60 | * Try changing the `block title` on `unicodex/templates/index.html`
61 | * Want to add another field to the Codepoint display?
62 | * Try adding a new field in `unicodex/models.py` (see the sketch after this list)
63 | * Be sure to add this new field on `unicodex/templates/codepoint.html`
64 | * Make sure you run `./manage.py makemigrations` and commit the new `migrations` file it generates!
65 | * Want to add something more?
66 | * Go wild! ✨
67 |
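68 | As an illustrative sketch of the "add another field" suggestion above (the field name is hypothetical):
69 |
70 | ```python
71 | # unicodex/models.py (sketch): a hypothetical extra field on Codepoint.
72 | from django.db import models
73 |
74 | class Codepoint(models.Model):
75 |     # ...existing fields...
76 |     year_introduced = models.PositiveIntegerField(null=True, blank=True)
77 | ```
78 |
79 | Running `./manage.py makemigrations` after a change like this generates the migration file that `migrate` applies on the next deploy.
80 |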
68 | ---
69 |
70 | We only implemented one trigger here.
71 |
72 | You could customise this for your own project in a number of ways.
73 |
74 | Perhaps make use of the [included files](https://cloud.google.com/cloud-build/docs/running-builds/automate-builds#build_trigger) feature, and trigger a build that runs database migrations only if there have been changes to files in `unicodex/migrations/*`. You could then remove that step from the unconditional `master` branch build.
75 |
76 | Using [substitution variables](https://cloud.google.com/cloud-build/docs/configuring-builds/substitute-variable-values#using_user-defined_substitutions), you could set up multiple triggers: one that deploys `master` to a staging environment, and another that deploys tagged releases to production. Changing the substitution variables allows you to use the same code and aim the deploy at different places.
77 |
78 | You could also take advantage of [build concurrency](https://cloud.google.com/cloud-build/docs/configuring-builds/configure-build-step-order), if you have steps that don't need to be run one at a time.
79 |
80 | You can also skip builds entirely if the commit message includes the string [`skip ci`](https://cloud.google.com/cloud-build/docs/running-builds/automate-builds#skipping_a_build_trigger).
81 |
82 | ---
83 |
84 | Next step: None! You're done! 🧁
85 |
86 | But if you really want to, you can [automate this entire process with Terraform](80-automation.md).
87 |
88 | ---
89 |
90 | Don't forget to [clean-up](90-cleanup.md) your resources if you don't want to continue running your app.
91 |
92 | ---
93 |
94 |
--------------------------------------------------------------------------------
/docs/70-manual-deployments.md:
--------------------------------------------------------------------------------
1 | # Manual Deployments
2 |
3 | *In this section, we'll discuss manual deployment strategies.*
4 |
5 | While deployments are automatable, there may be times when automatically deploying whenever a pull request is merged might not be advantageous:
6 |
7 | * You have to adhere to specific maintenance windows.
8 | * You want to manually review potential database schema changes, and have a human present in case a migration fails.
9 | * You have other processes that require a manual review.
10 |
11 |
12 | In these cases, you may need to manually run Django commands against your deployment.
13 |
14 | Cloud Run doesn't provide a way to run one-off shell commands directly on a running container. Instead, you will need to configure Cloud SQL Proxy, and run a locally built image of your application.
15 |
16 | This section describes how this can be achieved, assuming you have completed the initial unicodex deployment.
17 |
18 | ---
19 |
20 | **Note**: This section deals with performing ad hoc database commands on live databases, and downloading credential files to local file systems. Ensure you understand the processes before running any commands.
21 |
22 | ---
23 |
24 | In the [testing locally](00-test-local.md) section, we used `docker-compose.yml` to run a local PostgreSQL database against our Django application. This time, we will use a similar manifest that uses a proxy image to connect to our production database from our local machine.
25 |
26 | ### docker-proxy.yml
27 |
28 | ```yaml
29 | version: '3'
30 |
31 | services:
32 | unicodex:
33 | build: .
34 | volumes:
35 | - .:/app
36 | ports:
37 | - "8080:8080"
38 |
39 | cloudsql-proxy:
40 | container_name: cloudsql-proxy
41 | image: gcr.io/cloudsql-docker/gce-proxy
42 | command: /cloud_sql_proxy --dir=/cloudsql -instances=PROJECT_ID:REGION:psql=tcp:0.0.0.0:5432 -credential_file=/secrets/cloudsql/credentials.json
43 | ports:
44 | - 5432:5432
45 | volumes:
46 | - ./credentials.json:/secrets/cloudsql/credentials.json
47 | restart: always
48 | ```
49 |
50 | *This manifest is based on [this stack overflow answer](https://stackoverflow.com/a/48431559/124019)*.
51 |
52 | **What this manifest does**: It uses a docker image of the [Cloud SQL Proxy](https://github.com/GoogleCloudPlatform/cloudsql-proxy) in a container named `cloudsql-proxy`, mapping it to your instance using your credentials file. It also explicitly names the web container. Note: if you miss the `-f` flag in later commands, you will end up running the existing `docker-compose.yml` file, using the local database.
53 |
54 | **What you need to do**:
55 |
56 | * Replace `PROJECT_ID:REGION` with your project ID and region.
57 | * Provide credentials and secrets.
58 |
59 | For credentials: Previously we could have relied on local `gcloud` settings, but given these commands will be run in a containerised environment, they need to be explicit.
60 |
61 | You can either create a new service account like in [section one](10-setup-gcp.md), or re-use this account.
62 |
63 | Save a copy of the credentials locally:
64 |
65 | ```shell,exclude
66 | gcloud iam service-accounts keys create credentials.json \
67 | --iam-account youraccount@${PROJECT_ID}.iam.gserviceaccount.com
68 | ```
69 |
70 | For settings: You will also need to use an altered version of your `django_settings`; the database name will have to change to suit.
71 |
72 | If you don't still have a local copy of `.env`, download it:
73 |
74 | ```shell,exclude
75 | gcloud secrets versions access latest --secret django_settings > .env
76 | ```
77 |
78 | In this file, replace `@//cloudsql/PROJECT_ID:REGION:psql` with `@cloudsql-proxy`. This replaces the fully-qualified Cloud SQL instance name in the host portion with the container name from the manifest.
79 |
80 | The `DATABASE_URL` value should now read: `"postgres://unicodex-django:PASSWORD@cloudsql-proxy/unicodex"`
81 |
82 | This file is picked up automatically by the web container via the `volumes` line mounting the current folder into `/app`.
83 |
84 | ---
85 |
86 | From here, you will need to start the database container:
87 |
88 | ```shell,exclude
89 | docker-compose -f docker-proxy.yml up cloudsql-proxy
90 | ```
91 |
92 | And then in another terminal, run a command against the database. For example, a migration plan:
93 |
94 | ```shell, exclude
95 | docker-compose -f docker-proxy.yml run --rm unicodex python manage.py migrate --plan
96 | ```
97 |
98 | This will show what would be run if a migration were to be executed. If you have completed your migrations, the output should be:
99 |
100 | ```
101 | Planned operations:
102 | No planned migration operations.
103 | ```
104 |
105 | ---
106 |
107 | If you want to be able to run `dbshell`, you will have to install `psql` into your original docker image.
108 |
109 | Add the following line to the `Dockerfile`, before the other `RUN` command:
110 | ```exclude
111 | RUN apt-get update && apt-get install postgresql postgresql-contrib -y --no-install-recommends
112 | ```
113 |
114 | This will update the package index cache within the container, and install postgres, without installing all the other recommended packages.
115 |
116 | You should then be able to build your image and run dbshell:
117 |
118 | ```shell,exclude
119 | docker-compose -f docker-proxy.yml build unicodex
120 | docker-compose -f docker-proxy.yml run unicodex python manage.py dbshell
121 | ```
122 |
123 | You should then get a `psql` shell into your deployed database:
124 |
125 | ```
126 | psql (11.7 (Debian 11.7-0+deb10u1), server 11.8)
127 | Type "help" for help.
128 |
129 | unicodex=>
130 | ```
131 |
132 | The `server 11.8` is the Postgres 11 server you deployed.
133 |
134 | You can also confirm it's your production server by checking the codepoints you added, compared to your local testing instance:
135 |
136 | ```
137 | unicodex=> select * from unicodex_codepoint;
138 | id | name | description | codepoint | emojipedia_name | order
139 | ----+---------+-------------------+-----------+------------------+-------
140 | 3 | Runners | Gotta go fast. | 1F45F | running-shoe | 3
141 | 2 | Cloud | Light and fluffy! | 2601 | | 2
142 | 1 | Waving | Oh hi! | 1F44B | waving-hand-sign | 1
143 | (3 rows)
144 | ```
145 |
--------------------------------------------------------------------------------
/docs/80-automation.md:
--------------------------------------------------------------------------------
1 | # Automation
2 |
3 | If you're joining us after having completed the rest of the tutorial: it was complex, but it should have provided some insight into what was being configured.
4 |
5 | We don't have to do this manual process every time we create a new complex service. It makes sense to do it at least *once*, to learn and understand what's going on.
6 |
7 | But for deploying multiple setups, such as the project separation suggested in the [last section](60-ongoing-deployments.md), using provisioning automation makes sense.
8 |
9 | [Terraform](https://www.terraform.io/) allows us to automate infrastructure provisioning, and comes with a [Google Cloud Platform Provider](https://www.terraform.io/docs/providers/google/index.html) out of the box.
10 |
11 | This tutorial isn't a full Terraform 101, but it should help guide you along the process.
12 |
13 | 💡 If you want to, run this section in a new project. That way you can compare and contrast it with your original project. Ensure you set your `gcloud config set project` and `$PROJECT_ID` before continuing!
14 |
15 | ---
16 |
17 | ## Install Terraform and setup authentication
18 |
19 | To start with, you'll need to [install Terraform](https://learn.hashicorp.com/terraform/getting-started/install.html) for your operating system.
20 |
21 | Once that's setup, you'll need to create a [new service account](https://www.terraform.io/docs/providers/google/getting_started.html#adding-credentials) that has Owner rights to your project, and [export an authentication key](https://cloud.google.com/iam/docs/creating-managing-service-account-keys) to that service account that Terraform can use.
22 |
23 | ```shell,exclude
24 | # Setup gcloud for your project
25 | export PROJECT_ID=YourProjectID
26 | gcloud config set project $PROJECT_ID
27 |
28 | # Create the service account
29 | gcloud iam service-accounts create terraform \
30 | --display-name "Terraform Service Account"
31 |
32 | # Grant owner permissions
33 | gcloud projects add-iam-policy-binding ${PROJECT_ID} \
34 | --member serviceAccount:terraform@${PROJECT_ID}.iam.gserviceaccount.com \
35 | --role roles/owner
36 |
37 | # create and save a local private key
38 | gcloud iam service-accounts keys create ~/terraform-key.json \
39 | --iam-account terraform@${PROJECT_ID}.iam.gserviceaccount.com
40 |
41 | # store location of private key in environment that terraform can use
42 | export GOOGLE_APPLICATION_CREDENTIALS=~/terraform-key.json
43 |
44 | # enable the resource API and IAM APIs
45 | gcloud services enable \
46 | cloudresourcemanager.googleapis.com \
47 | iam.googleapis.com
48 | ```
49 |
50 | ---
51 |
52 | 🤔 Didn't we already do this authentication step?
53 |
54 | We did, back in [the GCP setup section](10-setup-gcp.md); we authenticated to let `gcloud` act as "us". "Us", in this case, is your login to the Google Cloud Console, where you get the Project Owner role -- universal admin rights. There are some parts of this setup that require the same level of access as "us".
55 |
56 | ---
57 |
58 | 🧐 But why not just authenticate as ourselves?
59 |
60 | You could use `gcloud auth application-default login`, as other tutorials may suggest, but we aren't going to. There are reasons.
61 |
62 | In the identity model that Google Cloud uses, service accounts belong to the project in which they were created. For example, a service account example@yourproject.iam.gserviceaccount.com belongs to the yourproject project. You can grant a service account a role in another project, but it will always be owned by the project in its identifying email address.
63 |
64 | Your identity (yourname@gmail.com) also belongs to a project, just not one you have access to. It belongs to a Google-owned project. Your account has some special automatic features -- such as automatically being granted Project Owner when you create a new project -- but otherwise it's not much different than a service account.
65 |
66 | When requests are issued to provision or alter resources, a number of checks are made before the action is performed: precondition checks, quota checks, and billing access checks. These checks are made on the project to which the credentials belong, rather than the project of the resource being altered. This means that if you ask to perform actions on your project using your identity, the checks are actually made against the Google-owned project, not your own.
67 |
68 | To prevent any potential ownership or permission issues, it's recommended that for automation tasks you create a dedicated service account within the project you are automating.
69 |
70 |
71 | ---
72 |
73 | To give an overview of how you can use terraform, a basic Cloud Run setup might be as simple as the [provided example](https://www.terraform.io/docs/providers/google/r/cloud_run_service.html).
74 |
75 | This page has other sample provisionings, such as a [Cloud Run + Cloud SQL](https://www.terraform.io/docs/providers/google/r/cloud_run_service.html#example-usage-cloud-run-service-sql) setup, or [Allow Unauthenticated](https://www.terraform.io/docs/providers/google/r/cloud_run_service.html#example-usage-cloud-run-service-noauth).
76 |
77 | Our setup is a little bit more complex than a 'Hello World', so we're going to provision and deploy in a few steps:
78 |
79 | * [*Provision*](#provision-infrastructure) the infrastructure with Terraform, then
80 | * perform the initial [*migration*](#migrate-the-database), then setup
81 | * Continuous [*Deployment*](#continuous-deployment).
82 |
83 |
84 | There is a reason for this separation. All the manual configurations required to get our sample application out into the real world were detailed in the last half-dozen pages of this tutorial, and the *provisioning* itself was most of that. We can use Terraform to re-create all of that at once. However, when you run Terraform on your local machine, you run it with the same Owner permissions you run `gcloud` with. It makes sense to keep the manual configuration with these permissions, but automated processes -- such as our triggers -- should run with the minimum possible permissions. This has consequences for the scripts we use to deploy, so to mitigate that, we'll do a manual deployment once, which creates our service. Then, our updates can happen with the most focused permissions required.
85 |
86 | ---
87 |
88 | ### Provision Infrastructure
89 |
90 | We've provided the Terraform files in `terraform/`. If you don't already have a local copy of this repository, clone it:
91 |
92 | ```shell,exclude
93 | git clone https://github.com/GoogleCloudPlatform/django-demo-app-unicodex
94 | ```
95 |
96 | 💡 If you chose to run this section in a new project, you will need to re-create the base image:
97 |
98 | ```shell,exclude
99 | gcloud builds submit --tag gcr.io/${PROJECT_ID}/unicodex .
100 | ```
101 |
102 | Once you have this configured, you need to initialise Terraform:
103 |
104 | ```shell,exclude
105 | cd terraform
106 | terraform init
107 | ```
108 |
109 | Then apply the configurations:
110 |
111 | ```shell,exclude
112 | terraform apply
113 | ```
114 |
115 | Run without any other flags, this command will prompt you for the required variables (see `variables.tf` for the full list, with details about what's required), and ask you to confirm the changes before they are applied (you can also preview the changes without applying them by running `terraform plan`).
116 |
117 | You can specify your variables using [command-line flags](https://learn.hashicorp.com/terraform/getting-started/variables.html#command-line-flags), which would look something like this:
118 |
119 | ```shell,exclude
120 | terraform apply \
121 | -var region=us-central1 \
122 | -var service=unicodex \
123 | -var project=${PROJECT_ID} \
124 | -var instance_name=psql
125 | ```
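
The same variable flags work with `terraform plan`, if you want to preview the proposed changes without applying anything:

```shell,exclude
terraform plan \
  -var region=us-central1 \
  -var service=unicodex \
  -var project=${PROJECT_ID} \
  -var instance_name=psql
```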
126 |
127 | ⚠️ Since we are dynamically creating our secret strings, our [terraform state is considered sensitive data](https://www.terraform.io/docs/state/sensitive-data.html).
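
If you'd rather keep that state out of your working directory entirely, one option (mirroring what `experimental/setup.sh` does) is a Google Cloud Storage backend. A sketch, assuming you have already created a `${PROJECT_ID}-tfstate` bucket:

```shell,exclude
# write a backend configuration pointing at the state bucket
cat > terraform/backend.tf <<_EOF
terraform {
  backend "gcs" {
    bucket = "${PROJECT_ID}-tfstate"
  }
}
_EOF

# re-initialise so Terraform picks up the new backend
terraform init
```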
128 |
129 |
130 | Looking within `terraform/`, you'll see we're separating our Terraform process into some major segments:
131 |
132 | * Enable the service APIs (which then allows us to)
133 | * Create the Cloud SQL database (which then allows us to)
134 | * Create the secrets, permissions, and other components required, to finally allow us to
135 | * Create the Cloud Run service.
136 |
137 | This separation means we can stagger our setup, so that core sections that depend on each other are completed one at a time.
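
If you ever need to converge just one of these segments, Terraform's `-target` flag applies a single resource and its dependencies; for example, using the database resource name from `terraform/database.tf`:

```shell,exclude
terraform apply -target google_sql_database_instance.postgres
```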
138 |
139 | ### Migrate the database
140 |
141 | Once this process finishes, everything will be set up and ready for our build-migrate-deploy:
142 |
143 | ```shell,exclude
144 | cd ..
145 | gcloud builds submit --config .cloudbuild/build-migrate-deploy.yaml \
146 | --substitutions="[generated from terraform inputs]"
147 | ```
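
The exact substitution string is generated for you from your Terraform inputs, but as an illustration (assuming the `psql` instance name used earlier, and the substitution variables that `.cloudbuild/build-migrate-deploy.yaml` expects), it takes a shape like:

```shell,exclude
gcloud builds submit --config .cloudbuild/build-migrate-deploy.yaml \
  --substitutions "_IMAGE_NAME=gcr.io/${PROJECT_ID}/unicodex,_INSTANCE_CONNECTION_NAME=${PROJECT_ID}:${REGION}:psql,_DATABASE_PORT=5432"
```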
148 |
149 | The Terraform output will also show you how to log in to the Django admin, including how to retrieve the login secrets:
150 |
151 | ```shell,exclude
152 | gcloud secrets versions access latest --secret SUPERUSER
153 | gcloud secrets versions access latest --secret SUPERPASS
154 | ```
155 |
156 | 🗒 The secret values are stored in the [local terraform state](https://www.terraform.io/docs/state/index.html), but it's always a good idea to get configurations from the one source of truth.
157 |
158 | ℹ️ Unlike the shell scripts we used earlier, we can re-`apply` terraform at any time. So if you have any component that doesn't seem to work, or you manually change something and want to change it back, just run `terraform apply` again. This can help with issues of eventual consistency, network latency, or any other gremlins in the system.
159 |
160 | ---
161 |
162 | ℹ️ A note on permissions:
163 |
164 | This tutorial has two methods of provisioning: the shell scripts you saw earlier, and the Terraform configuration. Both are designed to produce the same project setup in the end, which means that although we could automate more in Terraform (such as creating the Cloud Run service), that would require a different set of permissions for the unicodex service accounts.
165 |
166 | We granted Owner rights to the Terraform service account, as we are running it only locally on our own laptops. If you want to use Terraform within Cloud Build, you should absolutely use a lower level of access.
167 |
168 | ---
169 |
170 | ### Continuous deployment
171 |
172 | Once the *provisioning* and *first deploy* are done, you can configure *automated deployments* as in the [last step](60-ongoing-deployments.md); this is effectively setting up the last command in the script above to trigger automatically.
173 |
174 | ---
175 |
176 | Don't forget to [clean up](90-cleanup.md) your resources if you don't want to continue running your app.
177 |
178 | ---
179 |
180 |
--------------------------------------------------------------------------------
/docs/90-cleanup.md:
--------------------------------------------------------------------------------
1 | # Clean up
2 |
3 | If you no longer want to keep your application running, you need to clean it up.
4 |
5 | If you chose to create a new Google Cloud Platform project just for this app, then you can clean up by deleting the entire project.
6 |
7 |
8 | To avoid incurring charges to your Google Cloud Platform account for the resources used in this tutorial:
9 | * In the Cloud Console, go to the [Manage resources](https://console.cloud.google.com/cloud-resource-manager) page.
10 | * In the project list, select your project then click Delete.
11 | * In the dialog, type the project ID and then click Shut down to delete the project.
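
Equivalently, from the command line:

```shell,exclude
gcloud projects delete ${PROJECT_ID}
```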
12 |
13 | #### ⚠️ Please cleanup after yourself ⚠️
14 |
15 | Some of the resources created in this documentation have an ongoing cost. If you no longer want to run this application, it's a really good idea to destroy the resources so you are not charged.
16 |
17 | The most significant cost in this tutorial is the Cloud SQL database, which has an ongoing cost (at the suggested settings) of around US$10 per month. Other components could have ongoing costs, depending on their usage.
18 |
--------------------------------------------------------------------------------
/docs/yy_styleguide.md:
--------------------------------------------------------------------------------
1 | # 💅 Style Guide
2 |
3 | This documentation, and contributions, should conform to the [Google developer documentation style guide](https://developers.google.com/style/), with the following added opinions for consistency and clarity.
4 |
5 | ## Code blocks
6 |
7 | **Always check that your formatted commands can be run in a terminal**.
8 |
9 | Some command line tools are more particular about their formatting than others. Always err on the side of 'this-works' over 'looks-pretty'.
10 |
11 | The following conventions follow [the aforementioned style guide](https://developers.google.com/style/command-line-terminology); the descriptiveness below has been added for explicit clarity.
12 |
13 | ### Parameters, flags and arguments
14 |
15 | Where there are multiple options for command inputs, opt for spaces over equal-signs. This allows the command to be more easily read by a human.
16 |
17 | ```shell,exclude
18 | # 👍 Recommended
19 | gcloud sample command --parameter option
20 |
21 | # not recommended
22 | gcloud sample command --parameter=option
23 | ```
24 |
25 | Some parameters must be passed literally with no spaces, so preserve those. Also wrap special characters that would otherwise interrupt terminal execution in double-quote marks.
26 |
27 | ```shell,exclude
28 | # 👍 Recommended
29 | gcloud sample command --parameter "option=values(csv)"
30 |
31 | # will error
32 | gcloud sample command --parameter option values(csv)
33 | ```
34 |
35 | ### Shell variables
36 |
37 | When documenting replacement shell environment variables, use literal replacements where possible, opting for variable expansion when literal replacements will not produce the desired result.
38 |
39 | ```shell,exclude
40 | # 👍 Recommended
41 | gcloud sample command option --name $NAME --parameter "option=${VALUES}"
42 | ```
43 |
44 | ### Long commands
45 |
46 | In the event a command cannot fit on one line, opt to split lines on:
47 | * positional arguments, then
48 | * pairs of positional arguments
49 |
50 | The ordering of positional arguments, if unimportant to the execution of the command, should follow whatever logic pertains to the operation.
51 |
52 | Split lines should be noted with a trailing backslash, ensuring no whitespace follows. Indentation of extra lines should be two spaces.
53 |
54 |
55 | ```shell,exclude
56 | # 👍 Recommended
57 | gcloud sample command ofincredible length \
58 | --parameter option \
59 | --yetanother option \
60 | --which-keeps-going "and-on"
61 |
62 | # not recommended
63 | gcloud sample command ofincredible length --parameter option --yetanother option --which-keeps-going "and-on"
64 | ```
65 |
66 |
67 | ### Automation
68 |
69 | Code blocks that are intended to be run as part of a sequence as defined in `.util` should be prefaced with "shell".
70 |
71 | Code blocks that are **descriptive** only should be prefaced with "shell,exclude".
72 |
73 | For examples of these, see the source code for this page (as it is difficult to show backticks in markdown without them being explicitly rendered).
74 |
75 |
--------------------------------------------------------------------------------
/docs/zz_debugging.md:
--------------------------------------------------------------------------------
1 | # 🐛🐛🐛 Debugging Steps 🐛🐛🐛
2 |
3 | *If at any point your deployment of unicodex breaks, hopefully one of the following debugging tricks can help.* ✨
4 |
5 | ## Enable `DEBUG` mode in Django
6 |
7 | For security reasons, `DEBUG` should not be enabled in production. So, we didn't enable it.
8 |
9 | To temporarily enable it:
10 |
11 | ```
12 | gcloud run services update unicodex --update-env-vars DEBUG=True
13 | ```
14 |
15 | You should then reload the page, and see a more useful error.
16 |
17 | Remember to turn it off again!
18 |
19 | ```
20 | gcloud run services update unicodex --update-env-vars DEBUG=False
21 | ```
22 |
23 |
24 | ## Database issues
25 |
26 | ### Check your instance settings
27 |
28 | Is the instance you are connecting to correct?
29 |
30 | For a number of configurations, the database instance needs to be in the form `PROJECT_ID:REGION:INSTANCE_NAME`.
31 |
32 | Check the instance name is correct by going to the [SQL Instances](https://console.cloud.google.com/sql/instances) listing and confirming your configuration matches the "instance connection name" listing for your instance.
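
For example, you can retrieve the connection name directly:

```
gcloud sql instances describe $INSTANCE_NAME --format "value(connectionName)"
```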
33 |
34 | ### Check your DATABASE_URL
35 |
36 | Is your `DATABASE_URL` correct? Test it with `cloud_sql_proxy`!
37 |
38 | Install the [`cloud_sql_proxy` client](https://cloud.google.com/sql/docs/postgres/sql-proxy#install) for your platform. More instructions are in the [database](20-setup-sql.md) section.
39 |
40 | Then, we're going to test our `DATABASE_URL`. Well, some of it.
41 |
42 | In a new terminal:
43 |
44 | ```
45 | ./cloud_sql_proxy -instances=$PROJECT_ID:$REGION:$INSTANCE_NAME=tcp:5433
46 | ```
47 |
48 | You should see "Ready for new connections".
49 |
50 | What we've done is map our `DATABASE_INSTANCE` to localhost, port 5433.
51 |
52 | So, we need to remove the `//cloudsql/.../DATABASE` from our `DATABASE_URL`, and replace it with `localhost:5433`.
53 |
54 | So what was once:
55 |
56 | ```
57 | export DATABASE_URL=postgres://django:SECRET@//cloudsql/$PROJECT_ID:$REGION:$INSTANCE_NAME/$DATABASE_NAME
58 | ```
59 |
60 | now becomes
61 |
62 | ```
63 | export TEST_DATABASE_URL=postgres://django:SECRET@localhost:5433/$DATABASE_NAME
64 | ```
65 |
66 | Then, in your original terminal:
67 |
68 | ```
69 | pip install psycopg2-binary
70 | python -c "import os, psycopg2; conn = psycopg2.connect(os.environ['TEST_DATABASE_URL']);"
71 | ```
72 |
73 | If this did not return an error, then it all worked!
74 |
75 | *So what did we just do?*
76 |
77 | We installed a pre-compiled PostgreSQL database adapter, [psycopg2-binary](https://pypi.org/project/psycopg2-binary/).
78 |
79 | We then started up the `cloud_sql_proxy` in a new tab, mapping that locally.
80 |
81 | Finally, we ran a tiny bit of Python that used the PostgreSQL adapter to create a connection using our new `TEST_DATABASE_URL` variable.
82 |
83 | ---
84 |
85 | ## Still more database issues?
86 |
87 | Check you have configured the correct IAM settings.
88 |
89 | Locate the [IAM permissions page](https://console.cloud.google.com/iam-admin/iam) in the Cloud Console, and confirm that the `unicodex service account` has Cloud SQL Client and Cloud Run admin roles.
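
One way to list the roles granted to a service account from the command line (a sketch; adjust the member filter to your service account's email):

```
gcloud projects get-iam-policy $PROJECT_ID \
  --flatten "bindings[].members" \
  --filter "bindings.members:unicodex" \
  --format "value(bindings.role)"
```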
90 |
91 | Locate the [Cloud Build settings page](https://console.cloud.google.com/cloud-build/settings/service-account) in the Cloud Console, and confirm that the `Cloud Run` GCP service is set to `ENABLED`.
92 |
93 | ---
94 | Did you encounter a different problem? [Log an issue](https://github.com/GoogleCloudPlatform/django-demo-app-unicodex/issues).
95 |
96 | ---
97 |
--------------------------------------------------------------------------------
/experimental/README.md:
--------------------------------------------------------------------------------
1 | # Experimental System Testing
2 |
3 | This repo has multiple deployment methods:
4 |
5 | * Terraform (`terraform`)
6 | * Cloud Run Button (`button`)
7 | * Generated script from the tutorial documentation (`gen`).
8 |
9 | This folder attempts to test those.
10 |
11 | ⚠️ This setup relies on Preview functionality, and requires a Workspace account (to automate creating projects to test).
12 |
13 | ## project_setup.sh
14 |
15 | Your Workspace administrator will need to create a base project with billing enabled (to run the script in), and a base folder (where the ephemeral projects will live, identified by `$_PARENT_FOLDER`).
16 |
17 | Running this script will set up the current project so that it is ready to be used as a parent project.
18 |
19 | ```
20 | source experimental/project_setup.sh
21 | ```
22 |
23 | This will do a number of things, including setting up a custom service account, which is mandatory for later steps.
24 |
25 | ## cloudbuild.yaml
26 |
27 | A hands-off way to do full testing. WIP.
28 |
29 | With the parent project set up, it will run the test as if it were running:
30 |
31 | ```
32 | source experimental/setup.sh -t [terraform|button|gen]
33 | .util/helper check-deploy
34 | ```
35 |
36 | This will create a new project. If you want to use an existing project, specify `_CI_PROJECT` (the script will check whether that project exists, and create it if it doesn't).
37 |
38 |
39 | ## Cloud Build triggers
40 |
41 | `cloudbuild.yaml` is configured in such a way that it can be run periodically as a trigger.
42 |
43 | 1. Go to Cloud Build and [create a trigger](https://console.cloud.google.com/cloud-build/triggers/add)
44 | 1. Use these settings:
45 | 1. Event: Manual Invocation
46 | 1. Source: your repo (click "Connect Repository" if you haven't configured it before)
47 | 1. Branch: your main branch
48 | 1. Configuration: Repository; `experimental/cloudbuild.yaml`
49 | 1. Substitutions:
50 | 1. `_PARENT_FOLDER`: the ID of the folder to create builds in
51 | 1. `_TEST_TYPE`: one of: terraform, gen, or button.
52 | 1. Service Account: ci-serviceaccount@PROJECT_ID.iam.gserviceaccount.com
53 | 1. Test the build by clicking 'Run'
54 | 1. Make the run periodic by clicking the three vertical dots on the Trigger record, and specifying a schedule.
55 |
56 | Note that you have a maximum number of projects allowed by default, and projects are only purge-deleted 30 days after you 'delete' them (this grace period allows you to undelete, etc.). Ensure you restrict your test frequency within this limit.
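
To see which deleted projects are still within that grace period (and optionally recover one), something like this may help:

```
gcloud projects list --filter lifecycleState:DELETE_REQUESTED
gcloud projects undelete PROJECT_ID
```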
57 |
58 | ## Cloud Builds local machine
59 |
60 | To test the builds ad-hoc on your local machine, you will need to add the service account inline:
61 |
62 | ```
63 | echo "serviceAccount: projects/${PROJECT_ID}/serviceAccounts/ci-serviceaccount@${PROJECT_ID}.iam.gserviceaccount.com" >> experimental/cloudbuild.yaml
64 | ```
65 |
66 | Then call as you would the main test:
67 |
68 | ```
69 | gcloud builds submit --config experimental/cloudbuild.yaml --substitutions _TEST_TYPE=terraform
70 | ```
71 |
--------------------------------------------------------------------------------
/experimental/button_test.yaml:
--------------------------------------------------------------------------------
1 | steps:
2 | - id: build
3 | name: golang
4 | entrypoint: /bin/bash
5 | env: ["GOPATH=/workspace"]
6 | args:
7 | - -c
8 | - |
9 | go install github.com/GoogleCloudPlatform/cloud-run-button/cmd/cloudshell_open@latest
10 |
11 | - id: "press button"
12 | name: "google/cloud-sdk"
13 | entrypoint: /bin/bash
14 | env:
15 | - "TRUSTED_ENVIRONMENT=true"
16 | - "SKIP_CLONE_REPORTING=true"
17 | - "GOOGLE_CLOUD_PROJECT=$PROJECT_ID"
18 | - "GOOGLE_CLOUD_REGION=${_REGION}"
19 | args:
20 | - "-c"
21 | - |
22 | ./bin/cloudshell_open --repo_url=https://github.com/GoogleCloudPlatform/django-demo-app-unicodex.git
23 |
24 | options:
25 | dynamic_substitutions: true
26 |
27 | logsBucket: "${_PARENT_PROJECT}-buildlogs"
28 |
--------------------------------------------------------------------------------
/experimental/cleanup.yaml:
--------------------------------------------------------------------------------
1 | steps:
2 | - id: Cleanup any projects
3 | name: 'gcr.io/google.com/cloudsdktool/cloud-sdk'
4 | entrypoint: /bin/bash
5 | args:
6 | - '-c'
7 | - |
8 | echo "List the projects"
9 | gcloud projects list --filter parent=${_PARENT_FOLDER} --format "value(project_id)"
10 | echo "Delete projects"
11 | for project in $(gcloud projects list --filter parent=${_PARENT_FOLDER} --format "value(project_id)")
12 | do gcloud projects delete $project --quiet
13 | done
14 |
15 | logsBucket: ${PROJECT_ID}-buildlogs
16 | serviceAccount: projects/${PROJECT_ID}/serviceAccounts/ci-serviceaccount@unicodex-admin.iam.gserviceaccount.com
17 |
--------------------------------------------------------------------------------
/experimental/cloudbuild.yaml:
--------------------------------------------------------------------------------
1 | steps:
2 | - id: setup
3 | name: "gcr.io/google.com/cloudsdktool/cloud-sdk:slim"
4 | entrypoint: /bin/bash
5 | args:
6 | - "-c"
7 | - |
8 | source ./experimental/setup.sh \
9 | -t ${_TEST_TYPE} \
10 | -f ${_PARENT_FOLDER} \
11 | -p ${_CI_PROJECT} \
12 | -r ${_REGION} \
13 | -s ${_SA_NAME}
14 |
15 | - id: "check"
16 | name: python:3.9-slim
17 | entrypoint: /bin/bash
18 | args:
19 | - "-c"
20 | - |
21 | python -m pip install -r .util/requirements.txt
22 | .util/helper check-deploy ${_CI_PROJECT}
23 |
24 | - id: "destroy"
25 | name: "gcr.io/google.com/cloudsdktool/cloud-sdk:slim"
26 | env: ["KEEP_PROJECT=${_KEEP_PROJECT}"]
27 | script:
28 | #!/bin/bash
29 |
30 | if [ $KEEP_PROJECT = 'true' ]; then echo "Configuration says to keep project around. Not deleting."; exit 0; fi
31 |
32 | echo "Deleting project..."
33 | gcloud projects delete ${_CI_PROJECT} --quiet;
34 |
35 | options:
36 | dynamic_substitutions: true
37 | machineType: 'E2_HIGHCPU_8'
38 |
39 | logsBucket: ${PROJECT_ID}-buildlogs
40 | serviceAccount: projects/${PROJECT_ID}/serviceAccounts/ci-serviceaccount@unicodex-admin.iam.gserviceaccount.com
41 |
42 | substitutions:
43 | _KEEP_PROJECT: "false"
44 | _REGION: us-central1
45 | _TEST_TYPE: terraform
46 | _SA_NAME: ci-serviceaccount
47 | _CI_PROJECT: "gephemeral-${BUILD_ID:0:8}-${_TEST_TYPE:0:1}"
48 |
49 | timeout: "1500s"
50 |
--------------------------------------------------------------------------------
/experimental/gen_test.yaml:
--------------------------------------------------------------------------------
1 | steps:
2 | - id: generate
3 | name: python:3.9-slim
4 | entrypoint: /bin/bash
5 | args:
6 | - "-c"
7 | - |
8 | python -m pip install -r .util/requirements.txt --user
9 | .util/helper gen --project $PROJECT_ID --region $_REGION > /workspace/deploy.sh
10 | wc -l /workspace/deploy.sh
11 |
12 | - id: "deploy"
13 | name: "google/cloud-sdk"
14 | entrypoint: /bin/bash
15 | args:
16 | - "-c"
17 | - |
18 | time bash -e /workspace/deploy.sh
19 |
20 | options:
21 | dynamic_substitutions: true
22 |
23 | logsBucket: "${_PARENT_PROJECT}-buildlogs"
--------------------------------------------------------------------------------
/experimental/nested.yaml:
--------------------------------------------------------------------------------
1 | steps:
2 | - name: 'gcr.io/google.com/cloudsdktool/cloud-sdk'
3 | entrypoint: 'gcloud'
4 | args:
5 | - 'builds'
6 | - 'submit'
7 | - '--config'
8 | - './experimental/success.yaml'
9 | - '--project=${_TARGET_PROJECT}'
10 |
11 | serviceAccount: projects/unicodex-ci-base/serviceAccounts/ci-serviceaccount@unicodex-ci-base.iam.gserviceaccount.com
12 | logsBucket: unicodex-ci-base-buildlogs
--------------------------------------------------------------------------------
/experimental/project_setup.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | # Sets up the requirements for the parent project which will be used to start tests.
4 |
5 | # borrows heavily from fourkeys
6 | # https://github.com/GoogleCloudPlatform/fourkeys/blob/main/experimental/terraform/setup.sh
7 |
8 |
9 | # Sets up a parent project for CI work
10 | source .util/bash_helpers.sh
11 |
12 |
13 | export PARENT_PROJECT=$(gcloud config get-value project)
14 | echo "🔨 configure parent project $PARENT_PROJECT"
15 |
16 | export PARENT_FOLDER=$1
17 | stepdo "confirm folder exists"
18 | gcloud resource-manager folders describe $PARENT_FOLDER --format "value(lifecycleState)"
19 | stepdone
20 | echo "🔧 configure parent folder $PARENT_FOLDER"
21 |
22 | export ORGANIZATION=$(gcloud organizations list --format "value(name)")
23 | echo "🗜 configure organisation $ORGANIZATION"
24 |
25 | export BILLING_ACCOUNT=$(gcloud beta billing projects describe ${PARENT_PROJECT} --format="value(billingAccountName)" | sed -e 's/.*\///g')
26 | echo "💳 configure billing account $BILLING_ACCOUNT"
27 |
28 | export PARENT_PROJECTNUM=$(gcloud projects describe ${PARENT_PROJECT} --format='value(projectNumber)')
29 | export DEFAULT_GCB=$PARENT_PROJECTNUM@cloudbuild.gserviceaccount.com
30 |
31 | stepdo "Enable services on parent"
32 | gcloud services enable --project $PARENT_PROJECT \
33 | cloudresourcemanager.googleapis.com \
34 | cloudbilling.googleapis.com \
35 | cloudbuild.googleapis.com \
36 | iam.googleapis.com \
37 | sqladmin.googleapis.com
38 | stepdone
39 |
40 | stepdo "Create service account"
41 | SA_NAME=ci-serviceaccount
42 | SA_EMAIL=$(gcloud iam service-accounts list --filter $SA_NAME --format 'value(email)')
43 | if [ -z "$SA_EMAIL" ]; then
44 | gcloud iam service-accounts create $SA_NAME --project $PARENT_PROJECT
45 | SA_EMAIL=$(gcloud iam service-accounts list --filter $SA_NAME --format 'value(email)')
46 | else
47 | echo "Service account $SA_EMAIL already exists. Skipping"
48 | fi
49 | stepdone
50 |
51 | stepdo "Create CI logs bucket"
52 | LOGS_BUCKET=gs://${PARENT_PROJECT}-buildlogs
53 |
54 | if gsutil ls $LOGS_BUCKET 2>&1 | grep -q 'BucketNotFoundException'; then
55 | gsutil mb -p $PARENT_PROJECT $LOGS_BUCKET
56 |
57 | gsutil iam ch \
58 | serviceAccount:${DEFAULT_GCB}:roles/storage.objectAdmin \
59 | $LOGS_BUCKET
60 | else
61 | echo "Bucket $LOGS_BUCKET already exists. Skipping"
62 | fi
63 | stepdone
64 |
65 | stepdo "Grant access to default logs bucket"
66 | DEFAULT_BUCKET=gs://${PARENT_PROJECT}_cloudbuild
67 |
68 | if gsutil ls $DEFAULT_BUCKET 2>&1 | grep -q 'BucketNotFoundException'; then
69 | echo "Default Cloud Build log bucket not automatically created. Fixing."
70 | gsutil mb -p $PARENT_PROJECT $DEFAULT_BUCKET
71 | fi
72 | gsutil iam ch \
73 | serviceAccount:${SA_EMAIL}:roles/storage.admin \
74 | $DEFAULT_BUCKET
75 | stepdone
76 |
77 | stepdo "Grant roles to service account on project"
78 | for role in storage.admin iam.serviceAccountUser; do
79 | quiet gcloud projects add-iam-policy-binding $PARENT_PROJECT \
80 | --member serviceAccount:${SA_EMAIL} \
81 | --role roles/${role}
82 | done
83 | stepdone
84 |
85 | stepdo "Grant roles to service account on folder"
86 | for role in billing.projectManager resourcemanager.projectCreator resourcemanager.projectDeleter resourcemanager.projectIamAdmin; do
87 | quiet gcloud resource-manager folders add-iam-policy-binding $PARENT_FOLDER \
88 | --member serviceAccount:${SA_EMAIL} \
89 | --role roles/${role}
90 | done
91 | stepdone
92 |
93 | stepdo "Grant roles to service account on billing account"
94 | for role in billing.user billing.viewer; do
95 | quiet gcloud beta billing accounts add-iam-policy-binding $BILLING_ACCOUNT \
96 | --member serviceAccount:${SA_EMAIL} \
97 | --role roles/${role}
98 | done
99 | stepdone
--------------------------------------------------------------------------------
/experimental/setup.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | set -e
3 | source .util/bash_helpers.sh
4 |
5 | PARENT_PROJECT=$(gcloud config get-value project)
6 |
7 | while getopts t:f:p:r:s: flag; do
8 | case "${flag}" in
9 | t) TEST_TYPE=${OPTARG} ;;
10 | f) PARENT_FOLDER=${OPTARG} ;;
11 | p) CI_PROJECT=${OPTARG} ;;
12 | r) REGION=${OPTARG} ;;
13 | s) SA_NAME=${OPTARG} ;;
14 | esac
15 | done
16 |
17 | CLOUDBUILD_CONFIG=experimental/${TEST_TYPE}_test.yaml
18 |
19 | if [ ! -f "$CLOUDBUILD_CONFIG" ]; then
20 | echo "❌ No configuration for $CLOUDBUILD_CONFIG."
21 | exit 1
22 | fi
23 |
24 | if [[ -z $PARENT_FOLDER ]]; then
25 | export PARENT_FOLDER=$(gcloud projects describe ${PARENT_PROJECT} --format="value(parent.id)")
26 | echo "🔍 Found folder ${PARENT_FOLDER} from ${PARENT_PROJECT}"
27 | else
28 | echo "📦 Using provided folder $PARENT_FOLDER"
29 | fi
30 |
31 | if [[ -z $CI_PROJECT ]]; then
32 | CI_PROJECT_PREFIX=unicodex-test
33 | RANDOM_IDENTIFIER=$((RANDOM % 999999))
34 | CI_PROJECT=$(printf "%s-%06d" $CI_PROJECT_PREFIX $RANDOM_IDENTIFIER)-${TEST_TYPE:=manual}
35 | fi
36 |
37 | if [[ -z $REGION ]]; then
38 | REGION=us-central1
39 | fi
40 |
41 | SA_EMAIL=$(gcloud iam service-accounts list --project ${PARENT_PROJECT} --filter $SA_NAME --format 'value(email)')
42 |
43 | echo "🚀 Running setup using $TEST_TYPE on $CI_PROJECT in $REGION with $SA_EMAIL"
44 |
45 | if gcloud projects list --filter $CI_PROJECT | grep -q "$CI_PROJECT"; then
46 | echo "🔁 Reusing ${CI_PROJECT}"
47 | else
48 | stepdo "🔨 create CI project $CI_PROJECT in folder $PARENT_FOLDER"
49 | gcloud projects create ${CI_PROJECT} --folder ${PARENT_FOLDER}
50 | stepdone
51 |
52 | stepdo "assign IAM policies to service account"
53 | for role in iam.serviceAccountTokenCreator iam.serviceAccountUser billing.projectManager; do
54 | quiet gcloud projects add-iam-policy-binding $CI_PROJECT \
55 | --member serviceAccount:${SA_EMAIL} \
56 | --role roles/${role}
57 | done
58 | stepdone
59 |
60 | stepdo "setup billing"
61 | BILLING_ACCOUNT=$(gcloud beta billing projects describe ${PARENT_PROJECT} --format="value(billingAccountName)" | sed -e 's/.*\///g')
62 | gcloud beta billing projects link ${CI_PROJECT} \
63 | --billing-account=${BILLING_ACCOUNT}
64 | stepdone
65 |
66 | stepdo "enable services on ci project"
67 | gcloud services enable --project $CI_PROJECT \
68 | cloudresourcemanager.googleapis.com \
69 | containerregistry.googleapis.com \
70 | cloudbuild.googleapis.com \
71 | cloudbilling.googleapis.com
72 | stepdone
73 |
74 | stepdo "assign IAM owner role to Cloud Build service account"
75 | CI_PROJECTNUMBER=$(gcloud projects describe ${CI_PROJECT} --format='value(projectNumber)')
76 | CLOUDBUILD_SA=$CI_PROJECTNUMBER@cloudbuild.gserviceaccount.com
77 | quiet gcloud projects add-iam-policy-binding $CI_PROJECT \
78 | --member serviceAccount:${CLOUDBUILD_SA} \
79 | --role roles/owner
80 | stepdone
81 |
82 | stepdo "assign Log Bucket writer to Cloud Build service account"
83 | LOGS_BUCKET=gs://${PARENT_PROJECT}-buildlogs
84 | gsutil iam ch \
85 | serviceAccount:${CLOUDBUILD_SA}:roles/storage.admin \
86 | $LOGS_BUCKET
87 | stepdone
88 |
89 | echo ""
90 | echo "✅ Project '${CI_PROJECT}' is now ready to use."
91 | echo ""
92 | fi
93 |
94 | if [[ "$TEST_TYPE" == "terraform" ]]; then
95 | TF_STATE_BUCKET=${CI_PROJECT}-tfstate
96 |
97 | if gsutil ls gs://$TF_STATE_BUCKET | grep -q $TF_STATE_BUCKET; then
98 | echo "Bucket $TF_STATE_BUCKET already exists. Skipping"
99 | else
100 | gsutil mb -p ${CI_PROJECT} gs://$TF_STATE_BUCKET
101 | echo "Created $TF_STATE_BUCKET bucket"
102 | fi
103 |
104 | cat >terraform/backend.tf <<_EOF
105 | terraform {
106 | backend gcs {
107 | bucket = "$TF_STATE_BUCKET"
108 | }
109 | }
110 | _EOF
111 | cat terraform/backend.tf
112 | stepdone
113 | fi
114 |
115 | echo "Running $CLOUDBUILD_CONFIG in $CI_PROJECT"
116 | gcloud builds submit \
117 | --config $CLOUDBUILD_CONFIG \
118 | --timeout 1500 \
119 | --project $CI_PROJECT \
120 | --substitutions _PARENT_PROJECT=${PARENT_PROJECT},_REGION=${REGION}
121 |
--------------------------------------------------------------------------------
/experimental/terraform_test.yaml:
--------------------------------------------------------------------------------
1 | # Designed to be run in the CI project, which in this case will be *the* project.
2 | steps:
3 | - id: "build"
4 | name: "gcr.io/cloud-builders/docker"
5 | args: ["build", "-t", "gcr.io/${PROJECT_ID}/${_SERVICE}", "."]
6 |
7 | - id: "push"
8 | name: "gcr.io/cloud-builders/docker"
9 | args: ["push", "gcr.io/${PROJECT_ID}/${_SERVICE}"]
10 |
11 | - id: "tf"
12 | name: "hashicorp/terraform:${_TERRAFORM_VERSION}"
13 | entrypoint: "sh"
14 | args:
15 | - "-c"
16 | - |
17 | cd terraform
18 | terraform init
19 | terraform apply -auto-approve \
20 | -var project=${PROJECT_ID} \
21 | -var instance_name=${_INSTANCE_NAME} \
22 | -var region=${_REGION} \
23 | -var service=${_SERVICE}
24 |
25 | # This is a workaround for Terraform, as it can't lift state to a resource before it exists.
26 | # TODO(glasnt): fix
27 | - id: "workaround"
28 | name: "gcr.io/google.com/cloudsdktool/cloud-sdk:slim"
29 | env: ["REGION=${_REGION}","SERVICE=${_SERVICE}"]
30 | script: |
31 | #!/bin/bash
32 | gcloud run services describe $SERVICE --region $REGION --platform managed | grep CURRENT_HOST;
33 | 
34 | export SERVICE_URL=$(gcloud run services describe $SERVICE --format "value(status.url)" --platform managed --region ${REGION})
35 | 
36 | gcloud run services update $SERVICE --platform managed --region ${REGION} --update-env-vars "CURRENT_HOST=${SERVICE_URL}"
37 | 
38 | gcloud run services describe $SERVICE --region $REGION --platform managed | grep CURRENT_HOST;
39 |
40 |
41 | - id: "migrate"
42 | name: "gcr.io/google-appengine/exec-wrapper"
43 | args:
44 | [
45 | "-i",
46 | "gcr.io/${PROJECT_ID}/${_SERVICE}",
47 | "-s",
48 | "${PROJECT_ID}:${_REGION}:${_INSTANCE_NAME}",
49 | "-e",
50 | "PROJECT_ID=${PROJECT_ID}",
51 | "--",
52 | "sh",
53 | ".cloudbuild/django_migrate.sh",
54 | ]
55 |
56 | options:
57 | dynamic_substitutions: true
58 |
59 | logsBucket: "${_PARENT_PROJECT}-buildlogs"
60 |
61 | substitutions:
62 | _SERVICE: unicodex
63 | _REGION: us-central1
64 | _INSTANCE_NAME: psql
65 | _PARENT_PROJECT: unicodex-ci-base
66 | _TERRAFORM_VERSION: latest
67 |
68 | timeout: "1500s"
69 |
--------------------------------------------------------------------------------
/manage.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/python
2 | #
3 | # Copyright 2019 Google LLC
4 | #
5 | # Licensed under the Apache License, Version 2.0 (the "License");
6 | # you may not use this file except in compliance with the License.
7 | # You may obtain a copy of the License at
8 | #
9 | # http://www.apache.org/licenses/LICENSE-2.0
10 | #
11 | # Unless required by applicable law or agreed to in writing, software
12 | # distributed under the License is distributed on an "AS IS" BASIS,
13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 | # See the License for the specific language governing permissions and
15 | # limitations under the License.
16 |
17 |
18 | """Django's command-line utility for administrative tasks."""
19 | import os
20 | import sys
21 |
22 |
23 | def main():
24 | os.environ.setdefault("DJANGO_SETTINGS_MODULE", "unicodex.settings")
25 | try:
26 | from django.core.management import execute_from_command_line
27 | except ImportError as exc:
28 | raise ImportError(
29 | "Couldn't import Django. Are you sure it's installed and "
30 | "available on your PYTHONPATH environment variable? Did you "
31 | "forget to activate a virtual environment?"
32 | ) from exc
33 | execute_from_command_line(sys.argv)
34 |
35 |
36 | if __name__ == "__main__":
37 | main()
38 |
--------------------------------------------------------------------------------
/requirements.txt:
--------------------------------------------------------------------------------
1 | Django
2 | gunicorn
3 | django-environ
4 | psycopg2-binary
5 | django-storages[google]
6 | google-cloud-secret-manager
7 | google-auth
8 |
9 | # required for internal functionality, not deployment related
10 | requests
11 | pyyaml
12 | bs4
13 |
--------------------------------------------------------------------------------
/terraform/.gitignore:
--------------------------------------------------------------------------------
1 | **/.terraform/*
2 | .terraform/*
3 | *.tfstate
4 | *.tfstate.*
5 | crash.logs
6 | *.tfvars
7 |
8 |
9 | # This file is automatically generated through experimental/quickstart.sh, and should not be committed
10 | backend.tf
11 |
--------------------------------------------------------------------------------
/terraform/bucket.tf:
--------------------------------------------------------------------------------
1 | resource "google_storage_bucket" "media" {
2 | name = "${var.project}-media"
3 | location = "us-central1"
4 | storage_class = "REGIONAL"
5 | }
6 |
7 | data "google_iam_policy" "mediaaccess" {
8 | binding {
9 | role = "roles/storage.objectAdmin"
10 | members = [local.unicodex_sa]
11 | }
12 |
13 | binding {
14 | role = "roles/storage.legacyBucketOwner"
15 | members = ["projectOwner:${var.project}", "projectEditor:${var.project}"]
16 | }
17 | binding {
18 | role = "roles/storage.legacyBucketReader"
19 | members = ["projectViewer:${var.project}"]
20 | }
21 | }
22 |
23 | resource "google_storage_bucket_iam_policy" "policy" {
24 | bucket = google_storage_bucket.media.name
25 | policy_data = data.google_iam_policy.mediaaccess.policy_data
26 | }
27 |
--------------------------------------------------------------------------------
/terraform/database.tf:
--------------------------------------------------------------------------------
1 | locals {
2 | database_user = "${var.service}-django"
3 | database_name = var.service
4 | }
5 |
6 | resource "random_password" "database_user_password" {
7 | length = 30
8 | special = false
9 | }
10 |
11 | resource "google_sql_database_instance" "postgres" {
12 | name = var.instance_name
13 | database_version = "POSTGRES_13"
14 | project = var.project
15 | region = var.region
16 |
17 | settings {
18 | tier = "db-custom-2-4096" # 2 CPU, 4GB Memory
19 | }
20 |
21 | depends_on = [google_project_service.sqladmin]
22 |
23 | }
24 |
25 | resource "google_sql_database" "database" {
26 | name = local.database_name
27 | instance = google_sql_database_instance.postgres.name
28 | }
29 |
30 | # NOTE: users created this way automatically gain cloudsqladmin rights.
31 | resource "google_sql_user" "django" {
32 | name = local.database_user
33 | instance = google_sql_database_instance.postgres.name
34 | password = random_password.database_user_password.result
35 | }
36 |
--------------------------------------------------------------------------------
/terraform/etc/env.tpl:
--------------------------------------------------------------------------------
1 | # Django Settings
2 | DATABASE_URL="postgres://${user.name}:${user.password}@//cloudsql/${instance.project}:${instance.region}:${instance.name}/${database.name}"
3 | GS_BUCKET_NAME="${bucket}"
4 | SECRET_KEY="${secret_key}"
--------------------------------------------------------------------------------
/terraform/etc/get_image_digest.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | # WORKAROUND SCRIPT
3 |
4 | # The Terraform Google provider (as of 3.53.0) provides no way to get
5 | # information about images in the Container Registry.
6 | # If Terraform sees the "latest" tag, it takes no action, even if the latest
7 | # image has changed since last run.
8 |
9 | # So, manually retrieve the most recent fully qualified digest for the image.
10 | # This will ensure that a service is only redeployed if the image has been updated
11 | # This will require you to run 'gcloud builds submit', or similar, separately.
12 |
13 | PROJECT=$1
14 | IMAGE=$2
15 |
16 | # deep JSON is invalid for terraform, so serve flat value
17 | LATEST=$(gcloud container images describe gcr.io/${PROJECT}/${IMAGE}:latest --project $PROJECT --format="value(image_summary.fully_qualified_digest)" | tr -d '\n')
18 |
19 | # Attempt to throw an error if the image doesn't exist.
20 | if [ -z "$LATEST" ]; then
21 | echo {}
22 | else
23 | echo "{\"image\": \"${LATEST}\"}"
24 | fi
--------------------------------------------------------------------------------
/terraform/main.tf:
--------------------------------------------------------------------------------
1 | terraform {
2 | required_providers {
3 | google = {
4 | source = "hashicorp/google"
5 | }
6 | }
7 | }
8 |
9 | provider "google" {
10 | project = var.project
11 | }
12 |
13 | data "google_project" "project" {
14 | project_id = var.project
15 | }
16 |
17 | resource "google_service_account" "unicodex" {
18 | account_id = var.service
19 | display_name = "${var.service} service account"
20 |
21 | depends_on = [google_project_service.iam]
22 | }
23 |
24 | locals {
25 | unicodex_sa = "serviceAccount:${google_service_account.unicodex.email}"
26 | cloudbuild_sa = "serviceAccount:${data.google_project.project.number}@cloudbuild.gserviceaccount.com"
27 | }
28 |
29 | resource "google_project_iam_binding" "service_permissions" {
30 | for_each = toset([
31 | "run.admin", "cloudsql.client"
32 | ])
33 |
34 | project = var.project
35 | role = "roles/${each.key}"
36 | members = [local.cloudbuild_sa, local.unicodex_sa]
37 | depends_on = [google_service_account.unicodex]
38 | }
39 |
40 | resource "google_service_account_iam_binding" "cloudbuild_sa" {
41 | service_account_id = google_service_account.unicodex.name
42 | role = "roles/iam.serviceAccountUser"
43 |
44 | members = [local.cloudbuild_sa]
45 | }
--------------------------------------------------------------------------------
/terraform/output.tf:
--------------------------------------------------------------------------------
1 | locals {
2 | service_url = google_cloud_run_service.unicodex.status[0].url
3 | }
4 |
5 | output "result" {
6 | value = <<EOF
--------------------------------------------------------------------------------
/unicodex/static/css/skeleton.css:
--------------------------------------------------------------------------------
282 | label > .label-body {
283 | display: inline-block;
284 | margin-left: .5rem;
285 | font-weight: normal; }
286 |
287 |
288 | /* Lists
289 | –––––––––––––––––––––––––––––––––––––––––––––––––– */
290 | ul {
291 | list-style: circle inside; }
292 | ol {
293 | list-style: decimal inside; }
294 | ol, ul {
295 | padding-left: 0;
296 | margin-top: 0; }
297 | ul ul,
298 | ul ol,
299 | ol ol,
300 | ol ul {
301 | margin: 1.5rem 0 1.5rem 3rem;
302 | font-size: 90%; }
303 | li {
304 | margin-bottom: 1rem; }
305 |
306 |
307 | /* Code
308 | –––––––––––––––––––––––––––––––––––––––––––––––––– */
309 | code {
310 | padding: .2rem .5rem;
311 | margin: 0 .2rem;
312 | font-size: 90%;
313 | white-space: nowrap;
314 | background: #F1F1F1;
315 | border: 1px solid #E1E1E1;
316 | border-radius: 4px; }
317 | pre > code {
318 | display: block;
319 | padding: 1rem 1.5rem;
320 | white-space: pre; }
321 |
322 |
323 | /* Tables
324 | –––––––––––––––––––––––––––––––––––––––––––––––––– */
325 | th,
326 | td {
327 | padding: 12px 15px;
328 | text-align: left;
329 | border-bottom: 1px solid #E1E1E1; }
330 | th:first-child,
331 | td:first-child {
332 | padding-left: 0; }
333 | th:last-child,
334 | td:last-child {
335 | padding-right: 0; }
336 |
337 |
338 | /* Spacing
339 | –––––––––––––––––––––––––––––––––––––––––––––––––– */
340 | button,
341 | .button {
342 | margin-bottom: 1rem; }
343 | input,
344 | textarea,
345 | select,
346 | fieldset {
347 | margin-bottom: 1.5rem; }
348 | pre,
349 | blockquote,
350 | dl,
351 | figure,
352 | table,
353 | p,
354 | ul,
355 | ol,
356 | form {
357 | margin-bottom: 2.5rem; }
358 |
359 |
360 | /* Utilities
361 | –––––––––––––––––––––––––––––––––––––––––––––––––– */
362 | .u-full-width {
363 | width: 100%;
364 | box-sizing: border-box; }
365 | .u-max-full-width {
366 | max-width: 100%;
367 | box-sizing: border-box; }
368 | .u-pull-right {
369 | float: right; }
370 | .u-pull-left {
371 | float: left; }
372 |
373 |
374 | /* Misc
375 | –––––––––––––––––––––––––––––––––––––––––––––––––– */
376 | hr {
377 | margin-top: 3rem;
378 | margin-bottom: 3.5rem;
379 | border-width: 0;
380 | border-top: 1px solid #E1E1E1; }
381 |
382 |
383 | /* Clearing
384 | –––––––––––––––––––––––––––––––––––––––––––––––––– */
385 |
386 | /* Self Clearing Goodness */
387 | .container:after,
388 | .row:after,
389 | .u-cf {
390 | content: "";
391 | display: table;
392 | clear: both; }
393 |
394 |
395 | /* Media Queries
396 | –––––––––––––––––––––––––––––––––––––––––––––––––– */
397 | /*
398 | Note: The best way to structure the use of media queries is to create the queries
399 | near the relevant code. For example, if you wanted to change the styles for buttons
400 | on small devices, paste the mobile query code up in the buttons section and style it
401 | there.
402 | */
403 |
404 |
405 | /* Larger than mobile */
406 | @media (min-width: 400px) {}
407 |
408 | /* Larger than phablet (also point when grid becomes active) */
409 | @media (min-width: 550px) {}
410 |
411 | /* Larger than tablet */
412 | @media (min-width: 750px) {}
413 |
414 | /* Larger than desktop */
415 | @media (min-width: 1000px) {}
416 |
417 | /* Larger than Desktop HD */
418 | @media (min-width: 1200px) {}
419 |
--------------------------------------------------------------------------------
/unicodex/static/css/unicodex.css:
--------------------------------------------------------------------------------
1 | .emoji {
2 | font-size: 48pt;
3 | text-align: center;
4 | }
5 | .emoji a {
6 | text-decoration: none
7 | }
8 |
9 | h1 a, h2 a {
10 | text-decoration: none
11 | }
12 | body {
13 | font-family: 'Montserrat', sans-serif;
14 | }
15 |
16 | code {
17 | font-family: 'Monaco', monospace;
18 | }
19 |
20 | .water {
22 | font-size: 48pt;
23 | font-family: 'Oswald';
24 | margin-top: 20px;
25 | text-align: center;
26 | background: -webkit-linear-gradient(#b10fce, #452768);
27 | text-decoration: none;
28 | -webkit-background-clip: text;
29 | -webkit-text-fill-color: transparent;
30 | }
31 |
32 | .row {
33 | text-align: center;
34 | }
35 |
36 | .img {
37 | text-align: center;
38 | vertical-align: center;
39 | }
40 | .img img {
41 | height: 64px;
42 | }
43 |
--------------------------------------------------------------------------------
/unicodex/templates/base.html:
--------------------------------------------------------------------------------
1 | {% load static %}
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 | {% block title%}{%endblock%}
10 |
11 |