├── .env.template ├── .gitignore ├── .parameters.template.json ├── .secrets.template.json ├── README.md ├── app.py ├── app ├── README.md ├── app │ ├── __init__.py │ ├── asgi.py │ ├── celery.py │ ├── settings │ │ ├── __init__.py │ │ ├── base.py │ │ ├── ci_tests.py │ │ ├── local.py │ │ ├── prod.py │ │ └── stage.py │ ├── urls.py │ └── wsgi.py ├── aws_utils │ ├── __init__.py │ └── aws_secrets.py ├── docker │ ├── .env.template │ ├── app │ │ ├── Dockerfile │ │ ├── entrypoint.sh │ │ ├── start-celery-worker.sh │ │ ├── start-dev-server.sh │ │ └── start-prod-server.sh │ ├── broker │ │ └── custom.conf │ ├── db │ │ └── psql-init │ │ │ └── db.sql │ └── docker-compose.yml ├── manage.py ├── requirements │ ├── base.txt │ └── prod.txt └── users │ ├── __init__.py │ ├── admin.py │ ├── apps.py │ ├── management │ ├── __init__.py │ └── commands │ │ ├── __init__.py │ │ └── run_celery_test_task.py │ ├── migrations │ ├── 0001_initial.py │ └── __init__.py │ ├── models.py │ ├── tasks.py │ ├── tests.py │ └── views.py ├── cdk.context.json ├── cdk.json ├── my_django_app ├── __init__.py ├── backend_workers_stack.py ├── database_stack.py ├── deployment_stage.py ├── dns_route_to_alb_stack.py ├── external_secrets_stack.py ├── my_django_app_stack.py ├── network_stack.py ├── pipeline_stack.py ├── queues_stack.py └── static_files_stack.py ├── requirements-dev.txt ├── requirements.txt ├── scripts ├── run_cmd.py ├── set_env_vars.sh └── set_parameters.py ├── source.bat └── tests ├── __init__.py └── unit ├── __init__.py └── test_my_django_app_stack.py /.env.template: -------------------------------------------------------------------------------- 1 | CDK_DEFAULT_ACCOUNT=123456789123 2 | CDK_DEFAULT_REGION=us-east-1 3 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | *.swp 2 | package-lock.json 3 | __pycache__ 4 | .pytest_cache 5 | backend/.env 6 | .venv 7 | .venv* 8 | .idea 9 | *.egg-info 10 | *.sqlite3 11 | .env 12 | .env.stage 13 | .env.prod 14 | 15 | # CDK asset staging directory 16 | .cdk.staging 17 | cdk.out 18 | 19 | # Local Secrets and Parameters 20 | .secrets.json 21 | .dockerhub_creds.json 22 | .parameters.json 23 | 24 | # log files 25 | *.log 26 | -------------------------------------------------------------------------------- /.parameters.template.json: -------------------------------------------------------------------------------- 1 | { 2 | "/Github/Connection": "arn:aws:codestar-connections:us-east-2:111111111111:connection/1ae38fa9-21b5-4a7d-91a0-c653a6e1fd05", 3 | "/MyDjangoAppStaging/CertificateArn": "arn:aws:acm:us-east-1:111111111111:certificate/1cdd512b-fc00-4c62-9e93-123c658f4e1b", 4 | "/MyDjangoAppProduction/CertificateArn": "arn:aws:acm:us-east-1:111111111111:certificate/1cdd512b-fc00-4c62-9e93-123c658f4e1b" 5 | } -------------------------------------------------------------------------------- /.secrets.template.json: -------------------------------------------------------------------------------- 1 | { 2 | "/MyDjangoAppPipeline/DockerHubSecret": "file:.dockerhub_creds.json", 3 | "/MyDjangoAppStaging/DjangoSecretKey": "YourSuperSecureDjangoSecret", 4 | "/MyDjangoAppStaging/AwsApiKeyId": "YourAwsApiKeyID", 5 | "/MyDjangoAppStaging/AwsApiKeySecret": "YourAwsApiKeySecret", 6 | "/MyDjangoAppProduction/DjangoSecretKey": "YourSuperSecureDjangoSecret", 7 | "/MyDjangoAppProduction/AwsApiKeyId": "YourAwsApiKeyID", 8 | "/MyDjangoAppProduction/AwsApiKeySecret": "YourAwsApiKeySecret" 9 | } 
-------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Scalable Django Apps 2 | A sample project for auto-scalable Django apps, ready to be deployed in AWS with Docker and CDK. 3 | 4 | 5 | At the root of this repository you will find a CDK (v2) project. 6 | 7 | The Django project, together with more details about how to set up the development environment, is inside the `app/` directory. 8 | 9 | 10 | ## Architecture Features 11 | * A load-balanced, highly-available, auto-scalable Django app running in Amazon ECS+Fargate (a.k.a. serverless containers). 12 | * Fully-managed queues and auto-scalable workers, using Amazon SQS and Celery workers running in Amazon ECS+Fargate. 13 | * A fully-managed serverless database using Amazon Aurora Serverless. 14 | * Static files are stored in a private S3 bucket and served through CloudFront. 15 | * Private isolated subnets and VPC endpoints are used for improved security and performance, also allowing the NAT gateways to be removed. 16 | * Sensitive data such as API keys or passwords is stored in AWS Secrets Manager. Other parameters are stored in AWS SSM Parameter Store. 17 | 18 | ## DevOps 19 | * IaC support using CDK v2 20 | * CI/CD using CDK Pipelines 21 | * Docker support for local development with `docker-compose`. 22 | 23 | 24 | ## CDK 25 | 26 | The entry point for the CDK project is `app.py`. 27 | Other stacks and stages are defined in `my_django_app/`. 28 | 29 | ## Prerequisites to work with CDK 30 | - Python 3.6 or later, including pip and virtualenv. 31 | 32 | - Node.js 10.13.0 or later 33 | 34 | - Install the AWS CLI v2: 35 | 36 | https://docs.aws.amazon.com/cli/latest/userguide/install-cliv2-mac.html 37 | 38 | - Set up the API keys of an administrator user and set the region by running: 39 | 40 | `aws configure` 41 | 42 | CDK requires API keys with enough permissions to create and destroy resources in your AWS account. Hence, it's recommended to create a user with the `Administrator` role. 43 | 44 | - Install the CDK CLI: 45 | 46 | `npm install -g aws-cdk` 47 | 48 | ### Working with CDK 49 | To work with CDK, first activate the virtualenv located at `.venv` and install the dependencies. 50 | 51 | ```shell 52 | $ source .venv/bin/activate 53 | (.venv) $ pip install -r requirements.txt 54 | (.venv) $ pip install -r requirements-dev.txt 55 | ``` 56 | 57 | ### Bootstrapping 58 | Using CDK Pipelines requires an extra command, [cdk bootstrap](https://docs.aws.amazon.com/cdk/latest/guide/cli.html#cli-bootstrap), to provision resources used by CDK during deployment. 59 | This command needs to be executed once per account/region combination as: `cdk bootstrap ACCOUNT-NUMBER/REGION`. 60 | 61 | ```shell 62 | (.venv) $ cdk bootstrap aws://123456789123/us-east-1 63 | ⏳ Bootstrapping environment aws://123456789123/us-east-1... 64 | ... 65 | ✅ Environment aws://123456789123/us-east-1 bootstrapped 66 | ``` 67 | 68 | ### Deploying to AWS 69 | #### Set up CDK environment variables 70 | The required environment variables for CDK can be found in `.env.template`. 71 | You can either set them manually or, if using Linux, use the helper script `./scripts/set_env_vars.sh`: 72 | ```shell 73 | $ cp .env.template .env 74 | # Edit .env and set your values 75 | $ . ./scripts/set_env_vars.sh 76 | ``` 77 | 78 | #### GitHub connection 79 | Create a CodeStar connection in the [AWS CodeSuite Console](https://console.aws.amazon.com/codesuite/settings/connections) and link it to the GitHub repo so it can be used to trigger CI/CD pipelines. 80 | 81 | The connection ARN must be stored as a parameter to be used later. 82 | 
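One way to store it (a minimal sketch using the AWS CLI directly rather than the repo's helper scripts; the parameter name matches the key in `.parameters.template.json` and `app.py`, and the ARN value shown is a placeholder):

```shell
# Store the CodeStar connection ARN under the parameter name expected by the pipeline
aws ssm put-parameter \
    --name "/Github/Connection" \
    --type String \
    --value "arn:aws:codestar-connections:REGION:ACCOUNT_ID:connection/CONNECTION_ID"
```

Alternatively, the same parameter can be set through the `scripts/set_parameters.py` flow described below.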
83 | #### Parameters 84 | Parameters containing non-sensitive data are stored in AWS Systems Manager Parameter Store. 85 | The required parameters are listed in `.parameters.template.json`. 86 | These parameters can be manually created from the AWS Console, or using the helper script `scripts/set_parameters.py`: 87 | ```shell 88 | (.venv) $ python ./scripts/set_parameters.py .parameters.json 89 | Settings parameters in AWS.. 90 | ... # Parameters or Errors will be printed out 91 | Finished. 92 | ``` 93 | 94 | #### Secrets 95 | Sensitive information is stored encrypted in AWS Secrets Manager. 96 | 97 | The required secrets are listed in `.secrets.template.json`. 98 | These secrets can be manually created from the AWS Console, or using the helper script `scripts/set_parameters.py` with the `--secret` option: 99 | ```shell 100 | (.venv) $ cp .secrets.template.json .secrets.json 101 | # Replace the placeholders with your secret values 102 | (.venv) $ python ./scripts/set_parameters.py --secret .secrets.json 103 | Settings parameters in AWS.. 104 | ... # Parameters or Errors will be printed out 105 | Finished. 106 | ``` 107 | 108 | #### Deploying 109 | IMPORTANT: Before deploying the pipeline you need to set the secrets and parameters described above with your own values. You also need to set your own domain and SSL certificate. 110 | 111 | Now you can deploy the CI/CD pipeline: 112 | ```shell 113 | $ cdk deploy MyDjangoAppPipeline 114 | ``` 115 | CDK will ask for confirmation before creating roles, policies and security groups. Enter 'y' for yes and the deployment process will start. You will see the deployment progress in your shell, and once it's finished you will see the pipeline in the CodePipeline panel of the AWS Console. 116 | 117 | After the pipeline is deployed it will be triggered and all the stacks will be created. You can monitor the stack creation in the CloudFormation panel of the AWS Console. 118 | 119 | This is the only time you need to run the deploy command. The next time you commit any changes to the infrastructure code, or the app code, the pipeline will update the infrastructure and the ECS services as needed. 120 | 121 | # License 122 | You are free to use, copy or distribute this code. Knowledge is meant to be shared :) 123 | 124 | THIS SOFTWARE COMES WITH NO WARRANTIES, USE AT YOUR OWN RISK 125 | 126 | Enjoy! 
-------------------------------------------------------------------------------- /app.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | import os 3 | import aws_cdk as cdk 4 | from aws_cdk import ( 5 | Environment, 6 | ) 7 | from my_django_app.pipeline_stack import MyDjangoAppPipelineStack 8 | 9 | 10 | app = cdk.App() 11 | pipeline = MyDjangoAppPipelineStack( 12 | app, 13 | "MyDjangoAppPipeline", 14 | repository="marianobrc/scalable-django-apps", 15 | branch="master", 16 | ssm_gh_connection_param="/Github/Connection", 17 | env=Environment( 18 | account=os.getenv('CDK_DEFAULT_ACCOUNT'), 19 | region=os.getenv('CDK_DEFAULT_REGION') 20 | ), 21 | ) 22 | app.synth() 23 | -------------------------------------------------------------------------------- /app/README.md: -------------------------------------------------------------------------------- 1 | This is a template project for Dockerized Django apps. 2 | 3 | ## Prerequisites 4 | 5 | - [X] Docker & Docker Compose - [instructions](https://docs.docker.com/compose/install/) 6 | 7 | 8 | Note: Windows support has not been tested, but it should work if the environment variables are set properly. 9 | 10 | ## Development environment with Docker & Docker Compose 11 | 12 | All the files required to run Docker & docker-compose are in the `docker/` directory, grouped by service. 13 | 14 | 15 | The system is composed of the following services (see `docker-compose.yml`): 16 | * `db`: The PostgreSQL database, used by the Django app and eventually by the workers. 17 | * `app`: The Django app. 18 | * `broker`: An SQS-compatible broker holding the queues and managing messages between the app and the workers. 19 | * `worker-default`: A Celery worker processing messages from the default queue. 20 | 21 | `docker-compose` is used as the local orchestrator. 22 | Each service runs in a Docker container, and they all share a common Docker network. 23 | 24 | ### Environment variables for local development 25 | First, create a local file for environment variables called `.env` in the `docker/` folder (next to `docker-compose.yml`). 26 | The needed variables can be found in `.env.template`. 27 | These environment variables will be automatically loaded by docker-compose while starting the containers for the different services.
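A minimal sketch of that setup (assuming the commands are run from the `app/` directory of the repository):

```shell
# Create the local env file next to docker-compose.yml and adjust the values as needed
cd docker
cp .env.template .env
```

The values in `.env.template` are defaults intended for local development only (a local database user and fake AWS credentials), so in most cases they can be used as-is.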
28 | 29 | ### Running services locally 30 | All services required to run the project have been dockerized and can be initiated with the generic `docker-compose up`, as seen below: 31 | ```shell 32 | $ COMPOSE_DOCKER_CLI_BUILD=1 DOCKER_BUILDKIT=1 docker-compose up -d --build # Use Docker Buildkit for multi-stage builds 33 | Creating network "docker_default" with the default driver 34 | Creating volume "docker_postgres_data" with default driver 35 | Building app 36 | [+] Building 4.0s (17/17) FINISHED 37 | => [internal] load build definition from Dockerfile 0.5s 38 | => => transferring dockerfile: 38B 0.0s 39 | => [internal] load .dockerignore 2.2s 40 | => => transferring context: 2B 0.0s 41 | => [internal] load metadata for docker.io/library/python:3.10 0.0s 42 | => [base 1/10] FROM docker.io/library/python:3.10 0.0s 43 | => [internal] load build context 0.3s 44 | => => transferring context: 276B 0.0s 45 | => CACHED [base 2/10] RUN addgroup --system web && adduser --system --ingroup web web 0.0s 46 | => CACHED [base 3/10] RUN apt-get update && apt-get install -y -q --no-install-recommends build-essential libpq-dev && apt-get purge -y --au 0.0s 47 | => CACHED [base 4/10] WORKDIR /home/web/code/ 0.0s 48 | => CACHED [base 5/10] COPY --chown=web:web ./requirements/base.txt requirements/base.txt 0.0s 49 | => CACHED [base 6/10] RUN pip install --no-cache-dir -r requirements/base.txt 0.0s 50 | => CACHED [base 7/10] COPY --chown=web:web ./docker/app/entrypoint.sh /usr/local/bin/entrypoint.sh 0.0s 51 | => CACHED [base 8/10] RUN chmod +x /usr/local/bin/entrypoint.sh 0.0s 52 | => CACHED [base 9/10] COPY --chown=web:web ./docker/app/start-celery-worker.sh /usr/local/bin/start-celery-worker.sh 0.0s 53 | => CACHED [base 10/10] RUN chmod +x /usr/local/bin/start-celery-worker.sh 0.0s 54 | => CACHED [dev 1/2] COPY --chown=web:web ./docker/app/start-dev-server.sh /usr/local/bin/start-dev-server.sh 0.0s 55 | => CACHED [dev 2/2] RUN chmod +x /usr/local/bin/start-dev-server.sh 0.0s 56 | => exporting to image 1.1s 57 | => => exporting layers 0.0s 58 | => => writing image sha256:c3f1d7aa49a371cd7f54f2925e745867cb054f3819e666d8051b569b1b5f725e 0.0s 59 | => => naming to docker.io/library/docker_app 0.0s 60 | Building worker-default 61 | [+] Building 2.5s (17/17) FINISHED 62 | => [internal] load build definition from Dockerfile 0.7s 63 | => => transferring dockerfile: 38B 0.0s 64 | => [internal] load .dockerignore 0.8s 65 | => => transferring context: 2B 0.0s 66 | => [internal] load metadata for docker.io/library/python:3.10 0.0s 67 | => [base 1/10] FROM docker.io/library/python:3.10 0.0s 68 | => [internal] load build context 0.3s 69 | => => transferring context: 276B 0.0s 70 | => CACHED [base 2/10] RUN addgroup --system web && adduser --system --ingroup web web 0.0s 71 | => CACHED [base 3/10] RUN apt-get update && apt-get install -y -q --no-install-recommends build-essential libpq-dev && apt-get purge -y --au 0.0s 72 | => CACHED [base 4/10] WORKDIR /home/web/code/ 0.0s 73 | => CACHED [base 5/10] COPY --chown=web:web ./requirements/base.txt requirements/base.txt 0.0s 74 | => CACHED [base 6/10] RUN pip install --no-cache-dir -r requirements/base.txt 0.0s 75 | => CACHED [base 7/10] COPY --chown=web:web ./docker/app/entrypoint.sh /usr/local/bin/entrypoint.sh 0.0s 76 | => CACHED [base 8/10] RUN chmod +x /usr/local/bin/entrypoint.sh 0.0s 77 | => CACHED [base 9/10] COPY --chown=web:web ./docker/app/start-celery-worker.sh /usr/local/bin/start-celery-worker.sh 0.0s 78 | => CACHED [base 10/10] RUN chmod +x 
/usr/local/bin/start-celery-worker.sh 0.0s 79 | => CACHED [dev 1/2] COPY --chown=web:web ./docker/app/start-dev-server.sh /usr/local/bin/start-dev-server.sh 0.0s 80 | => CACHED [dev 2/2] RUN chmod +x /usr/local/bin/start-dev-server.sh 0.0s 81 | => exporting to image 1.0s 82 | => => exporting layers 0.0s 83 | => => writing image sha256:c3f1d7aa49a371cd7f54f2925e745867cb054f3819e666d8051b569b1b5f725e 0.1s 84 | => => naming to docker.io/library/worker-default 0.0s 85 | Creating docker_db_1 ... done 86 | Creating docker_broker_1 ... done 87 | Creating docker_worker-default_1 ... done 88 | Creating docker_app_1 ... done 89 | ``` 90 | After a successful start you will see: 91 | * The Django app is available at `http://127.0.0.1:8000` (Port exposed at docker-compose) 92 | * A status check endpoint is available at `http://127.0.0.1:8000/status/` 93 | * Django Admin Panel is available at `http://127.0.0.1:8000/admin/` 94 | * An SQS Monitoring UI is available at `http://127.0.0.1:9325/` 95 | 96 | 97 | #### Automatic actions executed on services start: 98 | * `db`: At volume creation, the script /db.sql is ran to create a PostgreSQL user and database. 99 | * `app`: 100 | * There is an entry point script to check and wait until the db is ready. It's normal to have three or four 101 | retries the first time until the db is ready to accept connection. 102 | * Migrations are applied running `python manage.py migrate`. 103 | * The development server is started running `python manage.py runserver 0.0.0.0:8000`. 104 | * `broker`: The default queue is initialized. 105 | * `worker-default`: The celery worker tries to connect to the broker. In case of failure it retries applying a back-off policy (in 2s, in 4s, in 8s..). It's normal to have two or three retries until the broker starts accepting connections. 106 | 107 | ### View the logs 108 | The `docker-compose up` command aggregates the output of each container. When the command exits, all containers are stopped. 109 | Running `docker-compose up -d` starts the containers in the background and leaves them running. 110 | 111 | #### How do I see the logs for a single service? 112 | Run `docker-compose logs [-f] ` 113 | ```shell 114 | $ docker-compose logs -f app 115 | Attaching to docker_app_1 116 | app_1 | Trying to connect to database 'db_dev' on host 'db'.. 117 | app_1 | could not connect to server: Connection refused 118 | app_1 | Is the server running on host "db" (172.27.0.3) and accepting 119 | app_1 | TCP/IP connections on port 5432? 120 | app_1 | 121 | app_1 | Postgres is unavailable - sleeping 122 | app_1 | Trying to connect to database 'db_dev' on host 'db'.. 123 | app_1 | Postgres is up - continuing... 124 | app_1 | Running migrations.. 125 | app_1 | Loading CELERY app with settings from app.settings.local 126 | app_1 | Operations to perform: 127 | app_1 | Apply all migrations: admin, auth, contenttypes, sessions, users 128 | app_1 | Running migrations: 129 | app_1 | Applying contenttypes.0001_initial... OK 130 | app_1 | Applying contenttypes.0002_remove_content_type_name... OK 131 | app_1 | Applying auth.0001_initial... OK 132 | app_1 | Applying auth.0002_alter_permission_name_max_length... OK 133 | app_1 | Applying auth.0003_alter_user_email_max_length... OK 134 | app_1 | Applying auth.0004_alter_user_username_opts... OK 135 | app_1 | Applying auth.0005_alter_user_last_login_null... OK 136 | app_1 | Applying auth.0006_require_contenttypes_0002... OK 137 | app_1 | Applying auth.0007_alter_validators_add_error_messages... 
OK 138 | app_1 | Applying auth.0008_alter_user_username_max_length... OK 139 | app_1 | Applying auth.0009_alter_user_last_name_max_length... OK 140 | app_1 | Applying auth.0010_alter_group_name_max_length... OK 141 | app_1 | Applying auth.0011_update_proxy_permissions... OK 142 | app_1 | Applying auth.0012_alter_user_first_name_max_length... OK 143 | app_1 | Applying users.0001_initial... OK 144 | app_1 | Applying admin.0001_initial... OK 145 | app_1 | Applying admin.0002_logentry_remove_auto_add... OK 146 | app_1 | Applying admin.0003_logentry_add_action_flag_choices... OK 147 | app_1 | Applying sessions.0001_initial... OK 148 | app_1 | Starting server.. 149 | app_1 | Loading CELERY app with settings from app.settings.local 150 | app_1 | Loading CELERY app with settings from app.settings.local 151 | app_1 | Watching for file changes with StatReloader 152 | app_1 | Watching for file changes with StatReloader 153 | app_1 | Performing system checks... 154 | app_1 | 155 | app_1 | System check identified no issues (0 silenced). 156 | app_1 | April 10, 2022 - 14:36:05 157 | app_1 | Django version 4.0.2, using settings 'app.settings.local' 158 | app_1 | Starting development server at http://0.0.0.0:8000/ 159 | app_1 | Quit the server with CONTROL-C. 160 | ``` 161 | 162 | ### Hot reloading 163 | * backend supports hot reloading. 164 | * local changes are synched with the containers via volumes. 165 | * celery workers don't support hot-reloading and they need to be restarted manually. 166 | 167 | ### Managing services & docker containers 168 | 169 | #### How do I reset the database? 170 | * Stop the services and delete the volumes: `docker-compose down --volumes` 171 | * Start the services again: `docker-compose up` 172 | 173 | #### How do I see the status of each service? 174 | Using docker-compose: 175 | `docker-compose ps` 176 | ```shell 177 | $ docker-compose ps 178 | Name Command State Ports 179 | -------------------------------------------------------------------------------------------------------------------------------- 180 | docker_app_1 entrypoint.sh start-dev-se ... Up 0.0.0.0:8000->8000/tcp 181 | docker_broker_1 /sbin/tini -- /opt/docker/ ... Up 0.0.0.0:9324->9324/tcp, 0.0.0.0:9325->9325/tcp 182 | docker_db_1 docker-entrypoint.sh postgres Up 0.0.0.0:5432->5432/tcp 183 | docker_worker-default_1 entrypoint.sh start-celery ... Up 184 | ``` 185 | Using docker: 186 | `docker ps` 187 | ```shell 188 | $ docker ps 189 | CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES 190 | 75e428f6ea24 docker_app "entrypoint.sh start…" 8 minutes ago Up 8 minutes 0.0.0.0:8000->8000/tcp docker_app_1 191 | 531d4131159c worker-default "entrypoint.sh start…" 8 minutes ago Up 8 minutes docker_worker-default_1 192 | d8e46eb8aa6b softwaremill/elasticmq-native "/sbin/tini -- /opt/…" 9 minutes ago Up 9 minutes 0.0.0.0:9324-9325->9324-9325/tcp docker_broker_1 193 | b88fecb94bab postgres:10 "docker-entrypoint.s…" 9 minutes ago Up 8 minutes 0.0.0.0:5432->5432/tcp docker_db_1 194 | ``` 195 | 196 | #### How do I run a command in a service container? 197 | Using docker-compose: 198 | `docker-compose exec ` 199 | ```shell 200 | # Open a bash shell inside the container 201 | $ docker-compose exec app bash 202 | web@75e428f6ea24:~/code$ 203 | ``` 204 | Using docker: 205 | `docker exec -it ` 206 | ```shell 207 | # docker_app_1 is the name of the running Docker container, found with docker ps. 
208 | $ docker exec -it docker_app_1 /bin/bash 209 | web@75e428f6ea24:~/code$ 210 | ``` 211 | 212 | #### How can I restart or stop a single service? 213 | Restarting can be useful if a specific service becomes unresponsive or gets into an unrecoverable error state. 214 | Using docker-compose: 215 | `docker-compose restart <service_name>` 216 | ```shell 217 | $ docker-compose restart worker-default 218 | Restarting docker_worker-default_1 ... done 219 | ``` 220 | 221 | Stopping the backend service can be useful if you want to run it locally for debugging. 222 | `docker-compose stop <service_name>` 223 | ```shell 224 | $ docker-compose stop app 225 | Stopping app_1 ... done 226 | ``` 227 | -------------------------------------------------------------------------------- /app/app/__init__.py: -------------------------------------------------------------------------------- 1 | # This will make sure the app is always imported when 2 | # Django starts so that shared_task will use this app. 3 | from .celery import app as celery_app 4 | 5 | __all__ = ('celery_app',) 6 | -------------------------------------------------------------------------------- /app/app/asgi.py: -------------------------------------------------------------------------------- 1 | """ 2 | ASGI config for app project. 3 | 4 | It exposes the ASGI callable as a module-level variable named ``application``. 5 | 6 | For more information on this file, see 7 | https://docs.djangoproject.com/en/4.0/howto/deployment/asgi/ 8 | """ 9 | 10 | import os 11 | 12 | from django.core.asgi import get_asgi_application 13 | 14 | os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'app.settings') 15 | 16 | application = get_asgi_application() 17 | -------------------------------------------------------------------------------- /app/app/celery.py: -------------------------------------------------------------------------------- 1 | """ 2 | In this file we define an instance of the Celery app. 3 | reference: https://docs.celeryproject.org/en/stable/django/first-steps-with-django.html 4 | """ 5 | from __future__ import absolute_import, unicode_literals 6 | 7 | import os 8 | 9 | from celery import Celery 10 | 11 | 12 | # set the default Django settings module for the 'celery' program. 13 | #os.environ.setdefault("DJANGO_SETTINGS_MODULE", "quickpay.settings.local") 14 | print(f"Loading CELERY app with settings from {os.getenv('DJANGO_SETTINGS_MODULE')}") 15 | app = Celery("app") 16 | 17 | # Using a string here means the worker doesn't have to serialize 18 | # the configuration object to child processes. 19 | # - namespace='CELERY' means all celery-related configuration keys 20 | # should have a `CELERY_` prefix. 21 | app.config_from_object("django.conf:settings", namespace="CELERY") 22 | #print(f"CELERY CONFIG:\n {app.conf.humanize(with_defaults=False, censored=True)}") 23 | # Load task modules from all registered Django app configs. 24 | app.autodiscover_tasks() 25 | -------------------------------------------------------------------------------- /app/app/settings/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/marianobrc/scalable-django-apps/d43a81d0efba150c47c20d39974a92250dbfae0a/app/app/settings/__init__.py -------------------------------------------------------------------------------- /app/app/settings/base.py: -------------------------------------------------------------------------------- 1 | """ 2 | Django settings for app project. 
3 | 4 | Generated by 'django-admin startproject' using Django 4.0.1. 5 | 6 | For more information on this file, see 7 | https://docs.djangoproject.com/en/4.0/topics/settings/ 8 | 9 | For the full list of settings and their values, see 10 | https://docs.djangoproject.com/en/4.0/ref/settings/ 11 | """ 12 | import os 13 | from pathlib import Path 14 | from distutils.util import strtobool 15 | 16 | # Build paths inside the project like this: BASE_DIR / 'subdir'. 17 | BASE_DIR = Path(__file__).resolve().parent.parent.parent 18 | 19 | 20 | # Quick-start development settings - unsuitable for production 21 | # todo: since this is a dev environment key, it is okay to check it into version control 22 | # although not the best thing to do. in production, the key is overridden. 23 | # See https://docs.djangoproject.com/en/4.0/howto/deployment/checklist/ 24 | 25 | # SECURITY WARNING: keep the secret key used in production secret! 26 | SECRET_KEY = 'django-insecure-d2%c9nrw#-5mifb*$ux4dvjr&2i6u1%f=v9bd!)^ic^zjt38gb' 27 | 28 | # SECURITY WARNING: don't run with debug turned on in production! 29 | DEBUG = True 30 | ALLOWED_HOSTS = ["*"] 31 | 32 | # Application definition 33 | DJANGO_APPS = [ 34 | 'django.contrib.admin', 35 | 'django.contrib.auth', 36 | 'django.contrib.contenttypes', 37 | 'django.contrib.sessions', 38 | 'django.contrib.messages', 39 | 'django.contrib.staticfiles', 40 | ] 41 | THIRD_PARTY_APPS = [ 42 | 43 | ] 44 | 45 | LOCAL_APPS = [ 46 | # Project apps go here 47 | 'users', 48 | ] 49 | # https://docs.djangoproject.com/en/dev/ref/settings/#installed-apps 50 | INSTALLED_APPS = DJANGO_APPS + THIRD_PARTY_APPS + LOCAL_APPS 51 | 52 | 53 | MIDDLEWARE = [ 54 | 'django.middleware.security.SecurityMiddleware', 55 | 'django.contrib.sessions.middleware.SessionMiddleware', 56 | 'django.middleware.common.CommonMiddleware', 57 | 'django.middleware.csrf.CsrfViewMiddleware', 58 | 'django.contrib.auth.middleware.AuthenticationMiddleware', 59 | 'django.contrib.messages.middleware.MessageMiddleware', 60 | 'django.middleware.clickjacking.XFrameOptionsMiddleware', 61 | ] 62 | 63 | ROOT_URLCONF = 'app.urls' 64 | 65 | TEMPLATES = [ 66 | { 67 | 'BACKEND': 'django.template.backends.django.DjangoTemplates', 68 | 'DIRS': ['templates', 'common/templates', ], 69 | 'APP_DIRS': True, 70 | 'OPTIONS': { 71 | 'context_processors': [ 72 | 'django.template.context_processors.debug', 73 | 'django.template.context_processors.request', 74 | 'django.contrib.auth.context_processors.auth', 75 | 'django.contrib.messages.context_processors.messages', 76 | ], 77 | }, 78 | }, 79 | ] 80 | 81 | WSGI_APPLICATION = 'app.wsgi.application' 82 | 83 | # Custom User model 84 | # https://docs.djangoproject.com/en/4.0/ref/settings/#auth-user-model 85 | AUTH_USER_MODEL = "users.CustomUser" 86 | 87 | # Database 88 | # https://docs.djangoproject.com/en/4.0/ref/settings/#databases 89 | 90 | DATABASES = { 91 | "default": { 92 | "ENGINE": "django.db.backends.postgresql", 93 | "NAME": os.environ.get("DB_NAME"), 94 | "USER": os.environ.get("DB_USER"), 95 | "PASSWORD": os.environ.get("DB_PASSWORD"), 96 | "HOST": os.environ.get("DB_HOST"), 97 | "PORT": os.environ.get("DB_PORT"), 98 | } 99 | } 100 | 101 | 102 | # Password validation 103 | # https://docs.djangoproject.com/en/4.0/ref/settings/#auth-password-validators 104 | 105 | AUTH_PASSWORD_VALIDATORS = [ 106 | { 107 | 'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator', 108 | }, 109 | { 110 | 'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator', 111 | }, 
112 | { 113 | 'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator', 114 | }, 115 | { 116 | 'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator', 117 | }, 118 | ] 119 | 120 | 121 | # Internationalization 122 | # https://docs.djangoproject.com/en/4.0/topics/i18n/ 123 | 124 | LANGUAGE_CODE = 'en-us' 125 | 126 | TIME_ZONE = 'UTC' 127 | 128 | USE_I18N = True 129 | 130 | USE_L10N = True 131 | 132 | USE_TZ = True 133 | 134 | 135 | # Static files (CSS, JavaScript, Images) 136 | # https://docs.djangoproject.com/en/4.0/howto/static-files/ 137 | 138 | STATIC_URL = '/static/' 139 | 140 | # Default primary key field type 141 | # https://docs.djangoproject.com/en/4.0/ref/settings/#default-auto-field 142 | 143 | DEFAULT_AUTO_FIELD = 'django.db.models.BigAutoField' 144 | 145 | # Logging settings. Send all the logs to the console. 146 | LOGGING = { 147 | 'version': 1, 148 | 'disable_existing_loggers': False, 149 | 'handlers': { 150 | 'console': { 151 | 'class': 'logging.StreamHandler', 152 | }, 153 | }, 154 | 'root': { 155 | 'handlers': ['console'], 156 | 'level': 'INFO', 157 | }, 158 | } 159 | 160 | # AWS Settings 161 | AWS_ACCOUNT_ID = os.getenv("AWS_ACCOUNT_ID", "000000000000") 162 | AWS_REGION_NAME = os.getenv("AWS_REGION_NAME", "us-east-1") 163 | AWS_ACCESS_KEY_ID = os.environ.get("AWS_ACCESS_KEY_ID", "FAKEABCDEFGHIJKLMNOP") 164 | AWS_SECRET_ACCESS_KEY = os.environ.get("AWS_SECRET_ACCESS_KEY", "FAKE7NiynG+TogH8Nj+P9nlE73sq3") 165 | 166 | # Celery settings 167 | # Check celery good practices: https://denibertovic.com/posts/celery-best-practices/ 168 | CELERY_BROKER_URL = os.getenv("CELERY_BROKER_URL", "sqs://broker:9324") 169 | # Pass only json serializable arguments to tasks 170 | CELERY_TASK_SERIALIZER = "json" 171 | # We ignore the celery task "result" as we don't need it. 172 | # We keep track of status and/or results in our own DB models as necessary. 173 | CELERY_TASK_IGNORE_RESULT = True 174 | # Queues and routes for celery tasks 175 | CELERY_TASK_DEFAULT_QUEUE = "default" 176 | SQS_DEFAULT_QUEUE_URL = f"http://broker:9324/000000000000/{CELERY_TASK_DEFAULT_QUEUE}" 177 | CELERY_BROKER_TRANSPORT = "sqs" 178 | CELERY_BROKER_TRANSPORT_OPTIONS = { 179 | "region": AWS_REGION_NAME, 180 | "visibility_timeout": 3600, 181 | "polling_interval": 5, 182 | 'predefined_queues': { # We use an SQS queue created previously with CDK 183 | CELERY_TASK_DEFAULT_QUEUE: { 184 | 'url': SQS_DEFAULT_QUEUE_URL # Important: Set the queue URL with https:// here when using VPC endpoints 185 | } 186 | } 187 | } 188 | # This setting makes the tasks to run synchronously. Useful for local debugging and CI tests. 
189 | CELERY_TASK_ALWAYS_EAGER = strtobool(os.getenv("CELERY_TASK_ALWAYS_EAGER", "False")) 190 | -------------------------------------------------------------------------------- /app/app/settings/ci_tests.py: -------------------------------------------------------------------------------- 1 | """ CI tests Settings """ 2 | 3 | 4 | DEBUG = True 5 | EMAIL_BACKEND = "django.core.mail.backends.dummy.EmailBackend" 6 | -------------------------------------------------------------------------------- /app/app/settings/local.py: -------------------------------------------------------------------------------- 1 | """ Local development Settings """ 2 | from .base import * 3 | 4 | 5 | EMAIL_BACKEND = "django.core.mail.backends.dummy.EmailBackend" 6 | -------------------------------------------------------------------------------- /app/app/settings/prod.py: -------------------------------------------------------------------------------- 1 | """ Production Settings """ 2 | from .stage import * 3 | 4 | 5 | # Set to your Domain here 6 | ALLOWED_HOSTS = [ 7 | "scalabledjango.com", 8 | "www.scalabledjango.com", 9 | ] 10 | # The ALB uses the IP while calling the health check endpoint 11 | if os.environ.get("AWS_EXECUTION_ENV"): 12 | ALLOWED_HOSTS.append(gethostbyname(gethostname())) 13 | -------------------------------------------------------------------------------- /app/app/settings/stage.py: -------------------------------------------------------------------------------- 1 | """ Staging Settings """ 2 | from socket import gethostname, gethostbyname 3 | from .base import * 4 | 5 | DEBUG = strtobool(os.getenv("DJANGO_DEBUG", "False")) 6 | # Set to your Domain here 7 | ALLOWED_HOSTS = [ 8 | "stage.scalabledjango.com", 9 | ] 10 | # The ALB uses the IP while calling the health check endpoint 11 | if os.environ.get("AWS_EXECUTION_ENV"): 12 | ALLOWED_HOSTS.append(gethostbyname(gethostname())) 13 | 14 | print("Loading env vars..") 15 | # AWS Settings 16 | AWS_ACCOUNT_ID = os.getenv("AWS_ACCOUNT_ID") 17 | AWS_REGION_NAME = os.getenv("AWS_REGION_NAME") 18 | AWS_ACCESS_KEY_ID = os.environ.get("AWS_ACCESS_KEY_ID") 19 | AWS_SECRET_ACCESS_KEY = os.environ.get("AWS_SECRET_ACCESS_KEY") 20 | 21 | # Static files and Media are stored in S3 and served with CloudFront 22 | DEFAULT_FILE_STORAGE = 'storages.backends.s3boto3.S3Boto3Storage' 23 | STATICFILES_STORAGE = 'storages.backends.s3boto3.S3StaticStorage' 24 | AWS_STORAGE_BUCKET_NAME = os.getenv("AWS_STATIC_FILES_BUCKET_NAME") 25 | AWS_S3_CUSTOM_DOMAIN = os.getenv("AWS_STATIC_FILES_CLOUDFRONT_URL") 26 | print(f"Static files served from:{AWS_S3_CUSTOM_DOMAIN}") 27 | 28 | # Redirects all non-HTTPS requests to HTTPS. 
29 | SECURE_PROXY_SSL_HEADER = ("HTTP_X_FORWARDED_PROTO", "https") 30 | SECURE_SSL_REDIRECT = False # The TLS connection is terminated at the load balancer 31 | 32 | # Override celery settings for SQS when running in AWS 33 | CELERY_BROKER_URL = "sqs://" # Let celery get credentials from env vars or from queue settings 34 | SQS_DEFAULT_QUEUE_URL = os.getenv("SQS_DEFAULT_QUEUE_URL") 35 | CELERY_TASK_DEFAULT_QUEUE = SQS_DEFAULT_QUEUE_URL.split('/')[-1] # Get the queue name 36 | CELERY_BROKER_TRANSPORT_OPTIONS = { 37 | "region": AWS_REGION_NAME, 38 | "visibility_timeout": 3600, 39 | "polling_interval": 5, 40 | 'predefined_queues': { # We use an SQS queue created previously with CDK 41 | CELERY_TASK_DEFAULT_QUEUE: { 42 | 'url': SQS_DEFAULT_QUEUE_URL # Important: Set the queue URL with https:// here when using VPC endpoints 43 | } 44 | } 45 | } -------------------------------------------------------------------------------- /app/app/urls.py: -------------------------------------------------------------------------------- 1 | """app URL Configuration 2 | 3 | The `urlpatterns` list routes URLs to views. For more information please see: 4 | https://docs.djangoproject.com/en/4.0/topics/http/urls/ 5 | Examples: 6 | Function views 7 | 1. Add an import: from my_app import views 8 | 2. Add a URL to urlpatterns: path('', views.home, name='home') 9 | Class-based views 10 | 1. Add an import: from other_app.views import Home 11 | 2. Add a URL to urlpatterns: path('', Home.as_view(), name='home') 12 | Including another URLconf 13 | 1. Import the include() function: from django.urls import include, path 14 | 2. Add a URL to urlpatterns: path('blog/', include('blog.urls')) 15 | """ 16 | from django.contrib import admin 17 | from django.urls import path 18 | from django.views.generic import View 19 | from django.http import JsonResponse 20 | 21 | 22 | # status endpoint for health checks 23 | class StatusView(View): 24 | def get(self, request, *args, **kwargs): 25 | return JsonResponse({"status": "OK"}, status=200) 26 | 27 | 28 | urlpatterns = [ 29 | # A status endpoint for health-checks 30 | path("status/", view=StatusView.as_view(), name="status"), 31 | # The django back-office interface 32 | path('admin/', admin.site.urls), 33 | ] 34 | -------------------------------------------------------------------------------- /app/app/wsgi.py: -------------------------------------------------------------------------------- 1 | """ 2 | WSGI config for app project. 3 | 4 | It exposes the WSGI callable as a module-level variable named ``application``. 
5 | 6 | For more information on this file, see 7 | https://docs.djangoproject.com/en/4.0/howto/deployment/wsgi/ 8 | """ 9 | 10 | import os 11 | 12 | from django.core.wsgi import get_wsgi_application 13 | 14 | os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'app.settings.local') 15 | 16 | application = get_wsgi_application() 17 | -------------------------------------------------------------------------------- /app/aws_utils/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/marianobrc/scalable-django-apps/d43a81d0efba150c47c20d39974a92250dbfae0a/app/aws_utils/__init__.py -------------------------------------------------------------------------------- /app/aws_utils/aws_secrets.py: -------------------------------------------------------------------------------- 1 | import boto3 2 | import base64 3 | from botocore.exceptions import ClientError 4 | 5 | 6 | def get_secret(secret_name, region_name, aws_access_key_id=None, aws_secret_access_key=None): 7 | # Create a Secrets Manager client 8 | session = boto3.session.Session( 9 | aws_access_key_id=aws_access_key_id, 10 | aws_secret_access_key=aws_secret_access_key 11 | ) 12 | client = session.client( 13 | service_name='secretsmanager', 14 | region_name=region_name 15 | ) 16 | 17 | # We only handle the specific exceptions for the 'GetSecretValue' API. 18 | # See https://docs.aws.amazon.com/secretsmanager/latest/apireference/API_GetSecretValue.html 19 | try: 20 | get_secret_value_response = client.get_secret_value( 21 | SecretId=secret_name 22 | ) 23 | except ClientError as e: 24 | if e.response['Error']['Code'] == 'DecryptionFailureException': 25 | # Secrets Manager can't decrypt the protected secret text using the provided KMS key. 26 | # Deal with the exception here, and/or rethrow at your discretion. 27 | raise e 28 | elif e.response['Error']['Code'] == 'InternalServiceErrorException': 29 | # An error occurred on the server side. 30 | # Deal with the exception here, and/or rethrow at your discretion. 31 | raise e 32 | elif e.response['Error']['Code'] == 'InvalidParameterException': 33 | # You provided an invalid value for a parameter. 34 | # Deal with the exception here, and/or rethrow at your discretion. 35 | raise e 36 | elif e.response['Error']['Code'] == 'InvalidRequestException': 37 | # You provided a parameter value that is not valid for the current state of the resource. 38 | # Deal with the exception here, and/or rethrow at your discretion. 39 | raise e 40 | elif e.response['Error']['Code'] == 'ResourceNotFoundException': 41 | # We can't find the resource that you asked for. 42 | # Deal with the exception here, and/or rethrow at your discretion. 43 | raise e 44 | else: 45 | # Decrypts secret using the associated KMS CMK. 46 | # Depending on whether the secret is a string or binary, one of these fields will be populated. 
47 | if 'SecretString' in get_secret_value_response: 48 | return get_secret_value_response['SecretString'] 49 | else: 50 | return base64.b64decode(get_secret_value_response['SecretBinary']) 51 | -------------------------------------------------------------------------------- /app/docker/.env.template: -------------------------------------------------------------------------------- 1 | DJANGO_SETTINGS_MODULE=app.settings.local 2 | DB_NAME=db_dev 3 | DB_USER=dev 4 | DB_PASSWORD=S@mEP455w0rd 5 | DB_HOST=db 6 | DB_PORT=5432 7 | POSTGRES_PASSWORD=postgres 8 | AWS_ACCOUNT_ID=000000000000 9 | AWS_REGION_NAME=us-east-1 10 | AWS_ACCESS_KEY_ID=FAKEABCDEFGHIJKLMNOP 11 | AWS_SECRET_ACCESS_KEY=FAKE7NiynG+TogH8Nj+P9nlE73sq3 12 | CELERY_BROKER_URL=sqs://broker:9324 13 | CELERY_TASK_ALWAYS_EAGER=False 14 | -------------------------------------------------------------------------------- /app/docker/app/Dockerfile: -------------------------------------------------------------------------------- 1 | # Base off the official python image 2 | # Define a common stage for dev and prod images called base 3 | FROM python:3.10 as base 4 | # Set environment variables 5 | ENV PYTHONDONTWRITEBYTECODE 1 6 | ENV PYTHONUNBUFFERED 1 7 | # Create a user to avoid running containers as root in production 8 | RUN addgroup --system web \ 9 | && adduser --system --ingroup web web 10 | # Install os-level dependencies (as root) 11 | RUN apt-get update && apt-get install -y -q --no-install-recommends \ 12 | # dependencies for building Python packages 13 | build-essential \ 14 | # postgress client (psycopg2) dependencies 15 | libpq-dev \ 16 | # cleaning up unused files to reduce the image size 17 | && apt-get purge -y --auto-remove -o APT::AutoRemove::RecommendsImportant=false \ 18 | && rm -rf /var/lib/apt/lists/* 19 | # Switch to the non-root user 20 | USER web 21 | # Create a directory for the source code and use it as base path 22 | WORKDIR /home/web/code/ 23 | # Copy the python depencencies list for pip 24 | COPY --chown=web:web ./requirements/base.txt requirements/base.txt 25 | # Switch to the root user temporary, to grant execution permissions. 26 | USER root 27 | # Install python packages at system level 28 | RUN pip install --no-cache-dir -r requirements/base.txt 29 | # Copy entrypoint script which waits for the db to be ready 30 | COPY --chown=web:web ./docker/app/entrypoint.sh /usr/local/bin/entrypoint.sh 31 | RUN chmod +x /usr/local/bin/entrypoint.sh 32 | # Copy the scripts that starts the default worker 33 | COPY --chown=web:web ./docker/app/start-celery-worker.sh /usr/local/bin/start-celery-worker.sh 34 | RUN chmod +x /usr/local/bin/start-celery-worker.sh 35 | USER web 36 | # This script will run before every command executed in the container 37 | ENTRYPOINT ["entrypoint.sh"] 38 | 39 | 40 | # Define an image for local development. Inherits common packages from the base stage. 41 | FROM base as dev 42 | # Copy the scripts that starts the development application server (runserver) 43 | COPY --chown=web:web ./docker/app/start-dev-server.sh /usr/local/bin/start-dev-server.sh 44 | USER root 45 | RUN chmod +x /usr/local/bin/start-dev-server.sh 46 | USER web 47 | # The development server starts by default when the container starts 48 | CMD ["start-dev-server.sh"] 49 | 50 | 51 | # Define an image for production. Inherits common packages from the base stage. 
52 | FROM base as prod 53 | # Install extra packages required in production 54 | USER root 55 | COPY --chown=web:web ./requirements/prod.txt requirements/prod.txt 56 | RUN pip install --no-cache-dir -r requirements/prod.txt 57 | # Copy the script that starts the production application server (gunicorn) 58 | COPY --chown=web:web ./docker/app/start-prod-server.sh /usr/local/bin/start-prod-server.sh 59 | RUN chmod +x /usr/local/bin/start-prod-server.sh 60 | USER web 61 | # Copy the source code of our django app to the working directoy 62 | COPY --chown=web:web . ./ 63 | # The production server starts by default when the container starts 64 | CMD ["start-prod-server.sh"] 65 | -------------------------------------------------------------------------------- /app/docker/app/entrypoint.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # When starting the django app container, we need to wait until the postgress DB is ready to receive connections 3 | # docker-compose "depends_on: - db" checks the container started, but is not enough to check that the database is ready to take connections 4 | # This script also accepts a command to be executed after the DB is ready (i.e. migrate, runserver or a script..) 5 | function postgres_ready(){ 6 | python << END 7 | import sys 8 | import psycopg2 9 | try: 10 | print("Trying to connect to database '$DB_NAME' on host '$DB_HOST'..") 11 | conn = psycopg2.connect(dbname="$DB_NAME", user="$DB_USER", password="$DB_PASSWORD", host="$DB_HOST") 12 | except psycopg2.OperationalError as e: 13 | print(e) 14 | sys.exit(-1) 15 | sys.exit(0) 16 | END 17 | } 18 | 19 | until postgres_ready; do 20 | >&2 echo "Postgres is unavailable - sleeping" 21 | sleep 1 22 | done 23 | 24 | >&2 echo "Postgres is up - continuing..." 25 | # Here the received command is executed 26 | exec "$@" 27 | -------------------------------------------------------------------------------- /app/docker/app/start-celery-worker.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | echo "CELERY_BROKER_URL: ${CELERY_BROKER_URL}" 3 | celery -A app worker -Q $1 -l info 4 | -------------------------------------------------------------------------------- /app/docker/app/start-dev-server.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | echo "Running migrations.." 3 | python manage.py migrate 4 | echo "Starting server.." 5 | python manage.py runserver 0.0.0.0:8000 6 | -------------------------------------------------------------------------------- /app/docker/app/start-prod-server.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | python manage.py migrate 3 | python manage.py collectstatic --noinput 4 | gunicorn app.wsgi --bind 0.0.0.0:8000 --timeout 60 --access-logfile - --error-logfile - 5 | -------------------------------------------------------------------------------- /app/docker/broker/custom.conf: -------------------------------------------------------------------------------- 1 | include classpath("application.conf") 2 | 3 | # What is the outside visible address of this ElasticMQ node 4 | # Used to create the queue URL (may be different from bind address!) 
5 | node-address { 6 | protocol = http 7 | host = "*" 8 | port = 9324 9 | context-path = "" 10 | } 11 | 12 | rest-sqs { 13 | enabled = true 14 | bind-port = 9324 15 | bind-hostname = "0.0.0.0" 16 | # Possible values: relaxed, strict 17 | sqs-limits = strict 18 | } 19 | 20 | rest-stats { 21 | enabled = true 22 | bind-port = 9325 23 | bind-hostname = "0.0.0.0" 24 | } 25 | 26 | # Should the node-address be generated from the bind port/hostname 27 | # Set this to true e.g. when assigning port automatically by using port 0. 28 | generate-node-address = false 29 | 30 | queues { 31 | default { 32 | defaultVisibilityTimeout = 3600 seconds 33 | delay = 0 seconds 34 | receiveMessageWait = 0 seconds 35 | fifo = false 36 | contentBasedDeduplication = false 37 | } 38 | } 39 | 40 | # Region and accountId which will be included in resource ids 41 | aws { 42 | region = us-east-1 43 | accountId = 000000000000 44 | } 45 | -------------------------------------------------------------------------------- /app/docker/db/psql-init/db.sql: -------------------------------------------------------------------------------- 1 | \set DB_NAME `echo "$DB_NAME"` 2 | \set DB_USER `echo "$DB_USER"` 3 | \set DB_PASSWORD `echo "$DB_PASSWORD"` 4 | CREATE USER :DB_USER WITH PASSWORD :'DB_PASSWORD'; 5 | CREATE DATABASE :DB_NAME; 6 | GRANT ALL PRIVILEGES ON DATABASE :DB_NAME TO :DB_USER; 7 | ALTER ROLE :DB_USER CREATEDB; 8 | -------------------------------------------------------------------------------- /app/docker/docker-compose.yml: -------------------------------------------------------------------------------- 1 | version: '3.5' 2 | 3 | services: 4 | db: 5 | image: postgres:10 6 | restart: always 7 | ports: 8 | - ${DB_PORT}:${DB_PORT} 9 | environment: 10 | - POSTGRES_PASSWORD=${POSTGRES_PASSWORD} 11 | - DB_NAME=${DB_NAME} 12 | - DB_USER=${DB_USER} 13 | - DB_PASSWORD=${DB_PASSWORD} 14 | volumes: 15 | - ./db/psql-init/db.sql:/docker-entrypoint-initdb.d/db.sql 16 | - postgres_data:/var/lib/postgresql/data/ 17 | 18 | app: 19 | build: &app-image 20 | context: ../ 21 | dockerfile: ./docker/app/Dockerfile 22 | target: dev 23 | restart: always 24 | depends_on: 25 | - db 26 | environment: &app-env 27 | - DJANGO_SETTINGS_MODULE=${DJANGO_SETTINGS_MODULE} 28 | - DB_HOST=${DB_HOST} 29 | - DB_PORT=${DB_PORT} 30 | - DB_NAME=${DB_NAME} 31 | - DB_USER=${DB_USER} 32 | - DB_PASSWORD=${DB_PASSWORD} 33 | - AWS_ACCOUNT_ID=${AWS_ACCOUNT_ID} 34 | - AWS_REGION_NAME=${AWS_REGION_NAME} 35 | - AWS_ACCESS_KEY_ID=${AWS_ACCESS_KEY_ID} 36 | - AWS_SECRET_ACCESS_KEY=${AWS_SECRET_ACCESS_KEY} 37 | - CELERY_BROKER_URL=${CELERY_BROKER_URL} 38 | - CELERY_TASK_ALWAYS_EAGER=${CELERY_TASK_ALWAYS_EAGER} 39 | volumes: &code 40 | - ../:/home/web/code 41 | ports: 42 | - 8000:8000 43 | 44 | broker: 45 | image: softwaremill/elasticmq-native 46 | ports: 47 | - 9324:9324 48 | - 9325:9325 49 | volumes: 50 | - ./broker/custom.conf:/opt/elasticmq.conf 51 | restart: always 52 | 53 | worker-default: 54 | build: *app-image 55 | image: worker-default 56 | restart: always 57 | command: start-celery-worker.sh default 58 | depends_on: 59 | - db 60 | - broker 61 | environment: *app-env 62 | volumes: *code 63 | 64 | volumes: 65 | postgres_data: 66 | -------------------------------------------------------------------------------- /app/manage.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | """Django's command-line utility for administrative tasks.""" 3 | import os 4 | import sys 5 | 6 | 7 | def main(): 8 | """Run 
administrative tasks.""" 9 | os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'app.settings') 10 | try: 11 | from django.core.management import execute_from_command_line 12 | except ImportError as exc: 13 | raise ImportError( 14 | "Couldn't import Django. Are you sure it's installed and " 15 | "available on your PYTHONPATH environment variable? Did you " 16 | "forget to activate a virtual environment?" 17 | ) from exc 18 | execute_from_command_line(sys.argv) 19 | 20 | 21 | if __name__ == '__main__': 22 | main() 23 | -------------------------------------------------------------------------------- /app/requirements/base.txt: -------------------------------------------------------------------------------- 1 | Django==4.0.2 2 | psycopg2==2.9 3 | celery[sqs]==5.2.3 4 | boto3==1.21.21 5 | django-storages==1.12.3 -------------------------------------------------------------------------------- /app/requirements/prod.txt: -------------------------------------------------------------------------------- 1 | gunicorn==20.1.0 2 | -------------------------------------------------------------------------------- /app/users/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/marianobrc/scalable-django-apps/d43a81d0efba150c47c20d39974a92250dbfae0a/app/users/__init__.py -------------------------------------------------------------------------------- /app/users/admin.py: -------------------------------------------------------------------------------- 1 | from django.contrib import admin 2 | from django.contrib.auth.admin import UserAdmin 3 | from .models import CustomUser 4 | 5 | 6 | admin.site.register(CustomUser, UserAdmin) 7 | -------------------------------------------------------------------------------- /app/users/apps.py: -------------------------------------------------------------------------------- 1 | from django.apps import AppConfig 2 | 3 | 4 | class UsersConfig(AppConfig): 5 | default_auto_field = 'django.db.models.BigAutoField' 6 | name = 'users' 7 | -------------------------------------------------------------------------------- /app/users/management/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/marianobrc/scalable-django-apps/d43a81d0efba150c47c20d39974a92250dbfae0a/app/users/management/__init__.py -------------------------------------------------------------------------------- /app/users/management/commands/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/marianobrc/scalable-django-apps/d43a81d0efba150c47c20d39974a92250dbfae0a/app/users/management/commands/__init__.py -------------------------------------------------------------------------------- /app/users/management/commands/run_celery_test_task.py: -------------------------------------------------------------------------------- 1 | from django.core.management import BaseCommand 2 | from ...tasks import test_task 3 | 4 | 5 | class Command(BaseCommand): 6 | help = "Trigger a test task with celery" 7 | 8 | def handle(self, *args, **options): 9 | print(f"Starting test task..") 10 | test_task.delay() 11 | print(f"Task msg sent successfully") 12 | -------------------------------------------------------------------------------- /app/users/migrations/0001_initial.py: -------------------------------------------------------------------------------- 1 | # Generated by Django 4.0.2 on 2022-02-11 19:32 2 | 3 | import 
django.contrib.auth.models 4 | import django.contrib.auth.validators 5 | from django.db import migrations, models 6 | import django.utils.timezone 7 | 8 | 9 | class Migration(migrations.Migration): 10 | 11 | initial = True 12 | 13 | dependencies = [ 14 | ('auth', '0012_alter_user_first_name_max_length'), 15 | ] 16 | 17 | operations = [ 18 | migrations.CreateModel( 19 | name='CustomUser', 20 | fields=[ 21 | ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), 22 | ('password', models.CharField(max_length=128, verbose_name='password')), 23 | ('last_login', models.DateTimeField(blank=True, null=True, verbose_name='last login')), 24 | ('is_superuser', models.BooleanField(default=False, help_text='Designates that this user has all permissions without explicitly assigning them.', verbose_name='superuser status')), 25 | ('username', models.CharField(error_messages={'unique': 'A user with that username already exists.'}, help_text='Required. 150 characters or fewer. Letters, digits and @/./+/-/_ only.', max_length=150, unique=True, validators=[django.contrib.auth.validators.UnicodeUsernameValidator()], verbose_name='username')), 26 | ('first_name', models.CharField(blank=True, max_length=150, verbose_name='first name')), 27 | ('last_name', models.CharField(blank=True, max_length=150, verbose_name='last name')), 28 | ('email', models.EmailField(blank=True, max_length=254, verbose_name='email address')), 29 | ('is_staff', models.BooleanField(default=False, help_text='Designates whether the user can log into this admin site.', verbose_name='staff status')), 30 | ('is_active', models.BooleanField(default=True, help_text='Designates whether this user should be treated as active. Unselect this instead of deleting accounts.', verbose_name='active')), 31 | ('date_joined', models.DateTimeField(default=django.utils.timezone.now, verbose_name='date joined')), 32 | ('groups', models.ManyToManyField(blank=True, help_text='The groups this user belongs to. 
A user will get all permissions granted to each of their groups.', related_name='user_set', related_query_name='user', to='auth.Group', verbose_name='groups')), 33 | ('user_permissions', models.ManyToManyField(blank=True, help_text='Specific permissions for this user.', related_name='user_set', related_query_name='user', to='auth.Permission', verbose_name='user permissions')), 34 | ], 35 | options={ 36 | 'verbose_name': 'user', 37 | 'verbose_name_plural': 'users', 38 | 'abstract': False, 39 | }, 40 | managers=[ 41 | ('objects', django.contrib.auth.models.UserManager()), 42 | ], 43 | ), 44 | ] 45 | -------------------------------------------------------------------------------- /app/users/migrations/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/marianobrc/scalable-django-apps/d43a81d0efba150c47c20d39974a92250dbfae0a/app/users/migrations/__init__.py -------------------------------------------------------------------------------- /app/users/models.py: -------------------------------------------------------------------------------- 1 | from django.db import models 2 | from django.contrib.auth.models import AbstractUser 3 | 4 | 5 | class CustomUser(AbstractUser): 6 | pass -------------------------------------------------------------------------------- /app/users/tasks.py: -------------------------------------------------------------------------------- 1 | from celery import shared_task 2 | 3 | 4 | @shared_task 5 | def test_task(): 6 | print("This is a test task running with celery!") 7 | -------------------------------------------------------------------------------- /app/users/tests.py: -------------------------------------------------------------------------------- 1 | from django.test import TestCase 2 | 3 | # Create your tests here. 4 | -------------------------------------------------------------------------------- /app/users/views.py: -------------------------------------------------------------------------------- 1 | from django.shortcuts import render 2 | 3 | # Create your views here. 
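As a quick way to exercise the `test_task` defined in `users/tasks.py` above, the repo ships the `run_celery_test_task` management command. A usage sketch (assuming the local docker-compose environment is up and the commands are run from `app/docker/`):

```shell
# Enqueue the test task from inside the app container
docker-compose exec app python manage.py run_celery_test_task
# Follow the default worker's logs to see the task being picked up and executed
docker-compose logs -f worker-default
```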
4 | -------------------------------------------------------------------------------- /cdk.context.json: -------------------------------------------------------------------------------- 1 | {} 2 | -------------------------------------------------------------------------------- /cdk.json: -------------------------------------------------------------------------------- 1 | { 2 | "app": "python3 app.py", 3 | "watch": { 4 | "include": [ 5 | "**" 6 | ], 7 | "exclude": [ 8 | "README.md", 9 | "cdk*.json", 10 | "requirements*.txt", 11 | "source.bat", 12 | "**/__init__.py", 13 | "python/__pycache__", 14 | "tests" 15 | ] 16 | }, 17 | "context": { 18 | "@aws-cdk/aws-apigateway:usagePlanKeyOrderInsensitiveId": true, 19 | "@aws-cdk/core:stackRelativeExports": true, 20 | "@aws-cdk/aws-rds:lowercaseDbIdentifier": true, 21 | "@aws-cdk/aws-lambda:recognizeVersionProps": true, 22 | "@aws-cdk/aws-cloudfront:defaultSecurityPolicyTLSv1.2_2021": true, 23 | "@aws-cdk-containers/ecs-service-extensions:enableDefaultLogDriver": true, 24 | "@aws-cdk/aws-ec2:uniqueImdsv2TemplateName": true, 25 | "@aws-cdk/core:target-partitions": [ 26 | "aws", 27 | "aws-cn" 28 | ] 29 | } 30 | } 31 | -------------------------------------------------------------------------------- /my_django_app/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/marianobrc/scalable-django-apps/d43a81d0efba150c47c20d39974a92250dbfae0a/my_django_app/__init__.py -------------------------------------------------------------------------------- /my_django_app/backend_workers_stack.py: -------------------------------------------------------------------------------- 1 | from aws_cdk import ( 2 | Stack, 3 | aws_ec2 as ec2, 4 | aws_ecs as ecs, 5 | aws_sqs as sqs, 6 | aws_ecs_patterns as ecs_patterns, 7 | ) 8 | from constructs import Construct 9 | 10 | 11 | class BackendWorkersStack(Stack): 12 | 13 | def __init__( 14 | self, 15 | scope: Construct, 16 | construct_id: str, 17 | vpc: ec2.Vpc, 18 | ecs_cluster: ecs.Cluster, 19 | queue: sqs.Queue, 20 | env_vars: dict, 21 | secrets: dict, 22 | task_cpu: int = 256, 23 | task_memory_mib: int = 1024, 24 | task_min_scaling_capacity: int = 0, 25 | task_max_scaling_capacity: int = 4, 26 | scaling_steps: list = None, 27 | **kwargs 28 | ) -> None: 29 | self.vpc = vpc 30 | self.ecs_cluster = ecs_cluster 31 | self.queue = queue 32 | self.env_vars = env_vars 33 | self.secrets = secrets 34 | self.task_cpu = task_cpu 35 | self.task_memory_mib = task_memory_mib 36 | self.task_min_scaling_capacity = task_min_scaling_capacity 37 | self.task_max_scaling_capacity = task_max_scaling_capacity 38 | if scaling_steps: 39 | self.scaling_steps = scaling_steps 40 | else: 41 | self.scaling_steps = [ 42 | {"upper": 0, "change": -1}, # 0 msgs = 0 workers 43 | {"lower": 1, "change": +1}, # 1 msg = 1 worker 44 | {"lower": 100, "change": +1}, # 100 msgs = 2 workers 45 | {"lower": 200, "change": +2}, # 200 msgs = 4 workers 46 | ] 47 | super().__init__(scope, construct_id, **kwargs) 48 | 49 | # Instantiate the worker 50 | self.container_name = f"celery_worker" 51 | self.workers_fargate_service = ecs_patterns.QueueProcessingFargateService( 52 | self, 53 | f"CeleryWorkers", 54 | queue=queue, 55 | platform_version=ecs.FargatePlatformVersion.VERSION1_4, 56 | cluster=self.ecs_cluster, # Required 57 | task_subnets=ec2.SubnetSelection(subnet_type=ec2.SubnetType.PRIVATE_ISOLATED), 58 | cpu=task_cpu, # Default is 256 59 | memory_limit_mib=task_memory_mib, # Default is 512 60 | 
min_scaling_capacity=self.task_min_scaling_capacity, 61 | max_scaling_capacity=self.task_max_scaling_capacity, 62 | scaling_steps=self.scaling_steps, 63 | image=ecs.ContainerImage.from_asset( 64 | directory="app/", 65 | file="docker/app/Dockerfile", 66 | target="prod" 67 | ), 68 | container_name=self.container_name, 69 | command=["start-celery-worker.sh", queue.queue_name], 70 | environment=self.env_vars, 71 | secrets=self.secrets 72 | ) 73 | -------------------------------------------------------------------------------- /my_django_app/database_stack.py: -------------------------------------------------------------------------------- 1 | from aws_cdk import ( 2 | Duration, 3 | Stack, 4 | aws_rds as rds, 5 | aws_ec2 as ec2, 6 | aws_ssm as ssm 7 | ) 8 | from constructs import Construct 9 | 10 | 11 | class DatabaseStack(Stack): 12 | 13 | def __init__( 14 | self, 15 | scope: Construct, 16 | construct_id: str, 17 | vpc: ec2.Vpc, 18 | database_name: str, 19 | min_capacity: rds.AuroraCapacityUnit = rds.AuroraCapacityUnit.ACU_2, 20 | max_capacity: rds.AuroraCapacityUnit = rds.AuroraCapacityUnit.ACU_4, 21 | auto_pause_minutes: int = 30, 22 | backup_retention_days: int = 1, 23 | **kwargs 24 | ) -> None: 25 | super().__init__(scope, construct_id, **kwargs) 26 | self.vpc = vpc 27 | self.database_name = database_name 28 | self.min_capacity = min_capacity 29 | self.max_capacity = max_capacity 30 | self.auto_pause_minutes = auto_pause_minutes 31 | self.backup_retention_days = backup_retention_days 32 | 33 | # The Aurora Serverless database cluster 34 | self.aurora_serverless_db = rds.ServerlessCluster( 35 | self, 36 | "AuroraServerlessCluster", 37 | engine=rds.DatabaseClusterEngine.AURORA_POSTGRESQL, 38 | vpc=self.vpc, 39 | vpc_subnets=ec2.SubnetSelection(subnet_type=ec2.SubnetType.PRIVATE_ISOLATED), 40 | default_database_name=self.database_name, 41 | backup_retention=Duration.days(self.backup_retention_days), # 1 day retention is free 42 | deletion_protection=True, 43 | enable_data_api=True, # Allow running queries in the AWS console (free) 44 | parameter_group=rds.ParameterGroup.from_parameter_group_name( # Specify the postgresql version 45 | self, 46 | "AuroraDBParameterGroup", 47 | "default.aurora-postgresql10" # Only this version is currently supported for Aurora Serverless 48 | ), 49 | scaling=rds.ServerlessScalingOptions( 50 | auto_pause=Duration.minutes(self.auto_pause_minutes), # Shut down after this many minutes of inactivity to save costs 51 | min_capacity=self.min_capacity, 52 | max_capacity=self.max_capacity 53 | ), 54 | ) 55 | # Allow ingress traffic from ECS tasks 56 | self.aurora_serverless_db.connections.allow_default_port_from_any_ipv4( 57 | description="Services in private subnets can access the DB" 58 | ) 59 | # Save the name of the autogenerated Secrets Manager secret holding database credentials 60 | self.ssm_db_secret_name_param = ssm.StringParameter( 61 | self, 62 | "DatabaseSecretNameParam", 63 | parameter_name=f"/{scope.stage_name}/DatabaseSecretNameParam", 64 | string_value=self.aurora_serverless_db.secret.secret_name 65 | ) 66 | -------------------------------------------------------------------------------- /my_django_app/deployment_stage.py: -------------------------------------------------------------------------------- 1 | import os 2 | from constructs import Construct 3 | from aws_cdk import ( 4 | Stage, 5 | Environment, 6 | aws_rds as rds, 7 | ) 8 | from my_django_app.network_stack import NetworkStack 9 | from my_django_app.database_stack import DatabaseStack 10 | from my_django_app.my_django_app_stack 
import MyDjangoAppStack 11 | from my_django_app.static_files_stack import StaticFilesStack 12 | from my_django_app.queues_stack import QueuesStack 13 | from my_django_app.backend_workers_stack import BackendWorkersStack 14 | from my_django_app.external_secrets_stack import ExternalSecretsStack 15 | from my_django_app.dns_route_to_alb_stack import DnsRouteToAlbStack 16 | 17 | 18 | class MyDjangoAppPipelineStage(Stage): 19 | 20 | def __init__( 21 | self, 22 | scope: Construct, 23 | construct_id: str, 24 | django_settings_module: str, 25 | django_debug: bool, 26 | domain_name: str, 27 | subdomain: str = None, 28 | db_min_capacity: rds.AuroraCapacityUnit = rds.AuroraCapacityUnit.ACU_2, 29 | db_max_capacity: rds.AuroraCapacityUnit = rds.AuroraCapacityUnit.ACU_4, 30 | db_auto_pause_minutes: int = 0, 31 | app_task_min_scaling_capacity: int = 2, 32 | app_task_max_scaling_capacity: int = 4, 33 | worker_task_min_scaling_capacity: int = 1, 34 | worker_task_max_scaling_capacity: int = 4, 35 | worker_scaling_steps: list = None, 36 | **kwargs 37 | ): 38 | 39 | super().__init__(scope, construct_id, **kwargs) 40 | self.django_settings_module = django_settings_module 41 | self.django_debug = django_debug 42 | self.domain_name = domain_name 43 | self.subdomain = subdomain 44 | self.db_min_capacity = db_min_capacity 45 | self.db_max_capacity = db_max_capacity 46 | self.db_auto_pause_minutes = db_auto_pause_minutes 47 | self.app_task_min_scaling_capacity = app_task_min_scaling_capacity 48 | self.app_task_max_scaling_capacity = app_task_max_scaling_capacity 49 | self.worker_task_min_scaling_capacity = worker_task_min_scaling_capacity 50 | self.worker_task_max_scaling_capacity = worker_task_max_scaling_capacity 51 | self.worker_scaling_steps = worker_scaling_steps 52 | aws_env = kwargs.get("env") 53 | self.network = NetworkStack( 54 | self, 55 | "Network", 56 | env=aws_env, # AWS Account and Region 57 | ) 58 | self.database = DatabaseStack( 59 | self, 60 | "Database", 61 | env=aws_env, # AWS Account and Region 62 | vpc=self.network.vpc, 63 | database_name="app_db", 64 | min_capacity=self.db_min_capacity, 65 | max_capacity=self.db_max_capacity, 66 | auto_pause_minutes=self.db_auto_pause_minutes 67 | ) 68 | # Serve static files for the Backoffice (django-admin) 69 | self.static_files = StaticFilesStack( 70 | self, 71 | "StaticFiles", 72 | env=aws_env, # AWS Account and Region 73 | cors_allowed_origins=[ 74 | f"https://{self.subdomain}.{self.domain_name}" if self.subdomain else f"https://{self.domain_name}" 75 | ] 76 | ) 77 | self.queues = QueuesStack( 78 | self, 79 | "Queues", 80 | env=aws_env, # AWS Account and Region 81 | ) 82 | self.app_env_vars = { 83 | "DJANGO_SETTINGS_MODULE": self.django_settings_module, 84 | "DJANGO_DEBUG": str(self.django_debug), 85 | "AWS_ACCOUNT_ID": os.getenv('CDK_DEFAULT_ACCOUNT'), 86 | "AWS_STATIC_FILES_BUCKET_NAME": self.static_files.s3_bucket.bucket_name, 87 | "AWS_STATIC_FILES_CLOUDFRONT_URL": self.static_files.cloudfront_distro.distribution_domain_name, 88 | "SQS_DEFAULT_QUEUE_URL": self.queues.default_queue.queue_url, 89 | "CELERY_TASK_ALWAYS_EAGER": "False" 90 | } 91 | self.secrets = ExternalSecretsStack( 92 | self, 93 | "ExternalParameters", 94 | env=aws_env, # AWS Account and Region 95 | name_prefix=f"/{self.stage_name}/", 96 | database_secrets=self.database.aurora_serverless_db.secret, 97 | ) 98 | self.django_app = MyDjangoAppStack( 99 | self, 100 | "AppService", 101 | env=aws_env, # AWS Account and Region 102 | vpc=self.network.vpc, 103 | 
ecs_cluster=self.network.ecs_cluster, 104 | queue=self.queues.default_queue, 105 | env_vars=self.app_env_vars, 106 | secrets=self.secrets.app_secrets, 107 | task_cpu=256, 108 | task_memory_mib=512, 109 | task_desired_count=self.app_task_min_scaling_capacity, 110 | task_min_scaling_capacity=self.app_task_min_scaling_capacity, 111 | task_max_scaling_capacity=self.app_task_max_scaling_capacity, 112 | ) 113 | # Grant the app permission to put messages in the queue 114 | self.queues.default_queue.grant_send_messages( 115 | self.django_app.alb_fargate_service.service.task_definition.task_role 116 | ) 117 | self.workers = BackendWorkersStack( 118 | self, 119 | "Workers", 120 | env=aws_env, # AWS Account and Region 121 | vpc=self.network.vpc, 122 | ecs_cluster=self.network.ecs_cluster, 123 | queue=self.queues.default_queue, 124 | env_vars=self.app_env_vars, 125 | secrets=self.secrets.app_secrets, 126 | task_cpu=256, 127 | task_memory_mib=512, 128 | task_min_scaling_capacity=self.worker_task_min_scaling_capacity, 129 | task_max_scaling_capacity=self.worker_task_max_scaling_capacity, 130 | scaling_steps=self.worker_scaling_steps 131 | ) 132 | # Route requests for the domain to the ALB 133 | self.dns = DnsRouteToAlbStack( 134 | self, 135 | "DnsToAlb", 136 | env=aws_env, # AWS Account and Region 137 | domain_name=self.domain_name, 138 | subdomain=self.subdomain, 139 | alb=self.django_app.alb_fargate_service.load_balancer, 140 | ) 141 | -------------------------------------------------------------------------------- /my_django_app/dns_route_to_alb_stack.py: -------------------------------------------------------------------------------- 1 | from aws_cdk import ( 2 | Stack, 3 | aws_route53 as route53, 4 | aws_route53_targets as targets, 5 | aws_elasticloadbalancingv2 as elbv2, 6 | ) 7 | from constructs import Construct 8 | 9 | 10 | class DnsRouteToAlbStack(Stack): 11 | 12 | def __init__( 13 | self, 14 | scope: Construct, 15 | construct_id: str, 16 | alb: elbv2.ApplicationLoadBalancer, 17 | domain_name: str, # example.com 18 | subdomain: str = None, # api(.example.com) 19 | **kwargs 20 | ) -> None: 21 | super().__init__(scope, construct_id, **kwargs) 22 | self.hosted_zone = route53.HostedZone.from_lookup( 23 | self, 24 | "HostedZone", 25 | domain_name=domain_name 26 | ) 27 | self.dns_record = route53.ARecord( 28 | self, 29 | "ARecord", 30 | zone=self.hosted_zone, 31 | record_name=subdomain, 32 | target=route53.RecordTarget.from_alias(targets.LoadBalancerTarget(alb)) 33 | ) 34 | -------------------------------------------------------------------------------- /my_django_app/external_secrets_stack.py: -------------------------------------------------------------------------------- 1 | from aws_cdk import ( 2 | Stack, 3 | aws_ecs as ecs, 4 | aws_secretsmanager as secretsmanager, 5 | ) 6 | from constructs import Construct 7 | 8 | 9 | class ExternalSecretsStack(Stack): 10 | 11 | def __init__( 12 | self, 13 | scope: Construct, 14 | construct_id: str, 15 | database_secrets: secretsmanager.ISecret, 16 | name_prefix: str, # Naming convention for parameters, e.g. /AppNameStageName/SecretName 17 | **kwargs 18 | ) -> None: 19 | super().__init__(scope, construct_id, **kwargs) 20 | 21 | # Secret values required by the app which are stored in Secrets Manager 22 | # These values will be injected as env vars at runtime 23 | self.app_secrets = { 24 | "DJANGO_SECRET_KEY": ecs.Secret.from_secrets_manager( 25 | secretsmanager.Secret.from_secret_name_v2( 26 | self, 27 | f"DjangoKeySecret", 28 | 
secret_name=f"{name_prefix}DjangoSecretKey" 29 | ) 30 | ), 31 | "DB_HOST": ecs.Secret.from_secrets_manager( 32 | database_secrets, 33 | field="host" 34 | ), 35 | "DB_PORT": ecs.Secret.from_secrets_manager( 36 | database_secrets, 37 | field="port" 38 | ), 39 | "DB_NAME": ecs.Secret.from_secrets_manager( 40 | database_secrets, 41 | field="dbname" 42 | ), 43 | "DB_USER": ecs.Secret.from_secrets_manager( 44 | database_secrets, 45 | field="username" 46 | ), 47 | "DB_PASSWORD": ecs.Secret.from_secrets_manager( 48 | database_secrets, 49 | field="password" 50 | ), 51 | "AWS_ACCESS_KEY_ID": ecs.Secret.from_secrets_manager( 52 | secretsmanager.Secret.from_secret_name_v2( 53 | self, 54 | f"AWSAccessKeyIDSecret", 55 | secret_name=f"{name_prefix}AwsApiKeyId" 56 | ) 57 | ), 58 | "AWS_SECRET_ACCESS_KEY": ecs.Secret.from_secrets_manager( 59 | secretsmanager.Secret.from_secret_name_v2( 60 | self, 61 | f"AWSAccessKeySecretSecret", 62 | secret_name=f"{name_prefix}AwsApiKeySecret", 63 | ) 64 | ), 65 | } 66 | -------------------------------------------------------------------------------- /my_django_app/my_django_app_stack.py: -------------------------------------------------------------------------------- 1 | from aws_cdk import ( 2 | Stack, 3 | aws_ec2 as ec2, 4 | aws_sqs as sqs, 5 | aws_ecs as ecs, 6 | aws_ecs_patterns as ecs_patterns, 7 | aws_certificatemanager as acm, 8 | aws_elasticloadbalancingv2 as elbv2, 9 | aws_ssm as ssm 10 | ) 11 | from constructs import Construct 12 | 13 | 14 | class MyDjangoAppStack(Stack): 15 | 16 | def __init__( 17 | self, 18 | scope: Construct, 19 | construct_id: str, 20 | vpc: ec2.Vpc, 21 | ecs_cluster: ecs.Cluster, 22 | queue: sqs.Queue, 23 | env_vars: dict, 24 | secrets: dict, 25 | task_cpu: int = 256, 26 | task_memory_mib: int = 1024, 27 | task_desired_count: int = 2, 28 | task_min_scaling_capacity: int = 2, 29 | task_max_scaling_capacity: int = 4, 30 | **kwargs 31 | ) -> None: 32 | 33 | super().__init__(scope, construct_id, **kwargs) 34 | self.vpc = vpc 35 | self.ecs_cluster = ecs_cluster 36 | self.queue = queue 37 | self.env_vars = env_vars 38 | self.secrets = secrets 39 | self.task_cpu = task_cpu 40 | self.task_memory_mib = task_memory_mib 41 | self.task_desired_count = task_desired_count 42 | self.task_min_scaling_capacity = task_min_scaling_capacity 43 | self.task_max_scaling_capacity = task_max_scaling_capacity 44 | 45 | # Prepare parameters 46 | self.container_name = f"django_app" 47 | # Retrieve the ARN of the TLS certificate from SSM Parameter Store 48 | self.certificate_arn = ssm.StringParameter.value_for_string_parameter( 49 | self, f"/{scope.stage_name}/CertificateArn" 50 | ) 51 | # Instantiate the certificate which will be required by the load balancer later 52 | self.domain_certificate = acm.Certificate.from_certificate_arn( 53 | self, "DomainCertificate", 54 | certificate_arn=self.certificate_arn 55 | ) 56 | # Create the load balancer, ECS service and Fargate task for the Django App 57 | self.alb_fargate_service = ecs_patterns.ApplicationLoadBalancedFargateService( 58 | self, 59 | f"MyDjangoApp", 60 | protocol=elbv2.ApplicationProtocol.HTTPS, 61 | certificate=self.domain_certificate, 62 | redirect_http=True, 63 | platform_version=ecs.FargatePlatformVersion.VERSION1_4, 64 | cluster=self.ecs_cluster, # Required 65 | task_subnets=ec2.SubnetSelection(subnet_type=ec2.SubnetType.PRIVATE_ISOLATED), 66 | cpu=self.task_cpu, # Default is 256 67 | memory_limit_mib=self.task_memory_mib, # Default is 512 68 | desired_count=self.task_desired_count, # Default is 1 69 | 
task_image_options=ecs_patterns.ApplicationLoadBalancedTaskImageOptions( 70 | image=ecs.ContainerImage.from_asset( 71 | directory="app/", 72 | file="docker/app/Dockerfile", 73 | target="prod" 74 | ), 75 | container_name=self.container_name, 76 | container_port=8000, 77 | environment=self.env_vars, 78 | secrets=self.secrets 79 | ), 80 | public_load_balancer=True 81 | ) 82 | # Set the health check settings 83 | self.alb_fargate_service.target_group.configure_health_check( 84 | path="/status/", 85 | healthy_threshold_count=3, 86 | unhealthy_threshold_count=2 87 | ) 88 | # Autoscaling based on CPU utilization 89 | scalable_target = self.alb_fargate_service.service.auto_scale_task_count( 90 | min_capacity=self.task_min_scaling_capacity, 91 | max_capacity=self.task_max_scaling_capacity 92 | ) 93 | scalable_target.scale_on_cpu_utilization( 94 | f"CpuScaling", 95 | target_utilization_percent=75, 96 | ) 97 | # Save useful values in SSM 98 | self.ecs_cluster_name_param = ssm.StringParameter( 99 | self, 100 | "EcsClusterNameParam", 101 | parameter_name=f"/{scope.stage_name}/EcsClusterNameParam", 102 | string_value=self.ecs_cluster.cluster_name 103 | ) 104 | self.task_def_arn_param = ssm.StringParameter( 105 | self, 106 | "TaskDefArnParam", 107 | parameter_name=f"/{scope.stage_name}/TaskDefArnParam", 108 | string_value=self.alb_fargate_service.task_definition.task_definition_arn 109 | ) 110 | self.task_def_family_param = ssm.StringParameter( 111 | self, 112 | "TaskDefFamilyParam", 113 | parameter_name=f"/{scope.stage_name}/TaskDefFamilyParam", 114 | string_value=f"family:{self.alb_fargate_service.task_definition.family}" 115 | ) 116 | self.exec_role_arn_param = ssm.StringParameter( 117 | self, 118 | "TaskExecRoleArnParam", 119 | parameter_name=f"/{scope.stage_name}/TaskExecRoleArnParam", 120 | string_value=self.alb_fargate_service.task_definition.execution_role.role_arn 121 | ) 122 | self.task_role_arn_param = ssm.StringParameter( 123 | self, 124 | "TaskRoleArnParam", 125 | parameter_name=f"/{scope.stage_name}/TaskRoleArnParam", 126 | string_value=self.alb_fargate_service.task_definition.task_role.role_arn 127 | ) 128 | -------------------------------------------------------------------------------- /my_django_app/network_stack.py: -------------------------------------------------------------------------------- 1 | from aws_cdk import ( 2 | Stack, 3 | aws_ec2 as ec2, 4 | aws_ssm as ssm, 5 | aws_ecs as ecs, 6 | ) 7 | from constructs import Construct 8 | 9 | 10 | class NetworkStack(Stack): 11 | 12 | def __init__(self, scope: Construct, construct_id: str, **kwargs) -> None: 13 | super().__init__(scope, construct_id, **kwargs) 14 | 15 | # Our network in the cloud 16 | self.vpc = ec2.Vpc( 17 | self, 18 | "VPC", 19 | max_azs=2, # default is all AZs in region 20 | nat_gateways=0, # No NAT GWs are required as we will add VPC endpoints 21 | enable_dns_hostnames=True, 22 | enable_dns_support=True 23 | ) 24 | self.ecs_cluster = ecs.Cluster(self, f"ECSCluster", vpc=self.vpc) 25 | # Add VPC endpoints to keep the traffic inside AWS 26 | self.s3_private_link = ec2.GatewayVpcEndpoint( 27 | self, 28 | "S3GWEndpoint", 29 | vpc=self.vpc, 30 | service=ec2.GatewayVpcEndpointAwsService.S3 31 | ) 32 | self.ecr_api_private_link = ec2.InterfaceVpcEndpoint( 33 | self, 34 | "ECRapiEndpoint", 35 | vpc=self.vpc, 36 | service=ec2.InterfaceVpcEndpointAwsService.ECR, 37 | open=True, 38 | private_dns_enabled=True 39 | ) 40 | self.ecr_dkr_private_link = ec2.InterfaceVpcEndpoint( 41 | self, 42 | "ECRdkrEndpoint", 43 | vpc=self.vpc, 
44 | service=ec2.InterfaceVpcEndpointAwsService.ECR_DOCKER, 45 | open=True, 46 | private_dns_enabled=True 47 | ) 48 | self.cloudwatch_private_link = ec2.InterfaceVpcEndpoint( 49 | self, 50 | "CloudWatchEndpoint", 51 | vpc=self.vpc, 52 | service=ec2.InterfaceVpcEndpointAwsService.CLOUDWATCH_LOGS, 53 | open=True, 54 | private_dns_enabled=True 55 | ) 56 | self.secrets_manager_private_link = ec2.InterfaceVpcEndpoint( 57 | self, 58 | "SecretsManagerEndpoint", 59 | vpc=self.vpc, 60 | service=ec2.InterfaceVpcEndpointAwsService.SECRETS_MANAGER, 61 | open=True, 62 | private_dns_enabled=True 63 | ) 64 | self.sqs_private_link = ec2.InterfaceVpcEndpoint( 65 | self, 66 | "SQSEndpoint", 67 | vpc=self.vpc, 68 | service=ec2.InterfaceVpcEndpointAwsService.SQS, 69 | open=True, 70 | private_dns_enabled=True 71 | ) 72 | # Save useful info in SSM for later usage 73 | ssm.StringParameter( 74 | self, 75 | "VpcIdParam", 76 | parameter_name=f"/{scope.stage_name}/VpcId", 77 | string_value=self.vpc.vpc_id 78 | ) 79 | self.task_subnets = ssm.StringListParameter( 80 | self, 81 | "VpcPrivateSubnetsParam", 82 | parameter_name=f"/{scope.stage_name}/VpcPrivateSubnetsParam", 83 | string_list_value=[ 84 | s.subnet_id 85 | for s in self.vpc.select_subnets(subnet_type=ec2.SubnetType.PRIVATE_ISOLATED).subnets 86 | ] 87 | ) 88 | -------------------------------------------------------------------------------- /my_django_app/pipeline_stack.py: -------------------------------------------------------------------------------- 1 | from constructs import Construct 2 | from aws_cdk import ( 3 | Stack, 4 | pipelines as pipelines, 5 | aws_ssm as ssm, 6 | aws_secretsmanager as secretsmanager, 7 | aws_rds as rds, 8 | ) 9 | from .deployment_stage import MyDjangoAppPipelineStage 10 | 11 | 12 | class MyDjangoAppPipelineStack(Stack): 13 | def __init__( 14 | self, 15 | scope: Construct, 16 | construct_id: str, 17 | repository: str, 18 | branch: str, 19 | ssm_gh_connection_param: str, 20 | **kwargs 21 | ) -> None: 22 | super().__init__(scope, construct_id, **kwargs) 23 | self.repository = repository 24 | self.branch = branch 25 | self.ssm_gh_connection_param = ssm_gh_connection_param 26 | self.gh_connection_arn = ssm.StringParameter.value_for_string_parameter( 27 | self, ssm_gh_connection_param 28 | ) 29 | aws_env = kwargs.get("env") 30 | pipeline = pipelines.CodePipeline( 31 | self, 32 | "Pipeline", 33 | docker_credentials=[ 34 | pipelines.DockerCredential.docker_hub( 35 | secretsmanager.Secret.from_secret_name_v2( 36 | self, 37 | "DockerHubSecret", 38 | secret_name="/MyDjangoAppPipeline/DockerHubSecret" 39 | ) 40 | ), 41 | ], 42 | synth=pipelines.ShellStep( 43 | "Synth", 44 | input=pipelines.CodePipelineSource.connection( 45 | self.repository, 46 | self.branch, 47 | connection_arn=self.gh_connection_arn, 48 | trigger_on_push=True 49 | ), 50 | commands=[ 51 | "npm install -g aws-cdk", # Installs the cdk cli on Codebuild 52 | "pip install -r requirements.txt", # Instructs Codebuild to install required packages 53 | "npx cdk synth MyDjangoAppPipeline", 54 | ] 55 | ), 56 | ) 57 | # Deploy to a staging environment 58 | self.staging_env = MyDjangoAppPipelineStage( 59 | self, "MyDjangoAppStaging", 60 | env=aws_env, # AWS Account and Region 61 | django_settings_module="app.settings.stage", 62 | django_debug=True, 63 | domain_name="scalabledjango.com", 64 | subdomain="stage", 65 | # Limit scaling in staging to reduce costs 66 | db_min_capacity=rds.AuroraCapacityUnit.ACU_2, 67 | db_max_capacity=rds.AuroraCapacityUnit.ACU_2, 68 | db_auto_pause_minutes=5, 
69 | app_task_min_scaling_capacity=1, 70 | app_task_max_scaling_capacity=2, 71 | worker_task_min_scaling_capacity=1, 72 | worker_task_max_scaling_capacity=2, 73 | worker_scaling_steps=[ 74 | {"upper": 0, "change": 0}, # 0 msgs = 1 worker 75 | {"lower": 10, "change": +1}, # 10 msgs = 2 workers 76 | ] 77 | ) 78 | pipeline.add_stage(self.staging_env) 79 | # Deploy to production after manual approval 80 | self.production_env = MyDjangoAppPipelineStage( 81 | self, "MyDjangoAppProduction", 82 | env=aws_env, # AWS Account and Region 83 | django_settings_module="app.settings.prod", 84 | django_debug=False, 85 | domain_name="scalabledjango.com", 86 | db_auto_pause_minutes=0, # Keep the database always up in production 87 | app_task_min_scaling_capacity=2, 88 | app_task_max_scaling_capacity=5, 89 | worker_task_min_scaling_capacity=2, 90 | worker_task_max_scaling_capacity=4, 91 | worker_scaling_steps=[ 92 | {"upper": 0, "change": 0}, # 0 msgs = 1 worker 93 | {"lower": 100, "change": +1}, # > 100 msgs = 2 workers 94 | {"lower": 200, "change": +1}, # > 200 msgs = 3 workers 95 | {"lower": 500, "change": +2}, # > 500 msgs = 5 workers 96 | ] 97 | ) 98 | pipeline.add_stage( 99 | self.production_env, 100 | pre=[ 101 | pipelines.ManualApprovalStep("PromoteToProduction") 102 | ] 103 | ) 104 | -------------------------------------------------------------------------------- /my_django_app/queues_stack.py: -------------------------------------------------------------------------------- 1 | from aws_cdk import ( 2 | Stack, 3 | aws_sqs as sqs, 4 | aws_ssm as ssm, 5 | ) 6 | from constructs import Construct 7 | 8 | 9 | class QueuesStack(Stack): 10 | 11 | def __init__( 12 | self, 13 | scope: Construct, 14 | construct_id: str, 15 | **kwargs 16 | ) -> None: 17 | super().__init__(scope, construct_id, **kwargs) 18 | # Create an SQS queue 19 | self.default_queue = sqs.Queue( 20 | self, 21 | "SQSQueue" 22 | ) 23 | # Save the queue URL in SSM Parameter Store 24 | self.default_queue_url_param = ssm.StringParameter( 25 | self, 26 | "SqsDefaultQueueUrlParam", 27 | parameter_name=f"/{scope.stage_name}/SqsDefaultQueueUrlParam", 28 | string_value=self.default_queue.queue_url 29 | ) 30 | -------------------------------------------------------------------------------- /my_django_app/static_files_stack.py: -------------------------------------------------------------------------------- 1 | import typing 2 | from aws_cdk import ( 3 | Stack, 4 | RemovalPolicy, 5 | aws_s3 as s3, 6 | aws_cloudfront as cloudfront, 7 | aws_cloudfront_origins as origins, 8 | aws_ssm as ssm 9 | ) 10 | from constructs import Construct 11 | 12 | 13 | class StaticFilesStack(Stack): 14 | 15 | def __init__( 16 | self, 17 | scope: Construct, 18 | construct_id: str, 19 | bucket_name: str = None, 20 | cors_allowed_origins: typing.Sequence[str] = None, 21 | **kwargs 22 | ) -> None: 23 | super().__init__(scope, construct_id, **kwargs) 24 | self.bucket_name = bucket_name 25 | self.cors_allowed_origins = cors_allowed_origins 26 | # Create a private bucket 27 | self.s3_bucket = s3.Bucket( 28 | self, f"Bucket", 29 | bucket_name=bucket_name, # Bucket name must be globally unique. If not set it's assigned by CloudFormation 30 | block_public_access=s3.BlockPublicAccess.BLOCK_ALL, 31 | removal_policy=RemovalPolicy.DESTROY, # Delete objects on bucket removal 32 | auto_delete_objects=True 33 | ) 34 | # Add an OriginAccessIdentity to access the bucket 35 | self.oai = cloudfront.OriginAccessIdentity( 36 | self, f"BucketOAI", 37 | comment="OAI to access backend static files." 
38 | ) 39 | self.s3_bucket.grant_read(self.oai) 40 | # Prepare CORS settings 41 | if self.cors_allowed_origins: 42 | response_headers_policy = cloudfront.ResponseHeadersPolicy( 43 | self, "ResponseHeadersPolicy", 44 | response_headers_policy_name=f"{scope.stage_name}CORSPolicy", 45 | comment="CORS Policy", 46 | cors_behavior=cloudfront.ResponseHeadersCorsBehavior( 47 | access_control_allow_credentials=True, 48 | access_control_allow_headers=[ 49 | "accept", 50 | "accept-encoding", 51 | "authorization", 52 | "content-type", 53 | "dnt", 54 | "origin", 55 | "user-agent", 56 | "x-csrftoken" 57 | ], 58 | access_control_allow_methods=["GET", "HEAD", "OPTIONS"], 59 | access_control_allow_origins=cors_allowed_origins, 60 | origin_override=True 61 | ) 62 | ) 63 | else: 64 | response_headers_policy = cloudfront.ResponseHeadersPolicy.CORS_ALLOW_ALL_ORIGINS 65 | # Create the cloudfront distribution 66 | self.cloudfront_distro = cloudfront.Distribution( 67 | self, "CFDistribution", 68 | default_behavior=cloudfront.BehaviorOptions( 69 | origin=origins.S3Origin( 70 | self.s3_bucket, 71 | origin_access_identity=self.oai 72 | ), 73 | response_headers_policy=response_headers_policy 74 | ) 75 | ) 76 | # Save useful parameters to SSM Parameter Store 77 | self.static_files_bucket_name = ssm.StringParameter( 78 | self, 79 | "StaticFilesBucketNameParam", 80 | parameter_name=f"/{scope.stage_name}/StaticFilesBucketNameParam", 81 | string_value=self.s3_bucket.bucket_name 82 | ) 83 | self.static_files_cloudfront_url = ssm.StringParameter( 84 | self, 85 | "StaticFilesCloudFrontUrlParam", 86 | parameter_name=f"/{scope.stage_name}/StaticFilesCloudFrontUrlParam", 87 | string_value=self.cloudfront_distro.distribution_domain_name 88 | ) 89 | -------------------------------------------------------------------------------- /requirements-dev.txt: -------------------------------------------------------------------------------- 1 | pytest==6.2.5 2 | boto3==1.21.21 3 | -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | aws-cdk-lib==2.16.0 2 | constructs>=10.0.0,<11.0.0 3 | -------------------------------------------------------------------------------- /scripts/run_cmd.py: -------------------------------------------------------------------------------- 1 | import os 2 | import json 3 | import boto3 4 | import argparse 5 | 6 | 7 | AWS_ACCOUNT_ID = os.getenv("AWS_ACCOUNT_ID") 8 | AWS_ACCESS_KEY_ID = os.getenv("AWS_ACCESS_KEY_ID") 9 | AWS_SECRET_ACCESS_KEY = os.getenv("AWS_SECRET_ACCESS_KEY") 10 | AWS_REGION_NAME = os.getenv("AWS_REGION_NAME") 11 | 12 | ecs_client = boto3.client( 13 | 'ecs', 14 | aws_access_key_id=AWS_ACCESS_KEY_ID, 15 | aws_secret_access_key=AWS_SECRET_ACCESS_KEY, 16 | region_name=AWS_REGION_NAME, 17 | ) 18 | 19 | ssm_client = boto3.client( 20 | 'ssm', 21 | aws_access_key_id=AWS_ACCESS_KEY_ID, 22 | aws_secret_access_key=AWS_SECRET_ACCESS_KEY, 23 | region_name=AWS_REGION_NAME, 24 | ) 25 | 26 | secrets_client = boto3.client( 27 | 'secretsmanager', 28 | aws_access_key_id=AWS_ACCESS_KEY_ID, 29 | aws_secret_access_key=AWS_SECRET_ACCESS_KEY, 30 | region_name=AWS_REGION_NAME, 31 | ) 32 | 33 | 34 | aws_ssm_parameters_map = { 35 | "vpcId": "VpcId", 36 | "cluster": "EcsClusterNameParam", 37 | "taskDefinition": "TaskDefArnParam", 38 | "group": "TaskDefFamilyParam", 39 | "subnets": [], 40 | "securityGroups": [], 41 | "executionRoleArn": "TaskExecRoleArnParam", 42 | "taskRoleArn": 
"TaskRoleArnParam" 43 | } 44 | aws_ssm_parameters = [ 45 | "VpcId", 46 | "EcsClusterNameParam", 47 | "TaskDefArnParam", 48 | "TaskDefFamilyParam", 49 | "TaskExecRoleArnParam", 50 | "TaskRoleArnParam" 51 | ] 52 | 53 | 54 | def _build_execution_cofig(env_name, extra_env_vars=None): 55 | # Get the parameters stored in SSM 56 | config = {} 57 | 58 | for p in aws_ssm_parameters: 59 | response = ssm_client.get_parameter( 60 | Name=f"/{env_name}/{p}" 61 | ) 62 | config[p] = response['Parameter']['Value'] 63 | 64 | # Networking config 65 | response = ssm_client.get_parameter( 66 | Name=f"/{env_name}/VpcPrivateSubnetsParam" 67 | ) 68 | config["subnets"] = response['Parameter']['Value'].split(',') 69 | # Let it use the default security group 70 | # config["securityGroups"] = [ 71 | # "sg-011d894ce2289d62b" 72 | # ] 73 | config["container"] = "django_app" 74 | # Env vars for running django commands 75 | config["environment"] = [ 76 | # Regular parameters 77 | { 78 | "name": "DJANGO_SETTINGS_MODULE", 79 | "value": "app.settings.stage" 80 | }, 81 | { 82 | "name": "DJANGO_DEBUG", 83 | "value": "True" 84 | }, 85 | { 86 | "name": "AWS_ACCOUNT_ID", 87 | "value": AWS_ACCOUNT_ID 88 | }, 89 | { 90 | "name": "CELERY_TASK_ALWAYS_EAGER", 91 | "value": "False" 92 | } 93 | ] 94 | # Retrieve extra env var values from SSM Parameter Store 95 | response = ssm_client.get_parameter( 96 | Name=f"/{env_name}/StaticFilesBucketNameParam" 97 | ) 98 | config["environment"].append( 99 | { 100 | "name": "AWS_STATIC_FILES_BUCKET_NAME", 101 | "value": response['Parameter']['Value'] 102 | } 103 | ) 104 | response = ssm_client.get_parameter( 105 | Name=f"/{env_name}/StaticFilesCloudFrontUrlParam" 106 | ) 107 | config["environment"].append( 108 | { 109 | "name": "AWS_STATIC_FILES_CLOUDFRONT_URL", 110 | "value": response['Parameter']['Value'] 111 | } 112 | ) 113 | response = ssm_client.get_parameter( 114 | Name=f"/{env_name}/SqsDefaultQueueUrlParam" 115 | ) 116 | config["environment"].append( 117 | { 118 | "name": "SQS_DEFAULT_QUEUE_URL", 119 | "value": response['Parameter']['Value'] 120 | } 121 | ) 122 | 123 | # Retrieve secret values from secrets manager 124 | response = secrets_client.get_secret_value( 125 | SecretId=f"/{env_name}/DjangoSecretKey" 126 | ) 127 | config["environment"].append( 128 | { 129 | "name": "DJANGO_SECRET_KEY", 130 | "value": response['SecretString'] 131 | } 132 | ) 133 | # Get the name of the secret containing database secrets from SSM 134 | response = ssm_client.get_parameter( 135 | Name=f"/{env_name}/DatabaseSecretNameParam" 136 | ) 137 | db_secret_name = response['Parameter']['Value'] 138 | # Now get the actual secrets from secrets manager 139 | response = secrets_client.get_secret_value( 140 | SecretId=db_secret_name 141 | ) 142 | db_secrets = json.loads( 143 | response['SecretString'] 144 | ) 145 | config["environment"].append( 146 | { 147 | "name": "DB_HOST", 148 | "value": db_secrets['host'] 149 | } 150 | ) 151 | config["environment"].append( 152 | { 153 | "name": "DB_PORT", 154 | "value": str(db_secrets['port']) 155 | } 156 | ) 157 | config["environment"].append( 158 | { 159 | "name": "DB_USER", 160 | "value": db_secrets['username'] 161 | } 162 | ) 163 | config["environment"].append( 164 | { 165 | "name": "DB_PASSWORD", 166 | "value": db_secrets['password'] 167 | } 168 | ) 169 | config["environment"].append( 170 | { 171 | "name": "AWS_ACCESS_KEY_ID", 172 | "value": AWS_ACCESS_KEY_ID 173 | } 174 | ) 175 | config["environment"].append( 176 | { 177 | "name": "AWS_SECRET_ACCESS_KEY", 178 | "value": 
AWS_SECRET_ACCESS_KEY 179 | } 180 | ) 181 | # Add extra env vars if any 182 | if extra_env_vars: 183 | for var in extra_env_vars: 184 | name, value = var.split('=', maxsplit=1) 185 | config["environment"].append( 186 | { 187 | "name": name, 188 | "value": value 189 | } 190 | ) 191 | return config 192 | 193 | 194 | # This function runs a command as a task in AWS ECS Fargate 195 | def run_task_in_fargate(docker_cmd, config): 196 | 197 | # Call AWS API 198 | aws_response = ecs_client.run_task( 199 | cluster=config["EcsClusterNameParam"], 200 | # Let it use the latest active revision of the task 201 | taskDefinition=config["TaskDefArnParam"], 202 | count=1, 203 | enableECSManagedTags=False, 204 | group=config["TaskDefFamilyParam"], 205 | launchType='FARGATE', 206 | networkConfiguration={ 207 | 'awsvpcConfiguration': { 208 | 'subnets': config["subnets"], 209 | #'securityGroups': config["securityGroups"], 210 | 'assignPublicIp': 'DISABLED' 211 | } 212 | }, 213 | overrides={ 214 | 'containerOverrides': [ 215 | { 216 | 'name': config["container"], 217 | 'command': docker_cmd.split(" "), # Expects a list 218 | 'environment': config["environment"], 219 | }, 220 | ], 221 | 'executionRoleArn': config["TaskExecRoleArnParam"], 222 | 'taskRoleArn': config["TaskRoleArnParam"] 223 | } 224 | ) 225 | return aws_response 226 | 227 | 228 | def init_argparse() -> argparse.ArgumentParser: 229 | parser = argparse.ArgumentParser( 230 | description="Run a command as a Fargate task in ECS, using the same container and settings used by the App" 231 | ) 232 | parser.add_argument( 233 | "command", 234 | type=str, 235 | #nargs=1 236 | ) 237 | parser.add_argument( 238 | "--env", 239 | dest="env_name", 240 | help="The environment where the command will be run: MyDjangoAppStaging or MyDjangoAppProduction.", 241 | required=True 242 | ) 243 | parser.add_argument( 244 | "--env-var", 245 | dest="env_vars", 246 | help="Set extra env vars as --env-var NAME1=VALUE1 --env-var NAME2=VALUE2", 247 | action='append', # Make a list with the multiple env vars 248 | required=False 249 | ) 250 | return parser 251 | 252 | 253 | if __name__ == "__main__": 254 | parser = init_argparse() 255 | args = parser.parse_args() 256 | env_name = args.env_name 257 | docker_cmd = args.command 258 | env_vars = args.env_vars 259 | print(f"Building execution config for {env_name}") 260 | config = _build_execution_cofig(env_name=env_name, extra_env_vars=env_vars) 261 | print(f"Config loaded:\n{config}") 262 | print(f"Starting task in ECS with command:\n{docker_cmd}") 263 | aws_response = run_task_in_fargate(docker_cmd=docker_cmd, config=config) 264 | print(f"AWS Response:\n{aws_response}") 265 | -------------------------------------------------------------------------------- /scripts/set_env_vars.sh: -------------------------------------------------------------------------------- 1 | set -a 2 | . 
./.env 3 | set +a 4 | -------------------------------------------------------------------------------- /scripts/set_parameters.py: -------------------------------------------------------------------------------- 1 | import json 2 | import os 3 | import argparse 4 | import subprocess 5 | 6 | 7 | def init_argparse() -> argparse.ArgumentParser: 8 | parser = argparse.ArgumentParser( 9 | description="Set parameters in AWS SSM Parameter Store or Secrets Manager" 10 | ) 11 | parser.add_argument( 12 | "file", 13 | help="A JSON file with parameters.", 14 | ) 15 | parser.add_argument( 16 | "--profile", 17 | dest="profile", 18 | help="Set the profile to use with the aws client.", 19 | required=False 20 | ) 21 | parser.add_argument( 22 | "--tags", 23 | dest="tags", 24 | help="Add tags in Key=Value format, e.g. Key=project,Value=MyDjangoApp", 25 | required=False 26 | ) 27 | parser.add_argument( 28 | "--overwrite", 29 | help="The parameter will be overwritten", 30 | dest="is_overwrite", 31 | action="store_true", 32 | default=False 33 | ) 34 | parser.add_argument( 35 | "--secret", 36 | help="The parameters will be saved as secrets in the Secrets Manager", 37 | dest="is_secret", 38 | action="store_true", 39 | default=False 40 | ) 41 | return parser 42 | 43 | 44 | if __name__ == "__main__": 45 | parser = init_argparse() 46 | args = parser.parse_args() 47 | print("Setting parameters in AWS...") 48 | with open(args.file, "r") as parameters_file: 49 | parameters = json.load(parameters_file) 50 | for key, value in parameters.items(): 51 | command = ["aws"] 52 | if args.profile: 53 | command.extend(["--profile", args.profile]) 54 | if args.is_secret: # Secret in Secrets Manager 55 | command.extend(["secretsmanager", "create-secret", "--name", key, "--secret-string", value]) 56 | if args.is_overwrite: 57 | command.append("--force-overwrite-replica-secret") 58 | else: # Regular parameter in SSM 59 | command.extend(["ssm", "put-parameter", "--name", key, "--value", value, "--type", "String"]) 60 | if args.is_overwrite: 61 | command.append("--overwrite") 62 | elif args.tags: 63 | command.extend(["--tags", args.tags]) 64 | print(command) 65 | response = subprocess.call(command) 66 | print("Finished.") 67 | -------------------------------------------------------------------------------- /source.bat: -------------------------------------------------------------------------------- 1 | @echo off 2 | 3 | rem The sole purpose of this script is to make the command 4 | rem 5 | rem source .venv/bin/activate 6 | rem 7 | rem (which activates a Python virtualenv on Linux or Mac OS X) work on Windows. 8 | rem On Windows, this command just runs this batch file (the argument is ignored). 9 | rem 10 | rem Now we don't need to document a Windows command for activating a virtualenv. 
11 | 12 | echo Executing .venv\Scripts\activate.bat for you 13 | .venv\Scripts\activate.bat 14 | -------------------------------------------------------------------------------- /tests/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/marianobrc/scalable-django-apps/d43a81d0efba150c47c20d39974a92250dbfae0a/tests/__init__.py -------------------------------------------------------------------------------- /tests/unit/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/marianobrc/scalable-django-apps/d43a81d0efba150c47c20d39974a92250dbfae0a/tests/unit/__init__.py -------------------------------------------------------------------------------- /tests/unit/test_my_django_app_stack.py: -------------------------------------------------------------------------------- 1 | import aws_cdk as core 2 | import aws_cdk.assertions as assertions 3 | 4 | from my_django_app.my_django_app_stack import MyDjangoAppStack 5 | 6 | # Example test placeholder. MyDjangoAppStack now requires a VPC, an ECS cluster, an SQS queue, 7 | # env vars and secrets, so the stub generated by cdk init is commented out until proper 8 | # fixtures are added. 9 | # def test_sqs_queue_created(): 10 | #     app = core.App() 11 | #     stack = MyDjangoAppStack(app, "my-django-app") 12 | #     template = assertions.Template.from_stack(stack) 13 | # 14 | #     template.has_resource_properties("AWS::SQS::Queue", { 15 | #         "VisibilityTimeout": 300 16 | #     }) 17 | --------------------------------------------------------------------------------
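A minimal unit-test sketch, not part of the repository, for the placeholder above. Because MyDjangoAppStack cannot be synthesized in isolation (it needs a VPC, an ECS cluster, a queue, secrets and a Docker build for its container image), this sketch exercises QueuesStack instead, which only needs a parent Stage so that scope.stage_name resolves. The test name and the "TestStage" identifier are illustrative assumptions, and the sketch assumes assertions.Template.from_stack can synthesize a stack nested inside a Stage, as current aws-cdk-lib releases do.

import aws_cdk as core
import aws_cdk.assertions as assertions

from my_django_app.queues_stack import QueuesStack


def test_default_queue_and_url_param_created():
    app = core.App()
    # The stacks build SSM parameter names from scope.stage_name,
    # so they must be created under a Stage, not directly under the App.
    stage = core.Stage(app, "TestStage")
    stack = QueuesStack(stage, "Queues")
    template = assertions.Template.from_stack(stack)

    # QueuesStack creates one SQS queue and one SSM parameter holding its URL.
    template.resource_count_is("AWS::SQS::Queue", 1)
    template.resource_count_is("AWS::SSM::Parameter", 1)

The same wrap-it-in-a-Stage pattern can likely be reused for NetworkStack or StaticFilesStack, which also read scope.stage_name but take no required constructor arguments.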