├── .Dockerignore ├── .gitignore ├── .travis.yml ├── Dockerfile ├── Pipfile ├── Pipfile.lock ├── README.md ├── deploy_to_aws.py ├── microservice ├── __init__.py └── api.py └── tests └── test_microservice.py /.Dockerignore: -------------------------------------------------------------------------------- 1 | .git 2 | .gitignore 3 | .travis.yml 4 | .aws_credentials.json 5 | .vscode/ 6 | __pycache__/ 7 | README.md 8 | test.rest 9 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | .vscode/ 2 | .aws_credentials.json 3 | __pycache__/ 4 | test.rest 5 | -------------------------------------------------------------------------------- /.travis.yml: -------------------------------------------------------------------------------- 1 | sudo: required 2 | 3 | language: python 4 | 5 | python: 6 | - "3.6.5" 7 | 8 | install: 9 | - pip install pipenv 10 | - pipenv install --dev 11 | 12 | script: 13 | - pipenv run python -m unittest tests/*.py 14 | 15 | deploy: 16 | provider: script 17 | services: 18 | - docker 19 | script: 20 | - pipenv run python deploy_to_aws.py 21 | on: 22 | branch: master 23 | -------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | FROM python:3.6.5-slim 2 | WORKDIR /usr/src/app 3 | COPY . . 4 | EXPOSE 5000 5 | RUN pip install pipenv 6 | RUN pipenv install 7 | CMD ["pipenv", "run", "python", "-m", "microservice.api"] 8 | -------------------------------------------------------------------------------- /Pipfile: -------------------------------------------------------------------------------- 1 | [[source]] 2 | url = "https://pypi.org/simple" 3 | verify_ssl = true 4 | name = "pypi" 5 | 6 | [packages] 7 | flask = "*" 8 | 9 | [dev-packages] 10 | "boto3" = "*" 11 | ipython = "*" 12 | docker = "*" 13 | 14 | [requires] 15 | python_version = "3.6" 16 | -------------------------------------------------------------------------------- /Pipfile.lock: -------------------------------------------------------------------------------- 1 | { 2 | "_meta": { 3 | "hash": { 4 | "sha256": "a8e748ed2d22e2a94469c2f10c81444fec393b2cad2f4e0947857089a8c2a442" 5 | }, 6 | "pipfile-spec": 6, 7 | "requires": { 8 | "python_version": "3.6" 9 | }, 10 | "sources": [ 11 | { 12 | "name": "pypi", 13 | "url": "https://pypi.org/simple", 14 | "verify_ssl": true 15 | } 16 | ] 17 | }, 18 | "default": { 19 | "click": { 20 | "hashes": [ 21 | "sha256:29f99fc6125fbc931b758dc053b3114e55c77a6e4c6c3a2674a2dc986016381d", 22 | "sha256:f15516df478d5a56180fbf80e68f206010e6d160fc39fa508b65e035fd75130b" 23 | ], 24 | "version": "==6.7" 25 | }, 26 | "flask": { 27 | "hashes": [ 28 | "sha256:2271c0070dbcb5275fad4a82e29f23ab92682dc45f9dfbc22c02ba9b9322ce48", 29 | "sha256:a080b744b7e345ccfcbc77954861cb05b3c63786e93f2b3875e0913d44b43f05" 30 | ], 31 | "index": "pypi", 32 | "version": "==1.0.2" 33 | }, 34 | "itsdangerous": { 35 | "hashes": [ 36 | "sha256:cbb3fcf8d3e33df861709ecaf89d9e6629cff0a217bc2848f1b41cd30d360519" 37 | ], 38 | "version": "==0.24" 39 | }, 40 | "jinja2": { 41 | "hashes": [ 42 | "sha256:74c935a1b8bb9a3947c50a54766a969d4846290e1e788ea44c1392163723c3bd", 43 | "sha256:f84be1bb0040caca4cea721fcbbbbd61f9be9464ca236387158b0feea01914a4" 44 | ], 45 | "version": "==2.10" 46 | }, 47 | "markupsafe": { 48 | "hashes": [ 49 | "sha256:a6be69091dac236ea9c6bc7d012beab42010fa914c459791d627dad4910eb665" 50 
| ], 51 | "version": "==1.0" 52 | }, 53 | "werkzeug": { 54 | "hashes": [ 55 | "sha256:c3fd7a7d41976d9f44db327260e263132466836cef6f91512889ed60ad26557c", 56 | "sha256:d5da73735293558eb1651ee2fddc4d0dedcfa06538b8813a2e20011583c9e49b" 57 | ], 58 | "version": "==0.14.1" 59 | } 60 | }, 61 | "develop": { 62 | "appnope": { 63 | "hashes": [ 64 | "sha256:5b26757dc6f79a3b7dc9fab95359328d5747fcb2409d331ea66d0272b90ab2a0", 65 | "sha256:8b995ffe925347a2138d7ac0fe77155e4311a0ea6d6da4f5128fe4b3cbe5ed71" 66 | ], 67 | "markers": "sys_platform == 'darwin'", 68 | "version": "==0.1.0" 69 | }, 70 | "backcall": { 71 | "hashes": [ 72 | "sha256:38ecd85be2c1e78f77fd91700c76e14667dc21e2713b63876c0eb901196e01e4", 73 | "sha256:bbbf4b1e5cd2bdb08f915895b51081c041bac22394fdfcfdfbe9f14b77c08bf2" 74 | ], 75 | "version": "==0.1.0" 76 | }, 77 | "boto3": { 78 | "hashes": [ 79 | "sha256:581a1b77e72ece6190df1f6be1a5601a9877c256304298358d0387189652bf0e", 80 | "sha256:c5a52cb93f7ae28e960810cb69d1fc8a9d00c1f122a681d7454a4d70e42dd5de" 81 | ], 82 | "index": "pypi", 83 | "version": "==1.7.48" 84 | }, 85 | "botocore": { 86 | "hashes": [ 87 | "sha256:1218517172be766acb9f5de7f4044f9f0f9d9589614e7e6fe6562072586e6290", 88 | "sha256:541e08b6eedb3156a6bec66092b76729704310c61bccf3ad5867ca774f4fa87a" 89 | ], 90 | "version": "==1.10.48" 91 | }, 92 | "certifi": { 93 | "hashes": [ 94 | "sha256:13e698f54293db9f89122b0581843a782ad0934a4fe0172d2a980ba77fc61bb7", 95 | "sha256:9fa520c1bacfb634fa7af20a76bcbd3d5fb390481724c597da32c719a7dca4b0" 96 | ], 97 | "version": "==2018.4.16" 98 | }, 99 | "chardet": { 100 | "hashes": [ 101 | "sha256:84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae", 102 | "sha256:fc323ffcaeaed0e0a02bf4d117757b98aed530d9ed4531e3e15460124c106691" 103 | ], 104 | "version": "==3.0.4" 105 | }, 106 | "decorator": { 107 | "hashes": [ 108 | "sha256:2c51dff8ef3c447388fe5e4453d24a2bf128d3a4c32af3fabef1f01c6851ab82", 109 | "sha256:c39efa13fbdeb4506c476c9b3babf6a718da943dab7811c206005a4a956c080c" 110 | ], 111 | "version": "==4.3.0" 112 | }, 113 | "docker": { 114 | "hashes": [ 115 | "sha256:52cf5b1c3c394f9abf897638bfc3336d6b63a0f65969d0d4d2da6d3b1d8032b6", 116 | "sha256:ad077b49660b711d20f50f344f70cfae014d635ef094bf21b0d7df5f0aeedf99" 117 | ], 118 | "index": "pypi", 119 | "version": "==3.4.1" 120 | }, 121 | "docker-pycreds": { 122 | "hashes": [ 123 | "sha256:0a941b290764ea7286bd77f54c0ace43b86a8acd6eb9ead3de9840af52384079", 124 | "sha256:8b0e956c8d206f832b06aa93a710ba2c3bcbacb5a314449c040b0b814355bbff" 125 | ], 126 | "version": "==0.3.0" 127 | }, 128 | "docutils": { 129 | "hashes": [ 130 | "sha256:02aec4bd92ab067f6ff27a38a38a41173bf01bed8f89157768c1573f53e474a6", 131 | "sha256:51e64ef2ebfb29cae1faa133b3710143496eca21c530f3f71424d77687764274", 132 | "sha256:7a4bd47eaf6596e1295ecb11361139febe29b084a87bf005bf899f9a42edc3c6" 133 | ], 134 | "version": "==0.14" 135 | }, 136 | "idna": { 137 | "hashes": [ 138 | "sha256:156a6814fb5ac1fc6850fb002e0852d56c0c8d2531923a51032d1b70760e186e", 139 | "sha256:684a38a6f903c1d71d6d5fac066b58d7768af4de2b832e426ec79c30daa94a16" 140 | ], 141 | "version": "==2.7" 142 | }, 143 | "ipython": { 144 | "hashes": [ 145 | "sha256:a0c96853549b246991046f32d19db7140f5b1a644cc31f0dc1edc86713b7676f", 146 | "sha256:eca537aa61592aca2fef4adea12af8e42f5c335004dfa80c78caf80e8b525e5c" 147 | ], 148 | "index": "pypi", 149 | "version": "==6.4.0" 150 | }, 151 | "ipython-genutils": { 152 | "hashes": [ 153 | "sha256:72dd37233799e619666c9f639a9da83c34013a73e8bbc79a7a6348d93c61fab8", 154 | 
"sha256:eb2e116e75ecef9d4d228fdc66af54269afa26ab4463042e33785b887c628ba8" 155 | ], 156 | "version": "==0.2.0" 157 | }, 158 | "jedi": { 159 | "hashes": [ 160 | "sha256:b409ed0f6913a701ed474a614a3bb46e6953639033e31f769ca7581da5bd1ec1", 161 | "sha256:c254b135fb39ad76e78d4d8f92765ebc9bf92cbc76f49e97ade1d5f5121e1f6f" 162 | ], 163 | "version": "==0.12.1" 164 | }, 165 | "jmespath": { 166 | "hashes": [ 167 | "sha256:6a81d4c9aa62caf061cb517b4d9ad1dd300374cd4706997aff9cd6aedd61fc64", 168 | "sha256:f11b4461f425740a1d908e9a3f7365c3d2e569f6ca68a2ff8bc5bcd9676edd63" 169 | ], 170 | "version": "==0.9.3" 171 | }, 172 | "parso": { 173 | "hashes": [ 174 | "sha256:8105449d86d858e53ce3e0044ede9dd3a395b1c9716c696af8aa3787158ab806", 175 | "sha256:d250235e52e8f9fc5a80cc2a5f804c9fefd886b2e67a2b1099cf085f403f8e33" 176 | ], 177 | "version": "==0.3.0" 178 | }, 179 | "pexpect": { 180 | "hashes": [ 181 | "sha256:2a8e88259839571d1251d278476f3eec5db26deb73a70be5ed5dc5435e418aba", 182 | "sha256:3fbd41d4caf27fa4a377bfd16fef87271099463e6fa73e92a52f92dfee5d425b" 183 | ], 184 | "markers": "sys_platform != 'win32'", 185 | "version": "==4.6.0" 186 | }, 187 | "pickleshare": { 188 | "hashes": [ 189 | "sha256:84a9257227dfdd6fe1b4be1319096c20eb85ff1e82c7932f36efccfe1b09737b", 190 | "sha256:c9a2541f25aeabc070f12f452e1f2a8eae2abd51e1cd19e8430402bdf4c1d8b5" 191 | ], 192 | "version": "==0.7.4" 193 | }, 194 | "prompt-toolkit": { 195 | "hashes": [ 196 | "sha256:1df952620eccb399c53ebb359cc7d9a8d3a9538cb34c5a1344bdbeb29fbcc381", 197 | "sha256:3f473ae040ddaa52b52f97f6b4a493cfa9f5920c255a12dc56a7d34397a398a4", 198 | "sha256:858588f1983ca497f1cf4ffde01d978a3ea02b01c8a26a8bbc5cd2e66d816917" 199 | ], 200 | "version": "==1.0.15" 201 | }, 202 | "ptyprocess": { 203 | "hashes": [ 204 | "sha256:923f299cc5ad920c68f2bc0bc98b75b9f838b93b599941a6b63ddbc2476394c0", 205 | "sha256:d7cc528d76e76342423ca640335bd3633420dc1366f258cb31d05e865ef5ca1f" 206 | ], 207 | "version": "==0.6.0" 208 | }, 209 | "pygments": { 210 | "hashes": [ 211 | "sha256:78f3f434bcc5d6ee09020f92ba487f95ba50f1e3ef83ae96b9d5ffa1bab25c5d", 212 | "sha256:dbae1046def0efb574852fab9e90209b23f556367b5a320c0bcb871c77c3e8cc" 213 | ], 214 | "version": "==2.2.0" 215 | }, 216 | "python-dateutil": { 217 | "hashes": [ 218 | "sha256:1adb80e7a782c12e52ef9a8182bebeb73f1d7e24e374397af06fb4956c8dc5c0", 219 | "sha256:e27001de32f627c22380a688bcc43ce83504a7bc5da472209b4c70f02829f0b8" 220 | ], 221 | "markers": "python_version >= '2.7'", 222 | "version": "==2.7.3" 223 | }, 224 | "requests": { 225 | "hashes": [ 226 | "sha256:63b52e3c866428a224f97cab011de738c36aec0185aa91cfacd418b5d58911d1", 227 | "sha256:ec22d826a36ed72a7358ff3fe56cbd4ba69dd7a6718ffd450ff0e9df7a47ce6a" 228 | ], 229 | "version": "==2.19.1" 230 | }, 231 | "s3transfer": { 232 | "hashes": [ 233 | "sha256:90dc18e028989c609146e241ea153250be451e05ecc0c2832565231dacdf59c1", 234 | "sha256:c7a9ec356982d5e9ab2d4b46391a7d6a950e2b04c472419f5fdec70cc0ada72f" 235 | ], 236 | "version": "==0.1.13" 237 | }, 238 | "simplegeneric": { 239 | "hashes": [ 240 | "sha256:dc972e06094b9af5b855b3df4a646395e43d1c9d0d39ed345b7393560d0b9173" 241 | ], 242 | "version": "==0.8.1" 243 | }, 244 | "six": { 245 | "hashes": [ 246 | "sha256:70e8a77beed4562e7f14fe23a786b54f6296e34344c23bc42f07b15018ff98e9", 247 | "sha256:832dc0e10feb1aa2c68dcc57dbb658f1c7e65b9b61af69048abc87a2db00a0eb" 248 | ], 249 | "version": "==1.11.0" 250 | }, 251 | "traitlets": { 252 | "hashes": [ 253 | "sha256:9c4bd2d267b7153df9152698efb1050a5d84982d3384a37b2c1f7723ba3e7835", 254 | 
"sha256:c6cb5e6f57c5a9bdaa40fa71ce7b4af30298fbab9ece9815b5d995ab6217c7d9" 255 | ], 256 | "version": "==4.3.2" 257 | }, 258 | "urllib3": { 259 | "hashes": [ 260 | "sha256:a68ac5e15e76e7e5dd2b8f94007233e01effe3e50e8daddf69acfd81cb686baf", 261 | "sha256:b5725a0bd4ba422ab0e66e89e030c806576753ea3ee08554382c14e685d117b5" 262 | ], 263 | "version": "==1.23" 264 | }, 265 | "wcwidth": { 266 | "hashes": [ 267 | "sha256:3df37372226d6e63e1b1e1eda15c594bca98a22d33a23832a90998faa96bc65e", 268 | "sha256:f4ebe71925af7b40a864553f761ed559b43544f8f71746c2d756c7fe788ade7c" 269 | ], 270 | "version": "==0.1.7" 271 | }, 272 | "websocket-client": { 273 | "hashes": [ 274 | "sha256:18f1170e6a1b5463986739d9fd45c4308b0d025c1b2f9b88788d8f69e8a5eb4a", 275 | "sha256:db70953ae4a064698b27ae56dcad84d0ee68b7b43cb40940f537738f38f510c1" 276 | ], 277 | "version": "==0.48.0" 278 | } 279 | } 280 | } 281 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | [![Build Status](https://travis-ci.org/AlexIoannides/py-docker-aws-example-project.svg?branch=master)](https://travis-ci.org/AlexIoannides/py-docker-aws-example-project) 2 | 3 | # Automated Testing and Deployment of a Python Micro-service to AWS, using Docker, Boto3 and Travis-CI 4 | 5 | The purpose of this project is to demonstrate how to automate the testing and deployment of a simple Flask-based (RESTful) micro-service to a production-like environment on AWS. The deployment pipeline is handled by [Travis-CI](https://travis-ci.org), that has been granted access to this GitHub repository and configured to run upon a pull request or a merge to the master branch. The pipeline is defined in the `.travis.yaml` file and consists of the following steps: 6 | 7 | 1. define which Python version to use; 8 | 2. install the `Pipenv` package using `pip`; 9 | 3. use `Pipenv` to install the project dependencies defined in `Pipfile.lock`; 10 | 4. run unit tests by executing `pipenv run python -m unittest tests/*.py`; and, 11 | 5. if on the `master` branch - e.g. if a pull request has been merged - then start Docker and run the `deploy_to_aws.py` script. 12 | 13 | The `deploy_to_aws.py` script defines the deployment process, which performs the following steps without any manual intervention: 14 | 15 | 1. build the required Docker image; 16 | 2. pushe the image to AWS's Elastic Container Registry (ECR); and, 17 | 3. trigger a rolling redeployment of the service across an Elastic Container Service (ECS) cluster. 18 | 19 | It is reliant on the definition of three environment variables: `AWS_ACCESS_KEY_ID`, `AWS_SECRET_ACCESS_KEY`, and `AWS_REGION`. For security reasons, these are kept out of the `.travis.yml` and are instead defined using the Travis-CI UI. 20 | 21 | Although the micro-service used in this example - as defined in `microservice/api.py` module - only returns a simple message upon a simple `GET` request, it could just as easily be a Machine Learning (ML) model-scoring service that receives the values of feature variables and returns a prediction - the overall pattern is the same. 22 | 23 | ## Initial Configuration of AWS Infrastructure 24 | 25 | Currently, the initial setup of the required AWS infrastructure is entirely manual (although this could also be scripted in the future). What's required, is an ECS cluster that is capable hosting multiple groups of Docker containers (or 'tasks' - i.e. 
web applications or, in our case, just a single micro-service), that sit behind a load balancer that accepts incoming traffic and routes it to different containers in the cluster. Collectively, this constitutes a 'service' that is highly available. At a high level, the steps required to set up this infrastructure using the AWS management console are as follows (assuming the existence of a repository in ECR, containing our Docker image): 26 | 27 | 1. create a new ECS cluster, in a new VPC, using instances that are ~ `t2.medium`; 28 | - when configuring the security group (firewall) for the cluster, consider allowing a rule for a single IP to assist debugging (e.g. YOUR_LOCAL_IP_ADDRESS/32); 29 | 2. create a new application load balancer for the new VPC; 30 | - then create a custom security group for the load balancer (from the EC2 console) that allows anything from the outside world to pass; 31 | 3. modify the ECS cluster's security group to allow the load balancer access, by explicitly referencing the security group for the load balancer that we have just created; 32 | 4. create a new target group for the new VPC (from within the EC2 console under the 'Load Balancers' section), which we will eventually point the load balancer to; 33 | - there is no need to add the instances from the ECS cluster in this step, as this will be handled automatically when creating the service; 34 | - modify the health check path to `/microservice`, otherwise it won't get 200s and will try to re-register hosts; 35 | 5. create a new task in ECS; 36 | - for the sake of simplicity, choose `daemon` mode - i.e. assume there is only one container per task; 37 | - when adding the container for the task, be sure to reference the Docker image uploaded to ECR; 38 | 6. create a new service for our ECS cluster; 39 | - referencing the task, load balancer and target group that we have created in the steps above. 40 | 41 | ## Project Dependencies 42 | 43 | We use [pipenv](https://docs.pipenv.org) for managing project dependencies and Python environments (i.e. virtual environments). All of the direct package dependencies required to run the service (e.g. flask), as well as all the packages used for development and deployment (e.g. boto3, docker and IPython for interactive console sessions), are described in the `Pipfile`. Their precise downstream dependencies are described in `Pipfile.lock`. 44 | 45 | ### Installing Pipenv 46 | 47 | To get started with Pipenv, first of all download it. Assuming that there is a global version of Python available on your system and on the PATH, this can be achieved by running the following command, 48 | 49 | ```bash 50 | pip3 install pipenv 51 | ``` 52 | 53 | Pipenv is also available to install from many non-Python package managers. For example, on OS X it can be installed using the [Homebrew](https://brew.sh) package manager, with the following terminal command, 54 | 55 | ```bash 56 | brew install pipenv 57 | ``` 58 | 59 | For more information, including advanced configuration options, see the [official pipenv documentation](https://docs.pipenv.org). 60 | 61 | ### Installing this Project's Dependencies 62 | 63 | Make sure that you're in the project's root directory (the same one in which `Pipfile` resides), and then run, 64 | 65 | ```bash 66 | pipenv install --dev 67 | ``` 68 | 69 | This will install all of the direct project dependencies as well as the development dependencies (the latter a consequence of the `--dev` flag).
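Should you instead want an installation that matches exactly the versions pinned in `Pipfile.lock` - for example, to reproduce the environment built by the CI pipeline - then Pipenv can be told to ignore the `Pipfile` and install directly from the lock file. This is an optional extra (a minimal sketch using standard Pipenv flags), rather than a step in the project's own pipeline,

```bash
pipenv install --dev --ignore-pipfile
```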
70 | 71 | ### Running Python and IPython from the Project's Virtual Environment 72 | 73 | In order to continue development in a Python environment that precisely mimics the one the project was initially developed with, use Pipenv from the command line as follows, 74 | 75 | ```bash 76 | pipenv run python3 77 | ``` 78 | 79 | The `python3` command could just as well be `ipython3` or the Jupyter notebook server (assuming that `jupyter` has been added as a development dependency), for example, 80 | 81 | ```bash 82 | pipenv run jupyter notebook 83 | ``` 84 | 85 | This will fire up a Jupyter notebook server where the default Python 3 kernel includes all of the direct and development project dependencies. This is how we advise that any notebooks associated with this project are used. 86 | 87 | ## Running Unit Tests 88 | 89 | All tests have been written using the [unittest](https://docs.python.org/3/library/unittest.html) package from the Python standard library. Tests are kept in the `tests` folder and can be run from the command line - e.g. by invoking, 90 | 91 | ```bash 92 | pipenv run python -m unittest tests/test_*.py 93 | ``` 94 | 95 | ## Starting the Micro-service Locally 96 | 97 | The micro-service can be started via the command line, from the root directory, using, 98 | 99 | ```bash 100 | pipenv run python -m microservice.api 101 | ``` 102 | 103 | This will start the server at `http://localhost:5000/microservice`. -------------------------------------------------------------------------------- /deploy_to_aws.py: -------------------------------------------------------------------------------- 1 | """ 2 | deploy_to_aws.py 3 | ~~~~~~~~~~~~~~~~ 4 | 5 | A simple script that demonstrates how the docker and AWS Python clients 6 | can be used to automate the process of: building a Docker image, as 7 | defined by the Dockerfile in the project's root directory; pushing the 8 | image to AWS's Elastic Container Registry (ECR); and then forcing a 9 | redeployment of an AWS Elastic Container Service (ECS) service that 10 | uses the image to host the micro-service. 11 | 12 | For now, it is assumed that the AWS infrastructure is already in 13 | existence and that Docker is running on the host machine. 14 | """ 15 | 16 | import base64 17 | import json 18 | import os 19 | 20 | import boto3 21 | import docker 22 | 23 | ECS_CLUSTER = 'py-docker-aws-example-project-cluster' 24 | ECS_SERVICE = 'py-docker-aws-example-project-service' 25 | 26 | LOCAL_REPOSITORY = 'py-docker-aws-example-project:latest' 27 | 28 | 29 | def main(): 30 | """Build Docker image, push to AWS and update ECS service.
31 | 32 | :rtype: None 33 | """ 34 | 35 | # get AWS credentials 36 | aws_credentials = read_aws_credentials() 37 | access_key_id = aws_credentials['access_key_id'] 38 | secret_access_key = aws_credentials['secret_access_key'] 39 | aws_region = aws_credentials['region'] 40 | 41 | # build Docker image 42 | docker_client = docker.from_env() 43 | image, build_log = docker_client.images.build( 44 | path='.', tag=LOCAL_REPOSITORY, rm=True) 45 | 46 | # get AWS ECR login token 47 | ecr_client = boto3.client( 48 | 'ecr', aws_access_key_id=access_key_id, 49 | aws_secret_access_key=secret_access_key, region_name=aws_region) 50 | 51 | ecr_credentials = ( 52 | ecr_client 53 | .get_authorization_token() 54 | ['authorizationData'][0]) 55 | 56 | ecr_username = 'AWS' 57 | 58 | ecr_password = ( 59 | base64.b64decode(ecr_credentials['authorizationToken']) 60 | .replace(b'AWS:', b'') 61 | .decode('utf-8')) 62 | 63 | ecr_url = ecr_credentials['proxyEndpoint'] 64 | 65 | # get Docker to login/authenticate with ECR 66 | docker_client.login( 67 | username=ecr_username, password=ecr_password, registry=ecr_url) 68 | 69 | # tag image for AWS ECR 70 | ecr_repo_name = '{}/{}'.format( 71 | ecr_url.replace('https://', ''), LOCAL_REPOSITORY) 72 | 73 | image.tag(ecr_repo_name, tag='latest') 74 | 75 | # push image to AWS ECR 76 | push_log = docker_client.images.push(ecr_repo_name, tag='latest') 77 | 78 | # force new deployment of ECS service 79 | ecs_client = boto3.client( 80 | 'ecs', aws_access_key_id=access_key_id, 81 | aws_secret_access_key=secret_access_key, region_name=aws_region) 82 | 83 | ecs_client.update_service( 84 | cluster=ECS_CLUSTER, service=ECS_SERVICE, forceNewDeployment=True) 85 | 86 | return None 87 | 88 | 89 | def read_aws_credentials(filename='.aws_credentials.json'): 90 | """Read AWS credentials from file. 91 | 92 | :param filename: Credentials filename, defaults to '.aws_credentials.json' 93 | :param filename: str, optional 94 | :return: Dictionary of AWS credentials. 95 | :rtype: Dict[str, str] 96 | """ 97 | 98 | try: 99 | with open(filename) as json_data: 100 | credentials = json.load(json_data) 101 | 102 | for variable in ('access_key_id', 'secret_access_key', 'region'): 103 | if variable not in credentials.keys(): 104 | msg = '"{}" cannot be found in {}'.format(variable, filename) 105 | raise KeyError(msg) 106 | 107 | except FileNotFoundError: 108 | try: 109 | credentials = { 110 | 'access_key_id': os.environ['AWS_ACCESS_KEY_ID'], 111 | 'secret_access_key': os.environ['AWS_SECRET_ACCESS_KEY'], 112 | 'region': os.environ['AWS_REGION'] 113 | } 114 | except KeyError: 115 | msg = 'no AWS credentials found in file or environment variables' 116 | raise RuntimeError(msg) 117 | 118 | return credentials 119 | 120 | 121 | if __name__ == '__main__': 122 | main() -------------------------------------------------------------------------------- /microservice/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AlexIoannides/py-docker-aws-example-project/2dcf83cb3d4b563888e10464f1efa464a9e35e17/microservice/__init__.py -------------------------------------------------------------------------------- /microservice/api.py: -------------------------------------------------------------------------------- 1 | """ 2 | api.py 3 | ~~~~~~ 4 | 5 | This module defines the simple API for our example project and will 6 | also start the service if executed. 
7 | """ 8 | 9 | from flask import Flask, jsonify, make_response, request 10 | 11 | app = Flask(__name__) 12 | 13 | 14 | @app.route('/microservice', methods=['GET']) 15 | def service_health_check(): 16 | """Service health-check endpoint. 17 | 18 | Returns a simple message string to confirm that the service is 19 | operational. 20 | 21 | :return: A message. 22 | :rtype: str 23 | """ 24 | 25 | message = 'The microservice is operational.' 26 | return make_response(jsonify({'health_check': message})) 27 | 28 | 29 | if __name__ == '__main__': 30 | app.run(host='0.0.0.0', port=5000) -------------------------------------------------------------------------------- /tests/test_microservice.py: -------------------------------------------------------------------------------- 1 | """ 2 | test_microservice.py 3 | ~~~~~~~~~~~~~~~~~~~~ 4 | 5 | Tests the the service returns the expected responses . 6 | """ 7 | 8 | import json 9 | import unittest 10 | 11 | from microservice.api import app 12 | 13 | 14 | class TestMicroserviceAPI(unittest.TestCase): 15 | def setUp(self): 16 | self.client = app.test_client() 17 | 18 | def test_service(self): 19 | """Test endpoint mesage""" 20 | 21 | # arrange 22 | uri = '/microservice' 23 | 24 | # act 25 | response = self.client.get(uri) 26 | 27 | # assert 28 | exp_response = { 29 | 'health_check': 'The microservice is operational.'} 30 | 31 | self.assertEqual(response.json, exp_response) 32 | 33 | 34 | if __name__ == '__main__': 35 | unittest.main() --------------------------------------------------------------------------------