├── .gitmodules ├── AWSArchitecture.jpg ├── LICENSE ├── README.md ├── README ├── SDLC-Automation.md └── aws-cdk-logo.png ├── TODO.md ├── amplify.yml ├── amplify ├── .gitignore ├── .gitmodules ├── LICENSE ├── README.md ├── archetypes │ └── default.md ├── config.toml ├── content │ ├── _index.en.md │ ├── _index.vi.md │ ├── build-pipeline │ │ ├── _index.en.md │ │ ├── blue-green-canary-deployment.md │ │ ├── cdkinit │ │ │ └── _index.en.md │ │ ├── codecommit │ │ │ ├── _index.en.md │ │ │ ├── credhelper │ │ │ │ └── _index.en.md │ │ │ ├── gitpush │ │ │ │ └── _index.en.md │ │ │ └── gitrepo │ │ │ │ └── _index.en.md │ │ ├── howto │ │ │ └── _index.en.md │ │ ├── pipeline-as-code │ │ │ ├── _index.en.md │ │ │ ├── bucket │ │ │ │ └── _index.en.md │ │ │ ├── build │ │ │ │ └── _index.en.md │ │ │ ├── buildspec │ │ │ │ └── _index.en.md │ │ │ ├── deploy │ │ │ │ └── _index.en.md │ │ │ └── source │ │ │ │ └── _index.en.md │ │ └── verify-pipeline │ │ │ └── _index.en.md │ ├── canaries │ │ ├── _index.en.md │ │ ├── codedeploy │ │ │ └── _index.en.md │ │ ├── finished │ │ │ └── _index.en.md │ │ ├── intro │ │ │ └── _index.en.md │ │ ├── monitor │ │ │ └── _index.en.md │ │ ├── rollbacks │ │ │ ├── _index.en.md │ │ │ ├── codedeploy │ │ │ │ └── _index.en.md │ │ │ └── faketraffic │ │ │ │ └── _index.en.md │ │ └── sam │ │ │ └── _index.en.md │ ├── cdk8s │ │ ├── _index.en.md │ │ └── cdk8s-typescript.md │ ├── container-typescript │ │ ├── _index.md │ │ ├── container-stack │ │ │ ├── _index.md │ │ │ ├── eks-Cluster-EC2-spot.md │ │ │ ├── eks-Cluster-EC2.md │ │ │ ├── eks-FargateCluster.md │ │ │ └── vpc.md │ │ ├── create-cdk-project │ │ │ └── _index.md │ │ └── logging_monitoring.zip │ ├── prerequisites │ │ ├── _index.en.md │ │ ├── _index.vi.md │ │ ├── aws-workshop-portal.md │ │ ├── bootstrap │ │ │ ├── _index.en.md │ │ │ ├── _index.vi.md │ │ │ ├── k8stools.md │ │ │ └── verify-prerequisites.md │ │ └── cloud9-workspace │ │ │ ├── _index.en.md │ │ │ └── admin-iam-role.md │ ├── showcase.en.md │ └── showcase.vi.md ├── data │ └── common.toml ├── diagrams │ ├── hitcounter │ └── table-viewer ├── layouts │ ├── partials │ │ ├── custom-footer.html │ │ ├── logo.html │ │ └── menu-footer.html │ └── shortcodes │ │ ├── cdkversion.html │ │ ├── ghcontributors.html │ │ ├── tab.html │ │ └── tabs.html ├── package-lock.json ├── package.json └── static │ ├── AWS_Logo.svg │ ├── assets │ ├── bootstrap.sh │ ├── buildspec.yml │ ├── chapter6 │ │ ├── artifacts-store.yml │ │ ├── prod-roles.yml │ │ └── step1 │ │ │ └── pipeline.yml │ └── pipeline.yaml │ ├── css │ ├── theme-mine.css │ └── theme-workshop.css │ ├── fonts │ ├── monogramos-webfont.eot │ ├── monogramos-webfont.svg │ ├── monogramos-webfont.ttf │ ├── monogramos-webfont.woff │ └── monogramos-webfont.woff2 │ └── images │ ├── EKS-K8s-on-AWS.png │ ├── build-pipeline │ ├── cicd-cloudformation.png │ ├── pipeline-art.png │ ├── pipeline-verify-fail.png │ ├── pipeline-verify-success.png │ ├── screenshot-hidden-files-cloud9.png │ └── screenshot-verify-codecommit.png │ ├── canaries │ ├── canary-deployments.png │ ├── lambda-versions-aliases.png │ ├── screenshot-canary-codedeploy-0.png │ ├── screenshot-canary-codedeploy-00.png │ ├── screenshot-canary-codedeploy-1.png │ ├── screenshot-canary-codedeploy-2.png │ ├── screenshot-codedeploy-rollback.png │ ├── traffic-shifting-codedeploy.png │ └── traffic-shifting.png │ ├── cdk8s │ └── cdk8s.gif │ ├── container-typescript │ ├── AWS-DevOps.png │ ├── alb-dns.png │ ├── alb-tg-check1.png │ ├── alb-tg-check2.png │ ├── canary-lb.png │ ├── cdk-eks-architecture.png │ ├── cfn-kubectl.png │ ├── 
eks-bg-1.png │ ├── eks-bg-2.png │ ├── eks-canary.png │ ├── eks-cicd-codebuild.png │ ├── flask01.png │ ├── flask02.png │ ├── stage12-green.png │ ├── stage34-green.png │ ├── web-blue-inv.png │ ├── web-blue.png │ ├── web-default.png │ ├── web-green-inv.png │ └── web-green.png │ ├── favicon-16x16.png │ ├── favicon.png │ ├── logging_monitoring.zip │ ├── prerequisites │ ├── Monoliths-to-Micro-Services-Journey.png │ ├── Unicorn-Serverless-Application.png │ ├── c9disableiam.png │ ├── c9instancerole.png │ ├── event-engine-dashboard.png │ ├── event-engine-initial-screen.png │ ├── get-ready.png │ └── modern-apps.png │ └── showcase │ ├── Online Markdown Editor - Dillinger, the Last Markdown Editor ever._files │ ├── app.css │ ├── button.svg │ ├── css │ ├── main.bundle.js │ └── monetization.js │ ├── ecs-fargate.png │ ├── eks-fargate.png │ ├── scale-out-computing-on-aws.png │ ├── serverless-data-lake.png │ └── serverless.png ├── container-typescript ├── cdk-eks-fargate │ ├── .env │ ├── .gitignore │ ├── .npmignore │ ├── README.md │ ├── bin │ │ └── cdk-eks.ts │ ├── cdk.json │ ├── jest.config.js │ ├── lib │ │ ├── cdk-eks-ec2-stack.ts │ │ └── cdk-eks-stack.ts │ ├── package-lock.json │ ├── package.json │ ├── project-directory.txt │ ├── test │ │ └── cdk-eks.test.ts │ └── tsconfig.json ├── cdk-eks │ ├── .env │ ├── .gitignore │ ├── .npmignore │ ├── README.md │ ├── bin │ │ └── cdk-eks.ts │ ├── cdk.json │ ├── jest.config.js │ ├── lib │ │ └── cdk-eks-stack.ts │ ├── package-lock.json │ ├── package.json │ ├── project-directory.txt │ ├── test │ │ └── cdk-eks.test.ts │ └── tsconfig.json ├── eks-blue-green-cicd │ ├── .gitignore │ ├── README.md │ ├── cdk │ │ ├── .gitignore │ │ ├── README.md │ │ ├── bin │ │ │ └── cdk.ts │ │ ├── cdk.json │ │ ├── jest.config.js │ │ ├── lib │ │ │ └── cdk-stack.ts │ │ ├── package.json │ │ ├── test │ │ │ └── cdk.test.ts │ │ └── tsconfig.json │ ├── dockerAssets.d │ │ ├── Dockerfile │ │ └── entrypoint.sh │ ├── flask-docker-app │ │ ├── Dockerfile │ │ ├── app.py │ │ ├── k8s │ │ │ ├── alb-ingress-controller.yaml │ │ │ ├── flask-ALB-namespace.yaml │ │ │ ├── flask.yaml │ │ │ ├── flaskALBBlue.yaml │ │ │ ├── flaskALBGreen.yaml │ │ │ ├── flaskALBIngress_query.yaml │ │ │ ├── flaskALBIngress_query2.yaml │ │ │ ├── flaskBlue.yaml │ │ │ ├── flaskGreen.yaml │ │ │ └── setup.sh │ │ ├── pyvenv.cfg │ │ ├── requirements.txt │ │ ├── static │ │ │ └── css │ │ │ │ ├── bootstrap-responsive.min.css │ │ │ │ └── bootstrap.min.css │ │ └── templates │ │ │ └── hello.html │ └── images │ │ ├── alb-dns.png │ │ ├── alb-tg-check1.png │ │ ├── alb-tg-check2.png │ │ ├── canary-lb.png │ │ ├── cfn-kubectl.png │ │ ├── eks-bg-1.png │ │ ├── eks-bg-2.png │ │ ├── eks-canary.png │ │ ├── eks-cicd-codebuild.png │ │ ├── flask01.png │ │ ├── flask02.png │ │ ├── stage12-green.png │ │ ├── stage34-green.png │ │ ├── web-blue-inv.png │ │ ├── web-blue.png │ │ ├── web-default.png │ │ ├── web-green-inv.png │ │ └── web-green.png ├── twitter4u-docker │ ├── .env.sh │ ├── .gitignore │ ├── Dockerfile │ ├── README.md │ ├── Twitter4U │ │ └── twitter_stream_producer.js │ ├── deploy.sh │ └── src │ │ ├── config.js │ │ ├── package.json │ │ ├── twitter_reader_config.js │ │ ├── twitter_stream_producer.js │ │ ├── twitter_stream_producer_app.js │ │ └── util │ │ └── logger.js └── twitter4u-fargate │ ├── .gitignore │ ├── .npmignore │ ├── README.md │ ├── bin │ └── social_reader.ts │ ├── cdk.context.json │ ├── cdk.json │ ├── jest.config.js │ ├── lib │ └── social_reader-stack.ts │ ├── package-lock.json │ ├── package.json │ ├── test │ └── social_reader.test.ts │ └── 
tsconfig.json ├── devsecops ├── Dockerfile ├── Makefile ├── README.md ├── copilot │ ├── .workspace │ └── devsecopslb │ │ └── manifest.yml └── install.sh ├── eks-cluster ├── .env ├── .gitignore ├── .npmignore ├── README.md ├── bin │ └── eks-cluster.ts ├── cdk.json ├── jest.config.js ├── lib │ └── eks-cluster-stack.ts ├── package-lock.json ├── package.json ├── project-directory.txt ├── test │ └── eks-cluster.test.ts └── tsconfig.json ├── jenkins ├── README.md ├── jenkins-ecs-workshop.json ├── jenkins-ecs-workshop2.json └── pipelines-pic.png ├── podinfo ├── .gitignore ├── __snapshots__ │ └── main.test.ts.snap ├── cdk8s.yaml ├── help ├── imports │ └── k8s.ts ├── jest.config.js ├── main.test.ts ├── main.ts ├── package-lock.json ├── package.json └── tsconfig.json └── serverless-python ├── base_common └── __init__.py └── url-shortener ├── .gitignore ├── README.md ├── app.py ├── cdk.context.json ├── cdk.json ├── lambda └── handler.py ├── pinger ├── Dockerfile └── ping.sh ├── requirements.txt ├── setup.py ├── source.bat ├── traffic101.py └── url_shortener ├── __init__.py └── url_shortener_stack.py /.gitmodules: -------------------------------------------------------------------------------- 1 | [submodule "amplify/themes/hugo-theme-learn"] 2 | path = amplify/themes/hugo-theme-learn 3 | url = https://github.com/nnthanh101/hugo-theme-learn 4 | -------------------------------------------------------------------------------- /AWSArchitecture.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AWS-Devops-Projects/eks-workshop/7573608bd2d4bde5f479b1e7e3f7b7569072d38a/AWSArchitecture.jpg -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2020 AWS Devops 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Serverless Data Lake on AWS 2 | 3 | 4 | 1. Prerequisites 5 | 6 | ``` 7 | brew install node 8 | brew install hugo 9 | ``` 10 | 11 | 2. 
Git clone 12 | 13 | ``` 14 | git clone https://github.com/AWS-Devops-Projects/eks-workshop.git 15 | 16 | cd eks-workshop 17 | npm run theme 18 | ``` 19 | 20 | # Run Hugo 21 | 22 | ``` 23 | npm start 24 | ``` 25 | 26 | > http://localhost:8080 27 | 28 | # Submodule 29 | 30 | ``` 31 | git submodule add https://github.com/AWS-Devops-Projects/hugo-theme-learn themes/hugo-theme-learn 32 | ``` 33 | 34 | # rm -rf .git/modules/themes/hugo-theme-learn .git/modules/hugo-theme-learn 35 | -------------------------------------------------------------------------------- /README/aws-cdk-logo.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AWS-Devops-Projects/eks-workshop/7573608bd2d4bde5f479b1e7e3f7b7569072d38a/README/aws-cdk-logo.png -------------------------------------------------------------------------------- /amplify.yml: -------------------------------------------------------------------------------- 1 | version: 0.1 2 | frontend: 3 | phases: 4 | # IMPORTANT - Please verify your build commands 5 | build: 6 | commands: 7 | - cd amplify 8 | - npm run theme 9 | - hugo 10 | artifacts: 11 | # IMPORTANT - Please verify your build output directory 12 | baseDirectory: amplify/public 13 | files: 14 | - '**/*' 15 | cache: 16 | paths: [] 17 | 18 | -------------------------------------------------------------------------------- /amplify/.gitignore: -------------------------------------------------------------------------------- 1 | # Logs 2 | logs 3 | *.log 4 | npm-debug.log* 5 | yarn-debug.log* 6 | yarn-error.log* 7 | lerna-debug.log* 8 | 9 | # Diagnostic reports (https://nodejs.org/api/report.html) 10 | report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json 11 | 12 | # Runtime data 13 | pids 14 | *.pid 15 | *.seed 16 | *.pid.lock 17 | 18 | # Directory for instrumented libs generated by jscoverage/JSCover 19 | lib-cov 20 | 21 | # Coverage directory used by tools like istanbul 22 | coverage 23 | *.lcov 24 | 25 | # nyc test coverage 26 | .nyc_output 27 | 28 | # Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files) 29 | .grunt 30 | 31 | # Bower dependency directory (https://bower.io/) 32 | bower_components 33 | 34 | # node-waf configuration 35 | .lock-wscript 36 | 37 | # Compiled binary addons (https://nodejs.org/api/addons.html) 38 | build/Release 39 | 40 | # Dependency directories 41 | node_modules/ 42 | jspm_packages/ 43 | public/ 44 | 45 | # TypeScript v1 declaration files 46 | typings/ 47 | 48 | # TypeScript cache 49 | *.tsbuildinfo 50 | 51 | # Optional npm cache directory 52 | .npm 53 | 54 | # Optional eslint cache 55 | .eslintcache 56 | 57 | # Microbundle cache 58 | .rpt2_cache/ 59 | .rts2_cache_cjs/ 60 | .rts2_cache_es/ 61 | .rts2_cache_umd/ 62 | 63 | # Optional REPL history 64 | .node_repl_history 65 | 66 | # Output of 'npm pack' 67 | *.tgz 68 | 69 | # Yarn Integrity file 70 | .yarn-integrity 71 | 72 | # dotenv environment variables file 73 | .env 74 | .env.test 75 | 76 | # parcel-bundler cache (https://parceljs.org/) 77 | .cache 78 | 79 | # Next.js build output 80 | .next 81 | 82 | # Nuxt.js build / generate output 83 | .nuxt 84 | dist 85 | 86 | # Gatsby files 87 | .cache/ 88 | # Comment in the public line in if your project uses Gatsby and *not* Next.js 89 | # https://nextjs.org/blog/next-9-1#public-directory-support 90 | # public 91 | 92 | # vuepress build output 93 | .vuepress/dist 94 | 95 | # Serverless directories 96 | .serverless/ 97 | 98 | # FuseBox cache 99 | .fusebox/ 100 | 101 | # DynamoDB Local files 102 
| .dynamodb/ 103 | 104 | # TernJS port file 105 | .tern-port 106 | -------------------------------------------------------------------------------- /amplify/.gitmodules: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AWS-Devops-Projects/eks-workshop/7573608bd2d4bde5f479b1e7e3f7b7569072d38a/amplify/.gitmodules -------------------------------------------------------------------------------- /amplify/LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2020 Thanh Nguyen 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /amplify/README.md: -------------------------------------------------------------------------------- 1 | # Modern Application Development 2 | 3 | > This repository requires `hugo` version [v0.64.0](https://github.com/gohugoio/hugo/releases/tag/v0.64.0). 4 | Please follow the link and install the right version. 5 | ### 1. Prerequisites 6 | ``` 7 | brew install node 8 | brew install hugo 9 | ``` 10 | 11 | ### 2. Git clone 12 | 13 | ``` 14 | git clone https://github.com/nnthanh101/aws-cdk.git 15 | 16 | cd eks-workshop 17 | npm run theme 18 | ``` 19 | 20 | ### 3. Run Hugo 21 | 22 | ``` 23 | npm start 24 | ``` 25 | 26 | > http://localhost:8080 27 | 28 | ### Submodule 29 | 30 | ``` 31 | git submodule add https://github.com/nnthanh101/hugo-theme-learn themes/hugo-theme-learn 32 | 33 | rm -rf .git/modules/themes/hugo-theme-learn .git/modules/hugo-theme-learn 34 | ``` 35 | 36 | ### Remove Submodule 37 | 38 | * Delete the section referring to the submodule from the .gitmodules file 39 | * Stage the changes via git add .gitmodules 40 | * Delete the relevant section of the submodule from .git/config. 
41 | * Run git rm --cached path_to_submodule (no trailing slash) 42 | * Run rm -rf .git/modules/path_to_submodule 43 | * Commit the changes with ```git commit -m "Removed submodule " 44 | * Delete the now untracked submodule files rm -rf path_to_submodule -------------------------------------------------------------------------------- /amplify/archetypes/default.md: -------------------------------------------------------------------------------- 1 | --- 2 | title: "{{ replace .Name "-" " " | title }}" 3 | date: {{ .Date }} 4 | --- 5 | -------------------------------------------------------------------------------- /amplify/config.toml: -------------------------------------------------------------------------------- 1 | baseURL = "/" 2 | languageCode = "en-US" 3 | defaultContentLanguage = "en" 4 | googleAnalytics = "UA-158079754-2" 5 | 6 | title = "Modern Application Development" 7 | theme = "hugo-theme-learn" 8 | themesdir = "themes" 9 | metaDataFormat = "yaml" 10 | defaultContentLanguageInSubdir= true 11 | 12 | [params] 13 | themeVariant = "workshop" 14 | editURL = "https://github.com/nnthanh101/aws-cdk/edit/master/content/" 15 | description = "Modern Application Development" 16 | author = "nnthanh101@gmail.com" 17 | showVisitedLinks = true 18 | disableBreadcrumb = false 19 | disableNextPrev = false 20 | 21 | [outputs] 22 | home = [ "HTML", "RSS", "JSON"] 23 | 24 | [Languages] 25 | [Languages.en] 26 | title = "Modern Application Development" 27 | weight = 1 28 | languageName = "English" 29 | 30 | [[Languages.en.menu.shortcuts]] 31 | name = " GitHub" 32 | identifier = "ds" 33 | url = "https://github.com/nnthanh101/aws-cdk" 34 | weight = 10 35 | 36 | [[Languages.en.menu.shortcuts]] 37 | name = " Showcases" 38 | url = "showcase" 39 | weight = 11 40 | 41 | [[Languages.en.menu.shortcuts]] 42 | name = " DevOps Blog" 43 | identifier = "doc" 44 | url = "https://devops.job4u.io/" 45 | weight = 20 46 | 47 | 48 | [Languages.vi] 49 | title = "Modern Application Development" 50 | weight = 2 51 | languageName = "Tiếng Việt" 52 | 53 | [[Languages.vi.menu.shortcuts]] 54 | name = " GitHub" 55 | identifier = "ds" 56 | url = "https://github.com/nnthanh101/aws-cdk" 57 | weight = 10 58 | 59 | [[Languages.vi.menu.shortcuts]] 60 | name = " Showcases" 61 | url = "showcase" 62 | weight = 11 63 | 64 | [[Languages.vi.menu.shortcuts]] 65 | name = " DevOps" 66 | identifier = "doc" 67 | url = "https://devops.job4u.io/" 68 | weight = 20 -------------------------------------------------------------------------------- /amplify/content/_index.en.md: -------------------------------------------------------------------------------- 1 | --- 2 | title: "Kubernetes on AWS with EKS" 3 | chapter: true 4 | weight: 1 5 | --- 6 | 7 | # Kubernetes on AWS 🚀 8 | 9 | ![Kubernetes using Amazon EKS & AWS CDK](/images/EKS-K8s-on-AWS.png?width=50pc) 10 | 11 | **Modern Applications** are built with a combination of these new architecture patterns, operational models, and software delivery processes, and they allow businesses to innovate faster while reducing risk, time to market, and total cost of ownership. 
12 | 13 | {{% children showhidden="false" %}} -------------------------------------------------------------------------------- /amplify/content/_index.vi.md: -------------------------------------------------------------------------------- 1 | --- 2 | title: "Kubernetes on AWS with EKS" 3 | chapter: true 4 | weight: 1 5 | --- 6 | 7 | # Kubernetes on AWS 🚀 8 | 9 | ![Kubernetes using Amazon EKS & AWS CDK](/images/EKS-K8s-on-AWS.png?width=50pc) 10 | 11 | **Modern Applications** are built with a combination of these new architecture patterns, operational models, and software delivery processes, and they allow businesses to innovate faster while reducing risk, time to market, and total cost of ownership. 12 | 13 | {{% children showhidden="false" %}} -------------------------------------------------------------------------------- /amplify/content/build-pipeline/_index.en.md: -------------------------------------------------------------------------------- 1 | +++ 2 | title = "Build the CI/CD Pipeline" 3 | weight = 50 4 | chapter = true 5 | pre = "5. " 6 | +++ 7 | 8 | # Build the pipeline 9 | 10 | In this chapter you are going to learn how to automate the build, package and deploy commands by creating a continous delivery pipeline using AWS Code Pipeline. 11 | 12 | ![SimplePipeline](/images/build-pipeline/pipeline-art.png) 13 | 14 | The services used in this chapter are CodeCommit, CodeBuild, CodePipeline, CloudFormation and the AWS CDK. 15 | 16 | ```bash 17 | cp -avr sls-app/demo/sam-app/ . 18 | 19 | cd ~/environment/sam-app 20 | ``` -------------------------------------------------------------------------------- /amplify/content/build-pipeline/blue-green-canary-deployment.md: -------------------------------------------------------------------------------- 1 | +++ 2 | title = "Blue/Green Deployments" 3 | weight = 50 4 | pre= "5.5. " 5 | +++ 6 | 7 | 8 | > 🎯 CI/CD with Blue/Green & Canary Deployments on EKS using CDK 9 | 10 | ![Blue/Green and Canary Deployments on EKS using CDK](/images/container-typescript/eks-bg-1.png?width=50pc) 11 | 12 | ![Blue/Green and Canary Deployments on EKS using CDK](/images/container-typescript/eks-bg-2.png?width=50pc) 13 | 14 | ![Blue/Green and Canary Deployments on EKS using CDK](/images/container-typescript/eks-canary.png?width=50pc) 15 | -------------------------------------------------------------------------------- /amplify/content/build-pipeline/cdkinit/_index.en.md: -------------------------------------------------------------------------------- 1 | +++ 2 | title = "Setup a CDK Project" 3 | weight = 20 4 | pre= "5.2. " 5 | +++ 6 | 7 | ## Install the latest CDK 8 | 9 | If you are using Cloud9, the CDK is already pre-installed but it will likely be a few versions old. Run the following commands from the Cloud9 terminal to remove your current version and install the latest one: 10 | ``` 11 | npm uninstall -g aws-cdk --force 12 | npm install -g aws-cdk 13 | ``` 14 | 15 | ### Initialize project 16 | 17 | Now, let's create a folder within our _sls-api_ directory where the pipeline code will reside. 
18 | ``` 19 | cd ~/environment/sls-api 20 | mkdir pipeline 21 | cd pipeline 22 | ``` 23 | 24 | Initialize a new CDK project within the _pipeline_ folder by running the following command: 25 | 26 | ``` 27 | cdk init --language typescript 28 | ``` 29 | 30 | Now install the CDK modules that we will be using to build a pipeline: 31 | 32 | ``` 33 | npm install --save @aws-cdk/aws-codedeploy @aws-cdk/aws-codebuild 34 | npm install --save @aws-cdk/aws-codecommit @aws-cdk/aws-codepipeline-actions 35 | npm install --save @aws-cdk/aws-s3 36 | ``` 37 | 38 | 39 | ### Project Structure 40 | 41 | At this point, your project should have the structure below (only the most relevant files and folders are shown). Within the CDK project, the main file you will be interacting with is the _pipeline-stack.ts_. Don't worry about the rest of the files for now. 42 | 43 | ``` 44 | sls-api # SAM application root 45 | ├── hello_world # Lambda code 46 | ├── samconfig.toml # Config file for manual deployments 47 | ├── template.yaml # SAM template 48 | └── pipeline # CDK project root 49 | └── lib 50 | └── pipeline-stack.ts # Pipeline definition 51 | └── bin 52 | └── pipeline.ts # Entry point for CDK project 53 | ├── cdk.json 54 | ├── tsconfig.json 55 | ├── package.json 56 | └── jest.config.js 57 | ``` 58 | 59 | ### Modify stack name 60 | 61 | Open the `bin/pipeline.ts` file, which is your entry point to the CDK project, and change the name of the stack to **sls-api-cicd**. 62 | 63 | {{}} 64 | #!/usr/bin/env node 65 | import 'source-map-support/register'; 66 | import * as cdk from '@aws-cdk/core'; 67 | import { PipelineStack } from '../lib/pipeline-stack'; 68 | 69 | const app = new cdk.App(); 70 | new PipelineStack(app, 'sls-api-cicd'); 71 | {{}} 72 | 73 | **Save the file**. 74 | 75 | -------------------------------------------------------------------------------- /amplify/content/build-pipeline/codecommit/_index.en.md: -------------------------------------------------------------------------------- 1 | +++ 2 | title = "Source Code Control" 3 | weight = 10 4 | pre= "5.1. " 5 | +++ 6 | 7 | 8 | {{% children showhidden="false" %}} -------------------------------------------------------------------------------- /amplify/content/build-pipeline/codecommit/credhelper/_index.en.md: -------------------------------------------------------------------------------- 1 | +++ 2 | title = "Configure Credentials" 3 | weight = 22 4 | pre= "5.1.2. " 5 | +++ 6 | 7 | One of the cool things about CodeCommit is the support for IAM authentication. And if you are running this workshop from a Cloud9 workspace, you can leverage the fact that your terminal is already pre-authenticated with valid AWS credentials. 8 | 9 | Run the following commands from your terminal: 10 | 11 | ```bash 12 | git config --global credential.helper '!aws codecommit credential-helper $@' 13 | git config --global credential.UseHttpPath true 14 | ``` 15 | 16 | Now configure the git client with username and email, so your commits have an author defined. 
17 | 18 | ``` 19 | git config --global user.name "Replace with your name" 20 | git config --global user.email "replace_with_your_email@example.com" 21 | ``` 22 | 23 | Example: 24 | 25 | ```bash 26 | git config --global user.name "Thanh Nguyen" 27 | git config --global user.email "nnthanh101@gmail.com" 28 | ``` -------------------------------------------------------------------------------- /amplify/content/build-pipeline/codecommit/gitpush/_index.en.md: -------------------------------------------------------------------------------- 1 | +++ 2 | title = "Push the Code" 3 | weight = 30 4 | pre= "5.1.3. " 5 | +++ 6 | 7 | ### Ignore the build artifacts 8 | Copy and paste the following lines at the end of the `sls-api/.gitgnore` file. There is no need to track the .aws-sam directory or the packaged.yaml under version control as they are re-generated on every build. 9 | 10 | ``` 11 | .aws-sam/ 12 | packaged.yaml 13 | ``` 14 | 15 | In Cloud9, remember to enable hidden files: 16 | 17 | ![EnableHiddenFiles](/images/build-pipeline/screenshot-hidden-files-cloud9.png) 18 | 19 | Open the `.gitignore` file and paste the two lines described above. 20 | 21 | From the root directory of your _sls-api_ project, run the following commands: 22 | 23 | ``` 24 | cd ~/environment/sls-api 25 | git init 26 | git add . 27 | git commit -m "Initial commit" 28 | ``` 29 | 30 | ### Push the code 31 | Add your CodeCommit repository URL as a _remote_ on your local git project. This is the `cloneUrlHttp` value that you got back after creating the repository in Step 1 of this chapter. 32 | 33 | {{% notice tip %}} 34 | If you can't find the CodeCommit repository URL, you can find it by running this command: `aws codecommit get-repository --repository-name sls-api`. 35 | {{% /notice %}} 36 | 37 | ``` 38 | git remote add origin REPLACE_WITH_HTTP_CLONE_URL 39 | 40 | # git remote set-url origin https://git-codecommit.ap-southeast-1.amazonaws.com/v1/repos/sls-api 41 | ``` 42 | 43 | {{% notice tip %}} 44 | If you typed the origin url incorrectly, you can remove it by running: `git remote rm origin`. 45 | {{% /notice %}} 46 | 47 | Now, push the code: 48 | 49 | ``` 50 | git push -u origin master 51 | ``` 52 | 53 | ### Verify in CodeCommit 54 | Navigate to the [AWS CodeCommit console](https://console.aws.amazon.com/codesuite/codecommit/home), find your _sls-api_ repository and click on it to view its contents. Make sure your code is there. You should see a screen like the following: 55 | 56 | ![VerifyCodeCommit](/images/build-pipeline/screenshot-verify-codecommit.png) -------------------------------------------------------------------------------- /amplify/content/build-pipeline/codecommit/gitrepo/_index.en.md: -------------------------------------------------------------------------------- 1 | +++ 2 | title = "Create a Git Repository" 3 | weight = 10 4 | pre= "5.1.1. " 5 | +++ 6 | 7 | Any CI/CD pipeline starts with a code repository. In this workshop we use AWS CodeCommit for ease of integration, but you could use other source code integrations, like GitHub for example. 8 | 9 | Run the following command from your terminal to create a new CodeCommit repository: 10 | 11 | ``` 12 | aws codecommit create-repository --repository-name sls-api 13 | ``` 14 | 15 | ✍️ You should see the following output. Copy the value of `cloneUrlHttp`, you will need it later. 
16 | 17 | {{}} 18 | { 19 | "repositoryMetadata": { 20 | "accountId": "111111111111", 21 | "repositoryId": "ab66211f-fe79-4f0c-8b8f-937c73ff380f", 22 | "repositoryName": "sls-api", 23 | "lastModifiedDate": 1589626684.833, 24 | "creationDate": 1589626684.833, 25 | "cloneUrlHttp": "https://git-codecommit.ap-southeast-1.amazonaws.com/v1/repos/sls-api", 26 | "cloneUrlSsh": "ssh://git-codecommit.ap-southeast-1.amazonaws.com/v1/repos/sls-api", 27 | "Arn": "arn:aws:codecommit:ap-southeast-1:701571471198:sls-api" 28 | } 29 | } 30 | {{}} -------------------------------------------------------------------------------- /amplify/content/build-pipeline/howto/_index.en.md: -------------------------------------------------------------------------------- 1 | +++ 2 | title = "How to build a Pipeline" 3 | weight = 60 4 | pre= "5.6. " 5 | +++ 6 | 7 | The best way to automate the creation of CI/CD pipelines is by provisioning them programmatically via Infrastructure as Code. This is especially useful in a Micro-Services environment, where you have a pipeline per Micro-Service, which potentially means dozens of pipelines, if not more. Having an automated way to create these pipelines enables developers to create as many as necessary without building them manually from the console every time. 8 | 9 | ### Different ways to create pipelines 10 | We see customers using different mechanisms for creating pipelines programmatically. Nowadays developers have many choices to pick from, but the most common ones we see are the following: 11 | 12 | - [AWS CloudFormation](https://docs.aws.amazon.com/codepipeline/latest/userguide/tutorials.html) 13 | - [AWS CDK](https://docs.aws.amazon.com/cdk/latest/guide/codepipeline_example.html) 14 | - [Terraform](https://www.terraform.io/docs/providers/aws/r/codepipeline.html) 15 | - [AWS Serverless App Repository](https://serverlessrepo.aws.amazon.com/applications/arn:aws:serverlessrepo:us-east-1:646794253159:applications~aws-sam-codepipeline-cd) 16 | 17 | ### Introducing the AWS CDK 18 | In this workshop, we are going to use the AWS Cloud Development Kit (also known as CDK) as the pipeline vending mechanism. The AWS CDK is a software development framework for defining cloud infrastructure in code and provisioning it through AWS CloudFormation. 19 | 20 | That's right! You can describe your infrastructure by writing code in TypeScript, C#, Python or Java. Your code is then synthesized into CloudFormation, and using the CDK CLI you can deploy it to an AWS account. 21 | 22 | ### How do SAM and CDK play together? 23 | 24 | Serverless developers use the SAM framework to define their applications, the SAM CLI to build and deploy them, and the AWS CDK to provision any infrastructure-related resources, like their CI/CD pipeline. The nice thing about these tools is that they all share a common ground: CloudFormation. -------------------------------------------------------------------------------- /amplify/content/build-pipeline/pipeline-as-code/_index.en.md: -------------------------------------------------------------------------------- 1 | +++ 2 | title = "Pipeline as Code" 3 | weight = 30 4 | pre= "5.3. " 5 | +++ 6 | 7 | Open the file `lib/pipeline-stack.ts` in your Cloud9 workspace. 8 | 9 | ### Build the CDK project 10 | 11 | Even though we haven't written any code yet, let's get familiar with how to build and deploy a CDK project, as you will be doing it multiple times in this workshop and you should get comfortable with the process.
Start by building the project with the following command: 12 | 13 | ``` 14 | cd ~/environment/sls-api/pipeline 15 | npm install 16 | npm run build 17 | ``` 18 | 19 | ### Deploy a CDK project 20 | 21 | After the build has finished, go ahead and deploy the pipeline project by using the CDK CLI: 22 | 23 | ``` 24 | cdk deploy 25 | ``` 26 | 27 | A new CloudFormation stack was created in your account, but because your CDK project is empty, the only resource that was created was an AWS::CDK::Metadata. If you check your [CloudFormation Console](https://console.aws.amazon.com/cloudformation/home), you will see the new stack and the metadata resource. 28 | 29 | ![CdkMetadata](/images/build-pipeline/cicd-cloudformation.png) -------------------------------------------------------------------------------- /amplify/content/build-pipeline/pipeline-as-code/bucket/_index.en.md: -------------------------------------------------------------------------------- 1 | +++ 2 | title = "Artifacts Bucket" 3 | date = 2019-11-01T15:26:09-07:00 4 | weight = 10 5 | pre = "5.3.1. " 6 | +++ 7 | 8 | Every Code Pipeline needs an artifacts bucket, also known as Artifact Store. CodePipeline will use this bucket to pass artifacts to the downstream jobs and its also where SAM will upload the artifacts during the build process. 9 | 10 | Let's get started and write the code for creating this bucket: 11 | 12 | **Make sure you are editing the pipeline-stack file with _.ts_ extension `sls-api/pipeline/lib/pipeline-stack.ts`** 13 | 14 | {{}} 15 | import * as cdk from '@aws-cdk/core'; 16 | 17 | import s3 = require('@aws-cdk/aws-s3'); 18 | import codecommit = require('@aws-cdk/aws-codecommit'); 19 | import codepipeline = require('@aws-cdk/aws-codepipeline'); 20 | import codepipeline_actions = require('@aws-cdk/aws-codepipeline-actions'); 21 | import codebuild = require('@aws-cdk/aws-codebuild'); 22 | 23 | export class PipelineStack extends cdk.Stack { 24 | constructor(scope: cdk.Construct, id: string, props?: cdk.StackProps) { 25 | super(scope, id, props); 26 | 27 | // The code that defines your stack goes here 28 | 29 | /** Step 1: */ 30 | const artifactsBucket = new s3.Bucket(this, "ArtifactsBucket"); 31 | 32 | } 33 | } 34 | {{}} 35 | 36 | Easy right? Now build and deploy the project like you did it earlier: 37 | 38 | ``` 39 | cd ~/environment/sls-api/pipeline 40 | npm run build 41 | cdk deploy 42 | ``` 43 | 44 | {{% notice info %}} 45 | If you get a build error, check that all the @aws-cdk dependencies in the package.json file have the same version number, if not, fix it, delete the node_modules folder and run npm install. More info: https://github.com/aws/aws-cdk/issues/542#issuecomment-449694450. 46 | {{% /notice %}} 47 | -------------------------------------------------------------------------------- /amplify/content/build-pipeline/pipeline-as-code/source/_index.en.md: -------------------------------------------------------------------------------- 1 | +++ 2 | title = "Source Stage" 3 | weight = 20 4 | pre = "5.3.2. " 5 | +++ 6 | 7 | The **Source Stage** is the first step of any CI/CD pipeline and it represents your source code. This stage is in charge of triggering the pipeline based on new code changes (i.e. git push or pull requests). In this workshop, we will be using AWS CodeCommit as the source provider, but CodePipeline also supports S3, GitHub and Amazon ECR as source providers. 
8 | 9 | Append the following code snippet after your bucket definition in the `sls-api/pipeline/lib/pipeline-stack.ts`** file: 10 | 11 | {{}} 12 | import * as cdk from '@aws-cdk/core'; 13 | 14 | import s3 = require('@aws-cdk/aws-s3'); 15 | import codecommit = require('@aws-cdk/aws-codecommit'); 16 | import codepipeline = require('@aws-cdk/aws-codepipeline'); 17 | import codepipeline_actions = require('@aws-cdk/aws-codepipeline-actions'); 18 | import codebuild = require('@aws-cdk/aws-codebuild'); 19 | 20 | export class PipelineStack extends cdk.Stack { 21 | constructor(scope: cdk.Construct, id: string, props?: cdk.StackProps) { 22 | super(scope, id, props); 23 | 24 | // The code that defines your stack goes here 25 | 26 | /** Step 1: */ 27 | const artifactsBucket = new s3.Bucket(this, "ArtifactsBucket"); 28 | 29 | /** Step 2: */ 30 | // Import existing CodeCommit sls-api repository 31 | const codeRepo = codecommit.Repository.fromRepositoryName( 32 | this, 33 | 'AppRepository', // Logical name within CloudFormation 34 | 'sls-api' // Repository name 35 | ); 36 | 37 | // Pipeline creation starts 38 | const pipeline = new codepipeline.Pipeline(this, 'Pipeline', { 39 | artifactBucket: artifactsBucket 40 | }); 41 | 42 | // Declare source code as an artifact 43 | const sourceOutput = new codepipeline.Artifact(); 44 | 45 | // Add source stage to pipeline 46 | pipeline.addStage({ 47 | stageName: 'Source', 48 | actions: [ 49 | new codepipeline_actions.CodeCommitSourceAction({ 50 | actionName: 'CodeCommit_Source', 51 | repository: codeRepo, 52 | output: sourceOutput, 53 | }), 54 | ], 55 | }); 56 | 57 | } 58 | } 59 | {{}} 60 | 61 | Since we already have the CodeCommit repository, we don't need to create a new one, we just need to import it using the repository name. 62 | 63 | Also notice how we define an object `sourceOutput` as a pipeline artifact; This is necessary for any files that you want CodePipeline to pass to downstream stages. In this case, we want our source code to be passed to the Build stage. 64 | 65 | {{% notice info %}} 66 | Don't do a `cdk deploy` just yet, because a pipeline needs to have at least 2 stages to be created. Lets continue to the next page to add a Build stage first. 67 | {{% /notice%}} 68 | -------------------------------------------------------------------------------- /amplify/content/build-pipeline/verify-pipeline/_index.en.md: -------------------------------------------------------------------------------- 1 | +++ 2 | title = "Verify pipeline" 3 | weight = 40 4 | pre = "5.4. " 5 | +++ 6 | 7 | Let your pipeline run every stage. After it finishes it will look all green like the following screenshot: 8 | 9 | ![VerifyPipelineRunning](/images/build-pipeline/pipeline-verify-success.png) 10 | 11 | #### Congratulations! You have created a CI/CD pipeline for a Serverless application! -------------------------------------------------------------------------------- /amplify/content/canaries/_index.en.md: -------------------------------------------------------------------------------- 1 | +++ 2 | title = "Canary Deployments" 3 | weight = 60 4 | chapter = true 5 | pre = "6. " 6 | +++ 7 | 8 | # Canary Deployments 9 | 10 | A Canary Deployment is a technique that reduces the risk of deploying a new version of an application by slowly rolling out the changes to a small subset of users before rolling it out to the entire customer base. 
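With AWS SAM, a gradual rollout like this can be requested with just a few template properties. The sketch below is illustrative only — the handler, runtime and alarm wiring are assumptions for the example, not the exact template used later in this chapter:

```
Resources:
  HelloWorldFunction:
    Type: AWS::Serverless::Function
    Properties:
      Handler: app.lambda_handler      # assumed handler/runtime for illustration
      Runtime: python3.8
      AutoPublishAlias: live           # publish a new version and repoint the "live" alias on each deploy
      DeploymentPreference:
        Type: Canary10Percent5Minutes  # shift 10% of traffic first, the rest after 5 minutes
        Alarms:
          - !Ref CanaryErrorsAlarm     # roll back automatically if this alarm fires
```

With a preference type like `Canary10Percent5Minutes`, CodeDeploy shifts 10% of invocations to the newly published version, watches the configured alarms for five minutes, and then shifts the remaining traffic (or rolls back if an alarm fires).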
11 | 12 | ![CanaryDeployments](/images/canaries/canary-deployments.png) 13 | 14 | In this chapter you will learn how to implement gradual deployments with AWS SAM, AWS CloudFormation and AWS CodeDeploy with just a few lines of configuration. -------------------------------------------------------------------------------- /amplify/content/canaries/codedeploy/_index.en.md: -------------------------------------------------------------------------------- 1 | +++ 2 | title = "Verify in CodeDeploy" 3 | weight = 25 4 | +++ 5 | 6 | Wait for your pipeline to get to the deployment stage (ExecuteChangeSet). When you see it _In Progress_, navigate to the CodeDeploy console to watch the deployment progress. 7 | 8 | ![CanaryCodeDeploy](/images/canaries/screenshot-canary-codedeploy-00.png) 9 | 10 | Navigate to the [AWS CodeDeploy](https://console.aws.amazon.com/codesuite/codedeploy/home) console and after a couple of minutes, you should see a new deployment in progress. Click on the Deployment to see the details. 11 | 12 | ![CanaryCodeDeploy](/images/canaries/screenshot-canary-codedeploy-0.png) 13 | 14 | The deployment status shows that 10% of the traffic has been shifted to the new version (aka The Canary). CodeDeploy will hold the remaining percentage until the specified time interval has elapsed; in this case, we specified the interval to be 5 minutes. 15 | 16 | ![CanaryCodeDeploy](/images/canaries/screenshot-canary-codedeploy-1.png) 17 | 18 | Shortly after the 5 minutes, the remaining traffic should be shifted to the new version: 19 | 20 | ![CanaryCodeDeploy](/images/canaries/screenshot-canary-codedeploy-2.png) 21 | 22 | -------------------------------------------------------------------------------- /amplify/content/canaries/finished/_index.en.md: -------------------------------------------------------------------------------- 1 | +++ 2 | title = "Next Steps" 3 | date = 2019-11-20T21:13:50-08:00 4 | weight = 50 5 | +++ 6 | 7 | 8 | **Thank you for doing this workshop!** 9 | 10 | We hope you learned something. 11 | 12 | 13 | Stay tuned on this website as we are working on new modules! 14 | If you have a suggestion or feedback to improve this workshop, please let us know in our [GitHub repository](https://github.com/nnthanh101/eks-workshop). 15 | -------------------------------------------------------------------------------- /amplify/content/canaries/intro/_index.en.md: -------------------------------------------------------------------------------- 1 | +++ 2 | title = "How does it work" 3 | date = 2019-11-11T14:45:51-08:00 4 | weight = 10 5 | +++ 6 | 7 | Before we jump into implementing Canary Deployments, let's first understand how it works: 8 | 9 | The concepts of blue/green and canary deployments have been around for a while and are well established as best practices for reducing the risk of software deployments. In traditional applications, you slowly and incrementally update the servers in your fleet while simultaneously verifying application health. However, there is somewhat of an impedance mismatch when mapping these concepts to a serverless world. You can’t incrementally deploy your software across a fleet of servers when there are no servers! 10 | 11 | The answer is that there are a couple of services and features involved in making this possible. Let us explain: 12 | 13 | ### Lambda versions and aliases 14 | 15 | AWS Lambda allows you to publish multiple versions of the same function.
Each version has its own code and associated dependencies, as well as its own function settings (like memory allocation, timeout and environment variables). You can then refer to a given version by using a Lambda Alias. An alias is nothing but a name that can be pointed to a given version of a Lambda function. 16 | 17 | ![VersionsAndAliases](/images/canaries/lambda-versions-aliases.png) 18 | 19 | ### Traffic shifting with Lambda aliases 20 | 21 | With the introduction of alias traffic shifting, it is now possible to trivially implement canary deployments of Lambda functions. By updating additional version weights on an alias, invocation traffic is routed to the new function versions based on the weight specified. Detailed CloudWatch metrics for the alias and version can be analyzed during the deployment, or other health checks performed, to ensure that the new version is healthy before proceeding. 22 | 23 | ![TrafficShifting](/images/canaries/traffic-shifting.png) 24 | 25 | ### Traffic shifting with SAM and CodeDeploy 26 | 27 | AWS CodeDeploy provides an intuitive turn-key implementation of this functionality integrated directly into AWS SAM. Traffic-shifted deployments can be declared in a SAM template, and CodeDeploy manages the function rollout as part of the CloudFormation stack update. CloudWatch alarms can also be configured to trigger a stack rollback if something goes wrong. 28 | 29 | ![TrafficShiftingCodeDeploy](/images/canaries/traffic-shifting-codedeploy.png) -------------------------------------------------------------------------------- /amplify/content/canaries/rollbacks/_index.en.md: -------------------------------------------------------------------------------- 1 | +++ 2 | title = "Rollbacks" 3 | date = 2019-11-12T08:00:36-08:00 4 | weight = 30 5 | +++ 6 | 7 | Monitoring the health of your canary allows CodeDeploy to decide whether a rollback is needed. If any of the specified CloudWatch Alarms goes into the ALARM state, CodeDeploy rolls back the deployment automatically. 8 | 9 | ### Introduce an error on purpose 10 | 11 | Let's break the Lambda function on purpose so that the _CanaryErrorsAlarm_ gets triggered during deployment. Update the Lambda code in `sls-api/hello_world/app.py` to throw an error on every invocation, like this: 12 | 13 | {{}} 14 | # return { 15 | # "statusCode": 200, 16 | # "body": json.dumps({ 17 | # "message": "hello world", 18 | # # "location": ip.text.replace("\n", "") 19 | # }), 20 | # } 21 | 22 | raise SystemExit('This will cause a deployment rollback!') 23 | {{}} 24 | 25 | This change breaks the unit test, which would fail the build, so comment out the test command in the `sls-api/buildspec.yml` file: 26 | 27 | {{}} 28 | pre_build: 29 | commands: 30 | # Run tests, lint scripts or any other pre-build checks. 31 | - cd .. 32 | - pip install pytest pytest-mock --user 33 | # - python -m pytest tests/ -v 34 | {{}} 35 | 36 | ### Push the changes 37 | 38 | In the terminal, run the following commands from the root directory of your `sls-api` project. 39 | 40 | ``` 41 | git add .
42 | git commit -m "Breaking the lambda function for CodeDeploy Rollback purpose" 43 | git push 44 | ``` -------------------------------------------------------------------------------- /amplify/content/canaries/rollbacks/codedeploy/_index.en.md: -------------------------------------------------------------------------------- 1 | +++ 2 | title = "Watch CodeDeploy rollback" 3 | date = 2019-11-12T13:29:37-08:00 4 | weight = 15 5 | +++ 6 | 7 | Navigate to the [AWS CodeDeploy Console](https://console.aws.amazon.com/codedeploy/home) and go into the deployment In-Progress to view its details. 8 | 9 | After a few minutes, CodeDeploy will detect that the _CanaryErrorsAlarm_ has triggered and it will start rolling back the deployment. The screen will look something like this: 10 | 11 | ![CodeDeployRollback](/images/canaries/screenshot-codedeploy-rollback.png) -------------------------------------------------------------------------------- /amplify/content/canaries/rollbacks/faketraffic/_index.en.md: -------------------------------------------------------------------------------- 1 | +++ 2 | title = "Invoke the canary" 3 | weight = 10 4 | +++ 5 | 6 | * [x] **Wait for deployment to start** 7 | 8 | Again, wait for your Pipeline to reach the deployment phase (ExecuteChangeSet). It should turn blue when it begins. 9 | 10 | **While the deployment is running**, you need to generate traffic to the new Lambda function to make it fail and trigger the CloudWatch Alarm. In a real production environment, your users will likely generate organic traffic to the canary function, so you may not need to do this. 11 | 12 | In your terminal, run the following command to invoke the Lambda function: 13 | 14 | ``` 15 | aws lambda invoke --function-name \ 16 | $(aws lambda list-functions | jq -r -c '.Functions[] | select( .FunctionName | contains("sam-app-HelloWorldFunction")).FunctionName'):live \ 17 | --payload '{}' \ 18 | response.json 19 | ``` 20 | 21 | {{% notice tip %}} 22 | If you get an error that `jq` command is not installed, you can install it by running `sudo yum install -y jq`. 23 | {{% /notice%}} 24 | 25 | There will be a new file `response.json` created. It contains the response of the lambda invocation. If you open it, you may see the the response of the old Lambda version, or you may see the new one that causes an error. 26 | 27 | **Remember:** During deployment, only 10% of the traffic will be routed to the new version. So, **keep on invoking your lambda many times**. 1 out of 10 invocations should trigger the new broken lambda, which is what you want to cause a rollback. 28 | 29 | Here is a command that invokes your function 15 times in a loop. Feel free to run it in your terminal. 30 | 31 | ``` 32 | counter=1 33 | while [ $counter -le 15 ] 34 | do 35 | aws lambda invoke --function-name \ 36 | $(aws lambda list-functions | jq -r -c '.Functions[] | select( .FunctionName | contains("sam-app-HelloWorldFunction")).FunctionName'):live \ 37 | --payload '{}' \ 38 | response.json 39 | sleep 1 40 | ((counter++)) 41 | done 42 | ``` -------------------------------------------------------------------------------- /amplify/content/cdk8s/_index.en.md: -------------------------------------------------------------------------------- 1 | +++ 2 | title = "CDK for Kubernetes" 3 | weight = 30 4 | chapter = true 5 | pre= "3. 
" 6 | +++ 7 | 8 | # CDK for Kubernetes 🚀 9 | 10 | ![CDK for Kubernetes](/images/cdk8s/cdk8s.gif?width=50pc) 11 | 12 | * [ ] [Getting Started with TypeScript](./cdk8s-typescript/index.html) 13 | 14 | {{% children showhidden="false" %}} -------------------------------------------------------------------------------- /amplify/content/container-typescript/_index.md: -------------------------------------------------------------------------------- 1 | +++ 2 | title = "Launch an EKS Cluster" 3 | weight = 20 4 | pre= "2. " 5 | +++ 6 | 7 | 8 | ![Launch & Configure an EKS Cluster](/images/container-typescript/cdk-eks-architecture.png?width=50pc) 9 | 10 | {{%expand "✍️ Why Elastic Kubernetes Service (EKS) on AWS" %}} 11 | * Enterprise Class Platform to run **Production-Grade** workloads. [Rapid innovation based on our customer needs](https://github.com/aws/containers-roadmap/projects/1) 12 | 13 | * **Native & Upstream Kubernetes** experience. Flexibility over your *Container* build including bring your own *AMI*, *VPC*, *Nodes* (*Spot*, *GPU* etc.), *IAM*, *Logging*, *Monitoring* and *Storage* etc. 14 | 15 | * **Seamless Integrations** with AWS services, allowing our Customers to gain long term benefits around *cost*, *time to market* and *Developer productivity*. 16 | 17 | * Actively contributes to the **CNCF Kubernetes Community** 18 | 19 | {{% /expand%}} 20 | 21 | 22 | To provision a ready-to-use **Amazon EKS** cluster by simply `cdk deploy` with **AWS Cloud Development Kit**. 23 | 24 | ### 🎯 To run a the CDK TypeScript 25 | 26 | * [x] ~~Prerequisites~~: [Quick-Setup Cloud9](../prerequisites/bootstrap/) 27 | 28 | ``` 29 | # npm install -g aws-cdk --force 30 | cdk --version 31 | 32 | # git clone the project 33 | git clone https://github.com/nnthanh101/eks-workshop.git 34 | ``` 35 | 36 | * [x] Deploy **EksClusterStack** in a new **VPC** (default) 37 | 38 | ``` 39 | cd ~/environment/eks-workshop/eks-cluster 40 | 41 | # install other required npm modules 42 | npm install 43 | npm run build 44 | 45 | cdk synth 46 | 47 | # cdk bootstrapping (only required for the 1st time) 48 | cdk bootstrap aws://$ACCOUNT_ID/$AWS_REGION 49 | 50 | # cdk diff to see what will be created 51 | cdk diff 52 | 53 | cdk deploy 54 | ``` 55 | 56 | {{% children showhidden="false" %}} -------------------------------------------------------------------------------- /amplify/content/container-typescript/container-stack/_index.md: -------------------------------------------------------------------------------- 1 | +++ 2 | title = "EKS Container Stack" 3 | weight = 20 4 | pre= "2.2. " 5 | +++ 6 | 7 | 8 | > 🎯 We'll add a **EKS Cluster** with an **API-Gateway Endpoint** in front of it. 9 | 10 | ![Container Stack Architecture](/images/container-typescript/container-stack.png) 11 | 12 | {{%expand "✍️ The key Goals/Outcomes from EKS" %}} 13 | * What type of workloads will you be deploying on **Kubernetes**? 14 | * Migrate legacy Apps to Cloud 15 | * Micro-Services / Cloud Native development 16 | * Batch processing 17 | * Hybrid Container deployments 18 | 19 | * What Cloud Services will your workloads need to integrate with? 20 | 21 | * What is your development teams maturity in **DevOps**? 
22 | 23 | {{% /expand%}} 24 | 25 | > 🎯 Install the related Construct Library 26 | 27 | ``` 28 | npm install --save dotenv @aws-cdk/aws-iam @aws-cdk/aws-eks @aws-cdk/aws-ec2 @aws-cdk/aws-autoscaling @aws-cdk/aws-ecr @aws-cdk/aws-codecommit @aws-cdk/aws-codebuild @aws-cdk/aws-events-targets @aws-cdk/aws-codepipeline @aws-cdk/aws-codepipeline-actions 29 | ``` 30 | -------------------------------------------------------------------------------- /amplify/content/container-typescript/container-stack/eks-FargateCluster.md: -------------------------------------------------------------------------------- 1 | +++ 2 | title = "EKS FargateCluster" 3 | weight = 400 4 | pre= "2.2.4. " 5 | +++ 6 | 7 | ### Step 1. Add a EKS-FargateCluster to your stack 8 | 9 | * 🎯 EKS-FargateCluster ... 10 | 11 | 12 | {{}} 13 | import * as cdk from '@aws-cdk/core'; 14 | 15 | import * as ec2 from '@aws-cdk/aws-ec2'; 16 | import * as iam from '@aws-cdk/aws-iam'; 17 | import * as eks from '@aws-cdk/aws-eks'; 18 | 19 | export class EksClusterStack extends cdk.Stack { 20 | constructor(scope: cdk.Construct, id: string, props?: cdk.StackProps) { 21 | super(scope, id, props); 22 | 23 | // The code that defines your stack goes here 24 | 25 | // Step 1. Create a new VPC for our EKS Cluster 26 | // The default VPC will create a NAT Gateway for each AZs --> Cost 27 | const vpc = new ec2.Vpc(this, 'EKS-FargateCluster-VPC', { 28 | cidr: '10.20.0.0/18', 29 | natGateways: 1 30 | }) 31 | 32 | // Step 2. EKS Cluster with Fargate 33 | 34 | // IAM Role for our Fargate worker nodes 35 | const mastersRole = new iam.Role(this, 'masters-role', { 36 | assumedBy: new iam.AccountRootPrincipal() 37 | }); 38 | 39 | const fargateProfileRole = new iam.Role(this, "fargate-profile-role", { 40 | assumedBy: new iam.ServicePrincipal("eks-fargate-pods.amazonaws.com"), 41 | managedPolicies: [ 42 | iam.ManagedPolicy.fromAwsManagedPolicyName("AmazonEKSFargatePodExecutionRolePolicy") 43 | ] 44 | }) 45 | 46 | const cluster = new eks.FargateCluster(this, "fargate-cluster", { 47 | clusterName: "EKS-FargateCluster", 48 | vpc, 49 | mastersRole, 50 | coreDnsComputeType: eks.CoreDnsComputeType.FARGATE, 51 | defaultProfile: { 52 | fargateProfileName: "default-profile", 53 | selectors: [ 54 | { namespace: "default" }, 55 | { namespace: "kube-system" } 56 | ], 57 | podExecutionRole: fargateProfileRole 58 | } 59 | }); 60 | 61 | } 62 | } 63 | {{}} 64 | 65 | 66 | ## Step 2. CDK Diff 67 | 68 | Save your code, and let's take a quick look at the `cdk diff` before we deploy: 69 | 70 | ``` 71 | npm run build 72 | 73 | cdk diff EksClusterStack 74 | ``` 75 | 76 | 77 | ## Step 3. Let's deploy 78 | 79 | ``` 80 | cdk deploy EksClusterStack 81 | ``` 82 | -------------------------------------------------------------------------------- /amplify/content/container-typescript/create-cdk-project/_index.md: -------------------------------------------------------------------------------- 1 | +++ 2 | title = "New TypeScript CDK Project" 3 | weight = 10 4 | pre= "2.1. " 5 | +++ 6 | 7 | 8 | * 🎯 We will use `cdk init` to create a new AWS CDK Python project. 9 | 10 | * 🎯 We will also use the CDK Toolkit to synthesize an AWS 11 | CloudFormation template for the starter app and how to deploy your app into your AWS Account. 
12 | 13 | > **Step 1.** Creating a CDK application 14 | 15 | ```bash 16 | mkdir eks-cluster 17 | cd eks-cluster 18 | 19 | cdk init --language typescript 20 | ``` 21 | 22 | > **Step 2.** Compile TypeScript sources to JavaScript 23 | 24 | This will start the TypeScript compiler `tsc` in “watch” mode, which will monitor your project directory and will automatically compile any changes to your `.ts` files to `.js`. 25 | 26 | ```bash 27 | npm run watch 28 | # npm run build 29 | ``` 30 | 31 | > **Step 3.** Explore Your Project Directory 32 | 33 | ```bash 34 | npm install -g tree-cli 35 | tree -l 2 -o project-directory.txt 36 | ``` 37 | 38 | {{%expand "✍️ Project Structure" %}} 39 | * __`lib/eks-cluster-stack.ts`__ is where your CDK application's main stack is defined. 40 | This is the file we'll be spending most of our time in. 41 | * `bin/eks-cluster.ts` is the entrypoint of the CDK application. It will load 42 | the stack defined in `lib/eks-cluster-stack.ts`. 43 | * `package.json` is your npm module manifest. It includes information like the 44 | name of your app, version, dependencies and build scripts like "watch" and 45 | "build" (`package-lock.json` is maintained by npm) 46 | * `cdk.json` tells the toolkit how to run your app. In our case it will be 47 | `"npx ts-node bin/eks-cluster.ts"` 48 | * `tsconfig.json` your project's [typescript 49 | configuration](https://www.typescriptlang.org/docs/handbook/tsconfig-json.html) 50 | * `.gitignore` and `.npmignore` tell git and npm which files to include/exclude 51 | from source control and when publishing this module to the package manager. 52 | * `node_modules` is maintained by npm and includes all your project's 53 | dependencies. 54 | {{% /expand%}} 55 | 56 | > **Step 4.** Synthesize a template from your app 57 | 58 | ```bash 59 | cdk synth 60 | ``` 61 | 62 | > **Step 5.** Bootstrapping an environment then Deploy 63 | 64 | ```bash 65 | cdk bootstrap 66 | # cdk bootstrap aws://$ACCOUNT_ID/$AWS_REGION 67 | 68 | cdk deploy EksClusterStack 69 | ``` 70 | 71 | {{%expand "✍️ Bootstrapping an Environment" %}} 72 | The first time you deploy an AWS CDK app into an Environment (AWS Account/Region), you’ll need to install a “Bootstrap Stack”. 73 | 74 | This stack includes resources that are needed for the toolkit’s operation. For example, the stack includes an S3 bucket that is used to store templates and assets during the deployment process. 75 | {{% /expand%}} 76 | 77 | {{% notice note %}} 78 | You're READY ✅ for some actual CODING! 👌 79 | {{% /notice %}} 80 | -------------------------------------------------------------------------------- /amplify/content/container-typescript/logging_monitoring.zip: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AWS-Devops-Projects/eks-workshop/7573608bd2d4bde5f479b1e7e3f7b7569072d38a/amplify/content/container-typescript/logging_monitoring.zip -------------------------------------------------------------------------------- /amplify/content/prerequisites/_index.en.md: -------------------------------------------------------------------------------- 1 | --- 2 | title: Get Ready! 3 | weight: 10 4 | pre: "1. " 5 | --- 6 | 7 | 8 | ![🌤️⚡ Get Ready!](/images/prerequisites/get-ready.png) 9 | 10 | * [x] 1.1. 🌤️ Get your **AWS account** ready! 11 | 12 | You have received AWS credentials; make sure you have sufficient privileges in your AWS account. 
13 | 14 | * [x] [Using AWS Workshop Portal](./aws-workshop-portal/) 15 | * [ ] [Create your own AWS account](https://aws.amazon.com/free/?all-free-tier.sort-by=item.additionalFields.SortRank&all-free-tier.sort-order=asc) 16 | 17 | * [x] 1.2. ⚡ Provisioning **Cloud9** Cloud-based IDE 18 | * [x] [Create a Cloud9 Workspace](./cloud9-workspace/) 19 | * [ ] [Provisioning Cloud9 Workspace using CloudFormation IaC](https://devops.job4u.io/Modern-Apps/VPC-Cloud9-IDE/index.html) 20 | 21 | * [x] 1.3. 🎯 Configuring **Cloud9** Workspace 22 | * [x] [Quick Setup Cloud9](./bootstrap) 23 | * [ ] [Install Kubernetes Tools](./bootstrap/k8stools) 24 | * [ ] [Verify Prerequisites Resources](./bootstrap/verify-prerequisites) 25 | 26 | -------------------------------------------------------------------------------- /amplify/content/prerequisites/_index.vi.md: -------------------------------------------------------------------------------- 1 | --- 2 | title: Get Ready! 3 | weight: 10 4 | pre: "1. " 5 | --- 6 | 7 | 8 | ![🌤️⚡ Get Ready!](/images/prerequisites/get-ready.png) 9 | 10 | * [x] 1.1. 🌤️ Get your **AWS account** ready! 11 | 12 | You have received AWS credentials; make sure you have sufficient privileges in your AWS account. 13 | 14 | * [x] Using AWS Workshop Portal 15 | * [ ] [Create your own AWS account](https://aws.amazon.com/free/?all-free-tier.sort-by=item.additionalFields.SortRank&all-free-tier.sort-order=asc) 16 | 17 | * [x] 1.2. ⚡ Provisioning **Cloud9** Cloud-based IDE 18 | * [x] Create a Cloud9 Workspace 19 | * [ ] [Provisioning Cloud9 Workspace using CloudFormation IaC](https://devops.job4u.io/Modern-Apps/VPC-Cloud9-IDE/index.html) 20 | 21 | * [x] 1.3. 🎯 Configuring **Cloud9** Workspace 22 | 23 | -------------------------------------------------------------------------------- /amplify/content/prerequisites/aws-workshop-portal.md: -------------------------------------------------------------------------------- 1 | --- 2 | title: "AWS Workshop Portal" 3 | chapter: false 4 | weight: 10 5 | pre: "1.1. " 6 | --- 7 | 8 | {{% notice warning %}} 9 | Use this option only if you are attending an AWS hosted event run by an AWS employee (such as re:Invent, KubeCon, or an Immersion Day). 10 | {{% /notice %}} 11 | 12 | > [1. Log in to the AWS Workshop Portal](https://dashboard.eventengine.run/) 13 | 14 | This workshop creates an AWS account and a Cloud9 environment. You will need the **Participant Hash** provided upon entry, and your email address to track your unique session. 15 | 16 | Connect to the portal by clicking the button or browsing to [https://dashboard.eventengine.run/](https://dashboard.eventengine.run/). The following screen appears. 17 | 18 | ![Event Engine](/images/prerequisites/event-engine-initial-screen.png?width=50pc) 19 | 20 | > 2. Enter the provided hash in the text box. The button in the bottom right corner changes to **Accept Terms & Login**. Click on that button to continue. 21 | 22 | ![Event Engine Dashboard](/images/prerequisites/event-engine-dashboard.png?width=50pc) 23 | 24 | > 3. Click on **AWS Console** on the dashboard. 25 | 26 | Take the defaults and click on **Open AWS Console**. This will open the AWS Console in a new browser tab. -------------------------------------------------------------------------------- /amplify/content/prerequisites/bootstrap/_index.en.md: -------------------------------------------------------------------------------- 1 | --- 2 | title: Quick Setup Cloud9 3 | weight: 30 4 | pre: "1.3. " 5 | --- 6 | 7 | 8 | * [x] 🚀 1.3.1. 
Bootstrap Script 9 | 10 | We have put together a bootstrap script that will make the upgrade easier for you. Download and run it with the following commands from your Cloud9 terminal. 11 | 12 | 13 | ```bash 14 | wget https://eks.aws.job4u.io/assets/bootstrap.sh 15 | 16 | chmod +x bootstrap.sh 17 | ./bootstrap.sh 18 | ``` 19 | 20 | ✍️: **This may take a few minutes to complete! ⏳** 21 | 22 | 23 | * [x] 🚀 1.3.2. **Cloud9 IDE**: [Create a Cloud9 Workspace](../cloud9-workspace/index.html) or [Provision your AWS Cloud resources](https://devops.job4u.io/Modern-Apps/VPC-Cloud9-IDE/index.html) 24 | 25 | * [x] Verify CDK 26 | 27 | ```bash 28 | cdk --version 29 | ``` 30 | 31 | {{%expand "🤓 Install CDK" %}} 32 | npm install -g aws-cdk --force 33 | {{% /expand%}} 34 | 35 | * [ ] You can choose Themes by selecting *View* / *Themes* / *Solarized* / *Solarized Dark* in the Cloud9 workspace menu. 36 | 37 | -------------------------------------------------------------------------------- /amplify/content/prerequisites/bootstrap/_index.vi.md: -------------------------------------------------------------------------------- 1 | --- 2 | title: Prerequisite Resources 3 | weight: 12 4 | pre: "1.2. " 5 | --- 6 | 7 | > 🎯 Get your AWS account ready! 8 | 9 | * [x] AWS Account 10 | 11 | You have received AWS credentials; make sure you have sufficient privileges in your AWS account. 12 | 13 | * [x] [AWS CloudFormation: provision your AWS Cloud resources](https://devops.job4u.io/Modern-Apps/VPC-Cloud9-IDE/index.html) 14 | 15 | > 🎯 Install AWS CDK 16 | 17 | ```bash 18 | npm install -g aws-cdk 19 | cdk --version 20 | ``` -------------------------------------------------------------------------------- /amplify/content/prerequisites/bootstrap/k8stools.md: -------------------------------------------------------------------------------- 1 | --- 2 | title: "Install Kubernetes Tools" 3 | chapter: false 4 | weight: 16 5 | --- 6 | 7 | Amazon EKS clusters require the `kubectl` and `kubelet` binaries and the `aws-cli` or `aws-iam-authenticator` binary to allow IAM authentication for your Kubernetes cluster. 8 | 9 | {{% notice tip %}} 10 | We will give you the commands to download the **Linux** binaries. If you are running **Mac OSX** / **Windows**, please [see the official EKS docs for the download links.](https://docs.aws.amazon.com/eks/latest/userguide/getting-started.html) 11 | {{% /notice %}} 12 | 13 | #### Install kubectl CLI (EKS 1.16) 14 | 15 | ``` 16 | sudo curl --silent --location -o /usr/local/bin/kubectl https://amazon-eks.s3.us-west-2.amazonaws.com/1.16.8/2020-04-16/bin/linux/amd64/kubectl 17 | 18 | sudo chmod +x /usr/local/bin/kubectl 19 | 20 | kubectl version --short --client 21 | ``` 22 | 23 | #### Update AWS CLI 24 | 25 | Upgrade the AWS CLI according to the guidance in the [AWS documentation](https://docs.aws.amazon.com/cli/latest/userguide/install-linux.html). 26 | 27 | ``` 28 | sudo pip install --upgrade awscli && hash -r 29 | ``` 30 | 31 | #### Install jq, envsubst (from GNU gettext utilities) and bash-completion 32 | ``` 33 | sudo yum -y install jq gettext bash-completion 34 | ``` 35 | 36 | #### Verify the binaries are in the path and executable 37 | ``` 38 | for command in kubectl jq envsubst aws 39 | do 40 | which $command &>/dev/null && echo "[x] $command in path" || echo "[ ] $command NOT FOUND" 41 | done 42 | ``` 43 | 44 | #### Enable kubectl bash_completion 45 | 46 | ``` 47 | kubectl completion bash >> ~/.bash_completion 48 | . /etc/profile.d/bash_completion.sh 49 | . 
~/.bash_completion 50 | ``` 51 | 52 | 53 | 68 | 69 | #### Install the Helm CLI 70 | 71 | ```bash 72 | curl -sSL https://raw.githubusercontent.com/helm/helm/master/scripts/get-helm-3 | bash 73 | 74 | helm version --short 75 | ``` 76 | 77 | #### Stable `Helm Chart` Repository 78 | 79 | ```bash 80 | helm repo add stable https://kubernetes-charts.storage.googleapis.com/ 81 | 82 | helm search repo stable 83 | ``` 84 | 85 | ```bash 86 | helm completion bash >> ~/.bash_completion 87 | . /etc/profile.d/bash_completion.sh 88 | . ~/.bash_completion 89 | source <(helm completion bash) 90 | ``` 91 | -------------------------------------------------------------------------------- /amplify/content/prerequisites/bootstrap/verify-prerequisites.md: -------------------------------------------------------------------------------- 1 | --- 2 | title: "Verify Prerequisites Resources" 3 | chapter: false 4 | weight: 17 5 | --- 6 | 7 | 8 | We should configure our AWS CLI with our current **region** as default. 9 | 10 | ```sh 11 | export ACCOUNT_ID=$(aws sts get-caller-identity --output text --query Account) 12 | export AWS_REGION=$(curl -s 169.254.169.254/latest/dynamic/instance-identity/document | jq -r '.region') 13 | ``` 14 | 15 | Check if AWS_REGION is set to desired region 16 | ```sh 17 | test -n "$ACCOUNT_ID" && echo ACCOUNT_ID is "$ACCOUNT_ID" || echo ACCOUNT_ID is not set 18 | test -n "$AWS_REGION" && echo AWS_REGION is "$AWS_REGION" || echo AWS_REGION is not set 19 | ``` 20 | 21 | Let's save these into bash_profile 22 | ```sh 23 | echo "export ACCOUNT_ID=${ACCOUNT_ID}" | tee -a ~/.bash_profile 24 | echo "export AWS_REGION=${AWS_REGION}" | tee -a ~/.bash_profile 25 | aws configure set default.region ${AWS_REGION} 26 | aws configure get default.region 27 | ``` 28 | 29 | ### Validate the IAM role `eks-admin-role` 30 | 31 | Use the [GetCallerIdentity](https://docs.aws.amazon.com/cli/latest/reference/sts/get-caller-identity.html) CLI command to validate that the Cloud9 IDE is using the correct IAM role. 32 | 33 | ``` 34 | aws sts get-caller-identity --query Arn | grep eks-admin-role -q && echo "IAM role valid" || echo "IAM role NOT valid" 35 | ``` 36 | 37 | {{%expand "✍️ Get the IAM Role name from the AWS CLI" %}} 38 | ```bash 39 | INSTANCE_PROFILE_NAME=`basename $(aws ec2 describe-instances --filters Name=tag:Name,Values=aws-cloud9-${C9_PROJECT}-${C9_PID} | jq -r '.Reservations[0].Instances[0].IamInstanceProfile.Arn' | awk -F "/" "{print $2}")` 40 | 41 | aws iam get-instance-profile --instance-profile-name $INSTANCE_PROFILE_NAME --query "InstanceProfile.Roles[0].RoleName" --output text 42 | ``` 43 | {{% /expand%}} 44 | 45 | 46 | If the IAM role is not valid, **DO NOT PROCEED**. Go back and confirm the steps on this page. 47 | 48 | {{%expand "✍️ @FIXME Export the Cluster Name & Worker Role Name" %}} 49 | 64 | 65 | 76 | 77 | ```bash 78 | export CLUSTER_NAME=EKS-Cluster 79 | # export ROLE_NAME=eks-admin-role 80 | 81 | echo "export ROLE_NAME=${CLUSTER_NAME}" | tee -a ~/.bash_profile 82 | echo "export ROLE_NAME=${ROLE_NAME}" | tee -a ~/.bash_profile 83 | ``` 84 | {{% /expand%}} 85 | -------------------------------------------------------------------------------- /amplify/content/prerequisites/cloud9-workspace/_index.en.md: -------------------------------------------------------------------------------- 1 | --- 2 | title: "Cloud9 Workspace" 3 | chapter: false 4 | weight: 20 5 | pre: "1.2. 
" 6 | --- 7 | 8 | {{% notice warning %}} 9 | The Cloud9 workspace should be built by an IAM user with Administrator privileges, 10 | not the root account user. Please ensure you are logged in as an IAM user, not the root account user. 11 | {{% /notice %}} 12 | 13 | {{% notice tip %}} 14 | Ad blockers, javascript disablers, and tracking blockers should be disabled for 15 | the cloud9 domain, or connecting to the workspace might be impacted. 16 | Cloud9 requires third-party-cookies. You can whitelist the [specific domains]( https://docs.aws.amazon.com/cloud9/latest/user-guide/troubleshooting.html#troubleshooting-env-loading). 17 | {{% /notice %}} 18 | 19 | ### Launch Cloud9 in your closest region: 20 | 21 | > Log into the AWS Console. 22 | 23 | - [Create a Cloud9 Environment](https://ap-southeast-1.console.aws.amazon.com/cloud9/home?region=ap-southeast-1) 24 | - Select **Create environment** 25 | - Name it **`Cloud9`**, and select **Next Step** 26 | - Stick with the default settings, and select **Next Step**: `t3.micro` & `Amazon Linux` | ~~Ubuntu Server 18.04 LTS~~ 27 | - Lastly, select **Create Environment** 28 | - When it comes up, customize the environment by closing the **welcome tab** 29 | and **lower work area**, and opening a new **terminal** tab in the main work area 30 | 31 | > If you like this theme, you can choose it yourself by selecting **View / Themes / Solarized / Solarized Dark** 32 | in the Cloud9 workspace menu. -------------------------------------------------------------------------------- /amplify/content/prerequisites/cloud9-workspace/admin-iam-role.md: -------------------------------------------------------------------------------- 1 | --- 2 | title: "IAM Role for Workspace" 3 | chapter: false 4 | weight: 40 5 | pre: "1.2.1. " 6 | --- 7 | 8 | ### 1. Create an IAM Role for your Workspace 9 | 10 | 1. Follow [this deep link to create an IAM role with Administrator access.](https://console.aws.amazon.com/iam/home#/roles$new?step=review&commonUseCase=EC2%2BEC2&selectedUseCase=EC2&policies=arn:aws:iam::aws:policy%2FAdministratorAccess&roleName=eks-admin-role) 11 | 2. Confirm that **AWS service** and **EC2** are selected, then click **Next** to view permissions. 12 | 3. Confirm that **AdministratorAccess** is checked, then click **Next: Tags** to assign tags. 13 | 4. Take the defaults, and click **Next: Review** to review. 14 | 5. Enter `eks-admin-role` for the Name, and click **Create role**. 15 | 16 | ### 2. Attach the IAM role to your Workspace 17 | 18 | 1. Follow [this deep link to find your Cloud9 EC2 instance](https://console.aws.amazon.com/ec2/v2/home?#Instances:tag:Name=aws-cloud9-.*;sort=desc:launchTime) 19 | 2. Select the instance, then choose **Actions / Instance Settings / Attach/Replace IAM Role** 20 | ![c9instancerole](/images/prerequisites/c9instancerole.png) 21 | 3. Choose `eks-admin-role` from the **IAM Role** drop down, and select **Apply** 22 | 23 | ### 3. Update IAM settings for your Workspace 24 | 25 | {{% notice info %}} 26 | Cloud9 normally manages IAM credentials dynamically. This isn't currently compatible with the EKS IAM authentication, so we will disable it and rely on the IAM role instead. 
27 | {{% /notice %}} 28 | 29 | - Return to your workspace and click the gear icon (in top right corner), or click to open a new tab and choose "Open Preferences" 30 | - Select **AWS SETTINGS** 31 | - Turn off **AWS managed temporary credentials** 32 | - Close the Preferences tab 33 | ![c9disableiam](/images/prerequisites/c9disableiam.png) 34 | 35 | To ensure temporary credentials aren't already in place we will also remove 36 | any existing credentials file: 37 | ```sh 38 | rm -vf ${HOME}/.aws/credentials 39 | ``` -------------------------------------------------------------------------------- /amplify/content/showcase.en.md: -------------------------------------------------------------------------------- 1 | --- 2 | title: Showcase 3 | disableToc: true 4 | slug: showcase 5 | --- 6 | 7 | #### ⛅ [Serverless Data Lake](https://datalake.aws.job4u.io) 8 | 9 | ![Serverless Data Lake](/images/showcase/serverless-data-lake.png?width=50pc) 10 | 11 | #### 🌤 [Scale-Out Computing on AWS](#) 12 | 13 | ![Scale-Out Computing on AWS](/images/showcase/scale-out-computing-on-aws.png?width=50pc) 14 | 15 | #### ⚡ [Modern Application Development](https://eks.aws.job4u.io) 16 | 17 | ![Kubernetes on AWS](/images/showcase/serverless.png?width=50pc) 18 | 19 | ![ECS Fargate](/images/showcase/ecs-fargate.png?width=50pc) 20 | -------------------------------------------------------------------------------- /amplify/content/showcase.vi.md: -------------------------------------------------------------------------------- 1 | --- 2 | title: Showcase 3 | disableToc: true 4 | slug: showcase 5 | --- 6 | 7 | 8 | #### ⛅ [Serverless Data Lake](https://datalake.aws.job4u.io) 9 | ![Serverless Data Lake](/images/showcase/serverless-data-lake.png?width=50pc) 10 | 11 | #### 🌤 [ECS Fargate](https://ecs-fargate.aws.job4u.io) 12 | ![ECS Fargate](/images/showcase/ecs-fargate.png?width=50pc) 13 | 14 | #### ⚡ [Modern Application Development](https://devopssec.aws.job4u.io) 15 | ![Modern Application Development](/images/showcase/serverless.png?width=50pc) -------------------------------------------------------------------------------- /amplify/data/common.toml: -------------------------------------------------------------------------------- 1 | [cdk] 2 | version = "1.21.1 (build 842cc5f)" -------------------------------------------------------------------------------- /amplify/diagrams/hitcounter: -------------------------------------------------------------------------------- 1 | 
7VlRj5s4EP41ka4PF4GNgX3cpNvrQytV2pPusXLAC24NRsZskv762mAINqTJtexub3X7sMLj8Xj4vpnxmKzgtjj8JXCVf+QpYSvgpYcVfLsC4AZ56r8WHDtBCFAnyARNO5F/EtzTb8QIzbqsoSmpLUXJOZO0soUJL0uSSEuGheB7W+2BM3vXCmdkIrhPMJtK/6GpzDtp3L+Wlr8nNMv7nX3PzOxw8jUTvCnNfisAH9q/brrAvS2jX+c45fuRCN6t4FZwLrun4rAlTEPbw9ate3dmdvBbkFJesyDqFjxi1pDe49YveeyxaN+GaH1vBTf7nEpyX+FEz+4V+UqWy4Kpka8eU1znra4eGNtESHI4658/vLUKJsILIsVRqZgFIDRAmUDyb8x4f6IFBEaWjyiBfShhEwrZYPsEh3owiJyBM56gQVIVKGbIhcx5xkvM7k7SjY3XCJsvRMqjiXXcSK5EJwsfOK8s2PRGPwZN+cUbkRgtk2ASi4wYrXgeWkEYlvTRtv4rMKHLQcQbyWhJtkO+euNg0QMdJlRl4Ae8I+wTr6mkvFRzOy4lL0YKt4xmekJqwDbYjBKFDRE24Cq5Kr1/cch0lVrjfQ3XDBe7FKvZB8rYljMuWg/hOxQjGCi50k2pstbPlbwkywQz9JEVzEMVGAVzFK7RNJpv4K+zFP7P0lUsIafkPC9L8X+JpfRY4oJ/TndTosBdBDfxE6dTZBEVeDNEgSlN8QIlzweXeSJleqt7EY0ow3VNExvSxep8OA/TCAY0c0L2squPA7PDJ06Ve6d08e10gcDJgs55s2rciriG3LyLHEPdK08MtVQNr30de/A52RtThV6SKh84CIfRz1HlR44h5D0ZVdPmwl/r+fKRfyUT1lQ1kTZPgtT0G961Cro2Vtqp1k20WaG3uuypXqzu2jJ/rgrOF8ta9cC0zP5u27Y/g2VKmo8cYIPhoBmFSDgTImCJdnfaIgCNdVOlWJI/Kn15AVu9MVUIAvXCmzevjQGI7B4Noumh4s9dOBYh4GZCAHy9wR645ehZg70v+SOs31PVpTQtEJehrqVQnDhdzbgJMqKLCBc0TduL49zV2r5MWrfrB15Kc5tURdKMjcf+AvSA2Ll6R9NMeKpEAFcc0EkjHgcwLp3WLljXn97j3iv83Q50p/dCkYP91Qe603shl8QzB7rCHB9HaqbenHU4cGIKBtbnKfXQWfzZbgEEk7gJ1i3OdcXL+vWVUOQ239McjZ8qR6/4oPACOfqiCYmcxhh63hpEQRz4AYxC2CP0r9PTPSljx9BC6Qk8x324cHpOPzej15ye7j1p+Ci9fHqq4elXg46a0y8z8O47 -------------------------------------------------------------------------------- /amplify/diagrams/table-viewer: -------------------------------------------------------------------------------- 1 | 7Vhbj6MgFP4t++BjG5Va7WMvc9lkJpmkm708bahSJYPSIL3Nr19QtAI26WybmWSzfZiRw+Fw/L7DB+iAeX54YHCTPdMEEcd3k4MDFo7ve57nin/ScqwtkRvUhpThRDmdDEv8hpRRjUu3OEGl5sgpJRxvdGNMiwLFXLNBxuhed1tTos+6gSmyDMsYEtv6Ayc8U28RuCf7I8Jp1szsuapnBePXlNFtoeZzfLCufnV3DptYyr/MYEL3HRO4c8CcUcrrp/wwR0Ri28BWj7s/09vmzVDBLxkQ1QN2kGxRk3GVFz82WNAtJ7hA8xZq1wGzBJYZSlRjhxjHArwnuELkhZaYY1qIvhXlnOYdhynBqezgdCOsULVikStiwpDxnIi2Jx4FLhs5f35IZYUN4b4Ew+RYwJz+TlbCYY0JmVNCWZUk8O9CMIuEXbgnWARs+gpaiDgzGxcFlUwNHTomhdMDojni7ChcVG8QKs5UTftgNFRVvT/VSKgAzDrlETWVqaoybWOfmBEPipx+onqYep4+fJ1/sfiqqq/lZp9hjpYbGMvevcBSx7nlUTbWtOBLFQjYGKtCFuRwRl9Rp8etfi3MFqY9yJ+FGQAD5kYTOiB7zfLpouw1A6+CeWLB/A2uCPqO0V7UqIm1eE1uFK6Gjaq+LpDK1BQ/QWves0JynCRykl7+dIZtCms59QKD0nMVfw073qSHnVHPGvBvQA7wLHIsRlCRTOUGIHWFwLLEsc5PPQAllv5bylDSLYuRvvo4ZClqVnU/UB0ggp4qbWwMEcjxTs+iDxw1wwvFIr+Wh9E41HkYB3qIOns1qrsBGIECzwjkGYHqV7YCCZDhseO2kQ6lxWYLzGUE+9cTLGhkx59qbVSNX7IxDN5Ffpfoz2XaM7ed8C+ZBr4/jIKJGwJQ/x3rcc0V+qHEA4v4aSFnWVRb/kIw51Y6fIECM1Tit8q3rgGVoPAOZk6w6Dt4nFVfIk80s/ZM17cZnhVVdTZVmTgtvt1qqwv+rNoO3KEfRZOb1BEIJxrfgVFGdL0uEb+Wx7HFY8a5PLRPZVj/fif20UF+HPD/ZF4lCkDjcmCK9m3IHN1AjQ+YV2I8dN1ItWs9DlXrBTEskpLULdwrN+jP0mh/oms0cA0tvVSjTbG3An2oKAcW/4/fnp/+tVU7+rBVO4iMVWuQe5tVG9pbacypfXdprtjbnNQOF1zkLyCj/67fKY7e7wnGdegso5ffVDzzHhkEPdd10CMK4/dfVUTz9M2mJu30YQzc/QE= -------------------------------------------------------------------------------- /amplify/layouts/partials/custom-footer.html: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /amplify/layouts/partials/logo.html: -------------------------------------------------------------------------------- 1 | 6 | -------------------------------------------------------------------------------- /amplify/layouts/partials/menu-footer.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | Star 7 | 8 | 9 | Fork 10 | 11 | 12 | 13 | 
-------------------------------------------------------------------------------- /amplify/layouts/shortcodes/cdkversion.html: -------------------------------------------------------------------------------- 1 | {{ $.Site.Data.common.cdk.version }} -------------------------------------------------------------------------------- /amplify/layouts/shortcodes/ghcontributors.html: -------------------------------------------------------------------------------- 1 | 22 |
23 | {{ $url := .Get 0 }} 24 | {{ range getJSON $url }} 25 |
26 | 27 | 28 | {{.contributions}} commits 29 |
30 | {{ end }} 31 |
-------------------------------------------------------------------------------- /amplify/layouts/shortcodes/tab.html: -------------------------------------------------------------------------------- 1 | {{ if .Parent }} 2 | {{ $name := trim (.Get "name") " " }} 3 | {{ $include := trim (.Get "include") " "}} 4 | {{ $codelang := .Get "codelang" }} 5 | {{ if not (.Parent.Scratch.Get "tabs") }} 6 | {{ .Parent.Scratch.Set "tabs" slice }} 7 | {{ end }} 8 | {{ with .Inner }} 9 | {{ if $codelang }} 10 | {{ $.Parent.Scratch.Add "tabs" (dict "name" $name "content" (highlight . $codelang "") ) }} 11 | {{ else }} 12 | {{ $.Parent.Scratch.Add "tabs" (dict "name" $name "content" . ) }} 13 | {{ end }} 14 | {{ else }} 15 | {{ $.Parent.Scratch.Add "tabs" (dict "name" $name "include" $include "codelang" $codelang) }} 16 | {{ end }} 17 | {{ else }} 18 | {{- errorf "[%s] %q: tab shortcode missing its parent" .Page.Site.Language.Lang .Page.Path -}} 19 | {{ end}} -------------------------------------------------------------------------------- /amplify/layouts/shortcodes/tabs.html: -------------------------------------------------------------------------------- 1 | {{ .Page.Scratch.Add "tabset-counter" 1 }} 2 | {{ $tab_set_id := .Get "name" | default (printf "tabset-%s-%d" (.Page.RelPermalink) (.Page.Scratch.Get "tabset-counter") ) | anchorize }} 3 | {{ $tabs := .Scratch.Get "tabs" }} 4 | {{ if .Inner }}{{/* We don't use the inner content, but Hugo will complain if we don't reference it. */}}{{ end }} 5 |
6 | 12 | {{ range $i, $e := $tabs }} 13 | {{ $id := printf "%s-%d" $tab_set_id $i }} 14 |
15 | {{ with .content }} 16 | {{ . }} 17 | {{ else }} 18 | {{ if eq $.Page.BundleType "leaf" }} 19 | {{/* find the file somewhere inside the bundle. Note the use of double asterisk */}} 20 | {{ with $.Page.Resources.GetMatch (printf "**%s*" .include) }} 21 | {{ if ne .ResourceType "page" }} 22 | {{/* Assume it is a file that needs code highlighting. */}} 23 | {{ $codelang := $e.codelang | default ( path.Ext .Name | strings.TrimPrefix ".") }} 24 | {{ highlight .Content $codelang "" }} 25 | {{ else}} 26 | {{ .Content }} 27 | {{ end }} 28 | {{ end }} 29 | {{ else}} 30 | {{ $path := path.Join $.Page.Dir .include }} 31 | {{ $page := $.Page.Site.GetPage "page" $path }} 32 | {{ with $page }} 33 | {{ .Content }} 34 | {{ else }} 35 | {{ errorf "[%s] tabs include not found for path %q" $.Page.Site.Language.Lang $path}} 36 | {{ end }} 37 | {{ end }} 38 | {{ end }} 39 |
40 | {{ end }} 41 |
42 | {{ $elem := $tab_set_id | safeJS }} 43 | 44 | -------------------------------------------------------------------------------- /amplify/package-lock.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "modernapps", 3 | "version": "1.0.0", 4 | "lockfileVersion": 1 5 | } 6 | -------------------------------------------------------------------------------- /amplify/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "modernapps", 3 | "version": "1.0.0", 4 | "homepage": "https://github.com/nnthanh101/aws-cdk#readme", 5 | "bugs": { 6 | "url": "https://github.com/nnthanh101/aws-cdk/issues" 7 | }, 8 | "license": "MIT", 9 | "repository": { 10 | "type": "git", 11 | "url": "git+https://github.com/nnthanh101/aws-cdk.git" 12 | }, 13 | "scripts": { 14 | "theme": "git submodule init && git submodule update --checkout --recursive", 15 | "start": "hugo server -w -v --enableGitInfo --bind=0.0.0.0 --port 8080", 16 | "deploy": "aws s3 sync public/ s3://WORKSHOP_BUCKET/ --delete", 17 | "deploycontent": "aws s3 sync public/ s3://WORKSHOP_BUCKET/ --delete --cache-control \"max-age=3600, public\" --exclude \"*\" --include \"*.html\" --include \"*.xml\"", 18 | "deployothers": "aws s3 sync public/ s3://WORKSHOP_BUCKET/ --delete --cache-control \"max-age=86400, public\" --exclude \"*.html\" --exclude \"*.xml\"" 19 | } 20 | } 21 | -------------------------------------------------------------------------------- /amplify/static/AWS_Logo.svg: -------------------------------------------------------------------------------- 1 | 2 | 3 | 5 | 9 | 10 | 31 | 32 | 34 | 36 | 37 | 38 | 39 | -------------------------------------------------------------------------------- /amplify/static/assets/buildspec.yml: -------------------------------------------------------------------------------- 1 | version: 0.2 2 | phases: 3 | install: 4 | runtime-versions: 5 | python: 3.7 6 | commands: 7 | ## Install packages or any pre-reqs in this phase. 8 | ## Upgrading SAM CLI to latest version 9 | # - pip3 install --upgrade aws-sam-cli 10 | # - sam --version 11 | ## Installing project dependencies 12 | - cd sls_api 13 | - pip install -r requirements.txt 14 | 15 | pre_build: 16 | commands: 17 | ## Run tests, lint scripts or any other pre-build checks. 18 | - cd .. 19 | - pip install pytest pytest-mock --user 20 | - python -m pytest tests/ -v 21 | 22 | build: 23 | commands: 24 | ## Use Build phase to build your artifacts (compile, etc.) 
25 | # - sam build 26 | 27 | post_build: 28 | commands: 29 | ## Use Post-Build for notifications, git tags, upload artifacts to S3 30 | # - sam package --s3-bucket $PACKAGE_BUCKET --output-template-file packaged.yaml 31 | 32 | artifacts: 33 | discard-paths: yes 34 | files: 35 | ## List of local artifacts that will be passed down the pipeline 36 | # - packaged.yaml 37 | -------------------------------------------------------------------------------- /amplify/static/assets/chapter6/artifacts-store.yml: -------------------------------------------------------------------------------- 1 | AWSTemplateFormatVersion: '2010-09-09' 2 | Description: S3 bucket for CodePipeline artifacts store with KMS Customer Managed Key 3 | Parameters: 4 | ProdAccountNumber: 5 | Description: 12-digit AWS account number for production 6 | Type: Number 7 | 8 | Resources: 9 | KMSKey: 10 | Type: AWS::KMS::Key 11 | Properties: 12 | Description: Used to encrypt artifacts by CodePipeline 13 | EnableKeyRotation: true 14 | KeyPolicy: 15 | Version: "2012-10-17" 16 | Id: !Ref AWS::StackName 17 | Statement: 18 | - 19 | Effect: Allow 20 | Principal: 21 | AWS: !Sub arn:${AWS::Partition}:iam::${AWS::AccountId}:root 22 | Action: 23 | - "kms:Create*" 24 | - "kms:Describe*" 25 | - "kms:Enable*" 26 | - "kms:List*" 27 | - "kms:Put*" 28 | - "kms:Update*" 29 | - "kms:Revoke*" 30 | - "kms:Disable*" 31 | - "kms:Get*" 32 | - "kms:Delete*" 33 | - "kms:ScheduleKeyDeletion" 34 | - "kms:CancelKeyDeletion" 35 | Resource: "*" 36 | - 37 | Effect: Allow 38 | Principal: 39 | AWS: 40 | - !Sub arn:aws:iam::${ProdAccountNumber}:root 41 | - !Sub arn:aws:iam::485020055381:role/service-role/codebuild-sls-api-build-service-role 42 | Action: 43 | - kms:Encrypt 44 | - kms:Decrypt 45 | - kms:ReEncrypt* 46 | - kms:GenerateDataKey* 47 | - kms:DescribeKey 48 | Resource: "*" 49 | 50 | KMSAlias: 51 | Type: AWS::KMS::Alias 52 | Properties: 53 | AliasName: !Sub alias/codepipeline-crossaccounts 54 | TargetKeyId: !Ref KMSKey 55 | 56 | # ArtifactsBucket: 57 | # Type: AWS::S3::Bucket 58 | # DeletionPolicy: Retain 59 | 60 | Outputs: 61 | CMK: 62 | Value: !GetAtt [KMSKey, Arn] 63 | # ArtifactsBucket: 64 | # Value: !Ref ArtifactsBucket -------------------------------------------------------------------------------- /amplify/static/assets/chapter6/prod-roles.yml: -------------------------------------------------------------------------------- 1 | AWSTemplateFormatVersion: '2010-09-09' 2 | Description: Resources needed for CodePipeline to deploy across accounts 3 | Parameters: 4 | ArtifactsBucket: 5 | Description: S3 Bucket that holds the pipeline artifacts 6 | Type: String 7 | ToolsAccount: 8 | Description: AWS AccountNumber for Tools 9 | Type: Number 10 | CMKARN: 11 | Description: ARN of the KMS CMK creates in Tools account 12 | Type: String 13 | Resources: 14 | CFRole: 15 | Type: AWS::IAM::Role 16 | Properties: 17 | RoleName: !Sub ToolsAcctCodePipelineCloudFormationRole 18 | AssumeRolePolicyDocument: 19 | Version: 2012-10-17 20 | Statement: 21 | - 22 | Effect: Allow 23 | Principal: 24 | AWS: 25 | - !Ref ToolsAccount 26 | Action: 27 | - sts:AssumeRole 28 | Path: / 29 | CFPolicy: 30 | Type: AWS::IAM::Policy 31 | Properties: 32 | PolicyName: !Sub ToolsAcctCodePipelineCloudFormationPolicy 33 | PolicyDocument: 34 | Version: 2012-10-17 35 | Statement: 36 | - 37 | Effect: Allow 38 | Action: 39 | - cloudformation:* 40 | - s3:* 41 | - iam:PassRole 42 | Resource: "*" 43 | - 44 | Effect: Allow 45 | Action: 46 | - kms:* 47 | Resource: !Ref CMKARN 48 | Roles: 49 | - 50 | !Ref CFRole 
51 | CFDeployerRole: 52 | Type: AWS::IAM::Role 53 | Properties: 54 | RoleName: !Sub cloudformationdeployer-role 55 | AssumeRolePolicyDocument: 56 | Version: 2012-10-17 57 | Statement: 58 | - 59 | Effect: Allow 60 | Principal: 61 | Service: 62 | - cloudformation.amazonaws.com 63 | Action: 64 | - sts:AssumeRole 65 | Path: / 66 | CFDeployerPolicy: 67 | Type: AWS::IAM::Policy 68 | Properties: 69 | PolicyName: !Sub cloudformationdeployer-policy 70 | PolicyDocument: 71 | Version: 2012-10-17 72 | Statement: 73 | - 74 | Effect: Allow 75 | Action: 76 | - lambda:AddPermission 77 | - lambda:CreateFunction 78 | - lambda:DeleteFunction 79 | - lambda:InvokeFunction 80 | - lambda:RemovePermission 81 | - lambda:UpdateFunctionCode 82 | - lambda:GetFunctionConfiguration 83 | - lambda:GetFunction 84 | - lambda:UpdateFunctionConfiguration 85 | - events:* # Required for the sample lambda function to work 86 | - iam:CreateRole 87 | - iam:CreatePolicy 88 | - iam:GetRole 89 | - iam:DeleteRole 90 | - iam:PutRolePolicy 91 | - iam:PassRole 92 | - iam:DeleteRolePolicy 93 | - cloudformation:* 94 | Resource: "*" 95 | - 96 | Effect: Allow 97 | Action: 98 | - s3:PutObject 99 | - s3:GetBucketPolicy 100 | - s3:GetObject 101 | - s3:ListBucket 102 | Resource: 103 | - !Join ['',['arn:aws:s3:::',!Ref ArtifactsBucket, '/*']] 104 | - !Join ['',['arn:aws:s3:::',!Ref ArtifactsBucket]] 105 | Roles: 106 | - 107 | !Ref CFDeployerRole -------------------------------------------------------------------------------- /amplify/static/fonts/monogramos-webfont.eot: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AWS-Devops-Projects/eks-workshop/7573608bd2d4bde5f479b1e7e3f7b7569072d38a/amplify/static/fonts/monogramos-webfont.eot -------------------------------------------------------------------------------- /amplify/static/fonts/monogramos-webfont.ttf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AWS-Devops-Projects/eks-workshop/7573608bd2d4bde5f479b1e7e3f7b7569072d38a/amplify/static/fonts/monogramos-webfont.ttf -------------------------------------------------------------------------------- /amplify/static/fonts/monogramos-webfont.woff: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AWS-Devops-Projects/eks-workshop/7573608bd2d4bde5f479b1e7e3f7b7569072d38a/amplify/static/fonts/monogramos-webfont.woff -------------------------------------------------------------------------------- /amplify/static/fonts/monogramos-webfont.woff2: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AWS-Devops-Projects/eks-workshop/7573608bd2d4bde5f479b1e7e3f7b7569072d38a/amplify/static/fonts/monogramos-webfont.woff2 -------------------------------------------------------------------------------- /amplify/static/images/EKS-K8s-on-AWS.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AWS-Devops-Projects/eks-workshop/7573608bd2d4bde5f479b1e7e3f7b7569072d38a/amplify/static/images/EKS-K8s-on-AWS.png -------------------------------------------------------------------------------- /amplify/static/images/build-pipeline/cicd-cloudformation.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/AWS-Devops-Projects/eks-workshop/7573608bd2d4bde5f479b1e7e3f7b7569072d38a/amplify/static/images/build-pipeline/cicd-cloudformation.png -------------------------------------------------------------------------------- /amplify/static/images/build-pipeline/pipeline-art.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AWS-Devops-Projects/eks-workshop/7573608bd2d4bde5f479b1e7e3f7b7569072d38a/amplify/static/images/build-pipeline/pipeline-art.png -------------------------------------------------------------------------------- /amplify/static/images/build-pipeline/pipeline-verify-fail.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AWS-Devops-Projects/eks-workshop/7573608bd2d4bde5f479b1e7e3f7b7569072d38a/amplify/static/images/build-pipeline/pipeline-verify-fail.png -------------------------------------------------------------------------------- /amplify/static/images/build-pipeline/pipeline-verify-success.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AWS-Devops-Projects/eks-workshop/7573608bd2d4bde5f479b1e7e3f7b7569072d38a/amplify/static/images/build-pipeline/pipeline-verify-success.png -------------------------------------------------------------------------------- /amplify/static/images/build-pipeline/screenshot-hidden-files-cloud9.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AWS-Devops-Projects/eks-workshop/7573608bd2d4bde5f479b1e7e3f7b7569072d38a/amplify/static/images/build-pipeline/screenshot-hidden-files-cloud9.png -------------------------------------------------------------------------------- /amplify/static/images/build-pipeline/screenshot-verify-codecommit.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AWS-Devops-Projects/eks-workshop/7573608bd2d4bde5f479b1e7e3f7b7569072d38a/amplify/static/images/build-pipeline/screenshot-verify-codecommit.png -------------------------------------------------------------------------------- /amplify/static/images/canaries/canary-deployments.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AWS-Devops-Projects/eks-workshop/7573608bd2d4bde5f479b1e7e3f7b7569072d38a/amplify/static/images/canaries/canary-deployments.png -------------------------------------------------------------------------------- /amplify/static/images/canaries/lambda-versions-aliases.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AWS-Devops-Projects/eks-workshop/7573608bd2d4bde5f479b1e7e3f7b7569072d38a/amplify/static/images/canaries/lambda-versions-aliases.png -------------------------------------------------------------------------------- /amplify/static/images/canaries/screenshot-canary-codedeploy-0.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AWS-Devops-Projects/eks-workshop/7573608bd2d4bde5f479b1e7e3f7b7569072d38a/amplify/static/images/canaries/screenshot-canary-codedeploy-0.png -------------------------------------------------------------------------------- /amplify/static/images/canaries/screenshot-canary-codedeploy-00.png: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/AWS-Devops-Projects/eks-workshop/7573608bd2d4bde5f479b1e7e3f7b7569072d38a/amplify/static/images/canaries/screenshot-canary-codedeploy-00.png -------------------------------------------------------------------------------- /amplify/static/images/canaries/screenshot-canary-codedeploy-1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AWS-Devops-Projects/eks-workshop/7573608bd2d4bde5f479b1e7e3f7b7569072d38a/amplify/static/images/canaries/screenshot-canary-codedeploy-1.png -------------------------------------------------------------------------------- /amplify/static/images/canaries/screenshot-canary-codedeploy-2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AWS-Devops-Projects/eks-workshop/7573608bd2d4bde5f479b1e7e3f7b7569072d38a/amplify/static/images/canaries/screenshot-canary-codedeploy-2.png -------------------------------------------------------------------------------- /amplify/static/images/canaries/screenshot-codedeploy-rollback.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AWS-Devops-Projects/eks-workshop/7573608bd2d4bde5f479b1e7e3f7b7569072d38a/amplify/static/images/canaries/screenshot-codedeploy-rollback.png -------------------------------------------------------------------------------- /amplify/static/images/canaries/traffic-shifting-codedeploy.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AWS-Devops-Projects/eks-workshop/7573608bd2d4bde5f479b1e7e3f7b7569072d38a/amplify/static/images/canaries/traffic-shifting-codedeploy.png -------------------------------------------------------------------------------- /amplify/static/images/canaries/traffic-shifting.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AWS-Devops-Projects/eks-workshop/7573608bd2d4bde5f479b1e7e3f7b7569072d38a/amplify/static/images/canaries/traffic-shifting.png -------------------------------------------------------------------------------- /amplify/static/images/cdk8s/cdk8s.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AWS-Devops-Projects/eks-workshop/7573608bd2d4bde5f479b1e7e3f7b7569072d38a/amplify/static/images/cdk8s/cdk8s.gif -------------------------------------------------------------------------------- /amplify/static/images/container-typescript/AWS-DevOps.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AWS-Devops-Projects/eks-workshop/7573608bd2d4bde5f479b1e7e3f7b7569072d38a/amplify/static/images/container-typescript/AWS-DevOps.png -------------------------------------------------------------------------------- /amplify/static/images/container-typescript/alb-dns.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AWS-Devops-Projects/eks-workshop/7573608bd2d4bde5f479b1e7e3f7b7569072d38a/amplify/static/images/container-typescript/alb-dns.png -------------------------------------------------------------------------------- /amplify/static/images/container-typescript/alb-tg-check1.png: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/AWS-Devops-Projects/eks-workshop/7573608bd2d4bde5f479b1e7e3f7b7569072d38a/amplify/static/images/container-typescript/alb-tg-check1.png -------------------------------------------------------------------------------- /amplify/static/images/container-typescript/alb-tg-check2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AWS-Devops-Projects/eks-workshop/7573608bd2d4bde5f479b1e7e3f7b7569072d38a/amplify/static/images/container-typescript/alb-tg-check2.png -------------------------------------------------------------------------------- /amplify/static/images/container-typescript/canary-lb.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AWS-Devops-Projects/eks-workshop/7573608bd2d4bde5f479b1e7e3f7b7569072d38a/amplify/static/images/container-typescript/canary-lb.png -------------------------------------------------------------------------------- /amplify/static/images/container-typescript/cdk-eks-architecture.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AWS-Devops-Projects/eks-workshop/7573608bd2d4bde5f479b1e7e3f7b7569072d38a/amplify/static/images/container-typescript/cdk-eks-architecture.png -------------------------------------------------------------------------------- /amplify/static/images/container-typescript/cfn-kubectl.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AWS-Devops-Projects/eks-workshop/7573608bd2d4bde5f479b1e7e3f7b7569072d38a/amplify/static/images/container-typescript/cfn-kubectl.png -------------------------------------------------------------------------------- /amplify/static/images/container-typescript/eks-bg-1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AWS-Devops-Projects/eks-workshop/7573608bd2d4bde5f479b1e7e3f7b7569072d38a/amplify/static/images/container-typescript/eks-bg-1.png -------------------------------------------------------------------------------- /amplify/static/images/container-typescript/eks-bg-2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AWS-Devops-Projects/eks-workshop/7573608bd2d4bde5f479b1e7e3f7b7569072d38a/amplify/static/images/container-typescript/eks-bg-2.png -------------------------------------------------------------------------------- /amplify/static/images/container-typescript/eks-canary.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AWS-Devops-Projects/eks-workshop/7573608bd2d4bde5f479b1e7e3f7b7569072d38a/amplify/static/images/container-typescript/eks-canary.png -------------------------------------------------------------------------------- /amplify/static/images/container-typescript/eks-cicd-codebuild.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AWS-Devops-Projects/eks-workshop/7573608bd2d4bde5f479b1e7e3f7b7569072d38a/amplify/static/images/container-typescript/eks-cicd-codebuild.png -------------------------------------------------------------------------------- /amplify/static/images/container-typescript/flask01.png: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/AWS-Devops-Projects/eks-workshop/7573608bd2d4bde5f479b1e7e3f7b7569072d38a/amplify/static/images/container-typescript/flask01.png -------------------------------------------------------------------------------- /amplify/static/images/container-typescript/flask02.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AWS-Devops-Projects/eks-workshop/7573608bd2d4bde5f479b1e7e3f7b7569072d38a/amplify/static/images/container-typescript/flask02.png -------------------------------------------------------------------------------- /amplify/static/images/container-typescript/stage12-green.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AWS-Devops-Projects/eks-workshop/7573608bd2d4bde5f479b1e7e3f7b7569072d38a/amplify/static/images/container-typescript/stage12-green.png -------------------------------------------------------------------------------- /amplify/static/images/container-typescript/stage34-green.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AWS-Devops-Projects/eks-workshop/7573608bd2d4bde5f479b1e7e3f7b7569072d38a/amplify/static/images/container-typescript/stage34-green.png -------------------------------------------------------------------------------- /amplify/static/images/container-typescript/web-blue-inv.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AWS-Devops-Projects/eks-workshop/7573608bd2d4bde5f479b1e7e3f7b7569072d38a/amplify/static/images/container-typescript/web-blue-inv.png -------------------------------------------------------------------------------- /amplify/static/images/container-typescript/web-blue.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AWS-Devops-Projects/eks-workshop/7573608bd2d4bde5f479b1e7e3f7b7569072d38a/amplify/static/images/container-typescript/web-blue.png -------------------------------------------------------------------------------- /amplify/static/images/container-typescript/web-default.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AWS-Devops-Projects/eks-workshop/7573608bd2d4bde5f479b1e7e3f7b7569072d38a/amplify/static/images/container-typescript/web-default.png -------------------------------------------------------------------------------- /amplify/static/images/container-typescript/web-green-inv.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AWS-Devops-Projects/eks-workshop/7573608bd2d4bde5f479b1e7e3f7b7569072d38a/amplify/static/images/container-typescript/web-green-inv.png -------------------------------------------------------------------------------- /amplify/static/images/container-typescript/web-green.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AWS-Devops-Projects/eks-workshop/7573608bd2d4bde5f479b1e7e3f7b7569072d38a/amplify/static/images/container-typescript/web-green.png -------------------------------------------------------------------------------- /amplify/static/images/favicon-16x16.png: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/AWS-Devops-Projects/eks-workshop/7573608bd2d4bde5f479b1e7e3f7b7569072d38a/amplify/static/images/favicon-16x16.png -------------------------------------------------------------------------------- /amplify/static/images/favicon.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AWS-Devops-Projects/eks-workshop/7573608bd2d4bde5f479b1e7e3f7b7569072d38a/amplify/static/images/favicon.png -------------------------------------------------------------------------------- /amplify/static/images/logging_monitoring.zip: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AWS-Devops-Projects/eks-workshop/7573608bd2d4bde5f479b1e7e3f7b7569072d38a/amplify/static/images/logging_monitoring.zip -------------------------------------------------------------------------------- /amplify/static/images/prerequisites/Monoliths-to-Micro-Services-Journey.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AWS-Devops-Projects/eks-workshop/7573608bd2d4bde5f479b1e7e3f7b7569072d38a/amplify/static/images/prerequisites/Monoliths-to-Micro-Services-Journey.png -------------------------------------------------------------------------------- /amplify/static/images/prerequisites/Unicorn-Serverless-Application.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AWS-Devops-Projects/eks-workshop/7573608bd2d4bde5f479b1e7e3f7b7569072d38a/amplify/static/images/prerequisites/Unicorn-Serverless-Application.png -------------------------------------------------------------------------------- /amplify/static/images/prerequisites/c9disableiam.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AWS-Devops-Projects/eks-workshop/7573608bd2d4bde5f479b1e7e3f7b7569072d38a/amplify/static/images/prerequisites/c9disableiam.png -------------------------------------------------------------------------------- /amplify/static/images/prerequisites/c9instancerole.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AWS-Devops-Projects/eks-workshop/7573608bd2d4bde5f479b1e7e3f7b7569072d38a/amplify/static/images/prerequisites/c9instancerole.png -------------------------------------------------------------------------------- /amplify/static/images/prerequisites/event-engine-dashboard.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AWS-Devops-Projects/eks-workshop/7573608bd2d4bde5f479b1e7e3f7b7569072d38a/amplify/static/images/prerequisites/event-engine-dashboard.png -------------------------------------------------------------------------------- /amplify/static/images/prerequisites/event-engine-initial-screen.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AWS-Devops-Projects/eks-workshop/7573608bd2d4bde5f479b1e7e3f7b7569072d38a/amplify/static/images/prerequisites/event-engine-initial-screen.png -------------------------------------------------------------------------------- /amplify/static/images/prerequisites/get-ready.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/AWS-Devops-Projects/eks-workshop/7573608bd2d4bde5f479b1e7e3f7b7569072d38a/amplify/static/images/prerequisites/get-ready.png -------------------------------------------------------------------------------- /amplify/static/images/prerequisites/modern-apps.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AWS-Devops-Projects/eks-workshop/7573608bd2d4bde5f479b1e7e3f7b7569072d38a/amplify/static/images/prerequisites/modern-apps.png -------------------------------------------------------------------------------- /amplify/static/images/showcase/ecs-fargate.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AWS-Devops-Projects/eks-workshop/7573608bd2d4bde5f479b1e7e3f7b7569072d38a/amplify/static/images/showcase/ecs-fargate.png -------------------------------------------------------------------------------- /amplify/static/images/showcase/eks-fargate.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AWS-Devops-Projects/eks-workshop/7573608bd2d4bde5f479b1e7e3f7b7569072d38a/amplify/static/images/showcase/eks-fargate.png -------------------------------------------------------------------------------- /amplify/static/images/showcase/scale-out-computing-on-aws.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AWS-Devops-Projects/eks-workshop/7573608bd2d4bde5f479b1e7e3f7b7569072d38a/amplify/static/images/showcase/scale-out-computing-on-aws.png -------------------------------------------------------------------------------- /amplify/static/images/showcase/serverless-data-lake.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AWS-Devops-Projects/eks-workshop/7573608bd2d4bde5f479b1e7e3f7b7569072d38a/amplify/static/images/showcase/serverless-data-lake.png -------------------------------------------------------------------------------- /amplify/static/images/showcase/serverless.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AWS-Devops-Projects/eks-workshop/7573608bd2d4bde5f479b1e7e3f7b7569072d38a/amplify/static/images/showcase/serverless.png -------------------------------------------------------------------------------- /container-typescript/cdk-eks-fargate/.env: -------------------------------------------------------------------------------- 1 | AWS_ACCOUNT_ID="" 2 | AWS_REGION="ap-southeast-1" -------------------------------------------------------------------------------- /container-typescript/cdk-eks-fargate/.gitignore: -------------------------------------------------------------------------------- 1 | *.js 2 | !jest.config.js 3 | *.d.ts 4 | node_modules 5 | 6 | ## CDK asset staging directory 7 | .cdk.staging 8 | cdk.out 9 | 10 | ## Parcel build directories 11 | .cache 12 | .build 13 | 14 | ## 15 | -------------------------------------------------------------------------------- /container-typescript/cdk-eks-fargate/.npmignore: -------------------------------------------------------------------------------- 1 | *.ts 2 | !*.d.ts 3 | 4 | # CDK asset staging directory 5 | .cdk.staging 6 | cdk.out 7 | -------------------------------------------------------------------------------- /container-typescript/cdk-eks-fargate/README.md: 
-------------------------------------------------------------------------------- 1 | # Amazon EKS using CDK TypeScript 2 | 3 | This is an Amazon EKS project for TypeScript development with the AWS CDK. 4 | 5 | The `cdk.json` file tells the CDK Toolkit how to execute your app. 6 | 7 | ## Useful commands 8 | 9 | * `npm run build` compile typescript to js 10 | * `npm run watch` watch for changes and compile 11 | * `npm run test` perform the jest unit tests 12 | * `cdk deploy` deploy this stack to your default AWS account/region 13 | * `cdk diff` compare deployed stack with current state 14 | * `cdk synth` emits the synthesized CloudFormation template 15 | -------------------------------------------------------------------------------- /container-typescript/cdk-eks-fargate/bin/cdk-eks.ts: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env node 2 | import 'source-map-support/register'; 3 | import * as cdk from '@aws-cdk/core'; 4 | import { CdkEksStack } from '../lib/cdk-eks-stack'; 5 | // import { CdkEksEC2Stack } from '../lib/cdk-eks-ec2-stack'; 6 | 7 | const app = new cdk.App(); 8 | new CdkEksStack(app, 'CdkEksStack'); 9 | 10 | // TODO .env 11 | // new CdkEksStack(app, 'CdkEksStack', { 12 | // env: { 13 | // region: process.env.AWS_REGION, 14 | // account: process.env.AWS_ACCOUNT_ID 15 | // } 16 | // }); 17 | 18 | // TODO EKS-Cluster 19 | // new CdkEksEC2Stack(app, 'CDK-EKS-Cluster-Stack'); 20 | -------------------------------------------------------------------------------- /container-typescript/cdk-eks-fargate/cdk.json: -------------------------------------------------------------------------------- 1 | { 2 | "app": "npx ts-node bin/cdk-eks.ts", 3 | "context": { 4 | "@aws-cdk/core:enableStackNameDuplicates": "true", 5 | "aws-cdk:enableDiffNoFail": "true" 6 | } 7 | } 8 | -------------------------------------------------------------------------------- /container-typescript/cdk-eks-fargate/jest.config.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | roots: ['<rootDir>/test'], 3 | testMatch: ['**/*.test.ts'], 4 | transform: { 5 | '^.+\\.tsx?$': 'ts-jest' 6 | } 7 | }; 8 | -------------------------------------------------------------------------------- /container-typescript/cdk-eks-fargate/lib/cdk-eks-ec2-stack.ts: -------------------------------------------------------------------------------- 1 | import * as cdk from '@aws-cdk/core'; 2 | 3 | import * as ec2 from '@aws-cdk/aws-ec2'; 4 | import * as iam from '@aws-cdk/aws-iam'; 5 | import * as eks from '@aws-cdk/aws-eks'; 6 | import * as autoscaling from '@aws-cdk/aws-autoscaling'; 7 | 8 | export class CdkEksEC2Stack extends cdk.Stack { 9 | constructor(scope: cdk.Construct, id: string, props?: cdk.StackProps) { 10 | super(scope, id, props); 11 | 12 | // The code that defines your stack goes here 13 | 14 | // Step 1. Create a new VPC for our EKS Cluster 15 | // The default VPC will create a NAT Gateway for each AZs --> Cost 16 | const vpc = new ec2.Vpc(this, 'EKS-Cluster-VPC', { 17 | cidr: '10.20.0.0/16', 18 | natGateways: 1 19 | }) 20 | 21 | // Step 2. 
EKS-Cluster Cluster 22 | 23 | // IAM role for our EC2 worker nodes 24 | const workerRole = new iam.Role(this, 'EKSWorkerRole', { 25 | assumedBy: new iam.ServicePrincipal('ec2.amazonaws.com') 26 | }); 27 | 28 | const eksCluster = new eks.Cluster(this, 'Cluster', { 29 | clusterName: "EKS-Cluster", 30 | vpc: vpc, 31 | kubectlEnabled: true, // We want to be able to manage k8s resources using CDK 32 | defaultCapacity: 0 // We want to manage capacity ourself 33 | }); 34 | 35 | const onDemandASG = new autoscaling.AutoScalingGroup(this, 'OnDemandASG', { 36 | vpc: vpc, 37 | role: workerRole, 38 | minCapacity: 1, 39 | maxCapacity: 3, 40 | instanceType: new ec2.InstanceType('t3.medium'), 41 | machineImage: new eks.EksOptimizedImage({ 42 | kubernetesVersion: '1.15', 43 | nodeType: eks.NodeType.STANDARD // without this, incorrect SSM parameter for AMI is resolved 44 | }), 45 | updateType: autoscaling.UpdateType.ROLLING_UPDATE 46 | }); 47 | 48 | eksCluster.addAutoScalingGroup(onDemandASG, {}); 49 | 50 | } 51 | } 52 | -------------------------------------------------------------------------------- /container-typescript/cdk-eks-fargate/lib/cdk-eks-stack.ts: -------------------------------------------------------------------------------- 1 | import * as cdk from '@aws-cdk/core'; 2 | 3 | import * as ec2 from '@aws-cdk/aws-ec2'; 4 | import * as iam from '@aws-cdk/aws-iam'; 5 | import * as eks from '@aws-cdk/aws-eks'; 6 | 7 | export class CdkEksStack extends cdk.Stack { 8 | constructor(scope: cdk.Construct, id: string, props?: cdk.StackProps) { 9 | super(scope, id, props); 10 | 11 | // The code that defines your stack goes here 12 | 13 | // Step 1. Create a new VPC for our EKS Cluster 14 | // The default VPC will create a NAT Gateway for each AZs --> Cost 15 | const vpc = new ec2.Vpc(this, 'EKS-VPC', { 16 | cidr: '10.10.0.0/16', 17 | natGateways: 1 18 | }) 19 | 20 | // Step 2. 
EKS Cluster with Fargate 21 | 22 | // IAM Role for our Fargate worker nodes 23 | const mastersRole = new iam.Role(this, 'masters-role', { 24 | assumedBy: new iam.AccountRootPrincipal() 25 | }); 26 | 27 | const fargateProfileRole = new iam.Role(this, "fargate-profile-role", { 28 | assumedBy: new iam.ServicePrincipal("eks-fargate-pods.amazonaws.com"), 29 | managedPolicies: [ 30 | iam.ManagedPolicy.fromAwsManagedPolicyName("AmazonEKSFargatePodExecutionRolePolicy") 31 | ] 32 | }) 33 | 34 | const cluster = new eks.FargateCluster(this, "fargate-cluster", { 35 | clusterName: "EKS-Fargate", 36 | vpc, 37 | mastersRole, 38 | coreDnsComputeType: eks.CoreDnsComputeType.FARGATE, 39 | defaultProfile: { 40 | fargateProfileName: "default-profile", 41 | selectors: [ 42 | { namespace: "default" }, 43 | { namespace: "kube-system" } 44 | ], 45 | podExecutionRole: fargateProfileRole 46 | } 47 | }); 48 | 49 | } 50 | } 51 | -------------------------------------------------------------------------------- /container-typescript/cdk-eks-fargate/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "cdk-eks", 3 | "version": "0.1.0", 4 | "bin": { 5 | "cdk-eks": "bin/cdk-eks.js" 6 | }, 7 | "scripts": { 8 | "build": "tsc", 9 | "watch": "tsc -w", 10 | "test": "jest", 11 | "cdk": "cdk" 12 | }, 13 | "devDependencies": { 14 | "@aws-cdk/assert": "1.37.0", 15 | "@types/jest": "^25.2.1", 16 | "@types/node": "10.17.5", 17 | "jest": "^25.5.0", 18 | "ts-jest": "^25.3.1", 19 | "aws-cdk": "1.37.0", 20 | "ts-node": "^8.1.0", 21 | "typescript": "~3.7.2" 22 | }, 23 | "dependencies": { 24 | "@aws-cdk/aws-autoscaling": "^1.37.0", 25 | "@aws-cdk/aws-ec2": "^1.37.0", 26 | "@aws-cdk/aws-ecr": "^1.37.0", 27 | "@aws-cdk/aws-ecr-assets": "^1.37.0", 28 | "@aws-cdk/aws-eks": "^1.37.0", 29 | "@aws-cdk/aws-iam": "^1.37.0", 30 | "@aws-cdk/core": "1.37.0", 31 | "dotenv": "^8.2.0", 32 | "source-map-support": "^0.5.16" 33 | } 34 | } 35 | -------------------------------------------------------------------------------- /container-typescript/cdk-eks-fargate/project-directory.txt: -------------------------------------------------------------------------------- 1 | /home/ec2-user/environment/eks-workshop/container-typescript/cdk-eks 2 | ├── README.md 3 | ├── bin 4 | | └── cdk-eks.ts 5 | ├── cdk.json 6 | ├── jest.config.js 7 | ├── lib 8 | | ├── cdk-eks-ec2-stack.ts 9 | | └── cdk-eks-stack.ts 10 | ├── package-lock.json 11 | ├── package.json 12 | ├── test 13 | | └── cdk-eks.test.ts 14 | └── tsconfig.json 15 | -------------------------------------------------------------------------------- /container-typescript/cdk-eks-fargate/test/cdk-eks.test.ts: -------------------------------------------------------------------------------- 1 | import { expect as expectCDK, matchTemplate, MatchStyle } from '@aws-cdk/assert'; 2 | import * as cdk from '@aws-cdk/core'; 3 | import * as CdkEks from '../lib/cdk-eks-stack'; 4 | 5 | test('Empty Stack', () => { 6 | const app = new cdk.App(); 7 | // WHEN 8 | const stack = new CdkEks.CdkEksStack(app, 'MyTestStack'); 9 | // THEN 10 | expectCDK(stack).to(matchTemplate({ 11 | "Resources": {} 12 | }, MatchStyle.EXACT)) 13 | }); 14 | -------------------------------------------------------------------------------- /container-typescript/cdk-eks-fargate/tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | "target": "ES2018", 4 | "module": "commonjs", 5 | "lib": ["es2018"], 6 | "declaration": true, 7 | 
"strict": true, 8 | "noImplicitAny": true, 9 | "strictNullChecks": true, 10 | "noImplicitThis": true, 11 | "alwaysStrict": true, 12 | "noUnusedLocals": false, 13 | "noUnusedParameters": false, 14 | "noImplicitReturns": true, 15 | "noFallthroughCasesInSwitch": false, 16 | "inlineSourceMap": true, 17 | "inlineSources": true, 18 | "experimentalDecorators": true, 19 | "strictPropertyInitialization": false, 20 | "typeRoots": ["./node_modules/@types"] 21 | }, 22 | "exclude": ["cdk.out"] 23 | } 24 | -------------------------------------------------------------------------------- /container-typescript/cdk-eks/.env: -------------------------------------------------------------------------------- 1 | # AWS_ACCOUNT_ID="" 2 | # AWS_REGION="ap-southeast-1" 3 | # AWS_ACCESS_KEY_ID="" 4 | # AWS_SECRET_ACCESS_KEY="" 5 | 6 | AWS_VPC_NAME="EKS-VPC" 7 | AWS_VPC_CIDR="10.10.0.0/16" 8 | 9 | EKS_CLUSTER_NAME="EKS-Cluster" 10 | EKS_CLUSTER_ROLE_NAME="EKS-Cluster-Role" 11 | 12 | ECR_REPOSITORY="cdk-eks-ecr-repo" 13 | CODECOMMIT_REPOSITORY="cdk-eks-codecommit-repo" -------------------------------------------------------------------------------- /container-typescript/cdk-eks/.gitignore: -------------------------------------------------------------------------------- 1 | *.js 2 | !jest.config.js 3 | *.d.ts 4 | node_modules 5 | 6 | # CDK asset staging directory 7 | .cdk.staging 8 | cdk.out 9 | 10 | # Parcel build directories 11 | .cache 12 | .build 13 | -------------------------------------------------------------------------------- /container-typescript/cdk-eks/.npmignore: -------------------------------------------------------------------------------- 1 | *.ts 2 | !*.d.ts 3 | 4 | # CDK asset staging directory 5 | .cdk.staging 6 | cdk.out 7 | -------------------------------------------------------------------------------- /container-typescript/cdk-eks/README.md: -------------------------------------------------------------------------------- 1 | # Amazon EKS using CDK TypeScript 2 | 3 | This is a Amazon EKS for TypeScript development with CDK. 4 | 5 | The `cdk.json` file tells the CDK Toolkit how to execute your app. 
6 | 7 | ## Useful commands 8 | 9 | * `npm run build` compile typescript to js 10 | * `npm run watch` watch for changes and compile 11 | * `npm run test` perform the jest unit tests 12 | * `cdk deploy` deploy this stack to your default AWS account/region 13 | * `cdk diff` compare deployed stack with current state 14 | * `cdk synth` emits the synthesized CloudFormation template 15 | -------------------------------------------------------------------------------- /container-typescript/cdk-eks/bin/cdk-eks.ts: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env node 2 | import 'source-map-support/register'; 3 | import * as cdk from '@aws-cdk/core'; 4 | import { CdkEksStack } from '../lib/cdk-eks-stack'; 5 | 6 | const app = new cdk.App(); 7 | new CdkEksStack(app, 'CdkEksStack'); 8 | 9 | // TODO .env 10 | // new CdkEksStack(app, 'CdkEksStack', { 11 | // env: { 12 | // region: process.env.AWS_REGION, 13 | // account: process.env.AWS_ACCOUNT_ID 14 | // } 15 | // }); 16 | -------------------------------------------------------------------------------- /container-typescript/cdk-eks/cdk.json: -------------------------------------------------------------------------------- 1 | { 2 | "app": "npx ts-node bin/cdk-eks.ts", 3 | "context": { 4 | "@aws-cdk/core:enableStackNameDuplicates": "true", 5 | "aws-cdk:enableDiffNoFail": "true" 6 | } 7 | } 8 | -------------------------------------------------------------------------------- /container-typescript/cdk-eks/jest.config.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | roots: ['/test'], 3 | testMatch: ['**/*.test.ts'], 4 | transform: { 5 | '^.+\\.tsx?$': 'ts-jest' 6 | } 7 | }; 8 | -------------------------------------------------------------------------------- /container-typescript/cdk-eks/lib/cdk-eks-stack.ts: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env node 2 | 3 | import * as cdk from '@aws-cdk/core'; 4 | 5 | import * as dotenv from 'dotenv'; 6 | import * as iam from '@aws-cdk/aws-iam'; 7 | import * as eks from '@aws-cdk/aws-eks'; 8 | import * as ec2 from '@aws-cdk/aws-ec2'; 9 | // import * as autoscaling from '@aws-cdk/aws-autoscaling'; 10 | 11 | import * as ecr from '@aws-cdk/aws-ecr'; 12 | // import * as codecommit from '@aws-cdk/aws-codecommit'; 13 | // import * as codebuild from '@aws-cdk/aws-codebuild'; 14 | // import * as targets from '@aws-cdk/aws-events-targets'; 15 | // import * as codepipeline from '@aws-cdk/aws-codepipeline'; 16 | // import * as codepipeline_actions from '@aws-cdk/aws-codepipeline-actions'; 17 | 18 | export class CdkEksStack extends cdk.Stack { 19 | constructor(scope: cdk.Construct, id: string, props?: cdk.StackProps) { 20 | super(scope, id, props); 21 | 22 | // The code that defines your stack goes here 23 | dotenv.config(); 24 | // console.log(`vpc_name is ${process.env.AWS_VPC_NAME}`); 25 | // console.log(`vpc_cidr is ${process.env.AWS_VPC_CIDR}`); 26 | 27 | 28 | /** 29 | * Step 1. 
Create a new VPC for our EKS Cluster 30 | */ 31 | var vpc_name = process.env.AWS_VPC_NAME || "EKS-VPC"; 32 | var vpc_cidr = process.env.AWS_VPC_CIDR || "10.10.0.0/16"; 33 | 34 | // ONLY 1 NAT Gateway --> Cost Optimization trade-off 35 | const vpc = new ec2.Vpc(this, vpc_name, { 36 | cidr: vpc_cidr, 37 | natGateways: 1, 38 | subnetConfiguration: [ 39 | { cidrMask: 24, subnetType: ec2.SubnetType.PUBLIC, name: "PublicDMZ" }, 40 | { cidrMask: 24, subnetType: ec2.SubnetType.PRIVATE, name: "PrivateServices" } 41 | ], 42 | maxAzs: 2 43 | }) 44 | 45 | 46 | /** 47 | * Step 2. Create a new EKS Cluster 48 | */ 49 | 50 | // IAM role for our EC2 worker nodes 51 | const clusterAdmin = new iam.Role(this, 'EKS-AdminRole', { 52 | assumedBy: new iam.AccountRootPrincipal() 53 | }); 54 | 55 | var cluster_name = process.env.EKS_CLUSTER_NAME || "EKS-Cluster"; 56 | // console.log(`cluster_name is ${process.env.EKS_CLUSTER_NAME}`); 57 | 58 | const cluster = new eks.Cluster(this, cluster_name, { 59 | clusterName: cluster_name, 60 | vpc, 61 | defaultCapacity: 2, 62 | mastersRole: clusterAdmin, 63 | outputClusterName: true, 64 | }); 65 | 66 | 67 | /** 68 | * TODO 69 | * 70 | * Step 3: Code* CI/CD 71 | */ 72 | 73 | var ecr_repository_name = process.env.ECR_REPOSITORY || "eks-cicd-ecr-repo"; 74 | // console.log(`repository_name is ${process.env.ECR_REPOSITORY}`); 75 | const ecrRepo = new ecr.Repository(this, ecr_repository_name, { 76 | repositoryName: ecr_repository_name 77 | }); 78 | 79 | // TODO AWS Code* 80 | // const repository = new codecommit.Repository(this, 'EKS-CodeCommitRepo', { 81 | // repositoryName: `${this.stackName}-repo` 82 | // }); 83 | 84 | } 85 | } 86 | -------------------------------------------------------------------------------- /container-typescript/cdk-eks/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "cdk-eks", 3 | "version": "0.1.0", 4 | "bin": { 5 | "cdk-eks": "bin/cdk-eks.js" 6 | }, 7 | "scripts": { 8 | "build": "tsc", 9 | "watch": "tsc -w", 10 | "test": "jest", 11 | "cdk": "cdk" 12 | }, 13 | "devDependencies": { 14 | "@aws-cdk/assert": "1.37.0", 15 | "@types/jest": "^25.2.1", 16 | "@types/node": "10.17.5", 17 | "jest": "^25.5.0", 18 | "ts-jest": "^25.3.1", 19 | "aws-cdk": "1.37.0", 20 | "ts-node": "^8.1.0", 21 | "typescript": "~3.7.2" 22 | }, 23 | "dependencies": { 24 | "@aws-cdk/aws-autoscaling": "^1.38.0", 25 | "@aws-cdk/aws-codebuild": "^1.38.0", 26 | "@aws-cdk/aws-codecommit": "^1.38.0", 27 | "@aws-cdk/aws-codepipeline": "^1.38.0", 28 | "@aws-cdk/aws-codepipeline-actions": "^1.38.0", 29 | "@aws-cdk/aws-ec2": "^1.37.0", 30 | "@aws-cdk/aws-ecr": "^1.37.0", 31 | "@aws-cdk/aws-ecr-assets": "^1.37.0", 32 | "@aws-cdk/aws-eks": "^1.37.0", 33 | "@aws-cdk/aws-events-targets": "^1.38.0", 34 | "@aws-cdk/aws-iam": "^1.37.0", 35 | "@aws-cdk/core": "1.37.0", 36 | "dotenv": "^8.2.0", 37 | "source-map-support": "^0.5.16" 38 | } 39 | } 40 | -------------------------------------------------------------------------------- /container-typescript/cdk-eks/project-directory.txt: -------------------------------------------------------------------------------- 1 | eks-workshop/container-typescript/cdk-eks 2 | . 
3 | ├── README.md 4 | ├── bin 5 | | └── cdk-eks.ts 6 | ├── cdk.json 7 | ├── jest.config.js 8 | ├── lib 9 | | └── cdk-eks-stack.ts 10 | ├── package-lock.json 11 | ├── package.json 12 | ├── test 13 | | └── cdk-eks.test.ts 14 | └── tsconfig.json 15 | -------------------------------------------------------------------------------- /container-typescript/cdk-eks/test/cdk-eks.test.ts: -------------------------------------------------------------------------------- 1 | import { expect as expectCDK, matchTemplate, MatchStyle } from '@aws-cdk/assert'; 2 | import * as cdk from '@aws-cdk/core'; 3 | import * as CdkEks from '../lib/cdk-eks-stack'; 4 | 5 | test('Empty Stack', () => { 6 | const app = new cdk.App(); 7 | // WHEN 8 | const stack = new CdkEks.CdkEksStack(app, 'MyTestStack'); 9 | // THEN 10 | expectCDK(stack).to(matchTemplate({ 11 | "Resources": {} 12 | }, MatchStyle.EXACT)) 13 | }); 14 | -------------------------------------------------------------------------------- /container-typescript/cdk-eks/tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | "target": "ES2018", 4 | "module": "commonjs", 5 | "lib": ["es2018"], 6 | "declaration": true, 7 | "strict": true, 8 | "noImplicitAny": true, 9 | "strictNullChecks": true, 10 | "noImplicitThis": true, 11 | "alwaysStrict": true, 12 | "noUnusedLocals": false, 13 | "noUnusedParameters": false, 14 | "noImplicitReturns": true, 15 | "noFallthroughCasesInSwitch": false, 16 | "inlineSourceMap": true, 17 | "inlineSources": true, 18 | "experimentalDecorators": true, 19 | "strictPropertyInitialization": false, 20 | "typeRoots": ["./node_modules/@types"] 21 | }, 22 | "exclude": ["cdk.out"] 23 | } 24 | -------------------------------------------------------------------------------- /container-typescript/eks-blue-green-cicd/.gitignore: -------------------------------------------------------------------------------- 1 | .DS_Store 2 | -------------------------------------------------------------------------------- /container-typescript/eks-blue-green-cicd/cdk/.gitignore: -------------------------------------------------------------------------------- 1 | *.js 2 | !jest.config.js 3 | *.d.ts 4 | node_modules 5 | 6 | # CDK asset staging directory 7 | .cdk.staging 8 | cdk.out 9 | -------------------------------------------------------------------------------- /container-typescript/eks-blue-green-cicd/cdk/README.md: -------------------------------------------------------------------------------- 1 | # Useful commands 2 | 3 | * `npm run build` compile typescript to js 4 | * `npm run watch` watch for changes and compile 5 | * `npm run test` perform the jest unit tests 6 | * `cdk deploy` deploy this stack to your default AWS account/region 7 | * `cdk diff` compare deployed stack with current state 8 | * `cdk synth` emits the synthesized CloudFormation template 9 | -------------------------------------------------------------------------------- /container-typescript/eks-blue-green-cicd/cdk/bin/cdk.ts: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env node 2 | import 'source-map-support/register'; 3 | import cdk = require('@aws-cdk/core'); 4 | import { CdkStackALBEksBg } from '../lib/cdk-stack'; 5 | 6 | const app = new cdk.App(); 7 | 8 | const env = { 9 | region: app.node.tryGetContext('region') || process.env.CDK_INTEG_REGION || process.env.CDK_DEFAULT_REGION, 10 | account: app.node.tryGetContext('account') || process.env.CDK_INTEG_ACCOUNT || 
process.env.CDK_DEFAULT_ACCOUNT 11 | }; 12 | 13 | 14 | new CdkStackALBEksBg(app, 'CdkStackALBEksBg', { env }); 15 | -------------------------------------------------------------------------------- /container-typescript/eks-blue-green-cicd/cdk/cdk.json: -------------------------------------------------------------------------------- 1 | { 2 | "app": "npx ts-node bin/cdk.ts" 3 | } 4 | -------------------------------------------------------------------------------- /container-typescript/eks-blue-green-cicd/cdk/jest.config.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | "roots": [ 3 | "/test" 4 | ], 5 | testMatch: [ '**/*.test.ts'], 6 | "transform": { 7 | "^.+\\.tsx?$": "ts-jest" 8 | }, 9 | } 10 | -------------------------------------------------------------------------------- /container-typescript/eks-blue-green-cicd/cdk/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "cdk", 3 | "version": "0.1.0", 4 | "bin": { 5 | "cdk": "bin/cdk.js" 6 | }, 7 | "scripts": { 8 | "build": "tsc", 9 | "watch": "tsc -w", 10 | "test": "jest", 11 | "cdk": "cdk" 12 | }, 13 | "devDependencies": { 14 | "@aws-cdk/assert": "^1.19.0", 15 | "@types/jest": "^24.0.18", 16 | "jest": "^24.9.0", 17 | "ts-jest": "^24.0.2", 18 | "aws-cdk": "^1.19.0", 19 | "ts-node": "^8.1.0", 20 | "typescript": "~3.7.3" 21 | }, 22 | "dependencies": { 23 | "@aws-cdk/aws-codebuild": "^1.19.0", 24 | "@aws-cdk/aws-ec2": "^1.19.0", 25 | "@aws-cdk/aws-ecr": "^1.19.0", 26 | "@aws-cdk/aws-ecs": "^1.19.0", 27 | "@aws-cdk/aws-eks": "^1.19.0", 28 | "@aws-cdk/aws-events-targets": "^1.19.0", 29 | "@aws-cdk/core": "^1.19.0", 30 | "@aws-cdk/aws-codepipeline": "^1.19.0", 31 | "@aws-cdk/aws-codepipeline-actions": "^1.19.0", 32 | "@types/node": "^12.11.1", 33 | "source-map-support": "^0.5.9" 34 | } 35 | } 36 | -------------------------------------------------------------------------------- /container-typescript/eks-blue-green-cicd/cdk/test/cdk.test.ts: -------------------------------------------------------------------------------- 1 | import { expect as expectCDK, matchTemplate, MatchStyle } from '@aws-cdk/assert'; 2 | import cdk = require('@aws-cdk/core'); 3 | import Cdk = require('../lib/cdk-stack'); 4 | 5 | test('Empty Stack', () => { 6 | const app = new cdk.App(); 7 | // WHEN 8 | const stack = new Cdk.CdkStackALBEksBg(app, 'MyTestStack'); 9 | // THEN 10 | expectCDK(stack).to(matchTemplate({ 11 | "Resources": {} 12 | }, MatchStyle.EXACT)) 13 | }); 14 | -------------------------------------------------------------------------------- /container-typescript/eks-blue-green-cicd/cdk/tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | "target":"ES2018", 4 | "module": "commonjs", 5 | "lib": ["es2016", "es2017.object", "es2017.string"], 6 | "declaration": true, 7 | "strict": true, 8 | "noImplicitAny": true, 9 | "strictNullChecks": true, 10 | "noImplicitThis": true, 11 | "alwaysStrict": true, 12 | "noUnusedLocals": false, 13 | "noUnusedParameters": false, 14 | "noImplicitReturns": true, 15 | "noFallthroughCasesInSwitch": false, 16 | "inlineSourceMap": true, 17 | "inlineSources": true, 18 | "experimentalDecorators": true, 19 | "strictPropertyInitialization":false, 20 | "typeRoots": ["./node_modules/@types"] 21 | }, 22 | "exclude": ["cdk.out"] 23 | } 24 | -------------------------------------------------------------------------------- 
/container-typescript/eks-blue-green-cicd/dockerAssets.d/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM alpine:latest 2 | 3 | 4 | ENV KUBECONFIG /home/kubectl/.kube/kubeconfig 5 | ENV HOME /home/kubectl 6 | # ENV KUBECONFIG /root/.kube/kubeconfig 7 | 8 | 9 | RUN \ 10 | mkdir /root/bin /aws; \ 11 | apk -Uuv add groff less bash python py-pip jq curl docker && \ 12 | pip install --upgrade pip; \ 13 | pip install awscli && \ 14 | apk --purge -v del py-pip && \ 15 | rm /var/cache/apk/* && \ 16 | # Create non-root user (with a randomly chosen UID/GUI). 17 | adduser kubectl -Du 5566 18 | 19 | ADD https://amazon-eks.s3-us-west-2.amazonaws.com/1.14.6/2019-08-22/bin/linux/amd64/kubectl /usr/local/bin/kubectl 20 | #COPY kubectl /usr/local/bin/kubectl 21 | 22 | WORKDIR $HOME 23 | 24 | COPY entrypoint.sh /usr/local/bin/entrypoint.sh 25 | 26 | RUN chmod a+x /usr/local/bin/kubectl /usr/local/bin/entrypoint.sh 27 | 28 | 29 | # USER kubectl 30 | ENTRYPOINT ["/usr/local/bin/entrypoint.sh"] 31 | -------------------------------------------------------------------------------- /container-typescript/eks-blue-green-cicd/dockerAssets.d/entrypoint.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | set -e 3 | 4 | # export PATH=$PATH:/root/bin 5 | HOME=/home/kubectl 6 | 7 | export KUBECONFIG=$HOME/.kube/kubeconfig 8 | 9 | start_dockerd() { 10 | /usr/bin/dockerd \ 11 | --host=unix:///var/run/docker.sock \ 12 | --host=tcp://127.0.0.1:2375 \ 13 | --storage-driver=overlay &>/var/log/docker.log & 14 | tries=0 15 | d_timeout=60 16 | until docker info >/dev/null 2>&1 17 | do 18 | if [ "$tries" -gt "$d_timeout" ]; then 19 | cat /var/log/docker.log 20 | echo 'Timed out trying to connect to internal docker host.' >&2 21 | exit 1 22 | fi 23 | tries=$(( $tries + 1 )) 24 | sleep 1 25 | done 26 | } 27 | 28 | 29 | if [[ ! -z ${CODEBUILD_BUILD_ID} ]]; then 30 | # in AWS CodeBuild 31 | echo "found myself in AWS CodeBuild, starting dockerd..." 32 | start_dockerd 33 | fi 34 | 35 | 36 | if [[ ! -z ${AWS_REGION} ]]; then 37 | region=$AWS_REGION 38 | echo "[INFO] region=$AWS_REGION" 39 | else 40 | echo "REGION not defined, trying to lookup from EC2 metadata..." 41 | region=$(curl -s http://169.254.169.254/latest/dynamic/instance-identity/document | jq .region -r) 42 | fi 43 | 44 | # export AWS_DEFAULT_REGION=${REGION-${CODEBUILD_AGENT_ENV_CODEBUILD_REGION-$region}} 45 | export AWS_DEFAULT_REGION=$region 46 | 47 | CLUSTER_NAME=${CLUSTER_NAME-default} 48 | 49 | update_kubeconfig(){ 50 | if [[ -n ${EKS_ROLE_ARN} ]]; then 51 | echo "[INFO] got EKS_ROLE_ARN=${EKS_ROLE_ARN}, updating kubeconfig with this role" 52 | aws eks update-kubeconfig --name $CLUSTER_NAME --kubeconfig $KUBECONFIG --role-arn "${EKS_ROLE_ARN}" 53 | else 54 | aws eks update-kubeconfig --name $CLUSTER_NAME --kubeconfig $KUBECONFIG 55 | fi 56 | } 57 | 58 | update_kubeconfig 59 | exec "$@" 60 | -------------------------------------------------------------------------------- /container-typescript/eks-blue-green-cicd/flask-docker-app/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM alpine:3.8 2 | RUN apk add python3 py-pip && \ 3 | python3 -m ensurepip && \ 4 | pip install --upgrade pip && \ 5 | pip install flask 6 | 7 | ENV FLASK_APP app.py 8 | ENV PLATFORM 'Amazon EKS' 9 | 10 | WORKDIR /app 11 | COPY . 
/app/ 12 | 13 | CMD ["python", "app.py"] 14 | -------------------------------------------------------------------------------- /container-typescript/eks-blue-green-cicd/flask-docker-app/app.py: -------------------------------------------------------------------------------- 1 | # from flask import Flask, escape, request, render_template 2 | import flask 3 | import datetime 4 | import platform 5 | import os 6 | 7 | app = flask.Flask(__name__) 8 | 9 | 10 | @app.route('/') 11 | def hello(): 12 | name = flask.request.args.get("name", "Flask-demo") 13 | time = datetime.datetime.now() 14 | python_version = platform.python_version() 15 | aws_platform = os.environ.get('PLATFORM', 'Amazon Web Services') 16 | return flask.render_template('hello.html', 17 | platform=aws_platform, 18 | flask_version=flask.__version__, 19 | python_version=python_version, 20 | flask_url='https://palletsprojects.com/p/flask/', 21 | time=time, 22 | name=name) 23 | 24 | 25 | if __name__ == '__main__': 26 | app.run( 27 | debug=True, 28 | host='0.0.0.0', 29 | port=5000 30 | ) 31 | -------------------------------------------------------------------------------- /container-typescript/eks-blue-green-cicd/flask-docker-app/k8s/alb-ingress-controller.yaml: -------------------------------------------------------------------------------- 1 | # Application Load Balancer (ALB) Ingress Controller Deployment Manifest. 2 | # This manifest details sensible defaults for deploying an ALB Ingress Controller. 3 | # GitHub: https://github.com/kubernetes-sigs/aws-alb-ingress-controller 4 | apiVersion: apps/v1 5 | kind: Deployment 6 | metadata: 7 | labels: 8 | app.kubernetes.io/name: alb-ingress-controller 9 | name: alb-ingress-controller 10 | # Namespace the ALB Ingress Controller should run in. Does not impact which 11 | # namespaces it's able to resolve ingress resource for. For limiting ingress 12 | # namespace scope, see --watch-namespace. 13 | namespace: kube-system 14 | spec: 15 | selector: 16 | matchLabels: 17 | app.kubernetes.io/name: alb-ingress-controller 18 | template: 19 | metadata: 20 | labels: 21 | app.kubernetes.io/name: alb-ingress-controller 22 | spec: 23 | containers: 24 | - name: alb-ingress-controller 25 | args: 26 | # Limit the namespace where this ALB Ingress Controller deployment will 27 | # resolve ingress resources. If left commented, all namespaces are used. 28 | # - --watch-namespace=your-k8s-namespace 29 | 30 | # Setting the ingress-class flag below ensures that only ingress resources with the 31 | # annotation kubernetes.io/ingress.class: "alb" are respected by the controller. You may 32 | # choose any class you'd like for this controller to respect. 33 | - --ingress-class=alb 34 | 35 | # REQUIRED 36 | # Name of your cluster. Used when naming resources created 37 | # by the ALB Ingress Controller, providing distinction between 38 | # clusters. 39 | - --cluster-name=devCluster 40 | 41 | # AWS VPC ID this ingress controller will use to create AWS resources. 42 | # If unspecified, it will be discovered from ec2metadata. 43 | # - --aws-vpc-id=vpc-xxxxxx 44 | 45 | # AWS region this ingress controller will operate in. 46 | # If unspecified, it will be discovered from ec2metadata. 47 | # List of regions: http://docs.aws.amazon.com/general/latest/gr/rande.html#vpc_region 48 | # - --aws-region=us-west-1 49 | 50 | # Enables logging on all outbound requests sent to the AWS API. 51 | # If logging is desired, set to true. 52 | # - --aws-api-debug 53 | # Maximum number of times to retry the aws calls. 54 | # defaults to 10. 
55 | # - --aws-max-retries=10 56 | # env: 57 | # AWS key id for authenticating with the AWS API. 58 | # This is only here for examples. It's recommended you instead use 59 | # a project like kube2iam for granting access. 60 | #- name: AWS_ACCESS_KEY_ID 61 | # value: KEYVALUE 62 | 63 | # AWS key secret for authenticating with the AWS API. 64 | # This is only here for examples. It's recommended you instead use 65 | # a project like kube2iam for granting access. 66 | #- name: AWS_SECRET_ACCESS_KEY 67 | # value: SECRETVALUE 68 | # Repository location of the ALB Ingress Controller. 69 | image: docker.io/amazon/aws-alb-ingress-controller:v1.1.6 70 | serviceAccountName: alb-ingress-controller -------------------------------------------------------------------------------- /container-typescript/eks-blue-green-cicd/flask-docker-app/k8s/flask-ALB-namespace.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: v1 2 | kind: Namespace 3 | metadata: 4 | name: "flask-alb" 5 | -------------------------------------------------------------------------------- /container-typescript/eks-blue-green-cicd/flask-docker-app/k8s/flask.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: v1 2 | kind: Service 3 | metadata: 4 | name: flask-svc 5 | spec: 6 | selector: 7 | app: flask 8 | ports: 9 | - name: web 10 | port: 80 11 | targetPort: 5000 12 | type: LoadBalancer 13 | --- 14 | apiVersion: apps/v1beta1 15 | kind: Deployment 16 | metadata: 17 | labels: 18 | run: flask 19 | name: flask 20 | spec: 21 | replicas: 1 22 | template: 23 | metadata: 24 | labels: 25 | app: flask 26 | spec: 27 | containers: 28 | - name: flask 29 | image: nikunjv/flask-image:blue 30 | ports: 31 | - containerPort: 5000 32 | # command: 33 | # - "sh" 34 | # - "-c" 35 | # - "yum install -y python3-pip && tail -f /var/log/yum.log " 36 | resources: 37 | limits: 38 | memory: "500Mi" 39 | cpu: "0.25" 40 | -------------------------------------------------------------------------------- /container-typescript/eks-blue-green-cicd/flask-docker-app/k8s/flaskALBBlue.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: v1 2 | kind: Service 3 | metadata: 4 | name: "flask-svc-alb-blue" 5 | namespace: "flask-alb" 6 | spec: 7 | selector: 8 | app: "flask-deploy-alb-blue" 9 | type: NodePort 10 | ports: 11 | - name: web 12 | port: 80 13 | targetPort: 5000 14 | --- 15 | apiVersion: apps/v1 16 | kind: Deployment 17 | metadata: 18 | labels: 19 | run: "flask" 20 | name: "flask-deploy-alb-blue" 21 | namespace: "flask-alb" 22 | spec: 23 | selector: 24 | matchLabels: 25 | app: "flask-deploy-alb-blue" 26 | replicas: 1 27 | template: 28 | metadata: 29 | labels: 30 | app: "flask-deploy-alb-blue" 31 | spec: 32 | containers: 33 | - name: "flask" 34 | image: nikunjv/flask-image:blue 35 | ports: 36 | - containerPort: 5000 37 | # command: 38 | # - "sh" 39 | # - "-c" 40 | # - "yum install -y python3-pip && tail -f /var/log/yum.log " 41 | resources: 42 | limits: 43 | memory: "500Mi" 44 | cpu: "0.25" 45 | -------------------------------------------------------------------------------- /container-typescript/eks-blue-green-cicd/flask-docker-app/k8s/flaskALBGreen.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: v1 2 | kind: Service 3 | metadata: 4 | name: "flask-svc-alb-green" 5 | namespace: "flask-alb" 6 | spec: 7 | selector: 8 | app: "flask-deploy-alb-green" 9 | type: NodePort 10 | 
ports: 11 | - name: web 12 | port: 8080 13 | targetPort: 5000 14 | --- 15 | apiVersion: apps/v1 16 | kind: Deployment 17 | metadata: 18 | labels: 19 | run: "flask" 20 | name: "flask-deploy-alb-green" 21 | namespace: "flask-alb" 22 | spec: 23 | selector: 24 | matchLabels: 25 | app: "flask-deploy-alb-green" 26 | replicas: 1 27 | template: 28 | metadata: 29 | labels: 30 | app: "flask-deploy-alb-green" 31 | spec: 32 | containers: 33 | - name: "flask" 34 | image: nikunjv/flask-image:green 35 | ports: 36 | - containerPort: 5000 37 | # command: 38 | # - "sh" 39 | # - "-c" 40 | # - "yum install -y python3-pip && tail -f /var/log/yum.log " 41 | resources: 42 | limits: 43 | memory: "500Mi" 44 | cpu: "0.25" 45 | -------------------------------------------------------------------------------- /container-typescript/eks-blue-green-cicd/flask-docker-app/k8s/flaskALBIngress_query.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: extensions/v1beta1 2 | kind: Ingress 3 | metadata: 4 | name: alb-ingress 5 | namespace: "flask-alb" 6 | annotations: 7 | kubernetes.io/ingress.class: alb 8 | alb.ingress.kubernetes.io/scheme: internet-facing 9 | alb.ingress.kubernetes.io/subnets: public-subnets 10 | alb.ingress.kubernetes.io/security-groups: sec-grp 11 | alb.ingress.kubernetes.io/conditions.flask-svc-alb-green: '[{"Field":"query-string","QueryStringConfig":{"Values":[{"Key":"group","Value":"green"}]}}]' 12 | alb.ingress.kubernetes.io/conditions.forward-multiple-tg: '[{"Field":"query-string","QueryStringConfig":{"Values":[{"Key":"group","Value":"blue"}]}}]' 13 | alb.ingress.kubernetes.io/actions.forward-multiple-tg: '{"Type":"forward","ForwardConfig":{"TargetGroups":[{"ServiceName":"flask-svc-alb-blue","ServicePort":"80","Weight":100},{"ServiceName":"flask-svc-alb-green","ServicePort":"8080","Weight":0}]}}' 14 | 15 | labels: 16 | app: flask-ingress 17 | spec: 18 | rules: 19 | - http: 20 | paths: 21 | - backend: 22 | serviceName: flask-svc-alb-green 23 | servicePort: 8080 24 | - backend: 25 | serviceName: forward-multiple-tg 26 | servicePort: use-annotation 27 | - path: /* 28 | backend: 29 | serviceName: flask-svc-alb-blue 30 | servicePort: 80 31 | -------------------------------------------------------------------------------- /container-typescript/eks-blue-green-cicd/flask-docker-app/k8s/flaskALBIngress_query2.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: extensions/v1beta1 2 | kind: Ingress 3 | metadata: 4 | name: alb-ingress 5 | namespace: "flask-alb" 6 | annotations: 7 | kubernetes.io/ingress.class: alb 8 | alb.ingress.kubernetes.io/scheme: internet-facing 9 | alb.ingress.kubernetes.io/subnets: public-subnets 10 | alb.ingress.kubernetes.io/security-groups: sec-grp 11 | alb.ingress.kubernetes.io/conditions.flask-svc-alb-green: '[{"Field":"query-string","QueryStringConfig":{"Values":[{"Key":"group","Value":"green"}]}}]' 12 | alb.ingress.kubernetes.io/conditions.forward-multiple-tg: '[{"Field":"query-string","QueryStringConfig":{"Values":[{"Key":"group","Value":"blue"}]}}]' 13 | alb.ingress.kubernetes.io/actions.forward-multiple-tg: '{"Type":"forward","ForwardConfig":{"TargetGroups":[{"ServiceName":"flask-svc-alb-blue","ServicePort":"80","Weight":90},{"ServiceName":"flask-svc-alb-green","ServicePort":"8080","Weight":10}]}}' 14 | 15 | labels: 16 | app: flask-ingress 17 | spec: 18 | rules: 19 | - http: 20 | paths: 21 | - backend: 22 | serviceName: flask-svc-alb-green 23 | servicePort: 8080 24 | - backend: 25 
| serviceName: forward-multiple-tg 26 | servicePort: use-annotation 27 | - path: /* 28 | backend: 29 | serviceName: flask-svc-alb-blue 30 | servicePort: 80 31 | -------------------------------------------------------------------------------- /container-typescript/eks-blue-green-cicd/flask-docker-app/k8s/flaskBlue.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: v1 2 | kind: Service 3 | metadata: 4 | name: flask-svc-blue 5 | spec: 6 | selector: 7 | app: flask-deploy-blue 8 | ports: 9 | - name: web 10 | port: 80 11 | targetPort: 5000 12 | type: LoadBalancer 13 | --- 14 | apiVersion: apps/v1beta1 15 | kind: Deployment 16 | metadata: 17 | labels: 18 | run: flask 19 | name: flask-deploy-blue 20 | spec: 21 | replicas: 1 22 | template: 23 | metadata: 24 | labels: 25 | app: flask-deploy-blue 26 | spec: 27 | containers: 28 | - name: flask 29 | image: nikunjv/flask-image:blue 30 | ports: 31 | - containerPort: 5000 32 | # command: 33 | # - "sh" 34 | # - "-c" 35 | # - "yum install -y python3-pip && tail -f /var/log/yum.log " 36 | resources: 37 | limits: 38 | memory: "500Mi" 39 | cpu: "0.25" 40 | -------------------------------------------------------------------------------- /container-typescript/eks-blue-green-cicd/flask-docker-app/k8s/flaskGreen.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: v1 2 | kind: Service 3 | metadata: 4 | name: flask-svc-green 5 | spec: 6 | selector: 7 | app: flask-deploy-green 8 | ports: 9 | - name: web 10 | port: 8080 11 | targetPort: 5000 12 | type: LoadBalancer 13 | --- 14 | apiVersion: apps/v1beta1 15 | kind: Deployment 16 | metadata: 17 | labels: 18 | run: flask 19 | name: flask-deploy-green 20 | spec: 21 | replicas: 1 22 | template: 23 | metadata: 24 | labels: 25 | app: flask-deploy-green 26 | spec: 27 | containers: 28 | - name: flask 29 | image: nikunjv/flask-image:blue 30 | ports: 31 | - containerPort: 5000 32 | # command: 33 | # - "sh" 34 | # - "-c" 35 | # - "yum install -y python3-pip && tail -f /var/log/yum.log " 36 | resources: 37 | limits: 38 | memory: "500Mi" 39 | cpu: "0.25" 40 | -------------------------------------------------------------------------------- /container-typescript/eks-blue-green-cicd/flask-docker-app/k8s/setup.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | set -x 4 | 5 | #Setup Env Vars 6 | export REGION=$1 7 | export NODE_ROLE_NAME=$2 8 | export CLUSTER_NAME=$3 9 | 10 | export ALB_POLICY_NAME=alb-ingress-controller 11 | policyExists=$(aws iam list-policies | jq '.Policies[].PolicyName' | grep alb-ingress-controller | tr -d '["\r\n]') 12 | if [[ "$policyExists" != "alb-ingress-controller" ]]; then 13 | echo "Policy does not exist, creating..." 14 | export ALB_POLICY_ARN=$(aws iam create-policy --region=$REGION --policy-name $ALB_POLICY_NAME --policy-document "https://raw.githubusercontent.com/kubernetes-sigs/aws-alb-ingress-controller/master/docs/examples/iam-policy.json" --query "Policy.Arn" | sed 's/"//g') 15 | aws iam attach-role-policy --region=$REGION --role-name=$NODE_ROLE_NAME --policy-arn=$ALB_POLICY_ARN 16 | fi 17 | 18 | #Create Ingress Controller 19 | if [ ! 
-f alb-ingress-controller.yaml ]; then 20 | wget https://raw.githubusercontent.com/kubernetes-sigs/aws-alb-ingress-controller/v1.1.6/docs/examples/alb-ingress-controller.yaml 21 | fi 22 | sed -i "s/devCluster/$CLUSTER_NAME/g" alb-ingress-controller.yaml 23 | sed -i "s/# - --cluster-name/- --cluster-name/g" alb-ingress-controller.yaml 24 | kubectl apply -f https://raw.githubusercontent.com/kubernetes-sigs/aws-alb-ingress-controller/v1.1.6/docs/examples/rbac-role.yaml 25 | kubectl apply -f alb-ingress-controller.yaml 26 | 27 | #Check 28 | kubectl get pods -n kube-system 29 | #kubectl logs -n kube-system $(kubectl get po -n kube-system | egrep -o "alb-ingress[a-zA-Z0-9-]+") 30 | 31 | #Attach IAM policy to Worker Node Role 32 | if [ ! -f iam-policy.json ]; then 33 | curl -O https://raw.githubusercontent.com/kubernetes-sigs/aws-alb-ingress-controller/master/docs/examples/iam-policy.json 34 | fi 35 | aws iam put-role-policy --role-name $NODE_ROLE_NAME --policy-name elb-policy --policy-document file://iam-policy.json 36 | 37 | #Instantiate Blue and Green PODS 38 | kubectl apply -f flask-ALB-namespace.yaml 39 | kubectl apply -f flaskALBBlue.yaml 40 | kubectl apply -f flaskALBGreen.yaml 41 | 42 | #Check 43 | kubectl get deploy -n flask-alb 44 | kubectl get svc -n flask-alb 45 | kubectl get pods -n flask-alb 46 | 47 | #Update Ingress Resource file and spawn ALB 48 | sg=$(aws ec2 describe-security-groups --filters Name=tag:aws:cloudformation:stack-name,Values=CdkStackALBEksBg | jq '.SecurityGroups[0].GroupId' | tr -d '["]') 49 | vpcid=$(aws ec2 describe-security-groups --filters Name=tag:aws:cloudformation:stack-name,Values=CdkStackALBEksBg | jq '.SecurityGroups[0].VpcId' | tr -d '["]') 50 | subnets=$(aws ec2 describe-subnets --filters "Name=vpc-id,Values=$vpcid" "Name=tag:aws-cdk:subnet-name,Values=Public" | jq '.Subnets[0].SubnetId' | tr -d '["]') 51 | subnets="$subnets, $(aws ec2 describe-subnets --filters "Name=vpc-id,Values=$vpcid" "Name=tag:aws-cdk:subnet-name,Values=Public" | jq '.Subnets[1].SubnetId' | tr -d '["]')" 52 | subnets="$subnets, $(aws ec2 describe-subnets --filters "Name=vpc-id,Values=$vpcid" "Name=tag:aws-cdk:subnet-name,Values=Public" | jq '.Subnets[2].SubnetId' | tr -d '["]')" 53 | 54 | sed -i "s/public-subnets/$subnets/g" flaskALBIngress_query.yaml 55 | sed -i "s/public-subnets/$subnets/g" flaskALBIngress_query2.yaml 56 | sed -i "s/sec-grp/$sg/g" flaskALBIngress_query.yaml 57 | sed -i "s/sec-grp/$sg/g" flaskALBIngress_query2.yaml 58 | kubectl apply -f flaskALBIngress_query.yaml 59 | -------------------------------------------------------------------------------- /container-typescript/eks-blue-green-cicd/flask-docker-app/pyvenv.cfg: -------------------------------------------------------------------------------- 1 | home = /Users/pahud/homebrew/bin 2 | include-system-site-packages = false 3 | version = 3.7.4 4 | -------------------------------------------------------------------------------- /container-typescript/eks-blue-green-cicd/flask-docker-app/requirements.txt: -------------------------------------------------------------------------------- 1 | Click==7.0 2 | Flask==1.1.1 3 | itsdangerous==1.1.0 4 | Jinja2==2.10.1 5 | MarkupSafe==1.1.1 6 | Werkzeug==0.15.6 7 | -------------------------------------------------------------------------------- /container-typescript/eks-blue-green-cicd/flask-docker-app/templates/hello.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | Simple Flask App 8 | 9 | 10 | 11 | 12 | 13 | 14 
| 15 | 16 | 17 | 18 | {{ name }} 19 | Congratulations 20 | Your Flask application is now running on a container in {{ platform }} 21 | The container is running Flask version {{ flask_version }} and Python {{ python_version }}
22 | 23 | -------------------------------------------------------------------------------- /container-typescript/eks-blue-green-cicd/images/alb-dns.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AWS-Devops-Projects/eks-workshop/7573608bd2d4bde5f479b1e7e3f7b7569072d38a/container-typescript/eks-blue-green-cicd/images/alb-dns.png -------------------------------------------------------------------------------- /container-typescript/eks-blue-green-cicd/images/alb-tg-check1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AWS-Devops-Projects/eks-workshop/7573608bd2d4bde5f479b1e7e3f7b7569072d38a/container-typescript/eks-blue-green-cicd/images/alb-tg-check1.png -------------------------------------------------------------------------------- /container-typescript/eks-blue-green-cicd/images/alb-tg-check2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AWS-Devops-Projects/eks-workshop/7573608bd2d4bde5f479b1e7e3f7b7569072d38a/container-typescript/eks-blue-green-cicd/images/alb-tg-check2.png -------------------------------------------------------------------------------- /container-typescript/eks-blue-green-cicd/images/canary-lb.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AWS-Devops-Projects/eks-workshop/7573608bd2d4bde5f479b1e7e3f7b7569072d38a/container-typescript/eks-blue-green-cicd/images/canary-lb.png -------------------------------------------------------------------------------- /container-typescript/eks-blue-green-cicd/images/cfn-kubectl.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AWS-Devops-Projects/eks-workshop/7573608bd2d4bde5f479b1e7e3f7b7569072d38a/container-typescript/eks-blue-green-cicd/images/cfn-kubectl.png -------------------------------------------------------------------------------- /container-typescript/eks-blue-green-cicd/images/eks-bg-1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AWS-Devops-Projects/eks-workshop/7573608bd2d4bde5f479b1e7e3f7b7569072d38a/container-typescript/eks-blue-green-cicd/images/eks-bg-1.png -------------------------------------------------------------------------------- /container-typescript/eks-blue-green-cicd/images/eks-bg-2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AWS-Devops-Projects/eks-workshop/7573608bd2d4bde5f479b1e7e3f7b7569072d38a/container-typescript/eks-blue-green-cicd/images/eks-bg-2.png -------------------------------------------------------------------------------- /container-typescript/eks-blue-green-cicd/images/eks-canary.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AWS-Devops-Projects/eks-workshop/7573608bd2d4bde5f479b1e7e3f7b7569072d38a/container-typescript/eks-blue-green-cicd/images/eks-canary.png -------------------------------------------------------------------------------- /container-typescript/eks-blue-green-cicd/images/eks-cicd-codebuild.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/AWS-Devops-Projects/eks-workshop/7573608bd2d4bde5f479b1e7e3f7b7569072d38a/container-typescript/eks-blue-green-cicd/images/eks-cicd-codebuild.png -------------------------------------------------------------------------------- /container-typescript/eks-blue-green-cicd/images/flask01.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AWS-Devops-Projects/eks-workshop/7573608bd2d4bde5f479b1e7e3f7b7569072d38a/container-typescript/eks-blue-green-cicd/images/flask01.png -------------------------------------------------------------------------------- /container-typescript/eks-blue-green-cicd/images/flask02.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AWS-Devops-Projects/eks-workshop/7573608bd2d4bde5f479b1e7e3f7b7569072d38a/container-typescript/eks-blue-green-cicd/images/flask02.png -------------------------------------------------------------------------------- /container-typescript/eks-blue-green-cicd/images/stage12-green.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AWS-Devops-Projects/eks-workshop/7573608bd2d4bde5f479b1e7e3f7b7569072d38a/container-typescript/eks-blue-green-cicd/images/stage12-green.png -------------------------------------------------------------------------------- /container-typescript/eks-blue-green-cicd/images/stage34-green.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AWS-Devops-Projects/eks-workshop/7573608bd2d4bde5f479b1e7e3f7b7569072d38a/container-typescript/eks-blue-green-cicd/images/stage34-green.png -------------------------------------------------------------------------------- /container-typescript/eks-blue-green-cicd/images/web-blue-inv.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AWS-Devops-Projects/eks-workshop/7573608bd2d4bde5f479b1e7e3f7b7569072d38a/container-typescript/eks-blue-green-cicd/images/web-blue-inv.png -------------------------------------------------------------------------------- /container-typescript/eks-blue-green-cicd/images/web-blue.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AWS-Devops-Projects/eks-workshop/7573608bd2d4bde5f479b1e7e3f7b7569072d38a/container-typescript/eks-blue-green-cicd/images/web-blue.png -------------------------------------------------------------------------------- /container-typescript/eks-blue-green-cicd/images/web-default.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AWS-Devops-Projects/eks-workshop/7573608bd2d4bde5f479b1e7e3f7b7569072d38a/container-typescript/eks-blue-green-cicd/images/web-default.png -------------------------------------------------------------------------------- /container-typescript/eks-blue-green-cicd/images/web-green-inv.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AWS-Devops-Projects/eks-workshop/7573608bd2d4bde5f479b1e7e3f7b7569072d38a/container-typescript/eks-blue-green-cicd/images/web-green-inv.png -------------------------------------------------------------------------------- /container-typescript/eks-blue-green-cicd/images/web-green.png: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/AWS-Devops-Projects/eks-workshop/7573608bd2d4bde5f479b1e7e3f7b7569072d38a/container-typescript/eks-blue-green-cicd/images/web-green.png -------------------------------------------------------------------------------- /container-typescript/twitter4u-docker/.env.sh: -------------------------------------------------------------------------------- 1 | # export AWS_ACCOUNT_ID=$(curl -s 169.254.169.254/latest/dynamic/instance-identity/document | jq -r .accountId) 2 | # export AWS_REGION=$(curl -s 169.254.169.254/latest/dynamic/instance-identity/document | jq -r .region) 3 | # export AWS_DEFAULT_REGION=ap-southeast-2 4 | # export AWS_ACCESS_KEY_ID= 5 | # export AWS_SECRET_ACCESS_KEY= 6 | # export AWS_SESSION_TOKEN="" #if using AWS SSO 7 | 8 | # export DOCKER_REGISTRY_EMAIL= 9 | # export DOCKER_REGISTRY_USERNAME= 10 | # export DOCKER_REGISTRY_NAMESPACE= 11 | # export DOCKER_REGISTRY_PASSWORD= 12 | export DOCKER_REGISTRY_EMAIL=nnthanh101@gmail.com 13 | export DOCKER_REGISTRY_USERNAME=nnthanh101 14 | export DOCKER_REGISTRY_NAMESPACE=nnthanh101 15 | export DOCKER_REGISTRY_PASSWORD=XXXX 16 | 17 | export DOCKER_REPOSITORY=twitter4u 18 | 19 | export TWITTER_CONSUMER_KEY= 20 | export TWITTER_CONSUMER_SECRET= 21 | export TWITTER_ACCESS_TOKEN= 22 | export TWITTER_ACCESS_TOKEN_SECRET= 23 | -------------------------------------------------------------------------------- /container-typescript/twitter4u-docker/.gitignore: -------------------------------------------------------------------------------- 1 | # Logs 2 | logs 3 | *.log 4 | npm-debug.log* 5 | yarn-debug.log* 6 | yarn-error.log* 7 | lerna-debug.log* 8 | 9 | # Diagnostic reports (https://nodejs.org/api/report.html) 10 | report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json 11 | 12 | # Runtime data 13 | pids 14 | *.pid 15 | *.seed 16 | *.pid.lock 17 | 18 | # Directory for instrumented libs generated by jscoverage/JSCover 19 | lib-cov 20 | 21 | # Coverage directory used by tools like istanbul 22 | coverage 23 | *.lcov 24 | 25 | # nyc test coverage 26 | .nyc_output 27 | 28 | # Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files) 29 | .grunt 30 | 31 | # Bower dependency directory (https://bower.io/) 32 | bower_components 33 | 34 | # node-waf configuration 35 | .lock-wscript 36 | 37 | # Compiled binary addons (https://nodejs.org/api/addons.html) 38 | build/Release 39 | 40 | # Dependency directories 41 | node_modules/ 42 | jspm_packages/ 43 | 44 | # TypeScript v1 declaration files 45 | typings/ 46 | 47 | # TypeScript cache 48 | *.tsbuildinfo 49 | 50 | # Optional npm cache directory 51 | .npm 52 | 53 | # Optional eslint cache 54 | .eslintcache 55 | 56 | # Microbundle cache 57 | .rpt2_cache/ 58 | .rts2_cache_cjs/ 59 | .rts2_cache_es/ 60 | .rts2_cache_umd/ 61 | 62 | # Optional REPL history 63 | .node_repl_history 64 | 65 | # Output of 'npm pack' 66 | *.tgz 67 | 68 | # Yarn Integrity file 69 | .yarn-integrity 70 | 71 | # dotenv environment variables file 72 | .env 73 | .env.test 74 | 75 | # parcel-bundler cache (https://parceljs.org/) 76 | .cache 77 | 78 | # Next.js build output 79 | .next 80 | 81 | # Nuxt.js build / generate output 82 | .nuxt 83 | dist 84 | 85 | # Gatsby files 86 | .cache/ 87 | # Comment in the public line in if your project uses Gatsby and *not* Next.js 88 | # https://nextjs.org/blog/next-9-1#public-directory-support 89 | # public 90 | 91 | # vuepress build output 92 | .vuepress/dist 93 | 94 | # Serverless 
directories 95 | .serverless/ 96 | 97 | # FuseBox cache 98 | .fusebox/ 99 | 100 | # DynamoDB Local files 101 | .dynamodb/ 102 | 103 | # TernJS port file 104 | .tern-port 105 | -------------------------------------------------------------------------------- /container-typescript/twitter4u-docker/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM node:12.16.1-alpine3.11 as builder 2 | 3 | WORKDIR /home/node 4 | 5 | COPY ["src/package.json", "./"] 6 | 7 | RUN npm install 8 | 9 | FROM node:12.16.1-alpine3.11 10 | 11 | USER node 12 | 13 | WORKDIR /home/node 14 | 15 | COPY --from=builder /home/node/node_modules/ ./node_modules 16 | 17 | COPY src/ ./ 18 | 19 | ENV TWITTER_TOPICS=AWS,EC2,S3,Workspaces,Covid \ 20 | TWITTER_LANGUAGES=en,vi \ 21 | TWITTER_FILTER_LEVEL=none \ 22 | DESTINATION=stdout 23 | 24 | CMD [ "node", "./twitter_stream_producer_app.js" ] -------------------------------------------------------------------------------- /container-typescript/twitter4u-docker/README.md: -------------------------------------------------------------------------------- 1 | # Twitter Streaming Reader 2 | 3 | ## Configuring: `.env.sh` 4 | 5 | * [ ] DOCKER_REGISTRY_PASSWORD 6 | * [ ] TWITTER_ACCESS_TOKEN= Twitter access_token 7 | * [ ] TWITTER_ACCESS_TOKEN_SECRET= Twitter access_token_secret 8 | * [ ] TWITTER_CONSUMER_KEY= Twitter consumer_key 9 | * [ ] TWITTER_CONSUMER_SECRET= Twitter consumer_secret 10 | 11 | * [ ] TWITTER_TOPICS=AWS,EC2,S3,Workspaces,Covid 12 | * [ ] TWITTER_LANGUAGES=en,vi 13 | * [ ] TWITTER_FILTER_LEVEL=none 14 | * [ ] DESTINATION=kinesis:,firehose:,stdout 15 | 16 | > 🚀 `./deploy.sh` 17 | 18 | ## How to use this Docker Image 19 | 20 | ```bash 21 | $ docker run \ 22 | -e TWITTER_ACCESS_TOKEN=xxxxx \ 23 | -e TWITTER_ACCESS_TOKEN_SECRET=xxxxx \ 24 | -e TWITTER_CONSUMER_KEY=xxxxx \ 25 | -e TWITTER_CONSUMER_SECRET=xxxxx \ 26 | nnthanh101/twitter4u 27 | ``` 28 | 29 | ### about Twitter Authentication 30 | 31 | - `-e TWITTER_CONSUMER_KEY=...` 32 | - `-e TWITTER_CONSUMER_SECRET=...` 33 | - `-e TWITTER_ACCESS_TOKEN=...` 34 | - `-e TWITTER_ACCESS_TOKEN_SECRET=...` 35 | 36 | ### Other options 37 | 38 | - `-e TWITTER_TOPICS=AWS,EC2,S3,Workspaces,Covid` 39 | - `-e TWITTER_LANGUAGES=en,vi` 40 | - `-e TWITTER_FILTER_LEVEL=none` # default: none 41 | - `-e DESTINATION=kinesis:,firehose:,stdout` # default: stdout 42 | -------------------------------------------------------------------------------- /container-typescript/twitter4u-docker/Twitter4U/twitter_stream_producer.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | const AWS = require('./node_modules/aws-sdk'); 4 | const config = require('./config'); 5 | const twitter_config = require('./twitter_reader_config.js.js'); 6 | const Twit = require('twit'); 7 | const util = require('util'); 8 | const logger = require('./util/logger'); 9 | 10 | function twitterStreamProducer() { 11 | const log = logger().getLogger('producer'); 12 | const waitBetweenPutRecordsCallsInMilliseconds = config.waitBetweenPutRecordsCallsInMilliseconds; 13 | const T = new Twit(twitter_config.twitter) 14 | 15 | function _sendToFirehose() { 16 | const kinesis = new AWS.Kinesis({apiVersion: '2013-12-02'}); 17 | const firehose = new AWS.Firehose({ apiVersion: '2015-08-04' }); 18 | const dest_config = twitter_config.dest; 19 | 20 | const twitterParams = { 21 | track: twitter_config.topics, 22 | language: twitter_config.languages, 23 | filter_level: twitter_config.filter_level, 
24 | stall_warnings: true 25 | } 26 | 27 | const stream = T.stream('statuses/filter', twitterParams); 28 | 29 | log.info('start streaming...') 30 | stream.on('tweet', function (tweet) { 31 | var tweetString = JSON.stringify(tweet) 32 | 33 | for (var i = 0; i < dest_config.length; i++) { 34 | var dest = dest_config[i]; 35 | if (dest === 'stdout') { 36 | console.log(tweetString) 37 | } else if (dest.startsWith('kinesis:')) { 38 | const stream_name = dest.split(':')[1]; 39 | const kinesisParams = { 40 | StreamName: stream_name, 41 | PartitionKey: tweet.id_str, 42 | Data: tweetString +'\n', 43 | }; 44 | kinesis.putRecord(kinesisParams, function (err, data) { 45 | if (err) { 46 | log.error(err); 47 | } 48 | }); 49 | } else if (dest.startsWith('firehose:')) { 50 | const stream_name = dest.split(':')[1]; 51 | const firehoseParams = { 52 | DeliveryStreamName: stream_name, 53 | Record: { 54 | Data: tweetString + '\n' 55 | } 56 | }; 57 | firehose.putRecord(firehoseParams, function (err, data) { 58 | if (err) { 59 | log.error(err); 60 | } 61 | }); 62 | } else { 63 | log.warn('This destination is not supported. ' + dest); 64 | } 65 | } 66 | } 67 | ); 68 | } 69 | 70 | return { 71 | run: function () { 72 | log.info(util.format('Configured wait between consecutive PutRecords call in milliseconds: %d', 73 | waitBetweenPutRecordsCallsInMilliseconds)); 74 | _sendToFirehose(); 75 | } 76 | } 77 | } 78 | 79 | module.exports = twitterStreamProducer; 80 | -------------------------------------------------------------------------------- /container-typescript/twitter4u-docker/deploy.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | set -e 3 | 4 | source .env.sh 5 | 6 | started_time=$(date '+%d/%m/%Y %H:%M:%S') 7 | echo 8 | echo "#########################################################" 9 | echo "Docker build & push >> starting at ${started_time}" 10 | echo "#########################################################" 11 | echo 12 | docker login --username ${DOCKER_REGISTRY_USERNAME} --password ${DOCKER_REGISTRY_PASSWORD} 13 | docker build -t ${DOCKER_REPOSITORY} . 
14 | docker tag ${DOCKER_REPOSITORY} ${DOCKER_REGISTRY_NAMESPACE}/${DOCKER_REPOSITORY} 15 | docker push ${DOCKER_REGISTRY_NAMESPACE}/${DOCKER_REPOSITORY} 16 | ended_time=$(date '+%d/%m/%Y %H:%M:%S') 17 | echo 18 | echo "#########################################################" 19 | echo "Docker build & push >> finished at ${ended_time}" 20 | echo "#########################################################" 21 | echo 22 | 23 | docker run -it --rm \ 24 | -e TWITTER_CONSUMER_KEY=${TWITTER_CONSUMER_KEY} \ 25 | -e TWITTER_CONSUMER_SECRET=${TWITTER_CONSUMER_SECRET} \ 26 | -e TWITTER_ACCESS_TOKEN=${TWITTER_ACCESS_TOKEN} \ 27 | -e TWITTER_ACCESS_TOKEN_SECRET=${TWITTER_ACCESS_TOKEN_SECRET} \ 28 | -e TWITTER_TOPICS=AWS,EC2,S3,Workspaces,Covid \ 29 | -e TWITTER_LANGUAGES=en,vi \ 30 | -e TWITTER_FILTER_LEVEL=none \ 31 | nnthanh101/${DOCKER_REPOSITORY} 32 | 33 | ## TODO 34 | # -e DESTINATION=kinesis:,firehose:,stdout \ -------------------------------------------------------------------------------- /container-typescript/twitter4u-docker/src/config.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | var config = module.exports = { 4 | waitBetweenDescribeCallsInSeconds: 2, 5 | recordsToWritePerBatch: 100, 6 | waitBetweenPutRecordsCallsInMilliseconds: 50, 7 | //region: 'ap-southeast-2' 8 | }; 9 | -------------------------------------------------------------------------------- /container-typescript/twitter4u-docker/src/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "twitter4u", 3 | "version": "0.1.0", 4 | "author": "Thanh Nguyen", 5 | "private": true, 6 | "scripts": {}, 7 | "dependencies": { 8 | "aws-sdk": "2.x", 9 | "fluent-logger": "^3.4.1", 10 | "log4js": "~0.6.22", 11 | "request": "^2.81.0", 12 | "twit": "*" 13 | } 14 | } 15 | -------------------------------------------------------------------------------- /container-typescript/twitter4u-docker/src/twitter_reader_config.js: -------------------------------------------------------------------------------- 1 | const twitter_config = module.exports = { 2 | twitter: { 3 | consumer_key: process.env.TWITTER_CONSUMER_KEY, 4 | consumer_secret: process.env.TWITTER_CONSUMER_SECRET, 5 | access_token: process.env.TWITTER_ACCESS_TOKEN, 6 | access_token_secret: process.env.TWITTER_ACCESS_TOKEN_SECRET 7 | }, 8 | topics: process.env.TWITTER_TOPICS.split(','), 9 | languages: process.env.TWITTER_LANGUAGES.split(','), 10 | filter_level: process.env.TWITTER_FILTER_LEVEL, 11 | dest: process.env.DESTINATION.split(',') 12 | } 13 | -------------------------------------------------------------------------------- /container-typescript/twitter4u-docker/src/twitter_stream_producer.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | const AWS = require('./node_modules/aws-sdk'); 4 | const config = require('./config'); 5 | const twitter_config = require('./twitter_reader_config.js'); 6 | const Twit = require('twit'); 7 | const util = require('util'); 8 | const logger = require('./util/logger'); 9 | 10 | function twitterStreamProducer() { 11 | const log = logger().getLogger('producer'); 12 | const waitBetweenPutRecordsCallsInMilliseconds = config.waitBetweenPutRecordsCallsInMilliseconds; 13 | const T = new Twit(twitter_config.twitter) 14 | 15 | function _sendToFirehose() { 16 | const kinesis = new AWS.Kinesis({apiVersion: '2013-12-02'}); 17 | const firehose = new AWS.Firehose({ apiVersion: '2015-08-04' 
}); 18 | const dest_config = twitter_config.dest; 19 | 20 | const twitterParams = { 21 | track: twitter_config.topics, 22 | language: twitter_config.languages, 23 | filter_level: twitter_config.filter_level, 24 | stall_warnings: true 25 | } 26 | 27 | const stream = T.stream('statuses/filter', twitterParams); 28 | 29 | log.info('start streaming...') 30 | stream.on('tweet', function (tweet) { 31 | var tweetString = JSON.stringify(tweet) 32 | 33 | for (var i = 0; i < dest_config.length; i++) { 34 | var dest = dest_config[i]; 35 | if (dest === 'stdout') { 36 | console.log(tweetString) 37 | } else if (dest.startsWith('kinesis:')) { 38 | const stream_name = dest.split(':')[1]; 39 | const kinesisParams = { 40 | StreamName: stream_name, 41 | PartitionKey: tweet.id_str, 42 | Data: tweetString +'\n', 43 | }; 44 | kinesis.putRecord(kinesisParams, function (err, data) { 45 | if (err) { 46 | log.error(err); 47 | } 48 | }); 49 | } else if (dest.startsWith('firehose:')) { 50 | const stream_name = dest.split(':')[1]; 51 | const firehoseParams = { 52 | DeliveryStreamName: stream_name, 53 | Record: { 54 | Data: tweetString + '\n' 55 | } 56 | }; 57 | firehose.putRecord(firehoseParams, function (err, data) { 58 | if (err) { 59 | log.error(err); 60 | } 61 | }); 62 | } else { 63 | log.warn('This destination is not supported. ' + dest); 64 | } 65 | } 66 | } 67 | ); 68 | } 69 | 70 | return { 71 | run: function () { 72 | log.info(util.format('Configured wait between consecutive PutRecords call in milliseconds: %d', 73 | waitBetweenPutRecordsCallsInMilliseconds)); 74 | _sendToFirehose(); 75 | } 76 | } 77 | } 78 | 79 | module.exports = twitterStreamProducer; 80 | -------------------------------------------------------------------------------- /container-typescript/twitter4u-docker/src/twitter_stream_producer_app.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | var producer = require('./twitter_stream_producer'); 4 | 5 | producer().run(); 6 | -------------------------------------------------------------------------------- /container-typescript/twitter4u-docker/src/util/logger.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | const log4js = require('./node_modules/log4js'); 4 | 5 | function logger() { 6 | const config = { 7 | "appenders": [ 8 | { 9 | "type": "console", 10 | "layout": { 11 | "type": "pattern", 12 | "pattern": "%d (PID: %x{pid}) %p %c - %m", 13 | "tokens": { 14 | "pid" : function() { return process.pid; } 15 | } 16 | } 17 | } 18 | ] 19 | }; 20 | 21 | log4js.configure(config, {}); 22 | 23 | return { 24 | getLogger: function(category) { 25 | return log4js.getLogger(category); 26 | } 27 | }; 28 | } 29 | 30 | module.exports = logger; 31 | -------------------------------------------------------------------------------- /container-typescript/twitter4u-fargate/.gitignore: -------------------------------------------------------------------------------- 1 | *.js 2 | !jest.config.js 3 | *.d.ts 4 | node_modules 5 | 6 | # CDK asset staging directory 7 | .cdk.staging 8 | cdk.out 9 | 10 | # Parcel build directories 11 | .cache 12 | .build 13 | -------------------------------------------------------------------------------- /container-typescript/twitter4u-fargate/.npmignore: -------------------------------------------------------------------------------- 1 | *.ts 2 | !*.d.ts 3 | 4 | # CDK asset staging directory 5 | .cdk.staging 6 | cdk.out 7 | 
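One caveat with the run command in deploy.sh above: twitter_reader_config.js calls process.env.DESTINATION.split(',') unconditionally, so leaving DESTINATION as a TODO makes the container fail at startup with a TypeError. A minimal sketch of an invocation that sets it, reusing the variables deploy.sh already exports (the kinesis/firehose stream names are placeholders, not resources defined in this repo):

```bash
# Simplest smoke test: print tweets to stdout only
docker run -it --rm \
  -e TWITTER_CONSUMER_KEY=${TWITTER_CONSUMER_KEY} \
  -e TWITTER_CONSUMER_SECRET=${TWITTER_CONSUMER_SECRET} \
  -e TWITTER_ACCESS_TOKEN=${TWITTER_ACCESS_TOKEN} \
  -e TWITTER_ACCESS_TOKEN_SECRET=${TWITTER_ACCESS_TOKEN_SECRET} \
  -e TWITTER_TOPICS=AWS,EC2,S3 \
  -e TWITTER_LANGUAGES=en \
  -e TWITTER_FILTER_LEVEL=none \
  -e DESTINATION=stdout \
  nnthanh101/${DOCKER_REPOSITORY}

# Fan the same stream out to Kinesis and/or Firehose as well (placeholder stream names):
#   -e DESTINATION=stdout,kinesis:twitter-stream,firehose:twitter-firehose
```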
-------------------------------------------------------------------------------- /container-typescript/twitter4u-fargate/bin/social_reader.ts: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env node 2 | import 'source-map-support/register'; 3 | import * as cdk from '@aws-cdk/core'; 4 | import { SocialReaderStack } from '../lib/social_reader-stack'; 5 | 6 | const app = new cdk.App(); 7 | new SocialReaderStack(app, 'SocialReaderStack'); 8 | -------------------------------------------------------------------------------- /container-typescript/twitter4u-fargate/cdk.context.json: -------------------------------------------------------------------------------- 1 | { 2 | "@aws-cdk/core:enableStackNameDuplicates": "true", 3 | "aws-cdk:enableDiffNoFail": "true" 4 | } 5 | -------------------------------------------------------------------------------- /container-typescript/twitter4u-fargate/cdk.json: -------------------------------------------------------------------------------- 1 | { 2 | "app": "npx ts-node bin/social_reader.ts" 3 | } 4 | -------------------------------------------------------------------------------- /container-typescript/twitter4u-fargate/jest.config.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | roots: ['/test'], 3 | testMatch: ['**/*.test.ts'], 4 | transform: { 5 | '^.+\\.tsx?$': 'ts-jest' 6 | } 7 | }; 8 | -------------------------------------------------------------------------------- /container-typescript/twitter4u-fargate/lib/social_reader-stack.ts: -------------------------------------------------------------------------------- 1 | import * as cdk from '@aws-cdk/core'; 2 | import ec2 = require("@aws-cdk/aws-ec2"); 3 | import ecs = require("@aws-cdk/aws-ecs"); 4 | import ecs_patterns = require("@aws-cdk/aws-ecs-patterns"); 5 | 6 | 7 | export class SocialReaderStack extends cdk.Stack { 8 | constructor(scope: cdk.Construct, id: string, props?: cdk.StackProps) { 9 | super(scope, id, props); 10 | 11 | // The code that defines your stack goes here 12 | const vpc = new ec2.Vpc(this, "TwitterFargateVPC", { 13 | maxAzs: 2 // Default is all AZs in region 14 | }); 15 | 16 | const cluster = new ecs.Cluster(this, "TwitterFargateCluster", { 17 | vpc: vpc 18 | }); 19 | 20 | // Create a load-balanced Fargate service and make it public 21 | new ecs_patterns.ApplicationLoadBalancedFargateService(this, "Twitter4uFargateService", { 22 | cluster: cluster, // Required 23 | cpu: 512, // Default is 256 24 | desiredCount: 2, // Default is 1 25 | // taskImageOptions: { image: ecs.ContainerImage.fromRegistry("amazon/amazon-ecs-sample") }, 26 | taskImageOptions: { image: ecs.ContainerImage.fromRegistry("nnthanh101/twitter4u") }, 27 | memoryLimitMiB: 1024, // Default is 512 28 | publicLoadBalancer: true // Default is false 29 | }); 30 | 31 | } 32 | } 33 | -------------------------------------------------------------------------------- /container-typescript/twitter4u-fargate/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "social_reader", 3 | "version": "0.1.0", 4 | "bin": { 5 | "social_reader": "bin/social_reader.js" 6 | }, 7 | "scripts": { 8 | "build": "tsc", 9 | "watch": "tsc -w", 10 | "test": "jest", 11 | "cdk": "cdk" 12 | }, 13 | "devDependencies": { 14 | "@aws-cdk/assert": "1.31.0", 15 | "@types/jest": "^24.0.22", 16 | "@types/node": "10.17.5", 17 | "jest": "^24.9.0", 18 | "ts-jest": "^24.1.0", 19 | "aws-cdk": "1.31.0", 
20 | "ts-node": "^8.1.0", 21 | "typescript": "~3.7.2" 22 | }, 23 | "dependencies": { 24 | "@aws-cdk/aws-ec2": "^1.31.0", 25 | "@aws-cdk/aws-ecs": "^1.31.0", 26 | "@aws-cdk/aws-ecs-patterns": "^1.31.0", 27 | "@aws-cdk/core": "1.31.0", 28 | "source-map-support": "^0.5.16" 29 | } 30 | } 31 | -------------------------------------------------------------------------------- /container-typescript/twitter4u-fargate/test/social_reader.test.ts: -------------------------------------------------------------------------------- 1 | import { expect as expectCDK, matchTemplate, MatchStyle } from '@aws-cdk/assert'; 2 | import * as cdk from '@aws-cdk/core'; 3 | import SocialReader = require('../lib/social_reader-stack'); 4 | 5 | test('Empty Stack', () => { 6 | const app = new cdk.App(); 7 | // WHEN 8 | const stack = new SocialReader.SocialReaderStack(app, 'MyTestStack'); 9 | // THEN 10 | expectCDK(stack).to(matchTemplate({ 11 | "Resources": {} 12 | }, MatchStyle.EXACT)) 13 | }); 14 | -------------------------------------------------------------------------------- /container-typescript/twitter4u-fargate/tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | "target":"ES2018", 4 | "module": "commonjs", 5 | "lib": ["es2018"], 6 | "declaration": true, 7 | "strict": true, 8 | "noImplicitAny": true, 9 | "strictNullChecks": true, 10 | "noImplicitThis": true, 11 | "alwaysStrict": true, 12 | "noUnusedLocals": false, 13 | "noUnusedParameters": false, 14 | "noImplicitReturns": true, 15 | "noFallthroughCasesInSwitch": false, 16 | "inlineSourceMap": true, 17 | "inlineSources": true, 18 | "experimentalDecorators": true, 19 | "strictPropertyInitialization":false, 20 | "typeRoots": ["./node_modules/@types"] 21 | }, 22 | "exclude": ["cdk.out"] 23 | } 24 | -------------------------------------------------------------------------------- /devsecops/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM jenkins/jenkins:lts 2 | 3 | USER root 4 | RUN apt-get update && apt-get install -y ruby make 5 | USER jenkins 6 | 7 | EXPOSE 8080 8 | EXPOSE 50000 -------------------------------------------------------------------------------- /devsecops/Makefile: -------------------------------------------------------------------------------- 1 | APP_DIR ?=$(PWD) 2 | DOCKER_PATH ?= ./Dockerfile 3 | PROJECT_NAME ?= devsecops 4 | SERVICE_NAME ?= devsecopslb 5 | SERVICE_TYPE ?= 'Load Balanced Web Service' 6 | PROJECT_PORT ?= '8080' 7 | ENV_NAME ?= prod 8 | CHECK_DOCKER := $(shell docker --version) 9 | 10 | all: docker install-copilot init-app env deploy 11 | .PHONY: all 12 | 13 | install-copilot: 14 | @$(shell $(PWD)/install.sh) 15 | 16 | init-app: 17 | mkdir -p ${APP_DIR} 18 | cd ${APP_DIR} 19 | copilot init --app ${PROJECT_NAME} \ 20 | --svc ${SERVICE_NAME} \ 21 | --svc-type ${SERVICE_TYPE} \ 22 | --dockerfile ${DOCKER_PATH} \ 23 | --profile default \ 24 | --port ${PROJECT_PORT} \ 25 | 26 | docker: 27 | @echo ${CHECK_DOCKER} 28 | 29 | env: 30 | copilot env init --name ${ENV_NAME} --profile default --app ${PROJECT_NAME} 31 | 32 | deploy: 33 | copilot svc deploy --name ${SERVICE_NAME} --env ${ENV_NAME} 34 | 35 | clean: 36 | copilot env delete --name ${ENV_NAME} --profile default --yes 37 | copilot app delete --yes -------------------------------------------------------------------------------- /devsecops/copilot/.workspace: -------------------------------------------------------------------------------- 1 | application: 
devsecops 2 | -------------------------------------------------------------------------------- /devsecops/copilot/devsecopslb/manifest.yml: -------------------------------------------------------------------------------- 1 | # The manifest for the "devsecopslb" service. 2 | # Read the full specification for the "Load Balanced Web Service" type at: 3 | # https://github.com/aws/amazon-ecs-cli-v2/wiki/Manifests#load-balanced-web-svc 4 | 5 | # Your service name will be used in naming your resources like log groups, ECS services, etc. 6 | name: devsecopslb 7 | # The "architecture" of the service you're running. 8 | type: Load Balanced Web Service 9 | 10 | image: 11 | # Path to your service's Dockerfile. 12 | build: ./Dockerfile 13 | # Port exposed through your container to route traffic to it. 14 | port: 8080 15 | 16 | http: 17 | # Requests to this path will be forwarded to your service. 18 | # To match all requests you can use the "/" path. 19 | path: "devsecopslb" 20 | # You can specify a custom health check path. The default is "/" 21 | # healthcheck: '/' 22 | 23 | # Number of CPU units for the task. 24 | cpu: 256 25 | # Amount of memory in MiB used by the task. 26 | memory: 512 27 | # Number of tasks that should be running in your service. 28 | count: 1 29 | # Optional fields for more advanced use-cases. 30 | # 31 | #variables: # Pass environment variables as key value pairs. 32 | # LOG_LEVEL: info 33 | # 34 | #secrets: # Pass secrets from AWS Systems Manager (SSM) Parameter Store. 35 | # GITHUB_TOKEN: GITHUB_TOKEN # The key is the name of the environment variable, the value is the name of the SSM parameter. 36 | 37 | # You can override any of the values defined above by environment. 38 | #environments: 39 | # test: 40 | # count: 2 # Number of tasks to run for the "test" environment. 
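The Makefile above chains the individual Copilot steps together; `make all` is enough for a first deployment, or each phase can be run on its own. A short sketch, assuming Docker and a default AWS profile are already configured:

```bash
# One shot: check Docker, install copilot if missing, then init + env + deploy
make all

# ...or step through the same phases individually
make init-app     # copilot init --app devsecops --svc devsecopslb --port 8080
make env          # copilot env init --name prod
make deploy       # copilot svc deploy --name devsecopslb --env prod

# Tear the prod environment and the app down again
make clean
```

Once deployed, Jenkins is served behind the environment's load balancer under the /devsecopslb path configured in the manifest below.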
41 | -------------------------------------------------------------------------------- /devsecops/install.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | platform='unknown' 4 | uname=`uname -s` 5 | detect_copilot=`which copilot` 6 | if [[ "$uname" == 'Linux' ]]; then 7 | platform='linux' 8 | elif [[ "$uname" == 'Darwin' ]]; then 9 | platform='darwin' 10 | fi 11 | 12 | if [[ -z "$detect_copilot" ]]; then 13 | sudo curl -Lo /usr/local/bin/copilot https://github.com/aws/copilot-cli/releases/download/v0.1.0/copilot-${platform}-v0.1.0 # run sudo or root 14 | sudo chmod +x /usr/local/bin/copilot 15 | fi 16 | 17 | -------------------------------------------------------------------------------- /eks-cluster/.env: -------------------------------------------------------------------------------- 1 | # ACCOUNT_ID=$(aws sts get-caller-identity --output text --query Account) 2 | # AWS_REGION=$(curl -s 169.254.169.254/latest/dynamic/instance-identity/document | jq -r '.region') 3 | 4 | AWS_VPC_NAME="EKS-VPC" 5 | AWS_VPC_CIDR="10.10.0.0/18" 6 | # AWS_VPC_MAX_AZ=2 7 | # AWS_VPC_NAT_GATEWAY=1 8 | 9 | EKS_CLUSTER_NAME="EKS-Cluster" 10 | EKS_CLUSTER_ROLE_NAME="EKS-Cluster-Role" 11 | 12 | ECR_REPOSITORY="eks-ecr-repo" 13 | CODECOMMIT_REPOSITORY="eks-codecommit-repo" -------------------------------------------------------------------------------- /eks-cluster/.gitignore: -------------------------------------------------------------------------------- 1 | *.js 2 | !jest.config.js 3 | *.d.ts 4 | node_modules 5 | 6 | # CDK asset staging directory 7 | .cdk.staging 8 | cdk.out 9 | 10 | # Parcel default cache directory 11 | .parcel-cache 12 | -------------------------------------------------------------------------------- /eks-cluster/.npmignore: -------------------------------------------------------------------------------- 1 | *.ts 2 | !*.d.ts 3 | 4 | # CDK asset staging directory 5 | .cdk.staging 6 | cdk.out 7 | -------------------------------------------------------------------------------- /eks-cluster/README.md: -------------------------------------------------------------------------------- 1 | # Welcome to your CDK TypeScript project! 2 | 3 | This is a blank project for TypeScript development with CDK. 4 | 5 | The `cdk.json` file tells the CDK Toolkit how to execute your app.
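The stack in `lib/eks-cluster-stack.ts` reads the `.env` file above through `dotenv`, and its `getOrCreateVpc` helper honours two CDK context flags, so the cluster can be retargeted without editing TypeScript. A minimal sketch (the VPC ID is a placeholder, and VPC lookups additionally require an account/region to be set on the stack's `env`):

```bash
# Override selected .env defaults (variables already set in the shell win over the .env file)
export EKS_CLUSTER_NAME="EKS-Dev-Cluster"
export AWS_VPC_CIDR="10.20.0.0/18"

npm install
cdk deploy EksClusterStack

# Reuse an existing VPC instead of creating one (placeholder VPC ID)...
# cdk deploy EksClusterStack -c use_vpc_id=vpc-0123456789abcdef0
# ...or fall back to the account's default VPC
# cdk deploy EksClusterStack -c use_default_vpc=1
```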
6 | 7 | ## Useful commands 8 | 9 | * `npm run build` compile typescript to js 10 | * `npm run watch` watch for changes and compile 11 | * `npm run test` perform the jest unit tests 12 | * `cdk deploy` deploy this stack to your default AWS account/region 13 | * `cdk diff` compare deployed stack with current state 14 | * `cdk synth` emits the synthesized CloudFormation template 15 | 16 | -------------------------------------------------------------------------------- /eks-cluster/bin/eks-cluster.ts: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env node 2 | import 'source-map-support/register'; 3 | import * as cdk from '@aws-cdk/core'; 4 | import { EksClusterStack } from '../lib/eks-cluster-stack'; 5 | 6 | const app = new cdk.App(); 7 | new EksClusterStack(app, 'EksClusterStack'); 8 | -------------------------------------------------------------------------------- /eks-cluster/cdk.json: -------------------------------------------------------------------------------- 1 | { 2 | "app": "npx ts-node bin/eks-cluster.ts", 3 | "context": { 4 | "@aws-cdk/core:enableStackNameDuplicates": "true", 5 | "aws-cdk:enableDiffNoFail": "true" 6 | } 7 | } 8 | -------------------------------------------------------------------------------- /eks-cluster/jest.config.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | roots: ['/test'], 3 | testMatch: ['**/*.test.ts'], 4 | transform: { 5 | '^.+\\.tsx?$': 'ts-jest' 6 | } 7 | }; 8 | -------------------------------------------------------------------------------- /eks-cluster/lib/eks-cluster-stack.ts: -------------------------------------------------------------------------------- 1 | import * as cdk from '@aws-cdk/core'; 2 | 3 | import * as dotenv from 'dotenv'; 4 | import * as ec2 from '@aws-cdk/aws-ec2'; 5 | import * as iam from '@aws-cdk/aws-iam'; 6 | import * as eks from '@aws-cdk/aws-eks'; 7 | 8 | export class EksClusterStack extends cdk.Stack { 9 | constructor(scope: cdk.Construct, id: string, props?: cdk.StackProps) { 10 | super(scope, id, props); 11 | 12 | // The code that defines your stack goes here 13 | 14 | dotenv.config(); 15 | 16 | /** 17 | * Step 1. Using an existing VPC or create a new one for our EKS Cluster 18 | */ 19 | const vpc = getOrCreateVpc(this); 20 | 21 | // /** 22 | // * Create a new VPC with single NAT Gateway 23 | // */ 24 | // const vpc = new ec2.Vpc(this, 'EKS-VPC', { 25 | // cidr: '10.20.0.0/18', 26 | // natGateways: 1 27 | // }); 28 | 29 | /** 30 | * Step 2. Creating a new EKS Cluster 31 | */ 32 | 33 | // IAM role for our EC2 worker nodes 34 | const clusterAdmin = new iam.Role(this, 'EKS-AdminRole', { 35 | assumedBy: new iam.AccountRootPrincipal() 36 | }); 37 | 38 | var cluster_name = process.env.EKS_CLUSTER_NAME || "EKS-Cluster"; 39 | // console.log(`cluster_name is ${process.env.EKS_CLUSTER_NAME}`); 40 | 41 | /** Create the Cluster and a default managed NodeGroup of 2 x m5.large */ 42 | /* const cluster = new eks.Cluster(this, 'cluster-with-no-capacity', { 43 | defaultCapacity: 0 }); */ 44 | const cluster = new eks.Cluster(this, cluster_name, { 45 | clusterName: cluster_name, 46 | vpc, 47 | defaultCapacity: 1, 48 | defaultCapacityInstance: new ec2.InstanceType('t3.medium'), 49 | mastersRole: clusterAdmin, 50 | outputClusterName: true, 51 | version: eks.KubernetesVersion.V1_17, 52 | }); 53 | 54 | } 55 | } 56 | 57 | 58 | /** 59 | * Step 1. 
use an existing VPC or create a new one for our EKS Cluster 60 | * 61 | * Note: only 1 NAT Gateway --> Cost Optimization trade-off 62 | */ 63 | function getOrCreateVpc(stack: cdk.Stack): ec2.IVpc { 64 | 65 | var vpc_name = process.env.AWS_VPC_NAME || "EKS-VPC"; 66 | var vpc_cidr = process.env.AWS_VPC_CIDR || "10.10.0.0/18"; 67 | // console.log(`vpc_name is ${process.env.AWS_VPC_NAME}`); 68 | // console.log(`vpc_cidr is ${process.env.AWS_VPC_CIDR}`); 69 | // console.log(`vpc_name is ${process.env.AWS_VPC_MAX_AZ}`); 70 | // console.log(`vpc_cidr is ${process.env.AWS_VPC_NAT_GATEWAY}`); 71 | 72 | /** Use an existing VPC or create a new one */ 73 | const vpc = stack.node.tryGetContext('use_default_vpc') === '1' ? 74 | ec2.Vpc.fromLookup(stack, vpc_name, { isDefault: true }) : 75 | stack.node.tryGetContext('use_vpc_id') ? 76 | ec2.Vpc.fromLookup(stack, vpc_name, 77 | { vpcId: stack.node.tryGetContext('use_vpc_id') }) : 78 | new ec2.Vpc(stack, vpc_name, 79 | { cidr: vpc_cidr, 80 | maxAzs: 2, 81 | natGateways: 1, 82 | subnetConfiguration: [ 83 | { cidrMask: 24, subnetType: ec2.SubnetType.PUBLIC, 84 | name: "PublicDMZ" }, 85 | { cidrMask: 24, subnetType: ec2.SubnetType.PRIVATE, 86 | name: "PrivateServices" } ] 87 | }); 88 | 89 | return vpc 90 | } 91 | -------------------------------------------------------------------------------- /eks-cluster/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "eks-cluster", 3 | "version": "0.2.0", 4 | "bin": { 5 | "eks-cluster": "bin/eks-cluster.js" 6 | }, 7 | "scripts": { 8 | "build": "tsc", 9 | "watch": "tsc -w", 10 | "test": "jest", 11 | "cdk": "cdk" 12 | }, 13 | "devDependencies": { 14 | "@aws-cdk/assert": "^1.49.1", 15 | "@types/jest": "^25.2.3", 16 | "@types/node": "^10.17.5", 17 | "aws-cdk": "^1.49.1", 18 | "jest": "^25.5.0", 19 | "ts-jest": "^25.5.1", 20 | "ts-node": "^8.10.2", 21 | "typescript": "^3.7.5" 22 | }, 23 | "dependencies": { 24 | "@aws-cdk/aws-autoscaling": "^1.51.0", 25 | "@aws-cdk/aws-codebuild": "^1.51.0", 26 | "@aws-cdk/aws-codecommit": "^1.51.0", 27 | "@aws-cdk/aws-codepipeline": "^1.51.0", 28 | "@aws-cdk/aws-codepipeline-actions": "^1.51.0", 29 | "@aws-cdk/aws-ec2": "^1.51.0", 30 | "@aws-cdk/aws-ecr": "^1.51.0", 31 | "@aws-cdk/aws-eks": "^1.51.0", 32 | "@aws-cdk/aws-events-targets": "^1.51.0", 33 | "@aws-cdk/aws-iam": "^1.51.0", 34 | "@aws-cdk/core": "1.49.1", 35 | "dotenv": "^8.2.0", 36 | "source-map-support": "^0.5.16" 37 | } 38 | } -------------------------------------------------------------------------------- /eks-cluster/test/eks-cluster.test.ts: -------------------------------------------------------------------------------- 1 | import { expect as expectCDK, matchTemplate, MatchStyle } from '@aws-cdk/assert'; 2 | import * as cdk from '@aws-cdk/core'; 3 | import * as EksCluster from '../lib/eks-cluster-stack'; 4 | 5 | test('Empty Stack', () => { 6 | const app = new cdk.App(); 7 | // WHEN 8 | const stack = new EksCluster.EksClusterStack(app, 'MyTestStack'); 9 | // THEN 10 | expectCDK(stack).to(matchTemplate({ 11 | "Resources": {} 12 | }, MatchStyle.EXACT)) 13 | }); 14 | -------------------------------------------------------------------------------- /eks-cluster/tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | "target": "ES2018", 4 | "module": "commonjs", 5 | "lib": ["es2018"], 6 | "declaration": true, 7 | "strict": true, 8 | "noImplicitAny": true, 9 | "strictNullChecks": true, 10 | 
"noImplicitThis": true, 11 | "alwaysStrict": true, 12 | "noUnusedLocals": false, 13 | "noUnusedParameters": false, 14 | "noImplicitReturns": true, 15 | "noFallthroughCasesInSwitch": false, 16 | "inlineSourceMap": true, 17 | "inlineSources": true, 18 | "experimentalDecorators": true, 19 | "strictPropertyInitialization": false, 20 | "typeRoots": ["./node_modules/@types"] 21 | }, 22 | "exclude": ["cdk.out"] 23 | } 24 | -------------------------------------------------------------------------------- /jenkins/README.md: -------------------------------------------------------------------------------- 1 | https://ap-southeast-1.console.aws.amazon.com/cloudformation/home?region=ap-southeast-1#/stacks/create/template 2 | 3 | https://awsvn.s3-ap-southeast-1.amazonaws.com/Shared/cloudformation/jenkins-ecs-workshop2.json 4 | 5 | https://us-east-2.console.aws.amazon.com/cloudformation/home?region=us-east-2#/stacks/quickcreate?templateUrl=https%3A%2F%2Fawsvn.s3-ap-southeast-1.amazonaws.com%2FShared%2Fcloudformation%2Fjenkins-ecs-workshop2.json&stackName=JenkinsStack¶m_AllowedIPRange=0.0.0.0%2F0¶m_DockerImage=nikunjv%2Fjenkins%3Av4¶m_InstanceType=t2.medium¶m_KeyName=¶m_VPCIPRange=10.0.0.0%2F18 6 | 7 | https://ap-southeast-1.console.aws.amazon.com/cloudformation/home?region=ap-southeast-1#/stacks/quickcreate?templateUrl=https%3A%2F%2Fawsvn.s3-ap-southeast-1.amazonaws.com%2FShared%2Fcloudformation%2Fjenkins-ecs-workshop2.json&stackName=JenkinsStack¶m_AllowedIPRange=0.0.0.0%2F0¶m_DockerImage=nikunjv%2Fjenkins%3Av4¶m_InstanceType=t2.medium¶m_KeyName=¶m_VPCIPRange=10.0.0.0%2F18 8 | 9 | aws elb describe-instance-health --load-balancer-name jenkins-elb 10 | 11 | -------------------------------------------------------------------------------- /jenkins/pipelines-pic.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AWS-Devops-Projects/eks-workshop/7573608bd2d4bde5f479b1e7e3f7b7569072d38a/jenkins/pipelines-pic.png -------------------------------------------------------------------------------- /podinfo/.gitignore: -------------------------------------------------------------------------------- 1 | *.d.ts 2 | *.js 3 | !jest.config.js 4 | node_modules 5 | dist/ 6 | -------------------------------------------------------------------------------- /podinfo/__snapshots__/main.test.ts.snap: -------------------------------------------------------------------------------- 1 | // Jest Snapshot v1, https://goo.gl/fbAQLP 2 | 3 | exports[`Placeholder Empty 1`] = `Array []`; 4 | -------------------------------------------------------------------------------- /podinfo/cdk8s.yaml: -------------------------------------------------------------------------------- 1 | language: typescript 2 | app: node main.js 3 | imports: 4 | - k8s 5 | -------------------------------------------------------------------------------- /podinfo/help: -------------------------------------------------------------------------------- 1 | ======================================================================================================== 2 | 3 | Your cdk8s typescript project is ready! 
4 | 5 | cat help Print this message 6 | 7 | Compile: 8 | npm run compile Compile typescript code to javascript (or "yarn watch") 9 | npm run watch Watch for changes and compile typescript in the background 10 | npm run build Compile + synth 11 | 12 | Synthesize: 13 | npm run synth Synthesize k8s manifests from charts to dist/ (ready for 'kubectl apply -f') 14 | 15 | Deploy: 16 | kubectl apply -f dist/*.k8s.yaml 17 | 18 | Upgrades: 19 | npm run import Import/update k8s apis (you should check-in this directory) 20 | npm run upgrade Upgrade cdk8s modules to latest version 21 | npm run upgrade:next Upgrade cdk8s modules to latest "@next" version (last commit) 22 | 23 | ======================================================================================================== 24 | -------------------------------------------------------------------------------- /podinfo/jest.config.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | "roots": [ 3 | "" 4 | ], 5 | testMatch: [ '**/*.test.ts'], 6 | "transform": { 7 | "^.+\\.tsx?$": "ts-jest" 8 | }, 9 | } 10 | -------------------------------------------------------------------------------- /podinfo/main.test.ts: -------------------------------------------------------------------------------- 1 | import {MyChart} from './main'; 2 | import {Testing} from "cdk8s"; 3 | 4 | describe('Placeholder', () => { 5 | test('Empty', () => { 6 | const app = Testing.app(); 7 | const chart = new MyChart(app, 'test-chart'); 8 | const results = Testing.synth(chart) 9 | expect(results).toMatchSnapshot(); 10 | }); 11 | }); 12 | -------------------------------------------------------------------------------- /podinfo/main.ts: -------------------------------------------------------------------------------- 1 | import { Construct } from 'constructs'; 2 | import { App, Chart } from 'cdk8s'; 3 | /** imported constructs */ 4 | import { Deployment, Service, IntOrString } from './imports/k8s'; 5 | import { Redis } from 'cdk8s-redis'; 6 | 7 | export class MyChart extends Chart { 8 | constructor(scope: Construct, name: string) { 9 | super(scope, name); 10 | 11 | // define resources here 12 | const label = { app: 'podinfo-k8s' }; 13 | 14 | /** Deploys hello-kubernetes as a Service behind a LoadBalancer */ 15 | new Service(this, 'service', { 16 | spec: { 17 | type: 'LoadBalancer', 18 | ports: [ { port: 80, targetPort: IntOrString.fromNumber(8080) } ], 19 | selector: label 20 | } 21 | }); 22 | 23 | new Deployment(this, 'deployment', { 24 | spec: { 25 | replicas: 3, 26 | selector: { 27 | matchLabels: label 28 | }, 29 | template: { 30 | metadata: { labels: label }, 31 | spec: { 32 | containers: [ 33 | { 34 | name: 'podinfo-kubernetes', 35 | image: 'paulbouwer/hello-kubernetes:1.8', 36 | ports: [ { containerPort: 8080 } ] 37 | } 38 | ] 39 | } 40 | } 41 | } 42 | }); 43 | 44 | /** Redis construct for cdk8s */ 45 | // const redis = new Redis(this, 'my-redis', { 46 | new Redis(this, 'my-redis', { 47 | slaveReplicas: 4 48 | }); 49 | 50 | } 51 | } 52 | 53 | const app = new App(); 54 | new MyChart(app, 'podinfo'); 55 | app.synth(); 56 | 57 | -------------------------------------------------------------------------------- /podinfo/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "podinfo", 3 | "version": "1.0.0", 4 | "main": "main.js", 5 | "types": "main.ts", 6 | "license": "Apache-2.0", 7 | "private": true, 8 | "scripts": { 9 | "import": "cdk8s import", 10 | "synth": "cdk8s 
synth", 11 | "compile": "tsc", 12 | "watch": "tsc -w", 13 | "test": "jest", 14 | "build": "npm run compile && npm run test && npm run synth", 15 | "upgrade": "npm i cdk8s@latest cdk8s-cli@latest", 16 | "upgrade:next": "npm i cdk8s@next cdk8s-cli@next" 17 | }, 18 | "dependencies": { 19 | "cdk8s": "^0.26.0", 20 | "cdk8s-redis": "^0.1.0", 21 | "constructs": "^2.0.2" 22 | }, 23 | "devDependencies": { 24 | "@types/jest": "^26.0.3", 25 | "@types/node": "^14.0.14", 26 | "cdk8s-cli": "^0.26.0", 27 | "jest": "^26.1.0", 28 | "ts-jest": "^26.1.1", 29 | "typescript": "^3.9.6" 30 | } 31 | } 32 | -------------------------------------------------------------------------------- /podinfo/tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | "alwaysStrict": true, 4 | "charset": "utf8", 5 | "declaration": true, 6 | "experimentalDecorators": true, 7 | "inlineSourceMap": true, 8 | "inlineSources": true, 9 | "lib": [ 10 | "es2016" 11 | ], 12 | "module": "CommonJS", 13 | "noEmitOnError": true, 14 | "noFallthroughCasesInSwitch": true, 15 | "noImplicitAny": true, 16 | "noImplicitReturns": true, 17 | "noImplicitThis": true, 18 | "noUnusedLocals": true, 19 | "noUnusedParameters": true, 20 | "resolveJsonModule": true, 21 | "strict": true, 22 | "strictNullChecks": true, 23 | "strictPropertyInitialization": true, 24 | "stripInternal": true, 25 | "target": "ES2017" 26 | }, 27 | "include": [ 28 | "**/*.ts" 29 | ], 30 | "exclude": [ 31 | "node_modules" 32 | ] 33 | } 34 | -------------------------------------------------------------------------------- /serverless-python/base_common/__init__.py: -------------------------------------------------------------------------------- 1 | import os 2 | from aws_cdk.core import Stack, Construct, Environment 3 | from aws_cdk import aws_apigateway, aws_route53, aws_route53_targets, aws_certificatemanager, aws_ec2 4 | 5 | # we need default values here since aws-cdk-examples build synthesizes the app 6 | ACCOUNT=os.environ.get('AWS_ACCOUNT', '701571471198') 7 | REGION=os.environ.get('AWS_REGION', 'ap-southeast-1') 8 | VPC_ID = os.environ.get('AWS_VPC_ID', 'vpc-04f6bf98089c883b4') 9 | ZONE_NAME = os.environ.get('AWS_ZONE_NAME', 'aws.job4u.io') 10 | ZONE_ID = os.environ.get('AWS_ZONE_ID', 'Z18LLN6ULFZKNH') 11 | ZONE_CERT = os.environ.get('AWS_ZONE_CERT', 'arn:aws:acm:ap-southeast-1:701571471198:certificate/12049ee9-d585-44b4-bd06-00190aa5cca7') 12 | 13 | AWS_ENV = Environment(account=ACCOUNT, region=REGION) 14 | 15 | class BaseStack(Stack): 16 | """ 17 | A base CDK stack class for all stacks defined in our fun little company. 18 | """ 19 | 20 | def __init__(self, scope: Construct, id: str, **kwargs): 21 | super().__init__(scope, id, env=AWS_ENV, **kwargs) 22 | 23 | # lookup our pre-created VPC by ID 24 | self._vpc = aws_ec2.Vpc.from_lookup(self, "vpc", vpc_id=VPC_ID) 25 | 26 | @property 27 | def base_vpc(self) -> aws_ec2.IVpc: 28 | """ 29 | :return: The walters co. vpc 30 | """ 31 | return self._vpc 32 | 33 | def map_base_subdomain(self, subdomain: str, api: aws_apigateway.RestApi) -> str: 34 | """ 35 | Maps a sub-domain of aws.job4u.io to an API gateway 36 | 37 | :param subdomain: The sub-domain (e.g. "www") 38 | :param api: The API gateway endpoint 39 | :return: The base url (e.g. "https://www.aws.job4u.io") 40 | """ 41 | domain_name = subdomain + '.' 
+ ZONE_NAME 42 | url = 'https://' + domain_name 43 | 44 | cert = aws_certificatemanager.Certificate.from_certificate_arn(self, 'DomainCertificate', ZONE_CERT) 45 | hosted_zone = aws_route53.HostedZone.from_hosted_zone_attributes(self, 'HostedZone', 46 | hosted_zone_id=ZONE_ID, 47 | zone_name=ZONE_NAME) 48 | 49 | # add the domain name to the api and the A record to our hosted zone 50 | domain = api.add_domain_name('Domain', certificate=cert, domain_name=domain_name) 51 | 52 | aws_route53.ARecord( 53 | self, 'UrlShortenerDomain', 54 | record_name=subdomain, 55 | zone=hosted_zone, 56 | target=aws_route53.RecordTarget.from_alias(aws_route53_targets.ApiGatewayDomain(domain))) 57 | 58 | return url 59 | 60 | 61 | __all__ = ["BaseStack"] 62 | -------------------------------------------------------------------------------- /serverless-python/url-shortener/.gitignore: -------------------------------------------------------------------------------- 1 | *.swp 2 | package-lock.json 3 | __pycache__ 4 | .pytest_cache 5 | .env 6 | *.egg-info 7 | 8 | # CDK asset staging directory 9 | .cdk.staging 10 | cdk.out 11 | -------------------------------------------------------------------------------- /serverless-python/url-shortener/README.md: -------------------------------------------------------------------------------- 1 | 2 | # URL_Shortener CDK Python project 3 | 4 | 5 | Use the `Python CDK` to quickly assemble your AWS infrastructure and show you how easy to configure your cloud resources, manage permissions, connect event sources and even build and publish your own constructs. 6 | 7 | 8 | * [x] Create and source a Python virtualenv on MacOS and Linux, and install python dependencies: 9 | 10 | ``` 11 | python3 -m venv .env 12 | source .env/bin/activate 13 | pip install -r requirements.txt 14 | ``` 15 | 16 | * [ ] Install the latest version of the AWS CDK CLI: 17 | 18 | ``` 19 | # npm i -g aws-cdk --force 20 | ``` 21 | 22 | ```shell 23 | export AWS_ACCOUNT='701571471198' 24 | export AWS_REGION='ap-southeast-1' 25 | export AWS_VPC_ID='vpc-04f6bf98089c883b4' 26 | export AWS_ZONE_NAME='aws.job4u.io' 27 | export AWS_ZONE_ID='Z18LLN6ULFZKNH' 28 | export AWS_ZONE_CERT='arn:aws:acm:ap-southeast-1:701571471198:certificate/12049ee9-d585-44b4-bd06-00190aa5cca7' 29 | ``` 30 | 31 | 32 | ``` 33 | cdk bootstrap aws://701571471198/ap-southeast-1 34 | cdk deploy '*' 35 | ``` 36 | 37 | 38 | > Generate a shortened URL 39 | 40 | ``` 41 | curl https://shortener.aws.job4u.io?targetUrl=https://aws.amazon.com/cdk/ 42 | 43 | ## to access a shortened URL 44 | curl -I https://shortener.aws.job4u.io/XXXXXXXX 45 | ``` 46 | 47 | 1. Creating a CDK Application 48 | 2. Modeling DynamoDB 49 | 3. Creating a Lambda function 50 | 4. Lambda permission settings 51 | 5. Build API Gateway 52 | 6. DNS settings 53 | 7. Cleanup 54 | 55 | ### 1. Creating a CDK Application 56 | 57 | 58 | > Install AWS CDK 59 | 60 | ```bash 61 | npm install -g aws-cdk 62 | cdk --version 63 | 64 | # brew install tree 65 | tree 66 | ``` 67 | 68 | > Creating a CDK application 69 | 70 | ``` 71 | mkdir url-shortener 72 | cd url-shortener 73 | cdk init --language python 74 | ``` 75 | 76 | > Install packages 77 | 78 | ```bash 79 | ## Manually create a virtualenv on MacOS and Linux: 80 | python3 -m venv .env 81 | 82 | ## Activate your virtualenv. 83 | source .env/bin/activate 84 | ## Windows 85 | # % .env\Scripts\activate.bat 86 | 87 | ## Install the required dependencies. 
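## Note: requirements.txt holds a single editable install ("-e ."), so this step effectively
## runs "pip install -e ." and pulls in the aws-cdk.* and boto3 packages declared in setup.py.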
88 | pip install -r requirements.txt 89 | ``` 90 | 91 | > Synthesize a template from your app 92 | 93 | ``` 94 | cdk synth 95 | ``` 96 | 97 | > Bootstrapping an environment 98 | 99 | ``` 100 | cdk bootstrap 101 | cdk deploy url-shortener 102 | ``` 103 | 104 | ### 2. Modeling DynamoDB 105 | 106 | * [x] Table Name: `mapping-table` 107 | * [x] Partition Key: `id` (AttributeType.STRING) 108 | 109 | ### 3. Creating a Lambda function 110 | 111 | > **Lambda permission settings** 112 | 113 | ### 4. Build API Gateway 114 | 115 | ### 5. DNS settings 116 | 117 | ### 6. Cleanup 118 | 119 | 120 | ### 7. Useful commands 121 | 122 | * `cdk ls` list all stacks in the app 123 | * `cdk synth` emits the synthesized CloudFormation template 124 | * `cdk deploy` deploy this stack to your default AWS account/region 125 | * `cdk diff` compare deployed stack with current state 126 | * `cdk docs` open CDK documentation 127 | 128 | ### 8. References 129 | 130 | * [x] [CDK Workshop](https://cdkworkshop.com) 131 | * [x] [the Infrastructure is Code with the AWS CDK](https://youtu.be/ZWCvNFUN-sU) 132 | * [x] [url-shortener](https://github.com/aws-samples/aws-cdk-examples/tree/master/python/url-shortener) 133 | 134 | Enjoy! 135 | -------------------------------------------------------------------------------- /serverless-python/url-shortener/app.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | 3 | from aws_cdk import core 4 | 5 | from url_shortener.url_shortener_stack import UrlShortenerStack, TrafficGeneratorStack 6 | 7 | app = core.App() 8 | UrlShortenerStack(app, "url-shortener") 9 | TrafficGeneratorStack(app, 'url-shortener-load-test') 10 | 11 | app.synth() 12 | -------------------------------------------------------------------------------- /serverless-python/url-shortener/cdk.context.json: -------------------------------------------------------------------------------- 1 | { 2 | "@aws-cdk/core:enableStackNameDuplicates": "true", 3 | "aws-cdk:enableDiffNoFail": "true" 4 | } 5 | -------------------------------------------------------------------------------- /serverless-python/url-shortener/cdk.json: -------------------------------------------------------------------------------- 1 | { 2 | "app": "python3 app.py" 3 | } 4 | -------------------------------------------------------------------------------- /serverless-python/url-shortener/lambda/handler.py: -------------------------------------------------------------------------------- 1 | import json 2 | import os 3 | import uuid 4 | import logging 5 | 6 | import boto3 7 | 8 | LOG = logging.getLogger() 9 | LOG.setLevel(logging.INFO) 10 | 11 | 12 | def main(event, context): 13 | LOG.info("EVENT: " + json.dumps(event)) 14 | 15 | query_string_params = event["queryStringParameters"] 16 | if query_string_params is not None: 17 | target_url = query_string_params['targetUrl'] 18 | if target_url is not None: 19 | return create_short_url(event) 20 | 21 | path_parameters = event['pathParameters'] 22 | if path_parameters is not None: 23 | if path_parameters['proxy'] is not None: 24 | return read_short_url(event) 25 | 26 | return { 27 | 'statusCode': 200, 28 | 'body': 'usage: ?targetUrl=URL' 29 | } 30 | 31 | 32 | def create_short_url(event): 33 | # Pull out the DynamoDB table name from environment 34 | table_name = os.environ.get('TABLE_NAME') 35 | 36 | # Parse targetUrl 37 | target_url = event["queryStringParameters"]['targetUrl'] 38 | 39 | # Create a unique id (take first 8 chars) 40 | id = str(uuid.uuid4())[0:8] 
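    # NOTE: an 8-character slice of a UUID4 keeps URLs short but is not collision-proof;
    # adding ConditionExpression='attribute_not_exists(id)' to the put_item call below
    # would avoid silently overwriting an existing mapping.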
41 | 42 | # Create item in DynamoDB 43 | dynamodb = boto3.resource('dynamodb') 44 | table = dynamodb.Table(table_name) 45 | table.put_item(Item={ 46 | 'id': id, 47 | 'target_url': target_url 48 | }) 49 | 50 | # Create the redirect URL 51 | url = "https://" \ 52 | + event["requestContext"]["domainName"] \ 53 | + event["requestContext"]["path"] \ 54 | + id 55 | 56 | return { 57 | 'statusCode': 200, 58 | 'headers': {'Content-Type': 'text/plain'}, 59 | 'body': "Created URL: %s" % url 60 | } 61 | 62 | def read_short_url(event): 63 | # Parse redirect ID from path 64 | id = event['pathParameters']['proxy'] 65 | 66 | # Pull out the DynamoDB table name from the environment 67 | table_name = os.environ.get('TABLE_NAME') 68 | 69 | # Load redirect target from DynamoDB 70 | ddb = boto3.resource('dynamodb') 71 | table = ddb.Table(table_name) 72 | response = table.get_item(Key={'id': id}) 73 | LOG.debug("RESPONSE: " + json.dumps(response)) 74 | 75 | item = response.get('Item', None) 76 | if item is None: 77 | return { 78 | 'statusCode': 400, 79 | 'headers': {'Content-Type': 'text/plain'}, 80 | 'body': 'No redirect found for ' + id 81 | } 82 | 83 | # Respond with a redirect 84 | return { 85 | 'statusCode': 301, 86 | 'headers': { 87 | 'Location': item.get('target_url') 88 | } 89 | } 90 | -------------------------------------------------------------------------------- /serverless-python/url-shortener/pinger/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM alpine 2 | 3 | RUN apk add curl 4 | ADD ping.sh /ping.sh 5 | 6 | CMD [ "/bin/sh", "/ping.sh" ] 7 | -------------------------------------------------------------------------------- /serverless-python/url-shortener/pinger/ping.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | while true; do 4 | curl -i $URL 5 | sleep 1 6 | done 7 | -------------------------------------------------------------------------------- /serverless-python/url-shortener/requirements.txt: -------------------------------------------------------------------------------- 1 | -e . 
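The pinger image above is the same asset that the Traffic101 construct later builds and runs on Fargate; it can be smoke-tested locally before anything is deployed. A quick sketch (the target URL is just an example):

```bash
# Build the traffic generator from the pinger/ directory and curl a URL once per second
docker build -t url-pinger serverless-python/url-shortener/pinger
docker run --rm -e URL="https://shortener.aws.job4u.io" url-pinger
```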
2 | -------------------------------------------------------------------------------- /serverless-python/url-shortener/setup.py: -------------------------------------------------------------------------------- 1 | import setuptools 2 | 3 | 4 | with open("README.md") as fp: 5 | long_description = fp.read() 6 | 7 | 8 | setuptools.setup( 9 | name="url_shortener", 10 | version="0.1.0", 11 | 12 | description="Infrastructure is Code with the AWS CDK", 13 | long_description=long_description, 14 | long_description_content_type="text/markdown", 15 | 16 | author="Thanh Nguyen", 17 | 18 | package_dir={"": "url_shortener"}, 19 | packages=setuptools.find_packages(where="url_shortener"), 20 | 21 | install_requires=[ 22 | "aws-cdk.core", 23 | "aws-cdk.aws-dynamodb", 24 | "aws-cdk.aws-events", 25 | "aws-cdk.aws-events-targets", 26 | "aws-cdk.aws-lambda", 27 | "aws-cdk.aws-s3", 28 | "aws-cdk.aws-ec2", 29 | "aws-cdk.aws-ecs-patterns", 30 | "aws-cdk.aws-certificatemanager", 31 | "aws-cdk.aws-apigateway", 32 | "aws-cdk.aws-cloudwatch", 33 | "cdk.watchful", 34 | "boto3" 35 | ], 36 | 37 | python_requires=">=3.6", 38 | 39 | classifiers=[ 40 | "Development Status :: 4 - Beta", 41 | 42 | "Intended Audience :: Developers", 43 | 44 | "License :: OSI Approved :: Apache Software License", 45 | 46 | "Programming Language :: JavaScript", 47 | "Programming Language :: Python :: 3 :: Only", 48 | "Programming Language :: Python :: 3.6", 49 | "Programming Language :: Python :: 3.7", 50 | "Programming Language :: Python :: 3.8", 51 | 52 | "Topic :: Software Development :: Code Generators", 53 | "Topic :: Utilities", 54 | 55 | "Typing :: Typed", 56 | ], 57 | ) 58 | -------------------------------------------------------------------------------- /serverless-python/url-shortener/source.bat: -------------------------------------------------------------------------------- 1 | @echo off 2 | 3 | rem The sole purpose of this script is to make the command 4 | rem 5 | rem source .env/bin/activate 6 | rem 7 | rem (which activates a Python virtualenv on Linux or Mac OS X) work on Windows. 8 | rem On Windows, this command just runs this batch file (the argument is ignored). 9 | rem 10 | rem Now we don't need to document a Windows command for activating a virtualenv. 11 | 12 | echo Executing .env\Scripts\activate.bat for you 13 | .env\Scripts\activate.bat 14 | -------------------------------------------------------------------------------- /serverless-python/url-shortener/traffic101.py: -------------------------------------------------------------------------------- 1 | from aws_cdk.core import Construct 2 | from aws_cdk import aws_ecs, aws_ec2 3 | 4 | 5 | ## a User-Defined Construct 6 | ## just a Class the inherits from the core.Construct Base Class 7 | class Traffic101(Construct): 8 | """ 9 | An HTTP traffic generator. 10 | 11 | Hits a specified URL at some TPS. 12 | """ 13 | 14 | def __init__(self, scope: Construct, id: str, *, vpc: aws_ec2.IVpc, url: str, tps: int): 15 | """ 16 | Defines an instance of the traffic generator. 17 | 18 | :param scope: construct scope 19 | :param id: construct id 20 | :param vpc: the VPC in which to host the traffic generator 21 | :param url: the URL to hit 22 | :param tps: the number of transactions per second 23 | """ 24 | super().__init__(scope, id) 25 | 26 | ## Define an ECS Cluster hosted within the requested VPC 27 | cluster = aws_ecs.Cluster(self, 'cluster', vpc=vpc) 28 | 29 | ## Define our ECS Task Definition with a single Container. 
30 | ## The image is built & published from a local asset directory 31 | task_definition = aws_ecs.FargateTaskDefinition(self, 'PingTask') 32 | task_definition.add_container('Pinger', 33 | image=aws_ecs.ContainerImage.from_asset("pinger"), 34 | environment={'URL': url}) 35 | 36 | ## Define our Fargate Service. TPS determines how many Instances we 37 | ## want from our Task (each Task produces a single TPS) 38 | aws_ecs.FargateService(self, 'service', 39 | cluster=cluster, 40 | task_definition=task_definition, 41 | desired_count=tps) 42 | -------------------------------------------------------------------------------- /serverless-python/url-shortener/url_shortener/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AWS-Devops-Projects/eks-workshop/7573608bd2d4bde5f479b1e7e3f7b7569072d38a/serverless-python/url-shortener/url_shortener/__init__.py -------------------------------------------------------------------------------- /serverless-python/url-shortener/url_shortener/url_shortener_stack.py: -------------------------------------------------------------------------------- 1 | from aws_cdk import core 2 | from aws_cdk.core import App, Construct, Duration 3 | from aws_cdk import aws_dynamodb, aws_lambda, aws_apigateway 4 | 5 | from base_common import BaseStack 6 | 7 | from traffic101 import Traffic101 8 | from cdk_watchful import Watchful 9 | 10 | ## Our main Application Stack 11 | class UrlShortenerStack(BaseStack): 12 | # class UrlShortenerStack(core.Stack): 13 | 14 | def __init__(self, scope: core.Construct, id: str, **kwargs) -> None: 15 | super().__init__(scope, id, **kwargs) 16 | 17 | # The code that defines your stack goes here 18 | 19 | ## Define the table that maps short codes to URLs. 20 | table = aws_dynamodb.Table(self, "mapping-table", 21 | partition_key=aws_dynamodb.Attribute( 22 | name="id", 23 | type=aws_dynamodb.AttributeType.STRING), 24 | read_capacity=10, 25 | write_capacity=5) 26 | 27 | ## Defines Lambda resource & API-Gateway request handler 28 | ## All API requests will go to the same function. 29 | handler = aws_lambda.Function(self, "UrlShortenerFunction", 30 | code=aws_lambda.Code.asset("./lambda"), 31 | handler="handler.main", 32 | timeout=core.Duration.minutes(5), 33 | runtime=aws_lambda.Runtime.PYTHON_3_7) 34 | 35 | ## Pass the table name to the handler through an env variable 36 | ## and grant the handler read/write permissions on the table. 37 | table.grant_read_write_data(handler) 38 | handler.add_environment('TABLE_NAME', table.table_name) 39 | 40 | ## Define the API endpoint and associate the handler 41 | api = aws_apigateway.LambdaRestApi(self, "UrlShortenerApi", 42 | handler=handler) 43 | 44 | ## Map shortener.aws.job4u.io to this API Gateway endpoint 45 | ## The shared Domain Name that can be accessed through the API in BaseStack 46 | ## NOTE: you can comment out if you want to bypass the Domain Name mapping 47 | self.map_base_subdomain('shortener', api) 48 | 49 | ## Define a Watchful monitoring system and watch the entire scope. 50 | ## This will automatically find all watchable resources 51 | ## and addthem to our dashboard. 
52 | wf = Watchful(self, 'watchful', alarm_email='nnthanh101@gmail.com') 53 | wf.watch_scope(self) 54 | 55 | ## Separate Stack that includes the Traffic Generator 56 | class TrafficGeneratorStack(BaseStack): 57 | def __init__(self, scope: Construct, id: str): 58 | super().__init__(scope, id) 59 | 60 | ## Define a Traffic Generator instance that hits the URL at 10 TPS 61 | ## and hosted within the shared Base-VPC 62 | Traffic101(self, 'generator', 63 | url='https://shortener.aws.job4u.io/f84b55e1', 64 | tps=10, 65 | vpc=self.base_vpc) 66 | --------------------------------------------------------------------------------
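app.py wires up both stacks, so the shortener and its load generator can be deployed and removed independently. A short sketch, assuming the AWS_* variables from the README are exported and the environment has been bootstrapped:

```bash
# Deploy the API, DynamoDB table and Watchful dashboard
cdk deploy url-shortener

# Add the 10-TPS Fargate traffic generator when you want data on the dashboard
cdk deploy url-shortener-load-test

# Remove the load test (and its ECS cluster) when finished
cdk destroy url-shortener-load-test
```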