├── .gitignore ├── CODE_OF_CONDUCT.md ├── CONTRIBUTING.md ├── LICENSE ├── README.md ├── environments └── prod │ └── tooling │ ├── ap-southeast-1.tfvars │ ├── eu-central-1.tfvars │ ├── us-east-1.tfvars │ └── variables.tfvars ├── main.tf ├── modules ├── global │ ├── README.md │ ├── iam.tf │ ├── outputs.tf │ ├── templates │ │ ├── codebuild_iam_policy.tpl │ │ └── codepipeline_iam_policy.tpl │ └── variables.tf └── regional │ ├── README.md │ ├── buildspec-tagged_source.yml │ ├── buildspec-terraform_apply.yml │ ├── buildspec-terraform_checkov.yml │ ├── buildspec-terraform_plan.yml │ ├── buildspec-terraform_tflint.yml │ ├── main.tf │ ├── repo.tf │ ├── templates │ ├── cloud_watch_event_policy.tpl │ ├── key_policy.tpl │ ├── s3_bucket_policy_codebuild.tpl │ └── s3_bucket_policy_codepipeline.tpl │ ├── variables.tf │ └── vpc │ ├── outputs.tf │ ├── variables.tf │ └── vpc.tf ├── outputs.tf ├── provider.tf ├── readme-images ├── cicd.png ├── overall-architecture.png ├── target-workload-multi-region.png ├── target-workload.png └── tf-remote-state.png ├── scripts ├── prerequisites │ ├── create-iam-resources_for_tooling_account.sh │ └── create-iam-resources_for_workload_account.sh ├── run-tf-prod-destroy.sh ├── run-tf-prod-global-destroy.sh ├── run-tf-prod-global.sh └── run-tf-prod.sh └── variables.tf /.gitignore: -------------------------------------------------------------------------------- 1 | .idea 2 | .terraform.lock.hcl 3 | .terraform 4 | tfplan -------------------------------------------------------------------------------- /CODE_OF_CONDUCT.md: -------------------------------------------------------------------------------- 1 | ## Code of Conduct 2 | This project has adopted the [Amazon Open Source Code of Conduct](https://aws.github.io/code-of-conduct). 3 | For more information see the [Code of Conduct FAQ](https://aws.github.io/code-of-conduct-faq) or contact 4 | opensource-codeofconduct@amazon.com with any additional questions or comments. 5 | -------------------------------------------------------------------------------- /CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | # Contributing Guidelines 2 | 3 | Thank you for your interest in contributing to our project. Whether it's a bug report, new feature, correction, or additional 4 | documentation, we greatly value feedback and contributions from our community. 5 | 6 | Please read through this document before submitting any issues or pull requests to ensure we have all the necessary 7 | information to effectively respond to your bug report or contribution. 8 | 9 | 10 | ## Reporting Bugs/Feature Requests 11 | 12 | We welcome you to use the GitHub issue tracker to report bugs or suggest features. 13 | 14 | When filing an issue, please check existing open, or recently closed, issues to make sure somebody else hasn't already 15 | reported the issue. Please try to include as much information as you can. Details like these are incredibly useful: 16 | 17 | * A reproducible test case or series of steps 18 | * The version of our code being used 19 | * Any modifications you've made relevant to the bug 20 | * Anything unusual about your environment or deployment 21 | 22 | 23 | ## Contributing via Pull Requests 24 | Contributions via pull requests are much appreciated. Before sending us a pull request, please ensure that: 25 | 26 | 1. You are working against the latest source on the *main* branch. 27 | 2. 
You check existing open, and recently merged, pull requests to make sure someone else hasn't addressed the problem already. 28 | 3. You open an issue to discuss any significant work - we would hate for your time to be wasted. 29 | 30 | To send us a pull request, please: 31 | 32 | 1. Fork the repository. 33 | 2. Modify the source; please focus on the specific change you are contributing. If you also reformat all the code, it will be hard for us to focus on your change. 34 | 3. Ensure local tests pass. 35 | 4. Commit to your fork using clear commit messages. 36 | 5. Send us a pull request, answering any default questions in the pull request interface. 37 | 6. Pay attention to any automated CI failures reported in the pull request, and stay involved in the conversation. 38 | 39 | GitHub provides additional document on [forking a repository](https://help.github.com/articles/fork-a-repo/) and 40 | [creating a pull request](https://help.github.com/articles/creating-a-pull-request/). 41 | 42 | 43 | ## Finding contributions to work on 44 | Looking at the existing issues is a great way to find something to contribute on. As our projects, by default, use the default GitHub issue labels (enhancement/bug/duplicate/help wanted/invalid/question/wontfix), looking at any 'help wanted' issues is a great place to start. 45 | 46 | 47 | ## Code of Conduct 48 | This project has adopted the [Amazon Open Source Code of Conduct](https://aws.github.io/code-of-conduct). 49 | For more information see the [Code of Conduct FAQ](https://aws.github.io/code-of-conduct-faq) or contact 50 | opensource-codeofconduct@amazon.com with any additional questions or comments. 51 | 52 | 53 | ## Security issue notifications 54 | If you discover a potential security issue in this project we ask that you notify AWS/Amazon Security via our [vulnerability reporting page](http://aws.amazon.com/security/vulnerability-reporting/). Please do **not** create a public github issue. 55 | 56 | 57 | ## Licensing 58 | 59 | See the [LICENSE](LICENSE) file for our project's licensing. We will ask you to confirm the licensing of your contribution. 60 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | 3 | Permission is hereby granted, free of charge, to any person obtaining a copy of 4 | this software and associated documentation files (the "Software"), to deal in 5 | the Software without restriction, including without limitation the rights to 6 | use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of 7 | the Software, and to permit persons to whom the Software is furnished to do so. 8 | 9 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 10 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS 11 | FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR 12 | COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER 13 | IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN 14 | CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
15 | 16 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # aws-multi-region-cicd-with-terraform 2 | 3 | A multi region CI/CD pipeline with AWS CodePipeline and AWS CodeBuild in Terraform 4 | 5 | ## Table of contents 6 | * [Overall architecture](#overall-architecture) 7 | * [Instructions to deploy the CI/CD pipeline](#instructions-to-deploy-the-cicd-pipeline) 8 | * [Step 1: Prerequisites](#step-1-prerequisites) 9 | * [Step 2: Deploy CI/CD pipeline resources in the central tooling account](#step-2-deploy-cicd-pipeline-resources-in-the-central-tooling-account) 10 | * [Step 3: Push the infra repo code into AWS CodeCommit in the central tooling account](#step-3-push-the-infra-repo-code-into-aws-codecommit-in-the-central-tooling-account) 11 | * [Kick off a pipeline to deploy to a target workload account and a target region](#kick-off-a-pipeline-to-deploy-to-a-target-workload-account-and-a-target-region) 12 | * [Examples](#examples) 13 | * [Dev pipeline](#dev-pipeline) 14 | * [QA pipeline](#qa-pipeline) 15 | * [Staging pipeline](#staging-pipeline) 16 | * [Prod pipeline](#prod-pipeline) 17 | * [Instructions to destroy resources](#instructions-to-destroy-resources) 18 | * [Step 1: Destroy the resources in the target workload account(s)](#step-1-destroy-the-resources-in-the-target-workload-accounts) 19 | * [Step 2: Destroy the resources in the central tooling account created in this repo](#step-2-destroy-the-resources-in-the-central-tooling-account-created-in-this-repo) 20 | * [Step 3: Destroy the Terraform state resources](#step-3-destroy-the-terraform-state-resources) 21 | * [Common Errors or Warnings](#common-errors-or-warnings) 22 | * [Security](#security) 23 | * [License](#license) 24 | 25 | 26 | ## Overall Architecture 27 | This is what we will build. 28 | * First we create the CI/CD resources inside the Central Tooling account using Terraform. 29 | * Next we will use the pipeline created as part of the CI/CD resources to deploy the sample infra into the target workload account. 30 | * Below is the overall architecture. 31 | 32 | ![image](readme-images/overall-architecture.png) 33 | 34 | ## Instructions to deploy the CI/CD pipeline 35 | ## Step 1: Prerequisites 36 | To follow this sample you will need: 37 | * [Terraform CLI (0.14+)](https://www.terraform.io/downloads) installed. 38 | * [AWS CLI](https://docs.aws.amazon.com/cli/latest/userguide/install-cliv2.html) installed. 39 | * [AWS account](https://aws.amazon.com/free) for the central tooling account for CI/CD and [associated credentials](https://docs.aws.amazon.com/general/latest/gr/aws-sec-cred-types.html#access-keys-and-secret-access-keys) that allow you to create resources in this account. 40 | * [AWS account](https://aws.amazon.com/free) for the target workload account where you deploy sample infra resources and [associated credentials](https://docs.aws.amazon.com/general/latest/gr/aws-sec-cred-types.html#access-keys-and-secret-access-keys) that allow you to create resources in this account. 
41 | * Next, create IAM resources in the central tooling account using the script below:
42 | ```shell
43 | chmod +x ./scripts/prerequisites/*
44 | ./scripts/prerequisites/create-iam-resources_for_tooling_account.sh
45 | ```
46 | 
47 | #### Terraform Remote State Management
48 | * Use `aws configure` with your IAM user credentials for the central tooling account and then assume InfraBuildRole:
49 | ```shell
50 | # You can use the one-liner below
51 | # For details, see [this](https://aws.amazon.com/premiumsupport/knowledge-center/iam-assume-role-cli/)
52 | # Update 111122223333 below to your central tooling account number
53 | OUT=$(aws sts assume-role --role-arn arn:aws:iam::111122223333:role/InfraBuildRole --role-session-name INFRA_BUILD);export AWS_ACCESS_KEY_ID=$(echo $OUT | jq -r '.Credentials''.AccessKeyId');export AWS_SECRET_ACCESS_KEY=$(echo $OUT | jq -r '.Credentials''.SecretAccessKey');export AWS_SESSION_TOKEN=$(echo $OUT | jq -r '.Credentials''.SessionToken');
54 | 
55 | # Verify you assumed the role
56 | aws sts get-caller-identity
57 | {
58 |     "UserId": "AAA:INFRA_BUILD",
59 |     "Account": "111122223333",
60 |     "Arn": "arn:aws:sts::111122223333:assumed-role/InfraBuildRole/INFRA_BUILD"
61 | }
62 | ```
63 | * Create the Terraform remote state management resources, preferably in the same region you plan to use for your CI/CD resources:
64 |   * Create one S3 bucket per environment named `<tf_backend_config_prefix>-<env>` and one DynamoDB table per environment for state locks named `<tf_backend_config_prefix>-lock-<env>`.
65 |   * env is one of (dev, qa, staging, prod), or whatever list of environment names you pass via the tag_prefix_list input param.
66 |   * tf_backend_config_prefix is a name of your choice that's globally unique in S3. See https://github.com/cloudposse/terraform-aws-tfstate-backend#input_namespace
67 |   * Make a note of this value. You will then pass this tf_backend_config_prefix value to each script using `-b <tf_backend_config_prefix>`.
68 | * Below is an example of how to create the Terraform remote state resources along with **a valid provider config**. For more details, including the input params, please see [this](https://registry.terraform.io/modules/cloudposse/tfstate-backend/aws/latest).
69 | * You'd then perform the usual terraform init, terraform plan, and terraform apply.
70 | ```hcl
71 | locals {
72 |   # namespace = "<tf_backend_config_prefix>" # For example:
73 |   namespace = "org-awesome-tf-state" # This is the tf_backend_config_prefix. Pick a namespace that's globally unique in S3. See https://github.com/cloudposse/terraform-aws-tfstate-backend#input_namespace
74 |   environment_list = ["dev", "qa", "staging", "prod"]
75 | }
76 | 
77 | module "terraform_state_backend" {
78 |   for_each = toset(local.environment_list)
79 |   source = "github.com/cloudposse/terraform-aws-tfstate-backend?ref=0.38.1"
80 |   namespace = local.namespace
81 |   stage = each.key
82 |   dynamodb_table_name = "${local.namespace}-lock-${each.key}"
83 | 
84 |   terraform_backend_config_file_path = "."
85 |   terraform_backend_config_file_name = "backend.tf"
86 |   force_destroy = false
87 | }
88 | 
89 | provider "aws" {
90 |   region = "<region>"
91 | }
92 | ```
93 | * This will create the following, where the value of the prefix is set to the namespace, i.e. the tf_backend_config_prefix ("org-awesome-tf-state" in our example):
94 | ![image](readme-images/tf-remote-state.png)
95 | 
96 | ## CI/CD
97 | * First we will create the CI/CD resources. To do this, we use Terraform locally.
98 | * We need to create the CI/CD resources in the central tooling account first so we can use the pipeline to deploy the sample infrastructure workload to the target workload account.
99 | * Since there are no CI/CD resources yet in the central tooling account and we need to create them, we create them with Terraform. Below is what we will create. 100 | 101 | ![image](readme-images/cicd.png) 102 | 103 | ### Step 2: Deploy CI/CD pipeline resources in the central tooling account 104 | * Clone this repo into your local shadow. 105 | * Scripts are under ./scripts directory and are expected to be run from the root directory of this project. 106 | * Make sure to `chmod +x scripts/*` 107 | * Update environments/prod/tooling/variables.tfvars with the input parameter values of your choice as listed in the next section under "Inputs". 108 | * account is mandatory and it should be set to the account number of your central tooling account. 109 | * target_accounts is mandatory and, it should be set to the account numbers of your workload accounts. 110 | ```hcl 111 | account=111122223333 # This is required to deploy the CI/CD resources in the central tooling account that you created 112 | target_accounts=["555555555555", "444455556666"] # This is required for entitling AWS CodeBuild to assume the Cross Account IAM role in the target workload accounts. 113 | tag_prefix_list=["dev", "qa", "staging", "prod"] # Optional. Update to the environment names of your choice, otherwise this is the default list of environments that are also the git tag prefixes for deployments. 114 | number_of_azs=3 # Optional. Set to the number of az's to deploy CI/CD resources. Otherwise, it defaults to 3. 115 | ``` 116 | * Use `aws configure` with your IAM user credentials for the central tooling account and then assume InfraBuildRole: 117 | ```shell 118 | # You can use below one liner 119 | # For details, see [this](https://aws.amazon.com/premiumsupport/knowledge-center/iam-assume-role-cli/ 120 | # Update 111122223333 below to your central tooling account number 121 | OUT=$(aws sts assume-role --role-arn arn:aws:iam::111122223333:role/InfraBuildRole --role-session-name INFRA_BUILD);export AWS_ACCESS_KEY_ID=$(echo $OUT | jq -r '.Credentials''.AccessKeyId');export AWS_SECRET_ACCESS_KEY=$(echo $OUT | jq -r '.Credentials''.SecretAccessKey');export AWS_SESSION_TOKEN=$(echo $OUT | jq -r '.Credentials''.SessionToken'); 122 | 123 | # Verify you assumed the role 124 | aws sts get-caller-identity 125 | { 126 | "UserId": "AAA:INFRA_BUILD", 127 | "Account": "111122223333", 128 | "Arn": "arn:aws:sts::111122223333:assumed-role/InfraBuildRole/INFRA_BUILD" 129 | } 130 | ``` 131 | * There is an order we follow for the deployment. We create the global resources first, followed by regional resources. This is because some global resources (i.e. IAM roles) are looked up by the regional resources. 132 | * First run the script to generate the tf plan for global resources (i.e. IAM resources for CodeBuild/CodePipeline), inspect the plan and then run `terraform apply "tfplan"`: 133 | ```shell 134 | ./scripts/run-tf-prod-global.sh -b 135 | # Ex: ./scripts/run-tf-prod-global.sh -b unique-namespace-for-tf-state-created-in-prereqs 136 | #Note: If you are using a region other than us-east-1 for CI/CD tooling, also set -r param (similarly if you use a region other than us-east-1 to deploy global resources from, then pass -g param) 137 | #For ex: ./scripts/run-tf-prod-global.sh -b org-awesome-tf-state -g eu-central-1 -r eu-central-1 138 | # check the plan and then run: 139 | terraform apply "tfplan" 140 | ``` 141 | * You should see the following along with the usual warning around using -target option. 
142 | ```shell
143 | ╷
144 | │ Warning: Applied changes may be incomplete
145 | │
146 | │ The plan was created with the -target option in effect, so some changes requested in the configuration may have been ignored and the output values may not be fully updated. Run the following command to verify that no other changes are pending:
147 | │ terraform plan
148 | │
149 | │ Note that the -target option is not suitable for routine use, and is provided only for exceptional situations such as recovering from errors or mistakes, or when Terraform specifically suggests to use it as part of an error message.
150 | ╵
151 | Releasing state lock. This may take a few moments...
152 | 
153 | Apply complete! Resources: 5 added, 0 changed, 0 destroyed.
154 | ```
155 | 
156 | * Run the script to generate the tf plan for the regional resources (VPC, CodeBuild, CodePipeline, CodeCommit, etc.), inspect the plan, and then run `terraform apply "tfplan"`:
157 | ```shell
158 | ./scripts/run-tf-prod.sh -b <tf_backend_config_prefix>
159 | # Ex: ./scripts/run-tf-prod.sh -b unique-namespace-for-tf-state-created-in-prereqs
160 | # Note: If you are using a region other than us-east-1 for CI/CD tooling, also set the -r param
161 | # For ex: ./scripts/run-tf-prod.sh -b org-awesome-tf-state -r eu-central-1
162 | # check the plan and then run:
163 | terraform apply "tfplan"
164 | ```
165 | * You will see a message like the following for the plan:
166 | ```shell
167 | [..]
168 | Plan: 80 to add, 0 to change, 0 to destroy.
169 | [..]
170 | ```
171 | * You will see a message like the following after apply, including the usual warning about using -target:
172 | ```shell
173 | ╷
174 | │ Warning: Applied changes may be incomplete
175 | │
176 | │ The plan was created with the -target option in effect, so some changes requested in the configuration may have been ignored and the output values may not be fully updated. Run the following command to verify that no other changes are pending:
177 | │ terraform plan
178 | │
179 | │ Note that the -target option is not suitable for routine use, and is provided only for exceptional situations such as recovering from errors or mistakes, or when Terraform specifically suggests to use it as part of an error message.
180 | ╵
181 | Releasing state lock. This may take a few moments...
182 | 
183 | Apply complete! Resources: 80 added, 0 changed, 0 destroyed.
184 | ```
185 | * You just created the CI/CD resources in your central tooling account!
186 | * Next, please see the section on [pushing the infra repo code into AWS CodeCommit in the central tooling account](#step-3-push-the-infra-repo-code-into-aws-codecommit-in-the-central-tooling-account).
187 | 
188 | ## Sample infra workload
189 | Below is the sample infrastructure workload that we will deploy using the pipeline to the target workload account.
190 | Here we are showing only a single-region deployment.
191 | 
192 | ![image](readme-images/target-workload.png)
193 | 
194 | Here is what it looks like after the same sample infrastructure workload is deployed to multiple regions using the pipeline.
195 | 
196 | ![image](readme-images/target-workload-multi-region.png)
197 | 
198 | ### Prerequisite
199 | * Create IAM resources in the workload account using the script below:
200 | ```shell
201 | ./scripts/prerequisites/create-iam-resources_for_workload_account.sh
202 | ```
203 | 
204 | * So far we have created the AWS CodeCommit repository, but it is empty. Next we need to push the sample infra Terraform code into it.
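* If you want to confirm the repository exists and is still empty before pushing, a quick check like the one below can help (a sketch only; it assumes the default repository name `awsome-infra-project` and that your current credentials can read CodeCommit in the central tooling account):
```shell
# Show the repository metadata and its HTTPS clone URL
aws codecommit get-repository --repository-name awsome-infra-project \
  --query 'repositoryMetadata.cloneUrlHttp' --output text

# An empty repository has no branches yet, so this should return an empty list
aws codecommit list-branches --repository-name awsome-infra-project
```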
205 | ### Step 3: Push the infra repo code into AWS CodeCommit in the central tooling account
206 | * Use `aws configure` with your IAM user credentials for the central tooling account and then assume the CloudOps role:
207 | ```shell
208 | # You can use the one-liner below
209 | # For details, see [this](https://aws.amazon.com/premiumsupport/knowledge-center/iam-assume-role-cli/)
210 | # Update 111122223333 below to your central tooling account number
211 | OUT=$(aws sts assume-role --role-arn arn:aws:iam::111122223333:role/CloudOps --role-session-name CLOUD_OPS);export AWS_ACCESS_KEY_ID=$(echo $OUT | jq -r '.Credentials''.AccessKeyId');export AWS_SECRET_ACCESS_KEY=$(echo $OUT | jq -r '.Credentials''.SecretAccessKey');export AWS_SESSION_TOKEN=$(echo $OUT | jq -r '.Credentials''.SessionToken');
212 | 
213 | # Verify you assumed the role
214 | aws sts get-caller-identity
215 | {
216 |     "UserId": "AAA:CLOUD_OPS",
217 |     "Account": "111122223333",
218 |     "Arn": "arn:aws:sts::111122223333:assumed-role/CloudOps/CLOUD_OPS"
219 | }
220 | ```
221 | * Git clone the sample infra code from the repo [aws-sample-infra-resources-terraform](https://github.com/aws-samples/aws-sample-infra-resources-terraform) into your local shadow.
222 | * Update `environments/<env>/<team>/variables.tfvars` per env and team.
223 |   * If you prefer to use different team names, be sure to update the team folder names.
224 |   * Similarly, if you prefer to use different environment names, be sure to update the env folder names. These should match the tag_prefix_list in the aws-multi-region-cicd-with-terraform repo.
225 |   * Mandatory: Update the `account` number with the corresponding target workload account number.
226 |   * Optionally, update `number_of_azs` in variables.tfvars. Otherwise it defaults to 2.
227 |   * You can also set the value in each region's `<region>.tfvars` if you prefer to set it differently for a region (including any other region-specific input param values, though this may result in config drift and is not recommended; please see the accompanying [blog post](https://aws.amazon.com/blogs/devops/multi-region-terraform-deployments-with-aws-codepipeline-using-terraform-built-ci-cd/) for recommendations).
228 | * Go to AWS CodeCommit in the console of the central tooling account -> Repositories -> "awsome-infra-project" and copy the git clone URL (HTTPS (GRC)) under "Clone URL".
229 | * Add the AWS CodeCommit repo as a git remote named "codecommit". If you name the remote differently, make sure to use the same name in the next step! (The `codecommit::` clone URL style requires the git-remote-codecommit helper; see the note at the end of this step.)
230 | ```shell
231 | git remote add codecommit <clone_url>
232 | # Ex:
233 | git remote add codecommit codecommit::us-east-1://awsome-infra-project
234 | ```
235 | * Please be sure to push to the **"main"** branch in the "codecommit" remote repo, as that's the **branch we use in the sample**, unless you set a different default branch name using the default_branch input param:
236 | ```shell
237 | git push codecommit <curr_branch>:main # make sure to update curr_branch to the name of your current branch
238 | # Ex: git push codecommit feature-123:main # if you named your remote repo other than "codecommit", ensure to use the same name here
239 | ```
240 | * Go to AWS CodeCommit in the console of the central tooling account -> Repositories -> "awsome-infra-project". You should see the infra code under Repositories -> Code in the "main" branch.
241 | * For future updates, you can make your changes in the repo as needed and tag to deploy.
242 | 
243 | Now we can deploy the sample infra to the target workload account using a pipeline.
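Note that the `codecommit::<region>://<repo>` clone URL style used above relies on the git-remote-codecommit helper. If the `git remote add`/`git push` commands fail with an unknown-remote-helper error, a minimal setup looks like this (assuming Python and pip are available locally; the branch name `feature-123` is just an illustrative placeholder):
```shell
# git-remote-codecommit turns codecommit::<region>://<repo> URLs into
# authenticated CodeCommit HTTPS calls using your current AWS credentials
pip install git-remote-codecommit

# Then the remote add / push sequence from above works as-is
git remote add codecommit codecommit::us-east-1://awsome-infra-project
git push codecommit feature-123:main
```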
244 | ### Kick off a pipeline to deploy to a target workload account and a target region
245 | * Use `aws configure` with your IAM user credentials for the central tooling account and then assume the CloudOps role:
246 | ```shell
247 | # You can use the one-liner below
248 | # For details, see [this](https://aws.amazon.com/premiumsupport/knowledge-center/iam-assume-role-cli/)
249 | # Update 111122223333 below to your central tooling account number
250 | OUT=$(aws sts assume-role --role-arn arn:aws:iam::111122223333:role/CloudOps --role-session-name CLOUD_OPS);export AWS_ACCESS_KEY_ID=$(echo $OUT | jq -r '.Credentials''.AccessKeyId');export AWS_SECRET_ACCESS_KEY=$(echo $OUT | jq -r '.Credentials''.SecretAccessKey');export AWS_SESSION_TOKEN=$(echo $OUT | jq -r '.Credentials''.SessionToken');
251 | 
252 | # Verify you assumed the role
253 | aws sts get-caller-identity
254 | {
255 |     "UserId": "AAA:CLOUD_OPS",
256 |     "Account": "111122223333",
257 |     "Arn": "arn:aws:sts::111122223333:assumed-role/CloudOps/CLOUD_OPS"
258 | }
259 | ```
260 | * Go to your local shadow of the [aws-sample-infra-resources-terraform](https://github.com/aws-samples/aws-sample-infra-resources-terraform) repo. See examples in the next section and details below.
261 | * Typically, if regional resources depend on global resource(s), you'll need to kick off the global resource pipeline once for the account first so those resources exist before the regional deployments targeting that same account.
262 | * Global resources are deployed once per account. Regional resources are deployed once per region in the account.
263 | * First deploy the global resources by git tagging the AWS CodeCommit repo with the format `<env>_global/<team>/<version>` (env can be one of (dev, qa, staging, prod) or what you set in tag_prefix_list as per the previous instructions, and team can be one of (risk, research) or what you set as per the previous instructions, as defined in the infra repo environments config).
264 | * Then deploy the regional resources by git tagging the AWS CodeCommit repo with the format `<env>_<region>/<team>/<version>` (same env and team conventions as above).
265 | * Below is what happens when you git tag the AWS CodeCommit repo:
266 |   * The git tagging first kicks off the respective `<env>-awsome-infra-project-src` AWS CodeBuild project, which adds the git tag along with a full clone of the repo into an S3 source bucket.
267 |   * This triggers the AWS CodePipeline pipeline named `<env>-awsome-infra-project-deploy` for this environment. The pipeline performs the deployment once you approve each manual approval step. For full details, please also see the accompanying [blog post](https://aws.amazon.com/blogs/devops/multi-region-terraform-deployments-with-aws-codepipeline-using-terraform-built-ci-cd/).
268 | * You can see the tags created under Repository -> Git tags.
269 | * You can see the running CodeBuild job logs under Build -> Build Projects -> Click on the job -> Build History tab -> Click on Build run -> Build Logs tab 270 | #### Examples: 271 | ##### Dev pipeline: 272 | * Global resources for research dev account: 273 | ```shell 274 | TAG=dev_global/research/1.0; git tag -a $TAG -m "Deploying $TAG"; git push codecommit $TAG 275 | ``` 276 | * Regional resources for research dev account: 277 | ```shell 278 | TAG=dev_us-east-1/research/1.0; git tag -a $TAG -m "Deploying $TAG"; git push codecommit $TAG 279 | TAG=dev_eu-central-1/research/1.0; git tag -a $TAG -m "Deploying $TAG"; git push codecommit $TAG 280 | TAG=dev_ap-southeast-1/research/1.0; git tag -a $TAG -m "Deploying $TAG"; git push codecommit $TAG 281 | ``` 282 | * Global resources for risk dev account: 283 | ```shell 284 | TAG=dev_global/risk/1.2; git tag -a $TAG -m "Deploying $TAG"; git push codecommit $TAG 285 | ``` 286 | * Regional resources: 287 | ```shell 288 | TAG=dev_eu-central-1/risk/1.2; git tag -a $TAG -m "Deploying $TAG"; git push codecommit $TAG 289 | ``` 290 | ##### QA pipeline: 291 | * Global resources for research qa account: 292 | ```shell 293 | TAG=qa_global/research/1.0; git tag -a $TAG -m "Deploying $TAG"; git push codecommit $TAG 294 | ``` 295 | * Regional resources: 296 | ```shell 297 | TAG=qa_us-east-1/research/1.0; git tag -a $TAG -m "Deploying $TAG"; git push codecommit $TAG 298 | ``` 299 | * Global resources for risk qa account: 300 | ```shell 301 | TAG=qa_global/risk/1.2; git tag -a $TAG -m "Deploying $TAG"; git push codecommit $TAG 302 | ``` 303 | * Regional resources: 304 | ```shell 305 | TAG=qa_eu-central-1/risk/1.2; git tag -a $TAG -m "Deploying $TAG"; git push codecommit $TAG 306 | ``` 307 | ##### Staging pipeline: 308 | * Global resources for research staging account: 309 | ```shell 310 | TAG=staging_global/research/1.0; git tag -a $TAG -m "Deploying $TAG"; git push codecommit $TAG 311 | ``` 312 | * Regional resources: 313 | ```shell 314 | TAG=staging_us-east-1/research/1.0; git tag -a $TAG -m "Deploying $TAG"; git push codecommit $TAG 315 | ``` 316 | * Global resources for risk staging account: 317 | ```shell 318 | TAG=staging_global/risk/1.2; git tag -a $TAG -m "Deploying $TAG"; git push codecommit $TAG 319 | ``` 320 | * Regional resources: 321 | ```shell 322 | TAG=staging_eu-central-1/risk/1.2; git tag -a $TAG -m "Deploying $TAG"; git push codecommit $TAG 323 | ``` 324 | ##### Prod pipeline: 325 | * Global resources for research prod account: 326 | ```shell 327 | TAG=prod_global/research/1.0; git tag -a $TAG -m "Deploying $TAG"; git push codecommit $TAG 328 | ``` 329 | * Regional resources: 330 | ```shell 331 | TAG=prod_us-east-1/research/1.0; git tag -a $TAG -m "Deploying $TAG"; git push codecommit $TAG 332 | ``` 333 | * Global resources for risk prod account: 334 | ```shell 335 | TAG=prod_global/risk/1.2; git tag -a $TAG -m "Deploying $TAG"; git push codecommit $TAG 336 | ``` 337 | * Regional resources: 338 | ```shell 339 | TAG=prod_eu-central-1/risk/1.2; git tag -a $TAG -m "Deploying $TAG"; git push codecommit $TAG 340 | ``` 341 | 342 | ## Instructions to destroy resources 343 | * Order matters here. Perform in below order! 
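* At a glance, the tear-down sequence looks like the sketch below (illustrative only; the steps that follow carry the authoritative details and arguments, and the values shown are the same example values used later in this section):
```shell
# 1. Destroy the sample workload resources in every target account/region
#    (see the aws-sample-infra-resources-terraform README).

# 2. Destroy the regional CI/CD resources in the central tooling account,
#    then apply the generated plan:
./scripts/run-tf-prod-destroy.sh -t prod_us-east-1/tooling/1.0 -b org-awesome-tf-state -r us-east-1 -g us-east-1
terraform apply "tfplan"

# 3. Destroy the global CI/CD resources in the central tooling account:
./scripts/run-tf-prod-global-destroy.sh -t prod_global/tooling/1.0 -b org-awesome-tf-state -r us-east-1 -g us-east-1
terraform apply "tfplan"

# 4. Only then remove the Terraform state buckets/lock tables and the
#    prerequisite IAM roles (InfraBuildRole, CloudOps).
```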
344 | 
345 | ### Step 1: Destroy the resources in the target workload account(s)
346 | #### Prerequisites
347 | * Use `aws configure` with your IAM user credentials for the central tooling account and then assume InfraBuildRole:
348 | ```shell
349 | # You can use the one-liner below
350 | # For details, see [this](https://aws.amazon.com/premiumsupport/knowledge-center/iam-assume-role-cli/)
351 | OUT=$(aws sts assume-role --role-arn arn:aws:iam::111122223333:role/InfraBuildRole --role-session-name INFRA_BUILD);export AWS_ACCESS_KEY_ID=$(echo $OUT | jq -r '.Credentials''.AccessKeyId');export AWS_SECRET_ACCESS_KEY=$(echo $OUT | jq -r '.Credentials''.SecretAccessKey');export AWS_SESSION_TOKEN=$(echo $OUT | jq -r '.Credentials''.SessionToken');
352 | 
353 | # Verify you assumed the role
354 | aws sts get-caller-identity
355 | {
356 |     "UserId": "AAA:INFRA_BUILD",
357 |     "Account": "111122223333",
358 |     "Arn": "arn:aws:sts::111122223333:assumed-role/InfraBuildRole/INFRA_BUILD"
359 | }
360 | ```
361 | * See the README of the sister repo [aws-sample-infra-resources-terraform](https://github.com/aws-samples/aws-sample-infra-resources-terraform) to destroy the sample infra workload resources in the target workload account(s).
362 | * Ensure all resources created by the sample in the target workload account(s) in each region are destroyed prior to proceeding to the next step.
363 | 
364 | ### Step 2: Destroy the resources in the central tooling account created in this repo
365 | * As InfraBuildRole in the central tooling account, **perform the following in the order below**:
366 | * To destroy the regional resources, first run the script `./scripts/run-tf-prod-destroy.sh` passing in args as needed, inspect the tf plan, and then run `terraform apply "tfplan"`:
367 | ```shell
368 | ./scripts/run-tf-prod-destroy.sh -t <tag> -b <tf_backend_config_prefix> -r <region> -g <global_resource_deploy_from_region>
369 | # Ex: ./scripts/run-tf-prod-destroy.sh -t prod_us-east-1/tooling/1.0 -b org-awesome-tf-state -r us-east-1 -g us-east-1
370 | ```
371 | * Then, to destroy the global resources, run the script `./scripts/run-tf-prod-global-destroy.sh` passing in args as needed, inspect the tf plan, and then run `terraform apply "tfplan"`:
372 | ```shell
373 | ./scripts/run-tf-prod-global-destroy.sh -t <tag> -b <tf_backend_config_prefix> -r <region> -g <global_resource_deploy_from_region>
374 | # Ex: ./scripts/run-tf-prod-global-destroy.sh -t prod_global/tooling/1.0 -g eu-central-1 -r eu-central-1 -b org-awesome-tf-state
375 | ```
376 | ### Step 3: Destroy the Terraform state resources
377 | * Please proceed with caution! You will need the Terraform state resources for the above two steps ([destroying the central tooling resources](#step-2-destroy-the-resources-in-the-central-tooling-account-created-in-this-repo) and [the target workload resources](#step-1-destroy-the-resources-in-the-target-workload-accounts) created by these sample projects). Do not destroy the Terraform remote state resources until after you are done with both!
378 | * The S3 buckets for Terraform state and the DynamoDB tables for Terraform state locks that you created in your central tooling account outside of these sample projects also need to be deleted by you, if you no longer need any of the Terraform state files stored in them.
379 | 
380 | ### Step 4: Destroy the IAM roles InfraBuildRole and CloudOps, and the AWS accounts
381 | * Now you can destroy the IAM roles created as part of the prerequisites: InfraBuildRole and CloudOps in the central tooling account, and InfraBuildRole in the target workload account.
382 | * If you created them solely for this sample, you can also destroy the AWS accounts you used for this sample.
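* If the prerequisite roles only carry the policies created by the `scripts/prerequisites/` scripts, a CLI cleanup along these lines works (a sketch; run it with credentials allowed to manage IAM in the respective account, and drop `CloudOps` from the list when cleaning up the target workload account):
```shell
for ROLE in InfraBuildRole CloudOps; do
  # Inline policies must be deleted before the role itself
  for POLICY in $(aws iam list-role-policies --role-name "$ROLE" --query 'PolicyNames[]' --output text); do
    aws iam delete-role-policy --role-name "$ROLE" --policy-name "$POLICY"
  done
  # Detach any managed policies as well
  for ARN in $(aws iam list-attached-role-policies --role-name "$ROLE" --query 'AttachedPolicies[].PolicyArn' --output text); do
    aws iam detach-role-policy --role-name "$ROLE" --policy-arn "$ARN"
  done
  aws iam delete-role --role-name "$ROLE"
done
```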
383 | 384 | ### Common Errors or Warnings 385 | * If you see below error, you are hitting the number of concurrent builds limit in your account. Check all the CodeBuild builds that are in progress to confirm and proceed accordingly. 386 | ``` 387 | Action execution failed 388 | Error calling startBuild: Cannot have more than 1 builds in queue for the account (Service: AWSCodeBuild; Status Code: 400; Error Code: AccountLimitExceededException; Request ID: XXX; Proxy: null) 389 | ``` 390 | * If you tag multiple times in quick succession that may result in multiple pipeline executions queuing up for the same pipeline stage (say Terraform Build), this then would cause waiting executions that get superseded by more recent executions of the pipeline. 391 | * You can see pipelines history to see which executions got superseded at `https://console.aws.amazon.com/codesuite/codepipeline/pipelines/-awsome-infra-project-deploy/executions?region=us-east-1` 392 | * For more details, please also see: https://docs.aws.amazon.com/codepipeline/latest/userguide/concepts-how-it-works.html -> Rule 3 393 | * You can instead have a pipeline per env, team and region such that the executions are for the same account and region at all times. 394 | * Because we are using -target, you will see below warning as expected: 395 | ``` 396 | │ Warning: Applied changes may be incomplete 397 | │ 398 | │ The plan was created with the -target option in effect, so some changes requested in the configuration may have been ignored and the output values 399 | │ may not be fully updated. Run the following command to verify that no other changes are pending: 400 | │ terraform plan 401 | │ 402 | │ Note that the -target option is not suitable for routine use, and is provided only for exceptional situations such as recovering from errors or 403 | │ mistakes, or when Terraform specifically suggests to use it as part of an error message. 404 | ``` 405 | * When you first deploy the pipelines, they all run by default and because there is nothing in S3 source bucket, they show as "Failed" in the console. This behavior of pipelines running at first deployment is as expected so you can safely ignore this. 406 | * tflint output junit report is empty when tflint is successful. Due to this, we see a NaN report in CodeBuild report tab when tflint is successful as per the log output. 407 | 408 | ## Security 409 | 410 | See [CONTRIBUTING](CONTRIBUTING.md#security-issue-notifications) for more information. 411 | 412 | ## License 413 | 414 | This library is licensed under the MIT-0 License. See the LICENSE file. 
415 | 416 | 417 | ## Requirements 418 | 419 | | Name | Version | 420 | |------|---------| 421 | | [terraform](#requirement\_terraform) | > 0.14 | 422 | | [aws](#requirement\_aws) | ~> 3.74 | 423 | 424 | ## Providers 425 | 426 | | Name | Version | 427 | |------|---------| 428 | | [aws](#provider\_aws) | 3.74.0 | 429 | 430 | ## Modules 431 | 432 | | Name | Source | Version | 433 | |------|--------|---------| 434 | | [global](#module\_global) | ./modules/global | n/a | 435 | | [regional](#module\_regional) | ./modules/regional | n/a | 436 | 437 | ## Resources 438 | 439 | | Name | Type | 440 | |------|------| 441 | | [aws_caller_identity.current](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/data-sources/caller_identity) | data source | 442 | | [aws_region.current](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/data-sources/region) | data source | 443 | 444 | ## Inputs 445 | 446 | | Name | Description | Type | Default | Required | 447 | |------|-------------|------|---------|:--------:| 448 | | [account](#input\_account) | Target AWS account number | `number` | n/a | yes | 449 | | [codebuild\_artifacts\_prefix](#input\_codebuild\_artifacts\_prefix) | A prefix for S3 bucket name to house the AWS CodeBuild artifacts for cache, etc. | `string` | `"awsome-cb-artifact"` | no | 450 | | [codepipeline\_artifacts\_prefix](#input\_codepipeline\_artifacts\_prefix) | A prefix for S3 bucket name to house the AWS CodePipeline artifacts for cache, etc. | `string` | `"awsome-cp-artifact"` | no | 451 | | [env](#input\_env) | Environment name | `string` | `"dev"` | no | 452 | | [global\_resource\_deploy\_from\_region](#input\_global\_resource\_deploy\_from\_region) | Region from which to deploy global resources in our pipeline | `string` | `"us-east-1"` | no | 453 | | [number\_of\_azs](#input\_number\_of\_azs) | Number of azs to deploy to | `number` | `3` | no | 454 | | [region](#input\_region) | Target region | `string` | `"us-east-1"` | no | 455 | | [source\_repo\_bucket\_prefix](#input\_source\_repo\_bucket\_prefix) | A prefix for S3 bucket name to house the src code in the Source stage post tagging | `string` | `"awsome-cb-repo"` | no | 456 | | [tag\_prefix\_list](#input\_tag\_prefix\_list) | List of tag prefixes | `list(string)` |
["dev", "qa", "staging", "prod"]
| no | 457 | | [target\_accounts](#input\_target\_accounts) | List of target accounts | `list(string)` | n/a | yes | 458 | | [tf\_backend\_config\_prefix](#input\_tf\_backend\_config\_prefix) | A name to prefix the S3 bucket for terraform state files and the DynamoDB table for terraform state locks for backend config | `string` | n/a | yes | 459 | 460 | ## Outputs 461 | 462 | | Name | Description | 463 | |------|-------------| 464 | | [account\_id](#output\_account\_id) | The effective account id in which Terraform is operating | 465 | | [caller\_arn](#output\_caller\_arn) | The effective user arn that Terraform is running as | 466 | | [caller\_user](#output\_caller\_user) | The effective user id that Terraform is running as | 467 | | [region](#output\_region) | The region in which Terraform is operating | 468 | 469 | -------------------------------------------------------------------------------- /environments/prod/tooling/ap-southeast-1.tfvars: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/aws-multi-region-cicd-with-terraform/8eff661a51acda550415d70adca7bcf652b46286/environments/prod/tooling/ap-southeast-1.tfvars -------------------------------------------------------------------------------- /environments/prod/tooling/eu-central-1.tfvars: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/aws-multi-region-cicd-with-terraform/8eff661a51acda550415d70adca7bcf652b46286/environments/prod/tooling/eu-central-1.tfvars -------------------------------------------------------------------------------- /environments/prod/tooling/us-east-1.tfvars: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/aws-multi-region-cicd-with-terraform/8eff661a51acda550415d70adca7bcf652b46286/environments/prod/tooling/us-east-1.tfvars -------------------------------------------------------------------------------- /environments/prod/tooling/variables.tfvars: -------------------------------------------------------------------------------- 1 | account="111122223333" 2 | target_accounts=["555555555555", "444455556666"] 3 | -------------------------------------------------------------------------------- /main.tf: -------------------------------------------------------------------------------- 1 | data "aws_caller_identity" "current" {} 2 | data "aws_region" "current" {} 3 | 4 | locals { 5 | name = "demo" 6 | } 7 | 8 | # Regional CI/CD Resources such as CodeBuild, CodePipeline, CodeCommit resources 9 | module "regional" { 10 | source = "./modules/regional" 11 | env = var.env 12 | tag_prefix_list = var.tag_prefix_list 13 | name = local.name 14 | number_of_azs = var.number_of_azs 15 | global_resource_deploy_from_region = var.global_resource_deploy_from_region 16 | codebuild_artifacts_prefix = var.codebuild_artifacts_prefix 17 | source_repo_bucket_prefix = var.source_repo_bucket_prefix 18 | codepipeline_artifacts_prefix = var.codepipeline_artifacts_prefix 19 | tf_backend_config_prefix = var.tf_backend_config_prefix 20 | } 21 | 22 | # Provider to deploy global resources from the region set in var.global_resource_deploy_from_region 23 | provider "aws" { 24 | alias = "global_resource_deploy_from_region" 25 | region = var.global_resource_deploy_from_region 26 | assume_role { 27 | role_arn = "arn:aws:iam::${var.account}:role/InfraBuildRole" 28 | session_name = "INFRA_BUILD" 29 | } 30 | } 31 | 32 | # 
Global CI/CD resources such as IAM roles 33 | module "global" { 34 | source = "./modules/global" 35 | env = var.env 36 | target_accounts = var.target_accounts 37 | tag_prefix_list = var.tag_prefix_list 38 | name = local.name 39 | 40 | providers = { 41 | aws = aws.global_resource_deploy_from_region 42 | } 43 | } -------------------------------------------------------------------------------- /modules/global/README.md: -------------------------------------------------------------------------------- 1 | 2 | ## Requirements 3 | 4 | | Name | Version | 5 | |------|---------| 6 | | [aws](#requirement\_aws) | >= 3.74 | 7 | 8 | ## Providers 9 | 10 | | Name | Version | 11 | |------|---------| 12 | | [aws](#provider\_aws) | >= 3.74 | 13 | | [template](#provider\_template) | n/a | 14 | 15 | ## Modules 16 | 17 | No modules. 18 | 19 | ## Resources 20 | 21 | | Name | Type | 22 | |------|------| 23 | | [aws_iam_role.cloudwatch_event_role](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/iam_role) | resource | 24 | | [aws_iam_role.codebuild_role](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/iam_role) | resource | 25 | | [aws_iam_role.codepipeline_role](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/iam_role) | resource | 26 | | [aws_iam_role_policy.attach_codebuild_policy](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/iam_role_policy) | resource | 27 | | [aws_iam_role_policy.attach_codepipeline_policy](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/iam_role_policy) | resource | 28 | | [aws_caller_identity.current](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/data-sources/caller_identity) | data source | 29 | | [aws_iam_policy_document.codepipeline_assume_policy](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/data-sources/iam_policy_document) | data source | 30 | | [aws_region.current](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/data-sources/region) | data source | 31 | | [template_file.codebuild_policy_template](https://registry.terraform.io/providers/hashicorp/template/latest/docs/data-sources/file) | data source | 32 | | [template_file.codepipeline_policy_template](https://registry.terraform.io/providers/hashicorp/template/latest/docs/data-sources/file) | data source | 33 | 34 | ## Inputs 35 | 36 | | Name | Description | Type | Default | Required | 37 | |------|-------------|------|---------|:--------:| 38 | | [env](#input\_env) | Environment name | `any` | n/a | yes | 39 | | [name](#input\_name) | Name to give resources | `any` | n/a | yes | 40 | | [tag\_prefix\_list](#input\_tag\_prefix\_list) | List of tag prefixes | `list(string)` | n/a | yes | 41 | | [target\_accounts](#input\_target\_accounts) | List of target accounts | `list(string)` | n/a | yes | 42 | 43 | ## Outputs 44 | 45 | | Name | Description | 46 | |------|-------------| 47 | | [cloudwatch\_event\_role\_arn](#output\_cloudwatch\_event\_role\_arn) | n/a | 48 | | [codebuild\_role\_arn](#output\_codebuild\_role\_arn) | n/a | 49 | | [codepipeline\_role\_arn](#output\_codepipeline\_role\_arn) | n/a | 50 | -------------------------------------------------------------------------------- /modules/global/iam.tf: -------------------------------------------------------------------------------- 1 | terraform { 2 | required_providers { 3 | aws = { 4 | source = "hashicorp/aws" 5 | version = ">= 3.74" 6 | } 7 | } 8 | } 9 | 10 | data 
"aws_caller_identity" "current" {} 11 | data "aws_region" "current" {} 12 | 13 | locals { 14 | region = data.aws_region.current.name 15 | account = data.aws_caller_identity.current.account_id 16 | 17 | tags = { 18 | Environment = var.env 19 | Name = var.name 20 | } 21 | } 22 | 23 | resource "aws_iam_role" "cloudwatch_event_role" { 24 | name = "cloudwatch-event-role" 25 | 26 | assume_role_policy = < 2 | ## Requirements 3 | 4 | No requirements. 5 | 6 | ## Providers 7 | 8 | | Name | Version | 9 | |------|---------| 10 | | [aws](#provider\_aws) | n/a | 11 | | [template](#provider\_template) | n/a | 12 | 13 | ## Modules 14 | 15 | | Name | Source | Version | 16 | |------|--------|---------| 17 | | [endpoints](#module\_endpoints) | terraform-aws-modules/vpc/aws//modules/vpc-endpoints | 3.7.0 | 18 | | [vpc](#module\_vpc) | ./vpc | n/a | 19 | 20 | ## Resources 21 | 22 | | Name | Type | 23 | |------|------| 24 | | [aws_cloudwatch_event_rule.trigger_build_on_tag_updates](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/cloudwatch_event_rule) | resource | 25 | | [aws_cloudwatch_event_rule.trigger_pipeline_on_s3_updates](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/cloudwatch_event_rule) | resource | 26 | | [aws_cloudwatch_event_target.codebuild](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/cloudwatch_event_target) | resource | 27 | | [aws_cloudwatch_event_target.codepipeline](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/cloudwatch_event_target) | resource | 28 | | [aws_codebuild_project.build_upon_tag_creation](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/codebuild_project) | resource | 29 | | [aws_codebuild_project.terraform](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/codebuild_project) | resource | 30 | | [aws_codecommit_repository.awsomerepo](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/codecommit_repository) | resource | 31 | | [aws_codepipeline.infra_pipeline](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/codepipeline) | resource | 32 | | [aws_iam_role_policy.attach_cwe_policy](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/iam_role_policy) | resource | 33 | | [aws_iam_role_policy.attach_s3_bucket_policy](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/iam_role_policy) | resource | 34 | | [aws_iam_role_policy.attach_s3_bucket_policy_codepipeline](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/iam_role_policy) | resource | 35 | | [aws_kms_key.artifact_encryption_key](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/kms_key) | resource | 36 | | [aws_s3_bucket.codebuild_artifacts](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/s3_bucket) | resource | 37 | | [aws_s3_bucket.codebuild_repo_artifacts](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/s3_bucket) | resource | 38 | | [aws_s3_bucket.codepipeline_artifacts](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/s3_bucket) | resource | 39 | | [aws_s3_bucket_notification.bucket_notification](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/s3_bucket_notification) | resource | 40 | | 
[aws_s3_bucket_public_access_block.codebuild_artifacts](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/s3_bucket_public_access_block) | resource | 41 | | [aws_s3_bucket_public_access_block.codebuild_repo_artifacts](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/s3_bucket_public_access_block) | resource | 42 | | [aws_s3_bucket_public_access_block.codepipeline_artifacts](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/s3_bucket_public_access_block) | resource | 43 | | [aws_availability_zones.all](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/data-sources/availability_zones) | data source | 44 | | [aws_caller_identity.current](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/data-sources/caller_identity) | data source | 45 | | [aws_iam_role.cloudwatch_event_role](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/data-sources/iam_role) | data source | 46 | | [aws_iam_role.codebuild_role](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/data-sources/iam_role) | data source | 47 | | [aws_iam_role.codepipeline_role](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/data-sources/iam_role) | data source | 48 | | [aws_region.current](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/data-sources/region) | data source | 49 | | [template_file.cloudwatchevent_policy_template](https://registry.terraform.io/providers/hashicorp/template/latest/docs/data-sources/file) | data source | 50 | | [template_file.key_policy_template](https://registry.terraform.io/providers/hashicorp/template/latest/docs/data-sources/file) | data source | 51 | | [template_file.s3_bucket_policy_codebuild_template](https://registry.terraform.io/providers/hashicorp/template/latest/docs/data-sources/file) | data source | 52 | | [template_file.s3_bucket_policy_codepipeline_template](https://registry.terraform.io/providers/hashicorp/template/latest/docs/data-sources/file) | data source | 53 | 54 | ## Inputs 55 | 56 | | Name | Description | Type | Default | Required | 57 | |------|-------------|------|---------|:--------:| 58 | | [build\_spec\_file](#input\_build\_spec\_file) | Build spec file name for the pipeline | `map` |
{ "terraform_apply": "buildspec-terraform_apply.yml", "terraform_checkov": "buildspec-terraform_checkov.yml", "terraform_plan": "buildspec-terraform_plan.yml", "terraform_tflint": "buildspec-terraform_tflint.yml" }
| no | 59 | | [codebuild\_artifacts\_prefix](#input\_codebuild\_artifacts\_prefix) | A prefix for S3 bucket name to house the AWS CodeBuild artifacts for cache, etc. | `any` | n/a | yes | 60 | | [codepipeline\_artifacts\_prefix](#input\_codepipeline\_artifacts\_prefix) | A prefix for S3 bucket name to house the AWS CodePipeline artifacts for logs, etc. | `any` | n/a | yes | 61 | | [default\_branch](#input\_default\_branch) | Name of the default branch for the repo | `string` | `"main"` | no | 62 | | [env](#input\_env) | Environment name | `any` | n/a | yes | 63 | | [global\_resource\_deploy\_from\_region](#input\_global\_resource\_deploy\_from\_region) | Region from which to deploy global resources in our pipeline | `any` | n/a | yes | 64 | | [name](#input\_name) | Name to give resources | `any` | n/a | yes | 65 | | [number\_of\_azs](#input\_number\_of\_azs) | Number of azs to deploy to | `any` | n/a | yes | 66 | | [repository\_name](#input\_repository\_name) | Name of the remote source repository | `string` | `"awsome-infra-project"` | no | 67 | | [source\_repo\_bucket\_prefix](#input\_source\_repo\_bucket\_prefix) | A prefix for S3 bucket name to house the src code in the Source stage post tagging | `any` | n/a | yes | 68 | | [tag\_prefix\_list](#input\_tag\_prefix\_list) | List of tag prefixes | `list(string)` | n/a | yes | 69 | | [tf\_backend\_config\_prefix](#input\_tf\_backend\_config\_prefix) | A name to prefix the s3 bucket for terraform state files and the dyanamodb table for terraform state locks for backend config | `any` | n/a | yes | 70 | 71 | ## Outputs 72 | 73 | No outputs. 74 | -------------------------------------------------------------------------------- /modules/regional/buildspec-tagged_source.yml: -------------------------------------------------------------------------------- 1 | version: 0.2 2 | 3 | phases: 4 | build: 5 | commands: 6 | - echo "Add tag ${TAG} to source" 7 | - cd "${CODEBUILD_SRC_DIR}" 8 | - echo "${TAG}" > tag.txt 9 | - cat tag.txt 10 | post_build: 11 | commands: 12 | - echo "[Post Build]:Completed adding tag to source..." 13 | artifacts: 14 | files: 15 | - '**/*' -------------------------------------------------------------------------------- /modules/regional/buildspec-terraform_apply.yml: -------------------------------------------------------------------------------- 1 | version: 0.2 2 | env: 3 | variables: 4 | TERRAFORM_VERSION: 1.0.7 5 | 6 | phases: 7 | install: 8 | commands: 9 | - echo "Installing terraform..." 10 | - cd /usr/bin 11 | - curl -O https://releases.hashicorp.com/terraform/${TERRAFORM_VERSION}/terraform_${TERRAFORM_VERSION}_linux_amd64.zip 12 | - unzip terraform_${TERRAFORM_VERSION}_linux_amd64.zip 13 | - terraform --version 14 | - echo "Completed installing terraform..." 
15 | build: 16 | commands: 17 | - echo "Terraform apply for ${TEAM}/${ENV}-${TARGET_DEPLOYMENT_SCOPE} in REGION=${REGION}" 18 | - echo "TARGET_MODULE=${TARGET_MODULE}" 19 | - echo "REGION_TFVARS=${REGION_TFVARS}" 20 | - cd "${CODEBUILD_SRC_DIR}" 21 | - echo "terraform init -backend-config="key=${TEAM}/${ENV}-${TARGET_DEPLOYMENT_SCOPE}/terraform.tfstate" -backend-config="region=$AWS_REGION" -backend-config="bucket=${TF_BACKEND_CONFIG_PREFIX}-${ENV}" -backend-config="dynamodb_table=${TF_BACKEND_CONFIG_PREFIX}-lock-${ENV}" -backend-config="encrypt=true"" 22 | - terraform init -backend-config="key=${TEAM}/${ENV}-${TARGET_DEPLOYMENT_SCOPE}/terraform.tfstate" -backend-config="region=$AWS_REGION" -backend-config="bucket=${TF_BACKEND_CONFIG_PREFIX}-${ENV}" -backend-config="dynamodb_table=${TF_BACKEND_CONFIG_PREFIX}-lock-${ENV}" -backend-config="encrypt=true" 23 | - echo "terraform plan -var-file "environments/${ENV}/${TEAM}/variables.tfvars" ${REGION_TFVARS} -var "env=${ENV}" -var "region=${REGION}" -target ${TARGET_MODULE} -out=tfplan" # in case of retry 24 | - terraform plan -var-file "environments/${ENV}/${TEAM}/variables.tfvars" ${REGION_TFVARS} -var "env=${ENV}" -var "region=${REGION}" -target ${TARGET_MODULE} -out=tfplan -compact-warnings # in case of retry 25 | - terraform apply -auto-approve "tfplan" -compact-warnings 26 | post_build: 27 | commands: 28 | - echo "[Post Build]:Completed terraform apply..." -------------------------------------------------------------------------------- /modules/regional/buildspec-terraform_checkov.yml: -------------------------------------------------------------------------------- 1 | version: 0.2 2 | env: 3 | exported-variables: 4 | - review_link 5 | - tests 6 | - failures 7 | 8 | phases: 9 | install: 10 | commands: 11 | - echo "Installing checkov..." 12 | - pip install checkov 13 | - checkov -v 14 | - echo "Completed installing checkov..." 15 | build: 16 | commands: 17 | - echo "Starting checkov static analysis" 18 | - cd "${CODEBUILD_SRC_DIR}" 19 | - checkov --soft-fail -d . --output junitxml | tee checkov.xml 20 | - export failures=$(xmllint --xpath '//testsuites/@failures' checkov.xml) 21 | - export tests=$(xmllint --xpath '//testsuites/@tests' checkov.xml) 22 | - export review_link="https://${AWS_REGION}.console.aws.amazon.com/codesuite/codebuild/${ACCOUNT}/projects/terraform_checkov/build/${CODEBUILD_BUILD_ID}/reports?region=${AWS_REGION}" 23 | post_build: 24 | commands: 25 | - echo "[Post Build]:Completed checkov..." 26 | reports: 27 | checkov-report-group: 28 | files: 29 | - 'checkov.xml' 30 | file-format: "JUNITXML" -------------------------------------------------------------------------------- /modules/regional/buildspec-terraform_plan.yml: -------------------------------------------------------------------------------- 1 | version: 0.2 2 | env: 3 | variables: 4 | TERRAFORM_VERSION: 1.0.7 5 | exported-variables: 6 | - build_id 7 | - build_tag 8 | - pipeline_region 9 | 10 | phases: 11 | install: 12 | commands: 13 | - echo "Installing terraform..." 14 | - cd /usr/bin 15 | - curl -O https://releases.hashicorp.com/terraform/${TERRAFORM_VERSION}/terraform_${TERRAFORM_VERSION}_linux_amd64.zip 16 | - unzip terraform_${TERRAFORM_VERSION}_linux_amd64.zip 17 | - terraform --version 18 | - echo "Completed installing terraform..." 
19 | build: 20 | commands: 21 | - echo "Terraform plan for ${TEAM}/${ENV}-${TARGET_DEPLOYMENT_SCOPE} in REGION=${REGION}" 22 | - echo "TARGET_MODULE=${TARGET_MODULE}" 23 | - echo "REGION_TFVARS=${REGION_TFVARS}" 24 | - export build_id=$(echo $CODEBUILD_BUILD_ID | cut -d':' -f1) 25 | - export build_tag=$(echo $CODEBUILD_BUILD_ID | cut -d':' -f2) 26 | - export pipeline_region=$AWS_REGION 27 | - cd "${CODEBUILD_SRC_DIR}" 28 | - echo "terraform init -backend-config="key=${TEAM}/${ENV}-${TARGET_DEPLOYMENT_SCOPE}/terraform.tfstate" -backend-config="region=$AWS_REGION" -backend-config="bucket=${TF_BACKEND_CONFIG_PREFIX}-${ENV}" -backend-config="dynamodb_table=${TF_BACKEND_CONFIG_PREFIX}-lock-${ENV}" -backend-config="encrypt=true"" 29 | - terraform init -backend-config="key=${TEAM}/${ENV}-${TARGET_DEPLOYMENT_SCOPE}/terraform.tfstate" -backend-config="region=$AWS_REGION" -backend-config="bucket=${TF_BACKEND_CONFIG_PREFIX}-${ENV}" -backend-config="dynamodb_table=${TF_BACKEND_CONFIG_PREFIX}-lock-${ENV}" -backend-config="encrypt=true" 30 | - terraform fmt 31 | - terraform validate -no-color 32 | - echo "terraform plan -var-file "environments/${ENV}/${TEAM}/variables.tfvars" ${REGION_TFVARS} -var "env=${ENV}" -var "region=${REGION}" -target ${TARGET_MODULE} -out=tfplan" 33 | - terraform plan -var-file "environments/${ENV}/${TEAM}/variables.tfvars" ${REGION_TFVARS} -var "env=${ENV}" -var "region=${REGION}" -target ${TARGET_MODULE} -out=tfplan -compact-warnings 34 | 35 | post_build: 36 | commands: 37 | - echo "[Post Build]:Completed terraform plan..." 38 | artifacts: 39 | files: 40 | - '**/*' -------------------------------------------------------------------------------- /modules/regional/buildspec-terraform_tflint.yml: -------------------------------------------------------------------------------- 1 | version: 0.2 2 | env: 3 | variables: 4 | TFLINT_VERSION: "0.34.1" 5 | TFLINT_OS: "amd64" 6 | TERRAFORM_VERSION: 1.0.7 7 | exported-variables: 8 | - ENV 9 | - TARGET_DEPLOYMENT_SCOPE 10 | - TEAM 11 | - REGION_TFVARS 12 | - TARGET_MODULE 13 | - REGION 14 | - TAG 15 | 16 | phases: 17 | install: 18 | commands: 19 | - echo "Installing terraform" 20 | - cd /usr/bin 21 | - curl -O https://releases.hashicorp.com/terraform/${TERRAFORM_VERSION}/terraform_${TERRAFORM_VERSION}_linux_amd64.zip 22 | - unzip terraform_${TERRAFORM_VERSION}_linux_amd64.zip 23 | - terraform --version 24 | - echo "Completed installing terraform" 25 | - echo "Installing tflint" 26 | - cd "${CODEBUILD_SRC_DIR}" 27 | - wget https://github.com/terraform-linters/tflint/releases/download/v${TFLINT_VERSION}/tflint_linux_${TFLINT_OS}.zip -O tflint.zip 28 | - unzip tflint.zip 29 | - | 30 | cat <<-EOF > .tflint.hcl 31 | plugin "aws" { 32 | enabled = true 33 | version = "0.11.0" 34 | source = "github.com/terraform-linters/tflint-ruleset-aws" 35 | } 36 | EOF 37 | - echo "Installing tflint plugins" 38 | - cat .tflint.hcl 39 | - ./tflint --init 40 | - cd "${CODEBUILD_SRC_DIR}" 41 | - export TAG=$(cat tag.txt) 42 | - export ENV=$(echo $TAG | cut -d/ -f1 | cut -d_ -f1) 43 | - export TARGET_DEPLOYMENT_SCOPE=$(echo $TAG | cut -d/ -f1 | cut -d_ -f2) 44 | - export TEAM=$(echo $TAG | cut -d/ -f2) 45 | - export TARGET_MODULE=$(if [[ ${TARGET_DEPLOYMENT_SCOPE} == *"global"* ]];then echo "module.global"; else echo "module.regional"; fi) 46 | - export REGION=$(if [[ ${TARGET_DEPLOYMENT_SCOPE} == *"global"* ]];then echo "${GLOBAL_RESOURCE_DEPLOY_FROM_REGION}"; else echo "${TARGET_DEPLOYMENT_SCOPE}"; fi) 47 | - export REGION_TFVARS=$([ -s 
"environments/${ENV}/${TEAM}/${REGION}.tfvars" ] && echo "-var-file environments/${ENV}/${TEAM}/${REGION}.tfvars" || echo "") 48 | - echo "ENV=${ENV}" 49 | - echo "TEAM=${TEAM}" 50 | - echo "TARGET_DEPLOYMENT_SCOPE=${TARGET_DEPLOYMENT_SCOPE}" 51 | - echo "REGION=${REGION}" 52 | - echo "REGION_TFVARS=${REGION_TFVARS}" 53 | - echo "TARGET_MODULE=${TARGET_MODULE}" 54 | - echo "terraform init -backend-config="key=${TEAM}/${ENV}-${TARGET_DEPLOYMENT_SCOPE}/terraform.tfstate" -backend-config="region=$AWS_REGION" -backend-config="bucket=${TF_BACKEND_CONFIG_PREFIX}-${ENV}" -backend-config="dynamodb_table=${TF_BACKEND_CONFIG_PREFIX}-lock-${ENV}" -backend-config="encrypt=true"" 55 | - terraform init -backend-config="key=${TEAM}/${ENV}-${TARGET_DEPLOYMENT_SCOPE}/terraform.tfstate" -backend-config="region=$AWS_REGION" -backend-config="bucket=${TF_BACKEND_CONFIG_PREFIX}-${ENV}" -backend-config="dynamodb_table=${TF_BACKEND_CONFIG_PREFIX}-lock-${ENV}" -backend-config="encrypt=true" 56 | build: 57 | commands: 58 | - ./tflint --module -f junit > tflint_report.xml 59 | post_build: 60 | commands: 61 | - echo "[Post Build]:Completed tflint..." 62 | reports: 63 | tflint: 64 | files: 65 | - tflint_report.xml 66 | file-format: "JUNITXML" 67 | -------------------------------------------------------------------------------- /modules/regional/main.tf: -------------------------------------------------------------------------------- 1 | data "aws_caller_identity" "current" {} 2 | data "aws_region" "current" {} 3 | data "aws_availability_zones" "all" {} 4 | 5 | locals { 6 | region = data.aws_region.current.name 7 | account = data.aws_caller_identity.current.account_id 8 | availability_zones = slice(sort(data.aws_availability_zones.all.zone_ids), 0, var.number_of_azs) 9 | tags = { 10 | Environment = var.env 11 | Name = var.name 12 | } 13 | } 14 | 15 | module "vpc" { 16 | source = "./vpc" 17 | 18 | vpc_name = var.name 19 | vpc_azs = local.availability_zones 20 | vpc_single_nat_gateway = true 21 | vpc_enable_nat_gateway = true 22 | vpc_enable_dns_hostnames = true 23 | vpc_tags = local.tags 24 | } 25 | 26 | module "endpoints" { 27 | source = "terraform-aws-modules/vpc/aws//modules/vpc-endpoints" 28 | version = "3.7.0" 29 | 30 | vpc_id = module.vpc.vpc_id 31 | security_group_ids = [module.vpc.vpc_security_group_ids] 32 | 33 | endpoints = { 34 | codebuild = { 35 | # interface endpoint 36 | service = "codebuild" 37 | tags = { Name = "codebuild-vpc-endpoint" } 38 | private_dns_enabled = true 39 | subnet_ids = module.vpc.vpc_private_subnet_ids 40 | }, 41 | codecommit = { 42 | # interface endpoint 43 | service = "codecommit" 44 | tags = { Name = "codecommit-vpc-endpoint" } 45 | private_dns_enabled = true 46 | subnet_ids = module.vpc.vpc_private_subnet_ids 47 | }, 48 | codepipeline = { 49 | # interface endpoint 50 | service = "codepipeline" 51 | tags = { Name = "codepipeline-vpc-endpoint" } 52 | private_dns_enabled = true 53 | subnet_ids = module.vpc.vpc_private_subnet_ids 54 | }, 55 | kms = { 56 | service = "kms" 57 | private_dns_enabled = true 58 | subnet_ids = module.vpc.vpc_private_subnet_ids 59 | security_group_ids = [module.vpc.vpc_security_group_tls_id] 60 | } 61 | } 62 | 63 | tags = local.tags 64 | } 65 | 66 | data aws_iam_role codebuild_role { 67 | name = "codebuild-role" 68 | } 69 | 70 | data "template_file" "key_policy_template" { 71 | template = file("${path.module}/templates/key_policy.tpl") 72 | vars = { 73 | region = local.region 74 | account = local.account 75 | codebuild-role = 
data.aws_iam_role.codebuild_role.arn 76 | } 77 | } 78 | 79 | data "template_file" "s3_bucket_policy_codebuild_template" { 80 | template = file("${path.module}/templates/s3_bucket_policy_codebuild.tpl") 81 | vars = { 82 | cb_s3_resource_arns = jsonencode(concat([ aws_s3_bucket.codebuild_artifacts.arn, format("%s/*", aws_s3_bucket.codebuild_artifacts.arn) ], 83 | [ format("arn:aws:s3:::%s*", var.tf_backend_config_prefix) ], 84 | flatten([for tag in var.tag_prefix_list : [aws_s3_bucket.codepipeline_artifacts[tag].arn, 85 | aws_s3_bucket.codebuild_repo_artifacts[tag].arn, 86 | format("%s/*", aws_s3_bucket.codepipeline_artifacts[tag].arn), 87 | format("%s/*", aws_s3_bucket.codebuild_repo_artifacts[tag].arn)]]))) 88 | } 89 | } 90 | 91 | resource "aws_iam_role_policy" "attach_s3_bucket_policy" { 92 | name_prefix = "s3_bucket-policy-cb" 93 | role = data.aws_iam_role.codebuild_role.id 94 | policy = data.template_file.s3_bucket_policy_codebuild_template.rendered 95 | } 96 | 97 | data "template_file" "s3_bucket_policy_codepipeline_template" { 98 | template = file("${path.module}/templates/s3_bucket_policy_codepipeline.tpl") 99 | vars = { 100 | cp_s3_resource_arns = jsonencode(concat([ aws_s3_bucket.codebuild_artifacts.arn, format("%s/*", aws_s3_bucket.codebuild_artifacts.arn) ], 101 | flatten([for tag in var.tag_prefix_list : [aws_s3_bucket.codepipeline_artifacts[tag].arn, 102 | aws_s3_bucket.codebuild_repo_artifacts[tag].arn, 103 | format("%s/*", aws_s3_bucket.codepipeline_artifacts[tag].arn), 104 | format("%s/*", aws_s3_bucket.codebuild_repo_artifacts[tag].arn)] 105 | ] 106 | ) 107 | ) 108 | ) 109 | } 110 | } 111 | 112 | data aws_iam_role codepipeline_role { 113 | name = "codepipeline-role" 114 | } 115 | 116 | resource "aws_iam_role_policy" "attach_s3_bucket_policy_codepipeline" { 117 | name_prefix = "s3_bucket-policy-cp" 118 | role = data.aws_iam_role.codepipeline_role.id 119 | policy = data.template_file.s3_bucket_policy_codepipeline_template.rendered 120 | } 121 | 122 | resource "aws_kms_key" "artifact_encryption_key" { 123 | description = "Code artifact kms key" 124 | deletion_window_in_days = 7 125 | enable_key_rotation = true 126 | policy = data.template_file.key_policy_template.rendered 127 | } 128 | 129 | resource "aws_s3_bucket_notification" "bucket_notification" { 130 | for_each = toset(var.tag_prefix_list) 131 | bucket = aws_s3_bucket.codebuild_repo_artifacts[each.key].id 132 | eventbridge = true 133 | } 134 | 135 | resource "aws_s3_bucket" "codebuild_repo_artifacts" { 136 | for_each = toset(var.tag_prefix_list) 137 | bucket_prefix = "${var.source_repo_bucket_prefix}-artifacts-${each.key}" 138 | acl = "private" 139 | force_destroy = true // for demo purposes only 140 | 141 | versioning { 142 | enabled = true 143 | } 144 | 145 | server_side_encryption_configuration { 146 | rule { 147 | apply_server_side_encryption_by_default { 148 | kms_master_key_id = aws_kms_key.artifact_encryption_key.arn 149 | sse_algorithm = "aws:kms" 150 | } 151 | } 152 | } 153 | } 154 | 155 | resource "aws_s3_bucket_public_access_block" "codebuild_repo_artifacts" { 156 | for_each = toset(var.tag_prefix_list) 157 | bucket = aws_s3_bucket.codebuild_repo_artifacts[each.key].id 158 | 159 | block_public_acls = true 160 | block_public_policy = true 161 | restrict_public_buckets = true 162 | ignore_public_acls=true 163 | } 164 | 165 | resource "aws_s3_bucket" "codebuild_artifacts" { 166 | bucket_prefix = var.codebuild_artifacts_prefix 167 | force_destroy = true // for demo purposes only 168 | 169 | versioning { 170 | 
enabled = true 171 | } 172 | 173 | server_side_encryption_configuration { 174 | rule { 175 | apply_server_side_encryption_by_default { 176 | kms_master_key_id = aws_kms_key.artifact_encryption_key.arn 177 | sse_algorithm = "aws:kms" 178 | } 179 | } 180 | } 181 | } 182 | 183 | resource "aws_s3_bucket_public_access_block" "codebuild_artifacts" { 184 | bucket = aws_s3_bucket.codebuild_artifacts.id 185 | 186 | block_public_acls = true 187 | block_public_policy = true 188 | restrict_public_buckets = true 189 | ignore_public_acls=true 190 | } 191 | 192 | resource "aws_s3_bucket" "codepipeline_artifacts" { 193 | for_each = toset(var.tag_prefix_list) 194 | bucket_prefix = "${var.codepipeline_artifacts_prefix}-${each.key}" 195 | force_destroy = true // for demo purposes only 196 | 197 | versioning { 198 | enabled = true 199 | } 200 | 201 | server_side_encryption_configuration { 202 | rule { 203 | apply_server_side_encryption_by_default { 204 | kms_master_key_id = aws_kms_key.artifact_encryption_key.arn 205 | sse_algorithm = "aws:kms" 206 | } 207 | } 208 | } 209 | } 210 | 211 | resource "aws_s3_bucket_public_access_block" "codepipeline_artifacts" { 212 | for_each = toset(var.tag_prefix_list) 213 | bucket = aws_s3_bucket.codepipeline_artifacts[each.key].id 214 | 215 | block_public_acls = true 216 | block_public_policy = true 217 | restrict_public_buckets = true 218 | ignore_public_acls=true 219 | } 220 | 221 | resource "aws_codebuild_project" "terraform" { 222 | for_each = toset(keys(var.build_spec_file)) 223 | name = each.key 224 | description = "${each.key}_codebuild_project" 225 | build_timeout = "15" 226 | service_role = data.aws_iam_role.codebuild_role.arn 227 | encryption_key = aws_kms_key.artifact_encryption_key.arn 228 | 229 | artifacts { 230 | type = "CODEPIPELINE" 231 | } 232 | 233 | cache { 234 | type = "S3" 235 | location = aws_s3_bucket.codebuild_artifacts.bucket 236 | } 237 | 238 | environment { 239 | compute_type = "BUILD_GENERAL1_SMALL" 240 | image = "aws/codebuild/amazonlinux2-x86_64-standard:3.0" 241 | type = "LINUX_CONTAINER" 242 | image_pull_credentials_type = "CODEBUILD" 243 | } 244 | 245 | logs_config { 246 | cloudwatch_logs { 247 | group_name = "log-group" 248 | stream_name = "log-stream" 249 | } 250 | 251 | s3_logs { 252 | status = "ENABLED" 253 | location = "${aws_s3_bucket.codebuild_artifacts.bucket}/build-log" 254 | } 255 | } 256 | 257 | source { 258 | type = "CODEPIPELINE" 259 | buildspec = file("${path.cwd}/modules/regional/${var.build_spec_file[each.key]}") 260 | } 261 | 262 | source_version = var.default_branch 263 | 264 | vpc_config { 265 | vpc_id = module.vpc.vpc_id 266 | subnets = module.vpc.vpc_private_subnet_ids 267 | security_group_ids = [module.vpc.vpc_security_group_ids] 268 | } 269 | 270 | tags = { 271 | Environment = var.env 272 | } 273 | } 274 | 275 | resource "aws_codepipeline" "infra_pipeline" { 276 | for_each = toset(var.tag_prefix_list) 277 | name = "${each.key}-${var.repository_name}-deploy" 278 | role_arn = data.aws_iam_role.codepipeline_role.arn 279 | 280 | artifact_store { 281 | location = aws_s3_bucket.codepipeline_artifacts[each.key].bucket 282 | type = "S3" 283 | 284 | encryption_key { 285 | id = aws_kms_key.artifact_encryption_key.arn 286 | type = "KMS" 287 | } 288 | } 289 | 290 | stage { 291 | name = "Source" 292 | 293 | action { 294 | name = "${each.key}-${var.repository_name}-Source" 295 | category = "Source" 296 | owner = "AWS" 297 | provider = "S3" 298 | version = "1" 299 | namespace = "S3_SOURCE" 300 | output_artifacts = [ 301 | "source"] 
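      # This Source action pulls the zipped repository that the tag-triggered CodeBuild
      # project (aws_codebuild_project.build_upon_tag_creation) publishes to the per-tag
      # S3 bucket; S3ObjectKey below therefore matches that project's artifact name.
      # PollForSourceChanges is disabled because the trigger_pipeline_on_s3_updates
      # EventBridge rule in repo.tf starts the pipeline when that object changes.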
302 | region = local.region 303 | 304 | configuration = { 305 | S3Bucket = aws_s3_bucket.codebuild_repo_artifacts[each.key].bucket 306 | S3ObjectKey = aws_codebuild_project.build_upon_tag_creation[each.key].name 307 | PollForSourceChanges = false 308 | } 309 | } 310 | } 311 | 312 | stage { 313 | name = "Terraform_tflint" 314 | 315 | action { 316 | name = "${var.repository_name}-Terraform_Tflint" 317 | category = "Build" 318 | owner = "AWS" 319 | provider = "CodeBuild" 320 | version = "1" 321 | input_artifacts = [ 322 | "source"] 323 | output_artifacts = [ 324 | "tflint"] 325 | namespace = "TFLINT" 326 | run_order = 1 327 | 328 | configuration = { 329 | ProjectName = aws_codebuild_project.terraform["terraform_tflint"].name 330 | EnvironmentVariables = jsonencode([ 331 | { 332 | name = "GLOBAL_RESOURCE_DEPLOY_FROM_REGION", 333 | value = var.global_resource_deploy_from_region, 334 | type = "PLAINTEXT" 335 | }, 336 | { 337 | name = "TF_BACKEND_CONFIG_PREFIX", 338 | value = var.tf_backend_config_prefix, 339 | type = "PLAINTEXT" 340 | } 341 | ]) 342 | } 343 | } 344 | } 345 | 346 | stage { 347 | name = "Terraform_checkov" 348 | 349 | action { 350 | name = "${var.repository_name}-Terraform_Checkov" 351 | category = "Build" 352 | owner = "AWS" 353 | provider = "CodeBuild" 354 | version = "1" 355 | input_artifacts = [ 356 | "source"] 357 | output_artifacts = [ 358 | "checkov"] 359 | namespace = "CHECKOV" 360 | run_order = 1 361 | 362 | configuration = { 363 | ProjectName = aws_codebuild_project.terraform["terraform_checkov"].name 364 | EnvironmentVariables = jsonencode([ 365 | { 366 | name = "ACCOUNT", 367 | value = local.account, 368 | type = "PLAINTEXT" 369 | } 370 | ]) 371 | } 372 | } 373 | 374 | action { 375 | name = "${var.repository_name}-Terraform_Checkov_Approval" 376 | category = "Approval" 377 | owner = "AWS" 378 | provider = "Manual" 379 | version = "1" 380 | run_order = 2 381 | 382 | configuration = { 383 | CustomData = "checkov: #{CHECKOV.failures}, #{CHECKOV.tests}" 384 | ExternalEntityLink = "#{CHECKOV.review_link}" 385 | } 386 | } 387 | } 388 | 389 | 390 | stage { 391 | name = "Terraform_Build" 392 | 393 | action { 394 | name = "${var.repository_name}-Terraform_Plan" 395 | category = "Build" 396 | owner = "AWS" 397 | provider = "CodeBuild" 398 | input_artifacts = [ 399 | "source"] 400 | output_artifacts = [ 401 | "plan"] 402 | namespace = "TF" 403 | version = "1" 404 | run_order = 1 405 | 406 | configuration = { 407 | ProjectName = aws_codebuild_project.terraform["terraform_plan"].name 408 | EnvironmentVariables = jsonencode([ 409 | { 410 | name = "ENV", 411 | value = "#{TFLINT.ENV}", 412 | type = "PLAINTEXT" 413 | }, 414 | { 415 | name = "TEAM", 416 | value = "#{TFLINT.TEAM}", 417 | type = "PLAINTEXT" 418 | }, 419 | { 420 | name = "TARGET_DEPLOYMENT_SCOPE", 421 | value = "#{TFLINT.TARGET_DEPLOYMENT_SCOPE}", 422 | type = "PLAINTEXT" 423 | }, 424 | { 425 | name = "REGION_TFVARS", 426 | value = "#{TFLINT.REGION_TFVARS}", 427 | type = "PLAINTEXT" 428 | }, 429 | { 430 | name = "TARGET_MODULE", 431 | value = "#{TFLINT.TARGET_MODULE}", 432 | type = "PLAINTEXT" 433 | }, 434 | { 435 | name = "REGION", 436 | value = "#{TFLINT.REGION}", 437 | type = "PLAINTEXT" 438 | }, 439 | { 440 | name = "TF_BACKEND_CONFIG_PREFIX", 441 | value = var.tf_backend_config_prefix, 442 | type = "PLAINTEXT" 443 | } 444 | ]) 445 | } 446 | } 447 | 448 | action { 449 | name = "${var.repository_name}-Terraform_Apply_Approval" 450 | category = "Approval" 451 | owner = "AWS" 452 | provider = "Manual" 453 | version = "1" 
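      # This manual approval gates the apply. Its ExternalEntityLink below is built from
      # the build_id, build_tag and pipeline_region values exported by the plan build via
      # the "TF" namespace, so reviewers land directly on the terraform plan output.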
454 | run_order = 2 455 | 456 | configuration = { 457 | CustomData = "Please review and approve the terraform plan" 458 | ExternalEntityLink = "https://#{TF.pipeline_region}.console.aws.amazon.com/codesuite/codebuild/${local.account}/projects/#{TF.build_id}/build/#{TF.build_id}%3A#{TF.build_tag}/?region=#{TF.pipeline_region}" 459 | } 460 | } 461 | 462 | action { 463 | name = "${var.repository_name}-Terraform_Apply" 464 | category = "Build" 465 | owner = "AWS" 466 | provider = "CodeBuild" 467 | input_artifacts = [ 468 | "plan"] 469 | output_artifacts = [ 470 | "apply"] 471 | version = "1" 472 | run_order = 3 473 | 474 | configuration = { 475 | ProjectName = aws_codebuild_project.terraform["terraform_apply"].name 476 | EnvironmentVariables = jsonencode([ 477 | { 478 | name = "ENV", 479 | value = "#{TFLINT.ENV}", 480 | type = "PLAINTEXT" 481 | }, 482 | { 483 | name = "TEAM", 484 | value = "#{TFLINT.TEAM}", 485 | type = "PLAINTEXT" 486 | }, 487 | { 488 | name = "TARGET_DEPLOYMENT_SCOPE", 489 | value = "#{TFLINT.TARGET_DEPLOYMENT_SCOPE}", 490 | type = "PLAINTEXT" 491 | }, 492 | { 493 | name = "REGION_TFVARS", 494 | value = "#{TFLINT.REGION_TFVARS}", 495 | type = "PLAINTEXT" 496 | }, 497 | { 498 | name = "TARGET_MODULE", 499 | value = "#{TFLINT.TARGET_MODULE}", 500 | type = "PLAINTEXT" 501 | }, 502 | { 503 | name = "REGION", 504 | value = "#{TFLINT.REGION}", 505 | type = "PLAINTEXT" 506 | }, 507 | { 508 | name = "TF_BACKEND_CONFIG_PREFIX", 509 | value = var.tf_backend_config_prefix, 510 | type = "PLAINTEXT" 511 | } 512 | ]) 513 | } 514 | } 515 | } 516 | } 517 | 518 | resource "aws_codebuild_project" "build_upon_tag_creation" { 519 | for_each = toset(var.tag_prefix_list) 520 | name = "${each.key}-${aws_codecommit_repository.awsomerepo.repository_name}-src" 521 | description = "src_codebuild_project" 522 | build_timeout = "5" 523 | service_role = data.aws_iam_role.codebuild_role.arn 524 | encryption_key = aws_kms_key.artifact_encryption_key.arn 525 | 526 | artifacts { 527 | type = "S3" 528 | location = aws_s3_bucket.codebuild_repo_artifacts[each.key].bucket 529 | name = "${each.key}-${aws_codecommit_repository.awsomerepo.repository_name}-src" 530 | packaging = "ZIP" 531 | } 532 | 533 | cache { 534 | type = "S3" 535 | location = aws_s3_bucket.codebuild_repo_artifacts[each.key].bucket 536 | } 537 | 538 | environment { 539 | compute_type = "BUILD_GENERAL1_SMALL" 540 | image = "aws/codebuild/amazonlinux2-x86_64-standard:3.0" 541 | type = "LINUX_CONTAINER" 542 | image_pull_credentials_type = "CODEBUILD" 543 | } 544 | 545 | logs_config { 546 | cloudwatch_logs { 547 | group_name = "log-group" 548 | stream_name = "log-stream" 549 | } 550 | 551 | s3_logs { 552 | status = "ENABLED" 553 | location = "${aws_s3_bucket.codebuild_repo_artifacts[each.key].bucket}/build-log" 554 | } 555 | } 556 | 557 | source { 558 | type = "CODECOMMIT" 559 | location = aws_codecommit_repository.awsomerepo.clone_url_http 560 | buildspec = file("${path.cwd}/modules/regional/buildspec-tagged_source.yml") 561 | } 562 | 563 | source_version = var.default_branch 564 | 565 | vpc_config { 566 | vpc_id = module.vpc.vpc_id 567 | subnets = module.vpc.vpc_private_subnet_ids 568 | security_group_ids = [ 569 | module.vpc.vpc_security_group_ids] 570 | } 571 | 572 | tags = { 573 | Environment = var.env 574 | } 575 | } -------------------------------------------------------------------------------- /modules/regional/repo.tf: -------------------------------------------------------------------------------- 1 | resource "aws_codecommit_repository" 
"awsomerepo" { 2 | repository_name = var.repository_name 3 | description = "This is the Sample IaC Repository for Infrastructure Resources" 4 | default_branch = var.default_branch 5 | } 6 | 7 | data "template_file" "cloudwatchevent_policy_template" { 8 | template = file("${path.module}/templates/cloud_watch_event_policy.tpl") 9 | vars = { 10 | pipeline_arn = jsonencode([for tag in var.tag_prefix_list : aws_codepipeline.infra_pipeline[tag].arn]) 11 | codebuildproj_arn = jsonencode([for tag in var.tag_prefix_list : aws_codebuild_project.build_upon_tag_creation[tag].arn]) 12 | } 13 | } 14 | 15 | data aws_iam_role cloudwatch_event_role { 16 | name = "cloudwatch-event-role" 17 | } 18 | 19 | resource "aws_iam_role_policy" "attach_cwe_policy" { 20 | name_prefix = "cwe-policy" 21 | role = data.aws_iam_role.cloudwatch_event_role.name 22 | 23 | policy = data.template_file.cloudwatchevent_policy_template.rendered 24 | } 25 | 26 | // CodeBuild as Target for git tag push 27 | resource "aws_cloudwatch_event_rule" "trigger_build_on_tag_updates" { 28 | for_each = toset(var.tag_prefix_list) 29 | name = "trigger_codebuild_on_tag_update_${each.key}" 30 | description = "Trigger code build on ${each.key} tag update" 31 | 32 | event_pattern = < } ]}" 74 | } 75 | } 76 | 77 | resource "aws_cloudwatch_event_rule" "trigger_pipeline_on_s3_updates" { 78 | for_each = toset(var.tag_prefix_list) 79 | name = "trigger_pipeline_on_s3_updates_${each.key}" 80 | description = "Trigger code pipeline on s3 update" 81 | 82 | event_pattern = < trust_policy.json 23 | { 24 | "Version": "2012-10-17", 25 | "Statement": [ 26 | { 27 | "Effect": "Allow", 28 | "Principal": { 29 | "AWS": "$user_arn" 30 | }, 31 | "Action": "sts:AssumeRole" 32 | } 33 | ] 34 | } 35 | EOF 36 | 37 | infra_build_role="InfraBuildRole" 38 | echo "Checking if $infra_build_role role exists in the tooling account" 39 | role_exists=$(aws iam get-role --profile aws_sample_central_tooling --role-name $infra_build_role) 40 | if [ "$role_exists" ]; then 41 | printf "%s exists. Returning...\n" $infra_build_role 42 | return 43 | fi 44 | echo "Creating InfraBuildRole for deployment of CI/CD resources into tooling account" 45 | aws iam create-role --profile aws_sample_central_tooling --role-name $infra_build_role --assume-role-policy-document file://trust_policy.json 46 | read -r -p "Enter the IAM policy ARN created in the tooling account to attach to InfraBuildRole. Ensure it has the necessary permissions to create the CI/CD resources and Terraform remote state management resources in the central tooling account: " infra_build_iam_policy 47 | get_iam_policy="aws iam get-policy --profile aws_sample_central_tooling --policy-arn $infra_build_iam_policy" 48 | eval $get_iam_policy 49 | ret_code=$? 50 | if [ $ret_code != 0 ]; then 51 | printf "Error: [%d] when retrieving the IAM policy using: '$get_iam_policy'" $ret_code 52 | printf "Please verify the IAM policy to attach to InfraBuildRole exists and if not create it. Exiting..." 53 | exit $ret_code 54 | fi 55 | aws iam attach-role-policy --profile aws_sample_central_tooling --policy-arn $infra_build_iam_policy --role-name $infra_build_role 56 | } 57 | 58 | # Create a CloudOps IAM role in the central tooling account. 59 | # This role is what's used by the DevOps engineers to interact with the CodeCommit repo among other things. 
60 | function create_tooling_cloudops_role() 61 | { 62 | user_arn=$(aws sts get-caller-identity --profile aws_sample_central_tooling --query 'Arn' --output text) 63 | cat << EOF > trust_policy.json 64 | { 65 | "Version": "2012-10-17", 66 | "Statement": [ 67 | { 68 | "Effect": "Allow", 69 | "Principal": { 70 | "AWS": "$user_arn" 71 | }, 72 | "Action": "sts:AssumeRole" 73 | } 74 | ] 75 | } 76 | EOF 77 | 78 | # Check if role already exists 79 | cloud_ops_role="CloudOps" 80 | echo "Checking if $cloud_ops_role role exists in the tooling account" 81 | role_exists=$(aws iam get-role --profile aws_sample_central_tooling --role-name $cloud_ops_role) 82 | if [ "$role_exists" ]; then 83 | printf "%s exists. Returning...\n" $cloud_ops_role 84 | return 85 | fi 86 | 87 | echo "Creating $cloud_ops_role role for the devops team member in the tooling account" 88 | aws iam create-role --profile aws_sample_central_tooling --role-name $cloud_ops_role --assume-role-policy-document file://trust_policy.json 89 | aws iam attach-role-policy --profile aws_sample_central_tooling --policy-arn arn:aws:iam::aws:policy/AWSCodeCommitPowerUser --role-name $cloud_ops_role 90 | aws iam attach-role-policy --profile aws_sample_central_tooling --policy-arn arn:aws:iam::aws:policy/AWSConfigUserAccess --role-name $cloud_ops_role 91 | aws iam attach-role-policy --profile aws_sample_central_tooling --policy-arn arn:aws:iam::aws:policy/AWSCloudTrail_ReadOnlyAccess --role-name $cloud_ops_role 92 | aws iam attach-role-policy --profile aws_sample_central_tooling --policy-arn arn:aws:iam::aws:policy/AWSCodeBuildReadOnlyAccess --role-name $cloud_ops_role 93 | aws iam attach-role-policy --profile aws_sample_central_tooling --policy-arn arn:aws:iam::aws:policy/CloudWatchReadOnlyAccess --role-name $cloud_ops_role 94 | # As needed for the use case, you can use AWSCodePipelineApproverAccess instead that's more restricted 95 | # aws iam attach-role-policy --profile aws_sample_central_tooling --policy-arn arn:aws:iam::aws:policy/AWSCodePipelineApproverAccess --role-name $cloud_ops_role 96 | aws iam attach-role-policy --profile aws_sample_central_tooling --policy-arn arn:aws:iam::aws:policy/AWSCodePipeline_FullAccess --role-name $cloud_ops_role 97 | aws iam attach-role-policy --profile aws_sample_central_tooling --policy-arn arn:aws:iam::aws:policy/AmazonEventBridgeReadOnlyAccess --role-name $cloud_ops_role 98 | } 99 | 100 | configure_aws_cli_for_tooling_account 101 | create_tooling_cloudops_role 102 | create_tooling_infra_build_role -------------------------------------------------------------------------------- /scripts/prerequisites/create-iam-resources_for_workload_account.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | function configure_aws_cli_for_workload_account() 4 | { 5 | echo "Configuring aws cli using aws configure for the workload account" 6 | read -r -p "Enter aws_access_key_id for the workload account IAM user with necessary permissions to create IAM resources: " aws_access_key_id 7 | read -r -p "Enter aws_secret_access_key for this user: " aws_secret_access_key 8 | read -r -p "Enter default.region: " default_region 9 | aws configure --profile aws_sample_workload_account set aws_access_key_id $aws_access_key_id 10 | aws configure --profile aws_sample_workload_account set aws_secret_access_key $aws_secret_access_key 11 | aws configure --profile aws_sample_workload_account set default.region $default_region 12 | export AWS_PROFILE=aws_sample_workload_account 13 | echo 
"Current User is" 14 | aws sts get-caller-identity --profile aws_sample_workload_account 15 | } 16 | 17 | # Creates the workload account IAM role named InfraBuildRole in the target workload account. 18 | # See details on cross account IAM roles at https://docs.aws.amazon.com/IAM/latest/UserGuide/tutorial_cross-account-with-roles.html 19 | # including this step https://docs.aws.amazon.com/IAM/latest/UserGuide/tutorial_cross-account-with-roles.html#tutorial_cross-account-with-roles-2 20 | function create_workload_account_role() 21 | { 22 | # Get the central tooling account number in which codebuild-role IAM role exists 23 | # The CodeBuild build inside the pipeline runs as this IAM role "arn:aws:iam::$tooling_account_number:role/codebuild-role" 24 | # We need to ensure the InfraBuildRole in the workload account can be assumed by this role. 25 | read -r -p "Enter the CI/CD central tooling account number:" tooling_account_number 26 | if ! [[ $tooling_account_number =~ ^[0-9]{12}$ ]]; then 27 | printf "%s is not an account number. Please provide a valid account number. Exiting...\n" $tooling_account_number 28 | exit 1 29 | fi 30 | # Deployments into target workload account: "codebuild-role" needs to be able to assume target workload's "InfraBuildRole" for deployments 31 | # Destroying resources in target workload account for this sample: "InfraBuildRole" in the tooling account needs to be able to assume target workload's "InfraBuildRole" 32 | # for destroying the resources in the target workload account. This "destroy" functionality could be made available via a pipeline instead as well but that is outside the scope of this sample. 33 | cat << EOF > workload_trust_policy.json 34 | { 35 | "Version": "2012-10-17", 36 | "Statement": [ 37 | { 38 | "Effect": "Allow", 39 | "Principal": { "AWS": "arn:aws:iam::$tooling_account_number:role/codebuild-role" }, 40 | "Action": "sts:AssumeRole" 41 | }, 42 | { 43 | "Effect": "Allow", 44 | "Principal": { "AWS": "arn:aws:iam::$tooling_account_number:role/InfraBuildRole" }, 45 | "Action": "sts:AssumeRole" 46 | } 47 | ] 48 | } 49 | EOF 50 | 51 | infra_build_role="InfraBuildRole" 52 | echo "Checking if $infra_build_role role exists in the workload account" 53 | role_exists=$(aws iam get-role --profile aws_sample_workload_account --role-name $infra_build_role) 54 | if [ "$role_exists" ]; then 55 | printf "%s exists. Returning...\n" $infra_build_role 56 | return 57 | fi 58 | aws iam create-role --profile aws_sample_workload_account --role-name $infra_build_role --assume-role-policy-document file://workload_trust_policy.json 59 | read -r -p "Enter the IAM policy ARN created in the workload account to attach to $infra_build_role. Ensure it has the necessary permissions to create the workload infra resources in this account: " infra_build_iam_policy 60 | get_iam_policy="aws iam get-policy --profile aws_sample_workload_account --policy-arn $infra_build_iam_policy" 61 | eval $get_iam_policy 62 | ret_code=$? 63 | if [ $ret_code != 0 ]; then 64 | printf "Error: [%d] when retrieving the IAM policy using: '$get_iam_policy'" $ret_code 65 | printf "Please verify the IAM policy exists and if not, create it. Exiting..." 
66 | exit $ret_code 67 | fi 68 | aws iam attach-role-policy --profile aws_sample_workload_account --policy-arn $infra_build_iam_policy --role-name $infra_build_role 69 | } 70 | 71 | configure_aws_cli_for_workload_account 72 | create_workload_account_role -------------------------------------------------------------------------------- /scripts/run-tf-prod-destroy.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | usage_help="Usage: $0 [-t prod_/tooling/] [-b ] [-g ] [-r ]" 4 | usage() { echo "$usage_help" 1>&2; exit 1; } 5 | 6 | # Default values if none provided 7 | global_resource_deploy_from_region="us-east-1" 8 | tf_state_region="us-east-1" 9 | while getopts ":t:b:g:r:" o; do 10 | case "${o}" in 11 | t) 12 | tag=${OPTARG} 13 | ;; 14 | b) 15 | tf_backend_config_prefix=${OPTARG} 16 | ;; 17 | g) 18 | global_resource_deploy_from_region=${OPTARG} 19 | ;; 20 | r) 21 | tf_state_region=${OPTARG} 22 | ;; 23 | *) 24 | usage 25 | ;; 26 | esac 27 | done 28 | shift $((OPTIND-1)) 29 | 30 | if [ -z "$tf_backend_config_prefix" ]; then echo "Please pass in [-b ] Terraform state S3 bucket prefix. Please see README for details. Exiting..."; usage; fi 31 | if [ $OPTIND -eq 1 ]; then echo "$usage_help"; echo "No arguments were passed...Trying to run with default values. If it doesn't succeed, please read the Prerequisites inside README of this project."; fi 32 | if [ -z "$tag" ]; then tag="prod_$tf_state_region/tooling/1.0"; fi 33 | 34 | echo "tag is set to $tag" 35 | echo "tf_backend_config_prefix is set to $tf_backend_config_prefix" 36 | echo "global_resource_deploy_from_region is set to $global_resource_deploy_from_region" 37 | echo "tf_state_region is set to $tf_state_region" 38 | 39 | ENV=$(echo $tag | cut -d/ -f1 | cut -d_ -f1) 40 | TARGET_DEPLOYMENT_SCOPE=$(echo $tag | cut -d/ -f1 | cut -d_ -f2) 41 | TEAM=$(echo $tag | cut -d/ -f2) 42 | GLOBAL_RESOURCE_DEPLOY_FROM_REGION=$global_resource_deploy_from_region 43 | TARGET_MODULE=$(if [[ ${TARGET_DEPLOYMENT_SCOPE} == *"global"* ]];then echo "module.global"; else echo "module.regional"; fi) 44 | REGION=$(if [[ ${TARGET_DEPLOYMENT_SCOPE} == *"global"* ]];then echo "${GLOBAL_RESOURCE_DEPLOY_FROM_REGION}"; else echo "${TARGET_DEPLOYMENT_SCOPE}"; fi) # default to us-east-1 if global resource deployment 45 | echo "terraform init -reconfigure -backend-config="key=$TEAM/$ENV-$TARGET_DEPLOYMENT_SCOPE/terraform.tfstate" -backend-config="region=$tf_state_region" -backend-config="bucket=$tf_backend_config_prefix-$ENV" -backend-config="dynamodb_table=$tf_backend_config_prefix-lock-$ENV" -backend-config="encrypt=true"" 46 | terraform init -reconfigure -backend-config="key=$TEAM/$ENV-$TARGET_DEPLOYMENT_SCOPE/terraform.tfstate" -backend-config="region=$tf_state_region" -backend-config="bucket=$tf_backend_config_prefix-$ENV" -backend-config="dynamodb_table=$tf_backend_config_prefix-lock-$ENV" -backend-config="encrypt=true" 47 | terraform fmt 48 | terraform validate 49 | REGION_TFVARS=$([ -s "environments/${ENV}/${TEAM}/${REGION}.tfvars" ] && echo "-var-file environments/${ENV}/${TEAM}/${REGION}.tfvars" || echo "") 50 | echo "terraform plan -var-file "environments/${ENV}/${TEAM}/variables.tfvars" ${REGION_TFVARS} -var "env=${ENV}" -var "region=${REGION}" -var "tf_backend_config_prefix=${tf_backend_config_prefix}" -var "global_resource_deploy_from_region=${global_resource_deploy_from_region}" -target ${TARGET_MODULE} -out=tfplan" 51 | terraform plan -destroy -var-file "environments/${ENV}/${TEAM}/variables.tfvars" 
${REGION_TFVARS} -var "env=${ENV}" -var "region=${REGION}" -var "tf_backend_config_prefix=${tf_backend_config_prefix}" -var "global_resource_deploy_from_region=${global_resource_deploy_from_region}" -target ${TARGET_MODULE} -out=tfplan -compact-warnings 52 | -------------------------------------------------------------------------------- /scripts/run-tf-prod-global-destroy.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | usage_help="Usage: $0 [-t prod_global/tooling/] [-b ] [-g ] [-r ]" 4 | usage() { echo "$usage_help" 1>&2; exit 1; } 5 | 6 | # Default values if none provided 7 | tag="prod_global/tooling/1.0" 8 | global_resource_deploy_from_region="us-east-1" 9 | tf_state_region="us-east-1" 10 | while getopts ":t:b:g:r:" o; do 11 | case "${o}" in 12 | t) 13 | tag=${OPTARG} 14 | ;; 15 | b) 16 | tf_backend_config_prefix=${OPTARG} 17 | ;; 18 | g) 19 | global_resource_deploy_from_region=${OPTARG} 20 | ;; 21 | r) 22 | tf_state_region=${OPTARG} 23 | ;; 24 | *) 25 | usage 26 | ;; 27 | esac 28 | done 29 | shift $((OPTIND-1)) 30 | 31 | if [ -z "$tf_backend_config_prefix" ]; then echo "Please pass in [-b ] Terraform state S3 bucket prefix. Please see README for details. Exiting..."; usage; fi 32 | if [ $OPTIND -eq 1 ]; then echo "$usage_help"; echo "No arguments were passed...Trying to run with default values. If it doesn't succeed, please read the Prerequisites inside README of this project."; fi 33 | echo "tag is set to $tag" 34 | echo "tf_backend_config_prefix is set to $tf_backend_config_prefix" 35 | echo "global_resource_deploy_from_region is set to $global_resource_deploy_from_region" 36 | echo "tf_state_region is set to $tf_state_region" 37 | 38 | ENV=$(echo $tag | cut -d/ -f1 | cut -d_ -f1) 39 | TARGET_DEPLOYMENT_SCOPE=$(echo $tag | cut -d/ -f1 | cut -d_ -f2) 40 | TEAM=$(echo $tag | cut -d/ -f2) 41 | GLOBAL_RESOURCE_DEPLOY_FROM_REGION=$global_resource_deploy_from_region 42 | TARGET_MODULE=$(if [[ ${TARGET_DEPLOYMENT_SCOPE} == *"global"* ]];then echo "module.global"; else echo "module.regional"; fi) 43 | REGION=$(if [[ ${TARGET_DEPLOYMENT_SCOPE} == *"global"* ]];then echo "${GLOBAL_RESOURCE_DEPLOY_FROM_REGION}"; else echo "${TARGET_DEPLOYMENT_SCOPE}"; fi) # default to us-east-1 if global resource deployment 44 | echo "terraform init -reconfigure -backend-config="key=$TEAM/$ENV-$TARGET_DEPLOYMENT_SCOPE/terraform.tfstate" -backend-config="region=$tf_state_region" -backend-config="bucket=$tf_backend_config_prefix-$ENV" -backend-config="dynamodb_table=$tf_backend_config_prefix-lock-$ENV" -backend-config="encrypt=true"" 45 | terraform init -reconfigure -backend-config="key=$TEAM/$ENV-$TARGET_DEPLOYMENT_SCOPE/terraform.tfstate" -backend-config="region=$tf_state_region" -backend-config="bucket=$tf_backend_config_prefix-$ENV" -backend-config="dynamodb_table=$tf_backend_config_prefix-lock-$ENV" -backend-config="encrypt=true" 46 | terraform fmt 47 | terraform validate 48 | REGION_TFVARS=$([ -s "environments/${ENV}/${TEAM}/${REGION}.tfvars" ] && echo "-var-file environments/${ENV}/${TEAM}/${REGION}.tfvars" || echo "") 49 | echo "terraform plan -var-file "environments/${ENV}/${TEAM}/variables.tfvars" ${REGION_TFVARS} -var "env=${ENV}" -var "region=${REGION}" -var "tf_backend_config_prefix=${tf_backend_config_prefix}" -var "global_resource_deploy_from_region=${global_resource_deploy_from_region}" -target ${TARGET_MODULE} -out=tfplan" 50 | terraform plan -destroy -var-file "environments/${ENV}/${TEAM}/variables.tfvars" ${REGION_TFVARS} -var 
"env=${ENV}" -var "region=${REGION}" -var "tf_backend_config_prefix=${tf_backend_config_prefix}" -var "global_resource_deploy_from_region=${global_resource_deploy_from_region}" -target ${TARGET_MODULE} -out=tfplan -compact-warnings -------------------------------------------------------------------------------- /scripts/run-tf-prod-global.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | usage_help="Usage: $0 [-t prod_global/tooling/] [-b ] [-g ] [-r ]" 4 | usage() { echo "$usage_help" 1>&2; exit 1; } 5 | 6 | # Default values if none provided 7 | tag="prod_global/tooling/1.0" 8 | global_resource_deploy_from_region="us-east-1" 9 | tf_state_region="us-east-1" 10 | while getopts ":t:p:b:g:r:" o; do 11 | case "${o}" in 12 | t) 13 | tag=${OPTARG} 14 | ;; 15 | b) 16 | tf_backend_config_prefix=${OPTARG} 17 | ;; 18 | g) 19 | global_resource_deploy_from_region=${OPTARG} 20 | ;; 21 | r) 22 | tf_state_region=${OPTARG} 23 | ;; 24 | *) 25 | usage 26 | ;; 27 | esac 28 | done 29 | shift $((OPTIND-1)) 30 | 31 | if [ -z "$tf_backend_config_prefix" ]; then echo "Please pass in [-b ] Terraform state S3 bucket prefix. Please see README for details. Exiting..."; usage; fi 32 | if [ $OPTIND -eq 1 ]; then echo "$usage_help"; echo "No arguments were passed...Trying to run with default values. If it doesn't succeed, please read the Prerequisites inside README of this project."; fi 33 | echo "tag is set to $tag" 34 | echo "tf_backend_config_prefix is set to $tf_backend_config_prefix" 35 | echo "global_resource_deploy_from_region is set to $global_resource_deploy_from_region" 36 | echo "tf_state_region is set to $tf_state_region" 37 | 38 | ENV=$(echo $tag | cut -d/ -f1 | cut -d_ -f1) 39 | TARGET_DEPLOYMENT_SCOPE=$(echo $tag | cut -d/ -f1 | cut -d_ -f2) 40 | TEAM=$(echo $tag | cut -d/ -f2) 41 | GLOBAL_RESOURCE_DEPLOY_FROM_REGION=$global_resource_deploy_from_region 42 | TARGET_MODULE=$(if [[ ${TARGET_DEPLOYMENT_SCOPE} == *"global"* ]];then echo "module.global"; else echo "module.regional"; fi) 43 | REGION=$(if [[ ${TARGET_DEPLOYMENT_SCOPE} == *"global"* ]];then echo "${GLOBAL_RESOURCE_DEPLOY_FROM_REGION}"; else echo "${TARGET_DEPLOYMENT_SCOPE}"; fi) # default to us-east-1 if global resource deployment 44 | echo "terraform init -reconfigure -backend-config="key=$TEAM/$ENV-$TARGET_DEPLOYMENT_SCOPE/terraform.tfstate" -backend-config="region=$tf_state_region" -backend-config="bucket=$tf_backend_config_prefix-$ENV" -backend-config="dynamodb_table=$tf_backend_config_prefix-lock-$ENV" -backend-config="encrypt=true"" 45 | terraform init -reconfigure -backend-config="key=$TEAM/$ENV-$TARGET_DEPLOYMENT_SCOPE/terraform.tfstate" -backend-config="region=$tf_state_region" -backend-config="bucket=$tf_backend_config_prefix-$ENV" -backend-config="dynamodb_table=$tf_backend_config_prefix-lock-$ENV" -backend-config="encrypt=true" 46 | terraform fmt 47 | terraform validate 48 | REGION_TFVARS=$([ -s "environments/${ENV}/${TEAM}/${REGION}.tfvars" ] && echo "-var-file environments/${ENV}/${TEAM}/${REGION}.tfvars" || echo "") 49 | echo "terraform plan -var-file "environments/${ENV}/${TEAM}/variables.tfvars" ${REGION_TFVARS} -var "env=${ENV}" -var "region=${REGION}" -var "tf_backend_config_prefix=${tf_backend_config_prefix}" -var "global_resource_deploy_from_region=${global_resource_deploy_from_region}" -target ${TARGET_MODULE} -out=tfplan" 50 | terraform plan -var-file "environments/${ENV}/${TEAM}/variables.tfvars" ${REGION_TFVARS} -var "env=${ENV}" -var "region=${REGION}" -var 
"tf_backend_config_prefix=${tf_backend_config_prefix}" -var "global_resource_deploy_from_region=${global_resource_deploy_from_region}" -target ${TARGET_MODULE} -out=tfplan -compact-warnings -------------------------------------------------------------------------------- /scripts/run-tf-prod.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | usage_help="Usage: $0 [-t prod_/tooling/] [-b ] [-g ] [-r ]" 4 | usage() { echo "$usage_help" 1>&2; exit 1; } 5 | 6 | # Default values if none provided 7 | global_resource_deploy_from_region="us-east-1" 8 | tf_state_region="us-east-1" 9 | while getopts ":t:b:g:r:" o; do 10 | case "${o}" in 11 | t) 12 | tag=${OPTARG} 13 | ;; 14 | b) 15 | tf_backend_config_prefix=${OPTARG} 16 | ;; 17 | g) 18 | global_resource_deploy_from_region=${OPTARG} 19 | ;; 20 | r) 21 | tf_state_region=${OPTARG} 22 | ;; 23 | *) 24 | usage 25 | ;; 26 | esac 27 | done 28 | shift $((OPTIND-1)) 29 | 30 | if [ -z "$tf_backend_config_prefix" ]; then echo "Please pass in [-b ] Terraform state S3 bucket prefix. Please see README for details. Exiting..."; usage; fi 31 | if [ $OPTIND -eq 1 ]; then echo "$usage_help"; echo "No arguments were passed...Trying to run with default values. If it doesn't succeed, please read the Prerequisites inside README of this project."; fi 32 | if [ -z "$tag" ]; then tag="prod_$tf_state_region/tooling/1.0"; fi 33 | 34 | echo "tf_backend_config_prefix is set to $tf_backend_config_prefix" 35 | echo "global_resource_deploy_from_region is set to $global_resource_deploy_from_region" 36 | echo "tf_state_region is set to $tf_state_region" 37 | echo "tag is set to $tag" 38 | 39 | ENV=$(echo $tag | cut -d/ -f1 | cut -d_ -f1) 40 | TARGET_DEPLOYMENT_SCOPE=$(echo $tag | cut -d/ -f1 | cut -d_ -f2) 41 | TEAM=$(echo $tag | cut -d/ -f2) 42 | GLOBAL_RESOURCE_DEPLOY_FROM_REGION=$global_resource_deploy_from_region 43 | TARGET_MODULE=$(if [[ ${TARGET_DEPLOYMENT_SCOPE} == *"global"* ]];then echo "module.global"; else echo "module.regional"; fi) 44 | REGION=$(if [[ ${TARGET_DEPLOYMENT_SCOPE} == *"global"* ]];then echo "${GLOBAL_RESOURCE_DEPLOY_FROM_REGION}"; else echo "${TARGET_DEPLOYMENT_SCOPE}"; fi) 45 | echo "terraform init -reconfigure -backend-config="key=$TEAM/$ENV-$TARGET_DEPLOYMENT_SCOPE/terraform.tfstate" -backend-config="region=$tf_state_region" -backend-config="bucket=$tf_backend_config_prefix-$ENV" -backend-config="dynamodb_table=$tf_backend_config_prefix-lock-$ENV" -backend-config="encrypt=true"" 46 | terraform init -reconfigure -backend-config="key=$TEAM/$ENV-$TARGET_DEPLOYMENT_SCOPE/terraform.tfstate" -backend-config="region=$tf_state_region" -backend-config="bucket=$tf_backend_config_prefix-$ENV" -backend-config="dynamodb_table=$tf_backend_config_prefix-lock-$ENV" -backend-config="encrypt=true" 47 | terraform fmt 48 | terraform validate 49 | REGION_TFVARS=$([ -s "environments/${ENV}/${TEAM}/${REGION}.tfvars" ] && echo "-var-file environments/${ENV}/${TEAM}/${REGION}.tfvars" || echo "") 50 | echo "terraform plan -var-file "environments/${ENV}/${TEAM}/variables.tfvars" ${REGION_TFVARS} -var "env=${ENV}" -var "region=${REGION}" -var "tf_backend_config_prefix=${tf_backend_config_prefix}" -var "global_resource_deploy_from_region=${global_resource_deploy_from_region}" -target ${TARGET_MODULE} -out=tfplan" 51 | terraform plan -var-file "environments/${ENV}/${TEAM}/variables.tfvars" ${REGION_TFVARS} -var "env=${ENV}" -var "region=${REGION}" -var "tf_backend_config_prefix=${tf_backend_config_prefix}" -var 
"global_resource_deploy_from_region=${global_resource_deploy_from_region}" -target ${TARGET_MODULE} -out=tfplan -compact-warnings 52 | -------------------------------------------------------------------------------- /variables.tf: -------------------------------------------------------------------------------- 1 | variable "region" { 2 | type = string 3 | description = "Target region" 4 | default = "us-east-1" 5 | } 6 | 7 | variable "global_resource_deploy_from_region" { 8 | type = string 9 | description = "Region from which to deploy global resources in our pipeline" 10 | default = "us-east-1" 11 | } 12 | 13 | variable "account" { 14 | type = string 15 | description = "Target AWS account number" 16 | } 17 | 18 | variable "env" { 19 | type = string 20 | description = "Environment name" 21 | default = "dev" 22 | } 23 | 24 | variable "tag_prefix_list" { 25 | type = list(string) 26 | description = "List of tag prefixes" 27 | default = ["dev", "qa", "staging", "prod"] 28 | } 29 | 30 | # Required for provisioning assume_role perms for cross account access 31 | variable "target_accounts" { 32 | type = list(string) 33 | description = "List of target accounts" 34 | } 35 | 36 | variable "number_of_azs" { 37 | type = number 38 | description = "Number of azs to deploy to" 39 | default = 3 40 | } 41 | 42 | variable "tf_backend_config_prefix" { 43 | type = string 44 | description = "A name to prefix the S3 bucket for terraform state files and the DynamoDB table for terraform state locks for backend config" 45 | } 46 | 47 | variable "source_repo_bucket_prefix" { 48 | type = string 49 | description = "A prefix for S3 bucket name to house the src code in the Source stage post tagging" 50 | default = "awsome-cb-repo" 51 | } 52 | 53 | variable "codebuild_artifacts_prefix" { 54 | type = string 55 | description = "A prefix for S3 bucket name to house the AWS CodeBuild artifacts for cache, etc." 56 | default = "awsome-cb-artifact" 57 | } 58 | 59 | variable "codepipeline_artifacts_prefix" { 60 | type = string 61 | description = "A prefix for S3 bucket name to house the AWS CodePipeline artifacts for cache, etc." 62 | default = "awsome-cp-artifact" 63 | } --------------------------------------------------------------------------------