├── .gitignore ├── CHANGELOG.md ├── LICENSE ├── README.md ├── assets └── images │ ├── approved_icon.png │ ├── approved_icon_trans.png │ ├── codepipeline.jpg │ ├── codepipeline.png │ ├── denied_icon.png │ ├── denied_icon_trans.png │ ├── error_icon.png │ ├── error_icon_trans.png │ ├── question_icon.png │ ├── question_icon_trans.png │ ├── success_icon.png │ ├── success_icon_trans.png │ ├── unknown_icon.png │ └── unknown_icon_trans.png ├── cf ├── cicd │ ├── build_dynamo.sh │ ├── build_lambdas.sh │ ├── build_rdk.sh │ ├── build_ts_aws_cdk.sh │ ├── buildspec_prod.yaml │ ├── buildspec_test.yaml │ └── stackset_pipeline.yaml ├── project │ ├── _template │ │ └── XStack.yaml │ ├── iamselfmanagedstacksettrustroles │ │ └── IAMSelfManagedStackSetTrustRolesStack.yaml │ ├── rdkconfigrules │ │ └── RDKConfigCrossAccountRole.yaml │ └── s3example │ │ └── S3Stack.yaml └── setup │ ├── 01_create_codecommit_repo.yaml │ ├── 02_deployment_artifacts_bucket.yaml │ ├── 03_iam_role_codepipeline.yaml │ ├── 04_target_deploy_roles.yaml │ ├── 05_orgs_stackset_selfmanaged_roles.yaml │ ├── 06_dynamo_db.yaml │ ├── 07_setup_RDK_role_and_deploy_bucket.yaml │ ├── Manual_Deployment.md │ └── automated_deployment.sh ├── code ├── nodejs │ └── src │ │ ├── sns_to_msteams │ │ ├── index.js │ │ ├── package.json │ │ ├── package │ │ │ └── sns_to_msteams.zip │ │ └── setup.md │ │ └── sns_to_slack │ │ ├── index.js │ │ ├── package-lock.json │ │ ├── package.json │ │ └── package │ │ └── sns_to_slack.zip ├── python-rdk │ ├── AWSConfigRuleKMSLeastPrivilege │ │ ├── AWSConfigRuleKMSLeastPrivilege.py │ │ ├── AWSConfigRuleKMSLeastPrivilege_test.py │ │ ├── AWSConfigRuleKMSStatementProcessor.py │ │ ├── parameters.json │ │ └── rules.txt │ └── README.md └── ts-cdk │ └── src │ └── stackset_creator │ ├── .npmignore │ ├── README.md │ ├── bin │ └── cdk.ts │ ├── cdk.json │ ├── jest.config.js │ ├── lib │ ├── cdk_master_stack_creator.ts │ ├── cdk_stackset_creator.ts │ ├── cfModels.ts │ ├── ddbClient.ts │ └── ddbDocClient.ts │ ├── package-lock.json │ ├── package.json │ ├── test │ └── cdk.test.ts │ └── tsconfig.json └── config ├── Accounts.json ├── DeploymentGroups.json ├── GlobalParams.json ├── OrgUnits.json └── Stacks.json /.gitignore: -------------------------------------------------------------------------------- 1 | # build folders 2 | .build 3 | 4 | # Typescript 5 | !jest.config.js 6 | *.d.ts 7 | node_modules 8 | 9 | # CDK asset staging directory 10 | .cdk.staging 11 | cdk.out 12 | -------------------------------------------------------------------------------- /CHANGELOG.md: -------------------------------------------------------------------------------- 1 | # Changelog 2 | 3 | All notable changes to this project will be documented in this file. 4 | 5 | ## [4.0.0] - 2022-07-27 6 | 7 | ### Added 8 | - Solution now uses StackSet 'Delegated Admin' to manage StackSets from the Deployment Account (on behalf of the management account). This improves security (reduced need to log in to the management account) and makes management of StackSets more convenient, as you no longer need to keep moving between so many accounts (deployment>management>target accounts). 9 | - Now supports `Account level targets for service-managed Stack Sets` [https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/account-level-targets.html]. You can now target individual AWS Accounts and Org Units in various combinations using `AccountFilterType`.
10 | - Can now disable specific envs of a stack from deploying by setting the optional variable ` env[].enabled:"false" ` in Stacks.json 11 | 12 | ### Changed 13 | - Upgraded CodeBuild NodeJS to version 14 (from v12) 14 | - Upgraded AWS CDK to version 2 (from v1) 15 | 16 | ## [3.0.0] - 2021-10-06 17 | 18 | ### Added 19 | 20 | - AWS Config Rules Development Kit (RDK) now integrated into the DevSecOps framework - deploy custom rules to the organisation 21 | - Added 'Default Region' to OrgUnits.json to provision resources that are 'install-once-per-account' 22 | - Added new deployment override in Stacks.json 'overrideDeploymentRegionUseDefaultOnly' - allows deployment to the default region only 23 | - Additional setup role required for RDK 24 | 25 | ### Changed 26 | 27 | - Renamed root folder '/lambda' to '/code' with the inclusion of the RDK and CDK frameworks (not strictly Lambda) 28 | - CodePipeline Workflow has changed and will need to be re-deployed (re-run `/cf/setup/automated_deployment.sh`) 29 | 30 | ## [2.0.0] - 2021-08-10 31 | 32 | ### Added 33 | 34 | - Solution now supports Targeted Multi-region, Multi-OrgUnit environments 35 | - AWS CDK/TypeScript now used to generate Master/Nested Stacks and Automated StackSets 36 | 37 | ### Changed 38 | 39 | - **NOTE: This version introduces Breaking Changes from v1.1.0** - There is no easy migration path for retaining stacks deployed with v1.1.0. They must be deleted and reprovisioned through the new method introduced in v2.0.0 40 | - You no longer need a manual MasterStack or manually created StackSets - the tool now generates these automatically based on settings applied to the config JSON files. 41 | - Parameter files are replaced with config (/config) files which specify what gets deployed and where. 42 | - Much less effort is now needed to hook up new templates, as you no longer need to pass reference parameters manually from master to nested+ templates. 43 | - Manual Deployment steps moved from [README.md] to a separate file (/cf/setup/Manual_Deployment.md) for improved readability 44 | 45 | ## [1.1.0] - 2021-07-20 46 | 47 | ### Added 48 | 49 | - Added trust roles for SELF-MANAGED deploy of StackSets to support targeted deployment to individual accounts (instead of just OrgUnit targets). These roles can be used by a Lambda to assume the 'AWSCloudFormationStackSetAdministrationRole' role, to kick off a manual StackSet instance 50 | - Changelog file 51 | 52 | ### Changed 53 | 54 | - Updated automated deployment script 55 | - Updated target deploy roles for working with Service Catalog 56 | - Fixed typo in 'BuildGuid' variable 57 | 58 | ## [1.0.0] - 2021-05-27 59 | 60 | ### Added 61 | 62 | - Initial release 63 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity.
For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. 
Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 
134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "[]" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright [yyyy] [name of copyright owner] 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 
193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 202 | -------------------------------------------------------------------------------- /assets/images/approved_icon.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/comunet/aws-devsecops-cicd/cb881cc66640674f44dbc65db2e428acffecedc6/assets/images/approved_icon.png -------------------------------------------------------------------------------- /assets/images/approved_icon_trans.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/comunet/aws-devsecops-cicd/cb881cc66640674f44dbc65db2e428acffecedc6/assets/images/approved_icon_trans.png -------------------------------------------------------------------------------- /assets/images/codepipeline.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/comunet/aws-devsecops-cicd/cb881cc66640674f44dbc65db2e428acffecedc6/assets/images/codepipeline.jpg -------------------------------------------------------------------------------- /assets/images/codepipeline.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/comunet/aws-devsecops-cicd/cb881cc66640674f44dbc65db2e428acffecedc6/assets/images/codepipeline.png -------------------------------------------------------------------------------- /assets/images/denied_icon.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/comunet/aws-devsecops-cicd/cb881cc66640674f44dbc65db2e428acffecedc6/assets/images/denied_icon.png -------------------------------------------------------------------------------- /assets/images/denied_icon_trans.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/comunet/aws-devsecops-cicd/cb881cc66640674f44dbc65db2e428acffecedc6/assets/images/denied_icon_trans.png -------------------------------------------------------------------------------- /assets/images/error_icon.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/comunet/aws-devsecops-cicd/cb881cc66640674f44dbc65db2e428acffecedc6/assets/images/error_icon.png -------------------------------------------------------------------------------- /assets/images/error_icon_trans.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/comunet/aws-devsecops-cicd/cb881cc66640674f44dbc65db2e428acffecedc6/assets/images/error_icon_trans.png -------------------------------------------------------------------------------- /assets/images/question_icon.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/comunet/aws-devsecops-cicd/cb881cc66640674f44dbc65db2e428acffecedc6/assets/images/question_icon.png -------------------------------------------------------------------------------- /assets/images/question_icon_trans.png: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/comunet/aws-devsecops-cicd/cb881cc66640674f44dbc65db2e428acffecedc6/assets/images/question_icon_trans.png -------------------------------------------------------------------------------- /assets/images/success_icon.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/comunet/aws-devsecops-cicd/cb881cc66640674f44dbc65db2e428acffecedc6/assets/images/success_icon.png -------------------------------------------------------------------------------- /assets/images/success_icon_trans.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/comunet/aws-devsecops-cicd/cb881cc66640674f44dbc65db2e428acffecedc6/assets/images/success_icon_trans.png -------------------------------------------------------------------------------- /assets/images/unknown_icon.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/comunet/aws-devsecops-cicd/cb881cc66640674f44dbc65db2e428acffecedc6/assets/images/unknown_icon.png -------------------------------------------------------------------------------- /assets/images/unknown_icon_trans.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/comunet/aws-devsecops-cicd/cb881cc66640674f44dbc65db2e428acffecedc6/assets/images/unknown_icon_trans.png -------------------------------------------------------------------------------- /cf/cicd/build_dynamo.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | #set stop on error 4 | set -e 5 | 6 | usage() { 7 | echo "options:" 8 | echo "p The AWS Credentials Profile (used for running script locally)" 9 | echo "r The Resource Prefix for resource names" 10 | echo "y The AWS Region for deployed management resources" 11 | echo "Usage: $0 [ -p awsProfile ] [ -r projectResourcePrefix ] " 1>&2 12 | } 13 | exit_abnormal() { # Function: Exit with error. 14 | usage 15 | exit 1 16 | } 17 | # check whether user had supplied -h or --help . 
If yes display usage 18 | if [[ $1 == "--help" || $1 == "-h" ]] 19 | then 20 | usage 21 | exit 0 22 | fi 23 | #get params 24 | while getopts :p:r:y: flag 25 | do 26 | case "${flag}" in 27 | p) awsProfile=${OPTARG};; 28 | r) projectResourcePrefix=${OPTARG};; 29 | y) awsRegion=${OPTARG};; 30 | esac 31 | done 32 | 33 | if [[ "$projectResourcePrefix" == "" || "$awsRegion" == "" ]]; then 34 | echo "Error: (opt -r and -y) must have a value" 35 | exit_abnormal 36 | exit 1 37 | fi 38 | 39 | pwd 40 | origPath=$(pwd) 41 | 42 | # declare DynamoDB Config Data Folder 43 | declare -a dynamoDBConfigDataDirs=("./config") 44 | pwd 45 | 46 | echo "Update DynamoDB DevSecOps configuration data" 47 | #https://github.com/lmammino/json-dynamo-putrequest 48 | 49 | #Make build folder - only if it doesn't already exist (-p) 50 | mkdir -p .build 51 | echo " Step #1 - Finding and converting files in path ${dynamoDBConfigDataDirs[@]} to dynamo request format" 52 | 53 | for f in ${dynamoDBConfigDataDirs[@]}/*; do 54 | echo " - processing file $f" 55 | fileName=$(basename -s .json $f) 56 | dynamoDbTableName="${projectResourcePrefix}-DynDBTable-${fileName}" 57 | json-dynamo-putrequest $dynamoDbTableName < $f > .build/${fileName}_dynamo.json 58 | done 59 | 60 | echo " Step #2 - Writing requests to dynamodb" 61 | for outputFile in .build/*_dynamo.json; do 62 | echo " - uploading file $outputFile" 63 | #Check if awsProfile has a non-null/non-zero value 64 | if [ -n "${awsProfile}" ]; then 65 | echo " - running using local profile '${awsProfile}'" 66 | aws dynamodb batch-write-item --region ${awsRegion} --profile ${awsProfile} --request-items file://$outputFile 67 | else 68 | echo " - running using default profile" 69 | aws dynamodb batch-write-item --region ${awsRegion} --request-items file://$outputFile 70 | fi 71 | done 72 | 73 | cd $origPath 74 | pwd 75 | -------------------------------------------------------------------------------- /cf/cicd/build_lambdas.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # these are the directories we want to check for lambda functions 4 | declare -a lambdirs=("./code/nodejs/src") 5 | pwd 6 | origPath=$(pwd) 7 | for p in "${lambdirs[@]}" 8 | do 9 | ( 10 | for d in "$p"/* 11 | do 12 | ( 13 | cd $d 14 | pwd 15 | if [ -f "index.js" ] && [ -f "package.json" ]; then 16 | # only process the lambda function if the requisite files exist 17 | 18 | # delete the node_modules folder to ensure that the npm install definitely gets all the required files 19 | rm -rf node_modules 20 | 21 | npm install 22 | 23 | ## build the lambda package zip 24 | node-lambda package -A ./.build -D . 25 | fi 26 | ) 27 | done 28 | ) 29 | done 30 | cd $origPath 31 | pwd 32 | -------------------------------------------------------------------------------- /cf/cicd/build_rdk.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | #set stop on error 4 | set -e 5 | 6 | echo "> Executing RDK Script" 7 | usage() { 8 | echo "options:" 9 | echo "l The Lambda Role Arn to be used by RDK" 10 | echo "s The S3 Output Bucket for the generated RDK files" 11 | echo "e The Environment Type for the running execution" 12 | echo "Usage: $0 [ -l LambdaRoleArn ] [ -s s3OutputBucket ] [ -e environmentType ] " 1>&2 13 | } 14 | exit_abnormal() { # Function: Exit with error. 15 | usage 16 | exit 1 17 | } 18 | # check whether the user supplied -h or --help .
If yes display usage 19 | if [[ ( $# == "--help") || $# == "-h" ]] 20 | then 21 | usage 22 | exit 0 23 | fi 24 | #get params 25 | while getopts :l:s:e: flag 26 | do 27 | case "${flag}" in 28 | l) LambdaRoleArn=${OPTARG};; 29 | s) s3OutputBucket=${OPTARG};; 30 | e) environmentType=${OPTARG};; 31 | esac 32 | done 33 | 34 | if [[ "$s3OutputBucket" == "" || "$LambdaRoleArn" == "" || "$environmentType" == "" ]]; then 35 | echo "Error: (opt -s, -l and -e) must have a value" 36 | exit_abnormal 37 | exit 1 38 | fi 39 | 40 | pwd 41 | origPath=$(pwd) 42 | 43 | #Make build folder - only if doesnt already exist (-p) 44 | mkdir -p .build 45 | mkdir -p ./.build/rdk/ 46 | 47 | cd code/python-rdk 48 | 49 | # For each folder in RDK, find parameter.json, replace ENV with environmentType 50 | for l_paramFile in $(find . -type d \( -name _template \) -prune -false -o -name 'parameters.json'); do 51 | echo "Updating $l_paramFile with environment type" 52 | sed -i "s/{ENV}/${environmentType}/g" $l_paramFile 53 | cat $l_paramFile 54 | done 55 | 56 | echo " - Deploying RDK Rules" 57 | rdk deploy -s "all-rules-${environmentType}" --stack-name "awsconfig-allrules-lambda-${environmentType}" -f --lambda-role-arn $LambdaRoleArn --custom-code-bucket $s3OutputBucket --lambda-timeout 600 58 | 59 | echo " - Creating Rule Template" 60 | rdk create-rule-template -s "all-rules-${environmentType}" --rules-only -o "../../.build/rdk/awsconfig-allrules-${environmentType}.template.json" 61 | 62 | cd $origPath 63 | pwd 64 | echo "> End RDK Script" -------------------------------------------------------------------------------- /cf/cicd/build_ts_aws_cdk.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | #set stop on error 4 | set -e 5 | 6 | usage() { 7 | echo "options:" 8 | echo "a The AWS Account Id of CodeBuild (required for RDK)" 9 | echo "b The unique BuildGuid" 10 | echo "p The AWS Credentials Profile (used for running script locally)" 11 | echo "r The Resource Prefix for resource names" 12 | echo "Usage: $0 [ -b buildGuid ] [ -p awsProfile ] [ -r projectResourcePrefix ] " 1>&2 13 | } 14 | exit_abnormal() { # Function: Exit with error. 15 | usage 16 | exit 1 17 | } 18 | # check whether user had supplied -h or --help . 
If yes display usage 19 | if [[ ( $# == "--help") || $# == "-h" ]] 20 | then 21 | usage 22 | exit 0 23 | fi 24 | #get params 25 | while getopts :a:b:p:r: flag 26 | do 27 | case "${flag}" in 28 | a) awsAccountId=${OPTARG};; 29 | b) buildGuid=${OPTARG};; 30 | p) awsProfile=${OPTARG};; 31 | r) projectResourcePrefix=${OPTARG};; 32 | esac 33 | done 34 | 35 | if [[ "$buildGuid" == "" || "$projectResourcePrefix" == "" ]]; then 36 | echo "Error: (opt -a, -b, -p and -r) must have a value" 37 | exit_abnormal 38 | exit 1 39 | fi 40 | 41 | pwd 42 | origPath=$(pwd) 43 | # move to CDK location 44 | declare -a tscdkdirs=("./code/ts-cdk/src") 45 | pwd 46 | for p in "${tscdkdirs[@]}" 47 | do 48 | ( 49 | for d in "$p"/* 50 | do 51 | ( 52 | cd $d 53 | pwd 54 | if [ -f "cdk.json" ] && [ -f "package.json" ]; then 55 | # only process CDK function if the requisite files exist 56 | cdkProjectName=${PWD##*/} # to assign to a variable 57 | npm install 58 | npm run build; 59 | 60 | #Check if awsProfile has a non-null/non-zero value 61 | if [ -n "${awsProfile}" ]; then 62 | echo "Running using local profile '${awsProfile}'" 63 | cdk synth --context buildGuid=${buildGuid} --context awsprofile=${awsProfile} --context awsAccountId=${awsAccountId} --context projectResourcePrefix=${projectResourcePrefix} -o=${origPath}/.build/cdk 64 | else 65 | echo "Running using default profile" 66 | cdk synth --context buildGuid=${buildGuid} --context awsAccountId=${awsAccountId} --context projectResourcePrefix=${projectResourcePrefix} -o=${origPath}/.build/cdk 67 | fi 68 | fi 69 | ) 70 | done 71 | ) 72 | done 73 | cd $origPath 74 | pwd 75 | -------------------------------------------------------------------------------- /cf/cicd/buildspec_prod.yaml: -------------------------------------------------------------------------------- 1 | version: 0.2 2 | #Paths are relative to project root 3 | phases: 4 | install: 5 | runtime-versions: 6 | nodejs: 18 7 | commands: 8 | - orignpmversion=$(npm -v) 9 | - echo "current npm version:" $orignpmversion 10 | - pip install rdk 11 | pre_build: 12 | commands: 13 | # Setting up build for Lambda NodeJS Functions 14 | - cd $CODEBUILD_SRC_DIR/ 15 | - chmod +x ./cf/cicd/*.sh 16 | build: 17 | commands: 18 | # Get unique Build Guid from Initial CodeBuild Step 19 | - buildGuid=`cat "${CODEBUILD_SRC_DIR_DeployableArtifactInitial}/buildguid.txt"` 20 | - echo "buildGuid=${buildGuid}" 21 | - pwd 22 | - cd $CODEBUILD_SRC_DIR/ 23 | - pwd 24 | # Build AWS Config RDK 25 | - ./cf/cicd/build_rdk.sh -s $RDK_RULES_S3_BUCKET -l $RDK_LAMBDA_ROLE -e $ENVIRONMENT_TYPE 26 | # Update Rule Template to Artifact S3 Bucket 27 | - echo "Copy RDK Generated CF Stack Templates to S3" 28 | - | 29 | for cf_template in $(find ./.build/rdk -type d \( -name _template \) -prune -false -o -name '*.template.json'); do 30 | templateName="$(echo $cf_template | rev | cut -d'/' -f 1 | rev)" 31 | echo "$templateName" 32 | aws s3 cp "$cf_template" "s3://$CODEPIPELINE_BUCKET/$buildGuid/$templateName" --sse aws:kms --sse-kms-key-id $KMS_KEY_ARN 33 | done 34 | -------------------------------------------------------------------------------- /cf/cicd/buildspec_test.yaml: -------------------------------------------------------------------------------- 1 | version: 0.2 2 | #Paths are relative to project root 3 | phases: 4 | install: 5 | runtime-versions: 6 | nodejs: 18 7 | commands: 8 | - orignpmversion=$(npm -v) 9 | - echo "current npm version:" $orignpmversion 10 | - npm install -g jsonlint 11 | - npm install -g json-dynamo-putrequest 12 | - npm install 
-g node-lambda 13 | - npm install -g typescript 14 | - npm install -g aws-cdk 15 | - pip install rdk 16 | pre_build: 17 | commands: 18 | # Setting up build for Lambda NodeJS Functions 19 | - cd $CODEBUILD_SRC_DIR/ 20 | - chmod +x ./cf/cicd/*.sh 21 | # Validating CFN Templates 22 | - echo "Validating Static CF templates" 23 | # This fancy line compiles all yaml in our project folder excluding the '_template' folder 24 | - | 25 | for f in $(find ./cf/project -type d \( -name _template \) -prune -false -o -name '*.yaml' -o -name '*.yml'); do 26 | echo "Validating CloudFormation template file $f" 27 | aws cloudformation validate-template --template-body file://$f 28 | done 29 | build: 30 | commands: 31 | # Create unique Build Guid 32 | - buildGuid=$(date +"%Y%m%d%H%S") 33 | - echo -n "$buildGuid" > buildguid.txt 34 | - pwd 35 | - cd $CODEBUILD_SRC_DIR/ 36 | - pwd 37 | # Build DynamoDB configuration data/update 38 | - ./cf/cicd/build_dynamo.sh -r $PROJECT_PREFIX -y $CURRENT_AWS_REGION 39 | # Package Lambdas as ZIP 40 | - ./cf/cicd/build_lambdas.sh 41 | # Build Typescript AWS CDK 42 | - ./cf/cicd/build_ts_aws_cdk.sh -b $buildGuid -r $PROJECT_PREFIX -a $CURRENT_AWS_ACCOUNT_ID 43 | # Build AWS Config RDK 44 | - ./cf/cicd/build_rdk.sh -s $RDK_RULES_S3_BUCKET -l $RDK_LAMBDA_ROLE -e $ENVIRONMENT_TYPE 45 | # Copy stacks to S3 46 | - echo "Copy Static CF Stack Templates to S3" 47 | - | 48 | for cf_template in $(find ./cf/project -type d \( -name _template \) -prune -false -o -name '*.yaml' -o -name '*.yml'); do 49 | templateName="$(echo $cf_template | rev | cut -d'/' -f 1 | rev)" 50 | echo "$templateName" 51 | aws cloudformation package --template-file $cf_template --output-template-file $templateName --s3-bucket $CODEPIPELINE_BUCKET --kms-key-id $KMS_KEY_ARN 52 | aws s3 cp "$templateName" "s3://$CODEPIPELINE_BUCKET/$buildGuid/$templateName" --sse aws:kms --sse-kms-key-id $KMS_KEY_ARN 53 | done 54 | - echo "Copy CDK Generated CF Stack Templates to S3" 55 | - | 56 | for cf_template in $(find ./.build/cdk -type d \( -name _template \) -prune -false -o -name '*.template.json'); do 57 | templateName="$(echo $cf_template | rev | cut -d'/' -f 1 | rev)" 58 | echo "$templateName" 59 | aws cloudformation package --template-file $cf_template --output-template-file $templateName --s3-bucket $CODEPIPELINE_BUCKET --kms-key-id $KMS_KEY_ARN 60 | aws s3 cp "$templateName" "s3://$CODEPIPELINE_BUCKET/$buildGuid/$templateName" --sse aws:kms --sse-kms-key-id $KMS_KEY_ARN 61 | done 62 | # Update Rule Template to Artifact S3 Bucket 63 | - echo "Copy RDK Generated CF Stack Templates to S3" 64 | - | 65 | for cf_template in $(find ./.build/rdk -type d \( -name _template \) -prune -false -o -name '*.template.json'); do 66 | templateName="$(echo $cf_template | rev | cut -d'/' -f 1 | rev)" 67 | echo "$templateName" 68 | aws s3 cp "$cf_template" "s3://$CODEPIPELINE_BUCKET/$buildGuid/$templateName" --sse aws:kms --sse-kms-key-id $KMS_KEY_ARN 69 | done 70 | artifacts: 71 | files: 72 | - ./MasterStack-*.template.json 73 | - ./buildguid.txt 74 | -------------------------------------------------------------------------------- /cf/project/_template/XStack.yaml: -------------------------------------------------------------------------------- 1 | AWSTemplateFormatVersion: "2010-09-09" 2 | Description: "DevSecOps Resources for the DevSecOps Project" 3 | 4 | Parameters: 5 | ProjectFriendlyName: 6 | Type: String 7 | Description: Enter an friendly project name for Tagging 8 | ProjectResourcePrefix: 9 | Type: String 10 | Description: Enter a 
unique prefix for the project resources. 11 | EnvironmentType: 12 | Type: String 13 | AllowedValues: 14 | - prod 15 | - test 16 | Description: Enter destination environment type 17 | EnvironmentFriendlyName: 18 | Type: String 19 | Description: Friendly environment name of project 20 | BuildGuid: 21 | Type: String 22 | Description: Unique Id used in build to force CloudFormation StackSet to update 23 | #Project Specific Parameters >> 24 | 25 | #<< Project Specific Parameters 26 | 27 | Resources: 28 | 29 | #Add DevSecOps Resource Here.. 30 | 31 | # TestS3Deploy: 32 | # Type: AWS::S3::Bucket 33 | # Properties: 34 | # BucketName: !Sub "${ProjectResourcePrefix}-${EnvironmentType}-${AWS::AccountId}" 35 | # PublicAccessBlockConfiguration: 36 | # BlockPublicAcls: True 37 | # BlockPublicPolicy: True 38 | # IgnorePublicAcls: True 39 | # RestrictPublicBuckets: True 40 | # Tags: 41 | # - 42 | # Key: "Project" 43 | # Value: 44 | # !Sub "${ProjectFriendlyName}" 45 | # - 46 | # Key: "Environment" 47 | # Value: !Ref EnvironmentFriendlyName 48 | # - 49 | # Key: "Purpose" 50 | # Value: "Pipeline Orchestration" -------------------------------------------------------------------------------- /cf/project/iamselfmanagedstacksettrustroles/IAMSelfManagedStackSetTrustRolesStack.yaml: -------------------------------------------------------------------------------- 1 | AWSTemplateFormatVersion: "2010-09-09" 2 | Description: "Creates Child Account roles in Account to allow AWS Organisations Self-managed StackSet Deployment" 3 | 4 | Parameters: 5 | ProjectFriendlyName: 6 | Type: String 7 | Description: Enter an friendly project name for Tagging 8 | ProjectResourcePrefix: 9 | Type: String 10 | Description: Enter a unique prefix for the project resources. 11 | EnvironmentType: 12 | Type: String 13 | AllowedValues: 14 | - prod 15 | - test 16 | Description: Enter destination environment type 17 | EnvironmentFriendlyName: 18 | Type: String 19 | Description: Friendly environment name of project 20 | BuildGuid: 21 | Type: String 22 | Description: Unique Id used in build to force CloudFormation StackSet to update 23 | #Project Specific Parameters >> 24 | DeploymentAccountAWSCloudFormationStackSetAdministrationRoleArn: 25 | Type: String 26 | Description: Arn of the Deployment Account AWSCloudFormationStackSetAdministrationRole 27 | DeploymentAccountLambdaVPCStackSetInstanceExecutionRoleArn: 28 | Type: String 29 | Description: Arn of the Deployment Account AWSLambdaVPCInstanceExecutionRole 30 | #<< Project Specific Parameters 31 | 32 | Resources: 33 | # IAM Role for CloudFormation StackSet Execution in a Child Account account 34 | AWSCloudFormationStackSetExecutionRole: 35 | Type: AWS::IAM::Role 36 | Metadata: 37 | cfn_nag: 38 | rules_to_suppress: 39 | - id: W28 40 | reason: "The role name is defined to allow cross account access from the master account." 41 | - id: W43 42 | reason: "The admin permissions needed for StackSet service to deploy unknown customer defined resources." 
43 | Properties: 44 | RoleName: AWSCloudFormationStackSetExecutionRole 45 | AssumeRolePolicyDocument: 46 | Version: '2012-10-17' 47 | Statement: 48 | - Effect: Allow 49 | Principal: 50 | AWS: 51 | - !Sub "${DeploymentAccountAWSCloudFormationStackSetAdministrationRoleArn}" 52 | - !Sub "${DeploymentAccountLambdaVPCStackSetInstanceExecutionRoleArn}" 53 | Action: 54 | - sts:AssumeRole 55 | ManagedPolicyArns: 56 | - arn:aws:iam::aws:policy/AdministratorAccess 57 | -------------------------------------------------------------------------------- /cf/project/rdkconfigrules/RDKConfigCrossAccountRole.yaml: -------------------------------------------------------------------------------- 1 | AWSTemplateFormatVersion: '2010-09-09' 2 | Description: AWS CloudFormation template to create custom AWS Config role. 3 | 4 | Parameters: 5 | ProjectFriendlyName: 6 | Type: String 7 | Description: Enter an friendly project name for Tagging 8 | ProjectResourcePrefix: 9 | Type: String 10 | Description: Enter a unique prefix for the project resources. 11 | EnvironmentType: 12 | Type: String 13 | AllowedValues: 14 | - prod 15 | - test 16 | Description: Enter destination environment type 17 | EnvironmentFriendlyName: 18 | Type: String 19 | Description: Friendly environment name of project 20 | BuildGuid: 21 | Type: String 22 | Description: Unique Id used in build to force CloudFormation StackSet to update 23 | #Project Specific Parameters >> 24 | AWSDeploymentAccountNumber: 25 | Type: String 26 | Description: AWS Account Id of the Deployment Account for where RDK rules will be run from 27 | #<< Project Specific Parameters 28 | 29 | Resources: 30 | CrossAccountRDKConfigRole: 31 | Type: AWS::IAM::Role 32 | Properties: 33 | #This role name is used in the RDK Config master stack and so if changed here should be changed there 34 | RoleName: rdk-cross-account-config-role 35 | Path: "/" 36 | ManagedPolicyArns: 37 | - !Sub "arn:${AWS::Partition}:iam::aws:policy/service-role/AWSConfigRole" 38 | - !Sub "arn:${AWS::Partition}:iam::aws:policy/ReadOnlyAccess" 39 | AssumeRolePolicyDocument: 40 | Version: '2012-10-17' 41 | Statement: 42 | - Sid: LOCAL 43 | Effect: Allow 44 | Principal: 45 | Service: 46 | - config.amazonaws.com 47 | Action: sts:AssumeRole 48 | - Sid: REMOTE 49 | Effect: Allow 50 | Principal: 51 | AWS: !Sub "arn:aws:iam::${AWSDeploymentAccountNumber}:role/RDK-Config-Lambda-Role" 52 | Action: sts:AssumeRole 53 | -------------------------------------------------------------------------------- /cf/project/s3example/S3Stack.yaml: -------------------------------------------------------------------------------- 1 | AWSTemplateFormatVersion: "2010-09-09" 2 | Description: "Test deployment of an S3 Bucket" 3 | 4 | Parameters: 5 | ProjectFriendlyName: 6 | Type: String 7 | Description: Enter an friendly project name for Tagging 8 | ProjectResourcePrefix: 9 | Type: String 10 | Description: Enter a unique prefix for the project resources. 
11 | EnvironmentType: 12 | Type: String 13 | AllowedValues: 14 | - prod 15 | - test 16 | Description: Enter destination environment type 17 | EnvironmentFriendlyName: 18 | Type: String 19 | Description: Friendly environment name of project 20 | BuildGuid: 21 | Type: String 22 | Description: Unique Id used in build to force CloudFormation StackSet to update 23 | #Project Specific Parameters >> 24 | 25 | #<< Project Specific Parameters 26 | 27 | Resources: 28 | 29 | TestS3Deploy: 30 | Type: AWS::S3::Bucket 31 | # DeletionPolicy: Retain 32 | Properties: 33 | BucketName: !Sub "${ProjectResourcePrefix}-${EnvironmentType}-${AWS::AccountId}" 34 | PublicAccessBlockConfiguration: 35 | BlockPublicAcls: True 36 | BlockPublicPolicy: True 37 | IgnorePublicAcls: True 38 | RestrictPublicBuckets: True 39 | Tags: 40 | - 41 | Key: "Project" 42 | Value: 43 | !Sub "${ProjectFriendlyName}" 44 | - 45 | Key: "Environment" 46 | Value: !Ref EnvironmentFriendlyName 47 | - 48 | Key: "Purpose" 49 | Value: "Pipeline Orchestration" 50 | -------------------------------------------------------------------------------- /cf/setup/01_create_codecommit_repo.yaml: -------------------------------------------------------------------------------- 1 | AWSTemplateFormatVersion: '2010-09-09' 2 | Description: Setup Stack (1) Create CodeCommit Repo in Sandboxed Account 3 | 4 | Parameters: 5 | #Application Generic Parameters 6 | ProjectFriendlyName: 7 | Type: String 8 | Default: PROJECT-FRIENDLY-NAME 9 | Description: Enter an friendly project name for Tagging 10 | 11 | ProjectResourcePrefix: 12 | Type: String 13 | Default: PROJECT-RESOURCE-PREFIX 14 | Description: Enter a unique prefix for the project resources. 15 | 16 | Resources: 17 | ProjectCodeCommitRepo: 18 | Type: AWS::CodeCommit::Repository 19 | Properties: 20 | RepositoryDescription: !Ref ProjectFriendlyName 21 | RepositoryName: !Sub "${ProjectResourcePrefix}-repo" 22 | Tags: 23 | - Key: Name 24 | Value: CodeCommit Repo 25 | - Key: Project 26 | Value: 27 | Ref: ProjectFriendlyName 28 | - Key: Purpose 29 | Value: Code Repository 30 | Outputs: 31 | ProjectCodeCommitRepoArn: 32 | Description: Project CodeCommit Repo Arn 33 | Value: !GetAtt ProjectCodeCommitRepo.Arn 34 | Export: 35 | Name: !Sub "${ProjectResourcePrefix}-repo-arn" -------------------------------------------------------------------------------- /cf/setup/02_deployment_artifacts_bucket.yaml: -------------------------------------------------------------------------------- 1 | AWSTemplateFormatVersion: "2010-09-09" 2 | Description: "Setup Stack (1) CodePipeline Cross Account Deployment Stack" 3 | 4 | Parameters: 5 | ProjectFriendlyName: 6 | Type: String 7 | Default: PROJECT-FRIENDLY-NAME 8 | Description: Enter an friendly project name for Tagging 9 | 10 | ProjectResourcePrefix: 11 | Type: String 12 | Default: PROJECT-RESOURCE-PREFIX 13 | Description: Enter a unique prefix for the project resources. 
14 | 15 | Resources: 16 | 17 | LambdaArtifactBucket: 18 | Type: AWS::S3::Bucket 19 | # DeletionPolicy: Retain 20 | Properties: 21 | BucketName: !Sub "${ProjectResourcePrefix}-artifacts-lambda" 22 | PublicAccessBlockConfiguration: 23 | BlockPublicAcls: True 24 | BlockPublicPolicy: True 25 | IgnorePublicAcls: True 26 | RestrictPublicBuckets: True 27 | Tags: 28 | - 29 | Key: "Project" 30 | Value: 31 | !Sub "${ProjectFriendlyName}" 32 | - 33 | Key: "Purpose" 34 | Value: "Lambda Build Artifacts" 35 | 36 | PipelineArtifactBucket: 37 | Type: AWS::S3::Bucket 38 | # DeletionPolicy: Retain 39 | Properties: 40 | BucketName: !Sub "${ProjectResourcePrefix}-artifacts-codebuild" 41 | PublicAccessBlockConfiguration: 42 | BlockPublicAcls: True 43 | BlockPublicPolicy: True 44 | IgnorePublicAcls: True 45 | RestrictPublicBuckets: False 46 | Tags: 47 | - 48 | Key: "Project" 49 | Value: 50 | !Sub "${ProjectFriendlyName}" 51 | - 52 | Key: "Purpose" 53 | Value: "Pipeline Orchestration" 54 | 55 | KMSKey: 56 | Type: AWS::KMS::Key 57 | Properties: 58 | Description: Used by Assumed Roles in Dev/Test/Prod accounts to Encrypt/Decrypt artifact code 59 | EnableKeyRotation: true 60 | KeyPolicy: 61 | Version: "2012-10-17" 62 | Id: !Ref AWS::StackName 63 | Statement: 64 | - 65 | Sid: Allows admin of the key 66 | Effect: Allow 67 | Principal: 68 | AWS: !Sub arn:aws:iam::${AWS::AccountId}:root 69 | Action: 70 | - kms:* 71 | Resource: "*" 72 | 73 | KMSAlias: 74 | Type: AWS::KMS::Alias 75 | Properties: 76 | AliasName: !Sub "alias/kms-${ProjectResourcePrefix}" 77 | TargetKeyId: !Ref KMSKey 78 | 79 | PipelineArtifactBucketPolicy: 80 | Type: AWS::S3::BucketPolicy 81 | Properties: 82 | Bucket: 83 | Ref: PipelineArtifactBucket 84 | PolicyDocument: 85 | Statement: 86 | - 87 | Sid: "DenyUnEncryptedObjectUploads" 88 | Effect: Deny 89 | Action: 90 | - 's3:PutObject' 91 | Principal: "*" 92 | Resource: !Sub 'arn:aws:s3:::${ProjectResourcePrefix}-artifacts-codebuild/*' 93 | Condition: 94 | StringNotEquals: 95 | s3:x-amz-server-side-encryption: 96 | - "aws:kms" 97 | - 98 | Sid: "DenyInsecureConnections" 99 | Effect: Deny 100 | Action: 101 | - 's3:*' 102 | Principal: "*" 103 | Resource: !Sub 'arn:aws:s3:::${ProjectResourcePrefix}-artifacts-codebuild/*' 104 | Condition: 105 | Bool: 106 | aws:SecureTransport: 107 | - "false" 108 | - 109 | Sid: "PipelineToArtifactsBucketPutGet" 110 | Effect: Allow 111 | Action: 112 | - 's3:Get*' 113 | - 's3:Put*' 114 | Principal: 115 | AWS: 116 | - !Sub "arn:aws:iam::${AWS::AccountId}:root" 117 | Resource: !Sub 'arn:aws:s3:::${ProjectResourcePrefix}-artifacts-codebuild/*' 118 | - 119 | Sid: "PipelineToArtifactsBucketList" 120 | Effect: Allow 121 | Action: 122 | - 's3:ListBucket' 123 | Principal: 124 | AWS: 125 | - !Sub "arn:aws:iam::${AWS::AccountId}:root" 126 | Resource: !Sub 'arn:aws:s3:::${ProjectResourcePrefix}-artifacts-codebuild' 127 | 128 | Outputs: 129 | ProjectResourcePrefix: 130 | Description: "The unique prefix given to project/application" 131 | Value: !Ref ProjectResourcePrefix 132 | 133 | ProjectFriendlyName: 134 | Description: "The user-friendly name given to project/application" 135 | Value: !Ref ProjectFriendlyName 136 | 137 | StackName: 138 | Description: "Name of this pipeline stack" 139 | Value: !Ref AWS::StackName 140 | 141 | CodePipelineKMSKeyArn: 142 | Description: "The KMS Arn used for Cross-account deployment of this pipeline" 143 | Value: !GetAtt KMSKey.Arn 144 | Export: 145 | Name: !Sub "${ProjectResourcePrefix}-cp-kms-key" 146 | 
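For reference, a minimal sketch of deploying this artifacts/KMS setup stack on its own with the AWS CLI (the stack name, resource prefix and profile below are placeholder values; the repository's /cf/setup/automated_deployment.sh and /cf/setup/Manual_Deployment.md remain the intended deployment paths):

#!/bin/bash
# Hypothetical standalone deployment of setup stack 02 from the deployment account.
# Stack name, ProjectFriendlyName, ProjectResourcePrefix and --profile are placeholders.
aws cloudformation deploy \
  --template-file cf/setup/02_deployment_artifacts_bucket.yaml \
  --stack-name devsecops-setup-02-artifacts \
  --parameter-overrides \
      ProjectFriendlyName="My DevSecOps Project" \
      ProjectResourcePrefix="myorg-devsecops" \
  --profile deployment-account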
-------------------------------------------------------------------------------- /cf/setup/03_iam_role_codepipeline.yaml: -------------------------------------------------------------------------------- 1 | AWSTemplateFormatVersion: '2010-09-09' 2 | Description: Setup Stack (3) Creates Sandboxed IAM Role for CodePipeline to access CodeCommit 3 | 4 | Parameters: 5 | ProjectFriendlyName: 6 | Type: String 7 | Default: PROJECT-FRIENDLY-NAME 8 | Description: Enter an friendly project name for Tagging 9 | 10 | ProjectResourcePrefix: 11 | Type: String 12 | Default: PROJECT-RESOURCE-PREFIX 13 | Description: Enter a unique prefix for the project resources. 14 | 15 | KMSKeyArn: 16 | Description: ARN of the unique Project KMS key created in the DEPLOYMENT Account 17 | Type: String 18 | Default: 'arn:aws:kms:us-east-1:011111122222:key/XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX' 19 | 20 | Resources: 21 | 22 | #Role that allows our DEPLOYMENT account to access this Repo 23 | CPCodeCommitRole: 24 | Type: AWS::IAM::Role 25 | Properties: 26 | RoleName: 27 | !Sub "cp-cc-role-${ProjectResourcePrefix}" 28 | AssumeRolePolicyDocument: 29 | Version: 2012-10-17 30 | Statement: 31 | - 32 | Effect: Allow 33 | Principal: 34 | AWS: 35 | - !Sub "${AWS::AccountId}" 36 | Action: 37 | - sts:AssumeRole 38 | Path: / 39 | # ManagedPolicyArns: 40 | # - 'arn:aws:iam::aws:policy/AWSCodeCommitPowerUser' 41 | CPCrossAccountRolePolicy: 42 | Type: AWS::IAM::Policy 43 | Properties: 44 | PolicyName: CodePipelineCrossAccountRolePolicy 45 | PolicyDocument: 46 | Version: 2012-10-17 47 | Statement: 48 | - Sid: CodeCommitAccess 49 | Effect: Allow 50 | Action: 51 | - codecommit:BatchGet* 52 | - codecommit:Create* 53 | - codecommit:DeleteBranch 54 | - codecommit:Get* 55 | - codecommit:List* 56 | - codecommit:Describe* 57 | - codecommit:Put* 58 | - codecommit:Post* 59 | - codecommit:Merge* 60 | - codecommit:Test* 61 | - codecommit:Update* 62 | - codecommit:GitPull 63 | - codecommit:GitPush 64 | - codecommit:UploadArchive 65 | Resource: 66 | - Fn::ImportValue: 67 | !Sub "${ProjectResourcePrefix}-repo-arn" 68 | - Sid: CodeCommitListAccess 69 | Effect: Allow 70 | Action: 71 | - codecommit:ListRepositories 72 | Resource: "*" 73 | - Sid: S3Access 74 | Effect: Allow 75 | Action: 76 | - s3:GetObject* 77 | - s3:PutObject 78 | - s3:PutObjectAcl 79 | Resource: 80 | - !Sub "arn:aws:s3:::${ProjectResourcePrefix}-artifacts-codebuild/*" 81 | - 82 | Effect: Allow 83 | Action: 84 | - kms:Encrypt 85 | - kms:Decrypt 86 | - kms:ReEncrypt* 87 | - kms:GenerateDataKey* 88 | - kms:DescribeKey 89 | Resource: !Ref KMSKeyArn # Allow access to the KMS key in our deploy account 90 | Roles: 91 | - 92 | !Ref CPCodeCommitRole 93 | -------------------------------------------------------------------------------- /cf/setup/04_target_deploy_roles.yaml: -------------------------------------------------------------------------------- 1 | AWSTemplateFormatVersion: '2010-09-09' 2 | Description: Setup Stack (4) Creates roles to be assumed by CodePipeline services for cross account deployment 3 | Parameters: 4 | ProjectFriendlyName: 5 | Type: String 6 | Default: PROJECT-FRIENDLY-NAME 7 | Description: Enter an friendly project name for Tagging 8 | 9 | ProjectResourcePrefix: 10 | Type: String 11 | Default: PROJECT-RESOURCE-PREFIX 12 | Description: Enter a unique prefix for the project resources. 
13 | 14 | KMSKeyArn: 15 | Description: ARN of the unique Project KMS key created in the DEPLOYMENT Account 16 | Type: String 17 | Default: 'arn:aws:kms:us-east-1:011111122222:key/XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX' 18 | 19 | Resources: 20 | #Role that allows assumes the passrole and decypts build artifacts 21 | CPCrossAccountRole: 22 | Type: AWS::IAM::Role 23 | Properties: 24 | RoleName: 25 | !Sub "cp-ca-role-${ProjectResourcePrefix}" 26 | AssumeRolePolicyDocument: 27 | Version: 2012-10-17 28 | Statement: 29 | - 30 | Effect: Allow 31 | Principal: 32 | AWS: 33 | - !Sub "${AWS::AccountId}" 34 | Action: 35 | - sts:AssumeRole 36 | Path: / 37 | CPCrossAccountRolePolicy: 38 | Type: AWS::IAM::Policy 39 | Properties: 40 | PolicyName: CodePipelineCrossAccountRolePolicy 41 | PolicyDocument: 42 | Version: 2012-10-17 43 | Statement: 44 | - 45 | Effect: Allow 46 | Action: 47 | - cloudformation:* 48 | - s3:* 49 | - iam:PassRole 50 | - organizations:ListPoliciesForTarget 51 | - organizations:ListRoots 52 | - organizations:ListTargetsForPolicy 53 | - organizations:ListTagsForResource 54 | - organizations:ListDelegatedServicesForAccount 55 | - organizations:ListAWSServiceAccessForOrganization 56 | - organizations:ListChildren 57 | - organizations:ListPolicies 58 | - organizations:ListAccountsForParent 59 | - organizations:ListHandshakesForOrganization 60 | - organizations:ListDelegatedAdministrators 61 | - organizations:ListHandshakesForAccount 62 | - organizations:ListAccounts 63 | - organizations:ListCreateAccountStatus 64 | - organizations:ListParents 65 | - organizations:ListOrganizationalUnitsForParent 66 | Resource: "*" 67 | - 68 | Effect: Allow 69 | Action: 70 | - kms:Encrypt 71 | - kms:Decrypt 72 | - kms:ReEncrypt* 73 | - kms:GenerateDataKey* 74 | - kms:DescribeKey 75 | Resource: !Ref KMSKeyArn # Allow access to the KMS key in our deploy account 76 | Roles: 77 | - 78 | !Ref CPCrossAccountRole 79 | 80 | #Role that will run the Cloud Formation stack (from remote deployment). 
Needs unique permissions as fits project 81 | CodePipelineCloudFormationDeployRole: 82 | Type: AWS::IAM::Role 83 | Properties: 84 | RoleName: 85 | !Sub "cp-cf-role-${ProjectResourcePrefix}" 86 | AssumeRolePolicyDocument: 87 | Version: 2012-10-17 88 | Statement: 89 | - 90 | Effect: Allow 91 | Principal: 92 | Service: 93 | - cloudformation.amazonaws.com 94 | Action: 95 | - sts:AssumeRole 96 | Path: / 97 | CFDeployerPolicy: 98 | Type: AWS::IAM::Policy 99 | Properties: 100 | PolicyName: CFDeployerPolicy 101 | PolicyDocument: 102 | Version: 2012-10-17 103 | Statement: 104 | - 105 | Effect: Allow 106 | Action: 107 | - iam:CreateRole 108 | - iam:CreatePolicy 109 | - iam:GetRole 110 | - iam:DeleteRole 111 | - iam:PassRole 112 | - iam:GetRolePolicy 113 | - iam:PutRolePolicy 114 | - iam:DeleteRolePolicy 115 | - iam:AttachRolePolicy 116 | - iam:DetachRolePolicy # Adjust all this here as required, whatever your stack needs 117 | - iam:GetGroup 118 | - iam:CreateGroup 119 | - iam:DeleteGroup 120 | - iam:GetGroupPolicy 121 | - iam:PutGroupPolicy 122 | - iam:AttachGroupPolicy 123 | - iam:DetachGroupPolicy 124 | - iam:DeleteGroupPolicy 125 | - ec2:* 126 | - lambda:* 127 | - events:* 128 | - s3:* 129 | - apigateway:* 130 | - cloudformation:* 131 | - kms:* 132 | - servicecatalog:* 133 | - config:* 134 | - organizations:ListPoliciesForTarget 135 | - organizations:ListRoots 136 | - organizations:ListTargetsForPolicy 137 | - organizations:ListTagsForResource 138 | - organizations:ListDelegatedServicesForAccount 139 | - organizations:ListAWSServiceAccessForOrganization 140 | - organizations:ListChildren 141 | - organizations:ListPolicies 142 | - organizations:ListAccountsForParent 143 | - organizations:ListHandshakesForOrganization 144 | - organizations:ListDelegatedAdministrators 145 | - organizations:ListHandshakesForAccount 146 | - organizations:ListAccounts 147 | - organizations:ListCreateAccountStatus 148 | - organizations:ListParents 149 | - organizations:ListOrganizationalUnitsForParent 150 | Resource: "*" 151 | - 152 | Effect: Allow 153 | Action: 154 | - s3:PutObject 155 | - s3:GetBucketPolicy 156 | - s3:GetObject 157 | - s3:ListBucket 158 | Resource: 159 | - !Sub "arn:aws:s3:::${ProjectResourcePrefix}-artifacts-codebuild/*" 160 | - !Sub "arn:aws:s3:::${ProjectResourcePrefix}-artifacts-codebuild" 161 | 162 | Roles: 163 | - 164 | !Ref CodePipelineCloudFormationDeployRole 165 | -------------------------------------------------------------------------------- /cf/setup/05_orgs_stackset_selfmanaged_roles.yaml: -------------------------------------------------------------------------------- 1 | AWSTemplateFormatVersion: '2010-09-09' 2 | Description: Setup Stack (6) Creates roles for SELF MANAGED stacksets to be deployed to child accounts for targeted account-based stacks 3 | Parameters: 4 | ProjectFriendlyName: 5 | Type: String 6 | Default: PROJECT-FRIENDLY-NAME 7 | Description: Enter an friendly project name for Tagging 8 | 9 | ProjectResourcePrefix: 10 | Type: String 11 | Default: PROJECT-RESOURCE-PREFIX 12 | Description: Enter a unique prefix for the project resources. 
13 | 14 | Resources: 15 | 16 | LambdaVPCStackSetInstanceExecutionRole: 17 | Type: "AWS::IAM::Role" 18 | Properties: 19 | RoleName: LambdaVPCStackSetInstanceExecutionRole 20 | AssumeRolePolicyDocument: 21 | Version: "2012-10-17" 22 | Statement: 23 | - Effect: "Allow" 24 | Principal: 25 | Service: 26 | - lambda.amazonaws.com 27 | Action: "sts:AssumeRole" 28 | ManagedPolicyArns: 29 | - arn:aws:iam::aws:policy/AWSXrayFullAccess 30 | - arn:aws:iam::aws:policy/service-role/AWSLambdaBasicExecutionRole 31 | - arn:aws:iam::aws:policy/service-role/AWSLambdaRole 32 | - arn:aws:iam::aws:policy/AWSCloudFormationFullAccess 33 | Path: "/" 34 | Policies: 35 | - PolicyName: AssumeRole-AWSCloudFormationStackSetAdministrationRole 36 | PolicyDocument: 37 | Version: '2012-10-17' 38 | Statement: 39 | - Effect: Allow 40 | Action: 41 | - sts:AssumeRole 42 | Resource: 43 | - "arn:aws:iam::*:role/AWSCloudFormationStackSetAdministrationRole" 44 | - PolicyName: AssumeRole-AWSCloudFormationStackSetExecutionRolePolicy 45 | PolicyDocument: 46 | Version: '2012-10-17' 47 | Statement: 48 | - Effect: Allow 49 | Action: 50 | - sts:AssumeRole 51 | Resource: 52 | - "arn:aws:iam::*:role/AWSCloudFormationStackSetExecutionRole" 53 | - PolicyName: Logs 54 | PolicyDocument: 55 | Version: "2012-10-17" 56 | Statement: 57 | - Effect: Allow 58 | Action: 59 | - logs:* 60 | Resource: "*" 61 | 62 | # IAM Role for CloudFormation StackSet Administration in Deployment account 63 | AWSCloudFormationStackSetAdministrationRole: 64 | Metadata: 65 | cfn_nag: 66 | rules_to_suppress: 67 | - id: W11 68 | reason: "Allow * in the ARN of the execution role to allow cross account access to user created child account in the AWS Organizations" 69 | - id: W28 70 | reason: "The role name is defined to identify AWS Landing Zone resources." 
71 | Type: AWS::IAM::Role 72 | Properties: 73 | RoleName: AWSCloudFormationStackSetAdministrationRole 74 | AssumeRolePolicyDocument: 75 | Version: '2012-10-17' 76 | Statement: 77 | - Effect: Allow 78 | Principal: 79 | Service: cloudformation.amazonaws.com 80 | Action: 81 | - sts:AssumeRole 82 | - Effect: Allow 83 | Principal: 84 | AWS: 85 | - !GetAtt LambdaVPCStackSetInstanceExecutionRole.Arn 86 | Action: 87 | - sts:AssumeRole 88 | Path: / 89 | Policies: 90 | - PolicyName: AssumeRole-AWSCloudFormationStackSetExecutionRolePolicy 91 | PolicyDocument: 92 | Version: '2012-10-17' 93 | Statement: 94 | - Effect: Allow 95 | Action: 96 | - sts:AssumeRole 97 | Resource: 98 | - "arn:aws:iam::*:role/AWSCloudFormationStackSetExecutionRole" 99 | - PolicyName: EC2 100 | PolicyDocument: 101 | Version: "2012-10-17" 102 | Statement: 103 | - Effect: Allow 104 | Action: 105 | - ec2:DescribeAvailabilityZones 106 | Resource: "*" 107 | 108 | Outputs: 109 | AWSCloudFormationStackSetAdministrationRoleArn: 110 | Description: Arn of the Deployment Account AWS AWSCloudFormationStackSetAdministrationRole 111 | Value: !GetAtt AWSCloudFormationStackSetAdministrationRole.Arn 112 | Export: 113 | Name: !Sub 'AWSCloudFormationStackSetAdministrationRoleArn' 114 | 115 | LambdaVPCStackSetInstanceExecutionRoleArn: 116 | Description: Arn of the Deployment Account LambdaVPCInstanceExecutionRole 117 | Value: !GetAtt LambdaVPCStackSetInstanceExecutionRole.Arn 118 | Export: 119 | Name: !Sub 'LambdaVPCStackSetInstanceExecutionRoleArn' 120 | -------------------------------------------------------------------------------- /cf/setup/06_dynamo_db.yaml: -------------------------------------------------------------------------------- 1 | AWSTemplateFormatVersion: '2010-09-09' 2 | Description: Setup Stack (6) Creates DynamoDB Tables for storing DevSecOps configuration/orchestration data 3 | Parameters: 4 | ProjectFriendlyName: 5 | Type: String 6 | Default: PROJECT-FRIENDLY-NAME 7 | Description: Enter an friendly project name for Tagging 8 | 9 | ProjectResourcePrefix: 10 | Type: String 11 | Default: PROJECT-RESOURCE-PREFIX 12 | Description: Enter a unique prefix for the project resources. 
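# Each table below uses on-demand (PAY_PER_REQUEST) billing and a single string hash key
# (GlobalParams -> "object", OrgUnits -> "orgUnitId", Accounts -> "accountId",
# DeploymentGroups -> "groupCode", Stacks -> "name"), one table per JSON file under /config.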
13 | 14 | Resources: 15 | # DynamoDB Tables 16 | DynDbTableDevSecOpsGlobalParams: 17 | Type: "AWS::DynamoDB::Table" 18 | Properties: 19 | AttributeDefinitions: 20 | - 21 | AttributeName: "object" 22 | AttributeType: "S" 23 | KeySchema: 24 | - 25 | AttributeName: "object" 26 | KeyType: "HASH" 27 | TableName: !Sub '${ProjectResourcePrefix}-DynDBTable-GlobalParams' 28 | BillingMode: 'PAY_PER_REQUEST' 29 | Tags: 30 | - 31 | Key: "Project" 32 | Value: 33 | !Sub "${ProjectFriendlyName}" 34 | - 35 | Key: "Purpose" 36 | Value: "DevSecOps Orchestration Configuration" 37 | 38 | DynDbTableDevSecOpsOrgUnits: 39 | Type: "AWS::DynamoDB::Table" 40 | Properties: 41 | AttributeDefinitions: 42 | - 43 | AttributeName: "orgUnitId" 44 | AttributeType: "S" 45 | KeySchema: 46 | - 47 | AttributeName: "orgUnitId" 48 | KeyType: "HASH" 49 | TableName: !Sub '${ProjectResourcePrefix}-DynDBTable-OrgUnits' 50 | BillingMode: 'PAY_PER_REQUEST' 51 | Tags: 52 | - 53 | Key: "Project" 54 | Value: 55 | !Sub "${ProjectFriendlyName}" 56 | - 57 | Key: "Purpose" 58 | Value: "DevSecOps Orchestration Configuration" 59 | 60 | DynDbTableDevSecOpsAccounts: 61 | Type: "AWS::DynamoDB::Table" 62 | Properties: 63 | AttributeDefinitions: 64 | - 65 | AttributeName: "accountId" 66 | AttributeType: "S" 67 | KeySchema: 68 | - 69 | AttributeName: "accountId" 70 | KeyType: "HASH" 71 | TableName: !Sub '${ProjectResourcePrefix}-DynDBTable-Accounts' 72 | BillingMode: 'PAY_PER_REQUEST' 73 | Tags: 74 | - 75 | Key: "Project" 76 | Value: 77 | !Sub "${ProjectFriendlyName}" 78 | - 79 | Key: "Purpose" 80 | Value: "DevSecOps Orchestration Configuration" 81 | 82 | DynDbTableDevSecOpsDeploymentGroups: 83 | Type: "AWS::DynamoDB::Table" 84 | Properties: 85 | AttributeDefinitions: 86 | - 87 | AttributeName: "groupCode" 88 | AttributeType: "S" 89 | KeySchema: 90 | - 91 | AttributeName: "groupCode" 92 | KeyType: "HASH" 93 | TableName: !Sub '${ProjectResourcePrefix}-DynDBTable-DeploymentGroups' 94 | BillingMode: 'PAY_PER_REQUEST' 95 | Tags: 96 | - 97 | Key: "Project" 98 | Value: 99 | !Sub "${ProjectFriendlyName}" 100 | - 101 | Key: "Purpose" 102 | Value: "DevSecOps Orchestration Configuration" 103 | 104 | DynDbTableDevSecOpsStacks: 105 | Type: "AWS::DynamoDB::Table" 106 | Properties: 107 | AttributeDefinitions: 108 | - 109 | AttributeName: "name" 110 | AttributeType: "S" 111 | KeySchema: 112 | - 113 | AttributeName: "name" 114 | KeyType: "HASH" 115 | TableName: !Sub '${ProjectResourcePrefix}-DynDBTable-Stacks' 116 | BillingMode: 'PAY_PER_REQUEST' 117 | Tags: 118 | - 119 | Key: "Project" 120 | Value: 121 | !Sub "${ProjectFriendlyName}" 122 | - 123 | Key: "Purpose" 124 | Value: "DevSecOps Orchestration Configuration" 125 | 126 | Outputs: 127 | DynDbTableDevSecOpsGlobalParamsArn: 128 | Description: Arn of the DynamoDB Table for Global Parameters 129 | Value: !GetAtt [DynDbTableDevSecOpsGlobalParams,Arn] 130 | Export: 131 | Name: !Sub '${ProjectResourcePrefix}-DynDBTable-GlobalParams' 132 | 133 | DynDbTableDevSecOpsOrgUnitsArn: 134 | Description: Arn of the DynamoDB Table for tracking Org Units active regions 135 | Value: !GetAtt [DynDbTableDevSecOpsOrgUnits,Arn] 136 | Export: 137 | Name: !Sub '${ProjectResourcePrefix}-DynDBTable-OrgUnits' 138 | 139 | DynDbTableDevSecOpsAccountsArn: 140 | Description: Arn of the DynamoDB Table for tracking Accounts active regions 141 | Value: !GetAtt [DynDbTableDevSecOpsAccounts,Arn] 142 | Export: 143 | Name: !Sub '${ProjectResourcePrefix}-DynDBTable-Accounts' 144 | 145 | DynDbTableDevSecOpsDeploymentGroupsArn: 146 | Description: Arn 
of the DynamoDB Table for configuring DeploymentGroups 147 | Value: !GetAtt [DynDbTableDevSecOpsDeploymentGroups,Arn] 148 | Export: 149 | Name: !Sub '${ProjectResourcePrefix}-DynDBTable-DeploymentGroups' 150 | 151 | DynDbTableDevSecOpsStacksArn: 152 | Description: Arn of the DynamoDB Table for deployed Stacks 153 | Value: !GetAtt [DynDbTableDevSecOpsStacks,Arn] 154 | Export: 155 | Name: !Sub '${ProjectResourcePrefix}-DynDBTable-Stacks' 156 | -------------------------------------------------------------------------------- /cf/setup/07_setup_RDK_role_and_deploy_bucket.yaml: -------------------------------------------------------------------------------- 1 | AWSTemplateFormatVersion: '2010-09-09' 2 | Description: Template for the lambda role used by the AWS Config Rule Lambda's. 3 | Parameters: 4 | ProjectFriendlyName: 5 | Type: String 6 | Default: PROJECT-FRIENDLY-NAME 7 | Description: Enter an friendly project name for Tagging 8 | 9 | ProjectResourcePrefix: 10 | Type: String 11 | Default: PROJECT-RESOURCE-PREFIX 12 | Description: Enter a unique prefix for the project resources. 13 | 14 | Resources: 15 | RDKLambdaRole: 16 | Type: AWS::IAM::Role 17 | Properties: 18 | RoleName: 19 | RDK-Config-Lambda-Role 20 | AssumeRolePolicyDocument: 21 | Statement: 22 | - Action: 23 | - sts:AssumeRole 24 | Effect: Allow 25 | Principal: 26 | Service: 27 | - lambda.amazonaws.com 28 | Version: 2012-10-17 29 | ManagedPolicyArns: 30 | - arn:aws:iam::aws:policy/service-role/AWSLambdaBasicExecutionRole 31 | Path: / 32 | Policies: 33 | - 34 | PolicyName: "Assume-STS" 35 | PolicyDocument: 36 | Version: "2012-10-17" 37 | Statement: 38 | - 39 | Effect: "Allow" 40 | Action: "sts:AssumeRole" 41 | Resource: "*" 42 | Tags: 43 | - 44 | Key: "Project" 45 | Value: 46 | !Sub "${ProjectFriendlyName}" 47 | - 48 | Key: "Purpose" 49 | Value: "Role used by the RDK Config Rules to execute in a cross account manner" 50 | 51 | RDKLambdaArtifactBucketTest: 52 | Type: AWS::S3::Bucket 53 | # DeletionPolicy: Retain 54 | Properties: 55 | BucketName: !Sub "${ProjectResourcePrefix}-artifacts-rdk-lambda-test" 56 | PublicAccessBlockConfiguration: 57 | BlockPublicAcls: True 58 | BlockPublicPolicy: True 59 | IgnorePublicAcls: True 60 | RestrictPublicBuckets: True 61 | Tags: 62 | - 63 | Key: "Project" 64 | Value: 65 | !Sub "${ProjectFriendlyName}" 66 | - 67 | Key: "Environment" 68 | Value: "Test" 69 | - 70 | Key: "Purpose" 71 | Value: "RDK Lambda Build Artifacts" 72 | 73 | RDKLambdaArtifactBucketProd: 74 | Type: AWS::S3::Bucket 75 | # DeletionPolicy: Retain 76 | Properties: 77 | BucketName: !Sub "${ProjectResourcePrefix}-artifacts-rdk-lambda-prod" 78 | PublicAccessBlockConfiguration: 79 | BlockPublicAcls: True 80 | BlockPublicPolicy: True 81 | IgnorePublicAcls: True 82 | RestrictPublicBuckets: True 83 | Tags: 84 | - 85 | Key: "Project" 86 | Value: 87 | !Sub "${ProjectFriendlyName}" 88 | - 89 | Key: "Environment" 90 | Value: "Production" 91 | - 92 | Key: "Purpose" 93 | Value: "RDK Lambda Build Artifacts" 94 | 95 | Outputs: 96 | RDKLambdaRoleArn: 97 | Description: "Role used by the RDK Config Rules to execute in a cross account manner" 98 | Value: !GetAtt [RDKLambdaRole, Arn] 99 | Export: 100 | Name: !Sub '${ProjectResourcePrefix}-rdk-lambda-execution-role-arn' 101 | -------------------------------------------------------------------------------- /cf/setup/Manual_Deployment.md: -------------------------------------------------------------------------------- 1 | # AWS DevSecOps CI/CD Framework for AWS Organisations (v2) - Manual Deployment 2 | 3 | 
For ease of readability, the Manual Deployment steps have been moved into this separate document from the [README.md] file. 4 | 5 | For all other details of using the DevSecOps Framework please refer to the [README.md] file. 6 | 7 | ### 3.3 MANUAL DEPLOYMENT 8 | If you prefer the Manual Deployment, steps are outlined below. 9 | 10 | #### 3.3.1 Set deployment variables 11 | Open a bash terminal and run lines below to setup some bash variables which will be reused across script installations: 12 | ``` 13 | profileDeploymentAccount="PROFILE-ORG-DEPLOYMENTACCOUNT" 14 | projectResourcePrefix="PROJECT-RESOURCE-PREFIX" 15 | projectFriendlyName="PROJECT-FRIENDLY-NAME" 16 | emailFailedBuildNotifications="EMAIL-ADDR-FAILEDBUILD" 17 | emailApprovalNotifications="EMAIL-ADDR-APPROVALNOTIFICATIONS" 18 | awsregion="AWS-REGION" 19 | codeCommitRepoName="${projectResourcePrefix}-repo" 20 | codeCommitBranchName="main" 21 | s3ArtifactsBucket="${projectResourcePrefix}-artifacts-lambda" 22 | msTeamsHostName="MSTEAMS-HOSTNAME" 23 | msTeamsWebHookPath="MSTEAMS-WEBHOOKPATH" 24 | slackChannelName="SLACK-CHANNELNAME" 25 | slackWebHookPath="SLACK-WEBHOOKPATH" 26 | printf "Done" 27 | 28 | ``` 29 | 30 | #### 3.3.2 Upload Slack/MS Teams Settings to AWS Secrets in DEPLOYMENT account [OPTIONAL] 31 | 1. Create local variables to be used as Secret Values. 32 | * **[For Slack Integration]** 33 | Assuming you have performed (step 3.2.2.1 above) and replace script placeholders (3.1) 34 | ``` 35 | secretName="SlackSettings" 36 | secretValue="{\"slackChannelName\":\"${slackChannelName}\",\"slackWebHookPath\":\"${slackWebHookPath}\"}" 37 | ``` 38 | * **[MS Teams Integration]** 39 | Assuming you have performed (step 3.2.2.1 above) and replace script placeholders (3.1) 40 | ``` 41 | secretName="MSTeamsSettings" 42 | secretValue="{\"msTeamsHostname\":\"${msTeamsHostName}\",\"msTeamsWebHookPath\":\"${msTeamsWebHookPath}\"}" 43 | ``` 44 | 45 | 2. Create Secrets 46 | ``` 47 | aws secretsmanager create-secret --name $secretName --secret-string $secretValue --profile $profileDeploymentAccount --region $awsregion 48 | ``` 49 | 50 | #### Updating Slack/Teams AWS Secrets 51 | 52 | > Note: If you need to update your secret values you can use the following script. 53 | ``` 54 | aws secretsmanager update-secret --secret-id $secretName --secret-string $secretValue --profile $profileDeploymentAccount --region $awsregion 55 | ``` 56 | 57 | #### 3.3.3 Setup CodeCommit Repo in DEPLOYMENT account 58 | 1. Compile the CloudFormation script 59 | ``` 60 | aws cloudformation package --template-file ./cf/setup/01_create_codecommit_repo.yaml --output-template-file ./.build/_01_create_codecommit_repo.yaml --s3-bucket NOTUSED --profile $profileDeploymentAccount 61 | ``` 62 | 63 | 2. Deploy the CloudFormation script 64 | ``` 65 | aws cloudformation deploy --template-file ./.build/_01_create_codecommit_repo.yaml --stack-name "${projectResourcePrefix}-setup-codecommit-repo" --profile $profileDeploymentAccount --region $awsregion --capabilities CAPABILITY_NAMED_IAM --parameter-overrides ProjectResourcePrefix=$projectResourcePrefix 66 | ``` 67 | 68 | #### 3.3.4 Deploy the S3 Bucket for Build Artifacts with Policy + KMS Key to DEPLOYMENT Account 69 | 1. Compile the CloudFormation script 70 | ``` 71 | aws cloudformation package --template-file ./cf/setup/02_deployment_artifacts_bucket.yaml --output-template-file "./.build/_02_deployment_artifacts_bucket.yaml" --s3-bucket NOTUSED --profile $profileDeploymentAccount 72 | ``` 73 | 74 | 2. 
Deploy the CloudFormation script 75 | ``` 76 | aws cloudformation deploy --template-file "./.build/_02_deployment_artifacts_bucket.yaml" --stack-name "${projectResourcePrefix}-setup-artif" --profile $profileDeploymentAccount --region $awsregion --capabilities CAPABILITY_NAMED_IAM --parameter-overrides ProjectResourcePrefix=$projectResourcePrefix 77 | ``` 78 |
79 | #### 3.3.5 Get a copy of the KMS Key Arn just created 80 | This command stores the KMS Key Arn created in the previous step in a local variable 81 | 1. Query CloudFormation Stack for KMS Key Arn 82 | ``` 83 | get_cmk_command="aws cloudformation describe-stacks --stack-name "${projectResourcePrefix}-setup-artif" --profile $profileDeploymentAccount --region $awsregion --query \"Stacks[0].Outputs[?OutputKey=='CodePipelineKMSKeyArn'].OutputValue\" --output text" 84 | CodePipelineKMSKeyArn=$(eval $get_cmk_command) 85 | printf "Got CMK ARN: $CodePipelineKMSKeyArn" 86 | ``` 87 |
88 | #### 3.3.6 Setup IAM Roles for CodePipeline to access DEPLOYMENT Account 89 | 1. Compile the CloudFormation script 90 | ``` 91 | aws cloudformation package --template-file ./cf/setup/03_iam_role_codepipeline.yaml --output-template-file "./.build/_03_iam_role_codepipeline.yaml" --s3-bucket NOTUSED --profile $profileDeploymentAccount 92 | ``` 93 | 94 | 2. Deploy the CloudFormation script 95 | ``` 96 | aws cloudformation deploy --template-file "./.build/_03_iam_role_codepipeline.yaml" --stack-name "${projectResourcePrefix}-setup-cp-roles" --profile $profileDeploymentAccount --region $awsregion --capabilities CAPABILITY_NAMED_IAM --parameter-overrides KMSKeyArn=$CodePipelineKMSKeyArn ProjectResourcePrefix=$projectResourcePrefix RepoPrefix=$repoPrefix 97 | ``` 98 |
99 | #### 3.3.7 Deploy IAM Roles and KMS Trust with TARGET Account 100 | 1. Compile the CloudFormation script 101 | ``` 102 | aws cloudformation package --template-file ./cf/setup/04_target_deploy_roles.yaml --output-template-file "./.build/_04_target_deploy_roles.yaml" --s3-bucket NOTUSED --profile $profileDeploymentAccount 103 | ``` 104 | 105 | 2. Deploy the CloudFormation script 106 | ``` 107 | aws cloudformation deploy --template-file "./.build/_04_target_deploy_roles.yaml" --stack-name "${projectResourcePrefix}-setup-deployroles" --profile $profileDeploymentAccount --region $awsregion --capabilities CAPABILITY_NAMED_IAM --parameter-overrides KMSKeyArn=$CodePipelineKMSKeyArn ProjectResourcePrefix=$projectResourcePrefix 108 | ``` 109 |
110 | #### 3.3.8 Deploy StackSet Managed Self-service Roles to DEPLOYMENT Account 111 | 1. Compile the CloudFormation script 112 | ``` 113 | aws cloudformation package --template-file ./cf/setup/05_orgs_stackset_selfmanaged_roles.yaml --output-template-file "./.build/_05_orgs_stackset_selfmanaged_roles.yaml" --s3-bucket NOTUSED --profile $profileDeploymentAccount 114 | ``` 115 | 2. Deploy the CloudFormation script 116 | ``` 117 | aws cloudformation deploy --template-file "./.build/_05_orgs_stackset_selfmanaged_roles.yaml" --stack-name "${projectResourcePrefix}-setup-orgmngselfservroles" --profile $profileDeploymentAccount --region $awsregion --capabilities CAPABILITY_NAMED_IAM --parameter-overrides ProjectResourcePrefix=$projectResourcePrefix 118 | ``` 119 |
120 | #### 3.3.9 Setup DynamoDB tables used for Config Management to DEPLOYMENT Account 121 | 1.
Compile the CloudFormation script 122 | ``` 123 | aws cloudformation package --template-file ./cf/setup/06_dynamo_db.yaml --output-template-file "./.build/_06_dynamo_db.yaml" --s3-bucket NOTUSED --profile $profileDeploymentAccount 124 | ``` 125 | 2. Deploy the CloudFormation script 126 | ``` 127 | aws cloudformation deploy --template-file "./.build/_06_dynamo_db.yaml" --stack-name "${projectResourcePrefix}-setup-dyndb" --profile $profileDeploymentAccount --region $awsregion --capabilities CAPABILITY_NAMED_IAM --parameter-overrides ProjectResourcePrefix=$projectResourcePrefix 128 | ``` 129 |
130 | #### 3.3.10 Compile Lambda Extension for Slack/MS Teams Add-on [OPTIONAL] 131 | 1. Install node-lambda (if not already done) 132 | ``` 133 | npm install node-lambda -g 134 | ``` 135 | 136 | 2. Run the following script in bash 137 | ``` 138 | ./cf/cicd/build_lambdas.sh 139 | ``` 140 |
141 | #### 3.3.11 Setup RDK Lambda Role and rdk deploy bucket in DEPLOYMENT account 142 | 1. Compile the CloudFormation script 143 | ``` 144 | aws cloudformation package --template-file ./cf/setup/07_setup_RDK_role_and_deploy_bucket.yaml --output-template-file "./.build/07_setup_RDK_role_and_deploy_bucket.yaml" --s3-bucket NOTUSED --profile $profileDeploymentAccount 145 | ``` 146 | 2. Deploy the CloudFormation script 147 | ``` 148 | aws cloudformation deploy --template-file "./.build/07_setup_RDK_role_and_deploy_bucket.yaml" --stack-name "${projectResourcePrefix}-setup-rdk" --profile $profileDeploymentAccount --region $awsregion --capabilities CAPABILITY_NAMED_IAM --parameter-overrides ProjectResourcePrefix=$projectResourcePrefix 149 | ``` 150 |
151 | #### 3.3.12 Setup CI/CD Infrastructure Pipeline (CodePipeline) to DEPLOYMENT account 152 | 1. Compile the CloudFormation script 153 | ``` 154 | sam package --template-file ./cf/cicd/stackset_pipeline.yaml --output-template-file "./.build/_stackset_pipeline.yaml" --s3-bucket $s3ArtifactsBucket --profile $profileDeploymentAccount --region $awsregion 155 | ``` 156 | 157 | 2.
Deploy the CloudFormation script 158 | ``` 159 | sam deploy --template-file "./.build/_stackset_pipeline.yaml" --stack-name "${projectResourcePrefix}-pipeline" --profile $profileDeploymentAccount --region $awsregion --capabilities CAPABILITY_NAMED_IAM --parameter-overrides RepoName=$codeCommitRepoName BranchName=$codeCommitBranchName ProjectResourcePrefix=$projectResourcePrefix EmailFailedBuildNotifications=$emailFailedBuildNotifications EmailApprovalNotifications=$emailApprovalNotifications 160 | ``` 161 | -------------------------------------------------------------------------------- /cf/setup/automated_deployment.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | #set stop on error 4 | set -e 5 | GROUPCHATINTEGRATION="none" # Group Chat Integration 6 | SLACKCHANNELNAME="" # Slack Channel Name 7 | SLACKWEBHOOKPATH="" # Slack Web Hook Path 8 | MSTEAMSHOSTNAME="" # MS Teams Host Name 9 | MSTEAMSWEBHOOKPATH="" # MS Teams Web Hook Path 10 | 11 | SAM_CLI_TELEMETRY=0 12 | 13 | usage() { 14 | echo "options:" 15 | echo "i Group Chat Integration - Disabled (-i none), Slack (-i slack), MS Teams (-i msteams)" 16 | echo "c Set to the Slack Channel Name CICD will post to" 17 | echo "d Set to the Slack Web Hook Path of the Channel" 18 | echo "e Set to the MS Teams Host Name" 19 | echo "f Set to the MS Teams Web Hook Path of the Channel" 20 | echo "Usage: $0 [ -i GROUPCHATINTEGRATION ] [ -c SLACKCHANNELNAME ] [ -d SLACKWEBHOOKPATH ] [ -e MSTEAMSHOSTNAME ] [ -f MSTEAMSWEBHOOKPATH ]" 1>&2 21 | } 22 | exit_abnormal() { # Function: Exit with error. 23 | usage 24 | exit 1 25 | } 26 | # check whether user had supplied -h or --help . If yes display usage 27 | if [[ ( $# == "--help") || $# == "-h" ]] 28 | then 29 | usage 30 | exit 0 31 | fi 32 | #get params 33 | while getopts :i:c:d:e:f:h flag 34 | do 35 | case "${flag}" in 36 | i) GROUPCHATINTEGRATION=${OPTARG};; 37 | c) SLACKCHANNELNAME=${OPTARG};; 38 | d) SLACKWEBHOOKPATH=${OPTARG};; 39 | e) MSTEAMSHOSTNAME=${OPTARG};; 40 | f) MSTEAMSWEBHOOKPATH=${OPTARG};; 41 | esac 42 | done 43 | 44 | if [ "$GROUPCHATINTEGRATION" != "none" ] && [ "$GROUPCHATINTEGRATION" != "slack" ] && [ "$GROUPCHATINTEGRATION" != "msteams" ]; then 45 | echo "Error: (opt -i) must be either 'none', 'slack' or 'msteams'" 46 | exit_abnormal 47 | exit 1 48 | fi 49 | 50 | printf "\n[1 of 12] setting variables\n" 51 | profileDeploymentAccount="PROFILE-ORG-DEPLOYMENTACCOUNT" 52 | projectResourcePrefix="PROJECT-RESOURCE-PREFIX" 53 | projectFriendlyName="PROJECT-FRIENDLY-NAME" 54 | emailFailedBuildNotifications="EMAIL-ADDR-FAILEDBUILD" 55 | emailApprovalNotifications="EMAIL-ADDR-APPROVALNOTIFICATIONS" 56 | awsregion="AWS-REGION" 57 | codeCommitRepoName="${projectResourcePrefix}-repo" 58 | codeCommitBranchName="main" 59 | s3ArtifactsBucket="${projectResourcePrefix}-artifacts-lambda" 60 | 61 | if [ "$GROUPCHATINTEGRATION" == "slack" ] || [ "$GROUPCHATINTEGRATION" == "msteams" ] 62 | then 63 | printf "\n[2 of 12] creating slack secrets in secrets manager\n" 64 | if [ "$GROUPCHATINTEGRATION" == "slack" ] 65 | then 66 | secretName="SlackSettings" 67 | secretValue="{\"slackChannelName\":\"${SLACKCHANNELNAME}\",\"slackWebHookPath\":\"${SLACKWEBHOOKPATH}\"}" 68 | else 69 | secretName="MSTeamsSettings" 70 | secretValue="{\"msTeamsHostname\":\"${MSTEAMSHOSTNAME}\",\"msTeamsWebHookPath\":\"${MSTEAMSWEBHOOKPATH}\"}" 71 | fi 72 | get_slist_command="aws secretsmanager list-secrets --filter Key=name,Values=${secretName} --profile 
$profileDeploymentAccount --region $awsregion --query \"SecretList[0].ARN\" --output text" 73 | secret_arn=$(eval $get_slist_command) 74 | printf " > aws secrets query result - secret_arn: $secret_arn" 75 | # If secret_arn doesnt exist, create new secret otherwise update secret. 76 | if [ "$secret_arn" == "None" ] 77 | then 78 | printf "\n > creating new secret\n" 79 | aws secretsmanager create-secret --name $secretName --secret-string $secretValue --profile $profileDeploymentAccount --region $awsregion 80 | else 81 | printf "\n > updating secret values\n" 82 | aws secretsmanager update-secret --secret-id $secretName --secret-string $secretValue --profile $profileDeploymentAccount --region $awsregion 83 | fi 84 | else 85 | printf "\n[2 of 12] (STEP SKIPPED) no Group Chat integration selected\n" 86 | fi 87 | 88 | printf "\n[3 of 12] setup CodeCommit Repo in DEPLOYMENT account\n" 89 | aws cloudformation package --template-file ./cf/setup/01_create_codecommit_repo.yaml --output-template-file ./.build/_01_create_codecommit_repo.yaml --s3-bucket NOTUSED --profile $profileDeploymentAccount 90 | aws cloudformation deploy --template-file ./.build/_01_create_codecommit_repo.yaml --stack-name "${projectResourcePrefix}-setup-codecommit-repo" --profile $profileDeploymentAccount --region $awsregion --capabilities CAPABILITY_NAMED_IAM --parameter-overrides ProjectResourcePrefix=$projectResourcePrefix 91 | 92 | printf "\n[4 of 12] deploy the S3 Bucket for Build Artifacts with Policy + KMS Key to DEPLOYMENT Account\n" 93 | aws cloudformation package --template-file ./cf/setup/02_deployment_artifacts_bucket.yaml --output-template-file "./.build/_02_deployment_artifacts_bucket.yaml" --s3-bucket NOTUSED --profile $profileDeploymentAccount 94 | aws cloudformation deploy --template-file "./.build/_02_deployment_artifacts_bucket.yaml" --stack-name "${projectResourcePrefix}-setup-artif" --profile $profileDeploymentAccount --region $awsregion --capabilities CAPABILITY_NAMED_IAM --parameter-overrides ProjectResourcePrefix=$projectResourcePrefix 95 | 96 | printf "\n[5 of 12] get Copy of KMS Key Arn just created\n" 97 | #This command will copy to a local variable the KMS Key Arn for step 3.2 98 | get_cmk_command="aws cloudformation describe-stacks --stack-name "${projectResourcePrefix}-setup-artif" --profile $profileDeploymentAccount --region $awsregion --query \"Stacks[0].Outputs[?OutputKey=='CodePipelineKMSKeyArn'].OutputValue\" --output text" 99 | CodePipelineKMSKeyArn=$(eval $get_cmk_command) 100 | printf " > got CMK ARN: $CodePipelineKMSKeyArn" 101 | 102 | printf "\n[6 of 12] setup IAM Roles for CodePipeline to access DEPLOYMENT account\n" 103 | aws cloudformation package --template-file ./cf/setup/03_iam_role_codepipeline.yaml --output-template-file "./.build/_03_iam_role_codepipeline.yaml" --s3-bucket NOTUSED --profile $profileDeploymentAccount 104 | aws cloudformation deploy --template-file "./.build/_03_iam_role_codepipeline.yaml" --stack-name "${projectResourcePrefix}-setup-cp-roles" --profile $profileDeploymentAccount --region $awsregion --capabilities CAPABILITY_NAMED_IAM --parameter-overrides KMSKeyArn=$CodePipelineKMSKeyArn ProjectResourcePrefix=$projectResourcePrefix RepoPrefix=$repoPrefix 105 | 106 | printf "\n[7 of 12] deploy IAM Roles and KMS Trust with Account\n" 107 | aws cloudformation package --template-file ./cf/setup/04_target_deploy_roles.yaml --output-template-file "./.build/_04_target_deploy_roles.yaml" --s3-bucket NOTUSED --profile $profileDeploymentAccount 108 | aws cloudformation deploy 
--template-file "./.build/_04_target_deploy_roles.yaml" --stack-name "${projectResourcePrefix}-setup-deployroles" --profile $profileDeploymentAccount --region $awsregion --capabilities CAPABILITY_NAMED_IAM --parameter-overrides KMSKeyArn=$CodePipelineKMSKeyArn ProjectResourcePrefix=$projectResourcePrefix 109 | 110 | printf "\n[8 of 12] deploy StackSet Managed Self-service Roles to Deployment Account\n" 111 | aws cloudformation package --template-file ./cf/setup/05_orgs_stackset_selfmanaged_roles.yaml --output-template-file "./.build/_05_orgs_stackset_selfmanaged_roles.yaml" --s3-bucket NOTUSED --profile $profileDeploymentAccount 112 | aws cloudformation deploy --template-file "./.build/_05_orgs_stackset_selfmanaged_roles.yaml" --stack-name "${projectResourcePrefix}-setup-orgmngselfservroles" --profile $profileDeploymentAccount --region $awsregion --capabilities CAPABILITY_NAMED_IAM --parameter-overrides ProjectResourcePrefix=$projectResourcePrefix 113 | 114 | printf "\n[9 of 12] deploy Dynamo DB DevSecOps Configuration Tables to DEPLOYMENT Account\n" 115 | aws cloudformation package --template-file ./cf/setup/06_dynamo_db.yaml --output-template-file "./.build/_06_dynamo_db.yaml" --s3-bucket NOTUSED --profile $profileDeploymentAccount 116 | aws cloudformation deploy --template-file "./.build/_06_dynamo_db.yaml" --stack-name "${projectResourcePrefix}-setup-dyndb" --profile $profileDeploymentAccount --region $awsregion --capabilities CAPABILITY_NAMED_IAM --parameter-overrides ProjectResourcePrefix=$projectResourcePrefix 117 | 118 | printf "\n[10 of 12] compile Lambda Extension for Notifications Add-ons (Slack and MS Teams)" 119 | # install node-lambda (if not already done) 120 | # npm install node-lambda -g 121 | ./cf/cicd/build_lambdas.sh 122 | 123 | printf "\n [11 of 12] Setup RDK Lambda Role and rdk deploy bucket in DEPLOYMENT account\n" 124 | aws cloudformation package --template-file ./cf/setup/07_setup_RDK_role_and_deploy_bucket.yaml --output-template-file "./.build/07_setup_RDK_role_and_deploy_bucket.yaml" --s3-bucket NOTUSED --profile $profileDeploymentAccount 125 | aws cloudformation deploy --template-file "./.build/07_setup_RDK_role_and_deploy_bucket.yaml" --stack-name "${projectResourcePrefix}-setup-rdk" --profile $profileDeploymentAccount --region $awsregion --capabilities CAPABILITY_NAMED_IAM --parameter-overrides ProjectResourcePrefix=$projectResourcePrefix 126 | 127 | printf "\n[12 of 12] setup CI/CD Infrastructure Pipeline (CodePipeline) to DEPLOYMENT account\n" 128 | sam.cmd package --template-file ./cf/cicd/stackset_pipeline.yaml --output-template-file "./.build/_stackset_pipeline.yaml" --s3-bucket $s3ArtifactsBucket --profile $profileDeploymentAccount --region $awsregion 129 | sam.cmd deploy --template-file "./.build/_stackset_pipeline.yaml" --stack-name "${projectResourcePrefix}-pipeline" --profile $profileDeploymentAccount --region $awsregion --capabilities CAPABILITY_NAMED_IAM --parameter-overrides RepoName=$codeCommitRepoName BranchName=$codeCommitBranchName ProjectResourcePrefix=$projectResourcePrefix EmailFailedBuildNotifications=$emailFailedBuildNotifications EmailApprovalNotifications=$emailApprovalNotifications GroupChatIntegration=$GROUPCHATINTEGRATION 130 | 131 | printf "\n**Deployment complete!**\n\nNext Steps: now setup your local GIT repo, via a CLONE of the new CodeCommit repo created in deployment account.\nCopy all contents of this project to local mapping and push to branch to kick-off CI/CD!\n" 132 | exit 0 133 | 
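# Example invocations (placeholder values shown for illustration only):
#   ./cf/setup/automated_deployment.sh -i none
#   ./cf/setup/automated_deployment.sh -i slack -c my-cicd-channel -d /services/T000/B000/XXXX
#   ./cf/setup/automated_deployment.sh -i msteams -e example.webhook.office.com -f /webhookb2/XXXX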
-------------------------------------------------------------------------------- /code/nodejs/src/sns_to_msteams/index.js: -------------------------------------------------------------------------------- 1 | var https = require('https'); 2 | var util = require('util'); 3 | 4 | const l_project_name = process.env.PROJECT_NAME; 5 | const l_project_prefix = process.env.PROJECT_PREFIX; 6 | const l_project_masteraccount = process.env.MASTER_DEPLOYMENT_ACCOUNT; 7 | const l_iconbucketurl = "http://devsecops-cicd-public-assets.s3-website-ap-southeast-2.amazonaws.com"; 8 | 9 | const _getPrivateKeyValue = async function (secret_key) { 10 | const AWS = require('aws-sdk'); 11 | const client = new AWS.SecretsManager({ 12 | region: process.env.AWS_REGION 13 | }); 14 | return new Promise((resolve, reject) => { 15 | client.getSecretValue({ SecretId: secret_key }, function (err, data) { 16 | if (err) { 17 | reject(err); 18 | } 19 | else { 20 | if ('SecretString' in data) { 21 | resolve(JSON.parse(data.SecretString)); 22 | } 23 | else { 24 | let buff = new Buffer(data.SecretBinary, 'base64'); 25 | resolve(JSON.parse(buff.toString('ascii'))); 26 | } 27 | } 28 | }); 29 | }); 30 | }; 31 | 32 | /** 33 | * Do a request with options provided. 34 | * 35 | * @param {Object} options 36 | * @param {Object} data 37 | * @return {Promise} a promise of request 38 | */ 39 | function doRequest(options, data) { 40 | return new Promise((resolve, reject) => { 41 | const req = https.request(options, (res) => { 42 | res.setEncoding('utf8'); 43 | let responseBody = ''; 44 | 45 | res.on('data', (chunk) => { 46 | responseBody += chunk; 47 | }); 48 | 49 | res.on('end', () => { 50 | console.log('response: '+ responseBody); 51 | resolve(responseBody); 52 | }); 53 | }); 54 | 55 | req.on('error', (err) => { 56 | reject(err); 57 | }); 58 | 59 | req.write(data); 60 | req.end(); 61 | }); 62 | } 63 | 64 | exports.handler = async (event) => { 65 | console.log(JSON.stringify(event, null, 2)); 66 | console.log('From SNS:' + event.Records[0].Sns.Message); 67 | 68 | var l_msg = JSON.parse(event.Records[0].Sns.Message); 69 | //Set Defaults 70 | var l_title = ""; 71 | var l_state = "succeeded"; 72 | var l_env = "PROD"; 73 | var l_msgdetail = ""; 74 | var l_pipeline = ""; 75 | var l_subject = ""; 76 | var l_iconName = ""; 77 | var l_facts = []; 78 | 79 | var severityColor = '0072C6'; //light blue 80 | var potentialActions = []; 81 | if (l_msg.hasOwnProperty('approval')) { 82 | l_state = "approval"; 83 | l_pipeline = l_msg.approval.pipelineName; 84 | l_iconName = "question_icon.png"; 85 | l_facts.push({"name":"Pipeline name","value": l_pipeline}); 86 | 87 | potentialActions.push({"@type":"OpenUri", "name":"Approve/Reject", "targets": [{"os" : "default", "uri": l_msg.approval.approvalReviewLink}]}); 88 | //First remove any linebreaks added by GIT. 
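//customData is expected to be a JSON string; the fields read below imply a shape like
//{"title":"...","message":"...","git_commit_msg":"..."} (illustrative shape inferred from usage, not a documented contract).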
89 | var l_customData = l_msg.approval.customData.replace(/(\r\n|\n|\r)/gm, ""); 90 | //Parse as JSON 91 | var l_jsonData = JSON.parse(l_customData); 92 | l_title = l_jsonData.title; 93 | l_msgdetail = l_jsonData.message; 94 | console.log(JSON.stringify(l_jsonData)); 95 | l_facts.push({"name":"Commit Message","value": l_jsonData.git_commit_msg}); 96 | l_subject = `CI/CD deployment for *'${l_project_name}' to 'PROD'* requires manual approval.`; 97 | 98 | } else if (l_msg.detail.hasOwnProperty('state')) { 99 | l_pipeline = l_msg.detail.pipeline; 100 | if (l_msg.detail.state == "STARTED") { 101 | l_state = "started"; 102 | severityColor = "000000"; //black 103 | } else if (l_msg.detail.state == "SUCCEEDED") { 104 | if (l_msg.detail.stage == "Manual-QA-Approval") { 105 | l_state = "approved"; 106 | severityColor = "00aeef"; //lightblue 107 | l_iconName = "approved_icon.png"; 108 | } else { 109 | l_state = "succeeded"; 110 | severityColor = "32CD32"; //limegreen 111 | l_iconName = "success_icon.png"; 112 | } 113 | } else if(l_msg.detail.state == "FAILED") { 114 | if (l_msg.detail.stage == "Manual-QA-Approval") { 115 | l_state = "approval denied"; 116 | severityColor = "FF8C00"; //darkorange 117 | l_iconName = "denied_icon.png"; 118 | } else { 119 | l_state = "failed"; 120 | severityColor = "FF0000"; //red 121 | l_iconName = "error_icon.png"; 122 | } 123 | } else { 124 | l_state = "unknown"; 125 | severityColor = "FF8C00"; //darkorange 126 | l_iconName = "unknown_icon.png"; 127 | } 128 | 129 | if (l_msg.detail.hasOwnProperty('stage')) { 130 | if (l_msg.detail.stage == "Source") { 131 | l_env = "DEV"; 132 | } else { 133 | l_facts.push({"name":"Stage","value": l_msg.detail.stage}); 134 | if (l_msg.detail.stage == "Build-CloudFormation-Resources") { 135 | l_env = "DEV"; 136 | } else if (l_msg.detail.stage == "Dev-Infrastructure-Deploy") { 137 | l_env = "DEV"; 138 | } else if(l_msg.detail.stage == "Test-Infrastructure-Deploy") { 139 | l_env = "TEST"; 140 | } else if (l_msg.detail.stage == "Manual-QA-Approval") { 141 | l_env = "PROD"; 142 | } else { 143 | l_env = "PROD"; 144 | } 145 | } 146 | } 147 | if (l_msg.detail.state != "STARTED") { 148 | l_facts.push({"name":"Pipeline name","value": l_pipeline}); 149 | l_facts.push({"name":"AWS Master Account","value": l_project_masteraccount}); 150 | l_facts.push({"name":"Project Prefix","value": l_project_prefix}); 151 | } 152 | if (l_state == "started") { 153 | l_subject = `CI/CD pipeline for *${l_project_name}* started..`; 154 | } else if (l_state == "approval denied") { 155 | l_subject = `Deployment approval denied for *${l_project_name}* to publish to *${l_env}*`; 156 | } else { 157 | l_subject = `CI/CD pipeline for *${l_project_name}* to *${l_env}, ${l_state}!*`; 158 | } 159 | } 160 | console.log('Subject: ' + l_subject); 161 | 162 | const private_key_value = await _getPrivateKeyValue("MSTeamsSettings"); 163 | var l_msTeamsHostname = private_key_value["msTeamsHostname"]; 164 | var l_msTeamsWebHookPath = private_key_value["msTeamsWebHookPath"]; 165 | 166 | var postData = { 167 | '@type': 'MessageCard', 168 | '@context': 'http://schema.org/extensions', 169 | 'themeColor': severityColor, 170 | 'summary': l_subject, 171 | "sections": [{ 172 | "activityTitle": l_subject, 173 | "activitySubtitle": l_msgdetail, 174 | "facts": l_facts, 175 | "markdown": true 176 | }] 177 | }; 178 | if (l_iconName.length > 0) { 179 | postData.sections[0].activityImage = `${l_iconbucketurl}/images/${l_iconName}`; 180 | } 181 | if (l_title.length > 0) { 182 | postData.title = 
l_title; 183 | } 184 | if (potentialActions.length > 0) { 185 | postData.potentialAction = potentialActions; 186 | } 187 | 188 | var options = { 189 | method: 'POST', 190 | hostname: l_msTeamsHostname, 191 | port: 443, 192 | path: l_msTeamsWebHookPath, 193 | headers: { 194 | 'Content-Type': 'application/json', 195 | 'Content-Length': Buffer.byteLength(JSON.stringify(postData)) 196 | } 197 | }; 198 | 199 | // return the response 200 | return await doRequest(options, JSON.stringify(postData)); 201 | }; 202 | -------------------------------------------------------------------------------- /code/nodejs/src/sns_to_msteams/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "sns_to_msteams", 3 | "version": "1.0.0", 4 | "description": "Script to subscribe and push SNS notifications from CodePipeline to a Ms Teams Channel", 5 | "main": "index.js", 6 | "keywords": [ 7 | "sns", 8 | "codepipeline" 9 | ], 10 | "author": "", 11 | "license": "ISC" 12 | } 13 | -------------------------------------------------------------------------------- /code/nodejs/src/sns_to_msteams/package/sns_to_msteams.zip: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/comunet/aws-devsecops-cicd/cb881cc66640674f44dbc65db2e428acffecedc6/code/nodejs/src/sns_to_msteams/package/sns_to_msteams.zip -------------------------------------------------------------------------------- /code/nodejs/src/sns_to_msteams/setup.md: -------------------------------------------------------------------------------- 1 | https://docs.microsoft.com/en-us/microsoftteams/platform/webhooks-and-connectors/how-to/add-incoming-webhook 2 | https://pythoncircle.com/post/725/how-to-post-messages-to-microsoft-teams-channel-using-python/ -------------------------------------------------------------------------------- /code/nodejs/src/sns_to_slack/index.js: -------------------------------------------------------------------------------- 1 | var https = require('https'); 2 | var util = require('util'); 3 | 4 | const l_project_name = process.env.PROJECT_NAME; 5 | const l_project_prefix = process.env.PROJECT_PREFIX; 6 | const l_project_masteraccount = process.env.MASTER_DEPLOYMENT_ACCOUNT; 7 | const l_iconbucketurl = "http://devsecops-cicd-public-assets.s3-website-ap-southeast-2.amazonaws.com"; 8 | 9 | const _getPrivateKeyValue = async function (secret_key) { 10 | const AWS = require('aws-sdk'); 11 | const client = new AWS.SecretsManager({ 12 | region: process.env.AWS_REGION 13 | }); 14 | return new Promise((resolve, reject) => { 15 | client.getSecretValue({ SecretId: secret_key }, function (err, data) { 16 | if (err) { 17 | reject(err); 18 | } 19 | else { 20 | if ('SecretString' in data) { 21 | resolve(JSON.parse(data.SecretString)); 22 | } 23 | else { 24 | let buff = new Buffer(data.SecretBinary, 'base64'); 25 | resolve(JSON.parse(buff.toString('ascii'))); 26 | } 27 | } 28 | }); 29 | }); 30 | }; 31 | 32 | /** 33 | * Do a request with options provided. 
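 * Wraps https.request in a Promise and resolves with the raw response body string.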
34 | * 35 | * @param {Object} options 36 | * @param {Object} data 37 | * @return {Promise} a promise of request 38 | */ 39 | function doRequest(options, data) { 40 | return new Promise((resolve, reject) => { 41 | const req = https.request(options, (res) => { 42 | res.setEncoding('utf8'); 43 | let responseBody = ''; 44 | 45 | res.on('data', (chunk) => { 46 | responseBody += chunk; 47 | }); 48 | 49 | res.on('end', () => { 50 | console.log('response: '+ responseBody); 51 | resolve(responseBody); 52 | }); 53 | }); 54 | 55 | req.on('error', (err) => { 56 | reject(err); 57 | }); 58 | 59 | req.write(data); 60 | req.end(); 61 | }); 62 | } 63 | 64 | exports.handler = async (event) => { 65 | console.log(JSON.stringify(event, null, 2)); 66 | console.log('From SNS:' + event.Records[0].Sns.Message); 67 | 68 | var l_msg = JSON.parse(event.Records[0].Sns.Message); 69 | //Set Defaults 70 | var l_state = "succeeded"; 71 | var l_env = "PROD"; 72 | var l_title = ""; 73 | var l_msgdetail = ""; 74 | var l_pipeline = ""; 75 | var l_subject = ""; 76 | var l_iconName = ""; 77 | var l_approvalSection = {}; 78 | 79 | var l_blocks = []; 80 | var l_fields = []; 81 | 82 | if (l_msg.hasOwnProperty('approval')) { 83 | l_state = "approval"; 84 | l_pipeline = l_msg.approval.pipelineName; 85 | l_iconName = "question_icon_trans.png"; 86 | l_fields.push({"type": "mrkdwn","text": `*Pipeline name:*\n${l_pipeline}`}); 87 | 88 | //First remove any linebreaks added by GIT. 89 | var l_customData = l_msg.approval.customData.replace(/(\r\n|\n|\r)/gm, ""); 90 | //Parse as JSON 91 | var l_jsonData = JSON.parse(l_customData); 92 | 93 | l_approvalSection = { 94 | "type": "section", 95 | "text": { 96 | "type": "mrkdwn", 97 | "text": l_jsonData.message 98 | }, 99 | "accessory": { 100 | "type": "button", 101 | "text": { 102 | "type": "plain_text", 103 | "text": "Approve/Reject", 104 | }, 105 | "value": "notimportant", 106 | "url": l_msg.approval.approvalReviewLink, 107 | "action_id": "button-action" 108 | } 109 | }; 110 | 111 | l_title = l_jsonData.title; 112 | l_msgdetail = l_jsonData.message; 113 | console.log(JSON.stringify(l_jsonData)); 114 | l_fields.push({"type": "mrkdwn","text": `*Commit Message:*\n${l_jsonData.git_commit_msg}` }); 115 | l_subject = `CI/CD deployment for *'${l_project_name}' to 'PROD'* requires manual approval.`; 116 | 117 | } else if (l_msg.detail.hasOwnProperty('state')) { 118 | l_pipeline = l_msg.detail.pipeline; 119 | if (l_msg.detail.state == "STARTED") { 120 | l_state = "started"; 121 | } else if (l_msg.detail.state == "SUCCEEDED") { 122 | if (l_msg.detail.stage == "Manual-QA-Approval") { 123 | l_state = "approved"; 124 | l_iconName = "approved_icon_trans.png"; 125 | } else { 126 | l_state = "succeeded"; 127 | l_iconName = "success_icon_trans.png"; 128 | } 129 | } else if(l_msg.detail.state == "FAILED") { 130 | if (l_msg.detail.stage == "Manual-QA-Approval") { 131 | l_state = "approval denied"; 132 | l_iconName = "denied_icon_trans.png"; 133 | } else { 134 | l_state = "failed"; 135 | l_iconName = "error_icon_trans.png"; 136 | } 137 | } else { 138 | l_state = "unknown"; 139 | l_iconName = "unknown_icon_trans.png"; 140 | } 141 | 142 | if (l_msg.detail.hasOwnProperty('stage')) { 143 | if (l_msg.detail.stage == "Source") { 144 | l_env = "DEV"; 145 | } else { 146 | l_fields.push({"type": "mrkdwn","text": `*Stage:*\n${l_msg.detail.stage}` }); 147 | if (l_msg.detail.stage == "Build-CloudFormation-Resources") { 148 | l_env = "DEV"; 149 | } else if (l_msg.detail.stage == "Dev-Infrastructure-Deploy") { 150 | l_env = 
"DEV"; 151 | } else if(l_msg.detail.stage == "Test-Infrastructure-Deploy") { 152 | l_env = "TEST"; 153 | } else if (l_msg.detail.stage == "Manual-QA-Approval") { 154 | l_env = "PROD"; 155 | } else { 156 | l_env = "PROD"; 157 | } 158 | } 159 | } 160 | if (l_msg.detail.state != "STARTED") { 161 | l_fields.push({"type": "mrkdwn","text": `*Pipeline name:*\n${l_pipeline}`}); 162 | l_fields.push({"type": "mrkdwn","text": `*AWS Master Account:*\n${l_project_masteraccount}` }); 163 | l_fields.push({"type": "mrkdwn","text": `*Project Prefix:*\n${l_project_prefix}` }); 164 | } 165 | if (l_state == "started") { 166 | l_subject = `CI/CD pipeline for *${l_project_name}* started..`; 167 | } else if (l_state == "approval denied") { 168 | l_subject = `Deployment approval denied for *${l_project_name}* to publish to *${l_env}*`; 169 | } else { 170 | l_subject = `CI/CD pipeline for *${l_project_name}* to *${l_env}, ${l_state}!*`; 171 | } 172 | } 173 | console.log('Subject: ' + l_subject); 174 | 175 | const private_key_value = await _getPrivateKeyValue("SlackSettings"); 176 | var l_slackChannelName = private_key_value["slackChannelName"]; 177 | var l_slackWebHookPath = private_key_value["slackWebHookPath"]; 178 | 179 | console.log('Posting to slack channel:' + l_slackChannelName); 180 | 181 | var postData = { 182 | "channel": l_slackChannelName, 183 | }; 184 | 185 | //Add Header 186 | if (l_subject.length > 0) { 187 | var l_header = { 188 | "type": "section", 189 | "text": 190 | { 191 | "type": "mrkdwn", 192 | "text": l_subject 193 | } 194 | }; 195 | l_blocks.push(l_header); 196 | } 197 | //Add Msg Prop Section 198 | if (l_fields.length > 0) { 199 | //Add Section for Fields 200 | var l_section = { 201 | "type": "section", 202 | "fields": l_fields 203 | }; 204 | //Add Image 205 | if (l_iconName.length > 0) { 206 | l_section.accessory = { 207 | "type": "image", 208 | "image_url": `${l_iconbucketurl}/images/${l_iconName}`, 209 | "alt_text": l_state 210 | }; 211 | } 212 | l_blocks.push(l_section); 213 | } 214 | //Add Approval 215 | if (l_msg.hasOwnProperty('approval')) { 216 | l_blocks.push(l_approvalSection); 217 | } 218 | postData.blocks = l_blocks; 219 | console.log('Posting Data:\n' + JSON.stringify(postData)); 220 | var options = { 221 | method: 'POST', 222 | hostname: 'hooks.slack.com', 223 | port: 443, 224 | path: l_slackWebHookPath 225 | }; 226 | 227 | // return the response 228 | return await doRequest(options, JSON.stringify(postData)); 229 | }; 230 | -------------------------------------------------------------------------------- /code/nodejs/src/sns_to_slack/package-lock.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "sns_to_slack", 3 | "version": "1.0.0", 4 | "lockfileVersion": 2, 5 | "requires": true, 6 | "packages": { 7 | "": { 8 | "version": "1.0.0", 9 | "license": "ISC" 10 | } 11 | } 12 | } 13 | -------------------------------------------------------------------------------- /code/nodejs/src/sns_to_slack/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "sns_to_slack", 3 | "version": "1.0.0", 4 | "description": "Script to subscribe and push SNS notifications from CodePipeline to a slack channel", 5 | "main": "index.js", 6 | "keywords": [ 7 | "sns", 8 | "codepipeline", 9 | "slack" 10 | ], 11 | "author": "", 12 | "license": "ISC" 13 | } 14 | -------------------------------------------------------------------------------- /code/nodejs/src/sns_to_slack/package/sns_to_slack.zip: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/comunet/aws-devsecops-cicd/cb881cc66640674f44dbc65db2e428acffecedc6/code/nodejs/src/sns_to_slack/package/sns_to_slack.zip -------------------------------------------------------------------------------- /code/python-rdk/AWSConfigRuleKMSLeastPrivilege/AWSConfigRuleKMSLeastPrivilege.py: -------------------------------------------------------------------------------- 1 | import os 2 | import sys 3 | import json 4 | import datetime 5 | import re 6 | import boto3 7 | import botocore 8 | import logging 9 | from fnmatch import fnmatch 10 | from botocore.exceptions import ClientError 11 | from AWSConfigRuleKMSStatementProcessor import AWSConfigRuleKMSStatementProcessor 12 | 13 | 14 | LOGGING_LEVEL = logging.INFO 15 | if None != os.getenv("LOGGING_LEVEL"): 16 | LOGGING_LEVEL = logging.getLevelName(os.getenv("LOGGING_LEVEL")) 17 | logger = logging.getLogger(__name__) 18 | logger.setLevel(LOGGING_LEVEL) 19 | stream_handler = logging.StreamHandler(sys.stdout) 20 | logger.addHandler(stream_handler) 21 | 22 | 23 | ############## 24 | # Parameters # 25 | ############## 26 | 27 | # define the default resource to report to Config Rules 28 | AWS_CONFIG_CLIENT = boto3.client("config") 29 | DEFAULT_RESOURCE_TYPE = "AWS::KMS::Key" 30 | 31 | # Other parameters (no change needed) 32 | CONFIG_ROLE_TIMEOUT_SECONDS = 900 33 | 34 | 35 | # set to True to get the lambda to assume the Role attached on the Config Service (useful for cross-account) 36 | ASSUME_ROLE_MODE = True 37 | 38 | # pull aliases for CMKs and pass to function below 39 | def cmk_aliases(client): 40 | aliases = [] 41 | response = client.list_aliases(Limit=100) 42 | while response['Aliases']: 43 | for alias in response['Aliases']: 44 | if 'TargetKeyId' in alias: 45 | aliases.append( 46 | {"alias_id": alias['AliasName'], "cmk_id": alias['TargetKeyId'], "account_number": re.search("([0-9]){12}", alias['AliasArn'])[0]} 47 | ) 48 | if not 'NextMarker' in response: 49 | return aliases 50 | response = client.list_aliases( 51 | Marker=response['NextMarker'], Limit=100) 52 | 53 | 54 | # check if CMK alias is in whitelist 55 | def cmk_alias_in_whitelist(alias_id, rule_parameters): 56 | try: 57 | alias_id = alias_id.split('/', 1)[-1] 58 | whitelist_param = rule_parameters["CMKWhitelist"] 59 | except KeyError: 60 | return False 61 | whitelist_entries = re.split(", *", whitelist_param.strip()) 62 | if [x for x in whitelist_entries if fnmatch(alias_id, x)]: 63 | return True 64 | else: 65 | return False 66 | 67 | 68 | # check if KMS is CMK 69 | def is_cmk(client, kms_id): 70 | metadata = client.describe_key(KeyId=kms_id) 71 | keyManager = metadata["KeyMetadata"]["KeyManager"] 72 | return keyManager == 'CUSTOMER' 73 | 74 | 75 | # checks CMK status to determine enabled or disabled 76 | def cmk_status(client, cmk_id): 77 | metadata = client.describe_key(KeyId=cmk_id) 78 | status = metadata["KeyMetadata"]["Enabled"] 79 | return status == False 80 | 81 | 82 | # pulls and returns cmk policy 83 | def get_cmk_policy(client, cmk_id): 84 | try: 85 | policy_response = client.get_key_policy( 86 | KeyId=cmk_id, PolicyName="default" 87 | ) 88 | policy_content = policy_response["Policy"] 89 | 90 | return policy_content 91 | except ClientError as ce: 92 | logger.error = "Failure retrieving key policy for CMK {}".format(cmk_id) 93 | logger.exception(ce) 94 | 95 | return False 96 | 97 | # check if whitelist admin_userid is in aws:userId 98 | def 
userid_in_whitelist(userIds, rule_parameters): 99 | try: 100 | whitelist_param = rule_parameters["Admin_User_Id"] 101 | except KeyError: 102 | return False 103 | whitelist_entries = re.split(", *", whitelist_param.strip()) 104 | if [x for x in whitelist_entries if any(fnmatch(p, x) for p in userIds)]: 105 | return True 106 | else: 107 | return False 108 | 109 | 110 | # evaluating compliance against rule scenarios 111 | def evaluate_compliance(event, configuration_item, rule_parameters): 112 | AWS_KMS_CLIENT = get_client("kms", event=event) 113 | evaluations = [] 114 | kms_aliases = cmk_aliases(AWS_KMS_CLIENT) 115 | if len(kms_aliases) > 0: 116 | for kms_alias in kms_aliases: 117 | if cmk_alias_in_whitelist(kms_alias["alias_id"], rule_parameters): 118 | ann = "CMK {} is in whitelist for CMK Key Policy check".format( 119 | kms_alias["alias_id"] 120 | ) 121 | logger.info(ann) 122 | ev = build_evaluation( 123 | "{}".format(kms_alias["alias_id"]), 124 | "COMPLIANT", 125 | event, 126 | annotation=ann, 127 | ) 128 | evaluations.append(ev) 129 | # Evaluate for only cmk KMS keys 130 | elif is_cmk(AWS_KMS_CLIENT, kms_alias["cmk_id"]): 131 | # Evaluate if CMK key is disabled 132 | if cmk_status(AWS_KMS_CLIENT, kms_alias["cmk_id"]): 133 | ann = "CMK {} is disabled".format(kms_alias["alias_id"]) 134 | logger.info(ann) 135 | ev = build_evaluation( 136 | "{}".format(kms_alias["alias_id"]), 137 | "NOT_APPLICABLE", 138 | event, 139 | annotation=ann, 140 | ) 141 | evaluations.append(ev) 142 | else: 143 | processor = AWSConfigRuleKMSStatementProcessor( 144 | get_cmk_policy(AWS_KMS_CLIENT, kms_alias["cmk_id"])) 145 | 146 | result = processor.process(kms_alias["alias_id"], kms_alias["account_number"], rule_parameters) 147 | 148 | ev = build_evaluation( 149 | "{}".format(kms_alias["alias_id"]), 150 | result["compliance_type"], 151 | event, 152 | annotation=result["annotation"], 153 | ) 154 | 155 | evaluations.append(ev) 156 | 157 | else: 158 | ann = 'KMS is not a CMK' 159 | ev = build_evaluation( 160 | "{}".format(kms_alias["alias_id"]), 161 | 'NOT_APPLICABLE', 162 | event, 163 | annotation=ann, 164 | ) 165 | evaluations.append(ev) 166 | return evaluations 167 | 168 | def evaluate_parameters(rule_parameters): 169 | """Evaluate the rule parameters dictionary validity. Raise a ValueError for invalid parameters. 170 | 171 | Return: 172 | anything suitable for the evaluate_compliance() 173 | 174 | Keyword arguments: 175 | rule_parameters -- the Key/Value dictionary of the Config Rules parameters 176 | """ 177 | valid_rule_parameters = rule_parameters 178 | 179 | if "PrincipalWhitelist" not in valid_rule_parameters: 180 | valid_rule_parameters["PrincipalWhitelist"] = "" 181 | 182 | return valid_rule_parameters 183 | 184 | #################### 185 | # Helper Functions # 186 | #################### 187 | 188 | # Build an error to be displayed in the logs when the parameter is invalid. 189 | def build_parameters_value_error_response(ex): 190 | """Return an error dictionary when the evaluate_parameters() raises a ValueError. 191 | 192 | Keyword arguments: 193 | ex -- Exception text 194 | """ 195 | return build_error_response(internal_error_message="Parameter value is invalid", 196 | internal_error_details="An ValueError was raised during the validation of the Parameter value", 197 | customer_error_code="InvalidParameterValueException", 198 | customer_error_message=str(ex)) 199 | 200 | # This gets the client after assuming the Config service role 201 | # either in the same AWS account or cross-account. 
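# For example, evaluate_compliance() above calls get_client("kms", event=event) to obtain a boto3 KMS
# client built from the assumed Config role credentials when ASSUME_ROLE_MODE is True.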
202 | def get_client(service, event, region=None): 203 | """Return the service boto client. It should be used instead of directly calling the client. 204 | 205 | Keyword arguments: 206 | service -- the service name used for calling the boto.client() 207 | event -- the event variable given in the lambda handler 208 | region -- the region where the client is called (default: None) 209 | """ 210 | if not ASSUME_ROLE_MODE: 211 | return boto3.client(service, region) 212 | credentials = get_assume_role_credentials(get_execution_role_arn(event), region) 213 | return boto3.client(service, aws_access_key_id=credentials['AccessKeyId'], 214 | aws_secret_access_key=credentials['SecretAccessKey'], 215 | aws_session_token=credentials['SessionToken'], 216 | region_name=region 217 | ) 218 | 219 | # This generate an evaluation for config 220 | def build_evaluation(resource_id, compliance_type, event, resource_type=DEFAULT_RESOURCE_TYPE, annotation=None): 221 | """Form an evaluation as a dictionary. Usually suited to report on scheduled rules. 222 | 223 | Keyword arguments: 224 | resource_id -- the unique id of the resource to report 225 | compliance_type -- either COMPLIANT, NON_COMPLIANT or NOT_APPLICABLE 226 | event -- the event variable given in the lambda handler 227 | resource_type -- the CloudFormation resource type (or AWS::::Account) to report on the rule (default DEFAULT_RESOURCE_TYPE) 228 | annotation -- an annotation to be added to the evaluation (default None). It will be truncated to 255 if longer. 229 | """ 230 | eval_cc = {} 231 | if annotation: 232 | eval_cc['Annotation'] = build_annotation(annotation) 233 | eval_cc['ComplianceResourceType'] = resource_type 234 | eval_cc['ComplianceResourceId'] = resource_id 235 | eval_cc['ComplianceType'] = compliance_type 236 | eval_cc['OrderingTimestamp'] = str(json.loads(event['invokingEvent'])['notificationCreationTime']) 237 | return eval_cc 238 | 239 | def build_evaluation_from_config_item(configuration_item, compliance_type, annotation=None): 240 | """Form an evaluation as a dictionary. Usually suited to report on configuration change rules. 241 | 242 | Keyword arguments: 243 | configuration_item -- the configurationItem dictionary in the invokingEvent 244 | compliance_type -- either COMPLIANT, NON_COMPLIANT or NOT_APPLICABLE 245 | annotation -- an annotation to be added to the evaluation (default None). It will be truncated to 255 if longer. 
246 | """ 247 | eval_ci = {} 248 | if annotation: 249 | eval_ci['Annotation'] = build_annotation(annotation) 250 | eval_ci['ComplianceResourceType'] = configuration_item['resourceType'] 251 | eval_ci['ComplianceResourceId'] = configuration_item['resourceId'] 252 | eval_ci['ComplianceType'] = compliance_type 253 | eval_ci['OrderingTimestamp'] = configuration_item['configurationItemCaptureTime'] 254 | return eval_ci 255 | 256 | #################### 257 | # Boilerplate Code # 258 | #################### 259 | 260 | # Get execution role for Lambda function 261 | def get_execution_role_arn(event): 262 | role_arn = None 263 | if 'ruleParameters' in event: 264 | rule_params = json.loads(event['ruleParameters']) 265 | role_name = rule_params.get("ExecutionRoleName") 266 | if role_name: 267 | execution_role_prefix = event["executionRoleArn"].split("/")[0] 268 | role_arn = "{}/{}".format(execution_role_prefix, role_name) 269 | 270 | if not role_arn: 271 | role_arn = event['executionRoleArn'] 272 | 273 | return role_arn 274 | 275 | # Build annotation within Service constraints 276 | def build_annotation(annotation_string): 277 | if len(annotation_string) > 256: 278 | return annotation_string[:244] + " [truncated]" 279 | return annotation_string 280 | 281 | # Helper function used to validate input 282 | def check_defined(reference, reference_name): 283 | if not reference: 284 | raise Exception('Error: ', reference_name, 'is not defined') 285 | return reference 286 | 287 | # Check whether the message is OversizedConfigurationItemChangeNotification or not 288 | def is_oversized_changed_notification(message_type): 289 | check_defined(message_type, 'messageType') 290 | return message_type == 'OversizedConfigurationItemChangeNotification' 291 | 292 | # Check whether the message is a ScheduledNotification or not. 
293 | def is_scheduled_notification(message_type): 294 | check_defined(message_type, 'messageType') 295 | return message_type == 'ScheduledNotification' 296 | 297 | # Get configurationItem using getResourceConfigHistory API 298 | # in case of OversizedConfigurationItemChangeNotification 299 | def get_configuration(resource_type, resource_id, configuration_capture_time): 300 | result = AWS_CONFIG_CLIENT.get_resource_config_history( 301 | resourceType=resource_type, 302 | resourceId=resource_id, 303 | laterTime=configuration_capture_time, 304 | limit=1) 305 | configuration_item = result['configurationItems'][0] 306 | return convert_api_configuration(configuration_item) 307 | 308 | # Convert from the API model to the original invocation model 309 | def convert_api_configuration(configuration_item): 310 | for k, v in configuration_item.items(): 311 | if isinstance(v, datetime.datetime): 312 | configuration_item[k] = str(v) 313 | configuration_item['awsAccountId'] = configuration_item['accountId'] 314 | configuration_item['ARN'] = configuration_item['arn'] 315 | configuration_item['configurationStateMd5Hash'] = configuration_item['configurationItemMD5Hash'] 316 | configuration_item['configurationItemVersion'] = configuration_item['version'] 317 | configuration_item['configuration'] = json.loads(configuration_item['configuration']) 318 | if 'relationships' in configuration_item: 319 | for i in range(len(configuration_item['relationships'])): 320 | configuration_item['relationships'][i]['name'] = configuration_item['relationships'][i]['relationshipName'] 321 | return configuration_item 322 | 323 | # Based on the type of message get the configuration item 324 | # either from configurationItem in the invoking event 325 | # or using the getResourceConfigHistiry API in getConfiguration function. 326 | def get_configuration_item(invoking_event): 327 | check_defined(invoking_event, 'invokingEvent') 328 | if is_oversized_changed_notification(invoking_event['messageType']): 329 | configuration_item_summary = check_defined(invoking_event['configurationItemSummary'], 'configurationItemSummary') 330 | return get_configuration(configuration_item_summary['resourceType'], configuration_item_summary['resourceId'], configuration_item_summary['configurationItemCaptureTime']) 331 | if is_scheduled_notification(invoking_event['messageType']): 332 | return None 333 | return check_defined(invoking_event['configurationItem'], 'configurationItem') 334 | 335 | # Check whether the resource has been deleted. If it has, then the evaluation is unnecessary. 
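# Returns True only when the configuration item status is 'OK' or 'ResourceDiscovered' and the event
# has not left the rule's scope; deleted resources fall through to NOT_APPLICABLE in the handler.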
336 | def is_applicable(configuration_item, event): 337 | try: 338 | check_defined(configuration_item, 'configurationItem') 339 | check_defined(event, 'event') 340 | except: 341 | return True 342 | status = configuration_item['configurationItemStatus'] 343 | event_left_scope = event['eventLeftScope'] 344 | if status == 'ResourceDeleted': 345 | print("Resource Deleted, setting Compliance Status to NOT_APPLICABLE.") 346 | 347 | return status in ('OK', 'ResourceDiscovered') and not event_left_scope 348 | 349 | 350 | def get_assume_role_credentials(role_arn, region=None): 351 | sts_client = boto3.client('sts', region) 352 | try: 353 | assume_role_response = sts_client.assume_role(RoleArn=role_arn, 354 | RoleSessionName="configLambdaExecution", 355 | DurationSeconds=CONFIG_ROLE_TIMEOUT_SECONDS) 356 | if 'liblogging' in sys.modules: 357 | liblogging.logSession(role_arn, assume_role_response) 358 | return assume_role_response['Credentials'] 359 | except botocore.exceptions.ClientError as ex: 360 | # Scrub error message for any internal account info leaks 361 | print(str(ex)) 362 | if 'AccessDenied' in ex.response['Error']['Code']: 363 | ex.response['Error']['Message'] = "AWS Config does not have permission to assume the IAM role." 364 | else: 365 | ex.response['Error']['Message'] = "InternalError" 366 | ex.response['Error']['Code'] = "InternalError" 367 | raise ex 368 | 369 | # This removes older evaluation (usually useful for periodic rule not reporting on AWS::::Account). 370 | def clean_up_old_evaluations(latest_evaluations, event): 371 | 372 | cleaned_evaluations = [] 373 | 374 | old_eval = AWS_CONFIG_CLIENT.get_compliance_details_by_config_rule( 375 | ConfigRuleName=event['configRuleName'], 376 | ComplianceTypes=['COMPLIANT', 'NON_COMPLIANT'], 377 | Limit=100) 378 | 379 | old_eval_list = [] 380 | 381 | while True: 382 | for old_result in old_eval['EvaluationResults']: 383 | old_eval_list.append(old_result) 384 | if 'NextToken' in old_eval: 385 | next_token = old_eval['NextToken'] 386 | old_eval = AWS_CONFIG_CLIENT.get_compliance_details_by_config_rule( 387 | ConfigRuleName=event['configRuleName'], 388 | ComplianceTypes=['COMPLIANT', 'NON_COMPLIANT'], 389 | Limit=100, 390 | NextToken=next_token) 391 | else: 392 | break 393 | 394 | for old_eval in old_eval_list: 395 | old_resource_id = old_eval['EvaluationResultIdentifier']['EvaluationResultQualifier']['ResourceId'] 396 | newer_founded = False 397 | for latest_eval in latest_evaluations: 398 | if old_resource_id == latest_eval['ComplianceResourceId']: 399 | newer_founded = True 400 | if not newer_founded: 401 | cleaned_evaluations.append(build_evaluation(old_resource_id, "NOT_APPLICABLE", event)) 402 | 403 | return cleaned_evaluations + latest_evaluations 404 | 405 | def lambda_handler(event, context): 406 | if 'liblogging' in sys.modules: 407 | liblogging.logEvent(event) 408 | 409 | global AWS_CONFIG_CLIENT 410 | 411 | #print(event) 412 | check_defined(event, 'event') 413 | invoking_event = json.loads(event['invokingEvent']) 414 | rule_parameters = {} 415 | if 'ruleParameters' in event: 416 | rule_parameters = json.loads(event['ruleParameters']) 417 | 418 | try: 419 | valid_rule_parameters = evaluate_parameters(rule_parameters) 420 | except ValueError as ex: 421 | return build_parameters_value_error_response(ex) 422 | 423 | try: 424 | AWS_CONFIG_CLIENT = get_client('config', event) 425 | if invoking_event['messageType'] in ['ConfigurationItemChangeNotification', 'ScheduledNotification', 'OversizedConfigurationItemChangeNotification']: 426 | 
configuration_item = get_configuration_item(invoking_event) 427 | if is_applicable(configuration_item, event): 428 | compliance_result = evaluate_compliance(event, configuration_item, valid_rule_parameters) 429 | else: 430 | compliance_result = "NOT_APPLICABLE" 431 | else: 432 | return build_internal_error_response('Unexpected message type', str(invoking_event)) 433 | except botocore.exceptions.ClientError as ex: 434 | if is_internal_error(ex): 435 | return build_internal_error_response("Unexpected error while completing API request", str(ex)) 436 | return build_error_response("Customer error while making API request", str(ex), ex.response['Error']['Code'], ex.response['Error']['Message']) 437 | except ValueError as ex: 438 | return build_internal_error_response(str(ex), str(ex)) 439 | 440 | evaluations = [] 441 | latest_evaluations = [] 442 | 443 | if not compliance_result: 444 | latest_evaluations.append(build_evaluation(event['accountId'], "NOT_APPLICABLE", event, resource_type='AWS::::Account')) 445 | evaluations = clean_up_old_evaluations(latest_evaluations, event) 446 | elif isinstance(compliance_result, str): 447 | if configuration_item: 448 | evaluations.append(build_evaluation_from_config_item(configuration_item, compliance_result)) 449 | else: 450 | evaluations.append(build_evaluation(event['accountId'], compliance_result, event, resource_type=DEFAULT_RESOURCE_TYPE)) 451 | elif isinstance(compliance_result, list): 452 | for evaluation in compliance_result: 453 | missing_fields = False 454 | for field in ('ComplianceResourceType', 'ComplianceResourceId', 'ComplianceType', 'OrderingTimestamp'): 455 | if field not in evaluation: 456 | print("Missing " + field + " from custom evaluation.") 457 | missing_fields = True 458 | 459 | if not missing_fields: 460 | latest_evaluations.append(evaluation) 461 | evaluations = clean_up_old_evaluations(latest_evaluations, event) 462 | elif isinstance(compliance_result, dict): 463 | missing_fields = False 464 | for field in ('ComplianceResourceType', 'ComplianceResourceId', 'ComplianceType', 'OrderingTimestamp'): 465 | if field not in compliance_result: 466 | print("Missing " + field + " from custom evaluation.") 467 | missing_fields = True 468 | if not missing_fields: 469 | evaluations.append(compliance_result) 470 | else: 471 | evaluations.append(build_evaluation_from_config_item(configuration_item, 'NOT_APPLICABLE')) 472 | 473 | # Put together the request that reports the evaluation status 474 | result_token = event['resultToken'] 475 | test_mode = False 476 | if result_token == 'TESTMODE': 477 | # Used solely for RDK test to skip actual put_evaluation API call 478 | test_mode = True 479 | 480 | # Invoke the Config API to report the result of the evaluation 481 | evaluation_copy = [] 482 | evaluation_copy = evaluations[:] 483 | while evaluation_copy: 484 | AWS_CONFIG_CLIENT.put_evaluations(Evaluations=evaluation_copy[:100], ResultToken=result_token, TestMode=test_mode) 485 | del evaluation_copy[:100] 486 | 487 | # Used solely for RDK test to be able to test Lambda function 488 | return evaluations 489 | 490 | def is_internal_error(exception): 491 | return ((not isinstance(exception, botocore.exceptions.ClientError)) or exception.response['Error']['Code'].startswith('5') 492 | or 'InternalError' in exception.response['Error']['Code'] or 'ServiceError' in exception.response['Error']['Code']) 493 | 494 | def build_internal_error_response(internal_error_message, internal_error_details=None): 495 | return build_error_response(internal_error_message, 
internal_error_details, 'InternalError', 'InternalError') 496 | 497 | def build_error_response(internal_error_message, internal_error_details=None, customer_error_code=None, customer_error_message=None): 498 | error_response = { 499 | 'internalErrorMessage': internal_error_message, 500 | 'internalErrorDetails': internal_error_details, 501 | 'customerErrorMessage': customer_error_message, 502 | 'customerErrorCode': customer_error_code 503 | } 504 | print(error_response) 505 | return error_response 506 | -------------------------------------------------------------------------------- /code/python-rdk/AWSConfigRuleKMSLeastPrivilege/AWSConfigRuleKMSLeastPrivilege_test.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | 3 | try: 4 | from unittest.mock import MagicMock, patch, ANY 5 | except ImportError: 6 | import mock 7 | from mock import MagicMock, patch, ANY 8 | import botocore 9 | from botocore.exceptions import ClientError 10 | import sys 11 | import os 12 | import json 13 | import logging 14 | 15 | # Define the default resource to report to Config Rules 16 | DEFAULT_RESOURCE_TYPE = 'AWS::KMS::Key' 17 | 18 | CONFIG_CLIENT_MOCK = MagicMock() 19 | STS_CLIENT_MOCK = MagicMock() 20 | KMS_CLIENT_MOCK = MagicMock() 21 | 22 | 23 | class Boto3Mock: 24 | def client(self, client_name, *args, **kwargs): 25 | if client_name == "config": 26 | return CONFIG_CLIENT_MOCK 27 | elif client_name == "sts": 28 | return STS_CLIENT_MOCK 29 | elif client_name == "kms": 30 | return KMS_CLIENT_MOCK 31 | else: 32 | raise Exception("Attempting to create an unknown client") 33 | 34 | 35 | sys.modules["boto3"] = Boto3Mock() 36 | 37 | import AWSConfigRuleKMSLeastPrivilege as rule 38 | 39 | class TestKMSKeyPolicy(unittest.TestCase): 40 | list_aliases = { 41 | "Aliases": [ 42 | { 43 | "AliasName": "alias/testkey", 44 | "AliasArn": "arn:aws:kms:us-east-1:111122223333:alias/testkey", 45 | "TargetKeyId": "000041d6-1111-2222-3333-4444560c5555", 46 | } 47 | ] 48 | } 49 | 50 | def setUp(self): 51 | CONFIG_CLIENT_MOCK.reset_mock() 52 | KMS_CLIENT_MOCK.reset_mock() 53 | 54 | # scenario 1 55 | def test_is_not_cmk(self): 56 | ruleParam = ( 57 | "{\"CMKWhitelist\" : \"\", \"PrincipalWhitelist\" : \"\"}" 58 | ) 59 | KMS_CLIENT_MOCK.list_aliases = MagicMock(return_value=self.list_aliases) 60 | KMS_CLIENT_MOCK.describe_key = MagicMock( 61 | return_value={ 62 | "KeyMetadata": { 63 | "KeyId": "000041d6-1111-2222-3333-4444560c5555", 64 | "KeyManager": "AWS", 65 | } 66 | } 67 | ) 68 | lambda_event = build_lambda_scheduled_event(rule_parameters=ruleParam) 69 | response = rule.lambda_handler(lambda_event, {}) 70 | resp_expected = [] 71 | resp_expected.append( 72 | build_expected_response( 73 | 'NOT_APPLICABLE', 'alias/testkey', annotation='KMS is not a CMK' 74 | ) 75 | ) 76 | assert_successful_evaluation(self, response, resp_expected) 77 | 78 | def test_is_not_disabled(self): 79 | ruleParam = ( 80 | "{\"CMKWhitelist\" : \"\", \"PrincipalWhitelist\" : \"\"}" 81 | ) 82 | KMS_CLIENT_MOCK.list_aliases = MagicMock(return_value=self.list_aliases) 83 | KMS_CLIENT_MOCK.describe_key = MagicMock( 84 | return_value={ 85 | "KeyMetadata": { 86 | "KeyId": "000041d6-1111-2222-3333-4444560c5555", 87 | "KeyManager": "CUSTOMER", 88 | "Enabled": False, 89 | } 90 | } 91 | ) 92 | lambda_event = build_lambda_scheduled_event(rule_parameters=ruleParam) 93 | response = rule.lambda_handler(lambda_event, {}) 94 | resp_expected = [] 95 | resp_expected.append( 96 | build_expected_response( 97 | 'NOT_APPLICABLE', 
98 | 'alias/testkey', 99 | annotation='CMK alias/testkey is disabled', 100 | ) 101 | ) 102 | assert_successful_evaluation(self, response, resp_expected) 103 | 104 | 105 | def test_cmk_in_whitelist(self): 106 | ruleParam = ( 107 | "{\"CMKWhitelist\" : \"test*\", \"PrincipalWhitelist\" : \"\"}" 108 | ) 109 | KMS_CLIENT_MOCK.list_aliases = MagicMock( 110 | return_value={ 111 | "Aliases": [ 112 | { 113 | "AliasName": "alias/test", 114 | "AliasArn": "arn:aws:kms:us-east-1:012345678900:alias/testkey", 115 | "TargetKeyId": "000041d6-1111-2222-3333-4444560c5555", 116 | } 117 | ] 118 | } 119 | ) 120 | lambda_event = build_lambda_scheduled_event(rule_parameters=ruleParam) 121 | response = rule.lambda_handler(lambda_event, {}) 122 | resp_expected = [] 123 | resp_expected.append( 124 | build_expected_response( 125 | 'COMPLIANT', 126 | 'alias/test', 127 | annotation='CMK alias/test is in whitelist for CMK Key Policy check', 128 | ) 129 | ) 130 | assert_successful_evaluation(self, response, resp_expected) 131 | 132 | def test_princial_not_set_to_wildcard(self): 133 | 134 | ruleParam = ( 135 | "{\"CMKWhitelist\" : \"\", \"PrincipalWhitelist\" : \"\"}" 136 | ) 137 | KMS_CLIENT_MOCK.list_aliases = MagicMock(return_value=self.list_aliases) 138 | KMS_CLIENT_MOCK.describe_key = MagicMock( 139 | return_value={ 140 | "KeyMetadata": { 141 | "KeyId": "000041d6-1111-2222-3333-4444560c5555", 142 | "KeyManager": "CUSTOMER", 143 | "Enabled": True, 144 | } 145 | } 146 | ) 147 | policy_doc = build_policy_doc(actions="kms:Encrypt", principal = ["*"]) 148 | policy_response = build_policy_response(policy_doc) 149 | KMS_CLIENT_MOCK.get_key_policy = MagicMock(return_value=policy_response) 150 | lambda_event = build_lambda_scheduled_event(rule_parameters=ruleParam) 151 | response = rule.lambda_handler(lambda_event, {}) 152 | resp_expected = [] 153 | resp_expected.append( 154 | build_expected_response( 155 | 'NON_COMPLIANT', 156 | 'alias/testkey', 157 | annotation='In Key Policy for alias/testkey, principal is set to * allowing full access to all users', 158 | ) 159 | ) 160 | assert_successful_evaluation(self, response, resp_expected) 161 | 162 | 163 | def test_ignore_root_account_if_by_self(self): 164 | ruleParam = ( 165 | "{\"CMKWhitelist\" : \"\", \"PrincipalWhitelist\" : \"\"}" 166 | ) 167 | KMS_CLIENT_MOCK.list_aliases = MagicMock(return_value=self.list_aliases) 168 | KMS_CLIENT_MOCK.describe_key = MagicMock( 169 | return_value={ 170 | "KeyMetadata": { 171 | "KeyId": "000041d6-1111-2222-3333-4444560c5555", 172 | "KeyManager": "CUSTOMER", 173 | "Enabled": True, 174 | } 175 | } 176 | ) 177 | policy_doc = build_policy_doc(actions="kms:*", principal = ["arn:aws:iam::111122223333:root"]) 178 | policy_response = build_policy_response(policy_doc) 179 | KMS_CLIENT_MOCK.get_key_policy = MagicMock(return_value=policy_response) 180 | lambda_event = build_lambda_scheduled_event(rule_parameters=ruleParam) 181 | response = rule.lambda_handler(lambda_event, {}) 182 | resp_expected = [] 183 | resp_expected.append( 184 | build_expected_response( 185 | 'COMPLIANT', 186 | 'alias/testkey', 187 | annotation='In Key Policy for alias/testkey, statement has valid rules', 188 | ) 189 | ) 190 | assert_successful_evaluation(self, response, resp_expected) 191 | 192 | def test_do_not_ignore_root_account_if_not_by_self(self): 193 | ruleParam = ( 194 | "{\"CMKWhitelist\" : \"\", \"PrincipalWhitelist\" : \"\"}" 195 | ) 196 | KMS_CLIENT_MOCK.list_aliases = MagicMock(return_value=self.list_aliases) 197 | KMS_CLIENT_MOCK.describe_key = MagicMock( 198 | 
return_value={ 199 | "KeyMetadata": { 200 | "KeyId": "000041d6-1111-2222-3333-4444560c5555", 201 | "KeyManager": "CUSTOMER", 202 | "Enabled": True, 203 | } 204 | } 205 | ) 206 | policy_doc = build_policy_doc(actions="kms:*", principal = ["arn:aws:iam::111122223333:root", "arn:aws:iam::111122223333:user/test"]) 207 | policy_response = build_policy_response(policy_doc) 208 | KMS_CLIENT_MOCK.get_key_policy = MagicMock(return_value=policy_response) 209 | lambda_event = build_lambda_scheduled_event(rule_parameters=ruleParam) 210 | response = rule.lambda_handler(lambda_event, {}) 211 | resp_expected = [] 212 | resp_expected.append( 213 | build_expected_response( 214 | 'NON_COMPLIANT', 215 | 'alias/testkey', 216 | annotation='In Key Policy for alias/testkey, statement has access to kms:* for non root principal', 217 | ) 218 | ) 219 | assert_successful_evaluation(self, response, resp_expected) 220 | 221 | def test_do_not_permit_kms_star(self): 222 | ruleParam = ( 223 | "{\"CMKWhitelist\" : \"\", \"PrincipalWhitelist\" : \"\"}" 224 | ) 225 | KMS_CLIENT_MOCK.list_aliases = MagicMock(return_value=self.list_aliases) 226 | KMS_CLIENT_MOCK.describe_key = MagicMock( 227 | return_value={ 228 | "KeyMetadata": { 229 | "KeyId": "000041d6-1111-2222-3333-4444560c5555", 230 | "KeyManager": "CUSTOMER", 231 | "Enabled": True, 232 | } 233 | } 234 | ) 235 | policy_doc = build_policy_doc(actions="kms:*") 236 | policy_response = build_policy_response(policy_doc) 237 | KMS_CLIENT_MOCK.get_key_policy = MagicMock(return_value=policy_response) 238 | lambda_event = build_lambda_scheduled_event(rule_parameters=ruleParam) 239 | response = rule.lambda_handler(lambda_event, {}) 240 | resp_expected = [] 241 | resp_expected.append( 242 | build_expected_response( 243 | 'NON_COMPLIANT', 244 | 'alias/testkey', 245 | annotation='In Key Policy for alias/testkey, statement has access to kms:* for non root principal', 246 | ) 247 | ) 248 | assert_successful_evaluation(self, response, resp_expected) 249 | 250 | def test_do_not_permit_encypt_and_manage(self): 251 | ruleParam = ( 252 | "{\"CMKWhitelist\" : \"\", \"PrincipalWhitelist\" : \"\"}" 253 | ) 254 | KMS_CLIENT_MOCK.list_aliases = MagicMock(return_value=self.list_aliases) 255 | KMS_CLIENT_MOCK.describe_key = MagicMock( 256 | return_value={ 257 | "KeyMetadata": { 258 | "KeyId": "000041d6-1111-2222-3333-4444560c5555", 259 | "KeyManager": "CUSTOMER", 260 | "Enabled": True, 261 | } 262 | } 263 | ) 264 | policy_doc = build_policy_doc( 265 | actions=["kms:Encrypt", "kms:Create*", "kms:Delete*", "kms:Put*"], 266 | principal = ["arn:aws:iam::111122223333:user/test"], 267 | ) 268 | policy_response = build_policy_response(policy_doc) 269 | KMS_CLIENT_MOCK.get_key_policy = MagicMock(return_value=policy_response) 270 | lambda_event = build_lambda_scheduled_event(rule_parameters=ruleParam) 271 | response = rule.lambda_handler(lambda_event, {}) 272 | resp_expected = [] 273 | resp_expected.append( 274 | build_expected_response( 275 | 'NON_COMPLIANT', 276 | 'alias/testkey', 277 | annotation='In Key Policy for alias/testkey, statement allows for both management and encryption voilating separation of duties', 278 | ) 279 | ) 280 | assert_successful_evaluation(self, response, resp_expected) 281 | 282 | def test_permit_encypt_and_manage_if_whitelisted(self): 283 | ruleParam = ( 284 | "{\"CMKWhitelist\" : \"\", \"PrincipalWhitelist\" : \"user/test\"}" 285 | ) 286 | KMS_CLIENT_MOCK.list_aliases = MagicMock(return_value=self.list_aliases) 287 | KMS_CLIENT_MOCK.describe_key = MagicMock( 288 | 
return_value={ 289 | "KeyMetadata": { 290 | "KeyId": "000041d6-1111-2222-3333-4444560c5555", 291 | "KeyManager": "CUSTOMER", 292 | "Enabled": True, 293 | } 294 | } 295 | ) 296 | policy_doc = build_policy_doc( 297 | actions=["kms:Encrypt", "kms:Create*", "kms:Delete*", "kms:Put*"], 298 | principal = ["arn:aws:iam::111122223333:user/test"], 299 | ) 300 | policy_response = build_policy_response(policy_doc) 301 | KMS_CLIENT_MOCK.get_key_policy = MagicMock(return_value=policy_response) 302 | lambda_event = build_lambda_scheduled_event(rule_parameters=ruleParam) 303 | response = rule.lambda_handler(lambda_event, {}) 304 | resp_expected = [] 305 | resp_expected.append( 306 | build_expected_response( 307 | 'COMPLIANT', 308 | 'alias/testkey', 309 | annotation='In Key Policy for alias/testkey, statement has valid rules', 310 | ) 311 | ) 312 | assert_successful_evaluation(self, response, resp_expected) 313 | 314 | def test_non_array_principal(self): 315 | ruleParam = ( 316 | "{\"CMKWhitelist\" : \"\", \"PrincipalWhitelist\" : \"\"}" 317 | ) 318 | KMS_CLIENT_MOCK.list_aliases = MagicMock(return_value=self.list_aliases) 319 | KMS_CLIENT_MOCK.describe_key = MagicMock( 320 | return_value={ 321 | "KeyMetadata": { 322 | "KeyId": "000041d6-1111-2222-3333-4444560c5555", 323 | "KeyManager": "CUSTOMER", 324 | "Enabled": True, 325 | } 326 | } 327 | ) 328 | policy_doc = build_policy_doc(actions="kms:*", principal = "arn:aws:iam::111122223333:root") 329 | policy_response = build_policy_response(policy_doc) 330 | KMS_CLIENT_MOCK.get_key_policy = MagicMock(return_value=policy_response) 331 | lambda_event = build_lambda_scheduled_event(rule_parameters=ruleParam) 332 | response = rule.lambda_handler(lambda_event, {}) 333 | resp_expected = [] 334 | resp_expected.append( 335 | build_expected_response( 336 | 'COMPLIANT', 337 | 'alias/testkey', 338 | annotation='In Key Policy for alias/testkey, statement has valid rules', 339 | ) 340 | ) 341 | assert_successful_evaluation(self, response, resp_expected) 342 | 343 | #################### 344 | # Helper Functions # 345 | #################### 346 | 347 | def build_lambda_configurationchange_event(invoking_event, rule_parameters=None): 348 | event_to_return = { 349 | 'configRuleName':'myrule', 350 | 'executionRoleArn':'roleArn', 351 | 'eventLeftScope': False, 352 | 'invokingEvent': invoking_event, 353 | 'accountId': '123456789012', 354 | 'configRuleArn': 'arn:aws:config:us-east-1:123456789012:config-rule/config-rule-8fngan', 355 | 'resultToken':'token' 356 | } 357 | if rule_parameters: 358 | event_to_return['ruleParameters'] = rule_parameters 359 | return event_to_return 360 | 361 | def build_lambda_scheduled_event(rule_parameters=None): 362 | invoking_event = '{"messageType":"ScheduledNotification","notificationCreationTime":"2017-12-23T22:11:18.158Z"}' 363 | event_to_return = { 364 | 'configRuleName':'myrule', 365 | 'executionRoleArn':'roleArn', 366 | 'eventLeftScope': False, 367 | 'invokingEvent': invoking_event, 368 | 'accountId': '123456789012', 369 | 'configRuleArn': 'arn:aws:config:us-east-1:123456789012:config-rule/config-rule-8fngan', 370 | 'resultToken':'token' 371 | } 372 | if rule_parameters: 373 | event_to_return['ruleParameters'] = rule_parameters 374 | return event_to_return 375 | 376 | def build_expected_response(compliance_type, compliance_resource_id, compliance_resource_type=DEFAULT_RESOURCE_TYPE, annotation=None): 377 | if not annotation: 378 | return { 379 | 'ComplianceType': compliance_type, 380 | 'ComplianceResourceId': compliance_resource_id, 381 | 
'ComplianceResourceType': compliance_resource_type 382 | } 383 | return { 384 | 'ComplianceType': compliance_type, 385 | 'ComplianceResourceId': compliance_resource_id, 386 | 'ComplianceResourceType': compliance_resource_type, 387 | 'Annotation': annotation 388 | } 389 | 390 | def assert_successful_evaluation(test_class, response, resp_expected, evaluations_count=1): 391 | if isinstance(response, dict): 392 | test_class.assertEquals(resp_expected['ComplianceResourceType'], response['ComplianceResourceType']) 393 | test_class.assertEquals(resp_expected['ComplianceResourceId'], response['ComplianceResourceId']) 394 | test_class.assertEquals(resp_expected['ComplianceType'], response['ComplianceType']) 395 | test_class.assertTrue(response['OrderingTimestamp']) 396 | if 'Annotation' in resp_expected or 'Annotation' in response: 397 | test_class.assertEquals(resp_expected['Annotation'], response['Annotation']) 398 | elif isinstance(response, list): 399 | test_class.assertEquals(evaluations_count, len(response)) 400 | for i, response_expected in enumerate(resp_expected): 401 | test_class.assertEquals(response_expected['ComplianceResourceType'], response[i]['ComplianceResourceType']) 402 | test_class.assertEquals(response_expected['ComplianceResourceId'], response[i]['ComplianceResourceId']) 403 | test_class.assertEquals(response_expected['ComplianceType'], response[i]['ComplianceType']) 404 | test_class.assertTrue(response[i]['OrderingTimestamp']) 405 | if 'Annotation' in response_expected or 'Annotation' in response[i]: 406 | test_class.assertEquals(response_expected['Annotation'], response[i]['Annotation']) 407 | 408 | def assert_customer_error_response(test_class, response, customer_error_code=None, customer_error_message=None): 409 | if customer_error_code: 410 | test_class.assertEqual(customer_error_code, response['customerErrorCode']) 411 | if customer_error_message: 412 | test_class.assertEqual(customer_error_message, response['customerErrorMessage']) 413 | test_class.assertTrue(response['customerErrorCode']) 414 | test_class.assertTrue(response['customerErrorMessage']) 415 | if "internalErrorMessage" in response: 416 | test_class.assertTrue(response['internalErrorMessage']) 417 | if "internalErrorDetails" in response: 418 | test_class.assertTrue(response['internalErrorDetails']) 419 | 420 | def sts_mock(): 421 | assume_role_response = { 422 | "Credentials": { 423 | "AccessKeyId": "string", 424 | "SecretAccessKey": "string", 425 | "SessionToken": "string"}} 426 | STS_CLIENT_MOCK.reset_mock(return_value=True) 427 | STS_CLIENT_MOCK.assume_role = MagicMock(return_value=assume_role_response) 428 | 429 | ################## 430 | # Common Testing # 431 | ################## 432 | 433 | class TestStsErrors(unittest.TestCase): 434 | 435 | def test_sts_unknown_error(self): 436 | rule.ASSUME_ROLE_MODE = True 437 | rule.evaluate_parameters = MagicMock(return_value=True) 438 | STS_CLIENT_MOCK.assume_role = MagicMock(side_effect=botocore.exceptions.ClientError( 439 | {'Error': {'Code': 'unknown-code', 'Message': 'unknown-message'}}, 'operation')) 440 | response = rule.lambda_handler(build_lambda_configurationchange_event('{}'), {}) 441 | assert_customer_error_response( 442 | self, response, 'InternalError', 'InternalError') 443 | 444 | def test_sts_access_denied(self): 445 | rule.ASSUME_ROLE_MODE = True 446 | rule.evaluate_parameters = MagicMock(return_value=True) 447 | STS_CLIENT_MOCK.assume_role = MagicMock(side_effect=botocore.exceptions.ClientError( 448 | {'Error': {'Code': 'AccessDenied', 
'Message': 'access-denied'}}, 'operation')) 449 | response = rule.lambda_handler(build_lambda_configurationchange_event('{}'), {}) 450 | assert_customer_error_response( 451 | self, response, 'AccessDenied', 'AWS Config does not have permission to assume the IAM role.') 452 | 453 | def build_policy_doc(actions=["kms:*"], principal=[ "*" ], has_condition = False): 454 | policy = { 455 | "Id": "key-consolepolicy-3", 456 | "Version": "2012-10-17", 457 | "Statement": [ 458 | { 459 | "Sid": "test", 460 | "Effect": "Allow", 461 | "Principal": { 462 | "AWS": principal 463 | }, 464 | "Action": actions, 465 | "Resource": "*" 466 | } 467 | ] 468 | } 469 | 470 | return json.dumps(policy) 471 | 472 | def build_policy_response(policy): 473 | 474 | return { 475 | "Policy": policy 476 | } -------------------------------------------------------------------------------- /code/python-rdk/AWSConfigRuleKMSLeastPrivilege/AWSConfigRuleKMSStatementProcessor.py: -------------------------------------------------------------------------------- 1 | import boto3 2 | import json 3 | from urllib.parse import unquote 4 | from fnmatch import fnmatch 5 | 6 | 7 | class AWSConfigRuleKMSStatementProcessor(object): 8 | def __init__(self, policy): 9 | self.dvdoc = json.loads(policy) 10 | # if the statement is a plain dict, force it into a list. 11 | statement = self.dvdoc['Statement'] 12 | if type(statement) is dict: 13 | self.dvdoc['Statement'] = [statement] 14 | 15 | # force everything into lists 16 | for stmt in statement: 17 | if "AWS" in stmt["Principal"] and type(stmt["Principal"]["AWS"]) is str: 18 | stmt["Principal"]["AWS"] = [stmt["Principal"]["AWS"]] 19 | 20 | if type(stmt["Action"]) is str: 21 | stmt["Action"] = [stmt["Action"]] 22 | 23 | # returns an object with compliance_type and annotation parameters 24 | def process(self, alias, account_number, rule_parameters): 25 | result = {} 26 | # start out valid, and we will mark not in the logic below 27 | result['compliance_type'] = 'COMPLIANT' 28 | result['annotation'] = "In Key Policy for {}, statement has valid rules".format( 29 | alias 30 | ) 31 | 32 | # iterate over each statement 33 | for stmt in self.dvdoc['Statement']: 34 | # ignore anything with a deny, we do not care 35 | if stmt['Effect'] == 'Deny': 36 | continue 37 | else: 38 | if "AWS" not in stmt["Principal"]: 39 | continue 40 | 41 | # Iterate over the principals 42 | for principal in stmt["Principal"]["AWS"]: 43 | # If it is the root then skip this check (root can do everything) 44 | if (principal == "arn:aws:iam::" + account_number + ":root"): 45 | continue 46 | 47 | if (self.filterUsers(principal, rule_parameters["PrincipalWhitelist"])): 48 | continue 49 | 50 | # If you are not the root, then you can't have kms:* 51 | if ("kms:*" in stmt["Action"]): 52 | result['compliance_type'] = 'NON_COMPLIANT' 53 | result['annotation'] = "In Key Policy for {}, statement has access to kms:* for non root principal".format( 54 | alias 55 | ) 56 | break 57 | 58 | if (principal == "*"): 59 | result['compliance_type'] = 'NON_COMPLIANT' 60 | result['annotation'] = "In Key Policy for {}, principal is set to * allowing full access to all users".format( 61 | alias 62 | ) 63 | break 64 | 65 | if (("kms:Encrypt" in stmt["Action"] or 66 | "kms:Decrypt" in stmt["Action"] or 67 | "kms:ReEncrypt" in stmt["Action"]) and 68 | ("kms:Create" in ''.join(stmt["Action"]) or 69 | "kms:Delete" in ''.join(stmt["Action"]) or 70 | "kms:Put" in ''.join(stmt["Action"]))): 71 | result['compliance_type'] = 'NON_COMPLIANT' 72 | result['annotation'] 
= "In Key Policy for {}, statement allows for both management and encryption voilating separation of duties".format( 73 | alias 74 | ) 75 | break 76 | 77 | return result 78 | 79 | def filterUsers(self, user, whitelist): 80 | if (whitelist == ""): 81 | return False 82 | 83 | parts = whitelist.split(",") 84 | 85 | for p in parts: 86 | if (p in user): 87 | return True 88 | 89 | return False 90 | -------------------------------------------------------------------------------- /code/python-rdk/AWSConfigRuleKMSLeastPrivilege/parameters.json: -------------------------------------------------------------------------------- 1 | { 2 | "Version": "1.0", 3 | "Parameters": { 4 | "RuleName": "AWSConfigRuleKMSLeastPrivilege-{ENV}", 5 | "Description": "AWSConfigRuleKMSLeastPrivilege", 6 | "SourceRuntime": "python3.8", 7 | "CodeKey": "AWSConfigRuleKMSLeastPrivilege-{ENV}.zip", 8 | "InputParameters": "{}", 9 | "OptionalParameters": "{\"ExecutionRoleName\":\"\",\"CMKWhitelist\" : \"\", \"PrincipalWhitelist\" : \"\"}", 10 | "SourcePeriodic": "TwentyFour_Hours", 11 | "CustomLambdaName":"RDK-KMSLeastPrivilege-{ENV}", 12 | "RuleSets": [ 13 | "all-rules-test", 14 | "all-rules-prod" 15 | ] 16 | }, 17 | "Tags": "[]" 18 | } 19 | -------------------------------------------------------------------------------- /code/python-rdk/AWSConfigRuleKMSLeastPrivilege/rules.txt: -------------------------------------------------------------------------------- 1 | Desired Rules: 2 | - Key is customer managed 3 | - Any key that is disabled is ignored 4 | - Key that is whitelisted is ignored 5 | - Key cannot have a * principal 6 | - Any statement targeting the current root account is ignored 7 | - Any statement that permits kms:* is not permitted 8 | - No statement is permitted to have both Encrypt and any combination of Create/Delete/Put - unless it is a whitelisted principal -------------------------------------------------------------------------------- /code/python-rdk/README.md: -------------------------------------------------------------------------------- 1 | # Rules Development Kit (RDK) Project 2 | 3 | This file details the structure of the associated Rules Development Kit (RDK) rules for AWS Config. It covers the basics of the RDK and how it interacts with the more complete DevSecOps stack. 4 | 5 | - [Rules Development Kit (RDK) Project](#rules-development-kit-rdk-project) 6 | - [1. About](#1-about) 7 | - [1.1 Environment Overview](#11-environment-overview) 8 | - [2. Creating a new rule](#2-creating-a-new-rule) 9 | - [2.1 Setup of your environment](#21-setup-of-your-environment) 10 | - [2.2 Creating a new empty rule](#22-creating-a-new-empty-rule) 11 | - [2.3 Define the conditioners you are intending to implement for](#23-define-the-conditioners-you-are-intending-to-implement-for) 12 | - [2.4 Setup test cases](#24-setup-test-cases) 13 | - [2.5 Implement functionality](#25-implement-functionality) 14 | - [2.6 Test in a development account](#26-test-in-a-development-account) 15 | - [2.7 Include in automated deployment and setup parameters](#27-include-in-automated-deployment-and-setup-parameters) 16 | - [3. Modifying an existing Rule](#3-modifying-an-existing-rule) 17 | - [4. Example Rule](#4-example-rule) 18 | - [Appendix A. References](#appendix-a-references) 19 | - [Acknowledgements](#acknowledgements) 20 | 21 | ## 1. About 22 | The RDK is a framework provided by AWS to allow for the rapid building of custom AWS Config rules, along with associated tooling to allow these to be easily tested and deployed. 
Use this folder (/code/python-rdk/) to place your custom RDK rules to enforce best practices across your AWS accounts. 23 | 24 | Currently, when new rules are created in this folder - and configured correctly (see below) - they will be automatically rolled out to all accounts across the AWS Organization. 25 | 26 | For reference, custom AWS Config Rules consist of 2 parts: a backend Lambda function to perform the checks - and report on associated compliance - and the actual AWS Config rule, which runs in the target account AND region. 27 | 28 | ### 1.1 Environment Overview 29 | In implementing RDK for the DevSecOps framework, the aim is to deploy custom AWS Config rules globally, but minimise the footprint for management of the custom Lambda code. 30 | 31 | To this end the custom RDK rules are deployed as 2 components: 32 | 1. The backend Lambda functions are deployed to a central account (in this case the DEPLOYMENT account) 33 | 2. The AWS Config rules are then deployed to all target accounts/regions with permissions to execute the central Lambda functions from the target accounts 34 | 35 | More details on how this structure is achieved can be found in the section describing the DevSecOps pipeline. 36 | 37 | ## 2. Creating a new rule 38 | ### 2.1 Setup of your environment 39 | In order to prepare your environment - and your associated AWS development account - for building and testing RDK, it is suggested you follow these steps: 40 | 1. Set up your default credentials for the AWS CLI in your environment 41 | 2. Install the AWS RDK - as outlined in https://github.com/awslabs/aws-config-rdk 42 | 3. Create a new Lambda role in your development environment - this role should include the basic Lambda execution policy and a custom policy allowing it to assume roles (`sts:AssumeRole` on resource `*`) 43 | 4. Run the `rdk init` command to ensure that AWS Config is running in your test environment - this will use the default credentials outlined above. This will ensure that Config is running and set up a deployment bucket for Lambda functions 44 | 5. If AWS Config is not using a custom role then set up a new role and configure Config to use this - ensure that the new role has the ReadOnly access policy 45 | 6. Extend the custom Config role to trust the Lambda role defined above 46 | 47 | ### 2.2 Creating a new empty rule 48 | The RDK provides a method of scaffolding rules via the command line interface. 49 | 1. Change into the python-rdk directory 50 | 2. Run the command `rdk create RULENAMEHERE --runtime python3.8 --maximum-frequency TwentyFour_Hours` 51 | 52 | The above will create a new rule using the Python language (all the automated build and deployment steps later are built around this), which is preconfigured to run every 24 hours when deployed. It is possible to generate a rule that is triggered by changes in the environment instead. 53 | 54 | ### 2.3 Define the conditioners you are intending to implement for 55 | It is suggested that you create a rules.txt in the base of your new rule to define the specific criteria you are implementing for. Examples of these can be seen in the existing rules, or below. 56 | 57 | > Desired Rules 58 | > - If RDS security group inbound rules have unrestricted IPv4s (0.0.0.0/0) then it is not compliant 59 | 60 | This makes the extent to which the implemented rule will impact the environment clear to both you as the developer and any other users who will need to support it in the future.
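For illustration, the RDS criterion above would eventually translate into a small check inside the rule's evaluation logic. The sketch below is purely illustrative - the function name and the shape of the inbound rule dictionary are assumptions, and the real home for this kind of logic is the scaffolded rule file described in section 2.5.

```
# Illustrative sketch only - hypothetical helper for the RDS criterion above.
# The input is assumed to be a single inbound rule entry from a security group.
def is_inbound_rule_compliant(inbound_rule):
    for ip_range in inbound_rule.get("IpRanges", []):
        if ip_range.get("CidrIp") == "0.0.0.0/0":
            return False  # unrestricted IPv4 access -> NON_COMPLIANT
    return True
```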
61 | 62 | ### 2.4 Setup test cases 63 | The RDK scaffolding process provides 2 files when it creates the new rule template: a base file into which functionality should be implemented (named the same as the rule) and a test file to implement unit tests. You will need to implement a test case for every one of the rules you defined in the rules.txt above. 64 | 65 | This will allow for local testing of your logic and validation that your code will function when put into the test environment. It also allows for validating a large number of specific test cases that would otherwise be infeasible to set up manually (IAM least privilege permissions are the most obvious case for this). 66 | 67 | Examples can be found in the existing rules - the test cases themselves use Python's MagicMock framework (from `unittest.mock`). 68 | 69 | You can then run your test cases by running the command: 70 | 71 | `rdk test-local RULENAMEHERE` 72 | 73 | This will run the tests and report pass/fail status. 74 | 75 | ### 2.5 Implement functionality 76 | As mentioned in the previous step, the implementation of the rules is done in the scaffolded file. Prior to beginning to implement code there are several changes you will need to make first: 77 | * Update the `DEFAULT_RESOURCE_TYPE` setting to match the resource type that will be returned by your rule 78 | * Update `ASSUME_ROLE_MODE` to True - this is what is used to assume the role in the cross-account scenario 79 | 80 | When implementing the functionality for the rules there are several items that are important to note: 81 | * When you are generating the boto3 client to connect to the AWS SDK you need to use the `get_client` function - this will correctly assume the cross-account role when you deploy into the live environments 82 | * When checking for IAM least privilege there is an existing helper class you can use, AWSConfigIAMProcessor, which already has the logic to get all the policies that are associated with roles and users - this means that you only need to modify the logic which checks these to implement new IAM Least Privilege checks 83 | * If there is a need for parameters - at a minimum you will likely need to be able to whitelist specific names - you can add these in the parameters.json file, as seen in the existing rules 84 | 85 | ### 2.6 Test in a development account 86 | > Prior to testing in development please ensure that all test cases pass 87 | 88 | In order to test in your development account (assumed to be the default account set up for your AWS CLI) you can run the deployment command below: 89 | 90 | `rdk deploy RULENAMEHERE --lambda-timeout 600 --lambda-role-arn DEVLAMBDAROLEARNHERE` 91 | 92 | The above command will set up a CloudFormation stack for the noted rule and deploy it using the associated role - the Lambda role is defined explicitly so that you do not need to individually trust every Lambda role that RDK creates for testing. 93 | 94 | In order to run the rule: 95 | 1. Log in to the AWS Console for your account 96 | 2. Navigate to Config 97 | 3. Open the rule 98 | 4. Hit Evaluate 99 | 100 | The above will then either run and report back a list of compliant/non-compliant resources (and set the Last Executed Time) or will not run.
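If you prefer to kick off the evaluation programmatically rather than through the console, a short boto3 call can do the same thing. This is a hedged sketch - it assumes your default credentials point at the development account, and `RULENAMEHERE` is a placeholder for the rule name shown in the Config console.

```
import boto3

# Trigger an on-demand evaluation of the deployed rule (placeholder name).
config_client = boto3.client("config")
config_client.start_config_rules_evaluation(ConfigRuleNames=["RULENAMEHERE"])
```

Either way, the common issues below apply to both the console and the programmatic route.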
101 | 102 | Common issues are: 103 | * You did not wait long enough - if you have an IAM Least Privilege rule then this can take several minutes to run 104 | * The trust relationship is not set up between Config and your Lambda 105 | * The custom Config role does not have access to the specific read permission for what is being checked 106 | 107 | You can find the CloudWatch logs for the Lambda function, as you would for any other Lambda, to investigate further. 108 | 109 | ### 2.7 Include in automated deployment and setup parameters 110 | To include the newly created rule in the deployment process you need to add the following to the parameters.json under the `Parameters` object: 111 | ``` 112 | "RuleSets": [ 113 | "all-rules-test", 114 | "all-rules-prod" 115 | ] 116 | ``` 117 | - Adding 'all-rules-test' will deploy the rule to either the default 'test' deployment group (`/config/GlobalParams.json`) or the override designated OU in `/config/Stacks.json` for the stack `CustomRDKAWSConfigRulesStackSet` where environmentType="test" 118 | - Adding 'all-rules-prod' will deploy the rule to either the default 'prod' deployment group (`/config/GlobalParams.json`) or the override designated OU as defined in `/config/Stacks.json` for the stack `CustomRDKAWSConfigRulesStackSet` where environmentType="prod" 119 | 120 | This allows you to selectively work on rules in a test environment prior to promoting them to a production environment, regardless of whether the pipeline deploys to prod or not. 121 | 122 | To ensure that any parameters you set up are propagated as a part of this you will need to: 123 | 1. Add the new settings to the `/config/Stacks.json` file `CustomRDKAWSConfigRulesStackSet` section - the names should match the names in the generated CloudFormation template; you can use the deployment command above along with the flag `--rules-only` to see the generated parameter names 124 | 125 | ## 3. Modifying an existing Rule 126 | To modify an existing rule you should: 127 | 1. Update the `rules.txt` to reflect the changes you are making to the enforced rules 128 | 2. Update the unit tests to check any code changes 129 | 3. Test in the local environment first, prior to including in the full CICD process - note that you can remove the RuleSet from the rule temporarily to stop it being enforced, although this will remove the rule globally 130 | 131 | 132 | ## 4. Example Rule 133 | The DevSecOps Framework comes with an example rule `AWSConfigRuleKMSLeastPrivilege`. This sample rule is supplied by `aws-samples`, has been developed by Tracy Pierce [https://github.com/aws-samples/aws-config-aws-kms-policy-rule] and is explained in the post [https://aws.amazon.com/blogs/security/how-to-use-aws-config-to-determine-compliance-of-aws-kms-key-policies-to-your-specifications/]. 134 | This rule code is made available under a modified MIT license. 135 | 136 | ## Appendix A. 
References 137 | Documentation for the RDK commands https://github.com/awslabs/aws-config-rdk/blob/master/docs/reference/ 138 | 139 | ## Acknowledgements 140 | - Tracy Piece - example RDK rule `AWSConfigRuleKMSLeastPrivilege` [https://github.com/aws-samples/aws-config-aws-kms-policy-rule] -------------------------------------------------------------------------------- /code/ts-cdk/src/stackset_creator/.npmignore: -------------------------------------------------------------------------------- 1 | *.ts 2 | !*.d.ts 3 | 4 | # CDK asset staging directory 5 | .cdk.staging 6 | cdk.out 7 | -------------------------------------------------------------------------------- /code/ts-cdk/src/stackset_creator/README.md: -------------------------------------------------------------------------------- 1 | # Welcome to your CDK TypeScript project! 2 | 3 | This is a blank project for TypeScript development with CDK. 4 | 5 | The `cdk.json` file tells the CDK Toolkit how to execute your app. 6 | 7 | ## Useful commands 8 | 9 | * `npm run build` compile typescript to js 10 | * `npm run watch` watch for changes and compile 11 | * `npm run test` perform the jest unit tests 12 | * `cdk deploy` deploy this stack to your default AWS account/region 13 | * `cdk diff` compare deployed stack with current state 14 | * `cdk synth` emits the synthesized CloudFormation template 15 | -------------------------------------------------------------------------------- /code/ts-cdk/src/stackset_creator/bin/cdk.ts: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env node 2 | import "source-map-support/register"; 3 | import * as cdk from "aws-cdk-lib"; 4 | import { 5 | StackInstances, 6 | Cdk_StackSet_Creator, 7 | } from "../lib/cdk_stackset_creator"; 8 | import { 9 | CFStacks, 10 | Cdk_MasterStack_Creator, 11 | } from "../lib/cdk_master_stack_creator"; 12 | import { 13 | ScanCommand, 14 | ScanCommandInput, 15 | QueryCommand, 16 | QueryCommandInput, 17 | } from "@aws-sdk/lib-dynamodb"; 18 | import { ddbDocClient } from "../lib/ddbDocClient"; 19 | import { CFEnvironmentParameters } from "../lib/cfModels"; 20 | import * as cf from "aws-cdk-lib/aws-cloudformation"; 21 | import { DefaultStackSynthesizer } from "aws-cdk-lib"; 22 | 23 | function pop(object: any, propertyName: any) { 24 | let temp = object[propertyName]; 25 | delete object[propertyName]; 26 | return temp; 27 | } 28 | 29 | //Function to assist filtering arrays to only return unique values 30 | function onlyUnique(value: string, index: number, self: string[]) { 31 | return self.indexOf(value) === index; 32 | } 33 | 34 | async function createApp(): Promise { 35 | let app = new cdk.App(); 36 | 37 | let l_generatedStacks = []; 38 | 39 | let l_buildGuid = app.node.tryGetContext("buildGuid") || "123456789"; 40 | let l_awsAccountId = app.node.tryGetContext("awsAccountId") || "123456789012"; 41 | let l_projectResourcePrefix = 42 | app.node.tryGetContext("projectResourcePrefix") || "myproject"; 43 | let l_projectFriendlyName = 44 | app.node.tryGetContext("projectFriendlyName") || "my project"; 45 | 46 | // Get Global Parameters Configuration 47 | let gp_params: ScanCommandInput = { 48 | TableName: `${l_projectResourcePrefix}-DynDBTable-GlobalParams`, 49 | }; 50 | let gp_command = new ScanCommand(gp_params); 51 | let gp_data = await ddbDocClient.send(gp_command); 52 | 53 | // console.log(gp_data); 54 | 55 | let g_stacksList: CFStacks.CloudFormationEnvironments = { 56 | env: [], 57 | }; 58 | let g_globalParamList: 
CFEnvironmentParameters.CFParams = { 59 | env: [], 60 | }; 61 | for (let l_envItem in gp_data.Items[0].env) { 62 | let l_envParams: CFEnvironmentParameters.CFEnvParameterProperty = { 63 | environmentType: gp_data.Items[0].env[l_envItem].environmentType, 64 | environmentFriendlyName: 65 | gp_data.Items[0].env[l_envItem].environmentFriendlyName, 66 | retainStacksOnAccountRemoval: 67 | gp_data.Items[0].env[l_envItem].defaultRetainStacksOnAccountRemoval, 68 | params: [], 69 | }; 70 | 71 | let l_fieldNameEnvironmentType = "EnvironmentType"; 72 | let l_fieldNameEnvironmentFriendlyName = "EnvironmentFriendlyName"; 73 | let l_fieldNameProjectFriendlyName = "ProjectFriendlyName"; 74 | let l_fieldNameBuildGuid = "BuildGuid"; 75 | let l_fieldPrefixGlobalParams = "GLOBAL"; 76 | 77 | //Add EnvType and EnvFriendlyName as Global Params 78 | l_envParams.params.push({ 79 | parameterUniqueKey: `${l_fieldPrefixGlobalParams}-${l_fieldNameEnvironmentType}`, 80 | parameterKey: l_fieldNameEnvironmentType, 81 | parameterValue: gp_data.Items[0].env[l_envItem].environmentType, 82 | }); 83 | l_envParams.params.push({ 84 | parameterUniqueKey: `${l_fieldPrefixGlobalParams}-${l_fieldNameEnvironmentFriendlyName}`, 85 | parameterKey: l_fieldNameEnvironmentFriendlyName, 86 | parameterValue: gp_data.Items[0].env[l_envItem].environmentFriendlyName, 87 | }); 88 | 89 | for (let l_gparam in gp_data.Items[0].env[l_envItem].globalparams) { 90 | let l_globalParamUniqueKey = `${l_fieldPrefixGlobalParams}-${l_gparam}`; //This could be useful if referencing all params from masterstack. 91 | let l_globalParamKey = `${l_gparam}`; 92 | let l_globalParamValue = `${gp_data.Items[0].env[l_envItem].globalparams[l_gparam]}`; 93 | 94 | //Set the Project Friendly name from the Global Param. 95 | if (l_globalParamKey == l_fieldNameProjectFriendlyName) { 96 | l_projectFriendlyName = l_globalParamValue; 97 | } 98 | //Replace placeholder Global Param 'BuildGuid' with unique guid generated from buildspec 99 | if (l_globalParamKey == l_fieldNameBuildGuid) { 100 | l_globalParamValue = l_buildGuid; 101 | } 102 | 103 | let l_param: CFEnvironmentParameters.CFParameterProperty = { 104 | parameterUniqueKey: l_globalParamUniqueKey, 105 | parameterKey: l_globalParamKey, 106 | parameterValue: l_globalParamValue, 107 | }; 108 | l_envParams.params.push(l_param); 109 | } 110 | g_globalParamList.env.push(l_envParams); 111 | 112 | //Also add placeholder in stacklist for env 113 | g_stacksList.env.push({ 114 | environmentType: gp_data.Items[0].env[l_envItem].environmentType, 115 | stacks: [], 116 | }); 117 | } 118 | 119 | // Get deep copy of param objects and deployment groups. 
We keep: 120 | // - a global params list used to add to each generated nested stack parameters 121 | // - a master params list which includes global params and all nested stack params 122 | let g_masterParamList = JSON.parse(JSON.stringify(g_globalParamList)); 123 | let g_environments = JSON.parse(JSON.stringify(gp_data.Items[0].env)); 124 | 125 | // StackSet Configuration 126 | let l_stacks_params: ScanCommandInput = { 127 | TableName: `${l_projectResourcePrefix}-DynDBTable-Stacks`, 128 | }; 129 | let l_stacks_command = new ScanCommand(l_stacks_params); 130 | let l_stacks_data = await ddbDocClient.send(l_stacks_command); 131 | 132 | for (let l_stack of l_stacks_data.Items) { 133 | if (l_stack.enabled != null && l_stack.enabled == "false") { 134 | console.log( 135 | `* Skipping ${l_stack.type} - ${l_stack.name} - set to disabled` 136 | ); 137 | //skip disabled stack 138 | continue; 139 | } 140 | console.log(`* Generating ${l_stack.type} - ${l_stack.name}`); 141 | 142 | let l_stackParamList: CFEnvironmentParameters.CFParams = { 143 | env: [], 144 | }; 145 | for (let l_envsettings of l_stack.env) { 146 | let l_stack_dependsOn = undefined; 147 | if (l_stack.dependsOn !== undefined) { 148 | l_stack_dependsOn = `${l_stack.dependsOn}-${l_envsettings.environmentType}`; 149 | console.log(` - stack depends on = '${l_stack_dependsOn}'`); 150 | } 151 | 152 | let l_stack_retainStacksOnAccountRemoval = undefined; 153 | if (l_stack.retainStacksOnAccountRemoval !== undefined) { 154 | // Use Stack-specific Setting 155 | l_stack_retainStacksOnAccountRemoval = 156 | l_stack.retainStacksOnAccountRemoval; 157 | } else { 158 | // Use global setting 159 | l_stack_retainStacksOnAccountRemoval = 160 | g_globalParamList.env.find(function (item: any) { 161 | return item.environmentType === l_envsettings.environmentType; 162 | })?.retainStacksOnAccountRemoval || "true"; 163 | } 164 | //Get Environment Friendly Name from Global Setting (not a stack setting) 165 | let l_stack_environmentFriendlyName = 166 | g_globalParamList.env.find(function (item: any) { 167 | return item.environmentType === l_envsettings.environmentType; 168 | })?.environmentFriendlyName || "ERROR"; 169 | 170 | let l_envStackParams: CFEnvironmentParameters.CFEnvParameterProperty = { 171 | environmentType: l_envsettings.environmentType, 172 | environmentFriendlyName: l_stack_environmentFriendlyName, 173 | retainStacksOnAccountRemoval: l_stack_retainStacksOnAccountRemoval, 174 | params: [], 175 | }; 176 | // let l_stackSetParams: cf.CfnStackSet.ParameterProperty[] = []; 177 | console.log(` - env = ${l_envsettings.environmentType}`); 178 | 179 | //Get local params for env 180 | for (let l_paramName in l_envsettings.localparams) { 181 | let stackParamUniqueKey = `${l_stack.name}-${l_paramName}`; //This could be useful if referencing all params from masterstack. 
182 | let stackParamKey = `${l_paramName}`; 183 | let stackParamValue = l_envsettings.localparams[l_paramName]; 184 | let l_stackParam: CFEnvironmentParameters.CFParameterProperty = { 185 | parameterUniqueKey: stackParamUniqueKey, 186 | parameterKey: stackParamKey, 187 | parameterValue: stackParamValue, 188 | }; 189 | l_envStackParams.params.push(l_stackParam); 190 | 191 | //Add param to the master list 192 | for (let l_env in g_masterParamList.env) { 193 | if ( 194 | g_masterParamList.env[l_env].environmentType === 195 | l_envsettings.environmentType 196 | ) { 197 | g_masterParamList.env[l_env].params.push(l_stackParam); 198 | } 199 | } 200 | } 201 | //Then add any global settings to the local stack 202 | if (l_stack.type !== "rdkstackset") { 203 | for (let l_env in g_globalParamList.env) { 204 | if ( 205 | g_globalParamList.env[l_env].environmentType === 206 | l_envsettings.environmentType 207 | ) { 208 | l_envStackParams.params = l_envStackParams.params.concat( 209 | g_globalParamList.env[l_env].params 210 | ); 211 | } 212 | } 213 | } else { 214 | //With RDK Stacksets, dont include any global params, instead auto generate a LambdaAccountId pram with current Account Id 215 | let l_stackParam: CFEnvironmentParameters.CFParameterProperty = { 216 | parameterUniqueKey: `${l_stack.name}-LambdaAccountId`, 217 | parameterKey: "LambdaAccountId", 218 | parameterValue: l_awsAccountId, 219 | }; 220 | l_envStackParams.params.push(l_stackParam); 221 | } 222 | //Complete the local Stack Param List 223 | l_stackParamList.env.push(l_envStackParams); 224 | 225 | if (l_stack.type == "stackset" || l_stack.type == "rdkstackset") { 226 | //Create stackset 227 | 228 | //First, lets determine which DeploymentGroup the Stack will be deployed to. 229 | let l_deploymentGroupCode: string = ""; 230 | if ( 231 | l_envsettings.overrideDeploymentGroup != null && 232 | l_envsettings.overrideDeploymentGroup != "" 233 | ) { 234 | //Use Override Deployment group specific for Stack 235 | console.log( 236 | ` - override deployment group - ${l_envsettings.overrideDeploymentGroup}` 237 | ); 238 | l_deploymentGroupCode = l_envsettings.overrideDeploymentGroup; 239 | } else { 240 | //Use Global Setting for the Env 241 | let l_globalEnvNode = g_environments.find(function (item: any) { 242 | return item.environmentType === l_envsettings.environmentType; 243 | }); 244 | console.log( 245 | ` - default deployment group - ${l_globalEnvNode.defaultDeploymentGroup}` 246 | ); 247 | l_deploymentGroupCode = l_globalEnvNode.defaultDeploymentGroup; 248 | } 249 | 250 | let l_stackInstanceGroups: StackInstances.StackInstanceGroups = { 251 | groups: [], 252 | }; 253 | 254 | //Lookup Deployment Group, get TargetRegions 255 | let dg_params: QueryCommandInput = { 256 | TableName: `${l_projectResourcePrefix}-DynDBTable-DeploymentGroups`, 257 | KeyConditionExpression: "groupCode = :o", 258 | ExpressionAttributeValues: { 259 | ":o": l_deploymentGroupCode, 260 | }, 261 | }; 262 | let dg_command = new QueryCommand(dg_params); 263 | let dg_data = await ddbDocClient.send(dg_command); 264 | 265 | if (dg_data.Items.length == 1) { 266 | for (let l_targetRegion of dg_data.Items[0].targetRegions) { 267 | //Each TargetRegion is a collection of OrgUnitIds +/- Accounts which will share the same target regions. 268 | //Setup your json file to ensure orgunits/accounts in a TargetRegion do not mix regions you dont want to deploy to. 
269 | let l_deploymentOrgUnitIdsValues: string[] = []; 270 | let l_deploymentAccountIdsValues: string[] = []; 271 | let l_deployRegions: string[] = []; 272 | 273 | let l_hasAccountIds = false; 274 | let l_hasOrgUnitIds = false; 275 | 276 | //AccountFilterType Defaults to NONE if no AccountIds exist 277 | let l_accountFilterType: string = "NONE"; 278 | 279 | l_deploymentOrgUnitIdsValues = l_targetRegion.orgUnitIds; 280 | l_deploymentAccountIdsValues = l_targetRegion.accountIds; 281 | 282 | if ( 283 | l_targetRegion.accountFilterType == undefined || 284 | l_targetRegion.accountFilterType == null 285 | ) { 286 | if ( 287 | l_deploymentAccountIdsValues != undefined && 288 | l_deploymentAccountIdsValues != null && 289 | l_deploymentAccountIdsValues.length > 0 290 | ) { 291 | //Default if AccountIds exist but no account filter exists 292 | l_accountFilterType = "INTERSECTION"; 293 | } 294 | } else { 295 | //Use setting as defined. 296 | l_accountFilterType = l_targetRegion.accountFilterType; 297 | } 298 | 299 | //Ok, now we need to lookup the correct regions this TargetRegion group will target. 300 | if ( 301 | l_envsettings.overrideDeploymentRegions != null && 302 | l_envsettings.overrideDeploymentRegions.length > 0 303 | ) { 304 | //If the stack specifically overrides regions, use this setting, 305 | console.log( 306 | ` - adding override region(s) (${l_envsettings.overrideDeploymentRegions}) to targetRegion` 307 | ); 308 | l_deployRegions = l_deployRegions.concat(l_envsettings.overrideDeploymentRegions); 309 | } else { 310 | //Otherwise lookup the correct regions.. start with OrgUnitIds 311 | if ( 312 | l_deploymentOrgUnitIdsValues != undefined && 313 | l_deploymentOrgUnitIdsValues != null && 314 | l_deploymentOrgUnitIdsValues.length > 0 315 | ) { 316 | for (let _ou in l_deploymentOrgUnitIdsValues) { 317 | l_hasOrgUnitIds = true; 318 | let ou_params: QueryCommandInput = { 319 | TableName: `${l_projectResourcePrefix}-DynDBTable-OrgUnits`, 320 | KeyConditionExpression: "orgUnitId = :o", 321 | ExpressionAttributeValues: { 322 | ":o": l_deploymentOrgUnitIdsValues[_ou], 323 | }, 324 | }; 325 | let ou_command = new QueryCommand(ou_params); 326 | let ou_data = await ddbDocClient.send(ou_command); 327 | 328 | //Add to region list 329 | for (let l_ouItem of ou_data.Items) { 330 | if ( 331 | l_envsettings.overrideDeploymentRegionUseDefaultOnly !== 332 | undefined && 333 | l_envsettings.overrideDeploymentRegionUseDefaultOnly === 334 | "true" 335 | ) { 336 | //Set to deploy only to default region 337 | console.log( 338 | ` - adding default region (${l_ouItem.defaultRegion}) only to targetRegion` 339 | ); 340 | l_deployRegions = l_deployRegions.concat([l_ouItem.defaultRegion]); 341 | } else { 342 | //Set to all deployment regions listed in OU 343 | console.log( 344 | ` - adding all listed deployment region(s) (${l_ouItem.deploymentRegions}) to targetRegion` 345 | ); 346 | l_deployRegions = l_deployRegions.concat(l_ouItem.deploymentRegions); 347 | } 348 | } 349 | } 350 | } 351 | //Now AccountIds 352 | if ( 353 | l_deploymentAccountIdsValues != undefined && 354 | l_deploymentAccountIdsValues != null && 355 | l_deploymentAccountIdsValues.length > 0 356 | ) { 357 | for (let _acc in l_deploymentAccountIdsValues) { 358 | l_hasAccountIds = true; 359 | let acc_params: QueryCommandInput = { 360 | TableName: `${l_projectResourcePrefix}-DynDBTable-Accounts`, 361 | KeyConditionExpression: "accountId = :o", 362 | ExpressionAttributeValues: { 363 | ":o": l_deploymentAccountIdsValues[_acc], 364 | }, 365 | }; 366 | let 
acc_command = new QueryCommand(acc_params); 367 | let acc_data = await ddbDocClient.send(acc_command); 368 | 369 | //Add to region list 370 | for (let l_accItem of acc_data.Items) { 371 | if ( 372 | l_envsettings.overrideDeploymentRegionUseDefaultOnly !== 373 | undefined && 374 | l_envsettings.overrideDeploymentRegionUseDefaultOnly === 375 | "true" 376 | ) { 377 | //Set to deploy only to default region 378 | console.log( 379 | ` - adding default region (${l_accItem.defaultRegion}) only to targetRegion` 380 | ); 381 | l_deployRegions = l_deployRegions.concat([l_accItem.defaultRegion]); 382 | } else { 383 | //Set to all deployment regions listed in OU 384 | console.log( 385 | ` - adding all listed deployment region(s) (${l_accItem.deploymentRegions}) to targetRegion` 386 | ); 387 | l_deployRegions = l_deployRegions.concat(l_accItem.deploymentRegions); 388 | } 389 | } 390 | } 391 | } 392 | } 393 | //End of building up region list 394 | console.log( 395 | ` - list of regions (prior to unique): ${l_deployRegions}` 396 | ); 397 | console.log( 398 | ` - list of regions (unique): ${l_deployRegions.filter(onlyUnique)}` 399 | ); 400 | 401 | let l_deploymentTargetsProperty: cf.CfnStackSet.DeploymentTargetsProperty = 402 | { 403 | accountFilterType: l_accountFilterType, 404 | ...(l_hasAccountIds && { 405 | accounts: l_deploymentAccountIdsValues, 406 | }), 407 | ...(l_hasOrgUnitIds && { 408 | organizationalUnitIds: l_deploymentOrgUnitIdsValues, 409 | }), 410 | }; 411 | 412 | let l_stackInstanceGroup: cf.CfnStackSet.StackInstancesProperty = { 413 | deploymentTargets: l_deploymentTargetsProperty, 414 | regions: l_deployRegions.filter(onlyUnique), 415 | parameterOverrides: l_envStackParams.params, 416 | }; 417 | l_stackInstanceGroups.groups?.push(l_stackInstanceGroup); 418 | } 419 | } 420 | //Create StackSet 421 | let l_stackSetName = `${l_stack.name}-${l_envsettings.environmentType}`; 422 | //If templateFile includes environment variable {ENV} - replace with environmentType 423 | let re = /{ENV}/gi; 424 | let l_stacksetTemplateFile = l_stack.templateFile.replace( 425 | re, 426 | l_envsettings.environmentType 427 | ); 428 | let l_stackDescription = `${l_stack.description}`; 429 | let l_parentStackDescription = `Generated CDK stack '${l_stack.name}' from project '${l_projectFriendlyName}' (env=${l_envsettings.environmentType})`; 430 | let myStack1 = Cdk_StackSet_Creator(app, l_stackSetName, { 431 | stackSetName: l_stackSetName, 432 | stackSetTemplateFile: l_stacksetTemplateFile, 433 | stackDescription: l_stackDescription, 434 | // description: l_parentStackDescription, 435 | retainStacksOnAccountRemoval: 436 | l_envStackParams.retainStacksOnAccountRemoval === "true" 437 | ? 
true 438 | : false, 439 | stackParams: l_envStackParams.params, 440 | stackInstanceGroups: l_stackInstanceGroups, 441 | environmentFriendlyName: l_stack_environmentFriendlyName, 442 | environmentType: l_envsettings.environmentType, 443 | projectFriendlyName: l_projectFriendlyName, 444 | synthesizer: new DefaultStackSynthesizer({ 445 | generateBootstrapVersionRule: false, 446 | }), 447 | }); 448 | let l_type = "GENERATED_CDK_STACK"; 449 | if (l_stack.type == "rdkstackset") { 450 | l_type = "GENERATED_RDK_STACK"; 451 | } 452 | let l_stackItem: CFStacks.CloudFormationStack = { 453 | stackName: l_stackSetName, 454 | type: l_type, 455 | dependsOn: l_stack_dependsOn, 456 | }; 457 | let l_envStackListNode = g_stacksList.env.find(function (item: any) { 458 | return item.environmentType === l_envsettings.environmentType; 459 | }); 460 | l_envStackListNode?.stacks.push(l_stackItem); 461 | l_generatedStacks.push(myStack1); //not really needed 462 | } else { 463 | //Create stack 464 | let l_stackName = `${l_stack.name}-${l_envsettings.environmentType}`; 465 | let re = /{ENV}/gi; 466 | let l_stackTemplateFile = l_stack.templateFile.replace( 467 | re, 468 | l_envsettings.environmentType 469 | ); 470 | let l_envStackListNode = g_stacksList.env.find(function (item: any) { 471 | return item.environmentType === l_envsettings.environmentType; 472 | }); 473 | let l_params = CFEnvironmentParameters.ConvertCFParameterListToRecords( 474 | l_envStackParams.params 475 | ); 476 | let l_type = "EXTERNAL_CF_TEMPLATE"; 477 | let l_stackItem: CFStacks.CloudFormationStack = { 478 | stackName: l_stackName, 479 | type: l_type, 480 | templateFile: l_stackTemplateFile, 481 | params: l_params, 482 | dependsOn: l_stack_dependsOn, 483 | }; 484 | l_envStackListNode?.stacks.push(l_stackItem); 485 | } 486 | } 487 | } 488 | 489 | //Build a Master Stack 490 | for (let l_env in g_globalParamList.env) { 491 | //Get Nested Stacks 492 | let l_NestedStacks = g_stacksList.env.find(function (item: any) { 493 | return ( 494 | item.environmentType === g_globalParamList.env[l_env].environmentType 495 | ); 496 | }); 497 | let l_stackName = `MasterStack-${g_globalParamList.env[l_env].environmentType}`; 498 | let myMasterStack1 = Cdk_MasterStack_Creator(app, l_stackName, { 499 | environmentFriendlyName: 500 | g_globalParamList.env[l_env].environmentFriendlyName, 501 | environmentType: g_globalParamList.env[l_env].environmentType, 502 | description: `Generated CDK Master Stack for project '${l_projectFriendlyName}' (env=${g_globalParamList.env[l_env].environmentType})`, 503 | masterStackName: l_stackName, 504 | nestedStacks: l_NestedStacks?.stacks || [], 505 | projectFriendlyName: l_projectFriendlyName, 506 | synthesizer: new DefaultStackSynthesizer({ 507 | generateBootstrapVersionRule: false, 508 | }), 509 | }); 510 | } 511 | 512 | return app; 513 | } 514 | 515 | createApp(); 516 | -------------------------------------------------------------------------------- /code/ts-cdk/src/stackset_creator/cdk.json: -------------------------------------------------------------------------------- 1 | { 2 | "app": "npx ts-node --prefer-ts-exts bin/cdk.ts", 3 | "watch": { 4 | "include": [ 5 | "**" 6 | ], 7 | "exclude": [ 8 | "README.md", 9 | "cdk*.json", 10 | "**/*.d.ts", 11 | "**/*.js", 12 | "tsconfig.json", 13 | "package*.json", 14 | "yarn.lock", 15 | "node_modules", 16 | "test" 17 | ] 18 | }, 19 | "context": { 20 | "@aws-cdk/aws-apigateway:usagePlanKeyOrderInsensitiveId": true, 21 | "@aws-cdk/core:stackRelativeExports": true, 22 | 
"@aws-cdk/aws-rds:lowercaseDbIdentifier": true, 23 | "@aws-cdk/aws-lambda:recognizeVersionProps": true, 24 | "@aws-cdk/aws-lambda:recognizeLayerVersion": true, 25 | "@aws-cdk/aws-cloudfront:defaultSecurityPolicyTLSv1.2_2021": true, 26 | "@aws-cdk-containers/ecs-service-extensions:enableDefaultLogDriver": true, 27 | "@aws-cdk/aws-ec2:uniqueImdsv2TemplateName": true, 28 | "@aws-cdk/core:checkSecretUsage": true, 29 | "@aws-cdk/aws-iam:minimizePolicies": true, 30 | "@aws-cdk/core:validateSnapshotRemovalPolicy": true, 31 | "@aws-cdk/core:target-partitions": [ 32 | "aws", 33 | "aws-cn" 34 | ] 35 | } 36 | } 37 | -------------------------------------------------------------------------------- /code/ts-cdk/src/stackset_creator/jest.config.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | testEnvironment: 'node', 3 | roots: ['/test'], 4 | testMatch: ['**/*.test.ts'], 5 | transform: { 6 | '^.+\\.tsx?$': 'ts-jest' 7 | } 8 | }; 9 | -------------------------------------------------------------------------------- /code/ts-cdk/src/stackset_creator/lib/cdk_master_stack_creator.ts: -------------------------------------------------------------------------------- 1 | import { 2 | Stack, 3 | StackProps, 4 | CfnResource, 5 | Tags, 6 | } from "aws-cdk-lib"; 7 | import * as cf from "aws-cdk-lib/aws-cloudformation"; 8 | import { Construct } from "constructs"; 9 | import { DefaultStackSynthesizer } from 'aws-cdk-lib'; 10 | 11 | interface MasterStackProps extends StackProps { 12 | masterStackName: string; 13 | nestedStacks: CFStacks.CloudFormationStack[]; 14 | masterStackParams?: cf.CfnStackSet.ParameterProperty[]; 15 | environmentType: string; 16 | environmentFriendlyName: string; 17 | projectFriendlyName: string; 18 | synthesizer: DefaultStackSynthesizer 19 | } 20 | 21 | export module CFResources { 22 | export class CloudFormationResources { 23 | resources: CloudFormationResource[]; 24 | } 25 | export class CloudFormationResource { 26 | resourceName: string; 27 | resource: CfnResource; 28 | dependsOn?: string; 29 | } 30 | } 31 | 32 | export module CFStacks { 33 | export class CloudFormationEnvironments { 34 | env: CloudFormationEnvironment[]; 35 | } 36 | export class CloudFormationEnvironment { 37 | environmentType: string; 38 | stacks: CloudFormationStack[]; 39 | } 40 | export class CloudFormationStack { 41 | stackName: string; 42 | type: string; 43 | templateFile?: string; 44 | dependsOn?: string; 45 | timeoutInMinutes?: number; 46 | params?: Record; 47 | } 48 | } 49 | 50 | export async function Cdk_MasterStack_Creator( 51 | scope: Construct, 52 | id: string, 53 | props: MasterStackProps, 54 | ) { 55 | const stack = new Stack(scope, id, props); 56 | 57 | const buildGuid = stack.node.tryGetContext("buildGuid") || "123456789"; 58 | const projectResourcePrefix = 59 | stack.node.tryGetContext("projectResourcePrefix") || "myproject"; 60 | 61 | let l_createdResources: CFResources.CloudFormationResources = { 62 | resources: [], 63 | }; 64 | 65 | for (let l_nestedStack in props.nestedStacks) { 66 | let l_stackName: string = props.nestedStacks[l_nestedStack].stackName; 67 | 68 | let l_outputFileName: string = ""; 69 | if ( 70 | props.nestedStacks[l_nestedStack].type == "GENERATED_CDK_STACK" || 71 | props.nestedStacks[l_nestedStack].type == "GENERATED_RDK_STACK" 72 | ) { 73 | l_outputFileName = `${props.nestedStacks[l_nestedStack].stackName}.template.json`; 74 | } else { 75 | l_outputFileName = `${props.nestedStacks[l_nestedStack].templateFile}`; 76 | } 77 | 
let l_timeoutInMinutes: number = 78 | props.nestedStacks[l_nestedStack].timeoutInMinutes || 30; 79 | let templateUrl: string = `https://${projectResourcePrefix}-artifacts-codebuild.s3.amazonaws.com/${buildGuid}/${l_outputFileName}`; 80 | 81 | let l_param = props.nestedStacks[l_nestedStack].params || {}; 82 | 83 | let l_stack = new cf.CfnStack(stack, l_stackName, { 84 | templateUrl: templateUrl, 85 | timeoutInMinutes: l_timeoutInMinutes, 86 | parameters: l_param, 87 | }); 88 | 89 | let l_purpose: string = "DevSecOps Orchestration Stack"; 90 | Tags.of(l_stack).add("Name", l_stackName); 91 | Tags.of(l_stack).add("Project", props.projectFriendlyName); 92 | Tags.of(l_stack).add("Purpose", l_purpose); 93 | Tags.of(l_stack).add("Environment", props.environmentFriendlyName); 94 | 95 | let l_newResource: CFResources.CloudFormationResource = { 96 | resourceName: l_stackName, 97 | dependsOn: props.nestedStacks[l_nestedStack].dependsOn || undefined, 98 | resource: l_stack, 99 | }; 100 | l_createdResources.resources?.push(l_newResource); 101 | } 102 | 103 | //With all stacks created, now add in any dependancies 104 | for (let l_resource in l_createdResources.resources) { 105 | let l_resourceName = l_createdResources.resources[l_resource].resourceName; 106 | 107 | if (l_createdResources.resources[l_resource].dependsOn || "" !== "") { 108 | //Do a lookup to find the dependant resource: 109 | let l_dependantResource = stack.node.findChild(l_resourceName); 110 | let l_dependantOnResource = stack.node.findChild( 111 | l_createdResources.resources[l_resource].dependsOn || "" 112 | ); 113 | l_dependantResource.node.addDependency(l_dependantOnResource); 114 | console.log( 115 | ` - dependancy added on ${l_resourceName} to ${l_createdResources.resources[l_resource].dependsOn}` 116 | ); 117 | } 118 | } 119 | 120 | return { 121 | stack, 122 | l_createdResources, 123 | }; 124 | } 125 | -------------------------------------------------------------------------------- /code/ts-cdk/src/stackset_creator/lib/cdk_stackset_creator.ts: -------------------------------------------------------------------------------- 1 | import { 2 | Stack, 3 | StackProps, 4 | CfnResource, 5 | Tags, 6 | } from "aws-cdk-lib"; 7 | import * as cf from "aws-cdk-lib/aws-cloudformation"; 8 | import { Construct } from "constructs"; 9 | import { DefaultStackSynthesizer } from 'aws-cdk-lib'; 10 | 11 | interface StackSetCreatorProps extends StackProps { 12 | stackSetName: string; 13 | stackSetTemplateFile: string, 14 | stackDescription: string; 15 | retainStacksOnAccountRemoval?: boolean; 16 | stackParams?: cf.CfnStackSet.ParameterProperty[], 17 | stackInstanceGroups?: StackInstances.StackInstanceGroups, 18 | environmentType: string, 19 | environmentFriendlyName: string, 20 | projectFriendlyName: string 21 | synthesizer: DefaultStackSynthesizer 22 | } 23 | 24 | export module StackInstances { 25 | export class StackInstanceGroups { 26 | groups?: cf.CfnStackSet.StackInstancesProperty[]; 27 | } 28 | } 29 | 30 | export async function Cdk_StackSet_Creator(scope: Construct, id: string, props: StackSetCreatorProps) { 31 | const stack = new Stack(scope, id, props); 32 | 33 | const buildGuid = stack.node.tryGetContext('buildGuid') || '123456789'; 34 | const projectResourcePrefix = stack.node.tryGetContext('projectResourcePrefix') || 'myproject'; 35 | 36 | let templateUrl: string = `https://${projectResourcePrefix}-artifacts-codebuild.s3.amazonaws.com/${buildGuid}/${props.stackSetTemplateFile}`; 37 | 38 | let l_stackInstanceGroups: 
cf.CfnStackSet.StackInstancesProperty[] = []; 39 | console.log(` - stack instance '${props.stackSetName}' deployment groups`); 40 | //For each Deployment Group 41 | for (let l_groups in props.stackInstanceGroups!.groups!) { 42 | console.log(props.stackInstanceGroups!.groups![l_groups]); 43 | l_stackInstanceGroups.push(props.stackInstanceGroups!.groups![l_groups]); 44 | } 45 | 46 | // var instance = new StackInstances(); 47 | let l_stackset = new cf.CfnStackSet(stack, props.stackSetName, { 48 | templateUrl: templateUrl, 49 | stackSetName: props.stackSetName, 50 | description: props.stackDescription, 51 | permissionModel: "SERVICE_MANAGED", 52 | callAs: "DELEGATED_ADMIN", 53 | stackInstancesGroup: l_stackInstanceGroups, 54 | autoDeployment: { 55 | enabled: true, 56 | retainStacksOnAccountRemoval: props.retainStacksOnAccountRemoval 57 | }, 58 | capabilities: ["CAPABILITY_IAM", "CAPABILITY_NAMED_IAM"], 59 | parameters: props.stackParams, 60 | operationPreferences: { 61 | failureToleranceCount: 2, 62 | maxConcurrentCount: 10 63 | } 64 | }); 65 | let l_purpose: string = "DevSecOps Orchestration Stack"; 66 | Tags.of(l_stackset).add('Name', props.stackSetName); 67 | Tags.of(l_stackset).add('Project', props.projectFriendlyName); 68 | Tags.of(l_stackset).add('Purpose', l_purpose); 69 | Tags.of(l_stackset).add('Environment', props.environmentFriendlyName); 70 | 71 | return { 72 | stack, 73 | l_stackset, 74 | }; 75 | } 76 | -------------------------------------------------------------------------------- /code/ts-cdk/src/stackset_creator/lib/cfModels.ts: -------------------------------------------------------------------------------- 1 | export module CFEnvironmentParameters { 2 | export class CFParams { 3 | env: CFEnvParameterProperty[]; 4 | } 5 | export class CFEnvParameterProperty { 6 | environmentType: string; 7 | environmentFriendlyName: string; 8 | retainStacksOnAccountRemoval: string; 9 | params: CFParameterProperty[]; 10 | } 11 | export class CFParameterProperty { 12 | parameterUniqueKey: string; 13 | parameterKey: string; 14 | parameterValue: string; 15 | } 16 | export function ConvertCFParameterListToRecords(p_params: CFParameterProperty[]) 17 | { 18 | type cfParam = Record<string, string>; 19 | let l_params: cfParam = {}; 20 | for(let l_record in p_params){ 21 | console.log(`param is: ${p_params[l_record].parameterKey}`); 22 | l_params[p_params[l_record].parameterKey] = p_params[l_record].parameterValue; 23 | } 24 | return l_params; 25 | } 26 | } 27 | -------------------------------------------------------------------------------- /code/ts-cdk/src/stackset_creator/lib/ddbClient.ts: -------------------------------------------------------------------------------- 1 | import * as cdk from 'aws-cdk-lib'; 2 | import { DynamoDBClient } from "@aws-sdk/client-dynamodb"; 3 | 4 | // Create an Amazon DynamoDB service client object. 
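// A named AWS CLI profile can optionally be supplied through CDK context (read below as 'awsprofile').
// Assumed local usage sketch: `npx cdk synth --context awsprofile=my-dev-profile` (profile name is illustrative).
// Without it, the client falls back to the default credential provider chain.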
5 | const {fromIni} = require("@aws-sdk/credential-provider-ini"); 6 | 7 | let app = new cdk.App(); 8 | const l_profile = app.node.tryGetContext('awsprofile') || ''; 9 | 10 | var ddbClient :DynamoDBClient; 11 | 12 | if(l_profile != ""){ 13 | ddbClient = new DynamoDBClient({ 14 | credentials: fromIni({profile: l_profile}) 15 | }); 16 | } else { 17 | ddbClient = new DynamoDBClient({}); 18 | } 19 | 20 | export { ddbClient }; -------------------------------------------------------------------------------- /code/ts-cdk/src/stackset_creator/lib/ddbDocClient.ts: -------------------------------------------------------------------------------- 1 | import { DynamoDBDocumentClient} from "@aws-sdk/lib-dynamodb"; 2 | import {ddbClient} from "./ddbClient"; 3 | // Set the AWS Region. 4 | const REGION = "REGION"; //e.g. "us-east-1" 5 | 6 | const marshallOptions = { 7 | // Whether to automatically convert empty strings, blobs, and sets to `null`. 8 | convertEmptyValues: false, // false, by default. 9 | // Whether to remove undefined values while marshalling. 10 | removeUndefinedValues: false, // false, by default. 11 | // Whether to convert typeof object to map attribute. 12 | convertClassInstanceToMap: false, // false, by default. 13 | }; 14 | 15 | const unmarshallOptions = { 16 | // Whether to return numbers as a string instead of converting them to native JavaScript numbers. 17 | wrapNumbers: false, // false, by default. 18 | }; 19 | 20 | const translateConfig = { marshallOptions, unmarshallOptions }; 21 | 22 | // Create the DynamoDB Document client. 23 | const ddbDocClient: any = DynamoDBDocumentClient.from(ddbClient, translateConfig); 24 | 25 | export { ddbDocClient }; -------------------------------------------------------------------------------- /code/ts-cdk/src/stackset_creator/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "cdk", 3 | "version": "2.0.0", 4 | "bin": { 5 | "cdk": "bin/cdk.js" 6 | }, 7 | "scripts": { 8 | "build": "tsc", 9 | "watch": "tsc -w", 10 | "test": "jest", 11 | "cdk": "cdk" 12 | }, 13 | "devDependencies": { 14 | "@types/jest": "^27.5.2", 15 | "@types/node": "10.17.27", 16 | "@types/prettier": "2.6.0", 17 | "aws-cdk": "2.33.0", 18 | "jest": "^27.5.1", 19 | "ts-jest": "^27.1.4", 20 | "ts-node": "^10.8.1", 21 | "typescript": "~3.9.7" 22 | }, 23 | "dependencies": { 24 | "aws-cdk-lib": "2.34.2", 25 | "constructs": "^10.0.0", 26 | "source-map-support": "^0.5.16", 27 | "@aws-sdk/types": "^3.3.0", 28 | "@aws-sdk/client-dynamodb": "^3.3.0", 29 | "@aws-sdk/lib-dynamodb": "^3.3.0", 30 | "@aws-sdk/credential-provider-ini": "^3.3.0" 31 | } 32 | } 33 | -------------------------------------------------------------------------------- /code/ts-cdk/src/stackset_creator/test/cdk.test.ts: -------------------------------------------------------------------------------- 1 | test('Empty Stack', () => { 2 | }); -------------------------------------------------------------------------------- /code/ts-cdk/src/stackset_creator/tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | "target": "ES2018", 4 | "module": "commonjs", 5 | "lib": [ 6 | "es2018", 7 | "dom" 8 | ], 9 | "declaration": true, 10 | "strict": true, 11 | "noImplicitAny": true, 12 | "strictNullChecks": true, 13 | "noImplicitThis": true, 14 | "alwaysStrict": true, 15 | "noUnusedLocals": false, 16 | "noUnusedParameters": false, 17 | "noImplicitReturns": true, 18 | "noFallthroughCasesInSwitch": false, 
19 | "inlineSourceMap": true, 20 | "inlineSources": true, 21 | "experimentalDecorators": true, 22 | "strictPropertyInitialization": false, 23 | "typeRoots": [ 24 | "./node_modules/@types" 25 | ] 26 | }, 27 | "exclude": [ 28 | "node_modules", 29 | "cdk.out" 30 | ] 31 | } 32 | -------------------------------------------------------------------------------- /config/Accounts.json: -------------------------------------------------------------------------------- 1 | [{ 2 | "accountId": "000000000000", 3 | "defaultRegion": "us-east-1", 4 | "deploymentRegions": [ 5 | "us-east-1" 6 | ] 7 | } 8 | ] 9 | -------------------------------------------------------------------------------- /config/DeploymentGroups.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "groupCode": "BASELINE-ORG-UNITS-PROD", 4 | "targetRegions": [ 5 | { 6 | "orgUnitIds": [ 7 | "ou-XXXX-XXXXXXX1", 8 | "ou-XXXX-XXXXXXX2", 9 | "ou-XXXX-XXXXXXX3" 10 | ] 11 | } 12 | ] 13 | }, 14 | { 15 | "groupCode": "TEST-ORG-UNITS", 16 | "targetRegions": [ 17 | { 18 | "orgUnitIds": [ 19 | "ou-XXXX-XXXXXXX4" 20 | ] 21 | } 22 | ] 23 | }, 24 | { 25 | "groupCode": "TEST-ACCOUNTDEPLOY-ONLY", 26 | "targetRegions": [ 27 | { 28 | "accountIds": [ 29 | "000000000000" 30 | ], 31 | "accountFilterType": "INTERSECTION" 32 | } 33 | ] 34 | }, 35 | { 36 | "groupCode": "TEST-ACCOUNT-AND-ORGUNIT", 37 | "targetRegions": [ 38 | { 39 | "orgUnitIds": [ 40 | "ou-XXXX-XXXXXXX4" 41 | ], 42 | "accountIds": [ 43 | "000000000000" 44 | ], 45 | "accountFilterType": "UNION" 46 | } 47 | ] 48 | }, 49 | { 50 | "groupCode": "TEST-ORGUNIT-MINUS-ACCOUNT", 51 | "targetRegions": [ 52 | { 53 | "orgUnitIds": [ 54 | "ou-XXXX-XXXXXXX4" 55 | ], 56 | "accountIds": [ 57 | "000000000000" 58 | ], 59 | "accountFilterType": "DIFFERENCE" 60 | } 61 | ] 62 | } 63 | ] -------------------------------------------------------------------------------- /config/GlobalParams.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "object": "globalParameters", 4 | "env": [ 5 | { 6 | "environmentType": "test", 7 | "environmentFriendlyName": "Test Release", 8 | "defaultDeploymentGroup": "TEST-ORG-UNITS", 9 | "defaultRetainStacksOnAccountRemoval": "false", 10 | "globalparams": { 11 | "BuildGuid": ">>PLACEHOLDER_BUILD_GUID<<", 12 | "ProjectFriendlyName": "PROJECT-FRIENDLY-NAME", 13 | "ProjectResourcePrefix": "PROJECT-RESOURCE-PREFIX" 14 | } 15 | }, 16 | { 17 | "environmentType": "prod", 18 | "environmentFriendlyName": "Production Release", 19 | "defaultDeploymentGroup": "BASELINE-ORG-UNITS-PROD", 20 | "defaultRetainStacksOnAccountRemoval": "true", 21 | "globalparams": { 22 | "BuildGuid": ">>PLACEHOLDER_BUILD_GUID<<", 23 | "ProjectFriendlyName": "PROJECT-FRIENDLY-NAME", 24 | "ProjectResourcePrefix": "PROJECT-RESOURCE-PREFIX" 25 | } 26 | } 27 | ] 28 | } 29 | ] 30 | -------------------------------------------------------------------------------- /config/OrgUnits.json: -------------------------------------------------------------------------------- 1 | [{ 2 | "orgUnitId": "ou-XXXX-XXXXXXX1", 3 | "defaultRegion": "ap-southeast-2", 4 | "deploymentRegions": [ 5 | "ap-southeast-2" 6 | ] 7 | }, 8 | { 9 | "orgUnitId": "ou-XXXX-XXXXXXX2", 10 | "defaultRegion": "us-east-2", 11 | "deploymentRegions": [ 12 | "us-east-2" 13 | ] 14 | }, 15 | { 16 | "orgUnitId": "ou-XXXX-XXXXXXX3", 17 | "defaultRegion": "us-east-2", 18 | "deploymentRegions": [ 19 | "us-east-2", 20 | "ap-southeast-2" 21 | ] 22 | }, 23 | { 24 | "orgUnitId": 
"ou-XXXX-XXXXXXX4", 25 | "defaultRegion": "us-east-1", 26 | "deploymentRegions": [ 27 | "us-east-1", 28 | "us-east-2" 29 | ] 30 | }, 31 | { 32 | "orgUnitId": "ou-XXXX-XXXXXXX5", 33 | "defaultRegion": "eu-central-1", 34 | "deploymentRegions": [ 35 | "eu-central-1" 36 | ] 37 | } 38 | ] 39 | -------------------------------------------------------------------------------- /config/Stacks.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "name": "S3StackSet", 4 | "type": "stackset", 5 | "description": "This is a sample StackSet for provisioning a test S3 Bucket in AWS child accounts", 6 | "templateFile": "S3Stack.yaml", 7 | "retainStacksOnAccountRemoval": "false", 8 | "enabled": "true", 9 | "env": [ 10 | { 11 | "environmentType": "prod", 12 | "localparams": { 13 | "ExampleParam1": "Hello", 14 | "ExampleParam2": "Prod World" 15 | } 16 | }, 17 | { 18 | "environmentType": "test", 19 | "overrideDeploymentGroup": "OVERRIDE-EXAMPLE-UNITS", 20 | "localparams": { 21 | "ExampleParam1": "Hello", 22 | "ExampleParam2": "Test World" 23 | } 24 | } 25 | ] 26 | }, 27 | { 28 | "name": "IAMSelfManagedStackSetTrustRolesStackSet", 29 | "type": "stackset", 30 | "description": "IAM Self-Managed StackSet Trust Roles for the DevSecOps Project", 31 | "templateFile": "IAMSelfManagedStackSetTrustRolesStack.yaml", 32 | "retainStacksOnAccountRemoval": "true", 33 | "env": [ 34 | { 35 | "environmentType": "prod", 36 | "overrideDeploymentRegionUseDefaultOnly": "true", 37 | "localparams": { 38 | "ManagementAccountAWSCloudFormationStackSetAdministrationRoleArn": "REPLACE WITH ARN OF STACKSETADMIN ACCOUNT CREATED in (/cf/setup/05_orgs_stackset_selfmanaged_roles.yaml) CF output in Management Account", 39 | "ManagementAccountLambdaVPCStackSetInstanceExecutionRoleArn": "REPLACE WITH ARN OF LAMBDA ACCOUNT CREATED in (/cf/setup/05_orgs_stackset_selfmanaged_roles.yaml) CF output in Management Account" 40 | } 41 | }, 42 | { 43 | "environmentType": "test", 44 | "overrideDeploymentRegionUseDefaultOnly": "true", 45 | "localparams": { 46 | "ManagementAccountAWSCloudFormationStackSetAdministrationRoleArn": "REPLACE WITH ARN OF STACKSETADMIN ACCOUNT CREATED in (/cf/setup/05_orgs_stackset_selfmanaged_roles.yaml) CF output in Management Account", 47 | "ManagementAccountLambdaVPCStackSetInstanceExecutionRoleArn": "REPLACE WITH ARN OF LAMBDA ACCOUNT CREATED in (/cf/setup/05_orgs_stackset_selfmanaged_roles.yaml) CF output in Management Account" 48 | } 49 | } 50 | ] 51 | }, 52 | { 53 | "name": "CrossAccountRDKConfigRoleStackSet", 54 | "type": "stackset", 55 | "description": "AWS Config IAM Role for cross-account rules deployment using RDK for the DevSecOps Project", 56 | "templateFile": "RDKConfigCrossAccountRole.yaml", 57 | "env": [ 58 | { 59 | "environmentType": "prod", 60 | "localparams": { 61 | "AWSDeploymentAccountNumber": "AWSACCNUMBER-DEPLOYMENTACCOUNT" 62 | } 63 | }, 64 | { 65 | "environmentType": "test", 66 | "localparams": { 67 | "AWSDeploymentAccountNumber": "AWSACCNUMBER-DEPLOYMENTACCOUNT" 68 | } 69 | } 70 | ] 71 | }, 72 | { 73 | "name": "CustomRDKAWSConfigRulesStackSet", 74 | "type": "rdkstackset", 75 | "description": "Custom RDK AWS Config Rules StackSet - Template File references generated RDK file", 76 | "templateFile": "awsconfig-allrules-{ENV}.template.json", 77 | "dependsOn": "CrossAccountRDKConfigRoleStackSet", 78 | "env": [ 79 | { 80 | "environmentType": "prod", 81 | "localparams": { 82 | "AWSConfigRuleKMSLeastPrivilegeCMKWhitelist": "", 83 | 
"AWSConfigRuleKMSLeastPrivilegePrincipalWhitelist": "", 84 | "AWSConfigRuleKMSLeastPrivilegeExecutionRoleName": "rdk-cross-account-config-role" 85 | } 86 | }, 87 | { 88 | "environmentType": "test", 89 | "localparams": { 90 | "AWSConfigRuleKMSLeastPrivilegeCMKWhitelist": "", 91 | "AWSConfigRuleKMSLeastPrivilegePrincipalWhitelist": "", 92 | "AWSConfigRuleKMSLeastPrivilegeExecutionRoleName": "rdk-cross-account-config-role" 93 | } 94 | } 95 | ] 96 | } 97 | ] 98 | --------------------------------------------------------------------------------