├── .circleci ├── config.yml └── trigger-local-config.sh ├── .gitignore ├── Jenkinsfile ├── LICENSE ├── README.md ├── app ├── app.json └── tasks │ ├── apache.sh │ └── docker.sh ├── aws-security ├── README.md ├── controls │ └── example.rb ├── inspec.lock ├── inspec.yml └── libraries │ └── .gitkeep ├── base ├── base.json └── tasks │ ├── baseline.sh │ ├── cleanup.sh │ └── debug.sh ├── images ├── blueocean-master.png └── blueocean-non-master.png ├── master.tfvars ├── scripts ├── build.sh ├── common.sh └── tf-wrapper.sh ├── terraform.tf └── testing-defaults.tfvars /.circleci/config.yml: -------------------------------------------------------------------------------- 1 | # Simple Stacks - pipelines for small single node environments 2 | # 1. Build the base AMI (overkill for this example, should be in seperate repo) 3 | # 2. Build the app AMI from the base AMI 4 | # 3. Test the app ami (via terraform plan/apply/testinfra/destroy? test environment) 5 | # 4. terraform plan using the freshly built app AMI (production) 6 | # 5. if master branch - hold 7 | # 6. if master branch - apply 8 | 9 | version: 2 10 | jobs: 11 | xbuild: 12 | docker: 13 | - image: simonmcc/hashicorp-pipeline:latest 14 | steps: 15 | - run: 16 | name: Hello World 17 | command: echo "Hello World! Where's my workflow?" 18 | terraform-fmt: 19 | docker: 20 | - image: simonmcc/hashicorp-pipeline:latest 21 | steps: 22 | - checkout 23 | - run: 24 | name: terraform fmt 25 | command: terraform fmt -check=true -diff=true 26 | packer-validate: 27 | docker: 28 | - image: simonmcc/hashicorp-pipeline:latest 29 | steps: 30 | - checkout 31 | - run: 32 | name: Validate base 33 | command: packer validate ./base/base.json 34 | - run: 35 | name: Validate app 36 | command: packer validate ./app/app.json 37 | build-base: 38 | docker: 39 | - image: simonmcc/hashicorp-pipeline:latest 40 | steps: 41 | - checkout 42 | - attach_workspace: 43 | at: . 
44 | - run: 45 | name: Build base 46 | command: ./scripts/build.sh base base 47 | no_output_timeout: 30m 48 | - persist_to_workspace: 49 | root: . 50 | paths: 51 | - manifest-base.json 52 | - store_artifacts: 53 | path: manifest-base.json 54 | build-app: 55 | docker: 56 | - image: simonmcc/hashicorp-pipeline:latest 57 | steps: 58 | - checkout 59 | - attach_workspace: 60 | at: . 61 | - run: 62 | name: Build app 63 | command: ./scripts/build.sh app app base 64 | - store_artifacts: 65 | path: manifest-app.json 66 | build-test-stack: 67 | docker: 68 | - image: simonmcc/hashicorp-pipeline:latest 69 | steps: 70 | - checkout 71 | - attach_workspace: 72 | at: . 73 | - run: 74 | name: Build test stack via terraform 75 | command: ./scripts/tf-wrapper.sh -a plan 76 | - run: 77 | name: Terraform apply 78 | command: ./scripts/tf-wrapper.sh -a apply 79 | - run: 80 | name: Test the deployed stack 81 | command: | 82 | echo "TODO: Test stuff" 83 | cat output.json 84 | - persist_to_workspace: 85 | root: ./ 86 | paths: 87 | - output.json 88 | test-test-stack: 89 | docker: 90 | - image: chef/inspec:latest 91 | steps: 92 | - checkout 93 | - attach_workspace: 94 | at: . 95 | - run: 96 | name: Test the deployed stack 97 | command: | 98 | cat output.json 99 | mkdir aws-security/files || true 100 | mkdir /tmp/test-results || true 101 | cp output.json aws-security/files/output.json 102 | inspec detect -t aws:// 103 | inspec exec aws-security --reporter=cli junit:/tmp/test-results/inspec-junit.xml -t aws://us-east-1 104 | - store_test_results: 105 | path: /tmp/test-results 106 | destroy-test-stack: 107 | docker: 108 | - image: simonmcc/hashicorp-pipeline:latest 109 | steps: 110 | - checkout 111 | - attach_workspace: 112 | at: . 
113 | - run: 114 | name: Destroy Test Stack 115 | command: ./scripts/tf-wrapper.sh -a destroy 116 | terraform-plan: 117 | docker: 118 | - image: simonmcc/hashicorp-pipeline:latest 119 | steps: 120 | - checkout 121 | - run: 122 | name: Terraform plan 123 | command: ./scripts/tf-wrapper.sh -a plan 124 | - persist_to_workspace: 125 | root: ./ 126 | paths: 127 | - plan/plan.out 128 | - .terraform 129 | terraform-apply: 130 | docker: 131 | - image: simonmcc/hashicorp-pipeline:latest 132 | steps: 133 | - checkout 134 | - attach_workspace: 135 | at: . 136 | - run: 137 | name: Terraform apply 138 | command: ./scripts/tf-wrapper.sh -a apply 139 | 140 | workflows: 141 | version: 2 142 | packer-terraform: 143 | jobs: 144 | - packer-validate 145 | - terraform-fmt 146 | - build-base: 147 | requires: 148 | - packer-validate 149 | - build-app: 150 | requires: 151 | - packer-validate 152 | - build-base 153 | - build-test-stack: 154 | filters: 155 | branches: 156 | ignore: master 157 | requires: 158 | - build-app 159 | - terraform-fmt 160 | - test-test-stack: 161 | filters: 162 | branches: 163 | ignore: master 164 | requires: 165 | - build-test-stack 166 | - destroy-test-stack: 167 | filters: 168 | branches: 169 | ignore: master 170 | requires: 171 | - test-test-stack 172 | - terraform-plan: 173 | filters: 174 | branches: 175 | only: master 176 | requires: 177 | - build-app 178 | - hold: 179 | filters: 180 | branches: 181 | only: master 182 | type: approval 183 | requires: 184 | - terraform-plan 185 | - terraform-apply: 186 | filters: 187 | branches: 188 | only: master 189 | requires: 190 | - terraform-plan 191 | - hold 192 | -------------------------------------------------------------------------------- /.circleci/trigger-local-config.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | CURRENT_SHA1=$(git rev-parse HEAD) 4 | 5 | # $repo/tree/$branch?circle-token=$CIRCLE_TOKEN" 6 | 7 | curl \ 8 | --user 
${CIRCLE_TOKEN}: \ 9 | --request POST \ 10 | --form revision=${CURRENT_SHA1} \ 11 | --form config=@config.yml \ 12 | --form notify=false \ 13 | https://circleci.com/api/v1.1/project/github/simonmcc/circleci-packer-1/tree/master 14 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | backend_config.tf 2 | plan.out 3 | .terraform/ 4 | aws-security/files/ 5 | manifest-*.json 6 | output.json 7 | -------------------------------------------------------------------------------- /Jenkinsfile: -------------------------------------------------------------------------------- 1 | // Declarative Jenkinsfile Pipeline for a Hashicorp packer/terraform AWS simple ec2 stack 2 | // (n.b. use of env.BRANCH_NAME to filter stages based on branch means this needs to be part 3 | // of a Multibranch Project in Jenkins - this fits with the model of branches/PR's being 4 | // tested & master being deployed) 5 | pipeline { 6 | agent any 7 | environment { 8 | AWS_DEFAULT_REGION = 'us-east-1' 9 | } 10 | 11 | stages { 12 | stage('Validate & lint') { 13 | parallel { 14 | stage('packer validate') { 15 | agent { 16 | docker { 17 | image 'simonmcc/hashicorp-pipeline:latest' 18 | alwaysPull true 19 | } 20 | } 21 | steps { 22 | checkout scm 23 | wrap([$class: 'AnsiColorBuildWrapper', 'colorMapName': 'xterm']) { 24 | sh "packer validate ./base/base.json" 25 | sh "AMI_BASE=ami-fakefake packer validate app/app.json" 26 | } 27 | } 28 | } 29 | stage('terraform fmt') { 30 | agent { docker { image 'simonmcc/hashicorp-pipeline:latest' } } 31 | steps { 32 | checkout scm 33 | sh "terraform fmt -check=true -diff=true" 34 | } 35 | } 36 | } 37 | } 38 | stage('build AMIs') { 39 | agent { docker { image 'simonmcc/hashicorp-pipeline:latest' } } 40 | steps { 41 | checkout scm 42 | withCredentials([[$class: 'AmazonWebServicesCredentialsBinding', 43 | credentialsId: 'demo-aws-creds', 44 | 
accessKeyVariable: 'AWS_ACCESS_KEY_ID', 45 | secretKeyVariable: 'AWS_SECRET_ACCESS_KEY' ]]) { 46 | wrap([$class: 'AnsiColorBuildWrapper', 'colorMapName': 'xterm']) { 47 | sh "./scripts/build.sh base base" 48 | sh "./scripts/build.sh app app" 49 | } 50 | } 51 | } 52 | } 53 | 54 | stage('build test stack') { 55 | agent { docker { image 'simonmcc/hashicorp-pipeline:latest' } } 56 | when { 57 | expression { env.BRANCH_NAME != 'master' } 58 | } 59 | steps { 60 | checkout scm 61 | withCredentials([[$class: 'AmazonWebServicesCredentialsBinding', 62 | credentialsId: 'demo-aws-creds', 63 | accessKeyVariable: 'AWS_ACCESS_KEY_ID', 64 | secretKeyVariable: 'AWS_SECRET_ACCESS_KEY' ]]) { 65 | wrap([$class: 'AnsiColorBuildWrapper', 'colorMapName': 'xterm']) { 66 | sh "./scripts/tf-wrapper.sh -a plan" 67 | sh "./scripts/tf-wrapper.sh -a apply" 68 | sh "cat output.json" 69 | stash name: 'terraform_output', includes: '**/output.json' 70 | } 71 | } 72 | } 73 | post { 74 | failure { 75 | withCredentials([[$class: 'AmazonWebServicesCredentialsBinding', 76 | credentialsId: 'demo-aws-creds', 77 | accessKeyVariable: 'AWS_ACCESS_KEY_ID', 78 | secretKeyVariable: 'AWS_SECRET_ACCESS_KEY' ]]) { 79 | wrap([$class: 'AnsiColorBuildWrapper', 'colorMapName': 'xterm']) { 80 | sh "./scripts/tf-wrapper.sh -a destroy" 81 | } 82 | } 83 | } 84 | } 85 | } 86 | stage('test test stack') { 87 | agent { 88 | docker { 89 | image 'chef/inspec:latest' 90 | args "--entrypoint=''" 91 | } 92 | } 93 | when { 94 | expression { env.BRANCH_NAME != 'master' } 95 | } 96 | steps { 97 | checkout scm 98 | withCredentials([[$class: 'AmazonWebServicesCredentialsBinding', 99 | credentialsId: 'demo-aws-creds', 100 | accessKeyVariable: 'AWS_ACCESS_KEY_ID', 101 | secretKeyVariable: 'AWS_SECRET_ACCESS_KEY' ]]) { 102 | wrap([$class: 'AnsiColorBuildWrapper', 'colorMapName': 'xterm']) { 103 | unstash 'terraform_output' 104 | sh "cat output.json" 105 | sh "mkdir aws-security/files || true" 106 | sh "mkdir test-results || true" 107 | sh 
"cp output.json aws-security/files/output.json" 108 | sh "inspec exec aws-security --reporter=cli junit:test-results/inspec-junit.xml -t aws://us-east-1" 109 | sh "touch test-results/inspec-junit.xml" 110 | junit 'test-results/*.xml' 111 | } 112 | } 113 | } 114 | } 115 | stage('destroy test stack') { 116 | agent { docker { image 'simonmcc/hashicorp-pipeline:latest' } } 117 | when { 118 | expression { env.BRANCH_NAME != 'master' } 119 | } 120 | steps { 121 | checkout scm 122 | withCredentials([[$class: 'AmazonWebServicesCredentialsBinding', 123 | credentialsId: 'demo-aws-creds', 124 | accessKeyVariable: 'AWS_ACCESS_KEY_ID', 125 | secretKeyVariable: 'AWS_SECRET_ACCESS_KEY' ]]) { 126 | wrap([$class: 'AnsiColorBuildWrapper', 'colorMapName': 'xterm']) { 127 | sh "./scripts/tf-wrapper.sh -a destroy" 128 | } 129 | } 130 | } 131 | } 132 | stage('terraform plan - master') { 133 | agent { docker { image 'simonmcc/hashicorp-pipeline:latest' } } 134 | when { 135 | expression { env.BRANCH_NAME == 'master' } 136 | } 137 | steps { 138 | checkout scm 139 | withCredentials([[$class: 'AmazonWebServicesCredentialsBinding', 140 | credentialsId: 'demo-aws-creds', 141 | accessKeyVariable: 'AWS_ACCESS_KEY_ID', 142 | secretKeyVariable: 'AWS_SECRET_ACCESS_KEY' ]]) { 143 | wrap([$class: 'AnsiColorBuildWrapper', 'colorMapName': 'xterm']) { 144 | sh "./scripts/tf-wrapper.sh -a plan" 145 | stash name: 'terraform_plan', includes: 'plan/plan.out,.terraform/**' 146 | } 147 | } 148 | } 149 | } 150 | stage('Manual Approval') { 151 | when { 152 | expression { env.BRANCH_NAME == 'master' } 153 | } 154 | steps { 155 | input 'Do you approve the apply?' 
156 | } 157 | } 158 | stage('terraform apply - master') { 159 | agent { docker { image 'simonmcc/hashicorp-pipeline:latest' } } 160 | when { 161 | expression { env.BRANCH_NAME == 'master' } 162 | } 163 | steps { 164 | checkout scm 165 | withCredentials([[$class: 'AmazonWebServicesCredentialsBinding', 166 | credentialsId: 'demo-aws-creds', 167 | accessKeyVariable: 'AWS_ACCESS_KEY_ID', 168 | secretKeyVariable: 'AWS_SECRET_ACCESS_KEY' ]]) { 169 | wrap([$class: 'AnsiColorBuildWrapper', 'colorMapName': 'xterm']) { 170 | unstash 'terraform_plan' 171 | sh "./scripts/tf-wrapper.sh -a apply" 172 | } 173 | } 174 | } 175 | } 176 | } 177 | } 178 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 
25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. 
For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. 
If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. 
You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. 
You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "[]" 182 | replaced with your own identifying information. 
(Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright [yyyy] [name of copyright owner] 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 202 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Real World AWS Packer & Terraform and Inspec Pipeline 2 | 3 | This is a working Jenkinsfile Multibranch Pipeline for building AWS AMI's images & deploying EC2 instances based on the AMI with terraform (in both a per-branch test stack, destroyed by the pipeline, and maintaining production from the master branch). 
4 | 5 | ![](images/blueocean-non-master.png) 6 | ![](images/blueocean-master.png) 7 | 8 | The pipeline uses 2 docker images to run jobs, most of the work is done in [hashicorp-pipeline](https://hub.docker.com/r/simonmcc/hashicorp-pipeline/), which contains [packer](https://www.packer.io), [terraform](https://www.terraform.io), [aws cli](https://aws.amazon.com/cli/) & some other [needed binaries](https://github.com/simonmcc/hashicorp-pipeline/blob/master/Dockerfile#L3-L4) (jq, perl), the other docker image used is [chef/inspec](https://hub.docker.com/r/chef/inspec/), which is used to test the stack built during non-master pipeline executions. 9 | 10 | ### Features 11 | * `terraform fmt -check=true -diff=true` used to check terraform config files 12 | * `packer validate` used to validate packer config files 13 | * `base` and `application` AMI's built and tagged with SHA1 of the `base/base.json` and `app/app.json`, to help prevent unnecessary AMI builds 14 | * Automatically configures terraform remote state (S3 & DynamoDB) 15 | * terraform workspaces used to store per-branch terraform state 16 | * parameterized terraform build using per-branch tfvars 17 | * `chef/inspec` used to validate AWS resources 18 | * `terraform plan` with stash of plan, human approval, `terraform apply` workflow for master/production changes 19 | 20 | ### What next to use this in your own project 21 | * Add `demo-aws-creds` credential set (uses [AWS Credentials](https://plugins.jenkins.io/aws-credentials) plugin) 22 | * Update terraform to actually deploy an EC2 instance (for demo purposes, this only builds a VPC, so zero cost & quick cycle time for pipeline experimentation, trivial to add an EC2 instance) 23 | * Use AWS Auto Scaling group to replace running EC2 instances with EC2 instances built from the master branch `app` AMI 24 | * Use AMI tags to ensure `app` AMI matches the branch (Use the SHA1 of `app/app.json` to search for the AMI in terraform, requires some tfvars 
templating/injection) 25 | * Extend `chef/inspec` stage to test AWS EC2 26 | * Add a Selenium or other web test stage 27 | 28 | If you want a simple dockerized Jenkins setup to host this, I used [Jenkins 201](https://github.com/jenkins201/jenkins-container), there's also example Job DSL [here](https://github.com/jenkins201/jenkins-container/blob/master/jobs/packer_terraform.groovy). 29 | 30 | ### Further info 31 | Some of the scripts in this pipeline ([build.sh](scripts/build.sh) & [common.sh](scripts/common.sh)) are based on an incomplete [packer/AMI](https://github.com/CircleCI-Public/circleci-packer) example from [CircleCI]( 32 | https://circleci.com/blog/how-to-build-immutable-infrastructure-with-packer-and-circleci-workflows/). 33 | 34 | 35 | -------------------------------------------------------------------------------- /app/app.json: -------------------------------------------------------------------------------- 1 | { 2 | "_readme": [ 3 | "The AMI used is generated by the packer template base/base.json", 4 | "" 5 | ], 6 | "variables": { 7 | "ami_name": "app", 8 | "ami_base": "{{env `AMI_BASE`}}", 9 | "ami_sha": "{{env `SHA`}}", 10 | "aws_access_key": "{{env `AWS_ACCESS_KEY`}}", 11 | "aws_secret_key": "{{env `AWS_SECRET_ACCESS_KEY`}}" 12 | }, 13 | "builders": [ 14 | { 15 | "ami_description": "{{user `ami_name`}} AMI", 16 | "ami_name": "{{user `ami_name`}} {{timestamp}}", 17 | "ami_regions": [ 18 | "us-east-1" 19 | ], 20 | "instance_type": "t1.micro", 21 | "region": "us-east-1", 22 | "run_tags": { 23 | "ami-create": "{{user `ami_name`}}" 24 | }, 25 | "source_ami": "{{user `ami_base`}}", 26 | "ssh_username": "ubuntu", 27 | "subnet_id": "", 28 | "tags": { 29 | "OS_Name": "Ubuntu", 30 | "OS_Version": "16.04", 31 | "SHA": "{{user `ami_sha`}}", 32 | "AMI": "{{user `ami_name`}}" 33 | }, 34 | "type": "amazon-ebs", 35 | "vpc_id": "" 36 | } 37 | ], 38 | "post-processors": [ 39 | { 40 | "output": "manifest-app.json", 41 | "strip_path": true, 42 | "type": "manifest" 43 
| } 44 | ], 45 | "provisioners": [ 46 | { 47 | "inline": [ 48 | "while [ ! -f /var/lib/cloud/instance/boot-finished ]; do echo 'Waiting for cloud-init...'; sleep 1; done" 49 | ], 50 | "type": "shell" 51 | }, 52 | { 53 | "execute_command": "chmod +x {{ .Path }}; {{ .Vars }} sudo -E {{ .Path }}", 54 | "scripts": [ 55 | "./app/tasks/apache.sh" 56 | ], 57 | "type": "shell" 58 | }, 59 | { 60 | "execute_command": "chmod +x {{ .Path }}; {{ .Vars }} sudo -E {{ .Path }}", 61 | "scripts": [ 62 | "./base/tasks/cleanup.sh", 63 | "./base/tasks/debug.sh" 64 | ], 65 | "type": "shell" 66 | } 67 | ] 68 | } 69 | -------------------------------------------------------------------------------- /app/tasks/apache.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | set -e 3 | 4 | echo '---- install Apache' 5 | 6 | DEBIAN_FRONTEND=noninteractive apt-get -y update 7 | DEBIAN_FRONTEND=noninteractive apt-get -y install apache2 8 | 9 | cat > /var/www/html/index.html <> /etc/skel/.bashrc' 8 | 9 | echo "---- make Apt non interactive" 10 | sudo /bin/bash -c 'echo "force-confnew" >> /etc/dpkg/dpkg.cfg' 11 | #sudo /bin/bash -c 'cat /tmp/dpkg.cfg.update >> /etc/sudoers.d/env_keep' 12 | #sudo cp /tmp/apt.conf.update /etc/apt/apt.conf 13 | 14 | echo "---- Update and Upgrade" 15 | sudo DEBIAN_FRONTEND=noninteractive apt-get -y update 16 | sudo DEBIAN_FRONTEND=noninteractive apt-get -y upgrade 17 | sudo DEBIAN_FRONTEND=noninteractive apt-get -y install apt-transport-https 18 | sudo DEBIAN_FRONTEND=noninteractive apt-get -y install curl unzip zip jq 19 | -------------------------------------------------------------------------------- /base/tasks/cleanup.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | set -e 3 | 4 | echo "---- cleanup" 5 | echo Ubuntu Provision Cleanup 6 | sudo DEBIAN_FRONTEND=noninteractive apt-get -y autoremove --purge 7 | sudo DEBIAN_FRONTEND=noninteractive apt-get -y 
autoclean 8 | sudo DEBIAN_FRONTEND=noninteractive apt-get check 9 | 10 | sudo rm -rf /var/lib/apt/lists/* 11 | sudo rm -rf /tmp/* 12 | -------------------------------------------------------------------------------- /base/tasks/debug.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | set -e 3 | 4 | echo "---- debug info" 5 | uname -a 6 | cat /etc/os-release 7 | dpkg -l | grep linux- 8 | -------------------------------------------------------------------------------- /images/blueocean-master.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jenkins201/packer-terraform-cicd-aws/7885a02f91d0dab5b0353a6016e44c47c84b71d2/images/blueocean-master.png -------------------------------------------------------------------------------- /images/blueocean-non-master.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jenkins201/packer-terraform-cicd-aws/7885a02f91d0dab5b0353a6016e44c47c84b71d2/images/blueocean-non-master.png -------------------------------------------------------------------------------- /master.tfvars: -------------------------------------------------------------------------------- 1 | vpc_main_cidr = "172.18.0.0/16" 2 | vpc_dmz_cidr = "172.19.0.0/16" 3 | -------------------------------------------------------------------------------- /scripts/build.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # 3 | # Hashicorp packer/terraform simple stack build wrapper 4 | # * facilitates building base & service/app AMI (service/app depends on base) 5 | # * tag AMI's with SHA1 of the packer .json file that built it 6 | # 7 | # Wrapping a few CLI command in bash always seems like a good idea at the start. 8 | # It's not. Don't do it. Use python to wrap & possible call API's directly. 
9 | 10 | # Exit immediately if a command exits with a non-zero status 11 | set -e 12 | 13 | # debug - expand all commands 14 | # set -x 15 | 16 | # load our helper functions 17 | source scripts/common.sh 18 | 19 | # check that the tools we require are present 20 | package_check 21 | 22 | # 23 | # base.sh DIR TARGET [BASE_NAME] 24 | DIR="$1" 25 | NAME="$2" 26 | BASE_NAME="$3" 27 | if [[ -z "$DIR" ]]; then 28 | echo "please specify the directory as first runtime argument" 29 | exit 1 30 | fi 31 | if [[ -z "$NAME" ]]; then 32 | echo "please specify the name as second runtime argument" 33 | exit 1 34 | fi 35 | if [[ -z "$BASE_NAME" ]]; then 36 | echo "No base AMI given" 37 | else 38 | export BASE_BUILT=$(base_rebuilt $BASE_NAME) 39 | if [ "${BASE_BUILT}" = "true" ]; then 40 | echo "Couldn't find ${BASE_NAME} in manifest-${BASE_NAME}.json, looking up AMI via EC2 API" 41 | fi 42 | export AMI_BASE="$(get_base_ami "$BASE_BUILT" "$BASE_NAME" "$BASE_NAME")" 43 | fi 44 | 45 | export SHA=$(git ls-tree HEAD "$DIR" | cut -d" " -f3 | cut -f1) 46 | TAG_EXISTS=$(tag_exists $SHA) 47 | 48 | if [ "$TAG_EXISTS" = "false" ]; then 49 | echo "No AMI found for ${NAME} (SHA: ${SHA}), building one.." 50 | packer build ${DIR}/$NAME.json 51 | PACKER_EXIT=$? 52 | echo "Packer exit code: ${PACKER_EXIT}" 53 | else 54 | echo "AMI found for ${NAME} (SHA: ${SHA})" 55 | touch manifest-${NAME}.json 56 | fi 57 | -------------------------------------------------------------------------------- /scripts/common.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # 3 | # Wrapping a few CLI command in bash always seems like a good idea at the start. 4 | # It's not. Don't do it. Use python to wrap & possible call API's directly. 
# tag_exists SHA
# Echo "true" if an AMI tagged with the given git tree SHA already exists,
# "false" if not; exits on missing argument or AWS CLI failure.
tag_exists () {
  local SHA=$1
  if [[ -z "$SHA" ]]; then
    echo "-- ERROR: there was a problem looking up AMI by sha"
    exit 1
  fi
  # BUGFIX: with `set -e` active in callers, a failing `aws` inside a plain
  # assignment aborted before $? was ever inspected, making the error branch
  # unreachable - capture the status via if/else instead.
  if EMPTY=$(aws ec2 describe-images --filters Name=tag:SHA,Values="$SHA" --query 'Images[*]'); then
    AWS_CLI_EXIT_CODE=0
  else
    AWS_CLI_EXIT_CODE=$?
  fi
  if [[ "${AWS_CLI_EXIT_CODE}" -eq 0 ]]; then
    if [ "$EMPTY" = "[]" ]; then
      echo "false"
    else
      echo "true"
    fi
  else
    (>&2 echo "ERROR: AWS CLI error checking for existing images matching ${SHA}")
    exit 2
  fi
}

get_git_branch () {
  # output the current branch, handling detached HEAD as found in Jenkins
  # https://stackoverflow.com/questions/6059336/how-to-find-the-current-git-branch-in-detached-head-state
  local GIT_BRANCH=$(git rev-parse --abbrev-ref HEAD)

  # Jenkins will often checkout the SHA of a branch, (detached HEAD)
  if [[ "${GIT_BRANCH}" == 'HEAD' ]]; then
    # lookup branch against remotes, without network access (we may not have creds to talk to git remote)
    echo "$(git branch --remote --verbose --no-abbrev --contains | sed -Ene 's/^[^\/]*\/([^\ ]+).*$/\1/p')"
  else
    echo "${GIT_BRANCH}"
  fi
}

# base_rebuilt NAME
# "true" when a non-empty packer manifest-NAME.json exists in the workspace
# (i.e. the base AMI was rebuilt earlier in this pipeline run), else "false".
base_rebuilt () {
  local NAME=$1
  if [[ -e "manifest-$NAME.json" ]] && [[ -s "manifest-$NAME.json" ]]; then
    echo "true"
  else
    echo "false"
  fi
}

# extract_artifact_id NAME
# Pull the us-east-1 AMI id out of packer's manifest-NAME.json.
extract_artifact_id () {
  local NAME="$1"
  local AMI="$(cat manifest-$NAME.json | jq '.builds[0].artifact_id' | perl -n -e'/us-east-1:(ami-[a-z0-9]+)/ && print $1')"
  echo "${AMI}"
}

# get_base_ami BASE_BUILT DIR NAME
# Resolve the base AMI id to build on top of: when BASE_BUILT is "false",
# look up the AMI tagged with DIR's current git tree SHA via the EC2 API;
# otherwise take it from the freshly-built packer manifest.
get_base_ami () {
  local BASE_BUILT=$1
  local DIR=$2
  local NAME=$3
  if [ "$BASE_BUILT" = "false" ]; then
    EXISTING_BASE_SHA="$(git ls-tree HEAD "$DIR" | cut -d" " -f3 | cut -f1)"
    EXISTING_BASE_IMAGE=$(aws ec2 describe-images --filters Name=tag:SHA,Values="$EXISTING_BASE_SHA" --query 'Images[*]' | jq -r '.[0].ImageId')
    echo "$EXISTING_BASE_IMAGE"
  else
    BASE_AMI_US_EAST_1="$(extract_artifact_id "$NAME")"
    echo "${BASE_AMI_US_EAST_1}"
  fi
}

# Verify every CLI tool this pipeline shells out to is on PATH.
package_check () {
  # BUGFIX: `|| (echo ...; exit 1)` only exited the subshell, so a missing
  # tool never actually aborted the caller - use a { } group so `exit` works.
  command -v aws > /dev/null || { echo "aws cli must be installed"; exit 1; }
  command -v packer > /dev/null || { echo "packer must be installed"; exit 1; }
  # BUGFIX: the message said "packer" when terraform was the missing tool
  command -v terraform > /dev/null || { echo "terraform must be installed"; exit 1; }
  command -v git > /dev/null || { echo "git must be installed"; exit 1; }
  command -v jq > /dev/null || { echo "jq must be installed"; exit 1; }
  command -v perl > /dev/null || { echo "perl must be installed"; exit 1; }
}

check_terraform_version() {
  # TODO: extract from requirements.txt or something?
  TERRAFORM_REQUIRED_VERSION="v0.11.7"

  TERRAFORM_BIN=$(which terraform)
  TERRAFORM_INSTALLED_VERSION=$(${TERRAFORM_BIN} -version | awk '/^Terraform/{ print $2 }')

  if [[ "${TERRAFORM_INSTALLED_VERSION}" != "${TERRAFORM_REQUIRED_VERSION}" ]]; then
    log "ERROR: ${TERRAFORM_BIN} is reporting ${TERRAFORM_INSTALLED_VERSION}, ${TERRAFORM_REQUIRED_VERSION} required, aborting."
    exit 1
  fi
}

# Abort unless the AWS credential environment variables are all present.
check_aws_credentials () {
  # BUGFIX: `(echo ...; exit 1)` only left the subshell; use a { } group so a
  # missing variable really aborts the script.
  [[ -z "${AWS_DEFAULT_REGION}" ]] && { echo "AWS_DEFAULT_REGION must be set"; exit 1; }
  [[ -z "${AWS_ACCESS_KEY_ID}" ]] && { echo "AWS_ACCESS_KEY_ID must be set"; exit 1; }
  [[ -z "${AWS_SECRET_ACCESS_KEY}" ]] && { echo "AWS_SECRET_ACCESS_KEY must be set"; exit 1; }
  # explicit success so `set -e` callers survive the short-circuited && above
  return 0
}

generate_terraform_backend() {
  # inspired by https://github.com/hashicorp/terraform/issues/12877#issuecomment-311649591
  local PROJECT_NAME
  local ACCOUNT_ID
  local LOCATION_CONSTRAINT
  local BUCKET_NAME
  local BUCKET_EXISTS
  local TABLE_INDEX
  local TABLE_NAME

  if [[ -z "$1" ]]; then
    PROJECT_NAME="${PWD##*/}" # use current dir name
  else
    PROJECT_NAME=$1
  fi
  ACCOUNT_ID="$(aws sts get-caller-identity --query Account --output text)"

  if [[ "${AWS_DEFAULT_REGION}" = "us-east-1" ]]; then
    # us-east-1 must NOT be passed as a LocationConstraint
    LOCATION_CONSTRAINT=""
  else
    # BUGFIX: single quotes left ${AWS_DEFAULT_REGION} unexpanded, so the
    # literal text was handed to the AWS CLI - expand it at assignment time.
    LOCATION_CONSTRAINT="--create-bucket-configuration LocationConstraint=${AWS_DEFAULT_REGION}"
  fi

  BUCKET_NAME="terraform-tfstate-${ACCOUNT_ID}"
  BUCKET_EXISTS=$(aws s3api list-buckets | jq ".Buckets[] | select(.Name == \"${BUCKET_NAME}\")")
  if [[ -z "${BUCKET_EXISTS}" ]]; then
    echo "Creating Terraform State S3 Bucket ${BUCKET_NAME} in ${AWS_DEFAULT_REGION}"
    # ${LOCATION_CONSTRAINT} stays unquoted on purpose: it is either empty or
    # expands to two separate CLI arguments
    aws s3api create-bucket \
      --region "${AWS_DEFAULT_REGION}" \
      ${LOCATION_CONSTRAINT} \
      --bucket "${BUCKET_NAME}"
  fi

  TABLE_NAME="terraform_locks"
  TABLE_INDEX=$(aws dynamodb list-tables | jq ".TableNames | index(\"${TABLE_NAME}\")")
  if [[ "${TABLE_INDEX}" = 'null' ]];then
    echo "Creating Terraform State DynamoDB Lock Table ${TABLE_NAME} in ${AWS_DEFAULT_REGION}"
    aws dynamodb create-table \
      --region "${AWS_DEFAULT_REGION}" \
      --table-name "${TABLE_NAME}" \
      --attribute-definitions AttributeName=LockID,AttributeType=S \
      --key-schema AttributeName=LockID,KeyType=HASH \
      --provisioned-throughput ReadCapacityUnits=1,WriteCapacityUnits=1
    # BUGFIX: was a second hard-coded "terraform_locks"; use the variable
    aws dynamodb wait table-exists --table-name "${TABLE_NAME}"
  fi


  # NB - the pattern of managing the S3 bucket & DynamoDB table in Terraform
  # makes it impossible to cleanly destroy the terraform stack, so we don't do that
  # BUGFIX: `cat < ./backend_config.tf` *read* the file (and the heredoc body
  # leaked to stdout) - write the config via a heredoc redirected into it.
  cat > ./backend_config.tf <<EOF
terraform {
  backend "s3" {
    bucket         = "${BUCKET_NAME}"
    key            = "${PROJECT_NAME}"
    region         = "${AWS_DEFAULT_REGION}"
    dynamodb_table = "terraform_locks"
  }
}
EOF
}

# map_branch_to_workspace BRANCH
# master maps to the terraform "default" workspace; any other branch maps to
# its own name with / replaced by - (workspace names cannot contain /).
map_branch_to_workspace() {
  # TODO input & output sanity checking..
  if [[ $1 = 'master' ]]; then
    echo "default" | tr / -
  else
    echo $1 | tr / -
  fi
}

# map_branch_to_tfvars BRANCH
# Choose (and if necessary create, empty) the tfvars file for this branch,
# with some sensible defaults.
map_branch_to_tfvars() {
  local TF_VARS_FILE
  case "$1" in
  master)
    TF_VARS_FILE=master.tfvars
    ;;
  develop)
    TF_VARS_FILE=develop.tfvars
    ;;
  *)
    if [[ -f "$1".tfvars ]]; then
      TF_VARS_FILE="$1".tfvars
    else
      TF_VARS_FILE="testing-defaults.tfvars"
    fi
    ;;
  esac

  if [[ ! -f "${TF_VARS_FILE}" ]]; then
    touch "${TF_VARS_FILE}"
  fi

  echo "${TF_VARS_FILE}"
}
--------------------------------------------------------------------------------
/scripts/tf-wrapper.sh:
--------------------------------------------------------------------------------
#!/bin/bash
#
# terraform wrapper
#
# * takes care of pre "terraform init" steps (S3 & DynamoDB setup)
# * creates a terraform workspace to match the branch (master==default)
# * destroy workspace after destroying a stack
#
# Use case: build an ephemeral terraform configuration in CI to test & then destroy
# Use Case: Maintain default/master safely (how?!?)
11 | # 12 | set -e 13 | # DEBUG 14 | #set -x 15 | 16 | THIS_SCRIPT=${BASH_SOURCE[0]:-$0} 17 | # grumble, moan, PATH, symlinks 18 | if [[ -L "${THIS_SCRIPT}" ]]; then 19 | THIS_SCRIPT=`readlink ${THIS_SCRIPT} 2>&1` 20 | fi 21 | PROJECT_HOME="$( cd "$( dirname "${THIS_SCRIPT}" )/.." && pwd )" 22 | 23 | # load our helper functions 24 | source ${PROJECT_HOME}/scripts/common.sh 25 | 26 | # default to plan, to show changes, valid opions are plan, apply & destroy 27 | TF_ACTION=plan 28 | 29 | OPTIND=1 # Reset is necessary if getopts was used previously in the script. It is a good idea to make this local in a function. 30 | while getopts "a:e:hv" opt; do 31 | case "$opt" in 32 | a) TF_ACTION=${OPTARG} 33 | ;; 34 | *) 35 | show_help 36 | exit 1 37 | ;; 38 | esac 39 | done 40 | shift "$((OPTIND-1))" # Shift off the options and optional --. 41 | 42 | # check that the tools we require are present 43 | package_check 44 | 45 | # check that we have AWS credentials 46 | check_aws_credentials 47 | 48 | GIT_BRANCH=$(get_git_branch) 49 | TF_WORKSPACE=$(map_branch_to_workspace ${GIT_BRANCH}) 50 | TF_VARS_FILE=$(map_branch_to_tfvars ${GIT_BRANCH}) 51 | 52 | # create the S3 bucket, DynamoDB & matching backend.tf 53 | generate_terraform_backend 54 | 55 | [[ ! -d .terraform ]] && terraform init 56 | # the workspace may already exist - safe to ignore & carry on 57 | terraform workspace new ${TF_WORKSPACE} || true 58 | echo "Selecting workspace: ${TF_WORKSPACE}" 59 | terraform workspace select ${TF_WORKSPACE} 60 | case "${TF_ACTION}" in 61 | plan) 62 | [[ ! 
-d plan ]] && mkdir plan 63 | terraform plan -var-file=${TF_VARS_FILE} -out=plan/plan.out 64 | ;; 65 | apply) 66 | terraform apply plan/plan.out 67 | terraform output 68 | # once more for the camera 69 | terraform output -json > output.json 70 | ;; 71 | destroy) 72 | terraform destroy -var-file=${TF_VARS_FILE} -auto-approve 73 | terraform workspace select default 74 | terraform workspace delete ${TF_WORKSPACE} 75 | ;; 76 | esac 77 | 78 | echo "Done." 79 | -------------------------------------------------------------------------------- /terraform.tf: -------------------------------------------------------------------------------- 1 | provider "aws" {} 2 | 3 | variable "vpc_main_cidr" { 4 | type = "string" 5 | } 6 | 7 | variable "vpc_dmz_cidr" { 8 | type = "string" 9 | } 10 | 11 | resource "aws_vpc" "main" { 12 | cidr_block = "${var.vpc_main_cidr}" 13 | } 14 | 15 | output "main_vpc_id" { 16 | value = "${aws_vpc.main.id}" 17 | } 18 | 19 | resource "aws_vpc" "dmz" { 20 | cidr_block = "${var.vpc_dmz_cidr}" 21 | } 22 | 23 | output "dmz_vpc_id" { 24 | value = "${aws_vpc.dmz.id}" 25 | } 26 | -------------------------------------------------------------------------------- /testing-defaults.tfvars: -------------------------------------------------------------------------------- 1 | vpc_main_cidr = "172.28.0.0/16" 2 | vpc_dmz_cidr = "172.29.0.0/16" 3 | --------------------------------------------------------------------------------