├── .editorconfig ├── .github ├── CODEOWNERS ├── ISSUE_TEMPLATE │ ├── bug_report.md │ ├── bug_report.yml │ ├── config.yml │ ├── feature_request.md │ ├── feature_request.yml │ └── question.md ├── PULL_REQUEST_TEMPLATE.md ├── banner.png ├── mergify.yml ├── renovate.json ├── settings.yml └── workflows │ ├── branch.yml │ ├── chatops.yml │ ├── release.yml │ └── scheduled.yml ├── .gitignore ├── LICENSE ├── README.md ├── README.yaml ├── atmos.yaml ├── context.tf ├── examples └── complete │ ├── context.tf │ ├── fixtures.us-east-2.tfvars │ ├── main.tf │ ├── outputs.tf │ ├── variables.tf │ └── versions.tf ├── main.tf ├── outputs.tf ├── test ├── .gitignore ├── Makefile ├── Makefile.alpine └── src │ ├── .gitignore │ ├── Makefile │ ├── examples_complete_test.go │ ├── go.mod │ └── go.sum ├── variables.tf └── versions.tf /.editorconfig: -------------------------------------------------------------------------------- 1 | # Unix-style newlines with a newline ending every file 2 | [*] 3 | charset = utf-8 4 | end_of_line = lf 5 | indent_size = 2 6 | indent_style = space 7 | insert_final_newline = true 8 | trim_trailing_whitespace = true 9 | 10 | [*.{tf,tfvars}] 11 | indent_size = 2 12 | indent_style = space 13 | 14 | [*.md] 15 | max_line_length = 0 16 | trim_trailing_whitespace = false 17 | 18 | # Override for Makefile 19 | [{Makefile, makefile, GNUmakefile, Makefile.*}] 20 | tab_width = 2 21 | indent_style = tab 22 | indent_size = 4 23 | 24 | [COMMIT_EDITMSG] 25 | max_line_length = 0 26 | -------------------------------------------------------------------------------- /.github/CODEOWNERS: -------------------------------------------------------------------------------- 1 | # Use this file to define individuals or teams that are responsible for code in a repository. 
2 | # Read more: https://docs.github.com/en/repositories/managing-your-repositorys-settings-and-features/customizing-your-repository/about-code-owners 3 | # 4 | # Order is important: the last matching pattern has the highest precedence 5 | 6 | # These owners will be the default owners for everything 7 | * @cloudposse/engineering @cloudposse/contributors 8 | 9 | # Cloud Posse must review any changes to Makefiles 10 | **/Makefile @cloudposse/engineering 11 | **/Makefile.* @cloudposse/engineering 12 | 13 | # Cloud Posse must review any changes to GitHub actions 14 | .github/* @cloudposse/engineering 15 | 16 | # Cloud Posse must review any changes to standard context definition, 17 | # but some changes can be rubber-stamped. 18 | **/*.tf @cloudposse/engineering @cloudposse/contributors @cloudposse/approvers 19 | README.yaml @cloudposse/engineering @cloudposse/contributors @cloudposse/approvers 20 | README.md @cloudposse/engineering @cloudposse/contributors @cloudposse/approvers 21 | docs/*.md @cloudposse/engineering @cloudposse/contributors @cloudposse/approvers 22 | 23 | # Cloud Posse Admins must review all changes to CODEOWNERS or the mergify configuration 24 | .github/mergify.yml @cloudposse/admins 25 | .github/CODEOWNERS @cloudposse/admins 26 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/bug_report.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Bug report 3 | about: Create a report to help us improve 4 | title: '' 5 | labels: 'bug' 6 | assignees: '' 7 | 8 | --- 9 | 10 | Found a bug? Maybe our [Slack Community](https://slack.cloudposse.com) can help. 11 | 12 | [![Slack Community](https://slack.cloudposse.com/badge.svg)](https://slack.cloudposse.com) 13 | 14 | ## Describe the Bug 15 | A clear and concise description of what the bug is. 16 | 17 | ## Expected Behavior 18 | A clear and concise description of what you expected to happen. 19 | 20 | ## Steps to Reproduce 21 | Steps to reproduce the behavior: 22 | 1. Go to '...' 23 | 2. Run '....' 24 | 3. Enter '....' 25 | 4. 
See error 26 | 27 | ## Screenshots 28 | If applicable, add screenshots or logs to help explain your problem. 29 | 30 | ## Environment (please complete the following information): 31 | 32 | Anything that will help us triage the bug will help. Here are some ideas: 33 | - OS: [e.g. Linux, OSX, WSL, etc] 34 | - Version [e.g. 10.15] 35 | 36 | ## Additional Context 37 | Add any other context about the problem here. -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/bug_report.yml: -------------------------------------------------------------------------------- 1 | --- 2 | name: Bug report 3 | description: Create a report to help us improve 4 | labels: ["bug"] 5 | assignees: [""] 6 | body: 7 | - type: markdown 8 | attributes: 9 | value: | 10 | Found a bug? 11 | 12 | Please checkout our [Slack Community](https://slack.cloudposse.com) 13 | or visit our [Slack Archive](https://archive.sweetops.com/). 14 | 15 | [![Slack Community](https://slack.cloudposse.com/badge.svg)](https://slack.cloudposse.com) 16 | 17 | - type: textarea 18 | id: concise-description 19 | attributes: 20 | label: Describe the Bug 21 | description: A clear and concise description of what the bug is. 22 | placeholder: What is the bug about? 23 | validations: 24 | required: true 25 | 26 | - type: textarea 27 | id: expected 28 | attributes: 29 | label: Expected Behavior 30 | description: A clear and concise description of what you expected. 31 | placeholder: What happened? 32 | validations: 33 | required: true 34 | 35 | - type: textarea 36 | id: reproduction-steps 37 | attributes: 38 | label: Steps to Reproduce 39 | description: Steps to reproduce the behavior. 40 | placeholder: How do we reproduce it? 41 | validations: 42 | required: true 43 | 44 | - type: textarea 45 | id: screenshots 46 | attributes: 47 | label: Screenshots 48 | description: If applicable, add screenshots or logs to help explain. 
49 | validations: 50 | required: false 51 | 52 | - type: textarea 53 | id: environment 54 | attributes: 55 | label: Environment 56 | description: Anything that will help us triage the bug. 57 | placeholder: | 58 | - OS: [e.g. Linux, OSX, WSL, etc] 59 | - Version [e.g. 10.15] 60 | - Module version 61 | - Terraform version 62 | validations: 63 | required: false 64 | 65 | - type: textarea 66 | id: additional 67 | attributes: 68 | label: Additional Context 69 | description: | 70 | Add any other context about the problem here. 71 | validations: 72 | required: false 73 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/config.yml: -------------------------------------------------------------------------------- 1 | blank_issues_enabled: false 2 | 3 | contact_links: 4 | 5 | - name: Community Slack Team 6 | url: https://cloudposse.com/slack/ 7 | about: |- 8 | Please ask and answer questions here. 9 | 10 | - name: Office Hours 11 | url: https://cloudposse.com/office-hours/ 12 | about: |- 13 | Join us every Wednesday for FREE Office Hours (lunch & learn). 14 | 15 | - name: DevOps Accelerator Program 16 | url: https://cloudposse.com/accelerate/ 17 | about: |- 18 | Own your infrastructure in record time. We build it. You drive it. 19 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/feature_request.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Feature Request 3 | about: Suggest an idea for this project 4 | title: '' 5 | labels: 'feature request' 6 | assignees: '' 7 | 8 | --- 9 | 10 | Have a question? Please checkout our [Slack Community](https://slack.cloudposse.com) or visit our [Slack Archive](https://archive.sweetops.com/). 
11 | 12 | [![Slack Community](https://slack.cloudposse.com/badge.svg)](https://slack.cloudposse.com) 13 | 14 | ## Describe the Feature 15 | 16 | A clear and concise description of what the feature is. 17 | 18 | ## Expected Behavior 19 | 20 | A clear and concise description of what you expected to happen. 21 | 22 | ## Use Case 23 | 24 | Is your feature request related to a problem/challenge you are trying to solve? Please provide some additional context of why this feature or capability will be valuable. 25 | 26 | ## Describe Ideal Solution 27 | 28 | A clear and concise description of what you want to happen. If you don't know, that's okay. 29 | 30 | ## Alternatives Considered 31 | 32 | Explain what alternative solutions or features you've considered. 33 | 34 | ## Additional Context 35 | 36 | Add any other context or screenshots about the feature request here. 37 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/feature_request.yml: -------------------------------------------------------------------------------- 1 | --- 2 | name: Feature Request 3 | description: Suggest an idea for this project 4 | labels: ["feature request"] 5 | assignees: [""] 6 | body: 7 | - type: markdown 8 | attributes: 9 | value: | 10 | Have a question? 11 | 12 | Please check out our [Slack Community](https://slack.cloudposse.com) 13 | or visit our [Slack Archive](https://archive.sweetops.com/). 14 | 15 | [![Slack Community](https://slack.cloudposse.com/badge.svg)](https://slack.cloudposse.com) 16 | 17 | - type: textarea 18 | id: concise-description 19 | attributes: 20 | label: Describe the Feature 21 | description: A clear and concise description of what the feature is. 22 | placeholder: What is the feature about? 23 | validations: 24 | required: true 25 | 26 | - type: textarea 27 | id: expected 28 | attributes: 29 | label: Expected Behavior 30 | description: A clear and concise description of what you expected. 
31 | placeholder: What do you expect to happen? 32 | validations: 33 | required: true 34 | 35 | - type: textarea 36 | id: use-case 37 | attributes: 38 | label: Use Case 39 | description: | 40 | Is your feature request related to a problem/challenge you are trying 41 | to solve? 42 | 43 | Please provide some additional context of why this feature or 44 | capability will be valuable. 45 | validations: 46 | required: true 47 | 48 | - type: textarea 49 | id: ideal-solution 50 | attributes: 51 | label: Describe Ideal Solution 52 | description: A clear and concise description of what you want to happen. 53 | validations: 54 | required: true 55 | 56 | - type: textarea 57 | id: alternatives-considered 58 | attributes: 59 | label: Alternatives Considered 60 | description: Explain alternative solutions or features considered. 61 | validations: 62 | required: false 63 | 64 | - type: textarea 65 | id: additional 66 | attributes: 67 | label: Additional Context 68 | description: | 69 | Add any other context about the feature request here. 
70 | validations: 71 | required: false 72 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/question.md: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cloudposse/terraform-aws-cicd/835b3d04cd612750622e46efed52625c34f6c41d/.github/ISSUE_TEMPLATE/question.md -------------------------------------------------------------------------------- /.github/PULL_REQUEST_TEMPLATE.md: -------------------------------------------------------------------------------- 1 | ## what 2 | 3 | 7 | 8 | ## why 9 | 10 | 15 | 16 | ## references 17 | 18 | 22 | -------------------------------------------------------------------------------- /.github/banner.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cloudposse/terraform-aws-cicd/835b3d04cd612750622e46efed52625c34f6c41d/.github/banner.png -------------------------------------------------------------------------------- /.github/mergify.yml: -------------------------------------------------------------------------------- 1 | extends: .github 2 | -------------------------------------------------------------------------------- /.github/renovate.json: -------------------------------------------------------------------------------- 1 | { 2 | "extends": [ 3 | "config:base", 4 | ":preserveSemverRanges", 5 | ":rebaseStalePrs" 6 | ], 7 | "baseBranches": ["main"], 8 | "labels": ["auto-update"], 9 | "dependencyDashboardAutoclose": true, 10 | "enabledManagers": ["terraform"], 11 | "terraform": { 12 | "ignorePaths": ["**/context.tf"] 13 | } 14 | } 15 | -------------------------------------------------------------------------------- /.github/settings.yml: -------------------------------------------------------------------------------- 1 | # Upstream changes from _extends are only recognized when modifications are made to this file in the default branch. 
2 | _extends: .github 3 | repository: 4 | name: terraform-aws-cicd 5 | description: Terraform Module for CI/CD with AWS Code Pipeline and Code Build 6 | homepage: https://cloudposse.com/accelerate 7 | topics: terraform, terraform-modules, cicd, codepipeline, aws, codebuild, continuous-integration, continuous-delivery, hcl2 8 | 9 | 10 | 11 | -------------------------------------------------------------------------------- /.github/workflows/branch.yml: -------------------------------------------------------------------------------- 1 | --- 2 | name: Branch 3 | on: 4 | pull_request: 5 | branches: 6 | - main 7 | - release/** 8 | types: [opened, synchronize, reopened, labeled, unlabeled] 9 | push: 10 | branches: 11 | - main 12 | - release/v* 13 | paths-ignore: 14 | - '.github/**' 15 | - 'docs/**' 16 | - 'examples/**' 17 | - 'test/**' 18 | - 'README.md' 19 | 20 | permissions: {} 21 | 22 | jobs: 23 | terraform-module: 24 | uses: cloudposse/.github/.github/workflows/shared-terraform-module.yml@main 25 | secrets: inherit 26 | -------------------------------------------------------------------------------- /.github/workflows/chatops.yml: -------------------------------------------------------------------------------- 1 | --- 2 | name: chatops 3 | on: 4 | issue_comment: 5 | types: [created] 6 | 7 | permissions: 8 | pull-requests: write 9 | id-token: write 10 | contents: write 11 | statuses: write 12 | 13 | jobs: 14 | test: 15 | uses: cloudposse/.github/.github/workflows/shared-terraform-chatops.yml@main 16 | if: ${{ github.event.issue.pull_request && contains(github.event.comment.body, '/terratest') }} 17 | secrets: inherit 18 | -------------------------------------------------------------------------------- /.github/workflows/release.yml: -------------------------------------------------------------------------------- 1 | --- 2 | name: release 3 | on: 4 | release: 5 | types: 6 | - published 7 | 8 | permissions: 9 | id-token: write 10 | contents: write 11 | pull-requests: 
write 12 | 13 | jobs: 14 | terraform-module: 15 | uses: cloudposse/.github/.github/workflows/shared-release-branches.yml@main 16 | secrets: inherit 17 | -------------------------------------------------------------------------------- /.github/workflows/scheduled.yml: -------------------------------------------------------------------------------- 1 | --- 2 | name: scheduled 3 | on: 4 | workflow_dispatch: { } # Allows manually trigger this workflow 5 | schedule: 6 | - cron: "0 3 * * *" 7 | 8 | permissions: 9 | pull-requests: write 10 | id-token: write 11 | contents: write 12 | 13 | jobs: 14 | scheduled: 15 | uses: cloudposse/.github/.github/workflows/shared-terraform-scheduled.yml@main 16 | secrets: inherit 17 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Local .terraform directories 2 | **/.terraform 3 | **/.terraform.d 4 | 5 | # .tfstate files 6 | *.tfstate 7 | *.tfstate.* 8 | .terraform 9 | .terraform.tfstate.lock.info 10 | .terraform.lock.hcl 11 | 12 | **/.idea 13 | **/*.iml 14 | 15 | # Cloud Posse Build Harness https://github.com/cloudposse/build-harness 16 | **/.build-harness 17 | **/build-harness 18 | 19 | # Crash log files 20 | crash.log 21 | test.log 22 | 23 | # Editor backups 24 | *.orig 25 | *.draft 26 | *~ 27 | 28 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 
11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 
47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. 
Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. 
You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 
122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. 
In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "{}" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. 
We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright 2017-2019 Cloud Posse, LLC 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 202 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | Project Banner
5 |

6 | Latest ReleaseLast UpdatedSlack Community

7 | 8 | 9 | 29 | 30 | Terraform module to create AWS [`CodePipeline`](https://aws.amazon.com/codepipeline/) with [`CodeBuild`](https://aws.amazon.com/codebuild/) for [`CI/CD`](https://en.wikipedia.org/wiki/CI/CD) 31 | 32 | This module supports three use-cases: 33 | 34 | 1. **GitHub -> S3 (build artifact) -> Elastic Beanstalk (running application stack)**. 35 | The module gets the code from a ``GitHub`` repository (public or private), builds it by executing the ``buildspec.yml`` file from the repository, pushes the built artifact to an S3 bucket, 36 | and deploys the artifact to ``Elastic Beanstalk`` running one of the supported stacks (_e.g._ ``Java``, ``Go``, ``Node``, ``IIS``, ``Python``, ``Ruby``, etc.). 37 | - http://docs.aws.amazon.com/codebuild/latest/userguide/sample-maven-5m.html 38 | - http://docs.aws.amazon.com/codebuild/latest/userguide/sample-nodejs-hw.html 39 | - http://docs.aws.amazon.com/codebuild/latest/userguide/sample-go-hw.html 40 | 41 | 42 | 2. **GitHub -> ECR (Docker image) -> Elastic Beanstalk (running Docker stack)**. 43 | The module gets the code from a ``GitHub`` repository, builds a ``Docker`` image from it by executing the ``buildspec.yml`` and ``Dockerfile`` files from the repository, 44 | pushes the ``Docker`` image to an ``ECR`` repository, and deploys the ``Docker`` image to ``Elastic Beanstalk`` running ``Docker`` stack. 45 | - http://docs.aws.amazon.com/codebuild/latest/userguide/sample-docker.html 46 | 47 | 48 | 3. **GitHub -> ECR (Docker image)**. 49 | The module gets the code from a ``GitHub`` repository, builds a ``Docker`` image from it by executing the ``buildspec.yml`` and ``Dockerfile`` files from the repository, 50 | and pushes the ``Docker`` image to an ``ECR`` repository. This is used when we want to build a ``Docker`` image from the code and push it to ``ECR`` without deploying to ``Elastic Beanstalk``. 51 | To activate this mode, don't specify the ``app`` and ``env`` attributes for the module. 
52 | - http://docs.aws.amazon.com/codebuild/latest/userguide/sample-docker.html 53 | 54 | 55 | > [!TIP] 56 | > #### 👽 Use Atmos with Terraform 57 | > Cloud Posse uses [`atmos`](https://atmos.tools) to easily orchestrate multiple environments using Terraform.
58 | > Works with [Github Actions](https://atmos.tools/integrations/github-actions/), [Atlantis](https://atmos.tools/integrations/atlantis), or [Spacelift](https://atmos.tools/integrations/spacelift). 59 | > 60 | >
61 | > Watch demo of using Atmos with Terraform 62 | >
63 | > Example of running atmos to manage infrastructure from our Quick Start tutorial. 64 | > 65 | 66 | 67 | 68 | 69 | 70 | ## Usage 71 | 72 | Include this repository as a module in your existing terraform code: 73 | 74 | ```hcl 75 | module "build" { 76 | source = "cloudposse/cicd/aws" 77 | # Cloud Posse recommends pinning every module to a specific version 78 | # version = "x.x.x" 79 | namespace = "eg" 80 | stage = "staging" 81 | name = "app" 82 | 83 | # Enable the pipeline creation 84 | enabled = true 85 | 86 | # Elastic Beanstalk 87 | elastic_beanstalk_application_name = "<(Optional) Elastic Beanstalk application name>" 88 | elastic_beanstalk_environment_name = "<(Optional) Elastic Beanstalk environment name>" 89 | 90 | # Application repository on GitHub 91 | github_oauth_token = "(Required) " 92 | repo_owner = "" 93 | repo_name = "" 94 | branch = "" 95 | 96 | # http://docs.aws.amazon.com/codebuild/latest/userguide/build-env-ref.html 97 | # http://docs.aws.amazon.com/codebuild/latest/userguide/build-spec-ref.html 98 | build_image = "aws/codebuild/standard:2.0" 99 | build_compute_type = "BUILD_GENERAL1_SMALL" 100 | 101 | # These attributes are optional, used as ENV variables when building Docker images and pushing them to ECR 102 | # For more info: 103 | # http://docs.aws.amazon.com/codebuild/latest/userguide/sample-docker.html 104 | # https://www.terraform.io/docs/providers/aws/r/codebuild_project.html 105 | privileged_mode = true 106 | region = "us-east-1" 107 | aws_account_id = "xxxxxxxxxx" 108 | image_repo_name = "ecr-repo-name" 109 | image_tag = "latest" 110 | 111 | # Optional extra environment variables 112 | environment_variables = [{ 113 | name = "JENKINS_URL" 114 | value = "https://jenkins.example.com" 115 | }, 116 | { 117 | name = "COMPANY_NAME" 118 | value = "Amazon" 119 | }, 120 | { 121 | name = "TIME_ZONE" 122 | value = "Pacific/Auckland" 123 | }] 124 | } 125 | ``` 126 | 127 | > [!IMPORTANT] 128 | > In Cloud Posse's examples, we avoid pinning 
modules to specific versions to prevent discrepancies between the documentation 129 | > and the latest released versions. However, for your own projects, we strongly advise pinning each module to the exact version 130 | > you're using. This practice ensures the stability of your infrastructure. Additionally, we recommend implementing a systematic 131 | > approach for updating versions to avoid unexpected changes. 132 | 133 | 134 | 135 | 136 | 137 | ## Examples 138 | 139 | ### Example: GitHub, NodeJS, S3 and EB 140 | 141 | This is an example to build a Node app, store the build artifact to an S3 bucket, and then deploy it to Elastic Beanstalk running ``Node`` stack 142 | 143 | 144 | `buildspec.yml` file 145 | 146 | ```yaml 147 | version: 0.2 148 | 149 | phases: 150 | install: 151 | commands: 152 | - echo Starting installation ... 153 | pre_build: 154 | commands: 155 | - echo Installing NPM dependencies... 156 | - npm install 157 | build: 158 | commands: 159 | - echo Build started on `date` 160 | post_build: 161 | commands: 162 | - echo Build completed on `date` 163 | artifacts: 164 | files: 165 | - node_modules/**/* 166 | - public/**/* 167 | - routes/**/* 168 | - views/**/* 169 | - app.js 170 | ``` 171 | 172 | ### Example: GitHub, NodeJS, Docker, ECR and EB 173 | 174 | This is an example to build a ``Docker`` image for a Node app, push the ``Docker`` image to an ECR repository, and then deploy it to Elastic Beanstalk running ``Docker`` stack 175 | 176 | `buildspec.yml` file 177 | 178 | ```yaml 179 | version: 0.2 180 | 181 | phases: 182 | pre_build: 183 | commands: 184 | - echo Logging in to Amazon ECR... 185 | - $(aws ecr get-login --region $AWS_REGION) 186 | build: 187 | commands: 188 | - echo Build started on `date` 189 | - echo Building the Docker image... 190 | - docker build -t $IMAGE_REPO_NAME . 
191 | - docker tag $IMAGE_REPO_NAME:$IMAGE_TAG $AWS_ACCOUNT_ID.dkr.ecr.$AWS_REGION.amazonaws.com/$IMAGE_REPO_NAME:$IMAGE_TAG 192 | post_build: 193 | commands: 194 | - echo Build completed on `date` 195 | - echo Pushing the Docker image to ECR... 196 | - docker push $AWS_ACCOUNT_ID.dkr.ecr.$AWS_REGION.amazonaws.com/$IMAGE_REPO_NAME:$IMAGE_TAG 197 | artifacts: 198 | files: 199 | - '**/*' 200 | ``` 201 | 202 | `Dockerfile` 203 | 204 | ```dockerfile 205 | FROM node:latest 206 | 207 | WORKDIR /usr/src/app 208 | 209 | COPY package.json package-lock.json ./ 210 | RUN npm install 211 | COPY . . 212 | 213 | EXPOSE 8081 214 | CMD [ "npm", "start" ] 215 | ``` 216 | 217 | 218 | 219 | 220 | 221 | ## Makefile Targets 222 | ```text 223 | Available targets: 224 | 225 | help Help screen 226 | help/all Display help for all targets 227 | help/short This help short screen 228 | lint Lint terraform code 229 | 230 | ``` 231 | 232 | 233 | ## Requirements 234 | 235 | | Name | Version | 236 | |------|---------| 237 | | [terraform](#requirement\_terraform) | >= 1.3 | 238 | | [aws](#requirement\_aws) | >= 5.0 | 239 | | [random](#requirement\_random) | >= 2.1 | 240 | 241 | ## Providers 242 | 243 | | Name | Version | 244 | |------|---------| 245 | | [aws](#provider\_aws) | >= 5.0 | 246 | | [random](#provider\_random) | >= 2.1 | 247 | 248 | ## Modules 249 | 250 | | Name | Source | Version | 251 | |------|--------|---------| 252 | | [codebuild](#module\_codebuild) | cloudposse/codebuild/aws | 2.0.1 | 253 | | [github\_webhook](#module\_github\_webhook) | cloudposse/repository-webhooks/github | 0.12.1 | 254 | | [this](#module\_this) | cloudposse/label/null | 0.25.0 | 255 | 256 | ## Resources 257 | 258 | | Name | Type | 259 | |------|------| 260 | | [aws_codepipeline.default](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/codepipeline) | resource | 261 | | 
[aws_codepipeline_webhook.default](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/codepipeline_webhook) | resource | 262 | | [aws_iam_policy.codebuild](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/iam_policy) | resource | 263 | | [aws_iam_policy.default](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/iam_policy) | resource | 264 | | [aws_iam_policy.s3](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/iam_policy) | resource | 265 | | [aws_iam_role.default](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/iam_role) | resource | 266 | | [aws_iam_role_policy_attachment.codebuild](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/iam_role_policy_attachment) | resource | 267 | | [aws_iam_role_policy_attachment.codebuild_s3](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/iam_role_policy_attachment) | resource | 268 | | [aws_iam_role_policy_attachment.default](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/iam_role_policy_attachment) | resource | 269 | | [aws_iam_role_policy_attachment.s3](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/iam_role_policy_attachment) | resource | 270 | | [aws_s3_bucket.default](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/s3_bucket) | resource | 271 | | [random_password.webhook_secret](https://registry.terraform.io/providers/hashicorp/random/latest/docs/resources/password) | resource | 272 | | [aws_caller_identity.default](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/data-sources/caller_identity) | data source | 273 | | [aws_iam_policy_document.assume](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/data-sources/iam_policy_document) | data source | 274 | | 
[aws_iam_policy_document.codebuild](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/data-sources/iam_policy_document) | data source | 275 | | [aws_iam_policy_document.default](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/data-sources/iam_policy_document) | data source | 276 | | [aws_iam_policy_document.s3](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/data-sources/iam_policy_document) | data source | 277 | | [aws_region.default](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/data-sources/region) | data source | 278 | | [aws_s3_bucket.website](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/data-sources/s3_bucket) | data source | 279 | 280 | ## Inputs 281 | 282 | | Name | Description | Type | Default | Required | 283 | |------|-------------|------|---------|:--------:| 284 | | [access\_log\_bucket\_name](#input\_access\_log\_bucket\_name) | Name of the S3 bucket where s3 access log will be sent to | `string` | `""` | no | 285 | | [additional\_tag\_map](#input\_additional\_tag\_map) | Additional key-value pairs to add to each map in `tags_as_list_of_maps`. Not added to `tags` or `id`.
This is for some rare cases where resources want additional configuration of tags
and therefore take a list of maps with tag key, value, and additional configuration. | `map(string)` | `{}` | no | 286 | | [attributes](#input\_attributes) | ID element. Additional attributes (e.g. `workers` or `cluster`) to add to `id`,
in the order they appear in the list. New attributes are appended to the
end of the list. The elements of the list are joined by the `delimiter`
and treated as a single ID element. | `list(string)` | `[]` | no | 287 | | [aws\_account\_id](#input\_aws\_account\_id) | AWS Account ID. Used as CodeBuild ENV variable when building Docker images. [For more info](http://docs.aws.amazon.com/codebuild/latest/userguide/sample-docker.html) | `string` | `""` | no | 288 | | [branch](#input\_branch) | Branch of the GitHub repository, _e.g._ `master` | `string` | n/a | yes | 289 | | [build\_compute\_type](#input\_build\_compute\_type) | `CodeBuild` instance size. Possible values are:
BUILD_GENERAL1_SMALL
BUILD_GENERAL1_MEDIUM
BUILD_GENERAL1_LARGE
| `string` | `"BUILD_GENERAL1_SMALL"` | no | 290 | | [build\_image](#input\_build\_image) | Docker image for build environment, _e.g._ `aws/codebuild/standard:2.0` or `aws/codebuild/eb-nodejs-6.10.0-amazonlinux-64:4.0.0` | `string` | `"aws/codebuild/standard:2.0"` | no | 291 | | [buildspec](#input\_buildspec) | Declaration to use for building the project. [For more info](http://docs.aws.amazon.com/codebuild/latest/userguide/build-spec-ref.html) | `string` | `""` | no | 292 | | [cache\_type](#input\_cache\_type) | The type of storage that will be used for the AWS CodeBuild project cache. Valid values: NO\_CACHE, LOCAL, and S3. Defaults to S3 to keep same behavior as before upgrading `codebuild` module to 0.18+ version. If cache\_type is S3, it will create an S3 bucket for storing codebuild cache inside | `string` | `"S3"` | no | 293 | | [codebuild\_cache\_bucket\_suffix\_enabled](#input\_codebuild\_cache\_bucket\_suffix\_enabled) | The cache bucket generates a random 13 character string to generate a unique bucket name. If set to false it uses terraform-null-label's id value | `bool` | `true` | no | 294 | | [context](#input\_context) | Single object for setting entire context at once.
See description of individual variables for details.
Leave string and numeric variables as `null` to use default value.
Individual variable settings (non-null) override settings in context object,
except for attributes, tags, and additional\_tag\_map, which are merged. | `any` |
{
"additional_tag_map": {},
"attributes": [],
"delimiter": null,
"descriptor_formats": {},
"enabled": true,
"environment": null,
"id_length_limit": null,
"label_key_case": null,
"label_order": [],
"label_value_case": null,
"labels_as_tags": [
"unset"
],
"name": null,
"namespace": null,
"regex_replace_chars": null,
"stage": null,
"tags": {},
"tenant": null
}
| no | 295 | | [delimiter](#input\_delimiter) | Delimiter to be used between ID elements.
Defaults to `-` (hyphen). Set to `""` to use no delimiter at all. | `string` | `null` | no | 296 | | [descriptor\_formats](#input\_descriptor\_formats) | Describe additional descriptors to be output in the `descriptors` output map.
Map of maps. Keys are names of descriptors. Values are maps of the form
`{
format = string
labels = list(string)
}`
(Type is `any` so the map values can later be enhanced to provide additional options.)
`format` is a Terraform format string to be passed to the `format()` function.
`labels` is a list of labels, in order, to pass to `format()` function.
Label values will be normalized before being passed to `format()` so they will be
identical to how they appear in `id`.
Default is `{}` (`descriptors` output will be empty). | `any` | `{}` | no | 297 | | [elastic\_beanstalk\_application\_name](#input\_elastic\_beanstalk\_application\_name) | Elastic Beanstalk application name. If not provided or set to empty string, the `Deploy` stage of the pipeline will not be created | `string` | `""` | no | 298 | | [elastic\_beanstalk\_environment\_name](#input\_elastic\_beanstalk\_environment\_name) | Elastic Beanstalk environment name. If not provided or set to empty string, the `Deploy` stage of the pipeline will not be created | `string` | `""` | no | 299 | | [enabled](#input\_enabled) | Set to false to prevent the module from creating any resources | `bool` | `null` | no | 300 | | [environment](#input\_environment) | ID element. Usually used for region e.g. 'uw2', 'us-west-2', OR role 'prod', 'staging', 'dev', 'UAT' | `string` | `null` | no | 301 | | [environment\_variables](#input\_environment\_variables) | A list of maps, that contain the keys 'name', 'value', and 'type' to be used as additional environment variables for the build. Valid types are 'PLAINTEXT', 'PARAMETER\_STORE', or 'SECRETS\_MANAGER' |
list(object(
{
name = string
value = string
type = string
}))
|
[
{
"name": "NO_ADDITIONAL_BUILD_VARS",
"type": "PLAINTEXT",
"value": "TRUE"
}
]
| no | 302 | | [force\_destroy](#input\_force\_destroy) | Force destroy the CI/CD S3 bucket even if it's not empty | `bool` | `false` | no | 303 | | [github\_oauth\_token](#input\_github\_oauth\_token) | GitHub Oauth Token | `string` | n/a | yes | 304 | | [github\_webhook\_events](#input\_github\_webhook\_events) | A list of events which should trigger the webhook. See a list of [available events](https://developer.github.com/v3/activity/events/types/) | `list(string)` |
[
"push"
]
| no | 305 | | [github\_webhooks\_token](#input\_github\_webhooks\_token) | GitHub OAuth Token with permissions to create webhooks. If not provided, can be sourced from the `GITHUB_TOKEN` environment variable | `string` | `""` | no | 306 | | [id\_length\_limit](#input\_id\_length\_limit) | Limit `id` to this many characters (minimum 6).
Set to `0` for unlimited length.
Set to `null` to keep the existing setting, which defaults to `0`.<br>
Does not affect `id_full`. | `number` | `null` | no | 307 | | [image\_repo\_name](#input\_image\_repo\_name) | ECR repository name to store the Docker image built by this module. Used as CodeBuild ENV variable when building Docker images. [For more info](http://docs.aws.amazon.com/codebuild/latest/userguide/sample-docker.html) | `string` | `"UNSET"` | no | 308 | | [image\_tag](#input\_image\_tag) | Docker image tag in the ECR repository, e.g. 'latest'. Used as CodeBuild ENV variable when building Docker images. [For more info](http://docs.aws.amazon.com/codebuild/latest/userguide/sample-docker.html) | `string` | `"latest"` | no | 309 | | [label\_key\_case](#input\_label\_key\_case) | Controls the letter case of the `tags` keys (label names) for tags generated by this module.
Does not affect keys of tags passed in via the `tags` input.
Possible values: `lower`, `title`, `upper`.
Default value: `title`. | `string` | `null` | no | 310 | | [label\_order](#input\_label\_order) | The order in which the labels (ID elements) appear in the `id`.
Defaults to ["namespace", "environment", "stage", "name", "attributes"].
You can omit any of the 6 labels ("tenant" is the 6th), but at least one must be present. | `list(string)` | `null` | no | 311 | | [label\_value\_case](#input\_label\_value\_case) | Controls the letter case of ID elements (labels) as included in `id`,
set as tag values, and output by this module individually.
Does not affect values of tags passed in via the `tags` input.
Possible values: `lower`, `title`, `upper` and `none` (no transformation).
Set this to `title` and set `delimiter` to `""` to yield Pascal Case IDs.
Default value: `lower`. | `string` | `null` | no | 312 | | [labels\_as\_tags](#input\_labels\_as\_tags) | Set of labels (ID elements) to include as tags in the `tags` output.
Default is to include all labels.
Tags with empty values will not be included in the `tags` output.
Set to `[]` to suppress all generated tags.
**Notes:**
The value of the `name` tag, if included, will be the `id`, not the `name`.
Unlike other `null-label` inputs, the initial setting of `labels_as_tags` cannot be
changed in later chained modules. Attempts to change it will be silently ignored. | `set(string)` |
[
"default"
]
| no | 313 | | [name](#input\_name) | ID element. Usually the component or solution name, e.g. 'app' or 'jenkins'.
This is the only ID element not also included as a `tag`.
The "name" tag is set to the full `id` string. There is no tag with the value of the `name` input. | `string` | `null` | no | 314 | | [namespace](#input\_namespace) | ID element. Usually an abbreviation of your organization name, e.g. 'eg' or 'cp', to help ensure generated IDs are globally unique | `string` | `null` | no | 315 | | [poll\_source\_changes](#input\_poll\_source\_changes) | Periodically check the location of your source content and run the pipeline if changes are detected | `bool` | `true` | no | 316 | | [privileged\_mode](#input\_privileged\_mode) | If set to true, enables running the Docker daemon inside a Docker container on the CodeBuild instance. Used when building Docker images | `bool` | `false` | no | 317 | | [regex\_replace\_chars](#input\_regex\_replace\_chars) | Terraform regular expression (regex) string.
Characters matching the regex will be removed from the ID elements.
If not set, `"/[^a-zA-Z0-9-]/"` is used to remove all characters other than hyphens, letters and digits. | `string` | `null` | no | 318 | | [region](#input\_region) | AWS Region, e.g. `us-east-1`. Used as CodeBuild ENV variable when building Docker images. [For more info](http://docs.aws.amazon.com/codebuild/latest/userguide/sample-docker.html) | `string` | `""` | no | 319 | | [repo\_name](#input\_repo\_name) | GitHub repository name of the application to be built (and deployed to Elastic Beanstalk if configured) | `string` | n/a | yes | 320 | | [repo\_owner](#input\_repo\_owner) | GitHub Organization or Person name | `string` | n/a | yes | 321 | | [s3\_bucket\_encryption\_enabled](#input\_s3\_bucket\_encryption\_enabled) | When set to 'true' the 'aws\_s3\_bucket' resource will have AES256 encryption enabled by default | `bool` | `true` | no | 322 | | [stage](#input\_stage) | ID element. Usually used to indicate role, e.g. 'prod', 'staging', 'source', 'build', 'test', 'deploy', 'release' | `string` | `null` | no | 323 | | [tags](#input\_tags) | Additional tags (e.g. `{'BusinessUnit': 'XYZ'}`).
Neither the tag keys nor the tag values will be modified by this module. | `map(string)` | `{}` | no | 324 | | [tenant](#input\_tenant) | ID element \_(Rarely used, not included by default)\_. A customer identifier, indicating who this instance of a resource is for | `string` | `null` | no | 325 | | [versioning\_enabled](#input\_versioning\_enabled) | A state of versioning. Versioning is a means of keeping multiple variants of an object in the same bucket | `bool` | `true` | no | 326 | | [webhook\_authentication](#input\_webhook\_authentication) | The type of authentication to use. One of IP, GITHUB\_HMAC, or UNAUTHENTICATED | `string` | `"GITHUB_HMAC"` | no | 327 | | [webhook\_enabled](#input\_webhook\_enabled) | Set to false to prevent the module from creating any webhook resources | `bool` | `false` | no | 328 | | [webhook\_filter\_json\_path](#input\_webhook\_filter\_json\_path) | The JSON path to filter on | `string` | `"$.ref"` | no | 329 | | [webhook\_filter\_match\_equals](#input\_webhook\_filter\_match\_equals) | The value to match on (e.g. refs/heads/{Branch}) | `string` | `"refs/heads/{Branch}"` | no | 330 | | [webhook\_target\_action](#input\_webhook\_target\_action) | The name of the action in a pipeline you want to connect to the webhook. 
The action must be from the source (first) stage of the pipeline | `string` | `"Source"` | no | 331 | | [website\_bucket\_acl](#input\_website\_bucket\_acl) | Canned ACL of the S3 bucket objects that get served as a website, can be private if using CloudFront with OAI | `string` | `"public-read"` | no | 332 | | [website\_bucket\_name](#input\_website\_bucket\_name) | Name of the S3 bucket where the website will be deployed | `string` | `""` | no | 333 | 334 | ## Outputs 335 | 336 | | Name | Description | 337 | |------|-------------| 338 | | [codebuild\_badge\_url](#output\_codebuild\_badge\_url) | The URL of the build badge when badge\_enabled is enabled | 339 | | [codebuild\_cache\_bucket\_arn](#output\_codebuild\_cache\_bucket\_arn) | CodeBuild cache S3 bucket ARN | 340 | | [codebuild\_cache\_bucket\_name](#output\_codebuild\_cache\_bucket\_name) | CodeBuild cache S3 bucket name | 341 | | [codebuild\_project\_id](#output\_codebuild\_project\_id) | CodeBuild project ID | 342 | | [codebuild\_project\_name](#output\_codebuild\_project\_name) | CodeBuild project name | 343 | | [codebuild\_role\_arn](#output\_codebuild\_role\_arn) | CodeBuild IAM Role ARN | 344 | | [codebuild\_role\_id](#output\_codebuild\_role\_id) | CodeBuild IAM Role ID | 345 | | [codepipeline\_arn](#output\_codepipeline\_arn) | CodePipeline ARN | 346 | | [codepipeline\_id](#output\_codepipeline\_id) | CodePipeline ID | 347 | 348 | 349 | 350 | ## Related Projects 351 | 352 | Check out these related projects. 353 | 354 | 355 | 356 | > [!TIP] 357 | > #### Use Terraform Reference Architectures for AWS 358 | > 359 | > Use Cloud Posse's ready-to-go [terraform architecture blueprints](https://cloudposse.com/reference-architecture/) for AWS to get up and running quickly. 360 | > 361 | > ✅ We build it together with your team.
362 | > ✅ Your team owns everything.
363 | > ✅ 100% Open Source and backed by fanatical support.
364 | > 365 | > Request Quote 366 | >
📚 Learn More 367 | > 368 | >
369 | > 370 | > Cloud Posse is the leading [**DevOps Accelerator**](https://cpco.io/commercial-support?utm_source=github&utm_medium=readme&utm_campaign=cloudposse/terraform-aws-cicd&utm_content=commercial_support) for funded startups and enterprises. 371 | > 372 | > *Your team can operate like a pro today.* 373 | > 374 | > Ensure that your team succeeds by using Cloud Posse's proven process and turnkey blueprints. Plus, we stick around until you succeed. 375 | > #### Day-0: Your Foundation for Success 376 | > - **Reference Architecture.** You'll get everything you need from the ground up built using 100% infrastructure as code. 377 | > - **Deployment Strategy.** Adopt a proven deployment strategy with GitHub Actions, enabling automated, repeatable, and reliable software releases. 378 | > - **Site Reliability Engineering.** Gain total visibility into your applications and services with Datadog, ensuring high availability and performance. 379 | > - **Security Baseline.** Establish a secure environment from the start, with built-in governance, accountability, and comprehensive audit logs, safeguarding your operations. 380 | > - **GitOps.** Empower your team to manage infrastructure changes confidently and efficiently through Pull Requests, leveraging the full power of GitHub Actions. 381 | > 382 | > Request Quote 383 | > 384 | > #### Day-2: Your Operational Mastery 385 | > - **Training.** Equip your team with the knowledge and skills to confidently manage the infrastructure, ensuring long-term success and self-sufficiency. 386 | > - **Support.** Benefit from a seamless communication over Slack with our experts, ensuring you have the support you need, whenever you need it. 387 | > - **Troubleshooting.** Access expert assistance to quickly resolve any operational challenges, minimizing downtime and maintaining business continuity. 388 | > - **Code Reviews.** Enhance your team’s code quality with our expert feedback, fostering continuous improvement and collaboration. 
389 | > - **Bug Fixes.** Rely on our team to troubleshoot and resolve any issues, ensuring your systems run smoothly. 390 | > - **Migration Assistance.** Accelerate your migration process with our dedicated support, minimizing disruption and speeding up time-to-value. 391 | > - **Customer Workshops.** Engage with our team in weekly workshops, gaining insights and strategies to continuously improve and innovate. 392 | > 393 | > Request Quote 394 | >
395 | 396 | ## ✨ Contributing 397 | 398 | This project is under active development, and we encourage contributions from our community. 399 | 400 | 401 | 402 | Many thanks to our outstanding contributors: 403 | 404 | 405 | 406 | 407 | 408 | For 🐛 bug reports & feature requests, please use the [issue tracker](https://github.com/cloudposse/terraform-aws-cicd/issues). 409 | 410 | In general, PRs are welcome. We follow the typical "fork-and-pull" Git workflow. 411 | 1. Review our [Code of Conduct](https://github.com/cloudposse/terraform-aws-cicd/?tab=coc-ov-file#code-of-conduct) and [Contributor Guidelines](https://github.com/cloudposse/.github/blob/main/CONTRIBUTING.md). 412 | 2. **Fork** the repo on GitHub 413 | 3. **Clone** the project to your own machine 414 | 4. **Commit** changes to your own branch 415 | 5. **Push** your work back up to your fork 416 | 6. Submit a **Pull Request** so that we can review your changes 417 | 418 | **NOTE:** Be sure to merge the latest changes from "upstream" before making a pull request! 419 | 420 | ### 🌎 Slack Community 421 | 422 | Join our [Open Source Community](https://cpco.io/slack?utm_source=github&utm_medium=readme&utm_campaign=cloudposse/terraform-aws-cicd&utm_content=slack) on Slack. It's **FREE** for everyone! Our "SweetOps" community is where you get to talk with others who share a similar vision for how to rollout and manage infrastructure. This is the best place to talk shop, ask questions, solicit feedback, and work together as a community to build totally *sweet* infrastructure. 423 | 424 | ### 📰 Newsletter 425 | 426 | Sign up for [our newsletter](https://cpco.io/newsletter?utm_source=github&utm_medium=readme&utm_campaign=cloudposse/terraform-aws-cicd&utm_content=newsletter) and join 3,000+ DevOps engineers, CTOs, and founders who get insider access to the latest DevOps trends, so you can always stay in the know. 427 | Dropped straight into your Inbox every week — and usually a 5-minute read. 
428 | 429 | ### 📆 Office Hours 430 | 431 | [Join us every Wednesday via Zoom](https://cloudposse.com/office-hours?utm_source=github&utm_medium=readme&utm_campaign=cloudposse/terraform-aws-cicd&utm_content=office_hours) for your weekly dose of insider DevOps trends, AWS news and Terraform insights, all sourced from our SweetOps community, plus a _live Q&A_ that you can’t find anywhere else. 432 | It's **FREE** for everyone! 433 | ## License 434 | 435 | License 436 | 437 |
438 | Preamble to the Apache License, Version 2.0 439 |
440 |
441 | 442 | Complete license is available in the [`LICENSE`](LICENSE) file. 443 | 444 | ```text 445 | Licensed to the Apache Software Foundation (ASF) under one 446 | or more contributor license agreements. See the NOTICE file 447 | distributed with this work for additional information 448 | regarding copyright ownership. The ASF licenses this file 449 | to you under the Apache License, Version 2.0 (the 450 | "License"); you may not use this file except in compliance 451 | with the License. You may obtain a copy of the License at 452 | 453 | https://www.apache.org/licenses/LICENSE-2.0 454 | 455 | Unless required by applicable law or agreed to in writing, 456 | software distributed under the License is distributed on an 457 | "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY 458 | KIND, either express or implied. See the License for the 459 | specific language governing permissions and limitations 460 | under the License. 461 | ``` 462 |
463 | 464 | ## Trademarks 465 | 466 | All other trademarks referenced herein are the property of their respective owners. 467 | 468 | 469 | --- 470 | Copyright © 2017-2024 [Cloud Posse, LLC](https://cpco.io/copyright) 471 | 472 | 473 | README footer 474 | 475 | Beacon 476 | -------------------------------------------------------------------------------- /README.yaml: -------------------------------------------------------------------------------- 1 | # 2 | # This is the canonical configuration for the `README.md` 3 | # Run `make readme` to rebuild the `README.md` 4 | # 5 | 6 | # Name of this project 7 | name: terraform-aws-cicd 8 | 9 | # Tags of this project 10 | tags: 11 | - aws 12 | - terraform 13 | - terraform-modules 14 | - cicd 15 | - codepipeline 16 | - codebuild 17 | - continuous-integration 18 | - continuous-delivery 19 | 20 | # Categories of this project 21 | categories: 22 | - terraform-modules/cicd 23 | 24 | # Logo for this project 25 | #logo: docs/logo.png 26 | 27 | # License of this project 28 | license: "APACHE2" 29 | 30 | # Canonical GitHub repo 31 | github_repo: cloudposse/terraform-aws-cicd 32 | 33 | # Badges to display 34 | badges: 35 | - name: Latest Release 36 | image: https://img.shields.io/github/release/cloudposse/terraform-aws-cicd.svg?style=for-the-badge 37 | url: https://github.com/cloudposse/terraform-aws-cicd/releases/latest 38 | - name: Last Updated 39 | image: https://img.shields.io/github/last-commit/cloudposse/terraform-aws-cicd.svg?style=for-the-badge 40 | url: https://github.com/cloudposse/terraform-aws-cicd/commits 41 | - name: Slack Community 42 | image: https://slack.cloudposse.com/for-the-badge.svg 43 | url: https://cloudposse.com/slack 44 | 45 | # List any related terraform modules that this module may be used with or that this module depends on. 
46 | related: 47 | 48 | # Short description of this project 49 | description: |- 50 | Terraform module to create AWS [`CodePipeline`](https://aws.amazon.com/codepipeline/) with [`CodeBuild`](https://aws.amazon.com/codebuild/) for [`CI/CD`](https://en.wikipedia.org/wiki/CI/CD) 51 | 52 | This module supports three use-cases: 53 | 54 | 1. **GitHub -> S3 (build artifact) -> Elastic Beanstalk (running application stack)**. 55 | The module gets the code from a ``GitHub`` repository (public or private), builds it by executing the ``buildspec.yml`` file from the repository, pushes the built artifact to an S3 bucket, 56 | and deploys the artifact to ``Elastic Beanstalk`` running one of the supported stacks (_e.g._ ``Java``, ``Go``, ``Node``, ``IIS``, ``Python``, ``Ruby``, etc.). 57 | - http://docs.aws.amazon.com/codebuild/latest/userguide/sample-maven-5m.html 58 | - http://docs.aws.amazon.com/codebuild/latest/userguide/sample-nodejs-hw.html 59 | - http://docs.aws.amazon.com/codebuild/latest/userguide/sample-go-hw.html 60 | 61 | 62 | 2. **GitHub -> ECR (Docker image) -> Elastic Beanstalk (running Docker stack)**. 63 | The module gets the code from a ``GitHub`` repository, builds a ``Docker`` image from it by executing the ``buildspec.yml`` and ``Dockerfile`` files from the repository, 64 | pushes the ``Docker`` image to an ``ECR`` repository, and deploys the ``Docker`` image to ``Elastic Beanstalk`` running ``Docker`` stack. 65 | - http://docs.aws.amazon.com/codebuild/latest/userguide/sample-docker.html 66 | 67 | 68 | 3. **GitHub -> ECR (Docker image)**. 69 | The module gets the code from a ``GitHub`` repository, builds a ``Docker`` image from it by executing the ``buildspec.yml`` and ``Dockerfile`` files from the repository, 70 | and pushes the ``Docker`` image to an ``ECR`` repository. This is used when we want to build a ``Docker`` image from the code and push it to ``ECR`` without deploying to ``Elastic Beanstalk``. 
71 | To activate this mode, don't specify the ``app`` and ``env`` attributes for the module. 72 | - http://docs.aws.amazon.com/codebuild/latest/userguide/sample-docker.html 73 | 74 | # How to use this project 75 | usage: |- 76 | Include this repository as a module in your existing terraform code: 77 | 78 | ```hcl 79 | module "build" { 80 | source = "cloudposse/cicd/aws" 81 | # Cloud Posse recommends pinning every module to a specific version 82 | # version = "x.x.x" 83 | namespace = "eg" 84 | stage = "staging" 85 | name = "app" 86 | 87 | # Enable the pipeline creation 88 | enabled = true 89 | 90 | # Elastic Beanstalk 91 | elastic_beanstalk_application_name = "<(Optional) Elastic Beanstalk application name>" 92 | elastic_beanstalk_environment_name = "<(Optional) Elastic Beanstalk environment name>" 93 | 94 | # Application repository on GitHub 95 | github_oauth_token = "(Required) " 96 | repo_owner = "" 97 | repo_name = "" 98 | branch = "" 99 | 100 | # http://docs.aws.amazon.com/codebuild/latest/userguide/build-env-ref.html 101 | # http://docs.aws.amazon.com/codebuild/latest/userguide/build-spec-ref.html 102 | build_image = "aws/codebuild/standard:2.0" 103 | build_compute_type = "BUILD_GENERAL1_SMALL" 104 | 105 | # These attributes are optional, used as ENV variables when building Docker images and pushing them to ECR 106 | # For more info: 107 | # http://docs.aws.amazon.com/codebuild/latest/userguide/sample-docker.html 108 | # https://www.terraform.io/docs/providers/aws/r/codebuild_project.html 109 | privileged_mode = true 110 | region = "us-east-1" 111 | aws_account_id = "xxxxxxxxxx" 112 | image_repo_name = "ecr-repo-name" 113 | image_tag = "latest" 114 | 115 | # Optional extra environment variables 116 | environment_variables = [{ 117 | name = "JENKINS_URL" 118 | value = "https://jenkins.example.com" 119 | }, 120 | { 121 | name = "COMPANY_NAME" 122 | value = "Amazon" 123 | }, 124 | { 125 | name = "TIME_ZONE" 126 | value = "Pacific/Auckland" 127 | }] 128 | } 129 | 
``` 130 | 131 | # Example usage 132 | examples: |- 133 | ### Example: GitHub, NodeJS, S3 and EB 134 | 135 | This is an example to build a Node app, store the build artifact to an S3 bucket, and then deploy it to Elastic Beanstalk running ``Node`` stack 136 | 137 | 138 | `buildspec.yml` file 139 | 140 | ```yaml 141 | version: 0.2 142 | 143 | phases: 144 | install: 145 | commands: 146 | - echo Starting installation ... 147 | pre_build: 148 | commands: 149 | - echo Installing NPM dependencies... 150 | - npm install 151 | build: 152 | commands: 153 | - echo Build started on `date` 154 | post_build: 155 | commands: 156 | - echo Build completed on `date` 157 | artifacts: 158 | files: 159 | - node_modules/**/* 160 | - public/**/* 161 | - routes/**/* 162 | - views/**/* 163 | - app.js 164 | ``` 165 | 166 | ### Example: GitHub, NodeJS, Docker, ECR and EB 167 | 168 | This is an example to build a ``Docker`` image for a Node app, push the ``Docker`` image to an ECR repository, and then deploy it to Elastic Beanstalk running ``Docker`` stack 169 | 170 | `buildspec.yml` file 171 | 172 | ```yaml 173 | version: 0.2 174 | 175 | phases: 176 | pre_build: 177 | commands: 178 | - echo Logging in to Amazon ECR... 179 | - $(aws ecr get-login --region $AWS_REGION) 180 | build: 181 | commands: 182 | - echo Build started on `date` 183 | - echo Building the Docker image... 184 | - docker build -t $IMAGE_REPO_NAME . 185 | - docker tag $IMAGE_REPO_NAME:$IMAGE_TAG $AWS_ACCOUNT_ID.dkr.ecr.$AWS_REGION.amazonaws.com/$IMAGE_REPO_NAME:$IMAGE_TAG 186 | post_build: 187 | commands: 188 | - echo Build completed on `date` 189 | - echo Pushing the Docker image to ECR... 
190 | - docker push $AWS_ACCOUNT_ID.dkr.ecr.$AWS_REGION.amazonaws.com/$IMAGE_REPO_NAME:$IMAGE_TAG 191 | artifacts: 192 | files: 193 | - '**/*' 194 | ``` 195 | 196 | `Dockerfile` 197 | 198 | ```dockerfile 199 | FROM node:latest 200 | 201 | WORKDIR /usr/src/app 202 | 203 | COPY package.json package-lock.json ./ 204 | RUN npm install 205 | COPY . . 206 | 207 | EXPOSE 8081 208 | CMD [ "npm", "start" ] 209 | ``` 210 | 211 | # How to get started quickly 212 | #quickstart: |- 213 | # Here's how to get started... 214 | 215 | # Other files to include in this README from the project folder 216 | include: [] 217 | contributors: [] 218 | -------------------------------------------------------------------------------- /atmos.yaml: -------------------------------------------------------------------------------- 1 | # Atmos Configuration — powered by https://atmos.tools 2 | # 3 | # This configuration enables centralized, DRY, and consistent project scaffolding using Atmos. 4 | # 5 | # Included features: 6 | # - Organizational custom commands: https://atmos.tools/core-concepts/custom-commands 7 | # - Automated README generation: https://atmos.tools/cli/commands/docs/generate 8 | # 9 | 10 | # Import shared configuration used by all modules 11 | import: 12 | - https://raw.githubusercontent.com/cloudposse/.github/refs/heads/main/.github/atmos/terraform-module.yaml 13 | -------------------------------------------------------------------------------- /context.tf: -------------------------------------------------------------------------------- 1 | # 2 | # ONLY EDIT THIS FILE IN github.com/cloudposse/terraform-null-label 3 | # All other instances of this file should be a copy of that one 4 | # 5 | # 6 | # Copy this file from https://github.com/cloudposse/terraform-null-label/blob/master/exports/context.tf 7 | # and then place it in your Terraform module to automatically get 8 | # Cloud Posse's standard configuration inputs suitable for passing 9 | # to Cloud Posse modules. 
16 | # and access individual variables as `module.this.<var>`, 17 |
not fixed until Terraform 1.0.0, 72 | # but we want the default to be all the labels in `label_order` 73 | # and we want users to be able to prevent all tag generation 74 | # by setting `labels_as_tags` to `[]`, so we need 75 | # a different sentinel to indicate "default" 76 | labels_as_tags = ["unset"] 77 | } 78 | description = <<-EOT 79 | Single object for setting entire context at once. 80 | See description of individual variables for details. 81 | Leave string and numeric variables as `null` to use default value. 82 | Individual variable settings (non-null) override settings in context object, 83 | except for attributes, tags, and additional_tag_map, which are merged. 84 | EOT 85 | 86 | validation { 87 | condition = lookup(var.context, "label_key_case", null) == null ? true : contains(["lower", "title", "upper"], var.context["label_key_case"]) 88 | error_message = "Allowed values: `lower`, `title`, `upper`." 89 | } 90 | 91 | validation { 92 | condition = lookup(var.context, "label_value_case", null) == null ? true : contains(["lower", "title", "upper", "none"], var.context["label_value_case"]) 93 | error_message = "Allowed values: `lower`, `title`, `upper`, `none`." 94 | } 95 | } 96 | 97 | variable "enabled" { 98 | type = bool 99 | default = null 100 | description = "Set to false to prevent the module from creating any resources" 101 | } 102 | 103 | variable "namespace" { 104 | type = string 105 | default = null 106 | description = "ID element. Usually an abbreviation of your organization name, e.g. 'eg' or 'cp', to help ensure generated IDs are globally unique" 107 | } 108 | 109 | variable "tenant" { 110 | type = string 111 | default = null 112 | description = "ID element _(Rarely used, not included by default)_. A customer identifier, indicating who this instance of a resource is for" 113 | } 114 | 115 | variable "environment" { 116 | type = string 117 | default = null 118 | description = "ID element. Usually used for region e.g. 
'uw2', 'us-west-2', OR role 'prod', 'staging', 'dev', 'UAT'" 119 | } 120 | 121 | variable "stage" { 122 | type = string 123 | default = null 124 | description = "ID element. Usually used to indicate role, e.g. 'prod', 'staging', 'source', 'build', 'test', 'deploy', 'release'" 125 | } 126 | 127 | variable "name" { 128 | type = string 129 | default = null 130 | description = <<-EOT 131 | ID element. Usually the component or solution name, e.g. 'app' or 'jenkins'. 132 | This is the only ID element not also included as a `tag`. 133 | The "name" tag is set to the full `id` string. There is no tag with the value of the `name` input. 134 | EOT 135 | } 136 | 137 | variable "delimiter" { 138 | type = string 139 | default = null 140 | description = <<-EOT 141 | Delimiter to be used between ID elements. 142 | Defaults to `-` (hyphen). Set to `""` to use no delimiter at all. 143 | EOT 144 | } 145 | 146 | variable "attributes" { 147 | type = list(string) 148 | default = [] 149 | description = <<-EOT 150 | ID element. Additional attributes (e.g. `workers` or `cluster`) to add to `id`, 151 | in the order they appear in the list. New attributes are appended to the 152 | end of the list. The elements of the list are joined by the `delimiter` 153 | and treated as a single ID element. 154 | EOT 155 | } 156 | 157 | variable "labels_as_tags" { 158 | type = set(string) 159 | default = ["default"] 160 | description = <<-EOT 161 | Set of labels (ID elements) to include as tags in the `tags` output. 162 | Default is to include all labels. 163 | Tags with empty values will not be included in the `tags` output. 164 | Set to `[]` to suppress all generated tags. 165 | **Notes:** 166 | The value of the `name` tag, if included, will be the `id`, not the `name`. 167 | Unlike other `null-label` inputs, the initial setting of `labels_as_tags` cannot be 168 | changed in later chained modules. Attempts to change it will be silently ignored. 
169 | EOT 170 | } 171 | 172 | variable "tags" { 173 | type = map(string) 174 | default = {} 175 | description = <<-EOT 176 | Additional tags (e.g. `{'BusinessUnit': 'XYZ'}`). 177 | Neither the tag keys nor the tag values will be modified by this module. 178 | EOT 179 | } 180 | 181 | variable "additional_tag_map" { 182 | type = map(string) 183 | default = {} 184 | description = <<-EOT 185 | Additional key-value pairs to add to each map in `tags_as_list_of_maps`. Not added to `tags` or `id`. 186 | This is for some rare cases where resources want additional configuration of tags 187 | and therefore take a list of maps with tag key, value, and additional configuration. 188 | EOT 189 | } 190 | 191 | variable "label_order" { 192 | type = list(string) 193 | default = null 194 | description = <<-EOT 195 | The order in which the labels (ID elements) appear in the `id`. 196 | Defaults to ["namespace", "environment", "stage", "name", "attributes"]. 197 | You can omit any of the 6 labels ("tenant" is the 6th), but at least one must be present. 198 | EOT 199 | } 200 | 201 | variable "regex_replace_chars" { 202 | type = string 203 | default = null 204 | description = <<-EOT 205 | Terraform regular expression (regex) string. 206 | Characters matching the regex will be removed from the ID elements. 207 | If not set, `"/[^a-zA-Z0-9-]/"` is used to remove all characters other than hyphens, letters and digits. 208 | EOT 209 | } 210 | 211 | variable "id_length_limit" { 212 | type = number 213 | default = null 214 | description = <<-EOT 215 | Limit `id` to this many characters (minimum 6). 216 | Set to `0` for unlimited length. 217 | Set to `null` for keep the existing setting, which defaults to `0`. 218 | Does not affect `id_full`. 219 | EOT 220 | validation { 221 | condition = var.id_length_limit == null ? true : var.id_length_limit >= 6 || var.id_length_limit == 0 222 | error_message = "The id_length_limit must be >= 6 if supplied (not null), or 0 for unlimited length." 
223 | } 224 | } 225 | 226 | variable "label_key_case" { 227 | type = string 228 | default = null 229 | description = <<-EOT 230 | Controls the letter case of the `tags` keys (label names) for tags generated by this module. 231 | Does not affect keys of tags passed in via the `tags` input. 232 | Possible values: `lower`, `title`, `upper`. 233 | Default value: `title`. 234 | EOT 235 | 236 | validation { 237 | condition = var.label_key_case == null ? true : contains(["lower", "title", "upper"], var.label_key_case) 238 | error_message = "Allowed values: `lower`, `title`, `upper`." 239 | } 240 | } 241 | 242 | variable "label_value_case" { 243 | type = string 244 | default = null 245 | description = <<-EOT 246 | Controls the letter case of ID elements (labels) as included in `id`, 247 | set as tag values, and output by this module individually. 248 | Does not affect values of tags passed in via the `tags` input. 249 | Possible values: `lower`, `title`, `upper` and `none` (no transformation). 250 | Set this to `title` and set `delimiter` to `""` to yield Pascal Case IDs. 251 | Default value: `lower`. 252 | EOT 253 | 254 | validation { 255 | condition = var.label_value_case == null ? true : contains(["lower", "title", "upper", "none"], var.label_value_case) 256 | error_message = "Allowed values: `lower`, `title`, `upper`, `none`." 257 | } 258 | } 259 | 260 | variable "descriptor_formats" { 261 | type = any 262 | default = {} 263 | description = <<-EOT 264 | Describe additional descriptors to be output in the `descriptors` output map. 265 | Map of maps. Keys are names of descriptors. Values are maps of the form 266 | `{ 267 | format = string 268 | labels = list(string) 269 | }` 270 | (Type is `any` so the map values can later be enhanced to provide additional options.) 271 | `format` is a Terraform format string to be passed to the `format()` function. 272 | `labels` is a list of labels, in order, to pass to `format()` function. 
16 | # and access individual variables as `module.this.<var>`, 17 |
21 | # 22 | 23 | module "this" { 24 | source = "cloudposse/label/null" 25 | version = "0.25.0" # requires Terraform >= 0.13.0 26 | 27 | enabled = var.enabled 28 | namespace = var.namespace 29 | tenant = var.tenant 30 | environment = var.environment 31 | stage = var.stage 32 | name = var.name 33 | delimiter = var.delimiter 34 | attributes = var.attributes 35 | tags = var.tags 36 | additional_tag_map = var.additional_tag_map 37 | label_order = var.label_order 38 | regex_replace_chars = var.regex_replace_chars 39 | id_length_limit = var.id_length_limit 40 | label_key_case = var.label_key_case 41 | label_value_case = var.label_value_case 42 | descriptor_formats = var.descriptor_formats 43 | labels_as_tags = var.labels_as_tags 44 | 45 | context = var.context 46 | } 47 | 48 | # Copy contents of cloudposse/terraform-null-label/variables.tf here 49 | 50 | variable "context" { 51 | type = any 52 | default = { 53 | enabled = true 54 | namespace = null 55 | tenant = null 56 | environment = null 57 | stage = null 58 | name = null 59 | delimiter = null 60 | attributes = [] 61 | tags = {} 62 | additional_tag_map = {} 63 | regex_replace_chars = null 64 | label_order = [] 65 | id_length_limit = null 66 | label_key_case = null 67 | label_value_case = null 68 | descriptor_formats = {} 69 | # Note: we have to use [] instead of null for unset lists due to 70 | # https://github.com/hashicorp/terraform/issues/28137 71 | # which was not fixed until Terraform 1.0.0, 72 | # but we want the default to be all the labels in `label_order` 73 | # and we want users to be able to prevent all tag generation 74 | # by setting `labels_as_tags` to `[]`, so we need 75 | # a different sentinel to indicate "default" 76 | labels_as_tags = ["unset"] 77 | } 78 | description = <<-EOT 79 | Single object for setting entire context at once. 80 | See description of individual variables for details. 81 | Leave string and numeric variables as `null` to use default value. 
82 | Individual variable settings (non-null) override settings in context object, 83 | except for attributes, tags, and additional_tag_map, which are merged. 84 | EOT 85 | 86 | validation { 87 | condition = lookup(var.context, "label_key_case", null) == null ? true : contains(["lower", "title", "upper"], var.context["label_key_case"]) 88 | error_message = "Allowed values: `lower`, `title`, `upper`." 89 | } 90 | 91 | validation { 92 | condition = lookup(var.context, "label_value_case", null) == null ? true : contains(["lower", "title", "upper", "none"], var.context["label_value_case"]) 93 | error_message = "Allowed values: `lower`, `title`, `upper`, `none`." 94 | } 95 | } 96 | 97 | variable "enabled" { 98 | type = bool 99 | default = null 100 | description = "Set to false to prevent the module from creating any resources" 101 | } 102 | 103 | variable "namespace" { 104 | type = string 105 | default = null 106 | description = "ID element. Usually an abbreviation of your organization name, e.g. 'eg' or 'cp', to help ensure generated IDs are globally unique" 107 | } 108 | 109 | variable "tenant" { 110 | type = string 111 | default = null 112 | description = "ID element _(Rarely used, not included by default)_. A customer identifier, indicating who this instance of a resource is for" 113 | } 114 | 115 | variable "environment" { 116 | type = string 117 | default = null 118 | description = "ID element. Usually used for region e.g. 'uw2', 'us-west-2', OR role 'prod', 'staging', 'dev', 'UAT'" 119 | } 120 | 121 | variable "stage" { 122 | type = string 123 | default = null 124 | description = "ID element. Usually used to indicate role, e.g. 'prod', 'staging', 'source', 'build', 'test', 'deploy', 'release'" 125 | } 126 | 127 | variable "name" { 128 | type = string 129 | default = null 130 | description = <<-EOT 131 | ID element. Usually the component or solution name, e.g. 'app' or 'jenkins'. 132 | This is the only ID element not also included as a `tag`. 
133 | The "name" tag is set to the full `id` string. There is no tag with the value of the `name` input. 134 | EOT 135 | } 136 | 137 | variable "delimiter" { 138 | type = string 139 | default = null 140 | description = <<-EOT 141 | Delimiter to be used between ID elements. 142 | Defaults to `-` (hyphen). Set to `""` to use no delimiter at all. 143 | EOT 144 | } 145 | 146 | variable "attributes" { 147 | type = list(string) 148 | default = [] 149 | description = <<-EOT 150 | ID element. Additional attributes (e.g. `workers` or `cluster`) to add to `id`, 151 | in the order they appear in the list. New attributes are appended to the 152 | end of the list. The elements of the list are joined by the `delimiter` 153 | and treated as a single ID element. 154 | EOT 155 | } 156 | 157 | variable "labels_as_tags" { 158 | type = set(string) 159 | default = ["default"] 160 | description = <<-EOT 161 | Set of labels (ID elements) to include as tags in the `tags` output. 162 | Default is to include all labels. 163 | Tags with empty values will not be included in the `tags` output. 164 | Set to `[]` to suppress all generated tags. 165 | **Notes:** 166 | The value of the `name` tag, if included, will be the `id`, not the `name`. 167 | Unlike other `null-label` inputs, the initial setting of `labels_as_tags` cannot be 168 | changed in later chained modules. Attempts to change it will be silently ignored. 169 | EOT 170 | } 171 | 172 | variable "tags" { 173 | type = map(string) 174 | default = {} 175 | description = <<-EOT 176 | Additional tags (e.g. `{'BusinessUnit': 'XYZ'}`). 177 | Neither the tag keys nor the tag values will be modified by this module. 178 | EOT 179 | } 180 | 181 | variable "additional_tag_map" { 182 | type = map(string) 183 | default = {} 184 | description = <<-EOT 185 | Additional key-value pairs to add to each map in `tags_as_list_of_maps`. Not added to `tags` or `id`. 
186 | This is for some rare cases where resources want additional configuration of tags 187 | and therefore take a list of maps with tag key, value, and additional configuration. 188 | EOT 189 | } 190 | 191 | variable "label_order" { 192 | type = list(string) 193 | default = null 194 | description = <<-EOT 195 | The order in which the labels (ID elements) appear in the `id`. 196 | Defaults to ["namespace", "environment", "stage", "name", "attributes"]. 197 | You can omit any of the 6 labels ("tenant" is the 6th), but at least one must be present. 198 | EOT 199 | } 200 | 201 | variable "regex_replace_chars" { 202 | type = string 203 | default = null 204 | description = <<-EOT 205 | Terraform regular expression (regex) string. 206 | Characters matching the regex will be removed from the ID elements. 207 | If not set, `"/[^a-zA-Z0-9-]/"` is used to remove all characters other than hyphens, letters and digits. 208 | EOT 209 | } 210 | 211 | variable "id_length_limit" { 212 | type = number 213 | default = null 214 | description = <<-EOT 215 | Limit `id` to this many characters (minimum 6). 216 | Set to `0` for unlimited length. 217 | Set to `null` for keep the existing setting, which defaults to `0`. 218 | Does not affect `id_full`. 219 | EOT 220 | validation { 221 | condition = var.id_length_limit == null ? true : var.id_length_limit >= 6 || var.id_length_limit == 0 222 | error_message = "The id_length_limit must be >= 6 if supplied (not null), or 0 for unlimited length." 223 | } 224 | } 225 | 226 | variable "label_key_case" { 227 | type = string 228 | default = null 229 | description = <<-EOT 230 | Controls the letter case of the `tags` keys (label names) for tags generated by this module. 231 | Does not affect keys of tags passed in via the `tags` input. 232 | Possible values: `lower`, `title`, `upper`. 233 | Default value: `title`. 234 | EOT 235 | 236 | validation { 237 | condition = var.label_key_case == null ? 
true : contains(["lower", "title", "upper"], var.label_key_case) 238 | error_message = "Allowed values: `lower`, `title`, `upper`." 239 | } 240 | } 241 | 242 | variable "label_value_case" { 243 | type = string 244 | default = null 245 | description = <<-EOT 246 | Controls the letter case of ID elements (labels) as included in `id`, 247 | set as tag values, and output by this module individually. 248 | Does not affect values of tags passed in via the `tags` input. 249 | Possible values: `lower`, `title`, `upper` and `none` (no transformation). 250 | Set this to `title` and set `delimiter` to `""` to yield Pascal Case IDs. 251 | Default value: `lower`. 252 | EOT 253 | 254 | validation { 255 | condition = var.label_value_case == null ? true : contains(["lower", "title", "upper", "none"], var.label_value_case) 256 | error_message = "Allowed values: `lower`, `title`, `upper`, `none`." 257 | } 258 | } 259 | 260 | variable "descriptor_formats" { 261 | type = any 262 | default = {} 263 | description = <<-EOT 264 | Describe additional descriptors to be output in the `descriptors` output map. 265 | Map of maps. Keys are names of descriptors. Values are maps of the form 266 | `{ 267 | format = string 268 | labels = list(string) 269 | }` 270 | (Type is `any` so the map values can later be enhanced to provide additional options.) 271 | `format` is a Terraform format string to be passed to the `format()` function. 272 | `labels` is a list of labels, in order, to pass to `format()` function. 273 | Label values will be normalized before being passed to `format()` so they will be 274 | identical to how they appear in `id`. 275 | Default is `{}` (`descriptors` output will be empty). 
276 | EOT 277 | } 278 | 279 | #### End of copy of cloudposse/terraform-null-label/variables.tf 280 | -------------------------------------------------------------------------------- /examples/complete/fixtures.us-east-2.tfvars: -------------------------------------------------------------------------------- 1 | region = "us-east-2" 2 | 3 | namespace = "eg" 4 | 5 | stage = "test" 6 | 7 | name = "cicd" 8 | 9 | github_oauth_token = "test" 10 | 11 | repo_owner = "cloudposse" 12 | 13 | repo_name = "terraform-aws-cicd" 14 | 15 | branch = "master" 16 | 17 | poll_source_changes = false 18 | 19 | codebuild_cache_bucket_suffix_enabled = false 20 | 21 | force_destroy = true 22 | 23 | environment_variables = [ 24 | { 25 | name = "APP_URL" 26 | value = "https://app.example.com" 27 | type = "PLAINTEXT" 28 | }, 29 | { 30 | name = "COMPANY_NAME" 31 | value = "Cloud Posse" 32 | type = "PLAINTEXT" 33 | }, 34 | { 35 | name = "TIME_ZONE" 36 | value = "America/Los_Angeles" 37 | type = "PLAINTEXT" 38 | } 39 | ] 40 | 41 | cache_type = "S3" 42 | -------------------------------------------------------------------------------- /examples/complete/main.tf: -------------------------------------------------------------------------------- 1 | provider "aws" { 2 | region = var.region 3 | } 4 | 5 | module "cicd" { 6 | source = "../../" 7 | region = var.region 8 | github_oauth_token = var.github_oauth_token 9 | repo_owner = var.repo_owner 10 | repo_name = var.repo_name 11 | branch = var.branch 12 | poll_source_changes = var.poll_source_changes 13 | environment_variables = var.environment_variables 14 | codebuild_cache_bucket_suffix_enabled = var.codebuild_cache_bucket_suffix_enabled 15 | force_destroy = var.force_destroy 16 | cache_type = var.cache_type 17 | 18 | context = module.this.context 19 | } 20 | -------------------------------------------------------------------------------- /examples/complete/outputs.tf: -------------------------------------------------------------------------------- 1 | 
output "codebuild_project_name" { 2 | description = "CodeBuild project name" 3 | value = module.cicd.codebuild_project_name 4 | } 5 | 6 | output "codebuild_project_id" { 7 | description = "CodeBuild project ID" 8 | value = module.cicd.codebuild_project_id 9 | } 10 | 11 | output "codebuild_role_id" { 12 | description = "CodeBuild IAM Role ID" 13 | value = module.cicd.codebuild_role_id 14 | } 15 | 16 | output "codebuild_role_arn" { 17 | description = "CodeBuild IAM Role ARN" 18 | value = module.cicd.codebuild_role_arn 19 | } 20 | 21 | output "codebuild_cache_bucket_name" { 22 | description = "CodeBuild cache S3 bucket name" 23 | value = module.cicd.codebuild_cache_bucket_name 24 | } 25 | 26 | output "codebuild_cache_bucket_arn" { 27 | description = "CodeBuild cache S3 bucket ARN" 28 | value = module.cicd.codebuild_cache_bucket_arn 29 | } 30 | 31 | output "codebuild_badge_url" { 32 | description = "The URL of the build badge when badge_enabled is enabled" 33 | value = module.cicd.codebuild_badge_url 34 | } 35 | 36 | output "codepipeline_id" { 37 | description = "CodePipeline ID" 38 | value = module.cicd.codepipeline_id 39 | } 40 | 41 | output "codepipeline_arn" { 42 | description = "CodePipeline ARN" 43 | value = module.cicd.codepipeline_arn 44 | } 45 | -------------------------------------------------------------------------------- /examples/complete/variables.tf: -------------------------------------------------------------------------------- 1 | variable "region" { 2 | type = string 3 | description = "AWS region" 4 | } 5 | 6 | variable "github_oauth_token" { 7 | type = string 8 | description = "GitHub Oauth Token" 9 | } 10 | 11 | variable "repo_owner" { 12 | type = string 13 | description = "GitHub Organization or Person name" 14 | } 15 | 16 | variable "repo_name" { 17 | type = string 18 | description = "GitHub repository name of the application to be built (and deployed to Elastic Beanstalk if configured)" 19 | } 20 | 21 | variable "branch" { 22 | type = string 23 
| description = "Branch of the GitHub repository, _e.g._ `master`" 24 | } 25 | 26 | variable "poll_source_changes" { 27 | type = bool 28 | description = "Periodically check the location of your source content and run the pipeline if changes are detected" 29 | } 30 | 31 | variable "environment_variables" { 32 | type = list(object( 33 | { 34 | name = string 35 | value = string 36 | type = string 37 | })) 38 | 39 | default = [ 40 | { 41 | name = "NO_ADDITIONAL_BUILD_VARS" 42 | value = "TRUE" 43 | type = "PLAINTEXT" 44 | }] 45 | 46 | description = "A list of maps, that contain the keys 'name', 'value', and 'type' to be used as additional environment variables for the build. Valid types are 'PLAINTEXT', 'PARAMETER_STORE', or 'SECRETS_MANAGER'" 47 | } 48 | 49 | variable "codebuild_cache_bucket_suffix_enabled" { 50 | type = bool 51 | description = "The cache bucket generates a random 13 character string to generate a unique bucket name. If set to false it uses terraform-null-label's id value" 52 | } 53 | 54 | variable "force_destroy" { 55 | type = bool 56 | description = "Force destroy the CI/CD S3 bucket even if it's not empty" 57 | } 58 | 59 | variable "cache_type" { 60 | type = string 61 | description = "The type of storage that will be used for the AWS CodeBuild project cache. Valid values: NO_CACHE, LOCAL, and S3. Defaults to NO_CACHE. 
If cache_type is S3, it will create an S3 bucket for storing codebuild cache inside"
}
--------------------------------------------------------------------------------
/examples/complete/versions.tf:
--------------------------------------------------------------------------------
terraform {
  required_version = ">= 1.3"

  required_providers {
    aws = {
      source  = "hashicorp/aws"
      version = ">= 5.0"
    }
    random = {
      source  = "hashicorp/random"
      version = ">= 2.1"
    }
  }
}
--------------------------------------------------------------------------------
/main.tf:
--------------------------------------------------------------------------------
data "aws_caller_identity" "default" {
}

data "aws_region" "default" {
}

locals {
  enabled = module.this.enabled

  # `expr ? true : false` is redundant on a boolean expression
  webhook_enabled = local.enabled && var.webhook_enabled
  webhook_count   = local.webhook_enabled ? 1 : 0
  webhook_secret  = join("", random_password.webhook_secret[*].result)
  webhook_url     = join("", aws_codepipeline_webhook.default[*].url)
}

# S3 bucket used by CodePipeline as its artifact store.
#
# NOTE: the inline `acl`, `versioning`, `logging` and
# `server_side_encryption_configuration` arguments were removed from
# `aws_s3_bucket` in AWS provider v5 (this module pins `aws >= 5.0`), so those
# settings are managed with the dedicated resources below. New buckets are
# private by default, which preserves the previous `acl = "private"` behavior.
resource "aws_s3_bucket" "default" {
  #bridgecrew:skip=BC_AWS_S3_13:Skipping `Enable S3 Bucket Logging` check until bridgecrew will support dynamic blocks (https://github.com/bridgecrewio/checkov/issues/776).
  #bridgecrew:skip=BC_AWS_S3_14:Skipping `Ensure all data stored in the S3 bucket is securely encrypted at rest` check until bridgecrew will support dynamic blocks (https://github.com/bridgecrewio/checkov/issues/776).
  #bridgecrew:skip=CKV_AWS_52:Skipping `Ensure S3 bucket has MFA delete enabled` due to issue in terraform (https://github.com/hashicorp/terraform-provider-aws/issues/629).
  count         = local.enabled ? 1 : 0
  bucket        = module.this.id
  force_destroy = var.force_destroy
  tags          = module.this.tags
}

resource "aws_s3_bucket_versioning" "default" {
  count  = local.enabled ? 1 : 0
  bucket = join("", aws_s3_bucket.default[*].id)

  versioning_configuration {
    status = var.versioning_enabled ? "Enabled" : "Suspended"
  }
}

# Access logging is only configured when a target bucket name is provided
resource "aws_s3_bucket_logging" "default" {
  count = local.enabled && var.access_log_bucket_name != "" ? 1 : 0

  bucket        = join("", aws_s3_bucket.default[*].id)
  target_bucket = var.access_log_bucket_name
  target_prefix = "logs/${module.this.id}/"
}

resource "aws_s3_bucket_server_side_encryption_configuration" "default" {
  count  = local.enabled && var.s3_bucket_encryption_enabled ? 1 : 0
  bucket = join("", aws_s3_bucket.default[*].id)

  rule {
    apply_server_side_encryption_by_default {
      sse_algorithm = "AES256"
    }
  }
}

# IAM role assumed by the CodePipeline service
resource "aws_iam_role" "default" {
  count              = local.enabled ? 1 : 0
  name               = module.this.id
  assume_role_policy = join("", data.aws_iam_policy_document.assume[*].json)
  tags               = module.this.tags
}

data "aws_iam_policy_document" "assume" {
  count = local.enabled ? 1 : 0

  statement {
    sid = ""

    actions = [
      "sts:AssumeRole"
    ]

    principals {
      type        = "Service"
      identifiers = ["codepipeline.amazonaws.com"]
    }

    effect = "Allow"
  }
}

resource "aws_iam_role_policy_attachment" "default" {
  count      = local.enabled ? 1 : 0
  role       = join("", aws_iam_role.default[*].id)
  policy_arn = join("", aws_iam_policy.default[*].arn)
}

resource "aws_iam_policy" "default" {
  count  = local.enabled ? 1 : 0
  name   = module.this.id
  policy = join("", data.aws_iam_policy_document.default[*].json)
}

# Broad deploy-time permissions for the pipeline role (Elastic Beanstalk and
# the services an EB environment may manage on its behalf)
data "aws_iam_policy_document" "default" {
  count = local.enabled ? 1 : 0

  statement {
    sid = ""

    actions = [
      "elasticbeanstalk:*",
      "ec2:*",
      "elasticloadbalancing:*",
      "autoscaling:*",
      "cloudwatch:*",
      "s3:*",
      "sns:*",
      "cloudformation:*",
      "rds:*",
      "sqs:*",
      "ecs:*",
      "iam:PassRole",
      "logs:PutRetentionPolicy",
    ]

    resources = ["*"]
    effect    = "Allow"
  }
}

resource "aws_iam_role_policy_attachment" "s3" {
  count      = local.enabled ? 1 : 0
  role       = join("", aws_iam_role.default[*].id)
  policy_arn = join("", aws_iam_policy.s3[*].arn)
}

resource "aws_iam_policy" "s3" {
  count  = local.enabled ? 1 : 0
  name   = "${module.this.id}-s3"
  policy = join("", data.aws_iam_policy_document.s3[*].json)
}

# Target bucket for the optional "deploy website to S3" stage
data "aws_s3_bucket" "website" {
  count  = local.enabled && var.website_bucket_name != "" ? 1 : 0
  bucket = var.website_bucket_name
}

data "aws_iam_policy_document" "s3" {
  count = local.enabled ? 1 : 0

  statement {
    sid = ""

    actions = [
      "s3:GetObject",
      "s3:GetObjectVersion",
      "s3:GetBucketVersioning",
      "s3:PutObject",
    ]

    resources = [
      join("", aws_s3_bucket.default[*].arn),
      "${join("", aws_s3_bucket.default[*].arn)}/*",
      "arn:aws:s3:::elasticbeanstalk*"
    ]

    effect = "Allow"
  }

  # Extra grant for the website bucket, only when the S3 deploy stage is used
  dynamic "statement" {
    for_each = var.website_bucket_name != "" ? ["true"] : []
    content {
      sid = ""

      actions = [
        "s3:GetObject",
        "s3:GetObjectVersion",
        "s3:GetBucketVersioning",
        "s3:PutObject",
        "s3:PutObjectAcl",
      ]

      resources = [
        join("", data.aws_s3_bucket.website[*].arn),
        "${join("", data.aws_s3_bucket.website[*].arn)}/*"
      ]

      effect = "Allow"
    }
  }
}

resource "aws_iam_role_policy_attachment" "codebuild" {
  count      = local.enabled ? 1 : 0
  role       = join("", aws_iam_role.default[*].id)
  policy_arn = join("", aws_iam_policy.codebuild[*].arn)
}

resource "aws_iam_policy" "codebuild" {
  count  = local.enabled ? 1 : 0
  name   = "${module.this.id}-codebuild"
  policy = join("", data.aws_iam_policy_document.codebuild[*].json)
}

data "aws_iam_policy_document" "codebuild" {
  count = local.enabled ? 1 : 0

  statement {
    sid = ""

    actions = [
      "codebuild:*"
    ]

    resources = [module.codebuild.project_id]
    effect    = "Allow"
  }
}

module "codebuild" {
  source                      = "cloudposse/codebuild/aws"
  version                     = "2.0.1"
  build_image                 = var.build_image
  build_compute_type          = var.build_compute_type
  buildspec                   = var.buildspec
  attributes                  = ["build"]
  privileged_mode             = var.privileged_mode
  aws_region                  = var.region != "" ? var.region : data.aws_region.default.name
  aws_account_id              = var.aws_account_id != "" ? var.aws_account_id : data.aws_caller_identity.default.account_id
  image_repo_name             = var.image_repo_name
  image_tag                   = var.image_tag
  github_token                = var.github_oauth_token
  github_token_type           = "PLAINTEXT"
  environment_variables       = var.environment_variables
  cache_bucket_suffix_enabled = var.codebuild_cache_bucket_suffix_enabled
  cache_type                  = var.cache_type

  context = module.this.context
}

resource "aws_iam_role_policy_attachment" "codebuild_s3" {
  count      = local.enabled ? 1 : 0
  role       = module.codebuild.role_id
  policy_arn = join("", aws_iam_policy.s3[*].arn)
}

# The pipeline always has a Source (GitHub) and a Build (CodeBuild) stage.
# A Deploy stage is appended dynamically in one of two flavors:
#
#   1. Elastic Beanstalk deploy -- when both `elastic_beanstalk_application_name`
#      and `elastic_beanstalk_environment_name` are set
#      (GitHub -> S3/ECR -> Elastic Beanstalk)
#   2. S3 website deploy -- when `website_bucket_name` is set
#      (GitHub -> build -> static site bucket)
#
# With neither set, the pipeline is Source -> Build only (e.g. GitHub -> ECR).
#
# WARNING: both dynamic stages are named "Deploy", so the two options are
# mutually exclusive -- enabling both would create a pipeline with duplicate
# stage names, which AWS rejects.
resource "aws_codepipeline" "default" {
  count    = local.enabled ? 1 : 0
  name     = module.this.id
  role_arn = join("", aws_iam_role.default[*].arn)
  tags     = module.this.tags

  artifact_store {
    location = join("", aws_s3_bucket.default[*].bucket)
    type     = "S3"
  }

  stage {
    name = "Source"

    action {
      name             = "Source"
      category         = "Source"
      owner            = "ThirdParty"
      provider         = "GitHub"
      version          = "1"
      output_artifacts = ["code"]

      configuration = {
        OAuthToken           = var.github_oauth_token
        Owner                = var.repo_owner
        Repo                 = var.repo_name
        Branch               = var.branch
        PollForSourceChanges = var.poll_source_changes
      }
    }
  }

  stage {
    name = "Build"

    action {
      name     = "Build"
      category = "Build"
      owner    = "AWS"
      provider = "CodeBuild"
      version  = "1"

      input_artifacts  = ["code"]
      output_artifacts = ["package"]

      configuration = {
        ProjectName = module.codebuild.project_name
      }
    }
  }

  dynamic "stage" {
    for_each = var.elastic_beanstalk_application_name != "" && var.elastic_beanstalk_environment_name != "" ? ["true"] : []
    content {
      name = "Deploy"

      action {
        name            = "Deploy"
        category        = "Deploy"
        owner           = "AWS"
        provider        = "ElasticBeanstalk"
        input_artifacts = ["package"]
        version         = "1"

        configuration = {
          ApplicationName = var.elastic_beanstalk_application_name
          EnvironmentName = var.elastic_beanstalk_environment_name
        }
      }
    }
  }

  dynamic "stage" {
    for_each = var.website_bucket_name != "" ? ["true"] : []
    content {
      name = "Deploy"

      action {
        name            = "Deploy"
        category        = "Deploy"
        owner           = "AWS"
        provider        = "S3"
        input_artifacts = ["package"]
        version         = "1"

        configuration = {
          BucketName = var.website_bucket_name
          Extract    = "true"
          CannedACL  = var.website_bucket_acl
        }
      }
    }
  }
}

resource "random_password" "webhook_secret" {
  count  = local.webhook_enabled ? 1 : 0
  length = 32

  # Special characters are not allowed in webhook secret (AWS silently ignores webhook callbacks)
  special = false
}

resource "aws_codepipeline_webhook" "default" {
  count           = local.webhook_count
  name            = module.this.id
  authentication  = var.webhook_authentication
  target_action   = var.webhook_target_action
  target_pipeline = join("", aws_codepipeline.default[*].name)

  authentication_configuration {
    secret_token = local.webhook_secret
  }

  filter {
    json_path    = var.webhook_filter_json_path
    match_equals = var.webhook_filter_match_equals
  }
}

# Registers the CodePipeline webhook URL with the GitHub repository
module "github_webhook" {
  source  = "cloudposse/repository-webhooks/github"
  version = "0.12.1"

  enabled              = local.webhook_enabled
  github_organization  = var.repo_owner
  github_repositories  = [var.repo_name]
  github_token         = var.github_webhooks_token
  webhook_url          = local.webhook_url
  webhook_secret       = local.webhook_secret
  webhook_content_type = "json"
  events               = var.github_webhook_events

  context = module.this.context
}
--------------------------------------------------------------------------------
/outputs.tf:
--------------------------------------------------------------------------------
output "codebuild_project_name" {
  description = "CodeBuild project name"
  value       = module.codebuild.project_name
}

output "codebuild_project_id" {
  description = "CodeBuild project ID"
  value       = module.codebuild.project_id
}

output "codebuild_role_id" {
  description = "CodeBuild IAM Role ID"
  value       = module.codebuild.role_id
}

output "codebuild_role_arn" {
  description = "CodeBuild IAM Role ARN"
  value       = module.codebuild.role_arn
}

output "codebuild_cache_bucket_name" {
  description = "CodeBuild cache S3 bucket name"
  value       = module.codebuild.cache_bucket_name
}

output "codebuild_cache_bucket_arn" {
  description = "CodeBuild cache S3 bucket ARN"
  value       = module.codebuild.cache_bucket_arn
}

output "codebuild_badge_url" {
  description = "The URL of the build badge when badge_enabled is enabled"
  value       = module.codebuild.badge_url
}

output "codepipeline_id" {
  description = "CodePipeline ID"
  value       = join("", aws_codepipeline.default[*].id)
}

output "codepipeline_arn" {
  description = "CodePipeline ARN"
  value       = join("", aws_codepipeline.default[*].arn)
}
--------------------------------------------------------------------------------
/test/.gitignore:
--------------------------------------------------------------------------------
.test-harness
--------------------------------------------------------------------------------
/test/Makefile:
--------------------------------------------------------------------------------
TEST_HARNESS ?= https://github.com/cloudposse/test-harness.git
TEST_HARNESS_BRANCH ?= master
TEST_HARNESS_PATH = $(realpath .test-harness)
BATS_ARGS ?= --tap
BATS_LOG ?= test.log

# Define a macro to run the tests
define RUN_TESTS
	@echo "Running tests in $(1)"
	@cd $(1) && bats $(BATS_ARGS) $(addsuffix .bats,$(addprefix $(TEST_HARNESS_PATH)/test/terraform/,$(TESTS)))
endef

default: all

-include Makefile.*

## Provision the test-harness
.test-harness:
	[ -d $@ ] || git clone --depth=1 -b $(TEST_HARNESS_BRANCH) $(TEST_HARNESS) $@

## Initialize the tests
init: .test-harness

## Install all dependencies (OS specific)
deps::
	@exit 0

## Clean up the test harness
# Use POSIX `=` (not the bash-only `==`) so this works under /bin/sh;
# the guard prevents `rm -rf /` when the harness path resolves to the root
clean:
	[ "$(TEST_HARNESS_PATH)" = "/" ] || rm -rf $(TEST_HARNESS_PATH)

## Run all tests
all: module examples/complete

## Run basic sanity checks against the module itself
module: export TESTS ?= installed lint module-pinning provider-pinning validate terraform-docs input-descriptions output-descriptions
module: deps
	$(call RUN_TESTS, ../)

## Run tests against example
examples/complete: export TESTS ?= installed lint validate
examples/complete: deps
	$(call RUN_TESTS, ../$@)
--------------------------------------------------------------------------------
/test/Makefile.alpine:
--------------------------------------------------------------------------------
ifneq (,$(wildcard /sbin/apk))
## Install all dependencies for alpine
deps:: init
	@apk add --update terraform-docs@cloudposse json2hcl@cloudposse
endif
--------------------------------------------------------------------------------
/test/src/.gitignore:
--------------------------------------------------------------------------------
.gopath
vendor/
--------------------------------------------------------------------------------
/test/src/Makefile:
--------------------------------------------------------------------------------
export TERRAFORM_VERSION ?= $(shell curl -s https://checkpoint-api.hashicorp.com/v1/check/terraform | jq -r -M '.current_version' | cut -d. -f1)

.DEFAULT_GOAL : all
.PHONY: all

## Default target
all: test

.PHONY : init
## Initialize tests
init:
	@exit 0

.PHONY : test
## Run tests
test: init
	go mod download
	go test -v -timeout 60m

## Run tests in docker container
docker/test:
	docker run --name terratest --rm -it -e AWS_ACCESS_KEY_ID -e AWS_SECRET_ACCESS_KEY -e AWS_SESSION_TOKEN -e GITHUB_TOKEN \
	-e PATH="/usr/local/terraform/$(TERRAFORM_VERSION)/bin:/go/bin:/usr/local/go/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin" \
	-v $(CURDIR)/../../:/module/ cloudposse/test-harness:latest -C /module/test/src test

.PHONY : clean
## Clean up files
clean:
	rm -rf ../../examples/complete/*.tfstate*
--------------------------------------------------------------------------------
/test/src/examples_complete_test.go:
--------------------------------------------------------------------------------
package test

import (
	"testing"

	"github.com/gruntwork-io/terratest/modules/terraform"
	"github.com/stretchr/testify/assert"
)

// Test the Terraform module in examples/complete using Terratest.
11 | func TestExamplesComplete(t *testing.T) { 12 | t.Parallel() 13 | 14 | terraformOptions := &terraform.Options{ 15 | // The path to where our Terraform code is located 16 | TerraformDir: "../../examples/complete", 17 | Upgrade: true, 18 | // Variables to pass to our Terraform code using -var-file options 19 | VarFiles: []string{"fixtures.us-east-2.tfvars"}, 20 | } 21 | 22 | // At the end of the test, run `terraform destroy` to clean up any resources that were created 23 | defer terraform.Destroy(t, terraformOptions) 24 | 25 | // This will run `terraform init` and `terraform apply` and fail the test if there are any errors 26 | terraform.InitAndApply(t, terraformOptions) 27 | 28 | // Run `terraform output` to get the value of an output variable 29 | codebuildProjectName := terraform.Output(t, terraformOptions, "codebuild_project_name") 30 | // Verify we're getting back the outputs we expect 31 | assert.Equal(t, "eg-test-cicd-build", codebuildProjectName) 32 | 33 | // Run `terraform output` to get the value of an output variable 34 | codebuildCacheS3BucketName := terraform.Output(t, terraformOptions, "codebuild_cache_bucket_name") 35 | // Verify we're getting back the outputs we expect 36 | assert.Equal(t, "eg-test-cicd-build", codebuildCacheS3BucketName) 37 | 38 | // Run `terraform output` to get the value of an output variable 39 | codepipelineId := terraform.Output(t, terraformOptions, "codepipeline_id") 40 | // Verify we're getting back the outputs we expect 41 | assert.Equal(t, "eg-test-cicd", codepipelineId) 42 | } 43 | -------------------------------------------------------------------------------- /test/src/go.mod: -------------------------------------------------------------------------------- 1 | module github.com/cloudposse/terraform-aws-efs 2 | 3 | go 1.17 4 | 5 | require ( 6 | github.com/gruntwork-io/terratest v0.39.0 7 | github.com/stretchr/testify v1.7.0 8 | ) 9 | 10 | require ( 11 | cloud.google.com/go v0.104.0 // indirect 12 | 
cloud.google.com/go/compute v1.10.0 // indirect 13 | cloud.google.com/go/iam v0.5.0 // indirect 14 | cloud.google.com/go/storage v1.27.0 // indirect 15 | github.com/agext/levenshtein v1.2.3 // indirect 16 | github.com/apparentlymart/go-textseg/v13 v13.0.0 // indirect 17 | github.com/aws/aws-sdk-go v1.44.122 // indirect 18 | github.com/bgentry/go-netrc v0.0.0-20140422174119-9fd32a8b3d3d // indirect 19 | github.com/davecgh/go-spew v1.1.1 // indirect 20 | github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da // indirect 21 | github.com/golang/protobuf v1.5.2 // indirect 22 | github.com/google/go-cmp v0.5.9 // indirect 23 | github.com/google/uuid v1.3.0 // indirect 24 | github.com/googleapis/enterprise-certificate-proxy v0.2.0 // indirect 25 | github.com/googleapis/gax-go/v2 v2.6.0 // indirect 26 | github.com/hashicorp/errwrap v1.0.0 // indirect 27 | github.com/hashicorp/go-cleanhttp v0.5.2 // indirect 28 | github.com/hashicorp/go-getter v1.7.0 // indirect 29 | github.com/hashicorp/go-multierror v1.1.0 // indirect 30 | github.com/hashicorp/go-safetemp v1.0.0 // indirect 31 | github.com/hashicorp/go-version v1.6.0 // indirect 32 | github.com/hashicorp/hcl/v2 v2.9.1 // indirect 33 | github.com/hashicorp/terraform-json v0.13.0 // indirect 34 | github.com/jinzhu/copier v0.0.0-20190924061706-b57f9002281a // indirect 35 | github.com/jmespath/go-jmespath v0.4.0 // indirect 36 | github.com/klauspost/compress v1.15.11 // indirect 37 | github.com/mattn/go-zglob v0.0.2-0.20190814121620-e3c945676326 // indirect 38 | github.com/mitchellh/go-homedir v1.1.0 // indirect 39 | github.com/mitchellh/go-testing-interface v1.14.1 // indirect 40 | github.com/mitchellh/go-wordwrap v1.0.1 // indirect 41 | github.com/pmezard/go-difflib v1.0.0 // indirect 42 | github.com/tmccombs/hcl2json v0.3.3 // indirect 43 | github.com/ulikunitz/xz v0.5.10 // indirect 44 | github.com/zclconf/go-cty v1.9.1 // indirect 45 | go.opencensus.io v0.23.0 // indirect 46 | golang.org/x/crypto v0.21.0 // 
indirect
	golang.org/x/net v0.23.0 // indirect
	golang.org/x/oauth2 v0.1.0 // indirect
	golang.org/x/sys v0.18.0 // indirect
	golang.org/x/text v0.14.0 // indirect
	golang.org/x/xerrors v0.0.0-20220907171357-04be3eba64a2 // indirect
	google.golang.org/api v0.100.0 // indirect
	google.golang.org/appengine v1.6.7 // indirect
	google.golang.org/genproto v0.0.0-20221025140454-527a21cfbd71 // indirect
	google.golang.org/grpc v1.50.1 // indirect
	google.golang.org/protobuf v1.33.0 // indirect
	gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b // indirect
)
--------------------------------------------------------------------------------
/variables.tf:
--------------------------------------------------------------------------------
variable "elastic_beanstalk_application_name" {
  type        = string
  default     = ""
  description = "Elastic Beanstalk application name. If not provided or set to empty string, the ``Deploy`` stage of the pipeline will not be created"
}

variable "elastic_beanstalk_environment_name" {
  type        = string
  default     = ""
  description = "Elastic Beanstalk environment name. If not provided or set to empty string, the ``Deploy`` stage of the pipeline will not be created"
}

variable "github_oauth_token" {
  type        = string
  description = "GitHub Oauth Token"
  # Keep the token out of plans/outputs
  sensitive = true
}

variable "github_webhooks_token" {
  type        = string
  default     = ""
  description = "GitHub OAuth Token with permissions to create webhooks. If not provided, can be sourced from the `GITHUB_TOKEN` environment variable"
  sensitive   = true
}

variable "github_webhook_events" {
  type        = list(string)
  description = "A list of events which should trigger the webhook. See a list of [available events](https://developer.github.com/v3/activity/events/types/)"
  default     = ["push"]
}

variable "repo_owner" {
  type        = string
  description = "GitHub Organization or Person name"
}

variable "repo_name" {
  type        = string
  description = "GitHub repository name of the application to be built (and deployed to Elastic Beanstalk if configured)"
}

variable "branch" {
  type        = string
  description = "Branch of the GitHub repository, _e.g._ `master`"
}

variable "webhook_enabled" {
  type        = bool
  description = "Set to false to prevent the module from creating any webhook resources"
  default     = false
}

variable "webhook_target_action" {
  type        = string
  description = "The name of the action in a pipeline you want to connect to the webhook. The action must be from the source (first) stage of the pipeline"
  default     = "Source"
}

variable "webhook_authentication" {
  type        = string
  description = "The type of authentication to use. One of IP, GITHUB_HMAC, or UNAUTHENTICATED"
  default     = "GITHUB_HMAC"

  validation {
    condition     = contains(["IP", "GITHUB_HMAC", "UNAUTHENTICATED"], var.webhook_authentication)
    error_message = "webhook_authentication must be one of IP, GITHUB_HMAC, or UNAUTHENTICATED."
  }
}

variable "webhook_filter_json_path" {
  type        = string
  description = "The JSON path to filter on"
  default     = "$.ref"
}

variable "webhook_filter_match_equals" {
  type        = string
  description = "The value to match on (e.g. refs/heads/{Branch})"
  default     = "refs/heads/{Branch}"
}

variable "build_image" {
  type        = string
  default     = "aws/codebuild/standard:2.0"
  description = "Docker image for build environment, _e.g._ `aws/codebuild/standard:2.0` or `aws/codebuild/eb-nodejs-6.10.0-amazonlinux-64:4.0.0`"
}

variable "build_compute_type" {
  type        = string
  default     = "BUILD_GENERAL1_SMALL"
  description = "`CodeBuild` instance size. Possible values are: ```BUILD_GENERAL1_SMALL``` ```BUILD_GENERAL1_MEDIUM``` ```BUILD_GENERAL1_LARGE```"
}

variable "buildspec" {
  type        = string
  default     = ""
  description = "Declaration to use for building the project. [For more info](http://docs.aws.amazon.com/codebuild/latest/userguide/build-spec-ref.html)"
}

variable "poll_source_changes" {
  type        = bool
  default     = true
  description = "Periodically check the location of your source content and run the pipeline if changes are detected"
}

variable "privileged_mode" {
  type        = bool
  default     = false
  description = "If set to true, enables running the Docker daemon inside a Docker container on the CodeBuild instance. Used when building Docker images"
}

variable "region" {
  type        = string
  default     = ""
  description = "AWS Region, e.g. `us-east-1`. Used as CodeBuild ENV variable when building Docker images. [For more info](http://docs.aws.amazon.com/codebuild/latest/userguide/sample-docker.html)"
}

variable "aws_account_id" {
  type        = string
  default     = ""
  description = "AWS Account ID. Used as CodeBuild ENV variable when building Docker images. [For more info](http://docs.aws.amazon.com/codebuild/latest/userguide/sample-docker.html)"
}

variable "image_repo_name" {
  type        = string
  default     = "UNSET"
  description = "ECR repository name to store the Docker image built by this module. Used as CodeBuild ENV variable when building Docker images. [For more info](http://docs.aws.amazon.com/codebuild/latest/userguide/sample-docker.html)"
}

variable "image_tag" {
  type        = string
  default     = "latest"
  description = "Docker image tag in the ECR repository, e.g. 'latest'. Used as CodeBuild ENV variable when building Docker images. [For more info](http://docs.aws.amazon.com/codebuild/latest/userguide/sample-docker.html)"
}

variable "environment_variables" {
  type = list(object(
    {
      name  = string
      value = string
      type  = string
  }))

  default = [
    {
      name  = "NO_ADDITIONAL_BUILD_VARS"
      value = "TRUE"
      type  = "PLAINTEXT"
  }]

  description = "A list of maps, that contain the keys 'name', 'value', and 'type' to be used as additional environment variables for the build. Valid types are 'PLAINTEXT', 'PARAMETER_STORE', or 'SECRETS_MANAGER'"
}

variable "codebuild_cache_bucket_suffix_enabled" {
  type        = bool
  description = "The cache bucket generates a random 13 character string to generate a unique bucket name. If set to false it uses terraform-null-label's id value"
  default     = true
}

variable "force_destroy" {
  type        = bool
  default     = false
  description = "Force destroy the CI/CD S3 bucket even if it's not empty"
}

variable "cache_type" {
  type        = string
  default     = "S3"
  description = "The type of storage that will be used for the AWS CodeBuild project cache. Valid values: NO_CACHE, LOCAL, and S3. Defaults to S3 to keep same behavior as before upgrading `codebuild` module to 0.18+ version. If cache_type is S3, it will create an S3 bucket for storing codebuild cache inside"

  validation {
    condition     = contains(["NO_CACHE", "LOCAL", "S3"], var.cache_type)
    error_message = "cache_type must be one of NO_CACHE, LOCAL, or S3."
  }
}

variable "access_log_bucket_name" {
  type        = string
  default     = ""
  description = "Name of the S3 bucket where s3 access log will be sent to"
}

variable "s3_bucket_encryption_enabled" {
  type        = bool
  default     = true
  description = "When set to 'true' the 'aws_s3_bucket' resource will have AES256 encryption enabled by default"
}

variable "versioning_enabled" {
  type        = bool
  default     = true
  description = "A state of versioning. Versioning is a means of keeping multiple variants of an object in the same bucket"
}

variable "website_bucket_name" {
  type        = string
  default     = ""
  description = "Name of the S3 bucket where the website will be deployed"
}

variable "website_bucket_acl" {
  type        = string
  default     = "public-read"
  description = "Canned ACL of the S3 bucket objects that get served as a website, can be private if using CloudFront with OAI"
}
--------------------------------------------------------------------------------
/versions.tf:
--------------------------------------------------------------------------------
terraform {
  required_version = ">= 1.3"

  required_providers {
    aws = {
      source  = "hashicorp/aws"
      version = ">= 5.0"
    }
    random = {
      source  = "hashicorp/random"
      version = ">= 2.1"
    }
  }
}
--------------------------------------------------------------------------------