├── .github
├── CODEOWNERS
├── ISSUE_TEMPLATE
│ ├── bug_report.yml
│ ├── config.yml
│ ├── feature_request.yml
│ └── question.md
├── PULL_REQUEST_TEMPLATE.md
├── banner.png
├── mergify.yml
├── renovate.json
├── settings.yml
└── workflows
│ ├── branch.yml
│ ├── chatops.yml
│ ├── release.yml
│ └── scheduled.yml
├── .gitignore
├── .travis.yml
├── LICENSE
├── README.md
├── README.yaml
├── atmos.yaml
├── cloudformation.tf
├── efs.tf
├── iam.tf
├── main.tf
├── network.tf
├── outputs.tf
├── s3.tf
├── security_group.tf
├── templates
├── datapipeline.yml
└── sns.yml
└── variables.tf
/.github/CODEOWNERS:
--------------------------------------------------------------------------------
1 | # Use this file to define individuals or teams that are responsible for code in a repository.
2 | # Read more: https://docs.github.com/en/repositories/managing-your-repositorys-settings-and-features/customizing-your-repository/about-code-owners
3 | #
4 | # Order is important: the last matching pattern has the highest precedence
5 |
6 | # These owners will be the default owners for everything
7 | * @cloudposse/engineering @cloudposse/contributors
8 |
9 | # Cloud Posse must review any changes to Makefiles
10 | **/Makefile @cloudposse/engineering
11 | **/Makefile.* @cloudposse/engineering
12 |
13 | # Cloud Posse must review any changes to GitHub actions
14 | .github/* @cloudposse/engineering
15 |
16 | # Cloud Posse must review any changes to standard context definition,
17 | # but some changes can be rubber-stamped.
18 | **/*.tf @cloudposse/engineering @cloudposse/contributors @cloudposse/approvers
19 | README.yaml @cloudposse/engineering @cloudposse/contributors @cloudposse/approvers
20 | README.md @cloudposse/engineering @cloudposse/contributors @cloudposse/approvers
21 | docs/*.md @cloudposse/engineering @cloudposse/contributors @cloudposse/approvers
22 |
23 | # Cloud Posse Admins must review all changes to CODEOWNERS or the mergify configuration
24 | .github/mergify.yml @cloudposse/admins
25 | .github/CODEOWNERS @cloudposse/admins
26 |
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE/bug_report.yml:
--------------------------------------------------------------------------------
1 | ---
2 | name: Bug report
3 | description: Create a report to help us improve
4 | labels: ["bug"]
5 | assignees: [""]
6 | body:
7 | - type: markdown
8 | attributes:
9 | value: |
10 | Found a bug?
11 |
12 | Please checkout our [Slack Community](https://slack.cloudposse.com)
13 | or visit our [Slack Archive](https://archive.sweetops.com/).
14 |
15 | [![Slack Community](https://slack.cloudposse.com/badge.svg)](https://slack.cloudposse.com)
16 |
17 | - type: textarea
18 | id: concise-description
19 | attributes:
20 | label: Describe the Bug
21 | description: A clear and concise description of what the bug is.
22 | placeholder: What is the bug about?
23 | validations:
24 | required: true
25 |
26 | - type: textarea
27 | id: expected
28 | attributes:
29 | label: Expected Behavior
30 | description: A clear and concise description of what you expected.
31 | placeholder: What did you expect to happen?
32 | validations:
33 | required: true
34 |
35 | - type: textarea
36 | id: reproduction-steps
37 | attributes:
38 | label: Steps to Reproduce
39 | description: Steps to reproduce the behavior.
40 | placeholder: How do we reproduce it?
41 | validations:
42 | required: true
43 |
44 | - type: textarea
45 | id: screenshots
46 | attributes:
47 | label: Screenshots
48 | description: If applicable, add screenshots or logs to help explain.
49 | validations:
50 | required: false
51 |
52 | - type: textarea
53 | id: environment
54 | attributes:
55 | label: Environment
56 | description: Anything that will help us triage the bug.
57 | placeholder: |
58 | - OS: [e.g. Linux, OSX, WSL, etc]
59 | - Version [e.g. 10.15]
60 | - Module version
61 | - Terraform version
62 | validations:
63 | required: false
64 |
65 | - type: textarea
66 | id: additional
67 | attributes:
68 | label: Additional Context
69 | description: |
70 | Add any other context about the problem here.
71 | validations:
72 | required: false
73 |
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE/config.yml:
--------------------------------------------------------------------------------
1 | blank_issues_enabled: false
2 |
3 | contact_links:
4 |
5 | - name: Community Slack Team
6 | url: https://cloudposse.com/slack/
7 | about: |-
8 | Please ask and answer questions here.
9 |
10 | - name: Office Hours
11 | url: https://cloudposse.com/office-hours/
12 | about: |-
13 | Join us every Wednesday for FREE Office Hours (lunch & learn).
14 |
15 | - name: DevOps Accelerator Program
16 | url: https://cloudposse.com/accelerate/
17 | about: |-
18 | Own your infrastructure in record time. We build it. You drive it.
19 |
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE/feature_request.yml:
--------------------------------------------------------------------------------
1 | ---
2 | name: Feature Request
3 | description: Suggest an idea for this project
4 | labels: ["feature request"]
5 | assignees: [""]
6 | body:
7 | - type: markdown
8 | attributes:
9 | value: |
10 | Have a question?
11 |
12 | Please checkout our [Slack Community](https://slack.cloudposse.com)
13 | or visit our [Slack Archive](https://archive.sweetops.com/).
14 |
15 | [![Slack Community](https://slack.cloudposse.com/badge.svg)](https://slack.cloudposse.com)
16 |
17 | - type: textarea
18 | id: concise-description
19 | attributes:
20 | label: Describe the Feature
21 | description: A clear and concise description of what the feature is.
22 | placeholder: What is the feature about?
23 | validations:
24 | required: true
25 |
26 | - type: textarea
27 | id: expected
28 | attributes:
29 | label: Expected Behavior
30 | description: A clear and concise description of what you expected.
31 | placeholder: What did you expect to happen?
32 | validations:
33 | required: true
34 |
35 | - type: textarea
36 | id: use-case
37 | attributes:
38 | label: Use Case
39 | description: |
40 | Is your feature request related to a problem/challenge you are trying
41 | to solve?
42 |
43 | Please provide some additional context of why this feature or
44 | capability will be valuable.
45 | validations:
46 | required: true
47 |
48 | - type: textarea
49 | id: ideal-solution
50 | attributes:
51 | label: Describe Ideal Solution
52 | description: A clear and concise description of what you want to happen.
53 | validations:
54 | required: true
55 |
56 | - type: textarea
57 | id: alternatives-considered
58 | attributes:
59 | label: Alternatives Considered
60 | description: Explain alternative solutions or features considered.
61 | validations:
62 | required: false
63 |
64 | - type: textarea
65 | id: additional
66 | attributes:
67 | label: Additional Context
68 | description: |
69 | Add any other context about the problem here.
70 | validations:
71 | required: false
72 |
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE/question.md:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cloudposse/terraform-aws-efs-backup/36ba8cfdebf32caed133c08af88bd268bb897c09/.github/ISSUE_TEMPLATE/question.md
--------------------------------------------------------------------------------
/.github/PULL_REQUEST_TEMPLATE.md:
--------------------------------------------------------------------------------
1 | ## what
2 |
3 |
7 |
8 | ## why
9 |
10 |
15 |
16 | ## references
17 |
18 |
22 |
--------------------------------------------------------------------------------
/.github/banner.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cloudposse/terraform-aws-efs-backup/36ba8cfdebf32caed133c08af88bd268bb897c09/.github/banner.png
--------------------------------------------------------------------------------
/.github/mergify.yml:
--------------------------------------------------------------------------------
1 | extends: .github
2 |
--------------------------------------------------------------------------------
/.github/renovate.json:
--------------------------------------------------------------------------------
1 | {
2 | "extends": [
3 | "config:base",
4 | ":preserveSemverRanges"
5 | ],
6 | "baseBranches": ["main", "master", "/^release\\/v\\d{1,2}$/"],
7 | "labels": ["auto-update"],
8 | "dependencyDashboardAutoclose": true,
9 | "enabledManagers": ["terraform"],
10 | "terraform": {
11 | "ignorePaths": ["**/context.tf", "examples/**"]
12 | }
13 | }
14 |
--------------------------------------------------------------------------------
/.github/settings.yml:
--------------------------------------------------------------------------------
1 | # Upstream changes from _extends are only recognized when modifications are made to this file in the default branch.
2 | _extends: .github
3 | repository:
4 | name: terraform-aws-efs-backup
5 | description: Terraform module designed to easily backup EFS filesystems to S3 using DataPipeline
6 | homepage: https://cloudposse.com/accelerate
7 | topics: terraform, terraform-modules, datapipeline, aws, s3, efs, nfs, backup, snapshot, lambda, automatic, scheduled-job, cronjob
8 |
9 |
10 |
11 |
--------------------------------------------------------------------------------
/.github/workflows/branch.yml:
--------------------------------------------------------------------------------
1 | ---
2 | name: Branch
3 | on:
4 | pull_request:
5 | branches:
6 | - main
7 | - release/**
8 | types: [opened, synchronize, reopened, labeled, unlabeled]
9 | push:
10 | branches:
11 | - main
12 | - release/v*
13 | paths-ignore:
14 | - '.github/**'
15 | - 'docs/**'
16 | - 'examples/**'
17 | - 'test/**'
18 | - 'README.md'
19 |
20 | permissions: {}
21 |
22 | jobs:
23 | terraform-module:
24 | uses: cloudposse/.github/.github/workflows/shared-terraform-module.yml@main
25 | secrets: inherit
26 |
--------------------------------------------------------------------------------
/.github/workflows/chatops.yml:
--------------------------------------------------------------------------------
1 | ---
2 | name: chatops
3 | on:
4 | issue_comment:
5 | types: [created]
6 |
7 | permissions:
8 | pull-requests: write
9 | id-token: write
10 | contents: write
11 | statuses: write
12 |
13 | jobs:
14 | test:
15 | uses: cloudposse/.github/.github/workflows/shared-terraform-chatops.yml@main
16 | if: ${{ github.event.issue.pull_request && contains(github.event.comment.body, '/terratest') }}
17 | secrets: inherit
18 |
--------------------------------------------------------------------------------
/.github/workflows/release.yml:
--------------------------------------------------------------------------------
1 | ---
2 | name: release
3 | on:
4 | release:
5 | types:
6 | - published
7 |
8 | permissions:
9 | id-token: write
10 | contents: write
11 | pull-requests: write
12 |
13 | jobs:
14 | terraform-module:
15 | uses: cloudposse/.github/.github/workflows/shared-release-branches.yml@main
16 | secrets: inherit
17 |
--------------------------------------------------------------------------------
/.github/workflows/scheduled.yml:
--------------------------------------------------------------------------------
1 | ---
2 | name: scheduled
3 | on:
4 | workflow_dispatch: { } # Allows manually trigger this workflow
5 | schedule:
6 | - cron: "0 3 * * *"
7 |
8 | permissions:
9 | pull-requests: write
10 | id-token: write
11 | contents: write
12 |
13 | jobs:
14 | scheduled:
15 | uses: cloudposse/.github/.github/workflows/shared-terraform-scheduled.yml@main
16 | secrets: inherit
17 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | keys/*
2 | !keys/.gitkeep
3 | *.info
4 | /tmp/
5 | *tfstate*
6 | secrets.tfvars
7 | *.tfvars
8 | .terraform
9 | .idea
10 | *.iml
11 |
12 | .build-harness
13 | build-harness
--------------------------------------------------------------------------------
/.travis.yml:
--------------------------------------------------------------------------------
1 | addons:
2 | apt:
3 | packages:
4 | - git
5 | - make
6 | - curl
7 |
8 | install:
9 | - make init
10 |
11 | script:
12 | - make terraform/install
13 | - make terraform/get-plugins
14 | - make terraform/get-modules
15 | - make terraform/lint
16 | - make terraform/validate
17 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | Apache License
2 | Version 2.0, January 2004
3 | http://www.apache.org/licenses/
4 |
5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
6 |
7 | 1. Definitions.
8 |
9 | "License" shall mean the terms and conditions for use, reproduction,
10 | and distribution as defined by Sections 1 through 9 of this document.
11 |
12 | "Licensor" shall mean the copyright owner or entity authorized by
13 | the copyright owner that is granting the License.
14 |
15 | "Legal Entity" shall mean the union of the acting entity and all
16 | other entities that control, are controlled by, or are under common
17 | control with that entity. For the purposes of this definition,
18 | "control" means (i) the power, direct or indirect, to cause the
19 | direction or management of such entity, whether by contract or
20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the
21 | outstanding shares, or (iii) beneficial ownership of such entity.
22 |
23 | "You" (or "Your") shall mean an individual or Legal Entity
24 | exercising permissions granted by this License.
25 |
26 | "Source" form shall mean the preferred form for making modifications,
27 | including but not limited to software source code, documentation
28 | source, and configuration files.
29 |
30 | "Object" form shall mean any form resulting from mechanical
31 | transformation or translation of a Source form, including but
32 | not limited to compiled object code, generated documentation,
33 | and conversions to other media types.
34 |
35 | "Work" shall mean the work of authorship, whether in Source or
36 | Object form, made available under the License, as indicated by a
37 | copyright notice that is included in or attached to the work
38 | (an example is provided in the Appendix below).
39 |
40 | "Derivative Works" shall mean any work, whether in Source or Object
41 | form, that is based on (or derived from) the Work and for which the
42 | editorial revisions, annotations, elaborations, or other modifications
43 | represent, as a whole, an original work of authorship. For the purposes
44 | of this License, Derivative Works shall not include works that remain
45 | separable from, or merely link (or bind by name) to the interfaces of,
46 | the Work and Derivative Works thereof.
47 |
48 | "Contribution" shall mean any work of authorship, including
49 | the original version of the Work and any modifications or additions
50 | to that Work or Derivative Works thereof, that is intentionally
51 | submitted to Licensor for inclusion in the Work by the copyright owner
52 | or by an individual or Legal Entity authorized to submit on behalf of
53 | the copyright owner. For the purposes of this definition, "submitted"
54 | means any form of electronic, verbal, or written communication sent
55 | to the Licensor or its representatives, including but not limited to
56 | communication on electronic mailing lists, source code control systems,
57 | and issue tracking systems that are managed by, or on behalf of, the
58 | Licensor for the purpose of discussing and improving the Work, but
59 | excluding communication that is conspicuously marked or otherwise
60 | designated in writing by the copyright owner as "Not a Contribution."
61 |
62 | "Contributor" shall mean Licensor and any individual or Legal Entity
63 | on behalf of whom a Contribution has been received by Licensor and
64 | subsequently incorporated within the Work.
65 |
66 | 2. Grant of Copyright License. Subject to the terms and conditions of
67 | this License, each Contributor hereby grants to You a perpetual,
68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
69 | copyright license to reproduce, prepare Derivative Works of,
70 | publicly display, publicly perform, sublicense, and distribute the
71 | Work and such Derivative Works in Source or Object form.
72 |
73 | 3. Grant of Patent License. Subject to the terms and conditions of
74 | this License, each Contributor hereby grants to You a perpetual,
75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
76 | (except as stated in this section) patent license to make, have made,
77 | use, offer to sell, sell, import, and otherwise transfer the Work,
78 | where such license applies only to those patent claims licensable
79 | by such Contributor that are necessarily infringed by their
80 | Contribution(s) alone or by combination of their Contribution(s)
81 | with the Work to which such Contribution(s) was submitted. If You
82 | institute patent litigation against any entity (including a
83 | cross-claim or counterclaim in a lawsuit) alleging that the Work
84 | or a Contribution incorporated within the Work constitutes direct
85 | or contributory patent infringement, then any patent licenses
86 | granted to You under this License for that Work shall terminate
87 | as of the date such litigation is filed.
88 |
89 | 4. Redistribution. You may reproduce and distribute copies of the
90 | Work or Derivative Works thereof in any medium, with or without
91 | modifications, and in Source or Object form, provided that You
92 | meet the following conditions:
93 |
94 | (a) You must give any other recipients of the Work or
95 | Derivative Works a copy of this License; and
96 |
97 | (b) You must cause any modified files to carry prominent notices
98 | stating that You changed the files; and
99 |
100 | (c) You must retain, in the Source form of any Derivative Works
101 | that You distribute, all copyright, patent, trademark, and
102 | attribution notices from the Source form of the Work,
103 | excluding those notices that do not pertain to any part of
104 | the Derivative Works; and
105 |
106 | (d) If the Work includes a "NOTICE" text file as part of its
107 | distribution, then any Derivative Works that You distribute must
108 | include a readable copy of the attribution notices contained
109 | within such NOTICE file, excluding those notices that do not
110 | pertain to any part of the Derivative Works, in at least one
111 | of the following places: within a NOTICE text file distributed
112 | as part of the Derivative Works; within the Source form or
113 | documentation, if provided along with the Derivative Works; or,
114 | within a display generated by the Derivative Works, if and
115 | wherever such third-party notices normally appear. The contents
116 | of the NOTICE file are for informational purposes only and
117 | do not modify the License. You may add Your own attribution
118 | notices within Derivative Works that You distribute, alongside
119 | or as an addendum to the NOTICE text from the Work, provided
120 | that such additional attribution notices cannot be construed
121 | as modifying the License.
122 |
123 | You may add Your own copyright statement to Your modifications and
124 | may provide additional or different license terms and conditions
125 | for use, reproduction, or distribution of Your modifications, or
126 | for any such Derivative Works as a whole, provided Your use,
127 | reproduction, and distribution of the Work otherwise complies with
128 | the conditions stated in this License.
129 |
130 | 5. Submission of Contributions. Unless You explicitly state otherwise,
131 | any Contribution intentionally submitted for inclusion in the Work
132 | by You to the Licensor shall be under the terms and conditions of
133 | this License, without any additional terms or conditions.
134 | Notwithstanding the above, nothing herein shall supersede or modify
135 | the terms of any separate license agreement you may have executed
136 | with Licensor regarding such Contributions.
137 |
138 | 6. Trademarks. This License does not grant permission to use the trade
139 | names, trademarks, service marks, or product names of the Licensor,
140 | except as required for reasonable and customary use in describing the
141 | origin of the Work and reproducing the content of the NOTICE file.
142 |
143 | 7. Disclaimer of Warranty. Unless required by applicable law or
144 | agreed to in writing, Licensor provides the Work (and each
145 | Contributor provides its Contributions) on an "AS IS" BASIS,
146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
147 | implied, including, without limitation, any warranties or conditions
148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
149 | PARTICULAR PURPOSE. You are solely responsible for determining the
150 | appropriateness of using or redistributing the Work and assume any
151 | risks associated with Your exercise of permissions under this License.
152 |
153 | 8. Limitation of Liability. In no event and under no legal theory,
154 | whether in tort (including negligence), contract, or otherwise,
155 | unless required by applicable law (such as deliberate and grossly
156 | negligent acts) or agreed to in writing, shall any Contributor be
157 | liable to You for damages, including any direct, indirect, special,
158 | incidental, or consequential damages of any character arising as a
159 | result of this License or out of the use or inability to use the
160 | Work (including but not limited to damages for loss of goodwill,
161 | work stoppage, computer failure or malfunction, or any and all
162 | other commercial damages or losses), even if such Contributor
163 | has been advised of the possibility of such damages.
164 |
165 | 9. Accepting Warranty or Additional Liability. While redistributing
166 | the Work or Derivative Works thereof, You may choose to offer,
167 | and charge a fee for, acceptance of support, warranty, indemnity,
168 | or other liability obligations and/or rights consistent with this
169 | License. However, in accepting such obligations, You may act only
170 | on Your own behalf and on Your sole responsibility, not on behalf
171 | of any other Contributor, and only if You agree to indemnify,
172 | defend, and hold each Contributor harmless for any liability
173 | incurred by, or claims asserted against, such Contributor by reason
174 | of your accepting any such warranty or additional liability.
175 |
176 | END OF TERMS AND CONDITIONS
177 |
178 | APPENDIX: How to apply the Apache License to your work.
179 |
180 | To apply the Apache License to your work, attach the following
181 | boilerplate notice, with the fields enclosed by brackets "{}"
182 | replaced with your own identifying information. (Don't include
183 | the brackets!) The text should be enclosed in the appropriate
184 | comment syntax for the file format. We also recommend that a
185 | file or class name and description of purpose be included on the
186 | same "printed page" as the copyright notice for easier
187 | identification within third-party archives.
188 |
189 | Copyright 2017-2018 Cloud Posse, LLC
190 |
191 | Licensed under the Apache License, Version 2.0 (the "License");
192 | you may not use this file except in compliance with the License.
193 | You may obtain a copy of the License at
194 |
195 | http://www.apache.org/licenses/LICENSE-2.0
196 |
197 | Unless required by applicable law or agreed to in writing, software
198 | distributed under the License is distributed on an "AS IS" BASIS,
199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
200 | See the License for the specific language governing permissions and
201 | limitations under the License.
202 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 | 
5 |
6 | 


7 |
8 |
9 |
29 |
30 | Terraform module designed to easily backup EFS filesystems to S3 using DataPipeline.
31 |
32 | The workflow is simple:
33 |
34 | * Periodically launch resource (EC2 instance) based on schedule
35 | * Execute the shell command defined in the activity on the instance
36 | * Sync data from Production EFS to S3 Bucket by using `aws-cli`
37 | * The execution log of the activity is stored in `S3`
38 | * Publish the success or failure of the activity to an `SNS` topic
39 | * Automatically rotate the backups using `S3 lifecycle rule`
40 |
41 |
42 | > [!TIP]
43 | > #### 👽 Use Atmos with Terraform
44 | > Cloud Posse uses [`atmos`](https://atmos.tools) to easily orchestrate multiple environments using Terraform.
45 | > Works with [Github Actions](https://atmos.tools/integrations/github-actions/), [Atlantis](https://atmos.tools/integrations/atlantis), or [Spacelift](https://atmos.tools/integrations/spacelift).
46 | >
47 | >
48 | > Watch demo of using Atmos with Terraform
49 | > 
50 | > Example of running atmos
to manage infrastructure from our Quick Start tutorial.
51 | >
52 |
53 |
54 |
55 |
56 |
57 | ## Usage
58 |
59 | Include this module in your existing terraform code:
60 |
61 | ```hcl
62 | module "efs_backup" {
63 | source = "git::https://github.com/cloudposse/terraform-aws-efs-backup.git?ref=master"
64 |
65 | name = "${var.name}"
66 | stage = "${var.stage}"
67 | namespace = "${var.namespace}"
68 | vpc_id = "${var.vpc_id}"
69 | efs_mount_target_id = "${var.efs_mount_target_id}"
70 | use_ip_address = "false"
71 | noncurrent_version_expiration_days = "${var.noncurrent_version_expiration_days}"
72 | ssh_key_pair = "${var.ssh_key_pair}"
73 | datapipeline_config = "${var.datapipeline_config}"
74 | modify_security_group = "true"
75 | }
76 |
77 | output "efs_backup_security_group" {
78 | value = "${module.efs_backup.security_group_id}"
79 | }
80 | ```
81 | ## Integration with `EFS`
82 |
83 | To enable connectivity between the `DataPipeline` instances and the `EFS`, use one of the following methods to configure Security Groups:
84 |
85 | 1. Explicitly add the `DataPipeline` SG (the output of this module `security_group_id`) to the list of the `ingress` rules of the `EFS` SG. For example:
86 |
87 | ```hcl
88 | module "elastic_beanstalk_environment" {
89 | source = "git::https://github.com/cloudposse/terraform-aws-elastic-beanstalk-environment.git?ref=master"
90 | namespace = "${var.namespace}"
91 | name = "${var.name}"
92 | stage = "${var.stage}"
93 | delimiter = "${var.delimiter}"
94 | attributes = ["${compact(concat(var.attributes, list("eb-env")))}"]
95 | tags = "${var.tags}"
96 |
97 | # ..............................
98 | }
99 |
100 | module "efs" {
101 | source = "git::https://github.com/cloudposse/terraform-aws-efs.git?ref=master"
102 | namespace = "${var.namespace}"
103 | name = "${var.name}"
104 | stage = "${var.stage}"
105 | delimiter = "${var.delimiter}"
106 | attributes = ["${compact(concat(var.attributes, list("efs")))}"]
107 | tags = "${var.tags}"
108 |
109 | # Allow EB/EC2 instances and DataPipeline instances to connect to the EFS
110 | security_groups = ["${module.elastic_beanstalk_environment.security_group_id}", "${module.efs_backup.security_group_id}"]
111 | }
112 |
113 | module "efs_backup" {
114 | source = "git::https://github.com/cloudposse/terraform-aws-efs-backup.git?ref=master"
115 | name = "${var.name}"
116 | stage = "${var.stage}"
117 | namespace = "${var.namespace}"
118 | delimiter = "${var.delimiter}"
119 | attributes = ["${compact(concat(var.attributes, list("efs-backup")))}"]
120 | tags = "${var.tags}"
121 |
122 | # Important to set it to `false` since we added the `DataPipeline` SG (output of the `efs_backup` module) to the `security_groups` of the `efs` module
123 | # See NOTE below for more information
124 | modify_security_group = "false"
125 |
126 | # ..............................
127 | }
128 | ```
129 |
130 | 2. Set `modify_security_group` attribute to `true` so the module will modify the `EFS` SG to allow the `DataPipeline` to connect to the `EFS`
131 |
132 | **NOTE:** Do not mix these two methods together.
133 | `Terraform` does not support using a Security Group with in-line rules in conjunction with any Security Group Rule resources.
134 | https://www.terraform.io/docs/providers/aws/r/security_group_rule.html
135 | > NOTE on Security Groups and Security Group Rules: Terraform currently provides both a standalone Security Group Rule resource
136 | (a single ingress or egress rule), and a Security Group resource with ingress and egress rules defined in-line.
137 | At this time you cannot use a Security Group with in-line rules in conjunction with any Security Group Rule resources.
138 | Doing so will cause a conflict of rule settings and will overwrite rules.
139 |
140 | > [!IMPORTANT]
141 | > In Cloud Posse's examples, we avoid pinning modules to specific versions to prevent discrepancies between the documentation
142 | > and the latest released versions. However, for your own projects, we strongly advise pinning each module to the exact version
143 | > you're using. This practice ensures the stability of your infrastructure. Additionally, we recommend implementing a systematic
144 | > approach for updating versions to avoid unexpected changes.
145 |
146 |
147 |
148 |
149 |
150 |
151 |
152 |
153 |
154 | ## Requirements
155 |
156 | No requirements.
157 |
158 | ## Providers
159 |
160 | | Name | Version |
161 | |------|---------|
162 | | [aws](#provider\_aws) | n/a |
163 |
164 | ## Modules
165 |
166 | | Name | Source | Version |
167 | |------|--------|---------|
168 | | [backups\_label](#module\_backups\_label) | git::https://github.com/cloudposse/terraform-null-label.git | tags/0.3.1 |
169 | | [datapipeline\_label](#module\_datapipeline\_label) | git::https://github.com/cloudposse/terraform-null-label.git | tags/0.3.1 |
170 | | [label](#module\_label) | git::https://github.com/cloudposse/terraform-null-label.git | tags/0.3.1 |
171 | | [logs\_label](#module\_logs\_label) | git::https://github.com/cloudposse/terraform-null-label.git | tags/0.3.1 |
172 | | [resource\_role\_label](#module\_resource\_role\_label) | git::https://github.com/cloudposse/terraform-null-label.git | tags/0.3.1 |
173 | | [role\_label](#module\_role\_label) | git::https://github.com/cloudposse/terraform-null-label.git | tags/0.3.1 |
174 | | [sns\_label](#module\_sns\_label) | git::https://github.com/cloudposse/terraform-null-label.git | tags/0.3.1 |
175 |
176 | ## Resources
177 |
178 | | Name | Type |
179 | |------|------|
180 | | [aws_cloudformation_stack.datapipeline](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/cloudformation_stack) | resource |
181 | | [aws_cloudformation_stack.sns](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/cloudformation_stack) | resource |
182 | | [aws_iam_instance_profile.resource_role](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/iam_instance_profile) | resource |
183 | | [aws_iam_role.resource_role](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/iam_role) | resource |
184 | | [aws_iam_role.role](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/iam_role) | resource |
185 | | [aws_iam_role_policy_attachment.resource_role](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/iam_role_policy_attachment) | resource |
186 | | [aws_iam_role_policy_attachment.role](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/iam_role_policy_attachment) | resource |
187 | | [aws_s3_bucket.backups](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/s3_bucket) | resource |
188 | | [aws_s3_bucket.logs](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/s3_bucket) | resource |
189 | | [aws_security_group.datapipeline](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/security_group) | resource |
190 | | [aws_security_group_rule.datapipeline_efs_ingress](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/security_group_rule) | resource |
191 | | [aws_ami.amazon_linux](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/data-sources/ami) | data source |
192 | | [aws_efs_mount_target.default](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/data-sources/efs_mount_target) | data source |
193 | | [aws_iam_policy_document.resource_role](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/data-sources/iam_policy_document) | data source |
194 | | [aws_iam_policy_document.role](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/data-sources/iam_policy_document) | data source |
195 | | [aws_region.default](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/data-sources/region) | data source |
196 | | [aws_subnet_ids.default](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/data-sources/subnet_ids) | data source |
197 | | [aws_vpc.default](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/data-sources/vpc) | data source |
198 |
199 | ## Inputs
200 |
201 | | Name | Description | Type | Default | Required |
202 | |------|-------------|------|---------|:--------:|
203 | | [attributes](#input\_attributes) | Additional attributes (e.g. `efs-backup`) | `list(string)` | `[]` | no |
204 | | [datapipeline\_config](#input\_datapipeline\_config) | DataPipeline configuration options | `map(string)` | {
"email": "",
"instance_type": "t2.micro",
"period": "24 hours",
"timeout": "60 Minutes"
}
| no |
205 | | [datapipeline\_security\_group](#input\_datapipeline\_security\_group) | Optionally specify a security group to use for the datapipeline instances | `string` | `""` | no |
206 | | [delimiter](#input\_delimiter) | Delimiter to be used between `name`, `namespace`, `stage`, etc. | `string` | `"-"` | no |
207 | | [efs\_mount\_target\_id](#input\_efs\_mount\_target\_id) | EFS Mount Target ID (e.g. `fsmt-279bfc62`) | `string` | n/a | yes |
208 | | [modify\_security\_group](#input\_modify\_security\_group) | Should the module modify the `EFS` security group | `string` | `"false"` | no |
209 | | [name](#input\_name) | The Name of the application or solution (e.g. `bastion` or `portal`) | `any` | n/a | yes |
210 | | [namespace](#input\_namespace) | Namespace (e.g. `cp` or `cloudposse`) | `any` | n/a | yes |
211 | | [noncurrent\_version\_expiration\_days](#input\_noncurrent\_version\_expiration\_days) | S3 object versions expiration period (days) | `string` | `"35"` | no |
212 | | [region](#input\_region) | (Optional) AWS Region. If not specified, will be derived from 'aws\_region' data source | `string` | `""` | no |
213 | | [ssh\_key\_pair](#input\_ssh\_key\_pair) | `SSH` key that will be deployed on DataPipeline's instance | `string` | n/a | yes |
214 | | [stage](#input\_stage) | Stage (e.g. `prod`, `dev`, `staging`) | `any` | n/a | yes |
215 | | [subnet\_id](#input\_subnet\_id) | Optionally specify the subnet to use | `string` | `""` | no |
216 | | [tags](#input\_tags) | Additional tags (e.g. `map("BusinessUnit","XYZ")`) | `map(string)` | `{}` | no |
217 | | [use\_ip\_address](#input\_use\_ip\_address) | If set to `true`, will use IP address instead of DNS name to connect to the `EFS` | `string` | `"false"` | no |
218 | | [vpc\_id](#input\_vpc\_id) | VPC ID | `string` | `""` | no |
219 |
220 | ## Outputs
221 |
222 | | Name | Description |
223 | |------|-------------|
224 | | [backups\_bucket\_name](#output\_backups\_bucket\_name) | Backups bucket name |
225 | | [datapipeline\_ids](#output\_datapipeline\_ids) | Datapipeline ids |
226 | | [logs\_bucket\_name](#output\_logs\_bucket\_name) | Logs bucket name |
227 | | [security\_group\_id](#output\_security\_group\_id) | Security group id |
228 | | [sns\_topic\_arn](#output\_sns\_topic\_arn) | Backup notification SNS topic ARN |
229 |
230 |
231 |
232 |
233 |
234 |
235 |
236 |
237 | ## Related Projects
238 |
239 | Check out these related projects.
240 |
241 | - [terraform-aws-efs](https://github.com/cloudposse/terraform-aws-efs) - Terraform Module to define an EFS Filesystem (aka NFS)
242 | - [terraform-aws-efs-cloudwatch-sns-alarms](https://github.com/cloudposse/terraform-aws-efs-cloudwatch-sns-alarms) - Terraform module that configures CloudWatch SNS alerts for EFS
243 |
244 |
245 | ## References
246 |
247 | For additional context, refer to some of these links.
248 |
249 | - [datapipeline-efs-backup-demo](https://github.com/knakayama/datapipeline-efs-backup-demo) - Thanks for inspiration
250 |
251 |
252 |
253 | > [!TIP]
254 | > #### Use Terraform Reference Architectures for AWS
255 | >
256 | > Use Cloud Posse's ready-to-go [terraform architecture blueprints](https://cloudposse.com/reference-architecture/) for AWS to get up and running quickly.
257 | >
258 | > ✅ We build it together with your team.
259 | > ✅ Your team owns everything.
260 | > ✅ 100% Open Source and backed by fanatical support.
261 | >
262 | >
263 | > 📚 Learn More
264 | >
265 | >
266 | >
267 | > Cloud Posse is the leading [**DevOps Accelerator**](https://cpco.io/commercial-support?utm_source=github&utm_medium=readme&utm_campaign=cloudposse/terraform-aws-efs-backup&utm_content=commercial_support) for funded startups and enterprises.
268 | >
269 | > *Your team can operate like a pro today.*
270 | >
271 | > Ensure that your team succeeds by using Cloud Posse's proven process and turnkey blueprints. Plus, we stick around until you succeed.
272 | > #### Day-0: Your Foundation for Success
273 | > - **Reference Architecture.** You'll get everything you need from the ground up built using 100% infrastructure as code.
274 | > - **Deployment Strategy.** Adopt a proven deployment strategy with GitHub Actions, enabling automated, repeatable, and reliable software releases.
275 | > - **Site Reliability Engineering.** Gain total visibility into your applications and services with Datadog, ensuring high availability and performance.
276 | > - **Security Baseline.** Establish a secure environment from the start, with built-in governance, accountability, and comprehensive audit logs, safeguarding your operations.
277 | > - **GitOps.** Empower your team to manage infrastructure changes confidently and efficiently through Pull Requests, leveraging the full power of GitHub Actions.
278 | >
279 | >
280 | >
281 | > #### Day-2: Your Operational Mastery
282 | > - **Training.** Equip your team with the knowledge and skills to confidently manage the infrastructure, ensuring long-term success and self-sufficiency.
283 | > - **Support.** Benefit from a seamless communication over Slack with our experts, ensuring you have the support you need, whenever you need it.
284 | > - **Troubleshooting.** Access expert assistance to quickly resolve any operational challenges, minimizing downtime and maintaining business continuity.
285 | > - **Code Reviews.** Enhance your team’s code quality with our expert feedback, fostering continuous improvement and collaboration.
286 | > - **Bug Fixes.** Rely on our team to troubleshoot and resolve any issues, ensuring your systems run smoothly.
287 | > - **Migration Assistance.** Accelerate your migration process with our dedicated support, minimizing disruption and speeding up time-to-value.
288 | > - **Customer Workshops.** Engage with our team in weekly workshops, gaining insights and strategies to continuously improve and innovate.
289 | >
290 | >
291 | >
292 |
293 | ## ✨ Contributing
294 |
295 | This project is under active development, and we encourage contributions from our community.
296 |
297 |
298 |
299 | Many thanks to our outstanding contributors:
300 |
301 |
302 |
303 |
304 |
305 | For 🐛 bug reports & feature requests, please use the [issue tracker](https://github.com/cloudposse/terraform-aws-efs-backup/issues).
306 |
307 | In general, PRs are welcome. We follow the typical "fork-and-pull" Git workflow.
308 | 1. Review our [Code of Conduct](https://github.com/cloudposse/terraform-aws-efs-backup/?tab=coc-ov-file#code-of-conduct) and [Contributor Guidelines](https://github.com/cloudposse/.github/blob/main/CONTRIBUTING.md).
309 | 2. **Fork** the repo on GitHub
310 | 3. **Clone** the project to your own machine
311 | 4. **Commit** changes to your own branch
312 | 5. **Push** your work back up to your fork
313 | 6. Submit a **Pull Request** so that we can review your changes
314 |
315 | **NOTE:** Be sure to merge the latest changes from "upstream" before making a pull request!
316 |
317 | ### 🌎 Slack Community
318 |
319 | Join our [Open Source Community](https://cpco.io/slack?utm_source=github&utm_medium=readme&utm_campaign=cloudposse/terraform-aws-efs-backup&utm_content=slack) on Slack. It's **FREE** for everyone! Our "SweetOps" community is where you get to talk with others who share a similar vision for how to rollout and manage infrastructure. This is the best place to talk shop, ask questions, solicit feedback, and work together as a community to build totally *sweet* infrastructure.
320 |
321 | ### 📰 Newsletter
322 |
323 | Sign up for [our newsletter](https://cpco.io/newsletter?utm_source=github&utm_medium=readme&utm_campaign=cloudposse/terraform-aws-efs-backup&utm_content=newsletter) and join 3,000+ DevOps engineers, CTOs, and founders who get insider access to the latest DevOps trends, so you can always stay in the know.
324 | Dropped straight into your Inbox every week — and usually a 5-minute read.
325 |
326 | ### 📆 Office Hours
327 |
328 | [Join us every Wednesday via Zoom](https://cloudposse.com/office-hours?utm_source=github&utm_medium=readme&utm_campaign=cloudposse/terraform-aws-efs-backup&utm_content=office_hours) for your weekly dose of insider DevOps trends, AWS news and Terraform insights, all sourced from our SweetOps community, plus a _live Q&A_ that you can’t find anywhere else.
329 | It's **FREE** for everyone!
330 | ## License
331 |
332 |
333 |
334 |
335 | Preamble to the Apache License, Version 2.0
336 |
337 |
338 |
339 | Complete license is available in the [`LICENSE`](LICENSE) file.
340 |
341 | ```text
342 | Licensed to the Apache Software Foundation (ASF) under one
343 | or more contributor license agreements. See the NOTICE file
344 | distributed with this work for additional information
345 | regarding copyright ownership. The ASF licenses this file
346 | to you under the Apache License, Version 2.0 (the
347 | "License"); you may not use this file except in compliance
348 | with the License. You may obtain a copy of the License at
349 |
350 | https://www.apache.org/licenses/LICENSE-2.0
351 |
352 | Unless required by applicable law or agreed to in writing,
353 | software distributed under the License is distributed on an
354 | "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
355 | KIND, either express or implied. See the License for the
356 | specific language governing permissions and limitations
357 | under the License.
358 | ```
359 |
360 |
361 | ## Trademarks
362 |
363 | All other trademarks referenced herein are the property of their respective owners.
364 |
365 |
366 | ---
367 | Copyright © 2017-2025 [Cloud Posse, LLC](https://cpco.io/copyright)
368 |
369 |
370 |
371 |
372 |
373 |
--------------------------------------------------------------------------------
/README.yaml:
--------------------------------------------------------------------------------
1 | #
2 | # This is the canonical configuration for the `README.md`
3 | # Run `make readme` to rebuild the `README.md`
4 | #
5 |
6 | # Name of this project
7 | name: terraform-aws-efs-backup
8 |
9 | # Tags of this project
10 | tags:
11 | - aws
12 | - terraform
13 | - terraform-modules
14 | - backups
15 | - datapipeline
16 | - s3
17 | - efs
18 | - nfs
19 | - snapshot
20 | - lambda
21 | - automatic
22 | - scheduled-job
23 | - cronjob
24 |
25 | # Categories of this project
26 | categories:
27 | - terraform-modules/backups
28 |
29 | # Logo for this project
30 | #logo: docs/logo.png
31 |
32 | # License of this project
33 | license: "APACHE2"
34 |
35 | # Canonical GitHub repo
36 | github_repo: cloudposse/terraform-aws-efs-backup
37 |
38 | # Badges to display
39 | badges:
40 | - name: Latest Release
41 | image: https://img.shields.io/github/release/cloudposse/terraform-aws-efs-backup.svg?style=for-the-badge
42 | url: https://github.com/cloudposse/terraform-aws-efs-backup/releases/latest
43 | - name: Last Updated
44 | image: https://img.shields.io/github/last-commit/cloudposse/terraform-aws-efs-backup.svg?style=for-the-badge
45 | url: https://github.com/cloudposse/terraform-aws-efs-backup/commits
46 | - name: Slack Community
47 | image: https://slack.cloudposse.com/for-the-badge.svg
48 | url: https://cloudposse.com/slack
49 |
50 | # List any related terraform modules that this module may be used with or that this module depends on.
51 | related:
52 | - name: "terraform-aws-efs"
53 | description: "Terraform Module to define an EFS Filesystem (aka NFS)"
54 | url: "https://github.com/cloudposse/terraform-aws-efs"
55 | - name: "terraform-aws-efs-cloudwatch-sns-alarms"
56 | description: "Terraform module that configures CloudWatch SNS alerts for EFS"
57 | url: "https://github.com/cloudposse/terraform-aws-efs-cloudwatch-sns-alarms"
58 |
59 | # Short description of this project
60 | description: |-
61 | Terraform module designed to easily backup EFS filesystems to S3 using DataPipeline.
62 |
63 | The workflow is simple:
64 |
65 | * Periodically launch resource (EC2 instance) based on schedule
66 | * Execute the shell command defined in the activity on the instance
67 | * Sync data from Production EFS to S3 Bucket by using `aws-cli`
68 | * The execution log of the activity is stored in `S3`
69 | * Publish the success or failure of the activity to an `SNS` topic
70 | * Automatically rotate the backups using `S3 lifecycle rule`
71 |
72 | # How to use this project
73 | usage: |-
74 | Include this module in your existing terraform code:
75 |
76 | ```hcl
77 | module "efs_backup" {
78 | source = "git::https://github.com/cloudposse/terraform-aws-efs-backup.git?ref=master"
79 |
80 | name = "${var.name}"
81 | stage = "${var.stage}"
82 | namespace = "${var.namespace}"
83 | vpc_id = "${var.vpc_id}"
84 | efs_mount_target_id = "${var.efs_mount_target_id}"
85 | use_ip_address = "false"
86 | noncurrent_version_expiration_days = "${var.noncurrent_version_expiration_days}"
87 | ssh_key_pair = "${var.ssh_key_pair}"
88 | datapipeline_config = "${var.datapipeline_config}"
89 | modify_security_group = "true"
90 | }
91 |
92 | output "efs_backup_security_group" {
93 | value = "${module.efs_backup.security_group_id}"
94 | }
95 | ```
96 | ## Integration with `EFS`
97 |
98 | To enable connectivity between the `DataPipeline` instances and the `EFS`, use one of the following methods to configure Security Groups:
99 |
100 | 1. Explicitly add the `DataPipeline` SG (the output of this module `security_group_id`) to the list of the `ingress` rules of the `EFS` SG. For example:
101 |
102 | ```hcl
103 | module "elastic_beanstalk_environment" {
104 | source = "git::https://github.com/cloudposse/terraform-aws-elastic-beanstalk-environment.git?ref=master"
105 | namespace = "${var.namespace}"
106 | name = "${var.name}"
107 | stage = "${var.stage}"
108 | delimiter = "${var.delimiter}"
109 | attributes = ["${compact(concat(var.attributes, list("eb-env")))}"]
110 | tags = "${var.tags}"
111 |
112 | # ..............................
113 | }
114 |
115 | module "efs" {
116 |     source     = "git::https://github.com/cloudposse/terraform-aws-efs.git?ref=master"
117 | namespace = "${var.namespace}"
118 | name = "${var.name}"
119 | stage = "${var.stage}"
120 | delimiter = "${var.delimiter}"
121 | attributes = ["${compact(concat(var.attributes, list("efs")))}"]
122 | tags = "${var.tags}"
123 |
124 | # Allow EB/EC2 instances and DataPipeline instances to connect to the EFS
125 | security_groups = ["${module.elastic_beanstalk_environment.security_group_id}", "${module.efs_backup.security_group_id}"]
126 | }
127 |
128 | module "efs_backup" {
129 | source = "git::https://github.com/cloudposse/terraform-aws-efs-backup.git?ref=master"
130 | name = "${var.name}"
131 | stage = "${var.stage}"
132 | namespace = "${var.namespace}"
133 | delimiter = "${var.delimiter}"
134 | attributes = ["${compact(concat(var.attributes, list("efs-backup")))}"]
135 | tags = "${var.tags}"
136 |
137 | # Important to set it to `false` since we added the `DataPipeline` SG (output of the `efs_backup` module) to the `security_groups` of the `efs` module
138 | # See NOTE below for more information
139 | modify_security_group = "false"
140 |
141 | # ..............................
142 | }
143 | ```
144 |
145 | 2. Set `modify_security_group` attribute to `true` so the module will modify the `EFS` SG to allow the `DataPipeline` to connect to the `EFS`
146 |
147 | **NOTE:** Do not mix these two methods together.
148 | `Terraform` does not support using a Security Group with in-line rules in conjunction with any Security Group Rule resources.
149 | https://www.terraform.io/docs/providers/aws/r/security_group_rule.html
150 | > NOTE on Security Groups and Security Group Rules: Terraform currently provides both a standalone Security Group Rule resource
151 | (a single ingress or egress rule), and a Security Group resource with ingress and egress rules defined in-line.
152 | At this time you cannot use a Security Group with in-line rules in conjunction with any Security Group Rule resources.
153 | Doing so will cause a conflict of rule settings and will overwrite rules.
154 |
155 | references:
156 | - name: "datapipeline-efs-backup-demo"
157 | description: 'Thanks for inspiration'
158 | url: "https://github.com/knakayama/datapipeline-efs-backup-demo"
159 |
160 | include: []
161 | contributors: []
162 |
--------------------------------------------------------------------------------
/atmos.yaml:
--------------------------------------------------------------------------------
1 | # Atmos Configuration — powered by https://atmos.tools
2 | #
3 | # This configuration enables centralized, DRY, and consistent project scaffolding using Atmos.
4 | #
5 | # Included features:
6 | # - Organizational custom commands: https://atmos.tools/core-concepts/custom-commands
7 | # - Automated README generation: https://atmos.tools/cli/commands/docs/generate
8 | #
9 |
10 | # Import shared configuration used by all modules
11 | import:
12 | - https://raw.githubusercontent.com/cloudposse/.github/refs/heads/main/.github/atmos/terraform-module.yaml
13 |
--------------------------------------------------------------------------------
/cloudformation.tf:
--------------------------------------------------------------------------------
# Label for the SNS CloudFormation stack (appends the "sns" attribute to the base name).
1 | module "sns_label" {
2 |   source     = "git::https://github.com/cloudposse/terraform-null-label.git?ref=tags/0.3.1"
3 |   namespace  = var.namespace
4 |   stage      = var.stage
5 |   name       = var.name
6 |   delimiter  = var.delimiter
  # HCL2: build the list directly. The previous `["${compact(concat(..., list("sns")))}"]`
  # form interpolates a list into a string, which is an error in Terraform 0.12+
  # (the rest of this file already uses 0.12 `var.x` syntax).
7 |   attributes = compact(concat(var.attributes, ["sns"]))
8 |   tags       = var.tags
9 | }
10 |
# CloudFormation stack that creates the SNS topic (and optional email
# subscription) used for backup success/failure notifications.
11 | resource "aws_cloudformation_stack" "sns" {
12 |   name          = module.sns_label.id
13 |   template_body = file("${path.module}/templates/sns.yml")
14 | 
  # `parameters` is a map argument in the AWS provider schema, not a nested
  # block; `parameters { ... }` is rejected by Terraform 0.12+.
15 |   parameters = {
16 |     Email = var.datapipeline_config["email"]
17 |   }
18 | 
19 |   tags = module.sns_label.tags
20 | }
21 |
# Label for the DataPipeline CloudFormation stack (appends the "datapipeline" attribute).
22 | module "datapipeline_label" {
23 |   source     = "git::https://github.com/cloudposse/terraform-null-label.git?ref=tags/0.3.1"
24 |   namespace  = var.namespace
25 |   stage      = var.stage
26 |   name       = var.name
27 |   delimiter  = var.delimiter
  # HCL2: build the list directly; the old `["${compact(concat(..., list(...)))}"]`
  # form interpolates a list into a string, invalid in Terraform 0.12+.
28 |   attributes = compact(concat(var.attributes, ["datapipeline"]))
29 |   tags       = var.tags
30 | }
31 |
# CloudFormation stack that provisions the DataPipeline performing the EFS -> S3
# backup (see templates/datapipeline.yml for the pipeline definition).
32 | resource "aws_cloudformation_stack" "datapipeline" {
33 |   name          = module.datapipeline_label.id
34 |   template_body = file("${path.module}/templates/datapipeline.yml")
35 | 
  # `parameters` is a map argument, not a nested block, in the AWS provider schema.
36 |   parameters = {
37 |     myInstanceType = var.datapipeline_config["instance_type"]
  # `aws_subnet_ids.ids` is a set of strings; sets cannot be indexed directly in
  # Terraform 0.12+, so convert to a list first.
38 |     mySubnetId = var.subnet_id == "" ? tolist(data.aws_subnet_ids.default.ids)[0] : var.subnet_id
39 |     mySecurityGroupId = var.datapipeline_security_group == "" ? join("", aws_security_group.datapipeline.*.id) : var.datapipeline_security_group
  # Connect via mount-target IP or via the <fs-id>.efs.<region>.amazonaws.com DNS
  # name; `var.region` (when non-empty) overrides the current-region data source.
40 |     myEFSHost = var.use_ip_address == "true" ? data.aws_efs_mount_target.default.ip_address : format("%s.efs.%s.amazonaws.com", data.aws_efs_mount_target.default.file_system_id, (signum(length(var.region)) == 1 ? var.region : data.aws_region.default.name))
41 |     myS3BackupsBucket = aws_s3_bucket.backups.id
42 |     myRegion = signum(length(var.region)) == 1 ? var.region : data.aws_region.default.name
43 |     myImageId = data.aws_ami.amazon_linux.id
44 |     myTopicArn = aws_cloudformation_stack.sns.outputs["TopicArn"]
45 |     myS3LogBucket = aws_s3_bucket.logs.id
46 |     myDataPipelineResourceRole = aws_iam_instance_profile.resource_role.name
47 |     myDataPipelineRole = aws_iam_role.role.name
48 |     myKeyPair = var.ssh_key_pair
49 |     myPeriod = var.datapipeline_config["period"]
50 |     Tag = module.label.id
51 |     myExecutionTimeout = var.datapipeline_config["timeout"]
52 |   }
53 | 
54 |   tags = module.datapipeline_label.tags
55 | }
56 |
--------------------------------------------------------------------------------
/efs.tf:
--------------------------------------------------------------------------------
1 | # Get Elastic File System Mount Target
# Used to resolve the EFS host (DNS name or IP address) passed to the
# DataPipeline template, and the security groups attached to the mount target
# (referenced by security_group.tf when `modify_security_group` is "true").
2 | data "aws_efs_mount_target" "default" {
3 |   mount_target_id = var.efs_mount_target_id
4 | }
5 |
--------------------------------------------------------------------------------
/iam.tf:
--------------------------------------------------------------------------------
# Trust policy allowing EC2 instances (launched by DataPipeline) to assume the
# resource role.
1 | data "aws_iam_policy_document" "resource_role" {
2 |   statement {
3 |     sid     = "EC2AssumeRole"
4 |     effect  = "Allow"
5 |     actions = ["sts:AssumeRole"]
6 | 
  # `principals` is a nested block of this data source, not a map argument;
  # `principals = { ... }` is invalid and rejected by Terraform 0.12+.
7 |     principals {
8 |       type        = "Service"
9 |       identifiers = ["ec2.amazonaws.com"]
10 |     }
11 |   }
12 | }
13 |
14 |
# Label for the DataPipeline EC2 "resource role" and its instance profile.
# NOTE(review): no distinguishing attribute is appended here, so this label's
# `id` is identical to `module.label.id` in main.tf — confirm the resulting IAM
# role/instance-profile names cannot collide with anything else named from
# `module.label.id`.
15 | module "resource_role_label" {
16 |   source     = "git::https://github.com/cloudposse/terraform-null-label.git?ref=tags/0.3.1"
17 |   namespace  = var.namespace
18 |   stage      = var.stage
19 |   name       = var.name
20 |   delimiter  = var.delimiter
21 |   attributes = var.attributes
22 |   tags       = var.tags
23 | }
24 |
# IAM role assumed by the DataPipeline-managed EC2 instances.
25 | resource "aws_iam_role" "resource_role" {
26 |   name               = module.resource_role_label.id
27 |   assume_role_policy = data.aws_iam_policy_document.resource_role.json
28 | }
29 | 
# Attach the AWS-managed policy granting the EC2 resource the permissions
# DataPipeline tasks need (S3 access, pipeline status reporting, etc.).
30 | resource "aws_iam_role_policy_attachment" "resource_role" {
31 |   role       = aws_iam_role.resource_role.name
32 |   policy_arn = "arn:aws:iam::aws:policy/service-role/AmazonEC2RoleforDataPipelineRole"
33 | }
34 | 
# Instance profile wrapping the role; passed to the pipeline as
# `myDataPipelineResourceRole` in cloudformation.tf.
35 | resource "aws_iam_instance_profile" "resource_role" {
36 |   name = module.resource_role_label.id
37 |   role = aws_iam_role.resource_role.name
38 | }
39 |
# Trust policy allowing the DataPipeline (and EMR) service to assume the
# pipeline role.
40 | data "aws_iam_policy_document" "role" {
41 |   statement {
42 |     sid     = "AssumeRole"
43 |     effect  = "Allow"
44 |     actions = ["sts:AssumeRole"]
45 | 
  # `principals` is a nested block of this data source, not a map argument;
  # `principals = { ... }` is invalid and rejected by Terraform 0.12+.
46 |     principals {
47 |       type = "Service"
48 | 
49 |       identifiers = [
50 |         "elasticmapreduce.amazonaws.com",
51 |         "datapipeline.amazonaws.com",
52 |       ]
53 |     }
54 |   }
55 | }
56 |
# Label for the DataPipeline service role (appends the "role" attribute).
57 | module "role_label" {
58 |   source     = "git::https://github.com/cloudposse/terraform-null-label.git?ref=tags/0.3.1"
59 |   namespace  = var.namespace
60 |   stage      = var.stage
61 |   name       = var.name
62 |   delimiter  = var.delimiter
  # HCL2: build the list directly; the old `["${compact(concat(..., list(...)))}"]`
  # form interpolates a list into a string, invalid in Terraform 0.12+.
63 |   attributes = compact(concat(var.attributes, ["role"]))
64 |   tags       = var.tags
65 | }
66 |
# IAM role assumed by the DataPipeline service itself; passed to the pipeline
# as `myDataPipelineRole` in cloudformation.tf.
67 | resource "aws_iam_role" "role" {
68 |   name               = module.role_label.id
69 |   assume_role_policy = data.aws_iam_policy_document.role.json
70 | }
71 | 
# Attach the AWS-managed service policy for DataPipeline.
72 | resource "aws_iam_role_policy_attachment" "role" {
73 |   role       = aws_iam_role.role.name
74 |   policy_arn = "arn:aws:iam::aws:policy/service-role/AWSDataPipelineRole"
75 | }
76 |
--------------------------------------------------------------------------------
/main.tf:
--------------------------------------------------------------------------------
# Current AWS region; used as a fallback when `var.region` is empty
# (see cloudformation.tf).
1 | data "aws_region" "default" {}
2 | 
# Base label (namespace-stage-name) used for the security group, the pipeline
# `Tag` parameter, and as the root of the other `*_label` modules' naming.
3 | module "label" {
4 |   source     = "git::https://github.com/cloudposse/terraform-null-label.git?ref=tags/0.3.1"
5 |   namespace  = var.namespace
6 |   stage      = var.stage
7 |   name       = var.name
8 |   delimiter  = var.delimiter
9 |   attributes = var.attributes
10 |   tags       = var.tags
11 | }
12 |
# Most recent Amazon-owned Amazon Linux AMI (x86_64, HVM, EBS/gp2-backed);
# used as the image for the DataPipeline EC2 resource (`myImageId`).
13 | data "aws_ami" "amazon_linux" {
14 |   most_recent = true
15 |   owners      = ["amazon"]
16 | 
17 |   filter {
18 |     name   = "architecture"
19 |     values = ["x86_64"]
20 |   }
21 | 
22 |   filter {
23 |     name   = "root-device-type"
24 |     values = ["ebs"]
25 |   }
26 | 
# NOTE(review): this matches the original Amazon Linux (amzn-ami-hvm-*), not
# Amazon Linux 2 — presumably intentional for the pipeline's yum packages;
# confirm before upgrading the name pattern.
27 |   filter {
28 |     name   = "name"
29 |     values = ["amzn-ami-hvm-*"]
30 |   }
31 | 
32 |   filter {
33 |     name   = "virtualization-type"
34 |     values = ["hvm"]
35 |   }
36 | 
37 |   filter {
38 |     name   = "block-device-mapping.volume-type"
39 |     values = ["gp2"]
40 |   }
41 | }
42 |
--------------------------------------------------------------------------------
/network.tf:
--------------------------------------------------------------------------------
1 | # Get object aws_vpc by vpc_id
2 | data "aws_vpc" "default" {
3 |   id = var.vpc_id
4 | }
5 | 
6 | # Get all subnets from the VPC
# Used to pick a default subnet for the pipeline instance when `var.subnet_id`
# is empty (see cloudformation.tf).
# NOTE(review): `aws_subnet_ids` is deprecated in newer AWS provider versions
# in favor of `aws_subnets` — confirm on provider upgrade.
7 | data "aws_subnet_ids" "default" {
8 |   vpc_id = data.aws_vpc.default.id
9 | }
10 |
--------------------------------------------------------------------------------
/outputs.tf:
--------------------------------------------------------------------------------
# NOTE(review): despite the "name" description, this exposes the bucket
# *domain name* (e.g. bucket.s3.amazonaws.com) — confirm callers expect this.
1 | output "logs_bucket_name" {
2 |   value       = aws_s3_bucket.logs.bucket_domain_name
3 |   description = "Logs bucket name"
4 | }
5 | 
# NOTE(review): same as above — bucket *domain name*, not the bare name.
6 | output "backups_bucket_name" {
7 |   value       = aws_s3_bucket.backups.bucket_domain_name
8 |   description = "Backups bucket name"
9 | }
10 | 
# ID of the pipeline created by the CloudFormation stack.
11 | output "datapipeline_ids" {
12 |   value       = aws_cloudformation_stack.datapipeline.outputs["DataPipelineId"]
13 |   description = "Datapipeline ids"
14 | }
15 | 
# The SG the pipeline instances use: either the module-created one or the
# caller-supplied `datapipeline_security_group`.
16 | output "security_group_id" {
17 |   value       = var.datapipeline_security_group == "" ? join("", aws_security_group.datapipeline.*.id) : var.datapipeline_security_group
18 |   description = "Security group id"
19 | }
20 | 
21 | output "sns_topic_arn" {
22 |   value       = aws_cloudformation_stack.sns.outputs["TopicArn"]
23 |   description = "Backup notification SNS topic ARN"
24 | }
25 |
--------------------------------------------------------------------------------
/s3.tf:
--------------------------------------------------------------------------------
# Label for the pipeline-logs S3 bucket (appends the "logs" attribute).
1 | module "logs_label" {
2 |   source     = "git::https://github.com/cloudposse/terraform-null-label.git?ref=tags/0.3.1"
3 |   namespace  = var.namespace
4 |   stage      = var.stage
5 |   name       = var.name
6 |   delimiter  = var.delimiter
  # HCL2: build the list directly; the old `["${compact(concat(..., list(...)))}"]`
  # form interpolates a list into a string, invalid in Terraform 0.12+.
7 |   attributes = compact(concat(var.attributes, ["logs"]))
8 |   tags       = var.tags
9 | }
10 |
# Bucket for DataPipeline execution logs (`myS3LogBucket`); force_destroy lets
# `terraform destroy` succeed even when log objects remain.
11 | resource "aws_s3_bucket" "logs" {
12 |   bucket        = module.logs_label.id
13 |   force_destroy = true
14 |   tags          = module.logs_label.tags
15 | }
16 |
# Label for the backups S3 bucket (appends the "backups" attribute).
17 | module "backups_label" {
18 |   source     = "git::https://github.com/cloudposse/terraform-null-label.git?ref=tags/0.3.1"
19 |   namespace  = var.namespace
20 |   stage      = var.stage
21 |   name       = var.name
22 |   delimiter  = var.delimiter
  # HCL2: build the list directly; the old `["${compact(concat(..., list(...)))}"]`
  # form interpolates a list into a string, invalid in Terraform 0.12+.
23 |   attributes = compact(concat(var.attributes, ["backups"]))
24 |   tags       = var.tags
25 | }
26 |
# Bucket holding the EFS backups synced by the pipeline (`myS3BackupsBucket`).
27 | resource "aws_s3_bucket" "backups" {
28 |   bucket = module.backups_label.id
29 |   tags   = module.backups_label.tags
30 | 
# Versioning keeps prior copies of overwritten/deleted objects so the
# lifecycle rule below can rotate them out.
31 |   versioning {
32 |     enabled = true
33 |   }
34 | 
# Expire noncurrent object versions under the "efs" prefix after the
# configured number of days — this is the automatic backup rotation.
35 |   lifecycle_rule {
36 |     enabled = true
37 |     prefix  = "efs"
38 | 
39 |     noncurrent_version_expiration {
40 |       days = var.noncurrent_version_expiration_days
41 |     }
42 |   }
43 | }
44 |
--------------------------------------------------------------------------------
/security_group.tf:
--------------------------------------------------------------------------------
# Security group for the DataPipeline EC2 instances; created only when the
# caller did not supply `var.datapipeline_security_group`.
1 | resource "aws_security_group" "datapipeline" {
2 |   count       = var.datapipeline_security_group == "" ? 1 : 0
3 |   tags        = module.label.tags
4 |   vpc_id      = data.aws_vpc.default.id
5 |   name        = module.label.id
6 |   description = module.label.id
7 | 
# NOTE(review): SSH is open to 0.0.0.0/0 — consider restricting to a trusted
# CIDR or removing (the pipeline instance does not need inbound SSH to run);
# left unchanged here to preserve existing behavior.
8 |   ingress {
9 |     from_port   = 22
10 |     to_port     = 22
11 |     protocol    = "tcp"
12 |     cidr_blocks = ["0.0.0.0/0"]
13 |   }
14 | 
# Allow all outbound traffic (NFS to EFS, S3 sync, yum, SNS).
15 |   egress {
16 |     from_port   = 0
17 |     to_port     = 0
18 |     protocol    = "-1"
19 |     cidr_blocks = ["0.0.0.0/0"]
20 |   }
21 | }
22 |
# When `modify_security_group` is "true", open the EFS mount target's SG to the
# pipeline instances' SG so they can mount the filesystem over NFS.
23 | resource "aws_security_group_rule" "datapipeline_efs_ingress" {
24 |   count                    = var.modify_security_group == "true" ? 1 : 0
25 |   from_port                = 0
26 |   protocol                 = "-1"
  # `security_groups` is a set; sets cannot be indexed directly in Terraform
  # 0.12+, so convert to a list before taking the first element.
27 |   security_group_id        = tolist(data.aws_efs_mount_target.default.security_groups)[0]
28 |   to_port                  = 0
29 |   type                     = "ingress"
30 |   source_security_group_id = var.datapipeline_security_group == "" ? join("", aws_security_group.datapipeline.*.id) : var.datapipeline_security_group
31 | }
32 |
--------------------------------------------------------------------------------
/templates/datapipeline.yml:
--------------------------------------------------------------------------------
1 | ---
2 | AWSTemplateFormatVersion: 2010-09-09
3 | Description: DataPipeline EFS Backup DataPipeline Template
4 |
5 | Parameters:
6 | myS3BackupsBucket:
7 | Type: String
8 | mySecurityGroupId:
9 | Type: AWS::EC2::SecurityGroup::Id
10 | mySubnetId:
11 | Type: AWS::EC2::Subnet::Id
12 | myInstanceType:
13 | Type: String
14 | myEFSHost:
15 | Type: String
16 | myRegion:
17 | Type: String
18 | myImageId:
19 | Type: AWS::EC2::Image::Id
20 | myTopicArn:
21 | Type: String
22 | myS3LogBucket:
23 | Type: String
24 | myDataPipelineResourceRole:
25 | Type: String
26 | myDataPipelineRole:
27 | Type: String
28 | myKeyPair:
29 | Type: AWS::EC2::KeyPair::KeyName
30 | Tag:
31 | Type: String
32 | myPeriod:
33 | Type: String
34 | myExecutionTimeout:
35 | Type: String
36 |
37 |
38 | Resources:
39 | DataPipelineEFSBackup:
40 | Type: AWS::DataPipeline::Pipeline
41 | Properties:
42 | Name: DataPipelineEFSBackup
43 | Description: DataPipeline EFS Backup
44 | PipelineTags:
45 | - Key: Name
46 | Value: !Ref Tag
47 | PipelineObjects:
48 | - Id: Default
49 | Name: Default
50 | Fields:
51 | - Key: type
52 | StringValue: Default
53 | - Key: scheduleType
54 | StringValue: cron
55 | - Key: failureAndRerunMode
56 | StringValue: CASCADE
57 | - Key: schedule
58 | RefValue: DefaultSchedule
59 | - Key: role
60 | StringValue: "#{myDataPipelineRole}"
61 | - Key: resourceRole
62 | StringValue: "#{myDataPipelineResourceRole}"
63 | - Key: pipelineLogUri
64 | StringValue: "#{myS3LogBucket}"
65 | - Id: EC2ResourceObj
66 | Name: EC2ResourceObj
67 | Fields:
68 | - Key: type
69 | StringValue: Ec2Resource
70 | - Key: terminateAfter
71 | StringValue: "#{myExecutionTimeout}"
72 | - Key: instanceType
73 | StringValue: "#{myInstanceType}"
74 | - Key: securityGroupIds
75 | StringValue: "#{mySecurityGroupId}"
76 | - Key: subnetId
77 | StringValue: "#{mySubnetId}"
78 | - Key: associatePublicIpAddress
79 | StringValue: "true"
80 | - Key: imageId
81 | StringValue: "#{myImageId}"
82 | - Key: keyPair
83 | StringValue: "#{myKeyPair}"
84 | - Id: DefaultSchedule
85 | Name: DefaultSchedule
86 | Fields:
87 | - Key: type
88 | StringValue: Schedule
89 | - Key: startAt
90 | StringValue: FIRST_ACTIVATION_DATE_TIME
91 | - Key: period
92 | StringValue: "#{myPeriod}"
93 | - Id: ShellCommandActivityObj
94 | Name: ShellCommandActivityObj
95 | Fields:
96 | - Key: type
97 | StringValue: ShellCommandActivity
98 | - Key: runsOn
99 | RefValue: EC2ResourceObj
100 | - Key: command
101 | StringValue: |
102 | source="$1"
103 | region="$2"
104 | destination="$3"
105 | sudo yum -y install nfs-utils
106 | [[ -d /backup ]] || sudo mkdir /backup
107 | if ! mount -l -t nfs4 | grep -qF $source; then
108 | sudo mount -t nfs -o nfsvers=4.1 -o rsize=1048576 -o wsize=1048576 -o timeo=600 -o retrans=2 -o hard "$source":/ /backup
109 | fi
110 | sudo aws s3 sync --delete --exact-timestamps /backup/ s3://$destination/
111 | backup_status="$?"
112 | # Return code 2 means that the following were skipped:
113 | # files/symlinks that do not exist, files that are character special devices, block special device, FIFO's, or sockets, and files that the user cannot read from
114 |               # We need to return 0 in this case for the pipeline to succeed because all normal files were synced successfully
115 | # http://docs.aws.amazon.com/cli/latest/topic/return-codes.html
116 | # https://github.com/aws/aws-cli/issues/1125
117 | if [ "$backup_status" -eq "2" ]; then
118 | backup_status="0"
119 | fi
120 | exit "$backup_status"
121 | - Key: scriptArgument
122 | StringValue: "#{myEFSHost}"
123 | - Key: scriptArgument
124 | StringValue: "#{myRegion}"
125 | - Key: scriptArgument
126 | StringValue: "#{myS3BackupsBucket}"
127 | - Key: onSuccess
128 | RefValue: SuccessNotify
129 | - Key: onFail
130 | RefValue: FailureNotify
131 | - Id: SuccessNotify
132 | Name: SuccessNotify
133 | Fields:
134 | - Key: type
135 | StringValue: SnsAlarm
136 | - Key: topicArn
137 | StringValue: "#{myTopicArn}"
138 | - Key: subject
139 | StringValue: "[Info] EFS Backup Succeeded"
140 | - Key: message
141 | StringValue: |
142 | scheduledStartTime: "#{node.@scheduledStartTime}"
143 | actualStartTime: "#{node.@actualStartTime}"
144 | actualEndTime: "#{node.@actualEndTime}"
145 | hostname: "#{node.hostname}"
146 | - Id: FailureNotify
147 | Name: FailureNotify
148 | Fields:
149 | - Key: type
150 | StringValue: SnsAlarm
151 | - Key: topicArn
152 | StringValue: "#{myTopicArn}"
153 | - Key: subject
154 | StringValue: "[Alert] EFS Backup Failed"
155 | - Key: message
156 | StringValue: |
157 | scheduledStartTime: "#{node.@scheduledStartTime}"
158 | actualStartTime: "#{node.@actualStartTime}"
159 | actualEndTime: "#{node.@actualEndTime}"
160 | hostname: "#{node.hostname}"
161 | ParameterObjects:
162 | - Id: myInstanceType
163 | Attributes:
164 | - Key: description
165 | StringValue: Instance type for performing the backup.
166 | - Key: type
167 | StringValue: String
168 | - Key: default
169 | StringValue: t2.micro
170 | - Id: myExecutionTimeout
171 | Attributes:
172 | - Key: type
173 | StringValue: String
174 | - Key: default
175 | StringValue: "360 Minutes"
176 | - Key: description
177 | StringValue: Terminate the resource after this period.
178 | - Id: mySubnetId
179 | Attributes:
180 | - Key: type
181 | StringValue: String
182 | - Key: default
183 | StringValue: subnet-1234abcd
184 | - Key: description
185 | StringValue: VPC subnet for your backup EC2 instance (ideally the same subnet used for the production EFS mount point).
186 | - Id: mySecurityGroupId
187 | Attributes:
188 | - Key: type
189 | StringValue: String
190 | - Key: default
191 | StringValue: sg-1111111b
192 | - Key: description
193 | StringValue: Security group that can connect to the Production/Backup EFS mount point.
194 | - Id: myEFSHost
195 | Attributes:
196 | - Key: type
197 | StringValue: String
198 | - Key: default
199 | StringValue: backup-fs-12345678
200 | - Key: description
201 | StringValue: EFS host (DNS name or IP address) of the file system to back up.
202 | - Id: myRegion
203 | Attributes:
204 | - Key: type
205 | StringValue: String
206 | - Key: default
207 | StringValue: us-east-1
208 | - Key: description
209 | StringValue: AWS Region
210 | - Id: myS3BackupsBucket
211 | Attributes:
212 | - Key: type
213 | StringValue: String
214 | - Key: default
215 | StringValue: efs-backups
216 | - Key: description
217 | StringValue: S3 backup bucket name (without the s3:// prefix; the sync command adds it)
218 | - Id: myImageId
219 | Attributes:
220 | - Key: type
221 | StringValue: String
222 | - Key: default
223 | StringValue: ami-12345678
224 | - Key: description
225 | StringValue: AMI ID for the EC2 instance.
226 | - Id: myS3LogBucket
227 | Attributes:
228 | - Key: type
229 | StringValue: String
230 | - Key: default
231 | StringValue: my-s3-log-bucket
232 | - Key: description
233 | StringValue: S3 log bucket name (without the s3:// prefix; the template adds it via !Sub)
234 | - Id: myDataPipelineResourceRole
235 | Attributes:
236 | - Key: type
237 | StringValue: String
238 | - Key: default
239 | StringValue: DataPipelineDefaultResourceRole
240 | - Key: description
241 | StringValue: DataPipeline Resource Role
242 | - Id: myDataPipelineRole
243 | Attributes:
244 | - Key: type
245 | StringValue: String
246 | - Key: default
247 | StringValue: DataPipelineDefaultRole
248 | - Key: description
249 | StringValue: DataPipeline Role
250 | - Id: myTopicArn
251 | Attributes:
252 | - Key: type
253 | StringValue: String
254 | - Key: default
255 | StringValue: arn:aws:sns:hoge
256 | - Key: description
257 | StringValue: Topic ARN
258 | - Id: myKeyPair
259 | Attributes:
260 | - Key: type
261 | StringValue: String
262 | - Key: default
263 | StringValue: my-key
264 | - Key: description
265 | StringValue: Key Pair
266 | - Id: myPeriod
267 | Attributes:
268 | - Key: type
269 | StringValue: String
270 | - Key: default
271 | StringValue: "1 hours"
272 | - Key: description
273 | StringValue: How often the pipeline should run
274 | ParameterValues:
275 | - Id: myInstanceType
276 | StringValue: !Ref myInstanceType
277 | - Id: myExecutionTimeout
278 | StringValue: !Ref myExecutionTimeout
279 | - Id: mySubnetId
280 | StringValue: !Ref mySubnetId
281 | - Id: mySecurityGroupId
282 | StringValue: !Ref mySecurityGroupId
283 | - Id: myEFSHost
284 | StringValue: !Ref myEFSHost
285 | - Id: myRegion
286 | StringValue: !Ref myRegion
287 | - Id: myS3BackupsBucket
288 | StringValue: !Ref myS3BackupsBucket
289 | - Id: myImageId
290 | StringValue: !Ref myImageId
291 | - Id: myS3LogBucket
292 | StringValue: !Sub s3://${myS3LogBucket}
293 | - Id: myDataPipelineResourceRole
294 | StringValue: !Ref myDataPipelineResourceRole
295 | - Id: myDataPipelineRole
296 | StringValue: !Ref myDataPipelineRole
297 | - Id: myTopicArn
298 | StringValue: !Ref myTopicArn
299 | - Id: myKeyPair
300 | StringValue: !Ref myKeyPair
301 | - Id: myPeriod
302 | StringValue: !Ref myPeriod
303 |
304 | Outputs:
305 | DataPipelineId:
306 | Value: !Ref DataPipelineEFSBackup
307 |
--------------------------------------------------------------------------------
/templates/sns.yml:
--------------------------------------------------------------------------------
1 | ---
2 | AWSTemplateFormatVersion: 2010-09-09
3 | Description: DataPipeline EFS Backup SNS Template
4 |
5 | Parameters:
6 | Email:
7 | Type: String
8 |
9 | Conditions:
10 | SubscribeEmail: !Not [!Equals [ !Ref Email, ""]]
11 |
12 | Resources:
13 | Topic:
14 | Type: AWS::SNS::Topic
15 |
16 | EmailSubscription:
17 | Condition: SubscribeEmail
18 | Type: AWS::SNS::Subscription
19 | Properties:
20 | TopicArn: !Ref Topic
21 | Protocol: email
22 | Endpoint: !Ref Email
23 |
24 | Outputs:
25 | TopicArn:
26 | Value: !Ref Topic
27 |
--------------------------------------------------------------------------------
/variables.tf:
--------------------------------------------------------------------------------
1 | variable "name" {
2 | description = "The Name of the application or solution (e.g. `bastion` or `portal`)"
3 | }
4 |
5 | variable "namespace" {
6 | description = "Namespace (e.g. `cp` or `cloudposse`)"
7 | }
8 |
9 | variable "stage" {
10 | description = "Stage (e.g. `prod`, `dev`, `staging`)"
11 | }
12 |
13 | variable "region" {
14 | type = string
15 | default = ""
16 | description = "(Optional) AWS Region. If not specified, will be derived from 'aws_region' data source"
17 | }
18 |
19 | variable "vpc_id" {
20 | default = ""
21 | description = "VPC ID"
22 | }
23 |
24 | # https://www.terraform.io/docs/configuration/variables.html
25 | # Historically (Terraform < 0.12) string values were recommended over booleans; kept as a string for backward compatibility
26 | variable "use_ip_address" {
27 | default = "false"
28 | description = "If set to `true`, will use IP address instead of DNS name to connect to the `EFS`"
29 | }
30 |
31 | variable "datapipeline_config" {
32 | description = "DataPipeline configuration options"
33 | type = map(string)
34 |
35 | default = {
36 | instance_type = "t2.micro"
37 | email = ""
38 | period = "24 hours"
39 | timeout = "60 Minutes"
40 | }
41 | }
42 |
43 | variable "efs_mount_target_id" {
44 | type = string
45 | description = "EFS Mount Target ID (e.g. `fsmt-279bfc62`)"
46 | }
47 |
48 | variable "modify_security_group" {
49 | default = "false"
50 | description = "Should the module modify the `EFS` security group"
51 | }
52 |
53 | # Set a name of ssh key that will be deployed on DataPipeline's instance. The key should be present in AWS.
54 | variable "ssh_key_pair" {
55 | type = string
56 | description = "`SSH` key that will be deployed on DataPipeline's instance"
57 | }
58 |
59 | variable "noncurrent_version_expiration_days" {
60 | default = "35"
61 | description = "S3 object versions expiration period (days)"
62 | }
63 |
64 | variable "delimiter" {
65 | type = string
66 | default = "-"
67 | description = "Delimiter to be used between `name`, `namespace`, `stage`, etc."
68 | }
69 |
70 | variable "attributes" {
71 | type = list(string)
72 | default = []
73 | description = "Additional attributes (e.g. `efs-backup`)"
74 | }
75 |
76 | variable "tags" {
77 | type = map(string)
78 | default = {}
79 | description = "Additional tags (e.g. map(`BusinessUnit`,`XYZ`))"
80 | }
81 |
82 | variable "subnet_id" {
83 | type = string
84 | default = ""
85 | description = "Optionally specify the subnet to use"
86 | }
87 |
88 | variable "datapipeline_security_group" {
89 | type = string
90 | default = ""
91 | description = "Optionally specify a security group to use for the datapipeline instances"
92 | }
93 |
--------------------------------------------------------------------------------