├── .editorconfig
├── .github
├── CODEOWNERS
├── ISSUE_TEMPLATE
│ ├── bug_report.md
│ ├── bug_report.yml
│ ├── config.yml
│ ├── feature_request.md
│ ├── feature_request.yml
│ └── question.md
├── PULL_REQUEST_TEMPLATE.md
├── banner.png
├── mergify.yml
├── renovate.json
├── settings.yml
└── workflows
│ ├── branch.yml
│ ├── chatops.yml
│ ├── release.yml
│ └── scheduled.yml
├── .gitignore
├── LICENSE
├── README.md
├── README.yaml
├── atmos.yaml
├── context.tf
├── examples
└── complete
│ ├── context.tf
│ ├── fixtures.us-east-2.tfvars
│ ├── main.tf
│ ├── outputs.tf
│ ├── providers.tf
│ ├── variables.tf
│ └── versions.tf
├── lambda-log.tf
├── lambda-rds.tf
├── lambda-vpc-logs.tf
├── main.tf
├── outputs.tf
├── test
├── .gitignore
├── Makefile
├── Makefile.alpine
└── src
│ ├── .gitignore
│ ├── Makefile
│ ├── examples_complete_test.go
│ ├── go.mod
│ └── go.sum
├── variables.tf
└── versions.tf
/.editorconfig:
--------------------------------------------------------------------------------
1 | # Unix-style newlines with a newline ending every file
2 | [*]
3 | charset = utf-8
4 | end_of_line = lf
5 | indent_size = 2
6 | indent_style = space
7 | insert_final_newline = true
8 | trim_trailing_whitespace = true
9 |
10 | [*.{tf,tfvars}]
11 | indent_size = 2
12 | indent_style = space
13 |
14 | [*.go]
15 | indent_size = 2
16 | indent_style = tab
17 |
18 | [*.md]
19 | max_line_length = 0
20 | trim_trailing_whitespace = false
21 |
22 | # Override for Makefile
23 | [{Makefile,makefile,GNUmakefile,Makefile.*}]
24 | tab_width = 2
25 | indent_style = tab
26 | indent_size = 4
27 |
28 | [COMMIT_EDITMSG]
29 | max_line_length = 0
30 |
--------------------------------------------------------------------------------
/.github/CODEOWNERS:
--------------------------------------------------------------------------------
1 | # Use this file to define individuals or teams that are responsible for code in a repository.
2 | # Read more: https://docs.github.com/en/repositories/managing-your-repositorys-settings-and-features/customizing-your-repository/about-code-owners
3 | #
4 | # Order is important: the last matching pattern has the highest precedence
5 |
6 | # These owners will be the default owners for everything
7 | * @cloudposse/engineering @cloudposse/contributors
8 |
9 | # Cloud Posse must review any changes to Makefiles
10 | **/Makefile @cloudposse/engineering
11 | **/Makefile.* @cloudposse/engineering
12 |
13 | # Cloud Posse must review any changes to GitHub actions
14 | .github/* @cloudposse/engineering
15 |
16 | # Cloud Posse must review any changes to standard context definition,
17 | # but some changes can be rubber-stamped.
18 | **/*.tf @cloudposse/engineering @cloudposse/contributors @cloudposse/approvers
19 | README.yaml @cloudposse/engineering @cloudposse/contributors @cloudposse/approvers
20 | README.md @cloudposse/engineering @cloudposse/contributors @cloudposse/approvers
21 | docs/*.md @cloudposse/engineering @cloudposse/contributors @cloudposse/approvers
22 |
23 | # Cloud Posse Admins must review all changes to CODEOWNERS or the mergify configuration
24 | .github/mergify.yml @cloudposse/admins
25 | .github/CODEOWNERS @cloudposse/admins
26 |
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE/bug_report.md:
--------------------------------------------------------------------------------
1 | ---
2 | name: Bug report
3 | about: Create a report to help us improve
4 | title: ''
5 | labels: 'bug'
6 | assignees: ''
7 |
8 | ---
9 |
10 | Found a bug? Maybe our [Slack Community](https://slack.cloudposse.com) can help.
11 |
12 | [](https://slack.cloudposse.com)
13 |
14 | ## Describe the Bug
15 | A clear and concise description of what the bug is.
16 |
17 | ## Expected Behavior
18 | A clear and concise description of what you expected to happen.
19 |
20 | ## Steps to Reproduce
21 | Steps to reproduce the behavior:
22 | 1. Go to '...'
23 | 2. Run '....'
24 | 3. Enter '....'
25 | 4. See error
26 |
27 | ## Screenshots
28 | If applicable, add screenshots or logs to help explain your problem.
29 |
30 | ## Environment (please complete the following information):
31 |
32 | Anything that will help us triage the bug will help. Here are some ideas:
33 | - OS: [e.g. Linux, OSX, WSL, etc]
34 | - Version [e.g. 10.15]
35 |
36 | ## Additional Context
37 | Add any other context about the problem here.
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE/bug_report.yml:
--------------------------------------------------------------------------------
1 | ---
2 | name: Bug report
3 | description: Create a report to help us improve
4 | labels: ["bug"]
5 | assignees: [""]
6 | body:
7 | - type: markdown
8 | attributes:
9 | value: |
10 | Found a bug?
11 |
12 | Please checkout our [Slack Community](https://slack.cloudposse.com)
13 | or visit our [Slack Archive](https://archive.sweetops.com/).
14 |
15 | [](https://slack.cloudposse.com)
16 |
17 | - type: textarea
18 | id: concise-description
19 | attributes:
20 | label: Describe the Bug
21 | description: A clear and concise description of what the bug is.
22 | placeholder: What is the bug about?
23 | validations:
24 | required: true
25 |
26 | - type: textarea
27 | id: expected
28 | attributes:
29 | label: Expected Behavior
30 | description: A clear and concise description of what you expected.
31 |       placeholder: What did you expect to happen?
32 | validations:
33 | required: true
34 |
35 | - type: textarea
36 | id: reproduction-steps
37 | attributes:
38 | label: Steps to Reproduce
39 | description: Steps to reproduce the behavior.
40 | placeholder: How do we reproduce it?
41 | validations:
42 | required: true
43 |
44 | - type: textarea
45 | id: screenshots
46 | attributes:
47 | label: Screenshots
48 | description: If applicable, add screenshots or logs to help explain.
49 | validations:
50 | required: false
51 |
52 | - type: textarea
53 | id: environment
54 | attributes:
55 | label: Environment
56 | description: Anything that will help us triage the bug.
57 | placeholder: |
58 | - OS: [e.g. Linux, OSX, WSL, etc]
59 | - Version [e.g. 10.15]
60 | - Module version
61 | - Terraform version
62 | validations:
63 | required: false
64 |
65 | - type: textarea
66 | id: additional
67 | attributes:
68 | label: Additional Context
69 | description: |
70 | Add any other context about the problem here.
71 | validations:
72 | required: false
73 |
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE/config.yml:
--------------------------------------------------------------------------------
1 | blank_issues_enabled: false
2 |
3 | contact_links:
4 |
5 | - name: Community Slack Team
6 | url: https://cloudposse.com/slack/
7 | about: |-
8 | Please ask and answer questions here.
9 |
10 | - name: Office Hours
11 | url: https://cloudposse.com/office-hours/
12 | about: |-
13 | Join us every Wednesday for FREE Office Hours (lunch & learn).
14 |
15 | - name: DevOps Accelerator Program
16 | url: https://cloudposse.com/accelerate/
17 | about: |-
18 | Own your infrastructure in record time. We build it. You drive it.
19 |
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE/feature_request.md:
--------------------------------------------------------------------------------
1 | ---
2 | name: Feature Request
3 | about: Suggest an idea for this project
4 | title: ''
5 | labels: 'feature request'
6 | assignees: ''
7 |
8 | ---
9 |
10 | Have a question? Please checkout our [Slack Community](https://slack.cloudposse.com) or visit our [Slack Archive](https://archive.sweetops.com/).
11 |
12 | [](https://slack.cloudposse.com)
13 |
14 | ## Describe the Feature
15 |
16 | A clear and concise description of what the feature is.
17 |
18 | ## Expected Behavior
19 |
20 | A clear and concise description of what you expected to happen.
21 |
22 | ## Use Case
23 |
24 | Is your feature request related to a problem/challenge you are trying to solve? Please provide some additional context of why this feature or capability will be valuable.
25 |
26 | ## Describe Ideal Solution
27 |
28 | A clear and concise description of what you want to happen. If you don't know, that's okay.
29 |
30 | ## Alternatives Considered
31 |
32 | Explain what alternative solutions or features you've considered.
33 |
34 | ## Additional Context
35 |
36 | Add any other context or screenshots about the feature request here.
37 |
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE/feature_request.yml:
--------------------------------------------------------------------------------
1 | ---
2 | name: Feature Request
3 | description: Suggest an idea for this project
4 | labels: ["feature request"]
5 | assignees: [""]
6 | body:
7 | - type: markdown
8 | attributes:
9 | value: |
10 | Have a question?
11 |
12 | Please checkout our [Slack Community](https://slack.cloudposse.com)
13 | or visit our [Slack Archive](https://archive.sweetops.com/).
14 |
15 | [](https://slack.cloudposse.com)
16 |
17 | - type: textarea
18 | id: concise-description
19 | attributes:
20 | label: Describe the Feature
21 | description: A clear and concise description of what the feature is.
22 | placeholder: What is the feature about?
23 | validations:
24 | required: true
25 |
26 | - type: textarea
27 | id: expected
28 | attributes:
29 | label: Expected Behavior
30 | description: A clear and concise description of what you expected.
31 |       placeholder: What did you expect to happen?
32 | validations:
33 | required: true
34 |
35 | - type: textarea
36 | id: use-case
37 | attributes:
38 | label: Use Case
39 | description: |
40 | Is your feature request related to a problem/challenge you are trying
41 | to solve?
42 |
43 | Please provide some additional context of why this feature or
44 | capability will be valuable.
45 | validations:
46 | required: true
47 |
48 | - type: textarea
49 | id: ideal-solution
50 | attributes:
51 | label: Describe Ideal Solution
52 | description: A clear and concise description of what you want to happen.
53 | validations:
54 | required: true
55 |
56 | - type: textarea
57 | id: alternatives-considered
58 | attributes:
59 | label: Alternatives Considered
60 | description: Explain alternative solutions or features considered.
61 | validations:
62 | required: false
63 |
64 | - type: textarea
65 | id: additional
66 | attributes:
67 | label: Additional Context
68 | description: |
69 | Add any other context about the problem here.
70 | validations:
71 | required: false
72 |
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE/question.md:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cloudposse/terraform-aws-datadog-lambda-forwarder/dfa7a9e14eb91f4c6a01aba5c5da6c913493c2fb/.github/ISSUE_TEMPLATE/question.md
--------------------------------------------------------------------------------
/.github/PULL_REQUEST_TEMPLATE.md:
--------------------------------------------------------------------------------
1 | ## what
2 |
3 |
7 |
8 | ## why
9 |
10 |
15 |
16 | ## references
17 |
18 |
22 |
--------------------------------------------------------------------------------
/.github/banner.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cloudposse/terraform-aws-datadog-lambda-forwarder/dfa7a9e14eb91f4c6a01aba5c5da6c913493c2fb/.github/banner.png
--------------------------------------------------------------------------------
/.github/mergify.yml:
--------------------------------------------------------------------------------
1 | extends: .github
2 |
--------------------------------------------------------------------------------
/.github/renovate.json:
--------------------------------------------------------------------------------
1 | {
2 | "extends": [
3 | "config:base",
4 | ":preserveSemverRanges"
5 | ],
6 | "baseBranches": ["main", "master", "/^release\\/v\\d{1,2}$/"],
7 | "labels": ["auto-update"],
8 | "dependencyDashboardAutoclose": true,
9 | "enabledManagers": ["terraform"],
10 | "terraform": {
11 | "ignorePaths": ["**/context.tf", "examples/**"]
12 | }
13 | }
14 |
--------------------------------------------------------------------------------
/.github/settings.yml:
--------------------------------------------------------------------------------
1 | # Upstream changes from _extends are only recognized when modifications are made to this file in the default branch.
2 | _extends: .github
3 | repository:
4 | name: terraform-aws-datadog-lambda-forwarder
5 | description: Terraform module to provision all the necessary infrastructure to deploy Datadog Lambda forwarders
6 | homepage: https://cloudposse.com/accelerate
7 | topics: aws, aws-lambda, datadog, forwarder, terraform, terraform-module
8 |
9 |
10 |
11 |
--------------------------------------------------------------------------------
/.github/workflows/branch.yml:
--------------------------------------------------------------------------------
1 | ---
2 | name: Branch
3 | on:
4 | pull_request:
5 | branches:
6 | - main
7 | - release/**
8 | types: [opened, synchronize, reopened, labeled, unlabeled]
9 | push:
10 | branches:
11 | - main
12 | - release/v*
13 | paths-ignore:
14 | - '.github/**'
15 | - 'docs/**'
16 | - 'examples/**'
17 | - 'test/**'
18 | - 'README.md'
19 |
20 | permissions: {}
21 |
22 | jobs:
23 | terraform-module:
24 | uses: cloudposse/.github/.github/workflows/shared-terraform-module.yml@main
25 | secrets: inherit
26 |
--------------------------------------------------------------------------------
/.github/workflows/chatops.yml:
--------------------------------------------------------------------------------
1 | ---
2 | name: chatops
3 | on:
4 | issue_comment:
5 | types: [created]
6 |
7 | permissions:
8 | pull-requests: write
9 | id-token: write
10 | contents: write
11 | statuses: write
12 |
13 | jobs:
14 | test:
15 | uses: cloudposse/.github/.github/workflows/shared-terraform-chatops.yml@main
16 | if: ${{ github.event.issue.pull_request && contains(github.event.comment.body, '/terratest') }}
17 | secrets: inherit
18 |
--------------------------------------------------------------------------------
/.github/workflows/release.yml:
--------------------------------------------------------------------------------
1 | ---
2 | name: release
3 | on:
4 | release:
5 | types:
6 | - published
7 |
8 | permissions:
9 | id-token: write
10 | contents: write
11 | pull-requests: write
12 |
13 | jobs:
14 | terraform-module:
15 | uses: cloudposse/.github/.github/workflows/shared-release-branches.yml@main
16 | secrets: inherit
17 |
--------------------------------------------------------------------------------
/.github/workflows/scheduled.yml:
--------------------------------------------------------------------------------
1 | ---
2 | name: scheduled
3 | on:
4 |   workflow_dispatch: { } # Allows manually triggering this workflow
5 | schedule:
6 | - cron: "0 3 * * *"
7 |
8 | permissions:
9 | pull-requests: write
10 | id-token: write
11 | contents: write
12 |
13 | jobs:
14 | scheduled:
15 | uses: cloudposse/.github/.github/workflows/shared-terraform-scheduled.yml@main
16 | secrets: inherit
17 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Local .terraform directories
2 | **/.terraform/*
3 |
4 | # .tfstate files
5 | *.tfstate
6 | *.tfstate.*
7 | .terraform
8 | .terraform.tfstate.lock.info
9 |
10 | **/.idea
11 | **/*.iml
12 |
13 | # Cloud Posse Build Harness https://github.com/cloudposse/build-harness
14 | **/.build-harness
15 | **/build-harness
16 |
17 | # Crash log files
18 | crash.log
19 | test.log
20 |
21 | # Downloaded lambda artifact
22 | forwarder-log.zip
23 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | Apache License
2 | Version 2.0, January 2004
3 | http://www.apache.org/licenses/
4 |
5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
6 |
7 | 1. Definitions.
8 |
9 | "License" shall mean the terms and conditions for use, reproduction,
10 | and distribution as defined by Sections 1 through 9 of this document.
11 |
12 | "Licensor" shall mean the copyright owner or entity authorized by
13 | the copyright owner that is granting the License.
14 |
15 | "Legal Entity" shall mean the union of the acting entity and all
16 | other entities that control, are controlled by, or are under common
17 | control with that entity. For the purposes of this definition,
18 | "control" means (i) the power, direct or indirect, to cause the
19 | direction or management of such entity, whether by contract or
20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the
21 | outstanding shares, or (iii) beneficial ownership of such entity.
22 |
23 | "You" (or "Your") shall mean an individual or Legal Entity
24 | exercising permissions granted by this License.
25 |
26 | "Source" form shall mean the preferred form for making modifications,
27 | including but not limited to software source code, documentation
28 | source, and configuration files.
29 |
30 | "Object" form shall mean any form resulting from mechanical
31 | transformation or translation of a Source form, including but
32 | not limited to compiled object code, generated documentation,
33 | and conversions to other media types.
34 |
35 | "Work" shall mean the work of authorship, whether in Source or
36 | Object form, made available under the License, as indicated by a
37 | copyright notice that is included in or attached to the work
38 | (an example is provided in the Appendix below).
39 |
40 | "Derivative Works" shall mean any work, whether in Source or Object
41 | form, that is based on (or derived from) the Work and for which the
42 | editorial revisions, annotations, elaborations, or other modifications
43 | represent, as a whole, an original work of authorship. For the purposes
44 | of this License, Derivative Works shall not include works that remain
45 | separable from, or merely link (or bind by name) to the interfaces of,
46 | the Work and Derivative Works thereof.
47 |
48 | "Contribution" shall mean any work of authorship, including
49 | the original version of the Work and any modifications or additions
50 | to that Work or Derivative Works thereof, that is intentionally
51 | submitted to Licensor for inclusion in the Work by the copyright owner
52 | or by an individual or Legal Entity authorized to submit on behalf of
53 | the copyright owner. For the purposes of this definition, "submitted"
54 | means any form of electronic, verbal, or written communication sent
55 | to the Licensor or its representatives, including but not limited to
56 | communication on electronic mailing lists, source code control systems,
57 | and issue tracking systems that are managed by, or on behalf of, the
58 | Licensor for the purpose of discussing and improving the Work, but
59 | excluding communication that is conspicuously marked or otherwise
60 | designated in writing by the copyright owner as "Not a Contribution."
61 |
62 | "Contributor" shall mean Licensor and any individual or Legal Entity
63 | on behalf of whom a Contribution has been received by Licensor and
64 | subsequently incorporated within the Work.
65 |
66 | 2. Grant of Copyright License. Subject to the terms and conditions of
67 | this License, each Contributor hereby grants to You a perpetual,
68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
69 | copyright license to reproduce, prepare Derivative Works of,
70 | publicly display, publicly perform, sublicense, and distribute the
71 | Work and such Derivative Works in Source or Object form.
72 |
73 | 3. Grant of Patent License. Subject to the terms and conditions of
74 | this License, each Contributor hereby grants to You a perpetual,
75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
76 | (except as stated in this section) patent license to make, have made,
77 | use, offer to sell, sell, import, and otherwise transfer the Work,
78 | where such license applies only to those patent claims licensable
79 | by such Contributor that are necessarily infringed by their
80 | Contribution(s) alone or by combination of their Contribution(s)
81 | with the Work to which such Contribution(s) was submitted. If You
82 | institute patent litigation against any entity (including a
83 | cross-claim or counterclaim in a lawsuit) alleging that the Work
84 | or a Contribution incorporated within the Work constitutes direct
85 | or contributory patent infringement, then any patent licenses
86 | granted to You under this License for that Work shall terminate
87 | as of the date such litigation is filed.
88 |
89 | 4. Redistribution. You may reproduce and distribute copies of the
90 | Work or Derivative Works thereof in any medium, with or without
91 | modifications, and in Source or Object form, provided that You
92 | meet the following conditions:
93 |
94 | (a) You must give any other recipients of the Work or
95 | Derivative Works a copy of this License; and
96 |
97 | (b) You must cause any modified files to carry prominent notices
98 | stating that You changed the files; and
99 |
100 | (c) You must retain, in the Source form of any Derivative Works
101 | that You distribute, all copyright, patent, trademark, and
102 | attribution notices from the Source form of the Work,
103 | excluding those notices that do not pertain to any part of
104 | the Derivative Works; and
105 |
106 | (d) If the Work includes a "NOTICE" text file as part of its
107 | distribution, then any Derivative Works that You distribute must
108 | include a readable copy of the attribution notices contained
109 | within such NOTICE file, excluding those notices that do not
110 | pertain to any part of the Derivative Works, in at least one
111 | of the following places: within a NOTICE text file distributed
112 | as part of the Derivative Works; within the Source form or
113 | documentation, if provided along with the Derivative Works; or,
114 | within a display generated by the Derivative Works, if and
115 | wherever such third-party notices normally appear. The contents
116 | of the NOTICE file are for informational purposes only and
117 | do not modify the License. You may add Your own attribution
118 | notices within Derivative Works that You distribute, alongside
119 | or as an addendum to the NOTICE text from the Work, provided
120 | that such additional attribution notices cannot be construed
121 | as modifying the License.
122 |
123 | You may add Your own copyright statement to Your modifications and
124 | may provide additional or different license terms and conditions
125 | for use, reproduction, or distribution of Your modifications, or
126 | for any such Derivative Works as a whole, provided Your use,
127 | reproduction, and distribution of the Work otherwise complies with
128 | the conditions stated in this License.
129 |
130 | 5. Submission of Contributions. Unless You explicitly state otherwise,
131 | any Contribution intentionally submitted for inclusion in the Work
132 | by You to the Licensor shall be under the terms and conditions of
133 | this License, without any additional terms or conditions.
134 | Notwithstanding the above, nothing herein shall supersede or modify
135 | the terms of any separate license agreement you may have executed
136 | with Licensor regarding such Contributions.
137 |
138 | 6. Trademarks. This License does not grant permission to use the trade
139 | names, trademarks, service marks, or product names of the Licensor,
140 | except as required for reasonable and customary use in describing the
141 | origin of the Work and reproducing the content of the NOTICE file.
142 |
143 | 7. Disclaimer of Warranty. Unless required by applicable law or
144 | agreed to in writing, Licensor provides the Work (and each
145 | Contributor provides its Contributions) on an "AS IS" BASIS,
146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
147 | implied, including, without limitation, any warranties or conditions
148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
149 | PARTICULAR PURPOSE. You are solely responsible for determining the
150 | appropriateness of using or redistributing the Work and assume any
151 | risks associated with Your exercise of permissions under this License.
152 |
153 | 8. Limitation of Liability. In no event and under no legal theory,
154 | whether in tort (including negligence), contract, or otherwise,
155 | unless required by applicable law (such as deliberate and grossly
156 | negligent acts) or agreed to in writing, shall any Contributor be
157 | liable to You for damages, including any direct, indirect, special,
158 | incidental, or consequential damages of any character arising as a
159 | result of this License or out of the use or inability to use the
160 | Work (including but not limited to damages for loss of goodwill,
161 | work stoppage, computer failure or malfunction, or any and all
162 | other commercial damages or losses), even if such Contributor
163 | has been advised of the possibility of such damages.
164 |
165 | 9. Accepting Warranty or Additional Liability. While redistributing
166 | the Work or Derivative Works thereof, You may choose to offer,
167 | and charge a fee for, acceptance of support, warranty, indemnity,
168 | or other liability obligations and/or rights consistent with this
169 | License. However, in accepting such obligations, You may act only
170 | on Your own behalf and on Your sole responsibility, not on behalf
171 | of any other Contributor, and only if You agree to indemnify,
172 | defend, and hold each Contributor harmless for any liability
173 | incurred by, or claims asserted against, such Contributor by reason
174 | of your accepting any such warranty or additional liability.
175 |
176 | END OF TERMS AND CONDITIONS
177 |
178 | APPENDIX: How to apply the Apache License to your work.
179 |
180 | To apply the Apache License to your work, attach the following
181 | boilerplate notice, with the fields enclosed by brackets "[]"
182 | replaced with your own identifying information. (Don't include
183 | the brackets!) The text should be enclosed in the appropriate
184 | comment syntax for the file format. We also recommend that a
185 | file or class name and description of purpose be included on the
186 | same "printed page" as the copyright notice for easier
187 | identification within third-party archives.
188 |
189 | Copyright 2020-2023 Cloud Posse, LLC
190 |
191 | Licensed under the Apache License, Version 2.0 (the "License");
192 | you may not use this file except in compliance with the License.
193 | You may obtain a copy of the License at
194 |
195 | http://www.apache.org/licenses/LICENSE-2.0
196 |
197 | Unless required by applicable law or agreed to in writing, software
198 | distributed under the License is distributed on an "AS IS" BASIS,
199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
200 | See the License for the specific language governing permissions and
201 | limitations under the License.
202 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 | 
5 |
6 |
7 |



8 |
9 |
10 |
11 |
12 |
32 |
33 | Terraform module to provision all the necessary infrastructure to deploy [Datadog Lambda forwarders](https://github.com/DataDog/datadog-serverless-functions/tree/master/aws/logs_monitoring)
34 |
35 |
36 | > [!TIP]
37 | > #### 👽 Use Atmos with Terraform
38 | > Cloud Posse uses [`atmos`](https://atmos.tools) to easily orchestrate multiple environments using Terraform.
39 | > Works with [Github Actions](https://atmos.tools/integrations/github-actions/), [Atlantis](https://atmos.tools/integrations/atlantis), or [Spacelift](https://atmos.tools/integrations/spacelift).
40 | >
41 | >
42 | > Watch demo of using Atmos with Terraform
43 | > 
44 | > Example of running atmos
to manage infrastructure from our Quick Start tutorial.
45 | >
46 |
47 |
48 |
49 |
50 |
51 | ## Usage
52 |
53 | For a complete example, see [examples/complete](examples/complete).
54 |
55 | For automated tests of the complete example using [bats](https://github.com/bats-core/bats-core) and [Terratest](https://github.com/gruntwork-io/terratest)
56 | (which tests and deploys the example on AWS), see [test](test).
57 |
58 | To enable Datadog forwarder for RDS Enhanced monitoring:
59 | ```hcl
60 | module "datadog_lambda_forwarder" {
61 | source = "cloudposse/datadog-lambda-forwarder/aws"
62 | # Cloud Posse recommends pinning every module to a specific version
63 | # version = "x.x.x"
64 |
65 | forwarder_rds_enabled = true
66 | }
67 | ```
68 |
69 | To enable Datadog forwarder for a CloudTrail S3 bucket:
70 | ```hcl
71 | module "datadog_lambda_forwarder" {
72 | source = "cloudposse/datadog-lambda-forwarder/aws"
73 | # Cloud Posse recommends pinning every module to a specific version
74 | # version = "x.x.x"
75 |
76 | forwarder_log_enabled = true
77 | s3_buckets = ["cloudtrail-audit-bucket"]
78 | s3_bucket_kms_arns = ["arn:aws:kms:us-west-2:1234567890:key/b204f3d2-1111-2222-94333332-4444ccc222"]
79 | }
80 | ```
81 |
82 | To enable Datadog forwarder for an S3 bucket with prefix:
83 | ```hcl
84 | module "datadog_lambda_forwarder" {
85 | source = "cloudposse/datadog-lambda-forwarder/aws"
86 | # Cloud Posse recommends pinning every module to a specific version
87 | # version = "x.x.x"
88 |
89 | forwarder_log_enabled = true
90 | s3_buckets_with_prefixes = {
91 | MyBucketWithPrefix = {bucket_name = "my-bucket-with-prefix", bucket_prefix = "events/"}
92 | AnotherWithPrefix = {bucket_name = "another-with-prefix", bucket_prefix = "records/"}
93 | }
94 | s3_bucket_kms_arns = ["arn:aws:kms:us-west-2:1234567890:key/b204f3d2-1111-2222-94333332-4444ccc222"]
95 | }
96 | ```
97 |
98 | To enable Datadog forwarder for RDS authentication CloudWatch logs:
99 | ```hcl
100 | module "datadog_lambda_forwarder" {
101 | source = "cloudposse/datadog-lambda-forwarder/aws"
102 | # Cloud Posse recommends pinning every module to a specific version
103 | # version = "x.x.x"
104 |
105 | forwarder_log_enabled = true
106 | cloudwatch_forwarder_log_groups = {
107 | postgres = {
108 | name = "/aws/rds/cluster/pg-main/postgresql"
109 | filter_pattern = ""
110 | }
111 | }
112 | }
113 | ```
114 |
115 | To enable Datadog forwarder for VPC Flow Logs CloudWatch logs:
116 | ```hcl
117 | module "datadog_lambda_forwarder" {
118 | source = "cloudposse/datadog-lambda-forwarder/aws"
119 | # Cloud Posse recommends pinning every module to a specific version
120 | # version = "x.x.x"
121 |
122 | forwarder_vpc_logs_enabled = true
123 | vpclogs_cloudwatch_log_group = "/aws/vpc/flowlogs/vpc1"
124 | }
125 | ```
126 |
127 | To use a local copy of the lambda code you can specify the artifact URL:
128 | ```hcl
129 | module "datadog_lambda_forwarder" {
130 | source = "cloudposse/datadog-lambda-forwarder/aws"
131 | # Cloud Posse recommends pinning every module to a specific version
132 | # version = "x.x.x"
133 |
134 | forwarder_rds_enabled = true
135 | forwarder_rds_artifact_url = file("${path.module}/function.zip")
136 | }
137 | ```
138 |
139 | > [!IMPORTANT]
140 | > In Cloud Posse's examples, we avoid pinning modules to specific versions to prevent discrepancies between the documentation
141 | > and the latest released versions. However, for your own projects, we strongly advise pinning each module to the exact version
142 | > you're using. This practice ensures the stability of your infrastructure. Additionally, we recommend implementing a systematic
143 | > approach for updating versions to avoid unexpected changes.
144 |
145 |
146 |
147 |
148 |
149 | ## Examples
150 |
151 | Here is an example of using this module:
152 | - [`examples/complete`](examples/complete) - complete example of using this module
153 |
154 |
155 |
156 |
157 |
158 | ## Requirements
159 |
160 | | Name | Version |
161 | |------|---------|
162 | | [terraform](#requirement\_terraform) | >= 1.3.0 |
163 | | [archive](#requirement\_archive) | >= 2.2.0 |
164 | | [aws](#requirement\_aws) | >= 3.0 |
165 |
166 | ## Providers
167 |
168 | | Name | Version |
169 | |------|---------|
170 | | [archive](#provider\_archive) | >= 2.2.0 |
171 | | [aws](#provider\_aws) | >= 3.0 |
172 |
173 | ## Modules
174 |
175 | | Name | Source | Version |
176 | |------|--------|---------|
177 | | [cloudwatch\_event](#module\_cloudwatch\_event) | cloudposse/cloudwatch-events/aws | 0.6.1 |
178 | | [forwarder\_log\_artifact](#module\_forwarder\_log\_artifact) | cloudposse/module-artifact/external | 0.8.0 |
179 | | [forwarder\_log\_label](#module\_forwarder\_log\_label) | cloudposse/label/null | 0.25.0 |
180 | | [forwarder\_log\_s3\_label](#module\_forwarder\_log\_s3\_label) | cloudposse/label/null | 0.25.0 |
181 | | [forwarder\_rds\_artifact](#module\_forwarder\_rds\_artifact) | cloudposse/module-artifact/external | 0.8.0 |
182 | | [forwarder\_rds\_label](#module\_forwarder\_rds\_label) | cloudposse/label/null | 0.25.0 |
183 | | [forwarder\_vpclogs\_artifact](#module\_forwarder\_vpclogs\_artifact) | cloudposse/module-artifact/external | 0.8.0 |
184 | | [forwarder\_vpclogs\_label](#module\_forwarder\_vpclogs\_label) | cloudposse/label/null | 0.25.0 |
185 | | [tags\_cache\_s3\_bucket](#module\_tags\_cache\_s3\_bucket) | cloudposse/s3-bucket/aws | 4.2.0 |
186 | | [this](#module\_this) | cloudposse/label/null | 0.25.0 |
187 |
188 | ## Resources
189 |
190 | | Name | Type |
191 | |------|------|
192 | | [aws_cloudwatch_log_group.forwarder_log](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/cloudwatch_log_group) | resource |
193 | | [aws_cloudwatch_log_group.forwarder_rds](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/cloudwatch_log_group) | resource |
194 | | [aws_cloudwatch_log_group.forwarder_vpclogs](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/cloudwatch_log_group) | resource |
195 | | [aws_cloudwatch_log_subscription_filter.cloudwatch_log_subscription_filter](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/cloudwatch_log_subscription_filter) | resource |
196 | | [aws_cloudwatch_log_subscription_filter.datadog_log_subscription_filter_rds](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/cloudwatch_log_subscription_filter) | resource |
197 | | [aws_cloudwatch_log_subscription_filter.datadog_log_subscription_filter_vpclogs](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/cloudwatch_log_subscription_filter) | resource |
198 | | [aws_iam_policy.datadog_custom_policy](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/iam_policy) | resource |
199 | | [aws_iam_policy.lambda_forwarder_log](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/iam_policy) | resource |
200 | | [aws_iam_policy.lambda_forwarder_log_s3](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/iam_policy) | resource |
201 | | [aws_iam_policy.lambda_forwarder_rds](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/iam_policy) | resource |
202 | | [aws_iam_policy.lambda_forwarder_vpclogs](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/iam_policy) | resource |
203 | | [aws_iam_role.lambda_forwarder_log](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/iam_role) | resource |
204 | | [aws_iam_role.lambda_forwarder_rds](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/iam_role) | resource |
205 | | [aws_iam_role.lambda_forwarder_vpclogs](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/iam_role) | resource |
206 | | [aws_iam_role_policy_attachment.datadog_s3](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/iam_role_policy_attachment) | resource |
207 | | [aws_iam_role_policy_attachment.lambda_forwarder_log](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/iam_role_policy_attachment) | resource |
208 | | [aws_iam_role_policy_attachment.lambda_forwarder_rds](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/iam_role_policy_attachment) | resource |
209 | | [aws_iam_role_policy_attachment.lambda_forwarder_vpclogs](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/iam_role_policy_attachment) | resource |
210 | | [aws_lambda_function.forwarder_log](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/lambda_function) | resource |
211 | | [aws_lambda_function.forwarder_rds](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/lambda_function) | resource |
212 | | [aws_lambda_function.forwarder_vpclogs](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/lambda_function) | resource |
213 | | [aws_lambda_permission.allow_eventbridge](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/lambda_permission) | resource |
214 | | [aws_lambda_permission.allow_s3_bucket](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/lambda_permission) | resource |
215 | | [aws_lambda_permission.cloudwatch_enhanced_rds_monitoring](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/lambda_permission) | resource |
216 | | [aws_lambda_permission.cloudwatch_groups](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/lambda_permission) | resource |
217 | | [aws_lambda_permission.cloudwatch_vpclogs](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/lambda_permission) | resource |
218 | | [aws_s3_bucket_notification.s3_bucket_notification](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/s3_bucket_notification) | resource |
219 | | [aws_s3_bucket_notification.s3_bucket_notification_with_prefixes](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/s3_bucket_notification) | resource |
220 | | [archive_file.forwarder_rds](https://registry.terraform.io/providers/hashicorp/archive/latest/docs/data-sources/file) | data source |
221 | | [archive_file.forwarder_vpclogs](https://registry.terraform.io/providers/hashicorp/archive/latest/docs/data-sources/file) | data source |
222 | | [aws_caller_identity.current](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/data-sources/caller_identity) | data source |
223 | | [aws_iam_policy_document.assume_role](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/data-sources/iam_policy_document) | data source |
224 | | [aws_iam_policy_document.lambda_default](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/data-sources/iam_policy_document) | data source |
225 | | [aws_iam_policy_document.s3_log_bucket](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/data-sources/iam_policy_document) | data source |
226 | | [aws_partition.current](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/data-sources/partition) | data source |
227 | | [aws_region.current](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/data-sources/region) | data source |
228 | | [aws_ssm_parameter.api_key](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/data-sources/ssm_parameter) | data source |
229 |
230 | ## Inputs
231 |
232 | | Name | Description | Type | Default | Required |
233 | |------|-------------|------|---------|:--------:|
234 | | [additional\_tag\_map](#input\_additional\_tag\_map) | Additional key-value pairs to add to each map in `tags_as_list_of_maps`. Not added to `tags` or `id`.
This is for some rare cases where resources want additional configuration of tags
and therefore take a list of maps with tag key, value, and additional configuration. | `map(string)` | `{}` | no |
235 | | [api\_key\_ssm\_arn](#input\_api\_key\_ssm\_arn) | ARN of the SSM parameter for the Datadog API key.
Passing this removes the need to fetch the key from the SSM parameter store.
This could be the case if the SSM Key is in a different region than the lambda. | `string` | `null` | no |
236 | | [attributes](#input\_attributes) | ID element. Additional attributes (e.g. `workers` or `cluster`) to add to `id`,
in the order they appear in the list. New attributes are appended to the
end of the list. The elements of the list are joined by the `delimiter`
and treated as a single ID element. | `list(string)` | `[]` | no |
237 | | [cloudwatch\_forwarder\_event\_patterns](#input\_cloudwatch\_forwarder\_event\_patterns) | Map of title => CloudWatch Event patterns to forward to Datadog. Event structure from here: https://docs.aws.amazon.com/AmazonCloudWatch/latest/events/CloudWatchEventsandEventPatterns.html
Example (hcl):
cloudwatch_forwarder_event_patterns = {
"guardduty" = {
source = ["aws.guardduty"]
detail-type = ["GuardDuty Finding"]
}
"ec2-terminated" = {
source = ["aws.ec2"]
detail-type = ["EC2 Instance State-change Notification"]
detail = {
state = ["terminated"]
}
}
}
| map(object({
version = optional(list(string))
id = optional(list(string))
detail-type = optional(list(string))
source = optional(list(string))
account = optional(list(string))
time = optional(list(string))
region = optional(list(string))
resources = optional(list(string))
detail = optional(map(list(string)))
}))
| `{}` | no |
238 | | [cloudwatch\_forwarder\_log\_groups](#input\_cloudwatch\_forwarder\_log\_groups) | Map of CloudWatch Log Groups with a filter pattern that the Lambda forwarder will send logs from. For example: { mysql1 = { name = "/aws/rds/maincluster", filter\_pattern = "" } } | map(object({
name = string
filter_pattern = string
}))
| `{}` | no |
239 | | [context](#input\_context) | Single object for setting entire context at once.
See description of individual variables for details.
Leave string and numeric variables as `null` to use default value.
Individual variable settings (non-null) override settings in context object,
except for attributes, tags, and additional\_tag\_map, which are merged. | `any` | {
"additional_tag_map": {},
"attributes": [],
"delimiter": null,
"descriptor_formats": {},
"enabled": true,
"environment": null,
"id_length_limit": null,
"label_key_case": null,
"label_order": [],
"label_value_case": null,
"labels_as_tags": [
"unset"
],
"name": null,
"namespace": null,
"regex_replace_chars": null,
"stage": null,
"tags": {},
"tenant": null
}
| no |
240 | | [datadog\_forwarder\_lambda\_environment\_variables](#input\_datadog\_forwarder\_lambda\_environment\_variables) | Map of environment variables to pass to the Lambda Function | `map(string)` | `{}` | no |
241 | | [dd\_api\_key\_kms\_ciphertext\_blob](#input\_dd\_api\_key\_kms\_ciphertext\_blob) | CiphertextBlob stored in environment variable DD\_KMS\_API\_KEY used by the lambda function, along with the KMS key, to decrypt Datadog API key | `string` | `""` | no |
242 | | [dd\_api\_key\_source](#input\_dd\_api\_key\_source) | One of: ARN for AWS Secrets Manager (asm) to retrieve the Datadog (DD) api key, ARN for the KMS (kms) key used to decrypt the ciphertext\_blob of the api key, or the name of the SSM (ssm) parameter used to retrieve the Datadog API key | object({
resource = string
identifier = string
})
| {
"identifier": "",
"resource": ""
}
| no |
243 | | [dd\_artifact\_filename](#input\_dd\_artifact\_filename) | The Datadog artifact filename minus extension | `string` | `"aws-dd-forwarder"` | no |
244 | | [dd\_forwarder\_version](#input\_dd\_forwarder\_version) | Version tag of Datadog lambdas to use. https://github.com/DataDog/datadog-serverless-functions/releases | `string` | `"3.116.0"` | no |
245 | | [dd\_module\_name](#input\_dd\_module\_name) | The Datadog GitHub repository name | `string` | `"datadog-serverless-functions"` | no |
246 | | [dd\_tags](#input\_dd\_tags) | A list of Datadog tags to apply to all logs forwarded to Datadog | `list(string)` | `[]` | no |
247 | | [dd\_tags\_map](#input\_dd\_tags\_map) | A map of Datadog tags to apply to all logs forwarded to Datadog. This will override dd\_tags. | `map(string)` | `{}` | no |
248 | | [delimiter](#input\_delimiter) | Delimiter to be used between ID elements.
Defaults to `-` (hyphen). Set to `""` to use no delimiter at all. | `string` | `null` | no |
249 | | [descriptor\_formats](#input\_descriptor\_formats) | Describe additional descriptors to be output in the `descriptors` output map.
Map of maps. Keys are names of descriptors. Values are maps of the form
`{
format = string
labels = list(string)
}`
(Type is `any` so the map values can later be enhanced to provide additional options.)
`format` is a Terraform format string to be passed to the `format()` function.
`labels` is a list of labels, in order, to pass to `format()` function.
Label values will be normalized before being passed to `format()` so they will be
identical to how they appear in `id`.
Default is `{}` (`descriptors` output will be empty). | `any` | `{}` | no |
250 | | [enabled](#input\_enabled) | Set to false to prevent the module from creating any resources | `bool` | `null` | no |
251 | | [environment](#input\_environment) | ID element. Usually used for region e.g. 'uw2', 'us-west-2', OR role 'prod', 'staging', 'dev', 'UAT' | `string` | `null` | no |
252 | | [forwarder\_iam\_path](#input\_forwarder\_iam\_path) | Path to the IAM roles and policies created | `string` | `"/"` | no |
253 | | [forwarder\_lambda\_datadog\_host](#input\_forwarder\_lambda\_datadog\_host) | Datadog Site to send data to. Possible values are `datadoghq.com`, `datadoghq.eu`, `us3.datadoghq.com`, `us5.datadoghq.com` and `ddog-gov.com` | `string` | `"datadoghq.com"` | no |
254 | | [forwarder\_lambda\_debug\_enabled](#input\_forwarder\_lambda\_debug\_enabled) | Whether to enable or disable debug for the Lambda forwarder | `bool` | `false` | no |
255 | | [forwarder\_log\_artifact\_url](#input\_forwarder\_log\_artifact\_url) | The URL for the code of the Datadog forwarder for Logs. It can be a local file, URL or git repo | `string` | `null` | no |
256 | | [forwarder\_log\_enabled](#input\_forwarder\_log\_enabled) | Flag to enable or disable Datadog log forwarder | `bool` | `false` | no |
257 | | [forwarder\_log\_layers](#input\_forwarder\_log\_layers) | List of Lambda Layer Version ARNs (maximum of 5) to attach to Datadog log forwarder lambda function | `list(string)` | `[]` | no |
258 | | [forwarder\_log\_retention\_days](#input\_forwarder\_log\_retention\_days) | Number of days to retain Datadog forwarder lambda execution logs. One of [0 1 3 5 7 14 30 60 90 120 150 180 365 400 545 731 1827 3653] | `number` | `14` | no |
259 | | [forwarder\_rds\_artifact\_url](#input\_forwarder\_rds\_artifact\_url) | The URL for the code of the Datadog forwarder for RDS. It can be a local file, url or git repo | `string` | `null` | no |
260 | | [forwarder\_rds\_enabled](#input\_forwarder\_rds\_enabled) | Flag to enable or disable Datadog RDS enhanced monitoring forwarder | `bool` | `false` | no |
261 | | [forwarder\_rds\_filter\_pattern](#input\_forwarder\_rds\_filter\_pattern) | Filter pattern for Lambda forwarder RDS | `string` | `""` | no |
262 | | [forwarder\_rds\_layers](#input\_forwarder\_rds\_layers) | List of Lambda Layer Version ARNs (maximum of 5) to attach to Datadog RDS enhanced monitoring lambda function | `list(string)` | `[]` | no |
263 | | [forwarder\_use\_cache\_bucket](#input\_forwarder\_use\_cache\_bucket) | Flag to enable or disable the cache bucket for lambda tags and failed events. See https://docs.datadoghq.com/logs/guide/forwarder/?tab=cloudformation#upgrade-an-older-version-to-31060. Recommended for forwarder versions 3.106 and higher. | `bool` | `true` | no |
264 | | [forwarder\_vpc\_logs\_artifact\_url](#input\_forwarder\_vpc\_logs\_artifact\_url) | The URL for the code of the Datadog forwarder for VPC Logs. It can be a local file, url or git repo | `string` | `null` | no |
265 | | [forwarder\_vpc\_logs\_enabled](#input\_forwarder\_vpc\_logs\_enabled) | Flag to enable or disable Datadog VPC flow log forwarder | `bool` | `false` | no |
266 | | [forwarder\_vpc\_logs\_layers](#input\_forwarder\_vpc\_logs\_layers) | List of Lambda Layer Version ARNs (maximum of 5) to attach to Datadog VPC flow log forwarder lambda function | `list(string)` | `[]` | no |
267 | | [forwarder\_vpclogs\_filter\_pattern](#input\_forwarder\_vpclogs\_filter\_pattern) | Filter pattern for Lambda forwarder VPC Logs | `string` | `""` | no |
268 | | [id\_length\_limit](#input\_id\_length\_limit) | Limit `id` to this many characters (minimum 6).
Set to `0` for unlimited length.
Set to `null` to keep the existing setting, which defaults to `0`.
Does not affect `id_full`. | `number` | `null` | no |
269 | | [kms\_key\_id](#input\_kms\_key\_id) | Optional KMS key ID to encrypt Datadog Lambda function logs | `string` | `null` | no |
270 | | [label\_key\_case](#input\_label\_key\_case) | Controls the letter case of the `tags` keys (label names) for tags generated by this module.
Does not affect keys of tags passed in via the `tags` input.
Possible values: `lower`, `title`, `upper`.
Default value: `title`. | `string` | `null` | no |
271 | | [label\_order](#input\_label\_order) | The order in which the labels (ID elements) appear in the `id`.
Defaults to ["namespace", "environment", "stage", "name", "attributes"].
You can omit any of the 6 labels ("tenant" is the 6th), but at least one must be present. | `list(string)` | `null` | no |
272 | | [label\_value\_case](#input\_label\_value\_case) | Controls the letter case of ID elements (labels) as included in `id`,
set as tag values, and output by this module individually.
Does not affect values of tags passed in via the `tags` input.
Possible values: `lower`, `title`, `upper` and `none` (no transformation).
Set this to `title` and set `delimiter` to `""` to yield Pascal Case IDs.
Default value: `lower`. | `string` | `null` | no |
273 | | [labels\_as\_tags](#input\_labels\_as\_tags) | Set of labels (ID elements) to include as tags in the `tags` output.
Default is to include all labels.
Tags with empty values will not be included in the `tags` output.
Set to `[]` to suppress all generated tags.
**Notes:**
The value of the `name` tag, if included, will be the `id`, not the `name`.
Unlike other `null-label` inputs, the initial setting of `labels_as_tags` cannot be
changed in later chained modules. Attempts to change it will be silently ignored. | `set(string)` | [
"default"
]
| no |
274 | | [lambda\_architectures](#input\_lambda\_architectures) | Instruction set architecture for your Lambda function. Valid values are ["x86\_64"] and ["arm64"]. | `list(string)` | `null` | no |
275 | | [lambda\_custom\_policy\_name](#input\_lambda\_custom\_policy\_name) | Name of the custom IAM policy created from `lambda_policy_source_json` and attached to the Lambda forwarder role | `string` | `"DatadogForwarderCustomPolicy"` | no |
276 | | [lambda\_memory\_size](#input\_lambda\_memory\_size) | Amount of memory in MB your Lambda Function can use at runtime | `number` | `128` | no |
277 | | [lambda\_policy\_source\_json](#input\_lambda\_policy\_source\_json) | Additional IAM policy document that can optionally be passed and merged with the created policy document | `string` | `""` | no |
278 | | [lambda\_reserved\_concurrent\_executions](#input\_lambda\_reserved\_concurrent\_executions) | Amount of reserved concurrent executions for the lambda function. A value of 0 disables Lambda from being triggered and -1 removes any concurrency limitations. Defaults to Unreserved Concurrency Limits -1 | `number` | `-1` | no |
279 | | [lambda\_runtime](#input\_lambda\_runtime) | Runtime environment for Datadog Lambda | `string` | `"python3.11"` | no |
280 | | [lambda\_timeout](#input\_lambda\_timeout) | Amount of time your Datadog Lambda Function has to run in seconds | `number` | `120` | no |
281 | | [log\_permissions\_boundary](#input\_log\_permissions\_boundary) | ARN of the policy that is used to set the permissions boundary for the lambda-log role managed by this module. | `string` | `null` | no |
282 | | [name](#input\_name) | ID element. Usually the component or solution name, e.g. 'app' or 'jenkins'.
This is the only ID element not also included as a `tag`.
The "name" tag is set to the full `id` string. There is no tag with the value of the `name` input. | `string` | `null` | no |
283 | | [namespace](#input\_namespace) | ID element. Usually an abbreviation of your organization name, e.g. 'eg' or 'cp', to help ensure generated IDs are globally unique | `string` | `null` | no |
284 | | [rds\_permissions\_boundary](#input\_rds\_permissions\_boundary) | ARN of the policy that is used to set the permissions boundary for the lambda-rds role managed by this module. | `string` | `null` | no |
285 | | [regex\_replace\_chars](#input\_regex\_replace\_chars) | Terraform regular expression (regex) string.
Characters matching the regex will be removed from the ID elements.
If not set, `"/[^a-zA-Z0-9-]/"` is used to remove all characters other than hyphens, letters and digits. | `string` | `null` | no |
286 | | [s3\_bucket\_kms\_arns](#input\_s3\_bucket\_kms\_arns) | List of KMS key ARNs for s3 bucket encryption | `list(string)` | `[]` | no |
287 | | [s3\_buckets](#input\_s3\_buckets) | The names of S3 buckets to forward logs to Datadog | `list(string)` | `[]` | no |
288 | | [s3\_buckets\_with\_prefixes](#input\_s3\_buckets\_with\_prefixes) | The names of S3 buckets and prefixes to forward logs to Datadog | `map(object({ bucket_name : string, bucket_prefix : string }))` | `{}` | no |
289 | | [security\_group\_ids](#input\_security\_group\_ids) | List of security group IDs to use when the Lambda Function runs in a VPC | `list(string)` | `null` | no |
290 | | [stage](#input\_stage) | ID element. Usually used to indicate role, e.g. 'prod', 'staging', 'source', 'build', 'test', 'deploy', 'release' | `string` | `null` | no |
291 | | [subnet\_ids](#input\_subnet\_ids) | List of subnet IDs to use when deploying the Lambda Function in a VPC | `list(string)` | `null` | no |
292 | | [tags](#input\_tags) | Additional tags (e.g. `{'BusinessUnit': 'XYZ'}`).
Neither the tag keys nor the tag values will be modified by this module. | `map(string)` | `{}` | no |
293 | | [tenant](#input\_tenant) | ID element \_(Rarely used, not included by default)\_. A customer identifier, indicating who this instance of a resource is for | `string` | `null` | no |
294 | | [tracing\_config\_mode](#input\_tracing\_config\_mode) | Can be either PassThrough or Active. If PassThrough, Lambda will only trace the request from an upstream service if it contains a tracing header with 'sampled=1'. If Active, Lambda will respect any tracing header it receives from an upstream service | `string` | `"PassThrough"` | no |
295 | | [vpc\_logs\_permissions\_boundary](#input\_vpc\_logs\_permissions\_boundary) | ARN of the policy that is used to set the permissions boundary for the lambda-vpc-logs role managed by this module. | `string` | `null` | no |
296 | | [vpclogs\_cloudwatch\_log\_group](#input\_vpclogs\_cloudwatch\_log\_group) | The name of the CloudWatch Log Group for VPC flow logs | `string` | `null` | no |
297 |
298 | ## Outputs
299 |
300 | | Name | Description |
301 | |------|-------------|
302 | | [lambda\_forwarder\_log\_function\_arn](#output\_lambda\_forwarder\_log\_function\_arn) | Datadog Lambda forwarder CloudWatch/S3 function ARN |
303 | | [lambda\_forwarder\_log\_function\_name](#output\_lambda\_forwarder\_log\_function\_name) | Datadog Lambda forwarder CloudWatch/S3 function name |
304 | | [lambda\_forwarder\_rds\_enhanced\_monitoring\_function\_name](#output\_lambda\_forwarder\_rds\_enhanced\_monitoring\_function\_name) | Datadog Lambda forwarder RDS Enhanced Monitoring function name |
305 | | [lambda\_forwarder\_rds\_function\_arn](#output\_lambda\_forwarder\_rds\_function\_arn) | Datadog Lambda forwarder RDS Enhanced Monitoring function ARN |
306 | | [lambda\_forwarder\_vpc\_log\_function\_arn](#output\_lambda\_forwarder\_vpc\_log\_function\_arn) | Datadog Lambda forwarder VPC Flow Logs function ARN |
307 | | [lambda\_forwarder\_vpc\_log\_function\_name](#output\_lambda\_forwarder\_vpc\_log\_function\_name) | Datadog Lambda forwarder VPC Flow Logs function name |
308 |
309 |
310 |
311 |
312 |
313 |
314 |
315 |
316 | ## Related Projects
317 |
318 | Check out these related projects.
319 |
320 | - [terraform-null-label](https://github.com/cloudposse/terraform-null-label) - Terraform module designed to generate consistent names and tags for resources. Use terraform-null-label to implement a strict naming convention.
321 |
322 |
323 | ## References
324 |
325 | For additional context, refer to some of these links.
326 |
327 | - [Terraform Standard Module Structure](https://www.terraform.io/docs/modules/index.html#standard-module-structure) - HashiCorp's standard module structure is a file and directory layout we recommend for reusable modules distributed in separate repositories.
328 | - [Terraform Module Requirements](https://www.terraform.io/docs/registry/modules/publish.html#requirements) - HashiCorp's guidance on all the requirements for publishing a module. Meeting the requirements for publishing a module is extremely easy.
329 | - [Terraform `random_integer` Resource](https://registry.terraform.io/providers/hashicorp/random/latest/docs/resources/integer) - The resource random_integer generates random values from a given range, described by the min and max attributes of a given resource.
330 | - [Terraform Version Pinning](https://www.terraform.io/docs/configuration/terraform.html#specifying-a-required-terraform-version) - The required_version setting can be used to constrain which versions of the Terraform CLI can be used with your configuration
331 |
332 |
333 |
334 | > [!TIP]
335 | > #### Use Terraform Reference Architectures for AWS
336 | >
337 | > Use Cloud Posse's ready-to-go [terraform architecture blueprints](https://cloudposse.com/reference-architecture/) for AWS to get up and running quickly.
338 | >
339 | > ✅ We build it together with your team.
340 | > ✅ Your team owns everything.
341 | > ✅ 100% Open Source and backed by fanatical support.
342 | >
343 | >
344 | > 📚 Learn More
345 | >
346 | >
347 | >
348 | > Cloud Posse is the leading [**DevOps Accelerator**](https://cpco.io/commercial-support?utm_source=github&utm_medium=readme&utm_campaign=cloudposse/terraform-aws-datadog-lambda-forwarder&utm_content=commercial_support) for funded startups and enterprises.
349 | >
350 | > *Your team can operate like a pro today.*
351 | >
352 | > Ensure that your team succeeds by using Cloud Posse's proven process and turnkey blueprints. Plus, we stick around until you succeed.
353 | > #### Day-0: Your Foundation for Success
354 | > - **Reference Architecture.** You'll get everything you need from the ground up built using 100% infrastructure as code.
355 | > - **Deployment Strategy.** Adopt a proven deployment strategy with GitHub Actions, enabling automated, repeatable, and reliable software releases.
356 | > - **Site Reliability Engineering.** Gain total visibility into your applications and services with Datadog, ensuring high availability and performance.
357 | > - **Security Baseline.** Establish a secure environment from the start, with built-in governance, accountability, and comprehensive audit logs, safeguarding your operations.
358 | > - **GitOps.** Empower your team to manage infrastructure changes confidently and efficiently through Pull Requests, leveraging the full power of GitHub Actions.
359 | >
360 | >
361 | >
362 | > #### Day-2: Your Operational Mastery
363 | > - **Training.** Equip your team with the knowledge and skills to confidently manage the infrastructure, ensuring long-term success and self-sufficiency.
364 | > - **Support.** Benefit from a seamless communication over Slack with our experts, ensuring you have the support you need, whenever you need it.
365 | > - **Troubleshooting.** Access expert assistance to quickly resolve any operational challenges, minimizing downtime and maintaining business continuity.
366 | > - **Code Reviews.** Enhance your team’s code quality with our expert feedback, fostering continuous improvement and collaboration.
367 | > - **Bug Fixes.** Rely on our team to troubleshoot and resolve any issues, ensuring your systems run smoothly.
368 | > - **Migration Assistance.** Accelerate your migration process with our dedicated support, minimizing disruption and speeding up time-to-value.
369 | > - **Customer Workshops.** Engage with our team in weekly workshops, gaining insights and strategies to continuously improve and innovate.
370 | >
371 | >
372 | >
373 |
374 | ## ✨ Contributing
375 |
376 | This project is under active development, and we encourage contributions from our community.
377 |
378 |
379 |
380 | Many thanks to our outstanding contributors:
381 |
382 |
383 |
384 |
385 |
386 | For 🐛 bug reports & feature requests, please use the [issue tracker](https://github.com/cloudposse/terraform-aws-datadog-lambda-forwarder/issues).
387 |
388 | In general, PRs are welcome. We follow the typical "fork-and-pull" Git workflow.
389 | 1. Review our [Code of Conduct](https://github.com/cloudposse/terraform-aws-datadog-lambda-forwarder/?tab=coc-ov-file#code-of-conduct) and [Contributor Guidelines](https://github.com/cloudposse/.github/blob/main/CONTRIBUTING.md).
390 | 2. **Fork** the repo on GitHub
391 | 3. **Clone** the project to your own machine
392 | 4. **Commit** changes to your own branch
393 | 5. **Push** your work back up to your fork
394 | 6. Submit a **Pull Request** so that we can review your changes
395 |
396 | **NOTE:** Be sure to merge the latest changes from "upstream" before making a pull request!

## Running Terraform Tests
397 |
398 | We use [Atmos](https://atmos.tools) to streamline how Terraform tests are run. It centralizes configuration and wraps common test workflows with easy-to-use commands.
399 |
400 | All tests are located in the [`test/`](test) folder.
401 |
402 | Under the hood, tests are powered by Terratest together with our internal [Test Helpers](https://github.com/cloudposse/test-helpers) library, providing robust infrastructure validation.
403 |
404 | Setup dependencies:
405 | - Install Atmos ([installation guide](https://atmos.tools/install/))
406 | - Install Go [1.24+ or newer](https://go.dev/doc/install)
407 | - Install Terraform or OpenTofu
408 |
409 | To run tests:
410 |
411 | - Run all tests:
412 | ```sh
413 | atmos test run
414 | ```
415 | - Clean up test artifacts:
416 | ```sh
417 | atmos test clean
418 | ```
419 | - Explore additional test options:
420 | ```sh
421 | atmos test --help
422 | ```
423 | The configuration for test commands is centrally managed. To review what's being imported, see the [`atmos.yaml`](https://raw.githubusercontent.com/cloudposse/.github/refs/heads/main/.github/atmos/terraform-module.yaml) file.
424 |
425 | Learn more about our [automated testing in our documentation](https://docs.cloudposse.com/community/contribute/automated-testing/) or implementing [custom commands](https://atmos.tools/core-concepts/custom-commands/) with atmos.
426 |
427 | ### 🌎 Slack Community
428 |
429 | Join our [Open Source Community](https://cpco.io/slack?utm_source=github&utm_medium=readme&utm_campaign=cloudposse/terraform-aws-datadog-lambda-forwarder&utm_content=slack) on Slack. It's **FREE** for everyone! Our "SweetOps" community is where you get to talk with others who share a similar vision for how to rollout and manage infrastructure. This is the best place to talk shop, ask questions, solicit feedback, and work together as a community to build totally *sweet* infrastructure.
430 |
431 | ### 📰 Newsletter
432 |
433 | Sign up for [our newsletter](https://cpco.io/newsletter?utm_source=github&utm_medium=readme&utm_campaign=cloudposse/terraform-aws-datadog-lambda-forwarder&utm_content=newsletter) and join 3,000+ DevOps engineers, CTOs, and founders who get insider access to the latest DevOps trends, so you can always stay in the know.
434 | Dropped straight into your Inbox every week — and usually a 5-minute read.
435 |
436 | ### 📆 Office Hours
437 |
438 | [Join us every Wednesday via Zoom](https://cloudposse.com/office-hours?utm_source=github&utm_medium=readme&utm_campaign=cloudposse/terraform-aws-datadog-lambda-forwarder&utm_content=office_hours) for your weekly dose of insider DevOps trends, AWS news and Terraform insights, all sourced from our SweetOps community, plus a _live Q&A_ that you can’t find anywhere else.
439 | It's **FREE** for everyone!
440 | ## License
441 |
442 |
443 |
444 |
445 | Preamble to the Apache License, Version 2.0
446 |
447 |
448 |
449 | Complete license is available in the [`LICENSE`](LICENSE) file.
450 |
451 | ```text
452 | Licensed to the Apache Software Foundation (ASF) under one
453 | or more contributor license agreements. See the NOTICE file
454 | distributed with this work for additional information
455 | regarding copyright ownership. The ASF licenses this file
456 | to you under the Apache License, Version 2.0 (the
457 | "License"); you may not use this file except in compliance
458 | with the License. You may obtain a copy of the License at
459 |
460 | https://www.apache.org/licenses/LICENSE-2.0
461 |
462 | Unless required by applicable law or agreed to in writing,
463 | software distributed under the License is distributed on an
464 | "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
465 | KIND, either express or implied. See the License for the
466 | specific language governing permissions and limitations
467 | under the License.
468 | ```
469 |
470 |
471 | ## Trademarks
472 |
473 | All other trademarks referenced herein are the property of their respective owners.
474 |
475 |
476 | ## Copyrights
477 |
478 | Copyright © 2021-2025 [Cloud Posse, LLC](https://cloudposse.com)
479 |
480 |
481 |
482 |
483 |
484 |
485 |
--------------------------------------------------------------------------------
/README.yaml:
--------------------------------------------------------------------------------
1 | #
2 | # This is the canonical configuration for the `README.md`
3 | # Run `make readme` to rebuild the `README.md`
4 | #
5 |
6 | # Name of this project
7 | name: terraform-aws-datadog-lambda-forwarder
8 |
9 | # Logo for this project
10 | #logo: docs/logo.png
11 |
12 | # License of this project
13 | license: "APACHE2"
14 |
15 | # Copyrights
16 | copyrights:
17 | - name: "Cloud Posse, LLC"
18 | url: "https://cloudposse.com"
19 | year: "2021"
20 |
21 | # Canonical GitHub repo
22 | github_repo: cloudposse/terraform-aws-datadog-lambda-forwarder
23 |
24 | # Badges to display
25 | badges:
26 | - name: Latest Release
27 | image: https://img.shields.io/github/release/cloudposse/terraform-aws-datadog-lambda-forwarder.svg?style=for-the-badge
28 | url: https://github.com/cloudposse/terraform-aws-datadog-lambda-forwarder/releases/latest
29 | - name: Last Updated
30 | image: https://img.shields.io/github/last-commit/cloudposse/terraform-aws-datadog-lambda-forwarder.svg?style=for-the-badge
31 | url: https://github.com/cloudposse/terraform-aws-datadog-lambda-forwarder/commits
32 | - name: Slack Community
33 | image: https://slack.cloudposse.com/for-the-badge.svg
34 | url: https://cloudposse.com/slack
35 |
36 | # List any related terraform modules that this module may be used with or that this module depends on.
37 | related:
38 | - name: "terraform-null-label"
39 | description: "Terraform module designed to generate consistent names and tags for resources. Use terraform-null-label to implement a strict naming convention."
40 | url: "https://github.com/cloudposse/terraform-null-label"
41 |
42 | # List any resources helpful for someone to get started. For example, link to the hashicorp documentation or AWS documentation.
43 | references:
44 | - name: "Terraform Standard Module Structure"
45 | description: "HashiCorp's standard module structure is a file and directory layout we recommend for reusable modules distributed in separate repositories."
46 | url: "https://www.terraform.io/docs/modules/index.html#standard-module-structure"
47 | - name: "Terraform Module Requirements"
48 | description: "HashiCorp's guidance on all the requirements for publishing a module. Meeting the requirements for publishing a module is extremely easy."
49 | url: "https://www.terraform.io/docs/registry/modules/publish.html#requirements"
50 | - name: "Terraform `random_integer` Resource"
51 | description: "The resource random_integer generates random values from a given range, described by the min and max attributes of a given resource."
52 | url: "https://registry.terraform.io/providers/hashicorp/random/latest/docs/resources/integer"
53 | - name: "Terraform Version Pinning"
54 | description: "The required_version setting can be used to constrain which versions of the Terraform CLI can be used with your configuration"
55 | url: "https://www.terraform.io/docs/configuration/terraform.html#specifying-a-required-terraform-version"
56 |
57 | # Short description of this project
58 | description: |-
59 | Terraform module to provision all the necessary infrastructure to deploy [Datadog Lambda forwarders](https://github.com/DataDog/datadog-serverless-functions/tree/master/aws/logs_monitoring)
60 |
61 | # Introduction to the project
62 | #introduction: |-
63 | # This is an introduction.
64 |
65 | # How to use this module. Should be an easy example to copy and paste.
66 | usage: |-
67 | For a complete example, see [examples/complete](examples/complete).
68 |
69 | For automated tests of the complete example using [bats](https://github.com/bats-core/bats-core) and [Terratest](https://github.com/gruntwork-io/terratest)
70 | (which tests and deploys the example on AWS), see [test](test).
71 |
72 | To enable Datadog forwarder for RDS Enhanced monitoring:
73 | ```hcl
74 | module "datadog_lambda_forwarder" {
75 | source = "cloudposse/datadog-lambda-forwarder/aws"
76 | # Cloud Posse recommends pinning every module to a specific version
77 | # version = "x.x.x"
78 |
79 | forwarder_rds_enabled = true
80 | }
81 | ```
82 |
83 | To enable Datadog forwarder for a CloudTrail S3 bucket:
84 | ```hcl
85 | module "datadog_lambda_forwarder" {
86 | source = "cloudposse/datadog-lambda-forwarder/aws"
87 | # Cloud Posse recommends pinning every module to a specific version
88 | # version = "x.x.x"
89 |
90 | forwarder_log_enabled = true
91 | s3_buckets = ["cloudtrail-audit-bucket"]
92 |       s3_bucket_kms_arns    = ["arn:aws:kms:us-west-2:123456789012:key/b204f3d2-1111-2222-3333-444455556666"]
93 | }
94 | ```
95 |
96 |   To enable Datadog forwarder for an S3 bucket with prefix:
97 | ```hcl
98 | module "datadog_lambda_forwarder" {
99 | source = "cloudposse/datadog-lambda-forwarder/aws"
100 | # Cloud Posse recommends pinning every module to a specific version
101 | # version = "x.x.x"
102 |
103 | forwarder_log_enabled = true
104 | s3_buckets_with_prefixes = {
105 | MyBucketWithPrefix = {bucket_name = "my-bucket-with-prefix", bucket_prefix = "events/"}
106 | AnotherWithPrefix = {bucket_name = "another-with-prefix", bucket_prefix = "records/"}
107 | }
108 |       s3_bucket_kms_arns = ["arn:aws:kms:us-west-2:123456789012:key/b204f3d2-1111-2222-3333-444455556666"]
109 | }
110 | ```
111 |
112 | To enable Datadog forwarder for RDS authentication CloudWatch logs:
113 | ```hcl
114 | module "datadog_lambda_forwarder" {
115 | source = "cloudposse/datadog-lambda-forwarder/aws"
116 | # Cloud Posse recommends pinning every module to a specific version
117 | # version = "x.x.x"
118 |
119 | forwarder_log_enabled = true
120 | cloudwatch_forwarder_log_groups = {
121 | postgres = {
122 | name = "/aws/rds/cluster/pg-main/postgresql"
123 | filter_pattern = ""
124 | }
125 | }
126 | }
127 | ```
128 |
129 | To enable Datadog forwarder for VPC Flow Logs CloudWatch logs:
130 | ```hcl
131 | module "datadog_lambda_forwarder" {
132 | source = "cloudposse/datadog-lambda-forwarder/aws"
133 | # Cloud Posse recommends pinning every module to a specific version
134 | # version = "x.x.x"
135 |
136 | forwarder_vpc_logs_enabled = true
137 | vpclogs_cloudwatch_log_group = "/aws/vpc/flowlogs/vpc1"
138 | }
139 | ```
140 |
141 |   To use a local copy of the Lambda code, you can specify the artifact URL:
142 | ```hcl
143 | module "datadog_lambda_forwarder" {
144 | source = "cloudposse/datadog-lambda-forwarder/aws"
145 | # Cloud Posse recommends pinning every module to a specific version
146 | # version = "x.x.x"
147 |
148 | forwarder_rds_enabled = true
149 | forwarder_rds_artifact_url = file("${path.module}/function.zip")
150 | }
151 | ```
152 |
153 | # Example usage
154 | examples: |-
155 | Here is an example of using this module:
156 | - [`examples/complete`](examples/complete) - complete example of using this module
157 |
158 | # How to get started quickly
159 | #quickstart: |-
160 | # Here's how to get started...
161 |
162 | # Other files to include in this README from the project folder
163 | include: []
164 | contributors: []
165 |
--------------------------------------------------------------------------------
/atmos.yaml:
--------------------------------------------------------------------------------
1 | # Atmos Configuration — powered by https://atmos.tools
2 | #
3 | # This configuration enables centralized, DRY, and consistent project scaffolding using Atmos.
4 | #
5 | # Included features:
6 | # - Organizational custom commands: https://atmos.tools/core-concepts/custom-commands
7 | # - Automated README generation: https://atmos.tools/cli/commands/docs/generate
8 | #
9 |
10 | # Import shared configuration used by all modules
11 | import:
12 | - https://raw.githubusercontent.com/cloudposse/.github/refs/heads/main/.github/atmos/terraform-module.yaml
13 |
--------------------------------------------------------------------------------
/context.tf:
--------------------------------------------------------------------------------
1 | #
2 | # ONLY EDIT THIS FILE IN github.com/cloudposse/terraform-null-label
3 | # All other instances of this file should be a copy of that one
4 | #
5 | #
6 | # Copy this file from https://github.com/cloudposse/terraform-null-label/blob/master/exports/context.tf
7 | # and then place it in your Terraform module to automatically get
8 | # Cloud Posse's standard configuration inputs suitable for passing
9 | # to Cloud Posse modules.
10 | #
11 | # curl -sL https://raw.githubusercontent.com/cloudposse/terraform-null-label/master/exports/context.tf -o context.tf
12 | #
13 | # Modules should access the whole context as `module.this.context`
14 | # to get the input variables with nulls for defaults,
15 | # for example `context = module.this.context`,
16 | # and access individual variables as `module.this.`,
17 | # with final values filled in.
18 | #
19 | # For example, when using defaults, `module.this.context.delimiter`
20 | # will be null, and `module.this.delimiter` will be `-` (hyphen).
21 | #
22 |
23 | module "this" {
24 | source = "cloudposse/label/null"
25 | version = "0.25.0" # requires Terraform >= 0.13.0
26 |
27 | enabled = var.enabled
28 | namespace = var.namespace
29 | tenant = var.tenant
30 | environment = var.environment
31 | stage = var.stage
32 | name = var.name
33 | delimiter = var.delimiter
34 | attributes = var.attributes
35 | tags = var.tags
36 | additional_tag_map = var.additional_tag_map
37 | label_order = var.label_order
38 | regex_replace_chars = var.regex_replace_chars
39 | id_length_limit = var.id_length_limit
40 | label_key_case = var.label_key_case
41 | label_value_case = var.label_value_case
42 | descriptor_formats = var.descriptor_formats
43 | labels_as_tags = var.labels_as_tags
44 |
45 | context = var.context
46 | }
47 |
48 | # Copy contents of cloudposse/terraform-null-label/variables.tf here
49 |
50 | variable "context" {
51 | type = any
52 | default = {
53 | enabled = true
54 | namespace = null
55 | tenant = null
56 | environment = null
57 | stage = null
58 | name = null
59 | delimiter = null
60 | attributes = []
61 | tags = {}
62 | additional_tag_map = {}
63 | regex_replace_chars = null
64 | label_order = []
65 | id_length_limit = null
66 | label_key_case = null
67 | label_value_case = null
68 | descriptor_formats = {}
69 | # Note: we have to use [] instead of null for unset lists due to
70 | # https://github.com/hashicorp/terraform/issues/28137
71 | # which was not fixed until Terraform 1.0.0,
72 | # but we want the default to be all the labels in `label_order`
73 | # and we want users to be able to prevent all tag generation
74 | # by setting `labels_as_tags` to `[]`, so we need
75 | # a different sentinel to indicate "default"
76 | labels_as_tags = ["unset"]
77 | }
78 | description = <<-EOT
79 | Single object for setting entire context at once.
80 | See description of individual variables for details.
81 | Leave string and numeric variables as `null` to use default value.
82 | Individual variable settings (non-null) override settings in context object,
83 | except for attributes, tags, and additional_tag_map, which are merged.
84 | EOT
85 |
86 | validation {
87 | condition = lookup(var.context, "label_key_case", null) == null ? true : contains(["lower", "title", "upper"], var.context["label_key_case"])
88 | error_message = "Allowed values: `lower`, `title`, `upper`."
89 | }
90 |
91 | validation {
92 | condition = lookup(var.context, "label_value_case", null) == null ? true : contains(["lower", "title", "upper", "none"], var.context["label_value_case"])
93 | error_message = "Allowed values: `lower`, `title`, `upper`, `none`."
94 | }
95 | }
96 |
97 | variable "enabled" {
98 | type = bool
99 | default = null
100 | description = "Set to false to prevent the module from creating any resources"
101 | }
102 |
103 | variable "namespace" {
104 | type = string
105 | default = null
106 | description = "ID element. Usually an abbreviation of your organization name, e.g. 'eg' or 'cp', to help ensure generated IDs are globally unique"
107 | }
108 |
109 | variable "tenant" {
110 | type = string
111 | default = null
112 | description = "ID element _(Rarely used, not included by default)_. A customer identifier, indicating who this instance of a resource is for"
113 | }
114 |
115 | variable "environment" {
116 | type = string
117 | default = null
118 | description = "ID element. Usually used for region e.g. 'uw2', 'us-west-2', OR role 'prod', 'staging', 'dev', 'UAT'"
119 | }
120 |
121 | variable "stage" {
122 | type = string
123 | default = null
124 | description = "ID element. Usually used to indicate role, e.g. 'prod', 'staging', 'source', 'build', 'test', 'deploy', 'release'"
125 | }
126 |
127 | variable "name" {
128 | type = string
129 | default = null
130 | description = <<-EOT
131 | ID element. Usually the component or solution name, e.g. 'app' or 'jenkins'.
132 | This is the only ID element not also included as a `tag`.
133 | The "name" tag is set to the full `id` string. There is no tag with the value of the `name` input.
134 | EOT
135 | }
136 |
137 | variable "delimiter" {
138 | type = string
139 | default = null
140 | description = <<-EOT
141 | Delimiter to be used between ID elements.
142 | Defaults to `-` (hyphen). Set to `""` to use no delimiter at all.
143 | EOT
144 | }
145 |
146 | variable "attributes" {
147 | type = list(string)
148 | default = []
149 | description = <<-EOT
150 | ID element. Additional attributes (e.g. `workers` or `cluster`) to add to `id`,
151 | in the order they appear in the list. New attributes are appended to the
152 | end of the list. The elements of the list are joined by the `delimiter`
153 | and treated as a single ID element.
154 | EOT
155 | }
156 |
157 | variable "labels_as_tags" {
158 | type = set(string)
159 | default = ["default"]
160 | description = <<-EOT
161 | Set of labels (ID elements) to include as tags in the `tags` output.
162 | Default is to include all labels.
163 | Tags with empty values will not be included in the `tags` output.
164 | Set to `[]` to suppress all generated tags.
165 | **Notes:**
166 | The value of the `name` tag, if included, will be the `id`, not the `name`.
167 | Unlike other `null-label` inputs, the initial setting of `labels_as_tags` cannot be
168 | changed in later chained modules. Attempts to change it will be silently ignored.
169 | EOT
170 | }
171 |
172 | variable "tags" {
173 | type = map(string)
174 | default = {}
175 | description = <<-EOT
176 | Additional tags (e.g. `{'BusinessUnit': 'XYZ'}`).
177 | Neither the tag keys nor the tag values will be modified by this module.
178 | EOT
179 | }
180 |
181 | variable "additional_tag_map" {
182 | type = map(string)
183 | default = {}
184 | description = <<-EOT
185 | Additional key-value pairs to add to each map in `tags_as_list_of_maps`. Not added to `tags` or `id`.
186 | This is for some rare cases where resources want additional configuration of tags
187 | and therefore take a list of maps with tag key, value, and additional configuration.
188 | EOT
189 | }
190 |
191 | variable "label_order" {
192 | type = list(string)
193 | default = null
194 | description = <<-EOT
195 | The order in which the labels (ID elements) appear in the `id`.
196 | Defaults to ["namespace", "environment", "stage", "name", "attributes"].
197 | You can omit any of the 6 labels ("tenant" is the 6th), but at least one must be present.
198 | EOT
199 | }
200 |
201 | variable "regex_replace_chars" {
202 | type = string
203 | default = null
204 | description = <<-EOT
205 | Terraform regular expression (regex) string.
206 | Characters matching the regex will be removed from the ID elements.
207 | If not set, `"/[^a-zA-Z0-9-]/"` is used to remove all characters other than hyphens, letters and digits.
208 | EOT
209 | }
210 |
211 | variable "id_length_limit" {
212 | type = number
213 | default = null
214 | description = <<-EOT
215 | Limit `id` to this many characters (minimum 6).
216 | Set to `0` for unlimited length.
217 | Set to `null` for keep the existing setting, which defaults to `0`.
218 | Does not affect `id_full`.
219 | EOT
220 | validation {
221 | condition = var.id_length_limit == null ? true : var.id_length_limit >= 6 || var.id_length_limit == 0
222 | error_message = "The id_length_limit must be >= 6 if supplied (not null), or 0 for unlimited length."
223 | }
224 | }
225 |
226 | variable "label_key_case" {
227 | type = string
228 | default = null
229 | description = <<-EOT
230 | Controls the letter case of the `tags` keys (label names) for tags generated by this module.
231 | Does not affect keys of tags passed in via the `tags` input.
232 | Possible values: `lower`, `title`, `upper`.
233 | Default value: `title`.
234 | EOT
235 |
236 | validation {
237 | condition = var.label_key_case == null ? true : contains(["lower", "title", "upper"], var.label_key_case)
238 | error_message = "Allowed values: `lower`, `title`, `upper`."
239 | }
240 | }
241 |
242 | variable "label_value_case" {
243 | type = string
244 | default = null
245 | description = <<-EOT
246 | Controls the letter case of ID elements (labels) as included in `id`,
247 | set as tag values, and output by this module individually.
248 | Does not affect values of tags passed in via the `tags` input.
249 | Possible values: `lower`, `title`, `upper` and `none` (no transformation).
250 | Set this to `title` and set `delimiter` to `""` to yield Pascal Case IDs.
251 | Default value: `lower`.
252 | EOT
253 |
254 | validation {
255 | condition = var.label_value_case == null ? true : contains(["lower", "title", "upper", "none"], var.label_value_case)
256 | error_message = "Allowed values: `lower`, `title`, `upper`, `none`."
257 | }
258 | }
259 |
260 | variable "descriptor_formats" {
261 | type = any
262 | default = {}
263 | description = <<-EOT
264 | Describe additional descriptors to be output in the `descriptors` output map.
265 | Map of maps. Keys are names of descriptors. Values are maps of the form
266 | `{
267 | format = string
268 | labels = list(string)
269 | }`
270 | (Type is `any` so the map values can later be enhanced to provide additional options.)
271 | `format` is a Terraform format string to be passed to the `format()` function.
272 | `labels` is a list of labels, in order, to pass to `format()` function.
273 | Label values will be normalized before being passed to `format()` so they will be
274 | identical to how they appear in `id`.
275 | Default is `{}` (`descriptors` output will be empty).
276 | EOT
277 | }
278 |
279 | #### End of copy of cloudposse/terraform-null-label/variables.tf
280 |
--------------------------------------------------------------------------------
/examples/complete/context.tf:
--------------------------------------------------------------------------------
1 | #
2 | # ONLY EDIT THIS FILE IN github.com/cloudposse/terraform-null-label
3 | # All other instances of this file should be a copy of that one
4 | #
5 | #
6 | # Copy this file from https://github.com/cloudposse/terraform-null-label/blob/master/exports/context.tf
7 | # and then place it in your Terraform module to automatically get
8 | # Cloud Posse's standard configuration inputs suitable for passing
9 | # to Cloud Posse modules.
10 | #
11 | # curl -sL https://raw.githubusercontent.com/cloudposse/terraform-null-label/master/exports/context.tf -o context.tf
12 | #
13 | # Modules should access the whole context as `module.this.context`
14 | # to get the input variables with nulls for defaults,
15 | # for example `context = module.this.context`,
16 | # and access individual variables as `module.this.`,
17 | # with final values filled in.
18 | #
19 | # For example, when using defaults, `module.this.context.delimiter`
20 | # will be null, and `module.this.delimiter` will be `-` (hyphen).
21 | #
22 |
23 | module "this" {
24 | source = "cloudposse/label/null"
25 | version = "0.25.0" # requires Terraform >= 0.13.0
26 |
27 | enabled = var.enabled
28 | namespace = var.namespace
29 | tenant = var.tenant
30 | environment = var.environment
31 | stage = var.stage
32 | name = var.name
33 | delimiter = var.delimiter
34 | attributes = var.attributes
35 | tags = var.tags
36 | additional_tag_map = var.additional_tag_map
37 | label_order = var.label_order
38 | regex_replace_chars = var.regex_replace_chars
39 | id_length_limit = var.id_length_limit
40 | label_key_case = var.label_key_case
41 | label_value_case = var.label_value_case
42 | descriptor_formats = var.descriptor_formats
43 | labels_as_tags = var.labels_as_tags
44 |
45 | context = var.context
46 | }
47 |
48 | # Copy contents of cloudposse/terraform-null-label/variables.tf here
49 |
50 | variable "context" {
51 | type = any
52 | default = {
53 | enabled = true
54 | namespace = null
55 | tenant = null
56 | environment = null
57 | stage = null
58 | name = null
59 | delimiter = null
60 | attributes = []
61 | tags = {}
62 | additional_tag_map = {}
63 | regex_replace_chars = null
64 | label_order = []
65 | id_length_limit = null
66 | label_key_case = null
67 | label_value_case = null
68 | descriptor_formats = {}
69 | # Note: we have to use [] instead of null for unset lists due to
70 | # https://github.com/hashicorp/terraform/issues/28137
71 | # which was not fixed until Terraform 1.0.0,
72 | # but we want the default to be all the labels in `label_order`
73 | # and we want users to be able to prevent all tag generation
74 | # by setting `labels_as_tags` to `[]`, so we need
75 | # a different sentinel to indicate "default"
76 | labels_as_tags = ["unset"]
77 | }
78 | description = <<-EOT
79 | Single object for setting entire context at once.
80 | See description of individual variables for details.
81 | Leave string and numeric variables as `null` to use default value.
82 | Individual variable settings (non-null) override settings in context object,
83 | except for attributes, tags, and additional_tag_map, which are merged.
84 | EOT
85 |
86 | validation {
87 | condition = lookup(var.context, "label_key_case", null) == null ? true : contains(["lower", "title", "upper"], var.context["label_key_case"])
88 | error_message = "Allowed values: `lower`, `title`, `upper`."
89 | }
90 |
91 | validation {
92 | condition = lookup(var.context, "label_value_case", null) == null ? true : contains(["lower", "title", "upper", "none"], var.context["label_value_case"])
93 | error_message = "Allowed values: `lower`, `title`, `upper`, `none`."
94 | }
95 | }
96 |
97 | variable "enabled" {
98 | type = bool
99 | default = null
100 | description = "Set to false to prevent the module from creating any resources"
101 | }
102 |
103 | variable "namespace" {
104 | type = string
105 | default = null
106 | description = "ID element. Usually an abbreviation of your organization name, e.g. 'eg' or 'cp', to help ensure generated IDs are globally unique"
107 | }
108 |
109 | variable "tenant" {
110 | type = string
111 | default = null
112 | description = "ID element _(Rarely used, not included by default)_. A customer identifier, indicating who this instance of a resource is for"
113 | }
114 |
115 | variable "environment" {
116 | type = string
117 | default = null
118 | description = "ID element. Usually used for region e.g. 'uw2', 'us-west-2', OR role 'prod', 'staging', 'dev', 'UAT'"
119 | }
120 |
121 | variable "stage" {
122 | type = string
123 | default = null
124 | description = "ID element. Usually used to indicate role, e.g. 'prod', 'staging', 'source', 'build', 'test', 'deploy', 'release'"
125 | }
126 |
127 | variable "name" {
128 | type = string
129 | default = null
130 | description = <<-EOT
131 | ID element. Usually the component or solution name, e.g. 'app' or 'jenkins'.
132 | This is the only ID element not also included as a `tag`.
133 | The "name" tag is set to the full `id` string. There is no tag with the value of the `name` input.
134 | EOT
135 | }
136 |
137 | variable "delimiter" {
138 | type = string
139 | default = null
140 | description = <<-EOT
141 | Delimiter to be used between ID elements.
142 | Defaults to `-` (hyphen). Set to `""` to use no delimiter at all.
143 | EOT
144 | }
145 |
146 | variable "attributes" {
147 | type = list(string)
148 | default = []
149 | description = <<-EOT
150 | ID element. Additional attributes (e.g. `workers` or `cluster`) to add to `id`,
151 | in the order they appear in the list. New attributes are appended to the
152 | end of the list. The elements of the list are joined by the `delimiter`
153 | and treated as a single ID element.
154 | EOT
155 | }
156 |
157 | variable "labels_as_tags" {
158 | type = set(string)
159 | default = ["default"]
160 | description = <<-EOT
161 | Set of labels (ID elements) to include as tags in the `tags` output.
162 | Default is to include all labels.
163 | Tags with empty values will not be included in the `tags` output.
164 | Set to `[]` to suppress all generated tags.
165 | **Notes:**
166 | The value of the `name` tag, if included, will be the `id`, not the `name`.
167 | Unlike other `null-label` inputs, the initial setting of `labels_as_tags` cannot be
168 | changed in later chained modules. Attempts to change it will be silently ignored.
169 | EOT
170 | }
171 |
172 | variable "tags" {
173 | type = map(string)
174 | default = {}
175 | description = <<-EOT
176 | Additional tags (e.g. `{'BusinessUnit': 'XYZ'}`).
177 | Neither the tag keys nor the tag values will be modified by this module.
178 | EOT
179 | }
180 |
181 | variable "additional_tag_map" {
182 | type = map(string)
183 | default = {}
184 | description = <<-EOT
185 | Additional key-value pairs to add to each map in `tags_as_list_of_maps`. Not added to `tags` or `id`.
186 | This is for some rare cases where resources want additional configuration of tags
187 | and therefore take a list of maps with tag key, value, and additional configuration.
188 | EOT
189 | }
190 |
191 | variable "label_order" {
192 | type = list(string)
193 | default = null
194 | description = <<-EOT
195 | The order in which the labels (ID elements) appear in the `id`.
196 | Defaults to ["namespace", "environment", "stage", "name", "attributes"].
197 | You can omit any of the 6 labels ("tenant" is the 6th), but at least one must be present.
198 | EOT
199 | }
200 |
201 | variable "regex_replace_chars" {
202 | type = string
203 | default = null
204 | description = <<-EOT
205 | Terraform regular expression (regex) string.
206 | Characters matching the regex will be removed from the ID elements.
207 | If not set, `"/[^a-zA-Z0-9-]/"` is used to remove all characters other than hyphens, letters and digits.
208 | EOT
209 | }
210 |
211 | variable "id_length_limit" {
212 | type = number
213 | default = null
214 | description = <<-EOT
215 | Limit `id` to this many characters (minimum 6).
216 | Set to `0` for unlimited length.
217 | Set to `null` for keep the existing setting, which defaults to `0`.
218 | Does not affect `id_full`.
219 | EOT
220 | validation {
221 | condition = var.id_length_limit == null ? true : var.id_length_limit >= 6 || var.id_length_limit == 0
222 | error_message = "The id_length_limit must be >= 6 if supplied (not null), or 0 for unlimited length."
223 | }
224 | }
225 |
226 | variable "label_key_case" {
227 | type = string
228 | default = null
229 | description = <<-EOT
230 | Controls the letter case of the `tags` keys (label names) for tags generated by this module.
231 | Does not affect keys of tags passed in via the `tags` input.
232 | Possible values: `lower`, `title`, `upper`.
233 | Default value: `title`.
234 | EOT
235 |
236 | validation {
237 | condition = var.label_key_case == null ? true : contains(["lower", "title", "upper"], var.label_key_case)
238 | error_message = "Allowed values: `lower`, `title`, `upper`."
239 | }
240 | }
241 |
242 | variable "label_value_case" {
243 | type = string
244 | default = null
245 | description = <<-EOT
246 | Controls the letter case of ID elements (labels) as included in `id`,
247 | set as tag values, and output by this module individually.
248 | Does not affect values of tags passed in via the `tags` input.
249 | Possible values: `lower`, `title`, `upper` and `none` (no transformation).
250 | Set this to `title` and set `delimiter` to `""` to yield Pascal Case IDs.
251 | Default value: `lower`.
252 | EOT
253 |
254 | validation {
255 | condition = var.label_value_case == null ? true : contains(["lower", "title", "upper", "none"], var.label_value_case)
256 | error_message = "Allowed values: `lower`, `title`, `upper`, `none`."
257 | }
258 | }
259 |
260 | variable "descriptor_formats" {
261 | type = any
262 | default = {}
263 | description = <<-EOT
264 | Describe additional descriptors to be output in the `descriptors` output map.
265 | Map of maps. Keys are names of descriptors. Values are maps of the form
266 | `{
267 | format = string
268 | labels = list(string)
269 | }`
270 | (Type is `any` so the map values can later be enhanced to provide additional options.)
271 | `format` is a Terraform format string to be passed to the `format()` function.
272 | `labels` is a list of labels, in order, to pass to `format()` function.
273 | Label values will be normalized before being passed to `format()` so they will be
274 | identical to how they appear in `id`.
275 | Default is `{}` (`descriptors` output will be empty).
276 | EOT
277 | }
278 |
279 | #### End of copy of cloudposse/terraform-null-label/variables.tf
280 |
--------------------------------------------------------------------------------
/examples/complete/fixtures.us-east-2.tfvars:
--------------------------------------------------------------------------------
1 | # Test fixture values for examples/complete, targeting us-east-2.
1 | region = "us-east-2"
2 |
3 | namespace = "eg"
4 |
5 | environment = "ue2"
6 |
7 | stage = "test"
8 |
9 | name = "datadog-lambda-forwarder"
10 |
11 | # API key is read from the SSM parameter created by examples/complete/main.tf.
11 | dd_api_key_source = {
12 | resource = "ssm"
13 | identifier = "/datadog/datadog_api_key"
14 | }
15 |
16 | cloudwatch_forwarder_event_patterns = {
17 | "guardduty" = {
18 | source = ["aws.guardduty"]
19 | detail-type = ["GuardDuty Finding"]
20 | }
21 | "cloudtrail" = {
22 | source = ["aws.cloudtrail"]
23 | detail-type = ["AWS API Call via CloudTrail"]
24 | }
25 | }
26 |
26 |
--------------------------------------------------------------------------------
/examples/complete/main.tf:
--------------------------------------------------------------------------------
1 | # CloudWatch log group used as the log-forwarding source for this example.
1 | module "cloudwatch_logs" {
2 | source = "cloudposse/cloudwatch-logs/aws"
3 | version = "0.6.6"
4 |
5 | name = "postgresql"
6 | context = module.this.context
7 | }
8 |
9 | # Dummy Datadog API key stored in SSM so the module under test can resolve it.
9 | resource "aws_ssm_parameter" "datadog_key" {
10 | count = module.this.enabled ? 1 : 0
11 |
12 | name = "/datadog/datadog_api_key"
13 | description = "Test Datadog key"
14 | type = "SecureString"
15 | value = "testkey" # not a real key — test fixture only
16 | overwrite = true # NOTE(review): `overwrite` is deprecated in AWS provider v5 — confirm before upgrading the provider pin
17 | }
18 |
19 | # Module under test: forwards the log group above plus the event patterns from the fixture.
19 | module "datadog_lambda_log_forwarder" {
20 | source = "../.."
21 |
22 | forwarder_log_enabled = true
23 |
24 | cloudwatch_forwarder_log_groups = {
25 | postgres = {
26 | name = module.cloudwatch_logs.log_group_name
27 | filter_pattern = ""
28 | }
29 | }
30 |
31 | cloudwatch_forwarder_event_patterns = var.cloudwatch_forwarder_event_patterns
32 |
33 | # Supply tags
34 | # This results in DD_TAGS = "testkey10,testkey3:testval3,testkey4:testval4"
35 | dd_tags_map = {
36 | testkey3 = "testval3"
37 | testkey4 = "testval4"
38 | testkey10 = null
39 | }
40 |
41 | dd_api_key_source = var.dd_api_key_source
42 |
43 | context = module.this.context
44 |
45 | depends_on = [aws_ssm_parameter.datadog_key] # ensure the key exists before the module's SSM data source reads it
46 | }
47 |
--------------------------------------------------------------------------------
/examples/complete/outputs.tf:
--------------------------------------------------------------------------------
1 | # Outputs proxied from the datadog-lambda-forwarder module under test.
1 | output "lambda_forwarder_log_function_arn" {
2 | description = "Datadog Lambda forwarder CloudWatch/S3 function ARN"
3 | value = module.datadog_lambda_log_forwarder.lambda_forwarder_log_function_arn
4 | }
5 |
6 | output "lambda_forwarder_log_function_name" {
7 | description = "Datadog Lambda forwarder CloudWatch/S3 function name"
8 | value = module.datadog_lambda_log_forwarder.lambda_forwarder_log_function_name
9 | }
10 |
--------------------------------------------------------------------------------
/examples/complete/providers.tf:
--------------------------------------------------------------------------------
1 | # AWS provider region is supplied via the tfvars fixture (us-east-2).
1 | provider "aws" {
2 | region = var.region
3 | }
4 |
--------------------------------------------------------------------------------
/examples/complete/variables.tf:
--------------------------------------------------------------------------------
1 | variable "region" {
2 | type = string
3 | description = "AWS region" # e.g. "us-east-2"; set by fixtures.us-east-2.tfvars
4 | }
5 |
6 | variable "dd_api_key_source" {
7 | description = "One of: ARN for AWS Secrets Manager (asm) to retrieve the Datadog (DD) api key, ARN for the KMS (kms) key used to decrypt the ciphertext_blob of the api key, or the name of the SSM (ssm) parameter used to retrieve the Datadog API key."
8 | type = object({
9 | resource = string
10 | identifier = string
11 | })
12 |
13 | default = {
14 | resource = "ssm"
15 | identifier = "/datadog/datadog_api_key"
16 | }
17 |
18 | # Resource can be one of kms, asm, ssm ("" to disable all lambda resources)
19 | validation {
20 | # Exact-match check. The previous unanchored regex ("(kms|asm|ssm)") also accepted
21 | # invalid values such as "ssms" or "akms", which then silently misconfigured the module.
22 | condition = contains(["kms", "asm", "ssm", ""], var.dd_api_key_source.resource)
23 | error_message = "Provide one, and only one, ARN for (kms, asm) or name (ssm) to retrieve or decrypt Datadog api key."
24 | }
25 |
26 | # Check KMS ARN format. The partition segment is not hard-coded to "aws" so that
27 | # aws-cn / aws-us-gov ARNs also pass (the module itself is partition-aware).
28 | validation {
29 | condition = var.dd_api_key_source.resource == "kms" ? can(regex("^arn:aws[a-zA-Z-]*:kms:.*:key/.*", var.dd_api_key_source.identifier)) : true
30 | error_message = "ARN for KMS key does not appear to be valid format (example: arn:aws:kms:us-west-2:111122223333:key/1234abcd-12ab-34cd-56ef-1234567890ab)."
31 | }
32 |
33 | # Check ASM ARN format (partition-agnostic, same reasoning as the KMS check).
34 | validation {
35 | condition = var.dd_api_key_source.resource == "asm" ? can(regex("^arn:aws[a-zA-Z-]*:secretsmanager:.*:secret:.*", var.dd_api_key_source.identifier)) : true
36 | error_message = "ARN for AWS Secrets Manager (asm) does not appear to be valid format (example: arn:aws:secretsmanager:us-west-2:111122223333:secret:aes128-1a2b3c)."
37 | }
38 |
39 | # Check SSM name format
40 | validation {
41 | condition = var.dd_api_key_source.resource == "ssm" ? can(regex("^[a-zA-Z0-9_./-]+$", var.dd_api_key_source.identifier)) : true
42 | error_message = "Name for SSM parameter does not appear to be valid format, acceptable characters are `a-zA-Z0-9_.-` and `/` to delineate hierarchies."
43 | }
44 | }
42 |
43 | # Optional object attributes below require Terraform >= 1.3 (pinned in versions.tf).
43 | variable "cloudwatch_forwarder_event_patterns" {
44 | type = map(object({
45 | version = optional(list(string))
46 | id = optional(list(string))
47 | detail-type = optional(list(string))
48 | source = optional(list(string))
49 | account = optional(list(string))
50 | time = optional(list(string))
51 | region = optional(list(string))
52 | resources = optional(list(string))
53 | detail = optional(map(list(string)))
54 | }))
55 | description = <<-EOF
56 | Map of title => CloudWatch Event patterns to forward to Datadog. Event structure from here:
57 | <https://docs.aws.amazon.com/AmazonCloudWatch/latest/events/CloudWatchEventsandEventPatterns.html>
58 | Example:
59 | ```hcl
60 | cloudwatch_forwarder_event_patterns = {
61 | "guardduty" = {
62 | source = ["aws.guardduty"]
63 | detail-type = ["GuardDuty Finding"]
64 | }
65 | "ec2-terminated" = {
66 | source = ["aws.ec2"]
67 | detail-type = ["EC2 Instance State-change Notification"]
68 | detail = {
69 | state = ["terminated"]
70 | }
71 | }
72 | }
73 | ```
74 | EOF
75 | default = {}
76 | }
76 |
--------------------------------------------------------------------------------
/examples/complete/versions.tf:
--------------------------------------------------------------------------------
1 | terraform {
2 | required_version = ">= 1.3.0" # 1.3+ needed for optional() object attributes used in variables.tf
3 |
4 | required_providers {
5 | # Update these to reflect the actual requirements of your module
6 | # NOTE(review): local/random pins look like scaffolding defaults — confirm the example actually uses them
6 | local = {
7 | source = "hashicorp/local"
8 | version = ">= 1.2"
9 | }
10 | random = {
11 | source = "hashicorp/random"
12 | version = ">= 2.2"
13 | }
14 | aws = {
15 | source = "hashicorp/aws"
16 | version = ">= 3.0"
17 | }
18 | archive = {
19 | source = "hashicorp/archive"
20 | version = ">= 2.2.0"
21 | }
22 | }
23 | }
24 |
--------------------------------------------------------------------------------
/lambda-log.tf:
--------------------------------------------------------------------------------
1 | # The principal Lambda forwarder for Datadog is implemented here:
2 | # https://github.com/DataDog/datadog-serverless-functions/blob/master/aws/logs_monitoring/lambda_function.py
3 | # It can scrape logs from S3 from specific services (not all s3 logs are supported)
4 | # Refer to the table here https://docs.datadoghq.com/logs/guide/send-aws-services-logs-with-the-datadog-lambda-function/?tab=awsconsole#automatically-set-up-triggers
5 |
6 | locals {
7 | # Union of plain bucket names and the bucket names from the prefixed-bucket map, deduplicated.
7 | s3_bucket_names_to_authorize = toset(flatten([var.s3_buckets, [for o in var.s3_buckets_with_prefixes : o.bucket_name]]))
8 | # True only when the log forwarder is on AND at least one S3 source is configured.
8 | s3_logs_enabled = local.lambda_enabled && var.forwarder_log_enabled && (length(var.s3_buckets) != 0 || length(var.s3_buckets_with_prefixes) != 0)
9 |
10 | # Explicit URL override wins; otherwise download the release zip matching dd_forwarder_version.
10 | forwarder_log_artifact_url = var.forwarder_log_artifact_url != null ? var.forwarder_log_artifact_url : (
11 | "https://github.com/DataDog/datadog-serverless-functions/releases/download/aws-dd-forwarder-${var.dd_forwarder_version}/${var.dd_artifact_filename}-${var.dd_forwarder_version}.zip"
12 | )
13 | }
14 |
15 | module "forwarder_log_label" {
16 | source = "cloudposse/label/null"
17 | version = "0.25.0"
18 |
19 | enabled = local.lambda_enabled && var.forwarder_log_enabled
20 |
21 | attributes = ["logs"]
22 |
23 | context = module.this.context
24 | }
25 |
26 | module "forwarder_log_s3_label" {
27 | source = "cloudposse/label/null"
28 | version = "0.25.0"
29 |
30 | # NOTE(review): local.s3_logs_enabled already includes local.lambda_enabled, so the
31 | # extra "local.lambda_enabled &&" here is redundant (harmless).
30 | enabled = local.lambda_enabled && local.s3_logs_enabled
31 |
32 | attributes = ["logs-s3"]
33 |
34 | context = module.this.context
35 | }
36 |
37 | # Downloads the forwarder zip so it can be uploaded as the Lambda package.
37 | module "forwarder_log_artifact" {
38 | count = local.lambda_enabled && var.forwarder_log_enabled ? 1 : 0
39 |
40 | source = "cloudposse/module-artifact/external"
41 | version = "0.8.0"
42 |
43 | filename = "forwarder-log.zip"
44 | module_name = var.dd_module_name
45 | module_path = path.module
46 | url = local.forwarder_log_artifact_url
47 | }
48 |
49 | # Execution role assumed by the log-forwarder Lambda.
49 | resource "aws_iam_role" "lambda_forwarder_log" {
50 | count = local.lambda_enabled && var.forwarder_log_enabled ? 1 : 0
51 |
52 | name = module.forwarder_log_label.id
53 | path = var.forwarder_iam_path
54 | description = "Datadog Lambda CloudWatch/S3 logs forwarder"
55 | assume_role_policy = data.aws_iam_policy_document.assume_role[0].json
56 | permissions_boundary = var.log_permissions_boundary
57 | tags = module.forwarder_log_label.tags
58 |
59 | # AWS will create the log group if needed. Make sure we create it first.
60 | depends_on = [aws_cloudwatch_log_group.forwarder_log]
61 | }
62 |
63 | # Base policy (CloudWatch Logs write + API-key read) shared by all forwarders, defined in main.tf.
63 | resource "aws_iam_policy" "lambda_forwarder_log" {
64 | count = local.lambda_enabled && var.forwarder_log_enabled ? 1 : 0
65 |
66 | name = module.forwarder_log_label.id
67 | path = var.forwarder_iam_path
68 | description = "Datadog Lambda CloudWatch/S3 logs forwarder"
69 | policy = data.aws_iam_policy_document.lambda_default[0].json
70 | tags = module.forwarder_log_label.tags
71 | }
72 |
73 | resource "aws_iam_role_policy_attachment" "lambda_forwarder_log" {
74 | count = local.lambda_enabled && var.forwarder_log_enabled ? 1 : 0
75 |
76 | role = aws_iam_role.lambda_forwarder_log[0].name
77 | policy_arn = aws_iam_policy.lambda_forwarder_log[0].arn
78 | }
79 |
80 | ######################################################################
81 | ## Create lambda function
82 |
83 | resource "aws_lambda_function" "forwarder_log" {
84 | count = local.lambda_enabled && var.forwarder_log_enabled ? 1 : 0
85 |
86 | #checkov:skip=BC_AWS_GENERAL_64: (Pertaining to Lambda DLQ) Vendor lambda does not have a means to reprocess failed events.
87 |
88 | description = "Datadog Forwarder for CloudWatch/S3 logs"
89 | filename = module.forwarder_log_artifact[0].file
90 | function_name = module.forwarder_log_label.id
91 | role = aws_iam_role.lambda_forwarder_log[0].arn
92 | handler = "lambda_function.lambda_handler"
93 | source_code_hash = module.forwarder_log_artifact[0].base64sha256
94 | runtime = var.lambda_runtime
95 | architectures = var.lambda_architectures
96 | memory_size = var.lambda_memory_size
97 | timeout = var.lambda_timeout
98 | reserved_concurrent_executions = var.lambda_reserved_concurrent_executions
99 | layers = var.forwarder_log_layers
100 |
101 | # Attach to a VPC only when both subnets and security groups are supplied (either may be null).
101 | dynamic "vpc_config" {
102 | for_each = try(length(var.subnet_ids), 0) > 0 && try(length(var.security_group_ids), 0) > 0 ? [true] : []
103 | content {
104 | security_group_ids = var.security_group_ids
105 | subnet_ids = var.subnet_ids
106 | }
107 | }
108 |
109 | # DD_* configuration assembled in main.tf (API key source, site, tags, debug, cache bucket).
109 | environment {
110 | variables = local.lambda_env
111 | }
112 |
113 | tracing_config {
114 | mode = var.tracing_config_mode
115 | }
116 |
117 | tags = module.forwarder_log_label.tags
118 |
119 | # AWS will create the log group if needed. Make sure we create it first.
120 | depends_on = [aws_cloudwatch_log_group.forwarder_log]
121 | }
122 |
123 | # Let each source bucket invoke the forwarder on object-created events.
123 | resource "aws_lambda_permission" "allow_s3_bucket" {
124 | for_each = local.s3_logs_enabled ? local.s3_bucket_names_to_authorize : []
125 |
126 | action = "lambda:InvokeFunction"
127 | function_name = aws_lambda_function.forwarder_log[0].arn
128 | principal = "s3.amazonaws.com"
129 | source_arn = "${local.arn_format}:s3:::${each.value}"
130 | }
131 |
132 | # NOTE(review): aws_s3_bucket_notification manages a bucket's ENTIRE notification
133 | # configuration — confirm these buckets have no other notification config it would replace.
132 | resource "aws_s3_bucket_notification" "s3_bucket_notification" {
133 | for_each = local.s3_logs_enabled ? toset(var.s3_buckets) : []
134 |
135 | bucket = each.key
136 |
137 | lambda_function {
138 | lambda_function_arn = aws_lambda_function.forwarder_log[0].arn
139 | events = ["s3:ObjectCreated:*"]
140 | }
141 |
142 | depends_on = [aws_lambda_permission.allow_s3_bucket]
143 | }
144 |
145 | # Same as above, but scoped to a key prefix per bucket.
145 | resource "aws_s3_bucket_notification" "s3_bucket_notification_with_prefixes" {
146 | for_each = local.s3_logs_enabled ? var.s3_buckets_with_prefixes : {}
147 |
148 | bucket = each.value.bucket_name
149 |
150 | lambda_function {
151 | lambda_function_arn = aws_lambda_function.forwarder_log[0].arn
152 | events = ["s3:ObjectCreated:*"]
153 | filter_prefix = each.value.bucket_prefix
154 | }
155 |
156 | depends_on = [aws_lambda_permission.allow_s3_bucket]
157 | }
158 |
159 | # Read access to the source buckets (and, optionally, their KMS keys and the cache bucket).
159 | data "aws_iam_policy_document" "s3_log_bucket" {
160 | count = local.s3_logs_enabled ? 1 : 0
161 |
162 | statement {
163 | effect = "Allow"
164 |
165 | actions = [
166 | "s3:GetBucketLocation",
167 | "s3:GetObject",
168 | "s3:ListBucket",
169 | "s3:ListObjects", # NOTE(review): "s3:ListObjects" is not a real S3 IAM action (ListBucket above covers listing) — confirm and drop
170 | ]
171 | resources = concat(
172 | formatlist("%s:s3:::%s", local.arn_format, local.s3_bucket_names_to_authorize),
173 | formatlist("%s:s3:::%s/*", local.arn_format, local.s3_bucket_names_to_authorize)
174 | )
175 | }
176 |
177 | # Decrypt permission only when the source buckets are KMS-encrypted.
177 | dynamic "statement" {
178 | for_each = try(length(var.s3_bucket_kms_arns), 0) > 0 ? [true] : []
179 | content {
180 | effect = "Allow"
181 |
182 | actions = [
183 | "kms:Decrypt"
184 | ]
185 | resources = var.s3_bucket_kms_arns
186 | }
187 | }
188 |
189 | # Read/write on the tags-cache bucket created by module.tags_cache_s3_bucket.
189 | dynamic "statement" {
190 | for_each = var.forwarder_use_cache_bucket ? [true] : []
191 | content {
192 | effect = "Allow"
193 |
194 | actions = [
195 | "s3:GetObject",
196 | "s3:PutObject",
197 | "s3:ListObject", # NOTE(review): "s3:ListObject" is not a real S3 IAM action either — confirm and drop
198 | "s3:DeleteObject",
199 | ]
200 | resources = [
201 | one(module.tags_cache_s3_bucket[*].bucket_arn),
202 | "${one(module.tags_cache_s3_bucket[*].bucket_arn)}/*"
203 | ]
204 | }
205 | }
206 | }
207 |
208 | resource "aws_iam_policy" "lambda_forwarder_log_s3" {
209 | count = local.s3_logs_enabled ? 1 : 0
210 |
211 | name = module.forwarder_log_s3_label.id
212 | path = var.forwarder_iam_path
213 | description = "Allow Datadog Lambda Logs Forwarder to access S3 buckets"
214 | policy = join("", data.aws_iam_policy_document.s3_log_bucket[*].json)
215 | tags = module.forwarder_log_s3_label.tags
216 | }
217 |
218 | resource "aws_iam_role_policy_attachment" "datadog_s3" {
219 | count = local.s3_logs_enabled ? 1 : 0
220 |
221 | role = join("", aws_iam_role.lambda_forwarder_log[*].name)
222 | policy_arn = join("", aws_iam_policy.lambda_forwarder_log_s3[*].arn)
223 | }
224 |
225 | # Lambda Forwarder logs
226 | resource "aws_cloudwatch_log_group" "forwarder_log" {
227 | count = local.lambda_enabled && var.forwarder_log_enabled ? 1 : 0
228 |
229 | name = "/aws/lambda/${module.forwarder_log_label.id}"
230 | retention_in_days = var.forwarder_log_retention_days
231 |
232 | kms_key_id = var.kms_key_id
233 |
234 | tags = module.forwarder_log_label.tags
235 | }
236 |
237 | # CloudWatch Log Groups
238 | # Each subscribed log group needs an explicit permission to invoke the forwarder.
238 | resource "aws_lambda_permission" "cloudwatch_groups" {
239 | for_each = local.lambda_enabled && var.forwarder_log_enabled ? var.cloudwatch_forwarder_log_groups : {}
240 |
241 | statement_id = "datadog-forwarder-${each.key}-permission"
242 | action = "lambda:InvokeFunction"
243 | function_name = aws_lambda_function.forwarder_log[0].function_name
244 | principal = "logs.${local.aws_region}.amazonaws.com"
245 | source_arn = "${local.arn_format}:logs:${local.aws_region}:${local.aws_account_id}:log-group:${each.value.name}:*"
246 | }
247 |
248 | resource "aws_cloudwatch_log_subscription_filter" "cloudwatch_log_subscription_filter" {
249 | for_each = local.lambda_enabled && var.forwarder_log_enabled ? var.cloudwatch_forwarder_log_groups : {}
250 |
251 | name = module.forwarder_log_label.id
252 | log_group_name = each.value.name
253 | destination_arn = aws_lambda_function.forwarder_log[0].arn
254 | filter_pattern = each.value.filter_pattern
255 | }
256 |
257 | # EventBridge rules (one per configured pattern) are allowed to invoke the forwarder.
257 | resource "aws_lambda_permission" "allow_eventbridge" {
258 | for_each = local.lambda_enabled && var.forwarder_log_enabled ? var.cloudwatch_forwarder_event_patterns : {}
259 |
260 | action = "lambda:InvokeFunction"
261 | function_name = aws_lambda_function.forwarder_log[0].function_name
262 | principal = "events.amazonaws.com"
263 | source_arn = module.cloudwatch_event[each.key].aws_cloudwatch_event_rule_arn
264 | }
265 |
266 | module "cloudwatch_event" {
267 | source = "cloudposse/cloudwatch-events/aws"
268 | version = "0.6.1"
269 |
270 | for_each = local.lambda_enabled && var.forwarder_log_enabled ? var.cloudwatch_forwarder_event_patterns : {}
271 |
272 | name = each.key
273 | context = module.forwarder_log_label.context
274 |
275 | cloudwatch_event_rule_description = "${each.key} events forwarded to Datadog"
276 |
277 | # Any optional attributes that are not set will equal null, and CloudWatch doesn't like that.
278 | cloudwatch_event_rule_pattern = { for k, v in each.value : k => v if v != null }
279 | cloudwatch_event_target_arn = aws_lambda_function.forwarder_log[0].arn
280 | }
281 |
282 | module "tags_cache_s3_bucket" {
283 | # Bucket for storing lambda tags cache and logs which failed to post. https://docs.datadoghq.com/logs/guide/forwarder/?tab=cloudformation#upgrade-an-older-version-to-31060
284 | source = "cloudposse/s3-bucket/aws"
285 | version = "4.2.0"
286 |
287 | count = local.lambda_enabled && var.forwarder_use_cache_bucket ? 1 : 0
288 |
289 | attributes = concat(module.forwarder_log_label.attributes, ["cache"])
290 |
291 | context = module.forwarder_log_label.context
292 | }
293 |
--------------------------------------------------------------------------------
/lambda-rds.tf:
--------------------------------------------------------------------------------
1 | # The Datadog Lambda RDS enhanced monitoring code:
2 | # https://github.com/DataDog/datadog-serverless-functions/blob/master/aws/rds_enhanced_monitoring/lambda_function.py
3 | # This code can only read RDS Enhanced monitoring metrics from CloudWatch and nothing else.
4 | # If you'd like to read the Auth log from an Aurora cluster, you need to use the `lambda-log` Lambda function and pass the CloudWatch Group of the cluster/clusters
5 |
6 | locals {
7 | # Explicit URL override wins; otherwise fetch the forwarder source pinned to the release tag.
8 | # The previous URL fetched from `master` with a `?ref=` query string, which
9 | # raw.githubusercontent.com ignores — so dd_forwarder_version was not actually honored.
10 | # Tag naming matches the release assets used by the log forwarder (aws-dd-forwarder-<version>).
11 | forwarder_rds_artifact_url = var.forwarder_rds_artifact_url != null ? var.forwarder_rds_artifact_url : (
12 | "https://raw.githubusercontent.com/DataDog/datadog-serverless-functions/aws-dd-forwarder-${var.dd_forwarder_version}/aws/rds_enhanced_monitoring/lambda_function.py"
13 | )
14 | }
11 |
12 | module "forwarder_rds_label" {
13 | source = "cloudposse/label/null"
14 | version = "0.25.0"
15 |
16 | enabled = local.lambda_enabled && var.forwarder_rds_enabled
17 |
18 | attributes = ["rds"]
19 |
20 | context = module.this.context
21 | }
22 |
23 | # Downloads the single-file Python source; zipped below by data.archive_file.
23 | module "forwarder_rds_artifact" {
24 | count = local.lambda_enabled && var.forwarder_rds_enabled ? 1 : 0
25 |
26 | source = "cloudposse/module-artifact/external"
27 | version = "0.8.0"
28 |
29 | filename = "forwarder-rds.py"
30 | module_name = var.dd_module_name
31 | module_path = path.module
32 | url = local.forwarder_rds_artifact_url
33 | }
34 |
35 | # Zip the downloaded source so it can be uploaded as the Lambda package.
35 | data "archive_file" "forwarder_rds" {
36 | count = local.lambda_enabled && var.forwarder_rds_enabled ? 1 : 0
37 |
38 | type = "zip"
39 | source_file = module.forwarder_rds_artifact[0].file
40 | # Forwarder-specific archive name: this and the VPC-logs forwarder previously both wrote
41 | # "${path.module}/lambda.zip", so enabling both made the archives overwrite each other.
42 | output_path = "${path.module}/lambda-rds.zip"
43 | }
42 |
43 | # Execution role assumed by the RDS enhanced-monitoring forwarder Lambda.
43 | resource "aws_iam_role" "lambda_forwarder_rds" {
44 | count = local.lambda_enabled && var.forwarder_rds_enabled ? 1 : 0
45 |
46 | name = module.forwarder_rds_label.id
47 |
48 | path = var.forwarder_iam_path
49 | description = "Datadog Lambda RDS enhanced monitoring forwarder"
50 | assume_role_policy = data.aws_iam_policy_document.assume_role[0].json
51 | permissions_boundary = var.rds_permissions_boundary
52 | tags = module.forwarder_rds_label.tags
53 |
54 | # AWS will create the log group if needed. Make sure we create it first.
55 | depends_on = [aws_cloudwatch_log_group.forwarder_rds]
56 | }
57 |
58 | # Base policy (CloudWatch Logs write + API-key read) shared by all forwarders, defined in main.tf.
58 | resource "aws_iam_policy" "lambda_forwarder_rds" {
59 | count = local.lambda_enabled && var.forwarder_rds_enabled ? 1 : 0
60 |
61 | name = module.forwarder_rds_label.id
62 | path = var.forwarder_iam_path
63 | description = "Datadog Lambda RDS enhanced monitoring forwarder"
64 | policy = data.aws_iam_policy_document.lambda_default[0].json
65 | tags = module.forwarder_rds_label.tags
66 | }
67 |
68 | resource "aws_iam_role_policy_attachment" "lambda_forwarder_rds" {
69 | count = local.lambda_enabled && var.forwarder_rds_enabled ? 1 : 0
70 |
71 | role = aws_iam_role.lambda_forwarder_rds[0].name
72 | policy_arn = aws_iam_policy.lambda_forwarder_rds[0].arn
73 | }
74 |
75 | ######################################################################
76 | ## Create lambda function
77 |
78 | resource "aws_lambda_function" "forwarder_rds" {
79 | count = local.lambda_enabled && var.forwarder_rds_enabled ? 1 : 0
80 |
81 | #checkov:skip=BC_AWS_GENERAL_64: (Pertaining to Lambda DLQ) Vendor lambda does not have a means to reprocess failed events.
82 |
83 | description = "Datadog forwarder for RDS enhanced monitoring"
84 | filename = data.archive_file.forwarder_rds[0].output_path
85 | function_name = module.forwarder_rds_label.id
86 | role = aws_iam_role.lambda_forwarder_rds[0].arn
87 | handler = "forwarder-rds.lambda_handler" # module name matches the "forwarder-rds.py" artifact filename
88 | source_code_hash = data.archive_file.forwarder_rds[0].output_base64sha256
89 | runtime = var.lambda_runtime
90 | architectures = var.lambda_architectures
91 | memory_size = var.lambda_memory_size
92 | timeout = var.lambda_timeout
93 | reserved_concurrent_executions = var.lambda_reserved_concurrent_executions
94 | layers = var.forwarder_rds_layers
95 |
96 | # Attach to a VPC only when both subnets and security groups are supplied (either may be null).
96 | dynamic "vpc_config" {
97 | for_each = try(length(var.subnet_ids), 0) > 0 && try(length(var.security_group_ids), 0) > 0 ? [true] : []
98 | content {
99 | security_group_ids = var.security_group_ids
100 | subnet_ids = var.subnet_ids
101 | }
102 | }
103 |
104 | environment {
105 | variables = local.lambda_env
106 | }
107 |
108 | tracing_config {
109 | mode = var.tracing_config_mode
110 | }
111 |
112 | tags = module.forwarder_rds_label.tags
113 |
114 | # AWS will create the log group if needed. Make sure we create it first.
115 | depends_on = [aws_cloudwatch_log_group.forwarder_rds]
116 | }
117 |
118 | # RDSOSMetrics is the fixed log group AWS uses for RDS enhanced monitoring.
118 | resource "aws_lambda_permission" "cloudwatch_enhanced_rds_monitoring" {
119 | count = local.lambda_enabled && var.forwarder_rds_enabled ? 1 : 0
120 |
121 | statement_id = "datadog-forwarder-rds-cloudwatch-logs-permission"
122 | action = "lambda:InvokeFunction"
123 | function_name = aws_lambda_function.forwarder_rds[0].function_name
124 | principal = "logs.amazonaws.com"
125 | source_arn = "${local.arn_format}:logs:${local.aws_region}:${local.aws_account_id}:log-group:RDSOSMetrics:*"
126 | }
127 |
128 | resource "aws_cloudwatch_log_subscription_filter" "datadog_log_subscription_filter_rds" {
129 | count = local.lambda_enabled && var.forwarder_rds_enabled ? 1 : 0
130 |
131 | name = module.forwarder_rds_label.id
132 | log_group_name = "RDSOSMetrics"
133 | destination_arn = aws_lambda_function.forwarder_rds[0].arn
134 | filter_pattern = var.forwarder_rds_filter_pattern
135 | }
136 |
137 | resource "aws_cloudwatch_log_group" "forwarder_rds" {
138 | count = local.lambda_enabled && var.forwarder_rds_enabled ? 1 : 0
139 |
140 | name = "/aws/lambda/${module.forwarder_rds_label.id}"
141 | retention_in_days = var.forwarder_log_retention_days
142 | kms_key_id = var.kms_key_id
143 |
144 | tags = module.forwarder_rds_label.tags
145 | }
--------------------------------------------------------------------------------
/lambda-vpc-logs.tf:
--------------------------------------------------------------------------------
1 | # The Datadog Lambda forwarder for VPC flow logs code:
2 | # https://github.com/DataDog/datadog-serverless-functions/blob/master/aws/vpc_flow_log_monitoring/lambda_function.py
3 | # This code can only read VPC flow logs sent to a CloudWatch Log Group ( not from S3 )
4 |
5 | locals {
6 | # Explicit URL override wins; otherwise fetch the forwarder source pinned to the release tag.
7 | # The previous URL fetched from `master` with a `?ref=` query string, which
8 | # raw.githubusercontent.com ignores — so dd_forwarder_version was not actually honored.
9 | # Tag naming matches the release assets used by the log forwarder (aws-dd-forwarder-<version>).
10 | forwarder_vpc_logs_artifact_url = var.forwarder_vpc_logs_artifact_url != null ? var.forwarder_vpc_logs_artifact_url : (
11 | "https://raw.githubusercontent.com/DataDog/datadog-serverless-functions/aws-dd-forwarder-${var.dd_forwarder_version}/aws/vpc_flow_log_monitoring/lambda_function.py"
12 | )
13 | }
10 |
11 | module "forwarder_vpclogs_label" {
12 | source = "cloudposse/label/null"
13 | version = "0.25.0"
14 |
15 | enabled = local.lambda_enabled && var.forwarder_vpc_logs_enabled
16 |
17 | attributes = ["vpc-flow-logs"]
18 |
19 | context = module.this.context
20 | }
21 |
22 | # Downloads the single-file Python source; zipped below by data.archive_file.
22 | module "forwarder_vpclogs_artifact" {
23 | count = local.lambda_enabled && var.forwarder_vpc_logs_enabled ? 1 : 0
24 |
25 | source = "cloudposse/module-artifact/external"
26 | version = "0.8.0"
27 |
28 | filename = "lambda_function.py"
29 | module_name = var.dd_module_name
30 | module_path = path.module
31 | url = local.forwarder_vpc_logs_artifact_url
32 | }
33 |
34 | # Zip the downloaded source so it can be uploaded as the Lambda package.
34 | data "archive_file" "forwarder_vpclogs" {
35 | count = local.lambda_enabled && var.forwarder_vpc_logs_enabled ? 1 : 0
36 |
37 | type = "zip"
38 | source_file = module.forwarder_vpclogs_artifact[0].file
39 | # Forwarder-specific archive name: this and the RDS forwarder previously both wrote
40 | # "${path.module}/lambda.zip", so enabling both made the archives overwrite each other.
41 | output_path = "${path.module}/lambda-vpclogs.zip"
42 | }
41 |
42 | # Execution role assumed by the VPC Flow Logs forwarder Lambda.
42 | resource "aws_iam_role" "lambda_forwarder_vpclogs" {
43 | count = local.lambda_enabled && var.forwarder_vpc_logs_enabled ? 1 : 0
44 |
45 | name = module.forwarder_vpclogs_label.id
46 |
47 | path = var.forwarder_iam_path
48 | description = "Datadog Lambda VPC Flow Logs forwarder"
49 | assume_role_policy = data.aws_iam_policy_document.assume_role[0].json
50 | permissions_boundary = var.vpc_logs_permissions_boundary
51 | tags = module.forwarder_vpclogs_label.tags
52 |
53 | # AWS will create the log group if needed. Make sure we create it first.
54 | depends_on = [aws_cloudwatch_log_group.forwarder_vpclogs]
55 | }
56 |
57 | # Base policy (CloudWatch Logs write + API-key read) shared by all forwarders, defined in main.tf.
57 | resource "aws_iam_policy" "lambda_forwarder_vpclogs" {
58 | count = local.lambda_enabled && var.forwarder_vpc_logs_enabled ? 1 : 0
59 |
60 | name = module.forwarder_vpclogs_label.id
61 | path = var.forwarder_iam_path
62 | description = "Datadog Lambda VPC Flow Logs forwarder"
63 | policy = data.aws_iam_policy_document.lambda_default[0].json
64 | tags = module.forwarder_vpclogs_label.tags
65 | }
66 |
67 | resource "aws_iam_role_policy_attachment" "lambda_forwarder_vpclogs" {
68 | count = local.lambda_enabled && var.forwarder_vpc_logs_enabled ? 1 : 0
69 |
70 | role = aws_iam_role.lambda_forwarder_vpclogs[0].name
71 | policy_arn = aws_iam_policy.lambda_forwarder_vpclogs[0].arn
72 | }
73 |
74 | ######################################################################
75 | ## Create lambda function
76 |
77 | resource "aws_lambda_function" "forwarder_vpclogs" {
78 | count = local.lambda_enabled && var.forwarder_vpc_logs_enabled ? 1 : 0
79 |
80 | #checkov:skip=BC_AWS_GENERAL_64: (Pertaining to Lambda DLQ) Vendor lambda does not have a means to reprocess failed events.
81 |
82 | description = "Datadog Lambda forwarder for VPC Flow Logs"
83 | filename = data.archive_file.forwarder_vpclogs[0].output_path
84 | function_name = module.forwarder_vpclogs_label.id
85 | role = aws_iam_role.lambda_forwarder_vpclogs[0].arn
86 | handler = "lambda_function.lambda_handler" # module name matches the "lambda_function.py" artifact filename
87 | source_code_hash = data.archive_file.forwarder_vpclogs[0].output_base64sha256
88 | runtime = var.lambda_runtime
89 | architectures = var.lambda_architectures
90 | memory_size = var.lambda_memory_size
91 | timeout = var.lambda_timeout
92 | reserved_concurrent_executions = var.lambda_reserved_concurrent_executions
93 | layers = var.forwarder_vpc_logs_layers
94 |
95 | # Attach to a VPC only when both subnets and security groups are supplied (either may be null).
95 | dynamic "vpc_config" {
96 | for_each = try(length(var.subnet_ids), 0) > 0 && try(length(var.security_group_ids), 0) > 0 ? [true] : []
97 | content {
98 | security_group_ids = var.security_group_ids
99 | subnet_ids = var.subnet_ids
100 | }
101 | }
102 |
103 | environment {
104 | variables = local.lambda_env
105 | }
106 |
107 | tracing_config {
108 | mode = var.tracing_config_mode
109 | }
110 |
111 | tags = module.forwarder_vpclogs_label.tags
112 |
113 | # AWS will create the log group if needed. Make sure we create it first.
114 | depends_on = [aws_cloudwatch_log_group.forwarder_vpclogs]
115 | }
116 |
117 | # Allow the configured flow-logs log group to invoke the forwarder.
117 | resource "aws_lambda_permission" "cloudwatch_vpclogs" {
118 | count = local.lambda_enabled && var.forwarder_vpc_logs_enabled ? 1 : 0
119 |
120 | statement_id = "datadog-forwarder-flowlogs-cloudwatchlogs-permission"
121 | action = "lambda:InvokeFunction"
122 | function_name = aws_lambda_function.forwarder_vpclogs[0].function_name
123 | principal = "logs.amazonaws.com"
124 | source_arn = "${local.arn_format}:logs:${local.aws_region}:${local.aws_account_id}:log-group:${var.vpclogs_cloudwatch_log_group}:*"
125 | }
126 |
127 | resource "aws_cloudwatch_log_subscription_filter" "datadog_log_subscription_filter_vpclogs" {
128 | count = local.lambda_enabled && var.forwarder_vpc_logs_enabled ? 1 : 0
129 |
130 | name = module.forwarder_vpclogs_label.id
131 | log_group_name = var.vpclogs_cloudwatch_log_group
132 | destination_arn = aws_lambda_function.forwarder_vpclogs[0].arn
133 | filter_pattern = var.forwarder_vpclogs_filter_pattern
134 | }
135 |
136 | resource "aws_cloudwatch_log_group" "forwarder_vpclogs" {
137 | count = local.lambda_enabled && var.forwarder_vpc_logs_enabled ? 1 : 0
138 |
139 | name = "/aws/lambda/${module.forwarder_vpclogs_label.id}"
140 | retention_in_days = var.forwarder_log_retention_days
141 | kms_key_id = var.kms_key_id
142 |
143 | tags = module.forwarder_vpclogs_label.tags
144 | }
--------------------------------------------------------------------------------
/main.tf:
--------------------------------------------------------------------------------
1 | data "aws_caller_identity" "current" {
2 | count = local.enabled ? 1 : 0
3 | }
4 |
5 | data "aws_partition" "current" {
6 | count = local.enabled ? 1 : 0
7 | }
8 |
9 | data "aws_region" "current" {
10 | count = local.enabled ? 1 : 0
11 | }
12 |
13 | locals {
14 | enabled = module.this.enabled
15 | lambda_enabled = local.enabled
16 |
17 | # Partition-aware ARN prefix (e.g. "arn:aws", "arn:aws-us-gov"); empty when the module is disabled.
17 | arn_format = local.enabled ? "arn:${data.aws_partition.current[0].partition}" : ""
18 | aws_account_id = join("", data.aws_caller_identity.current[*].account_id)
19 | aws_region = join("", data.aws_region.current[*].name)
20 |
21 | dd_api_key_resource = var.dd_api_key_source.resource
22 | dd_api_key_identifier = var.dd_api_key_source.identifier
23 |
24 | # For "ssm": prefer the explicitly supplied ARN, else the ARN looked up from the parameter name,
25 | # else "". For "kms"/"asm" the identifier is already the ARN to authorize.
24 | dd_api_key_arn = local.dd_api_key_resource == "ssm" ? try(coalesce(var.api_key_ssm_arn, join("", data.aws_ssm_parameter.api_key[*].arn)), "") : local.dd_api_key_identifier
25 | dd_api_key_iam_actions = [lookup({ kms = "kms:Decrypt", asm = "secretsmanager:GetSecretValue", ssm = "ssm:GetParameter" }, local.dd_api_key_resource, "")]
26 | dd_api_key_kms = local.dd_api_key_resource == "kms" ? { DD_KMS_API_KEY = var.dd_api_key_kms_ciphertext_blob } : {}
27 | dd_api_key_asm = local.dd_api_key_resource == "asm" ? { DD_API_KEY_SECRET_ARN = local.dd_api_key_identifier } : {}
28 | dd_api_key_ssm = local.dd_api_key_resource == "ssm" ? { DD_API_KEY_SSM_NAME = local.dd_api_key_identifier } : {}
29 |
30 | dd_site = { DD_SITE = var.forwarder_lambda_datadog_host }
31 |
32 | # If map is supplied, merge map with context, or use only context
33 | # Convert map to dd tags equivalent
34 | # A map entry with a null value becomes a bare tag key ("key"); otherwise "key:value".
34 | dd_tags = length(var.dd_tags_map) > 0 ? [
35 | for tagk, tagv in var.dd_tags_map : (tagv != null ? format("%s:%s", tagk, tagv) : tagk)
36 | ] : var.dd_tags
37 | dd_tags_env = { DD_TAGS = join(",", local.dd_tags) }
38 |
39 | cache_bucket_env = var.forwarder_use_cache_bucket ? { DD_S3_BUCKET_NAME = one(module.tags_cache_s3_bucket[*].bucket_id), DD_STORE_FAILED_EVENTS = true } : {}
40 |
41 | lambda_debug = var.forwarder_lambda_debug_enabled ? { DD_LOG_LEVEL = "debug" } : {}
42 | # Later maps win on key collisions, so user-supplied environment variables override the computed ones.
42 | lambda_env = merge(local.dd_api_key_kms, local.dd_api_key_asm, local.dd_api_key_ssm, local.dd_site, local.lambda_debug, local.dd_tags_env, local.cache_bucket_env, var.datadog_forwarder_lambda_environment_variables)
43 | }
44 |
45 | # Log Forwarder, RDS Enhanced Forwarder, VPC Flow Log Forwarder
46 |
47 | # Resolve the SSM parameter's ARN from its name when no explicit ARN was supplied.
47 | data "aws_ssm_parameter" "api_key" {
48 | count = local.lambda_enabled && local.dd_api_key_resource == "ssm" && var.api_key_ssm_arn == null ? 1 : 0
49 |
50 | name = local.dd_api_key_identifier
51 | }
52 |
53 | ######################################################################
54 | ## Create a policy document to allow Lambda to assume role
55 |
# Trust policy that lets the AWS Lambda service assume the forwarder's IAM role.
data "aws_iam_policy_document" "assume_role" {
  count = local.lambda_enabled ? 1 : 0

  statement {
    effect = "Allow"

    # Only the Lambda service principal may assume this role
    principals {
      type        = "Service"
      identifiers = ["lambda.amazonaws.com"]
    }

    actions = [
      "sts:AssumeRole"
    ]
  }
}
72 |
73 | ######################################################################
74 | ## Create Lambda policy and attach it to the Lambda role
75 |
# Optional customer-supplied IAM policy, created only when
# `lambda_policy_source_json` is non-empty. Its document is merged into the
# default Lambda policy via `source_policy_documents`.
resource "aws_iam_policy" "datadog_custom_policy" {
  count = local.lambda_enabled && length(var.lambda_policy_source_json) > 0 ? 1 : 0

  name   = var.lambda_custom_policy_name
  policy = var.lambda_policy_source_json

  tags = module.this.tags
}
84 |
# Default execution policy for the forwarder Lambdas: write access to
# CloudWatch Logs plus read/decrypt access to the single Datadog API key ARN.
data "aws_iam_policy_document" "lambda_default" {
  count = local.lambda_enabled ? 1 : 0

  # #checkov:skip=BC_AWS_IAM_57: (Pertaining to constraining IAM write access) This policy has no write access and is restricted to one specific ARN.

  # Merge in the optional custom policy when one was provided
  source_policy_documents = local.lambda_enabled && length(var.lambda_policy_source_json) > 0 ? [aws_iam_policy.datadog_custom_policy[0].policy] : []

  statement {
    sid = "AllowWriteLogs"

    effect = "Allow"

    actions = [
      "logs:CreateLogGroup",
      "logs:CreateLogStream",
      "logs:PutLogEvents"
    ]

    resources = ["*"]
  }

  statement {
    sid = "AllowGetOrDecryptApiKey"

    effect = "Allow"

    # Action depends on the key source: kms:Decrypt, secretsmanager:GetSecretValue,
    # or ssm:GetParameter (see local.dd_api_key_iam_actions)
    actions = local.dd_api_key_iam_actions

    resources = [local.dd_api_key_arn]
  }
}
116 |
--------------------------------------------------------------------------------
/outputs.tf:
--------------------------------------------------------------------------------
# Outputs expose the ARN and function name of each forwarder Lambda.
# Each output is null when that forwarder (or the module) is disabled;
# otherwise `join` collapses the single-element splat to a plain string.
output "lambda_forwarder_rds_function_arn" {
  description = "Datadog Lambda forwarder RDS Enhanced Monitoring function ARN"
  value       = local.lambda_enabled && var.forwarder_rds_enabled ? join("", aws_lambda_function.forwarder_rds[*].arn) : null
}

output "lambda_forwarder_rds_enhanced_monitoring_function_name" {
  description = "Datadog Lambda forwarder RDS Enhanced Monitoring function name"
  value       = local.lambda_enabled && var.forwarder_rds_enabled ? join("", aws_lambda_function.forwarder_rds[*].function_name) : null
}

output "lambda_forwarder_log_function_arn" {
  description = "Datadog Lambda forwarder CloudWatch/S3 function ARN"
  value       = local.lambda_enabled && var.forwarder_log_enabled ? join("", aws_lambda_function.forwarder_log[*].arn) : null
}

output "lambda_forwarder_log_function_name" {
  description = "Datadog Lambda forwarder CloudWatch/S3 function name"
  value       = local.lambda_enabled && var.forwarder_log_enabled ? join("", aws_lambda_function.forwarder_log[*].function_name) : null
}

output "lambda_forwarder_vpc_log_function_arn" {
  description = "Datadog Lambda forwarder VPC Flow Logs function ARN"
  value       = local.lambda_enabled && var.forwarder_vpc_logs_enabled ? join("", aws_lambda_function.forwarder_vpclogs[*].arn) : null
}

output "lambda_forwarder_vpc_log_function_name" {
  description = "Datadog Lambda forwarder VPC Flow Logs function name"
  value       = local.lambda_enabled && var.forwarder_vpc_logs_enabled ? join("", aws_lambda_function.forwarder_vpclogs[*].function_name) : null
}
30 |
--------------------------------------------------------------------------------
/test/.gitignore:
--------------------------------------------------------------------------------
1 | .test-harness
2 |
--------------------------------------------------------------------------------
/test/Makefile:
--------------------------------------------------------------------------------
TEST_HARNESS ?= https://github.com/cloudposse/test-harness.git
TEST_HARNESS_BRANCH ?= master
TEST_HARNESS_PATH = $(realpath .test-harness)
BATS_ARGS ?= --tap
BATS_LOG ?= test.log

# Define a macro to run the bats test suites named in $(TESTS) from the
# test-harness checkout, with $(1) as the working directory
define RUN_TESTS
	@echo "Running tests in $(1)"
	@cd $(1) && bats $(BATS_ARGS) $(addsuffix .bats,$(addprefix $(TEST_HARNESS_PATH)/test/terraform/,$(TESTS)))
endef

default: all

-include Makefile.*

## Provision the test-harness
.test-harness:
	[ -d $@ ] || git clone --depth=1 -b $(TEST_HARNESS_BRANCH) $(TEST_HARNESS) $@

## Initialize the tests
init: .test-harness

## Install all dependencies (OS specific)
deps::
	@exit 0

## Clean up the test harness
# Guard against `realpath` resolving to "/" before rm -rf.
# Note: POSIX `test` uses `=` for string comparison; `==` is a bash extension
# and fails when /bin/sh is dash (e.g. Debian/Ubuntu).
clean:
	[ "$(TEST_HARNESS_PATH)" = "/" ] || rm -rf $(TEST_HARNESS_PATH)

## Run all tests
all: module examples/complete

## Run basic sanity checks against the module itself
module: export TESTS ?= installed lint module-pinning provider-pinning validate terraform-docs input-descriptions output-descriptions
module: deps
	$(call RUN_TESTS, ../)

## Run tests against example
examples/complete: export TESTS ?= installed lint validate
examples/complete: deps
	$(call RUN_TESTS, ../$@)
--------------------------------------------------------------------------------
/test/Makefile.alpine:
--------------------------------------------------------------------------------
# Alpine-only dependency hook: when apk is present, pull terraform-docs and
# json2hcl from the cloudposse apk repository. On other systems this file
# contributes nothing and the base `deps::` rule applies.
ifneq (,$(wildcard /sbin/apk))
## Install all dependencies for alpine
deps:: init
	@apk add --update terraform-docs@cloudposse json2hcl@cloudposse
endif
6 |
--------------------------------------------------------------------------------
/test/src/.gitignore:
--------------------------------------------------------------------------------
1 | .gopath
2 | vendor/
3 |
--------------------------------------------------------------------------------
/test/src/Makefile:
--------------------------------------------------------------------------------
# Resolve the current Terraform MAJOR version (e.g. "1") from the HashiCorp
# checkpoint API — `cut -d. -f1` keeps only the first dotted component,
# matching the /usr/local/terraform/<major>/bin layout used in the PATH below.
export TERRAFORM_VERSION ?= $(shell curl -s https://checkpoint-api.hashicorp.com/v1/check/terraform | jq -r -M '.current_version' | cut -d. -f1)

.DEFAULT_GOAL : all

.PHONY: all
## Default target
all: test

.PHONY : init
## Initialize tests
init:
	@exit 0

.PHONY : test
## Run tests
test: init
	go mod download
	go test -v -timeout 10m

## Run tests in docker container
# Forwards AWS credentials and GITHUB_TOKEN from the host environment;
# mounts the repo root at /module and runs this same Makefile's `test` target.
docker/test:
	docker run --name terratest --rm -it -e AWS_ACCESS_KEY_ID -e AWS_SECRET_ACCESS_KEY -e AWS_SESSION_TOKEN -e GITHUB_TOKEN \
	-e PATH="/usr/local/terraform/$(TERRAFORM_VERSION)/bin:/go/bin:/usr/local/go/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin" \
	-v $(CURDIR)/../../:/module/ cloudposse/test-harness:latest -C /module/test/src test

.PHONY : clean
## Clean up files
clean:
	rm -rf ../../examples/complete/*.tfstate*
--------------------------------------------------------------------------------
/test/src/examples_complete_test.go:
--------------------------------------------------------------------------------
1 | package test
2 |
3 | import (
4 | "os/exec"
5 | "regexp"
6 | "strings"
7 | "testing"
8 |
9 | "github.com/gruntwork-io/terratest/modules/random"
10 | "github.com/gruntwork-io/terratest/modules/terraform"
11 | "github.com/stretchr/testify/assert"
12 | "k8s.io/apimachinery/pkg/util/runtime"
13 | )
14 |
// Test the Terraform module in examples/complete using Terratest:
// apply the example with randomized attributes, then verify the log
// forwarder Lambda's name via the module output.
func TestExamplesComplete(t *testing.T) {
	// This module needs to be run inside a Git Repository, so we cannot run it in parallel
	// t.Parallel()

	// If running on a GitHub Action Runner, invoke the necessary blessing:
	// mark the workspace checkout as a git "safe.directory" so git commands
	// invoked during the test do not fail on mismatched ownership.
	cmd := exec.Command("bash", "-c", "if [[ -d /__w/terraform-aws-datadog-lambda-forwarder/terraform-aws-datadog-lambda-forwarder ]]; then git config --global --add safe.directory /__w/terraform-aws-datadog-lambda-forwarder/terraform-aws-datadog-lambda-forwarder; fi")
	var stdout strings.Builder
	cmd.Stdout = &stdout
	var stderr strings.Builder
	cmd.Stderr = &stderr

	// A failure here is logged but deliberately non-fatal — the test proceeds.
	if err := cmd.Run(); err != nil {
		t.Logf("Running command: %s", cmd.String())
		t.Logf("command stdout: %s", stdout.String())
		t.Logf("command stderr: %s", stderr.String())
		t.Log(err)
	} else if stdout.Len() > 0 || stderr.Len() > 0 {
		t.Logf("Running command: %s", cmd.String())
		t.Logf("command stdout: %s", stdout.String())
		t.Logf("command stderr: %s", stderr.String())
	}

	// Randomize the "attributes" input so concurrent runs do not collide on names
	randID := strings.ToLower(random.UniqueId())
	attributes := []string{randID}

	varFiles := []string{"fixtures.us-east-2.tfvars"}

	terraformOptions := &terraform.Options{
		// The path to where our Terraform code is located
		TerraformDir: "../../examples/complete",
		Upgrade:      true,
		// Variables to pass to our Terraform code using -var-file options
		VarFiles: varFiles,
		Vars: map[string]interface{}{
			"attributes": attributes,
		},
	}

	// At the end of the test, run `terraform destroy` to clean up any resources that were created
	defer terraform.Destroy(t, terraformOptions)

	// If the Go runtime crashes, run `terraform destroy` to clean up any resources that were created
	defer runtime.HandleCrash(func(i interface{}) {
		defer terraform.Destroy(t, terraformOptions)
	})

	// This will run `terraform init` and `terraform apply` and fail the test if there are any errors
	terraform.InitAndApply(t, terraformOptions)

	// The expected name embeds the random ID generated above
	lambdaFunctionName := terraform.Output(t, terraformOptions, "lambda_forwarder_log_function_name")
	assert.Equal(t, "eg-ue2-test-datadog-lambda-forwarder-"+randID+"-logs", lambdaFunctionName)
}
68 |
// Apply the example with `enabled = false` and verify the module creates
// no resources at all (the standard Cloud Posse "disabled" contract).
func TestExamplesCompleteDisabled(t *testing.T) {
	// This module needs to be run inside a Git Repository, so we cannot run it in parallel
	// t.Parallel()

	// Randomize the "attributes" input so concurrent runs do not collide on names
	randID := strings.ToLower(random.UniqueId())
	attributes := []string{randID}

	varFiles := []string{"fixtures.us-east-2.tfvars"}

	terraformOptions := &terraform.Options{
		// The path to where our Terraform code is located
		TerraformDir: "../../examples/complete",
		Upgrade:      true,
		// Variables to pass to our Terraform code using -var-file options
		VarFiles: varFiles,
		Vars: map[string]interface{}{
			"attributes": attributes,
			"enabled":    false,
		},
	}

	// At the end of the test, run `terraform destroy` to clean up any resources that were created
	defer terraform.Destroy(t, terraformOptions)

	// If the Go runtime crashes, run `terraform destroy` to clean up any resources that were created
	defer runtime.HandleCrash(func(i interface{}) {
		defer terraform.Destroy(t, terraformOptions)
	})

	// This will run `terraform init` and `terraform apply` and fail the test if there are any errors
	results := terraform.InitAndApply(t, terraformOptions)

	// Should complete successfully without creating or changing any resources.
	// Extract the "Resources:" section of the output to make the error message more readable.
	re := regexp.MustCompile(`Resources: [^.]+\.`)
	match := re.FindString(results)
	assert.Equal(t, "Resources: 0 added, 0 changed, 0 destroyed.", match, "Re-applying the same configuration should not change any resources")
}
107 |
--------------------------------------------------------------------------------
/test/src/go.mod:
--------------------------------------------------------------------------------
1 | module github.com/cloudposse/terraform-aws-datadog-lambda-forwarder
2 |
3 | go 1.20
4 |
5 | require (
6 | github.com/gruntwork-io/terratest v0.42.0
7 | github.com/stretchr/testify v1.8.4
8 | k8s.io/apimachinery v0.25.12
9 | )
10 |
11 | require (
12 | cloud.google.com/go v0.105.0 // indirect
13 | cloud.google.com/go/compute v1.12.1 // indirect
14 | cloud.google.com/go/compute/metadata v0.2.1 // indirect
15 | cloud.google.com/go/iam v0.7.0 // indirect
16 | cloud.google.com/go/storage v1.27.0 // indirect
17 | github.com/agext/levenshtein v1.2.3 // indirect
18 | github.com/apparentlymart/go-textseg/v13 v13.0.0 // indirect
19 | github.com/aws/aws-sdk-go v1.44.122 // indirect
20 | github.com/bgentry/go-netrc v0.0.0-20140422174119-9fd32a8b3d3d // indirect
21 | github.com/davecgh/go-spew v1.1.1 // indirect
22 | github.com/go-logr/logr v1.2.3 // indirect
23 | github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da // indirect
24 | github.com/golang/protobuf v1.5.3 // indirect
25 | github.com/google/go-cmp v0.5.9 // indirect
26 | github.com/google/uuid v1.3.0 // indirect
27 | github.com/googleapis/enterprise-certificate-proxy v0.2.0 // indirect
28 | github.com/googleapis/gax-go/v2 v2.7.0 // indirect
29 | github.com/hashicorp/errwrap v1.0.0 // indirect
30 | github.com/hashicorp/go-cleanhttp v0.5.2 // indirect
31 | github.com/hashicorp/go-getter v1.7.1 // indirect
32 | github.com/hashicorp/go-multierror v1.1.0 // indirect
33 | github.com/hashicorp/go-safetemp v1.0.0 // indirect
34 | github.com/hashicorp/go-version v1.6.0 // indirect
35 | github.com/hashicorp/hcl/v2 v2.9.1 // indirect
36 | github.com/hashicorp/terraform-json v0.13.0 // indirect
37 | github.com/jinzhu/copier v0.0.0-20190924061706-b57f9002281a // indirect
38 | github.com/jmespath/go-jmespath v0.4.0 // indirect
39 | github.com/klauspost/compress v1.15.11 // indirect
40 | github.com/mattn/go-zglob v0.0.2-0.20190814121620-e3c945676326 // indirect
41 | github.com/mitchellh/go-homedir v1.1.0 // indirect
42 | github.com/mitchellh/go-testing-interface v1.14.1 // indirect
43 | github.com/mitchellh/go-wordwrap v1.0.1 // indirect
44 | github.com/pmezard/go-difflib v1.0.0 // indirect
45 | github.com/tmccombs/hcl2json v0.3.3 // indirect
46 | github.com/ulikunitz/xz v0.5.10 // indirect
47 | github.com/zclconf/go-cty v1.9.1 // indirect
48 | go.opencensus.io v0.24.0 // indirect
49 | golang.org/x/crypto v0.14.0 // indirect
50 | golang.org/x/net v0.17.0 // indirect
51 | golang.org/x/oauth2 v0.1.0 // indirect
52 | golang.org/x/sys v0.13.0 // indirect
53 | golang.org/x/text v0.13.0 // indirect
54 | golang.org/x/xerrors v0.0.0-20220907171357-04be3eba64a2 // indirect
55 | google.golang.org/api v0.103.0 // indirect
56 | google.golang.org/appengine v1.6.7 // indirect
57 | google.golang.org/genproto v0.0.0-20221201164419-0e50fba7f41c // indirect
58 | google.golang.org/grpc v1.51.0 // indirect
59 | google.golang.org/protobuf v1.31.0 // indirect
60 | gopkg.in/yaml.v3 v3.0.1 // indirect
61 | k8s.io/klog/v2 v2.90.1 // indirect
62 | )
63 |
--------------------------------------------------------------------------------
/variables.tf:
--------------------------------------------------------------------------------
# Optional VPC placement for the forwarder Lambdas.
# NOTE(review): presumably both subnet_ids and security_group_ids must be set
# together for VPC deployment — confirm against the lambda resources.
variable "subnet_ids" {
  description = "List of subnet IDs to use when deploying the Lambda Function in a VPC"
  type        = list(string)
  default     = null
}

variable "security_group_ids" {
  description = "List of security group IDs to use when the Lambda Function runs in a VPC"
  type        = list(string)
  default     = null
}

# https://docs.aws.amazon.com/lambda/latest/dg/gettingstarted-limits.html
variable "lambda_memory_size" {
  type        = number
  description = "Amount of memory in MB your Lambda Function can use at runtime"
  default     = 128
}

variable "lambda_reserved_concurrent_executions" {
  type        = number
  description = "Amount of reserved concurrent executions for the lambda function. A value of 0 disables Lambda from being triggered and -1 removes any concurrency limitations. Defaults to Unreserved Concurrency Limits -1"
  default     = -1
}

variable "datadog_forwarder_lambda_environment_variables" {
  type        = map(string)
  default     = {}
  description = "Map of environment variables to pass to the Lambda Function"
}

variable "lambda_runtime" {
  type        = string
  description = "Runtime environment for Datadog Lambda"
  default     = "python3.11"
}

variable "lambda_architectures" {
  type        = list(string)
  description = "Instruction set architecture for your Lambda function. Valid values are [\"x86_64\"] and [\"arm64\"]."
  default     = null
}

variable "lambda_timeout" {
  type        = number
  description = "Amount of time your Datadog Lambda Function has to run in seconds"
  default     = 120
}

# AWS X-Ray tracing mode for the Lambda functions
variable "tracing_config_mode" {
  type        = string
  description = "Can be either PassThrough or Active. If PassThrough, Lambda will only trace the request from an upstream service if it contains a tracing header with 'sampled=1'. If Active, Lambda will respect any tracing header it receives from an upstream service"
  default     = "PassThrough"
}
55 |
# Where to find the Datadog API key: a KMS-encrypted blob, an AWS Secrets
# Manager secret, or an SSM parameter. `resource` selects the mechanism and
# `identifier` is the corresponding ARN or parameter name.
variable "dd_api_key_source" {
  description = "One of: ARN for AWS Secrets Manager (asm) to retrieve the Datadog (DD) api key, ARN for the KMS (kms) key used to decrypt the ciphertext_blob of the api key, or the name of the SSM (ssm) parameter used to retrieve the Datadog API key"
  type = object({
    resource   = string
    identifier = string
  })

  default = {
    resource   = ""
    identifier = ""
  }

  # Resource can be one of kms, asm, ssm ("" to disable all lambda resources).
  # Use an exact membership test instead of an unanchored regex: the previous
  # `can(regex("(kms|asm|ssm)", ...))` accepted any string merely containing
  # one of those substrings (e.g. "ssm-foo"), which would pass validation but
  # misbehave later when compared with == against "kms"/"asm"/"ssm".
  validation {
    condition     = contains(["kms", "asm", "ssm", ""], var.dd_api_key_source.resource)
    error_message = "Provide one, and only one, ARN for (kms, asm) or name (ssm) to retrieve or decrypt Datadog api key."
  }

  # Check KMS ARN format
  validation {
    condition     = var.dd_api_key_source.resource == "kms" ? can(regex("arn:.*:kms:.*:key/.*", var.dd_api_key_source.identifier)) : true
    error_message = "ARN for KMS key does not appear to be valid format (example: arn:aws:kms:us-west-2:111122223333:key/1234abcd-12ab-34cd-56ef-1234567890ab)."
  }

  # Check ASM ARN format
  validation {
    condition     = var.dd_api_key_source.resource == "asm" ? can(regex("arn:.*:secretsmanager:.*:secret:.*", var.dd_api_key_source.identifier)) : true
    error_message = "ARN for AWS Secrets Manager (asm) does not appear to be valid format (example: arn:aws:secretsmanager:us-west-2:111122223333:secret:aes128-1a2b3c)."
  }

  # Check SSM name format (either a bare parameter name or a full parameter ARN)
  validation {
    condition     = var.dd_api_key_source.resource == "ssm" ? can(regex("^[a-zA-Z0-9_./-]+$", var.dd_api_key_source.identifier)) || can(regex("^arn:[^:]*:ssm:[^:]*:[^:]*:parameter/[a-zA-Z0-9_./-]+$", var.dd_api_key_source.identifier)) : true
    error_message = "API key source identifier must either be full arn or name of SSM parameter. Acceptable characters for name are `a-zA-Z0-9_.-` and `/` to delineate hierarchies."
  }
}
92 |
# Only used when dd_api_key_source.resource == "kms"
variable "dd_api_key_kms_ciphertext_blob" {
  type        = string
  description = "CiphertextBlob stored in environment variable DD_KMS_API_KEY used by the lambda function, along with the KMS key, to decrypt Datadog API key"
  default     = ""
}

variable "dd_artifact_filename" {
  type        = string
  description = "The Datadog artifact filename minus extension"
  default     = "aws-dd-forwarder"
}

variable "dd_module_name" {
  type        = string
  description = "The Datadog GitHub repository name"
  default     = "datadog-serverless-functions"
}

variable "dd_forwarder_version" {
  type        = string
  description = "Version tag of Datadog lambdas to use. https://github.com/DataDog/datadog-serverless-functions/releases"
  default     = "3.116.0"
}

# Per-forwarder toggles — each enables one of the three Lambda functions
# defined in lambda-log.tf / lambda-rds.tf / lambda-vpc-logs.tf
variable "forwarder_log_enabled" {
  type        = bool
  description = "Flag to enable or disable Datadog log forwarder"
  default     = false
}

variable "forwarder_rds_enabled" {
  type        = bool
  description = "Flag to enable or disable Datadog RDS enhanced monitoring forwarder"
  default     = false
}

variable "forwarder_vpc_logs_enabled" {
  type        = bool
  description = "Flag to enable or disable Datadog VPC flow log forwarder"
  default     = false
}

variable "forwarder_log_retention_days" {
  type        = number
  description = "Number of days to retain Datadog forwarder lambda execution logs. One of [0 1 3 5 7 14 30 60 90 120 150 180 365 400 545 731 1827 3653]"
  default     = 14
}

variable "kms_key_id" {
  type        = string
  description = "Optional KMS key ID to encrypt Datadog Lambda function logs"
  default     = null
}

variable "s3_buckets" {
  type        = list(string)
  description = "The names of S3 buckets to forward logs to Datadog"
  default     = []
}
152 |
# Like `s3_buckets`, but each entry also restricts forwarding to a key prefix.
variable "s3_buckets_with_prefixes" {
  type = map(object({ bucket_name : string, bucket_prefix : string }))
  # Description reworded: original read "The names S3 buckets and prefix",
  # which was ungrammatical.
  description = "The names of S3 buckets and their key prefixes from which to forward logs to Datadog"
  default     = {}
}
158 |
# KMS keys the log forwarder must be able to decrypt with in order to read
# objects from encrypted source buckets
variable "s3_bucket_kms_arns" {
  type        = list(string)
  description = "List of KMS key ARNs for s3 bucket encryption"
  default     = []
}
164 |
165 | variable "cloudwatch_forwarder_log_groups" {
166 | type = map(object({
167 | name = string
168 | filter_pattern = string
169 | }))
170 | description = < CloudWatch Event patterns to forward to Datadog. Event structure from here:
318 | Example:
319 | ```hcl
320 | cloudwatch_forwarder_event_rules = {
321 | "guardduty" = {
322 | source = ["aws.guardduty"]
323 | detail-type = ["GuardDuty Finding"]
324 | }
325 | "ec2-terminated" = {
326 | source = ["aws.ec2"]
327 | detail-type = ["EC2 Instance State-change Notification"]
328 | detail = {
329 | state = ["terminated"]
330 | }
331 | }
332 | }
333 | ```
334 | EOF
335 | default = {}
336 | }
337 |
# When true, an S3 cache bucket is created (module.tags_cache_s3_bucket) and
# exposed to the forwarder via DD_S3_BUCKET_NAME / DD_STORE_FAILED_EVENTS.
variable "forwarder_use_cache_bucket" {
  type        = bool
  description = "Flag to enable or disable the cache bucket for lambda tags and failed events. See https://docs.datadoghq.com/logs/guide/forwarder/?tab=cloudformation#upgrade-an-older-version-to-31060. Recommended for forwarder versions 3.106 and higher."
  default     = true
}
343 |
--------------------------------------------------------------------------------
/versions.tf:
--------------------------------------------------------------------------------
# Version constraints for the root module.
# The archive provider is used to package the Lambda deployment artifacts.
terraform {
  required_version = ">= 1.3.0"

  required_providers {
    aws = {
      source  = "hashicorp/aws"
      version = ">= 3.0"
    }
    archive = {
      source  = "hashicorp/archive"
      version = ">= 2.2.0"
    }
  }
}
15 |
--------------------------------------------------------------------------------