├── .github ├── CODEOWNERS ├── ISSUE_TEMPLATE │ ├── bug_report_template.md │ └── feature_request_template.md ├── pull_request_template.md ├── stale.yaml └── workflows │ ├── build.yml │ ├── codeql-analysis.yml │ ├── integration_tests.yml │ ├── monitor_api_integration_test.yml │ └── publish.yml ├── .gitignore ├── .npmignore ├── .nvmrc ├── .prettierignore ├── .prettierrc ├── CHANGELOG.md ├── CODEOWNERS ├── CONTRIBUTING.md ├── LICENSE ├── LICENSE-3rdparty.csv ├── NOTICE ├── README.md ├── integration_tests ├── FunctionLevelLayer │ └── file.txt ├── ProviderLevelLayer │ └── file.txt ├── correct_extension_apigateway_snapshot.json ├── correct_extension_snapshot.json ├── correct_forwarder_snapshot.json ├── js_handler.js ├── monitors.integration.spec.ts ├── py_handler.py ├── rb_handler.rb ├── serverless-extension-apigateway.yml ├── serverless-extension.yml └── serverless-forwarder.yml ├── jest.config.js ├── jest.integration.config.js ├── package.json ├── scripts ├── check_layers_json.sh ├── compare_snapshots.py ├── generate_layers_json.sh ├── generate_third_party_license_file.py ├── publish_prod.sh └── run_integration_tests.sh ├── src ├── env.spec.ts ├── env.ts ├── forwarder.spec.ts ├── forwarder.ts ├── git.ts ├── index.spec.ts ├── index.ts ├── layer.spec.ts ├── layer.ts ├── layers-gov.json ├── layers.json ├── monitor-api-requests.spec.ts ├── monitor-api-requests.ts ├── monitors.spec.ts ├── monitors.ts ├── output.ts ├── span-link.spec.ts ├── span-link.ts ├── step-functions-helper.spec.ts ├── step-functions-helper.ts ├── tracing.ts ├── wrapper.spec.ts └── wrapper.ts ├── testEnvironment.js ├── tsconfig.json ├── tslint.json ├── webpack.config.js └── yarn.lock /.github/CODEOWNERS: -------------------------------------------------------------------------------- 1 | * @DataDog/serverless-onboarding-enablement 2 | 3 | # Documentation 4 | *.md @DataDog/documentation 5 | -------------------------------------------------------------------------------- 
/.github/ISSUE_TEMPLATE/bug_report_template.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Bug Report 3 | about: Create a report to help us improve 4 | --- 5 | 6 | ## Expected Behavior 7 | 8 | 9 | ## Actual Behavior 10 | 11 | 12 | ## Steps to Reproduce the Problem 13 | 14 | 1. 15 | 1. 16 | 1. 17 | 18 | ## Specifications 19 | 20 | - Serverless Framework version: 21 | - Datadog Serverless Plugin version: 22 | - Lambda function runtime (Python 3.7, Node 10, etc.): 23 | 24 | ## Stacktrace 25 | 26 | ``` 27 | Paste here 28 | ``` -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/feature_request_template.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Feature Request 3 | about: Suggest an idea 4 | --- 5 | 6 | ## Expected Behavior 7 | 8 | 9 | ## Actual Behavior 10 | 11 | 12 | ## Steps to Reproduce the Problem 13 | 14 | 1. 15 | 1. 16 | 1. 17 | 18 | ## Specifications 19 | 20 | - Serverless Framework version: 21 | - Datadog Serverless Plugin version: 22 | - Lambda function runtime (Python 3.7, Node 10, etc.): 23 | 24 | ## Stacktrace 25 | 26 | ``` 27 | Paste here 28 | ``` -------------------------------------------------------------------------------- /.github/pull_request_template.md: -------------------------------------------------------------------------------- 1 | 2 | 3 | ### What does this PR do? 4 | 5 | 6 | 7 | ### Motivation 8 | 9 | 10 | 11 | ### Testing Guidelines 12 | 13 | 14 | 15 | ### Additional Notes 16 | 17 | 18 | 19 | ### Types of changes 20 | 21 | - [ ] Bug fix 22 | - [ ] New feature 23 | - [ ] Breaking change 24 | - [ ] Misc (docs, refactoring, dependency upgrade, etc.) 
25 | 26 | ### Check all that apply 27 | 28 | - [ ] This PR's description is comprehensive 29 | - [ ] This PR contains breaking changes that are documented in the description 30 | - [ ] This PR introduces new APIs or parameters that are documented and unlikely to change in the foreseeable future 31 | - [ ] This PR impacts documentation, and it has been updated (or a ticket has been logged) 32 | - [ ] This PR's changes are covered by the automated tests 33 | - [ ] This PR collects user input/sensitive content into Datadog 34 | -------------------------------------------------------------------------------- /.github/stale.yaml: -------------------------------------------------------------------------------- 1 | # Number of days of inactivity before an issue becomes stale 2 | daysUntilStale: 60 3 | # Number of days of inactivity before a stale issue is closed 4 | daysUntilClose: 7 5 | # Issues with these labels will never be considered stale 6 | exemptLabels: 7 | - pinned 8 | # Label to use when marking an issue as stale 9 | staleLabel: wontfix 10 | # Comment to post when marking an issue as stale. Set to `false` to disable 11 | markComment: > 12 | This issue has been automatically marked as stale and it will be closed 13 | if no further activity occurs. Thank you for your contributions! You can 14 | also find us in the \#serverless channel from the 15 | [Datadog community Slack](https://chat.datadoghq.com/). 16 | # Comment to post when closing a stale issue. 
Set to `false` to disable 17 | closeComment: false 18 | 19 | -------------------------------------------------------------------------------- /.github/workflows/build.yml: -------------------------------------------------------------------------------- 1 | name: build 2 | 3 | on: [push, pull_request] 4 | 5 | jobs: 6 | lint: 7 | runs-on: ubuntu-latest 8 | 9 | steps: 10 | - name: Checkout 11 | uses: actions/checkout@v3 12 | 13 | - name: Set up Node 22 14 | uses: actions/setup-node@v3 15 | with: 16 | node-version: 22 17 | 18 | - name: Cache Node modules 19 | id: cache-node-modules 20 | uses: actions/cache@0c907a75c2c80ebcb7f088228285e798b750cf8f # v4.2.1 21 | with: 22 | path: "**/node_modules" 23 | key: ${{ runner.os }}-modules-${{ hashFiles('**/yarn.lock') }} 24 | 25 | - name: Install dependencies 26 | if: steps.cache-node-modules.outputs.cache-hit != 'true' 27 | run: yarn install 28 | 29 | - name: Check formatting 30 | run: yarn check-formatting 31 | 32 | - name: Lint 33 | run: yarn lint 34 | 35 | - name: Install depcheck 36 | run: yarn global add depcheck 37 | 38 | - name: Run depcheck 39 | run: depcheck --ignores="@types/jest,serverless-step-functions" 40 | 41 | test: 42 | runs-on: ubuntu-latest 43 | strategy: 44 | max-parallel: 4 45 | matrix: 46 | node-version: [18, 22] 47 | 48 | steps: 49 | - name: Checkout 50 | uses: actions/checkout@v3 51 | 52 | - name: Set up Node ${{ matrix.node-version }} 53 | uses: actions/setup-node@v3 54 | with: 55 | node-version: ${{ matrix.node-version }} 56 | 57 | - name: Cache Node modules 58 | id: cache-node-modules 59 | uses: actions/cache@0c907a75c2c80ebcb7f088228285e798b750cf8f # v4.2.1 60 | with: 61 | path: "**/node_modules" 62 | key: ${{ runner.os }}-modules-${{ hashFiles('**/yarn.lock') }} 63 | 64 | - name: Install dependencies 65 | if: steps.cache-node-modules.outputs.cache-hit != 'true' 66 | run: yarn install 67 | 68 | - name: Build 69 | run: yarn build 70 | 71 | - name: Run tests 72 | run: yarn test 73 | 74 | - name: Upload 
code coverage report 75 | run: bash <(curl -s https://codecov.io/bash) 76 | -------------------------------------------------------------------------------- /.github/workflows/codeql-analysis.yml: -------------------------------------------------------------------------------- 1 | # For most projects, this workflow file will not need changing; you simply need 2 | # to commit it to your repository. 3 | # 4 | # You may wish to alter this file to override the set of languages analyzed, 5 | # or to provide custom queries or build logic. 6 | # 7 | # ******** NOTE ******** 8 | # We have attempted to detect the languages in your repository. Please check 9 | # the `language` matrix defined below to confirm you have the correct set of 10 | # supported CodeQL languages. 11 | # 12 | name: "CodeQL" 13 | 14 | on: 15 | push: 16 | branches: [ master ] 17 | pull_request: 18 | # The branches below must be a subset of the branches above 19 | branches: [ master ] 20 | schedule: 21 | - cron: '36 12 * * 4' 22 | 23 | jobs: 24 | analyze: 25 | name: Analyze 26 | runs-on: ubuntu-latest 27 | permissions: 28 | actions: read 29 | contents: read 30 | security-events: write 31 | 32 | strategy: 33 | fail-fast: false 34 | matrix: 35 | language: [ 'javascript', 'python', 'typescript' ] 36 | # CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python', 'ruby' ] 37 | # Learn more about CodeQL language support at https://git.io/codeql-language-support 38 | 39 | steps: 40 | - name: Checkout repository 41 | uses: actions/checkout@v3 42 | 43 | # Initializes the CodeQL tools for scanning. 44 | - name: Initialize CodeQL 45 | uses: github/codeql-action/init@v2 46 | with: 47 | languages: ${{ matrix.language }} 48 | # If you wish to specify custom queries, you can do so here or in a config file. 49 | # By default, queries listed here will override any specified in a config file. 50 | # Prefix the list here with "+" to use these queries and those in the config file. 
51 | # queries: ./path/to/local/query, your-org/your-repo/queries@main 52 | 53 | # Autobuild attempts to build any compiled languages (C/C++, C#, or Java). 54 | # If this step fails, then you should remove it and run the build manually (see below) 55 | - name: Autobuild 56 | uses: github/codeql-action/autobuild@v2 57 | 58 | # ℹ️ Command-line programs to run using the OS shell. 59 | # 📚 https://git.io/JvXDl 60 | 61 | # ✏️ If the Autobuild fails above, remove it and uncomment the following three lines 62 | # and modify them (or add more) to build your code if your project 63 | # uses a compiled language 64 | 65 | #- run: | 66 | # make bootstrap 67 | # make release 68 | 69 | - name: Perform CodeQL Analysis 70 | uses: github/codeql-action/analyze@v2 71 | -------------------------------------------------------------------------------- /.github/workflows/integration_tests.yml: -------------------------------------------------------------------------------- 1 | name: integration-tests 2 | 3 | on: [push, pull_request] 4 | 5 | jobs: 6 | integration-tests: 7 | runs-on: ubuntu-latest 8 | 9 | steps: 10 | - name: Checkout 11 | uses: actions/checkout@v3 12 | 13 | - name: Set up Node 18 14 | uses: actions/setup-node@v3 15 | with: 16 | node-version: 18 17 | 18 | - name: Cache Node modules 19 | id: cache-node-modules 20 | uses: actions/cache@0c907a75c2c80ebcb7f088228285e798b750cf8f # v4.2.1 21 | with: 22 | path: "**/node_modules" 23 | key: ${{ runner.os }}-modules-${{ hashFiles('**/yarn.lock') }} 24 | 25 | - name: Install Serverless Framework 26 | run: sudo yarn global add serverless@3.35 --prefix /usr/local 27 | 28 | - name: Run snapshot tests 29 | env: 30 | AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} 31 | AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} 32 | run: ./scripts/run_integration_tests.sh 33 | 34 | - name: Test getting recommended monitors 35 | env: 36 | DD_API_KEY: ${{ secrets.DD_API_KEY }} 37 | DD_APP_KEY: ${{ secrets.DD_APP_KEY }} 38 | run: npm run 
test:integration 39 | -------------------------------------------------------------------------------- /.github/workflows/monitor_api_integration_test.yml: -------------------------------------------------------------------------------- 1 | name: monitor-api-integration-test 2 | 3 | on: 4 | schedule: 5 | # 1pm UTC (9am EDT, 8am EST), every day 6 | - cron: "0 13 * * *" 7 | workflow_dispatch: 8 | 9 | jobs: 10 | integration-tests: 11 | runs-on: ubuntu-latest 12 | 13 | steps: 14 | - name: Checkout 15 | uses: actions/checkout@v3 16 | 17 | - name: Set up Node 18 18 | uses: actions/setup-node@v3 19 | with: 20 | node-version: 18 21 | 22 | - name: Cache Node modules 23 | id: cache-node-modules 24 | uses: actions/cache@0c907a75c2c80ebcb7f088228285e798b750cf8f # v4.2.1 25 | with: 26 | path: "**/node_modules" 27 | key: ${{ runner.os }}-modules-${{ hashFiles('**/yarn.lock') }} 28 | 29 | - name: Test getting recommended monitors 30 | env: 31 | DD_API_KEY: ${{ secrets.DD_API_KEY }} 32 | DD_APP_KEY: ${{ secrets.DD_APP_KEY }} 33 | run: npm run test:integration 34 | 35 | - name: Send failure message to Slack 36 | env: 37 | SLACK_CHANNEL: "#serverless-onboarding-and-enablement-ops" 38 | SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK }} 39 | if: failure() 40 | run: | 41 | set -x 42 | OPS_MESSAGE=":gh-check-failed: Serverless Plugin failed to fetch recommended monitors from monitor API! 
43 | Please check GitHub Action log: https://github.com/DataDog/serverless-plugin-datadog/actions/workflows/monitor_api_integration_test.yml" 44 | curl -H "Content-type: application/json" -X POST "$SLACK_WEBHOOK" -d '{ 45 | "channel": "'"$SLACK_CHANNEL"'", 46 | "text": "'"$OPS_MESSAGE"'" 47 | }' 48 | -------------------------------------------------------------------------------- /.github/workflows/publish.yml: -------------------------------------------------------------------------------- 1 | name: Publish packages on NPM 2 | on: 3 | release: 4 | types: [created] 5 | jobs: 6 | build: 7 | runs-on: ubuntu-latest 8 | steps: 9 | - uses: actions/checkout@v2 10 | - uses: actions/setup-node@v1 11 | with: 12 | node-version: "18.x" 13 | registry-url: "https://registry.npmjs.org" 14 | - run: yarn 15 | - run: yarn build 16 | - run: yarn publish 17 | env: 18 | NODE_AUTH_TOKEN: ${{ secrets.NPM_PUBLISH_TOKEN }} 19 | 20 | - name: Send success message to Slack 21 | env: 22 | SLACK_CHANNEL: "#serverless-releases" 23 | SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK }} 24 | if: success() 25 | run: | 26 | set -x 27 | OPS_MESSAGE=":gh-check-passed: serverless-plugin-datadog NPM publish succeeded!" 28 | curl -H "Content-type: application/json" -X POST "$SLACK_WEBHOOK" -d '{ 29 | "channel": "'"$SLACK_CHANNEL"'", 30 | "text": "'"$OPS_MESSAGE"'" 31 | }' 32 | 33 | - name: Send failure message to Slack 34 | env: 35 | SLACK_CHANNEL: "#serverless-onboarding-and-enablement-ops" 36 | SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK }} 37 | if: failure() 38 | run: | 39 | set -x 40 | OPS_MESSAGE=":gh-check-failed: serverless-plugin-datadog NPM publish failed! 
41 | Please check GitHub Action log: https://github.com/DataDog/serverless-plugin-datadog/actions/workflows/publish.yml" 42 | curl -H "Content-type: application/json" -X POST "$SLACK_WEBHOOK" -d '{ 43 | "channel": "'"$SLACK_CHANNEL"'", 44 | "text": "'"$OPS_MESSAGE"'" 45 | }' 46 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | node_modules 2 | dist 3 | yarn-error.log 4 | coverage 5 | .layers 6 | .DS_Store 7 | package-lock.json 8 | .serverless/ 9 | test_forwarder_snapshot.json 10 | test_extension_snapshot.json 11 | test_extension_apigateway.json 12 | .node-version -------------------------------------------------------------------------------- /.npmignore: -------------------------------------------------------------------------------- 1 | * 2 | !dist/** 3 | !LICENSE 4 | !LICENSE-3rdparty.csv 5 | !NOTICE 6 | -------------------------------------------------------------------------------- /.nvmrc: -------------------------------------------------------------------------------- 1 | 18 2 | -------------------------------------------------------------------------------- /.prettierignore: -------------------------------------------------------------------------------- 1 | *.md -------------------------------------------------------------------------------- /.prettierrc: -------------------------------------------------------------------------------- 1 | { 2 | "printWidth": 120, 3 | "trailingComma": "all", 4 | "arrowParens": "always" 5 | } 6 | -------------------------------------------------------------------------------- /CHANGELOG.md: -------------------------------------------------------------------------------- 1 | # CHANGELOG 2 | 3 | # Version: 1 / 2019-08-12 4 | 5 | - First release 6 | - Enable tracing by default for Lambda and API Gateway 7 | - Automatically add Lambda Layer to functions 8 | 
-------------------------------------------------------------------------------- /CODEOWNERS: -------------------------------------------------------------------------------- 1 | 2 | * @DataDog/serverless 3 | *.md @DataDog/serverless @DataDog/documentation 4 | -------------------------------------------------------------------------------- /CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | # Contributing 2 | 3 | We love pull requests. Here's a quick guide. 4 | 5 | 1. Fork, clone and branch off: 6 | ```bash 7 | git clone git@github.com:<your-username>/serverless-plugin-datadog.git 8 | git checkout -b <my-branch> 9 | yarn install 10 | ``` 11 | 1. Make your changes. 12 | 1. Test your changes against your own testing application with the help of [`yarn link`](https://classic.yarnpkg.com/en/docs/cli/link/): 13 | ```bash 14 | # From the root of the serverless-plugin-datadog repo 15 | yarn build 16 | cd dist 17 | yarn link 18 | 19 | # From the root of your own serverless application 20 | yarn link "serverless-plugin-datadog" 21 | sls deploy 22 | ``` 23 | 1. Ensure the unit tests pass: 24 | ```bash 25 | yarn test 26 | ``` 27 | 1. Push to your fork and [submit a pull request][pr]. 28 | 29 | [pr]: https://github.com/your-username/serverless-plugin-datadog/compare/DataDog:master...master 30 | 31 | At this point you're waiting on us. We may suggest some changes or improvements or alternatives. 32 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 
11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 
47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. 
Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. 
You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 
122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. 
In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "{}" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. 
We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright 2021 Datadog, Inc. 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 202 | 203 | SPDX-License-Identifier: Apache-2.0 -------------------------------------------------------------------------------- /LICENSE-3rdparty.csv: -------------------------------------------------------------------------------- 1 | Component,Origin,License,Copyright 2 | -------------------------------------------------------------------------------- /NOTICE: -------------------------------------------------------------------------------- 1 | Datadog serverless-plugin-datadog 2 | Copyright 2021 Datadog, Inc. 3 | 4 | This product includes software developed at Datadog (https://www.datadoghq.com/). 
5 | -------------------------------------------------------------------------------- /integration_tests/FunctionLevelLayer/file.txt: -------------------------------------------------------------------------------- 1 | Also Aaron Stuyvenberg -------------------------------------------------------------------------------- /integration_tests/ProviderLevelLayer/file.txt: -------------------------------------------------------------------------------- 1 | Aaron Stuyvenberg -------------------------------------------------------------------------------- /integration_tests/js_handler.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | module.exports.hello = async (event) => { 4 | return { 5 | statusCode: 200, 6 | body: JSON.stringify( 7 | { 8 | message: 'Go Serverless v1.0! Your function executed successfully!', 9 | input: event, 10 | }, 11 | null, 12 | 2 13 | ), 14 | }; 15 | }; 16 | -------------------------------------------------------------------------------- /integration_tests/monitors.integration.spec.ts: -------------------------------------------------------------------------------- 1 | import { getRecommendedMonitors } from "../src/monitor-api-requests"; 2 | 3 | describe("Integration Test for setMonitors", () => { 4 | const expectedMonitors = [ 5 | "high_cold_start_rate", 6 | "timeout", 7 | "out_of_memory", 8 | "high_iterator_age", 9 | "high_cold_start_rate", 10 | "high_throttles", 11 | "increased_cost", 12 | ]; 13 | 14 | it("retrieves recommended monitors from Datadog Monitor API", async () => { 15 | const apiKey = process.env.DD_API_KEY; 16 | const appKey = process.env.DD_APP_KEY; 17 | if (!apiKey || !appKey) { 18 | throw new Error( 19 | "DD_API_KEY and DD_APP_KEY must be set. 
Please run this test using `DD_API_KEY= DD_APP_KEY= npm run test:integration`", 20 | ); 21 | } 22 | 23 | const recommendedMonitors = await getRecommendedMonitors("datadoghq.com", apiKey, appKey); 24 | 25 | for (const expectedMonitor of expectedMonitors) { 26 | expect(recommendedMonitors).toHaveProperty(expectedMonitor); 27 | } 28 | }); 29 | }); 30 | -------------------------------------------------------------------------------- /integration_tests/py_handler.py: -------------------------------------------------------------------------------- 1 | import json 2 | 3 | 4 | def hello(event, context): 5 | body = { 6 | "message": "Go Serverless v1.0! Your function executed successfully!", 7 | "input": event 8 | } 9 | 10 | response = { 11 | "statusCode": 200, 12 | "body": json.dumps(body) 13 | } 14 | 15 | return response 16 | -------------------------------------------------------------------------------- /integration_tests/rb_handler.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | require 'datadog/lambda' 4 | 5 | Datadog::Lambda.configure_apm do |c| 6 | end 7 | 8 | def hello(event:, context:) 9 | Datadog::Lambda.wrap(event, context) do 10 | body = { 11 | 'message' => "Datadog <3 Serverless!", 12 | 'input' => event 13 | } 14 | 15 | response = { 16 | 'statusCode' => 200, 17 | 'body' => body 18 | } 19 | 20 | response 21 | end 22 | end 23 | -------------------------------------------------------------------------------- /integration_tests/serverless-extension-apigateway.yml: -------------------------------------------------------------------------------- 1 | service: dd-sls-plugin-integration-test 2 | frameworkVersion: "3" 3 | 4 | plugins: 5 | - ../dist/src 6 | 7 | provider: 8 | name: aws 9 | region: sa-east-1 10 | logs: 11 | restApi: true 12 | httpApi: true 13 | websocket: true 14 | 15 | custom: 16 | datadog: 17 | apiKey: 1234 18 | addExtension: true 19 | forwarderArn: 
arn:aws:lambda:us-east-1:000000000000:function:datadog-forwarder 20 | enableSourceCodeIntegration: false 21 | testingMode: true 22 | integrationTesting: true 23 | logLevel: "info" 24 | 25 | functions: 26 | PythonHello37: 27 | handler: py_handler.hello 28 | runtime: python3.7 29 | PythonHello38: 30 | handler: py_handler.hello 31 | runtime: python3.8 32 | PythonHello39: 33 | handler: py_handler.hello 34 | runtime: python3.9 35 | events: 36 | - http: 37 | path: users/create 38 | method: get 39 | - httpApi: 40 | path: /users/update 41 | method: put 42 | - websocket: $connect 43 | JavascriptHello16-x: 44 | handler: js_handler.hello 45 | runtime: nodejs16.x 46 | events: 47 | - http: 48 | path: users/create 49 | method: post 50 | - httpApi: 51 | path: /users/remove 52 | method: delete 53 | - websocket: $connect 54 | JavascriptHello18-x: 55 | handler: js_handler.hello 56 | runtime: nodejs18.x 57 | JavascriptHello20-x: 58 | handler: js_handler.hello 59 | runtime: nodejs20.x 60 | DotnetHello6: 61 | handler: dotnet_handler.hello 62 | runtime: dotnet6 63 | DotnetHello8: 64 | handler: dotnet_handler.hello 65 | runtime: dotnet8 66 | JavaHello8: 67 | handler: java_handler.hello 68 | runtime: java8 69 | JavaHello11: 70 | handler: java_handler.hello 71 | runtime: java11 72 | providedHello: 73 | handler: provided_handler.hello 74 | runtime: provided 75 | RubyHello32: 76 | handler: rb_handler.hello 77 | runtime: ruby3.2 78 | -------------------------------------------------------------------------------- /integration_tests/serverless-extension.yml: -------------------------------------------------------------------------------- 1 | service: dd-sls-plugin-integration-test 2 | frameworkVersion: "3" 3 | 4 | plugins: 5 | - ../dist/src 6 | 7 | provider: 8 | name: aws 9 | region: sa-east-1 10 | layers: 11 | - { Ref: ProviderLevelLayerLambdaLayer } 12 | 13 | custom: 14 | datadog: 15 | addExtension: true 16 | apiKey: 1234 17 | enableSourceCodeIntegration: false 18 | testingMode: true 19 | 
integrationTesting: true 20 | exclude: 21 | - ExcludeThis 22 | 23 | functions: 24 | PythonHello37: 25 | handler: py_handler.hello 26 | runtime: python3.7 27 | PythonHello38: 28 | handler: py_handler.hello 29 | runtime: python3.8 30 | PythonHello39: 31 | handler: py_handler.hello 32 | runtime: python3.9 33 | JavascriptHello16-x: 34 | handler: js_handler.hello 35 | runtime: nodejs16.x 36 | layers: 37 | - { Ref: FunctionLevelLayerLambdaLayer } 38 | JavascriptHello18-x: 39 | handler: js_handler.hello 40 | runtime: nodejs18.x 41 | JavascriptHello20-x: 42 | handler: js_handler.hello 43 | runtime: nodejs20.x 44 | ExcludeThis: 45 | handler: js_handler.hello 46 | runtime: nodejs20.x 47 | DotnetHello6: 48 | handler: dotnet_handler.hello 49 | runtime: dotnet6 50 | DotnetArmHello6: 51 | handler: dotnet_handler.hello 52 | runtime: dotnet6 53 | architecture: arm64 54 | DotnetHello8: 55 | handler: dotnet_handler.hello 56 | runtime: dotnet8 57 | DotnetArmHello8: 58 | handler: dotnet_handler.hello 59 | runtime: dotnet8 60 | architecture: arm64 61 | JavaHello8: 62 | handler: java_handler.hello 63 | runtime: java8 64 | JavaHello11: 65 | handler: java_handler.hello 66 | runtime: java11 67 | providedHello: 68 | handler: provided_handler.hello 69 | runtime: provided 70 | RubyHello32: 71 | handler: rb_handler.hello 72 | runtime: ruby3.2 73 | layers: 74 | ProviderLevelLayer: 75 | path: ProviderLevelLayer # required, path to layer contents on disk 76 | name: ${self:service}-${sls:stage}-ProviderLevelLayer # optional, Deployed Lambda layer name 77 | description: It's a text file # optional, Description to publish to AWS 78 | compatibleRuntimes: # optional, a list of runtimes this layer is compatible with 79 | - nodejs20.x 80 | FunctionLevelLayer: 81 | path: FunctionLevelLayer 82 | name: ${self:service}-${sls:stage}-FunctionLevelLayer # optional, Deployed Lambda layer name 83 | description: It's also a text file # optional, Description to publish to AWS 84 | 
-------------------------------------------------------------------------------- /integration_tests/serverless-forwarder.yml: -------------------------------------------------------------------------------- 1 | service: dd-sls-plugin-integration-test 2 | frameworkVersion: "3" 3 | 4 | plugins: 5 | - ../dist/src 6 | - serverless-step-functions 7 | 8 | provider: 9 | name: aws 10 | region: sa-east-1 11 | logs: 12 | restApi: 13 | accessLogging: true 14 | executionLogging: false 15 | httpApi: false 16 | websocket: true 17 | 18 | custom: 19 | datadog: 20 | forwarderArn: arn:aws:lambda:us-east-1:000000000000:function:datadog-forwarder 21 | testingMode: true 22 | integrationTesting: true 23 | logLevel: "info" 24 | subscribeToExecutionLogs: true 25 | subscribeToStepFunctionLogs: true 26 | addExtension: false 27 | exclude: 28 | - ExcludeThis 29 | - Exclude-This 30 | 31 | functions: 32 | PythonHello37: 33 | handler: py_handler.hello 34 | runtime: python3.7 35 | PythonHello38: 36 | handler: py_handler.hello 37 | runtime: python3.8 38 | PythonHello39: 39 | handler: py_handler.hello 40 | runtime: python3.9 41 | events: 42 | - http: 43 | path: users/create 44 | method: get 45 | - httpApi: 46 | path: /users/update 47 | method: put 48 | - websocket: $connect 49 | JavascriptHello16-x: 50 | handler: js_handler.hello 51 | runtime: nodejs16.x 52 | events: 53 | - http: 54 | path: users/create 55 | method: post 56 | - httpApi: 57 | path: /users/remove 58 | method: delete 59 | - websocket: $connect 60 | JavascriptHello18-x: 61 | handler: js_handler.hello 62 | runtime: nodejs18.x 63 | JavascriptHello20-x: 64 | handler: js_handler.hello 65 | runtime: nodejs20.x 66 | ExcludeThis: 67 | handler: js_handler.hello 68 | runtime: nodejs20.x 69 | Exclude-This: 70 | handler: js_handler.hello 71 | runtime: nodejs20.x 72 | DotnetHello6: 73 | handler: dotnet_handler.hello 74 | runtime: dotnet6 75 | DotnetHello8: 76 | handler: dotnet_handler.hello 77 | runtime: dotnet8 78 | JavaHello8: 79 | handler: 
java_handler.hello 80 | runtime: java8 81 | JavaHello11: 82 | handler: java_handler.hello 83 | runtime: java11 84 | providedHello: 85 | handler: provided_handler.hello 86 | runtime: provided 87 | RubyHello32: 88 | handler: rb_handler.hello 89 | runtime: ruby3.2 90 | 91 | resources: 92 | Resources: 93 | stepFunction2LogGroup: 94 | Type: AWS::Logs::LogGroup 95 | Properties: 96 | LogGroupName: /aws/vendedlogs/states/stepFunctionLoggingConfigDynamicArn-Logs-dev 97 | stepFunction3LogGroup: 98 | Type: AWS::Logs::LogGroup 99 | Properties: 100 | LogGroupName: /aws/vendedlogs/states/stepFunctionNoLoggingConfigStaticArn-Logs-dev 101 | 102 | stepFunctions: 103 | stateMachines: 104 | stepfunction1: 105 | name: stepFunctionNoLoggingConfig 106 | definition: 107 | StartAt: State1 108 | States: 109 | State1: 110 | Type: Task 111 | Parameters: 112 | FunctionName: 113 | Fn::GetAtt: [JavascriptHello18-x, Arn] 114 | Resource: arn:aws:states:::lambda:invoke 115 | End: true 116 | stepfunction2: 117 | name: stepFunctionLoggingConfigDynamicArn 118 | loggingConfig: 119 | level: ALL 120 | includeExecutionData: true 121 | destinations: 122 | - Fn::GetAtt: [stepFunction2LogGroup, Arn] 123 | definition: 124 | StartAt: State1 125 | States: 126 | State1: 127 | Type: Task 128 | Parameters: 129 | FunctionName: 130 | Fn::GetAtt: [JavascriptHello18-x, Arn] 131 | Resource: arn:aws:states:::lambda:invoke 132 | End: true 133 | stepfunction3: 134 | name: stepFunctionNoLoggingConfigStaticArn 135 | loggingConfig: 136 | level: ALL 137 | includeExecutionData: true 138 | destinations: 139 | - arn:aws:logs:sa-east-1:425362996713:log-group:/aws/vendedlogs/states/stepFunctionNoLoggingConfigStaticArn-Logs-dev:* 140 | definition: 141 | StartAt: State1 142 | States: 143 | State1: 144 | Type: Task 145 | Parameters: 146 | FunctionName: 147 | Fn::GetAtt: [JavascriptHello18-x, Arn] 148 | Resource: arn:aws:states:::lambda:invoke 149 | End: true 150 | 
-------------------------------------------------------------------------------- /jest.config.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | verbose: true, 3 | moduleFileExtensions: ["ts", "tsx", "js"], 4 | transform: { 5 | ".(ts|tsx)": "ts-jest", 6 | }, 7 | collectCoverage: true, 8 | coverageReporters: ["lcovonly", "text-summary"], 9 | testEnvironment: "/testEnvironment.js", 10 | clearMocks: true, 11 | collectCoverageFrom: ["src/**/*.ts"], 12 | testRegex: "(src\\/).*(\\.spec\\.ts)$", 13 | testPathIgnorePatterns: ["\\.snap$", "/node_modules/", "integration_tests/"], 14 | }; 15 | -------------------------------------------------------------------------------- /jest.integration.config.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | verbose: true, 3 | moduleFileExtensions: ["ts", "tsx", "js"], 4 | transform: { 5 | ".(ts|tsx)": "ts-jest", 6 | }, 7 | collectCoverage: true, 8 | coverageReporters: ["lcovonly", "text-summary"], 9 | testEnvironment: "/testEnvironment.js", 10 | clearMocks: true, 11 | collectCoverageFrom: ["src/**/*.ts"], 12 | testRegex: ["(integration_tests\\/).*(\\.spec\\.ts)$"], 13 | }; 14 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "serverless-plugin-datadog", 3 | "version": "5.95.0", 4 | "description": "Serverless plugin to automatically instrument python and node functions with datadog tracing", 5 | "main": "dist/src/index.js", 6 | "repository": "https://github.com/DataDog/serverless-plugin-datadog", 7 | "author": "Datadog", 8 | "license": "Apache-2.0", 9 | "scripts": { 10 | "prebuild": "./scripts/check_layers_json.sh", 11 | "build": "tsc", 12 | "test": "jest --config jest.config.js", 13 | "test:integration": "jest ./integration_tests --config jest.integration.config.js", 14 | 
"test:watch": "jest --watch", 15 | "coverage": "jest --coverage", 16 | "lint": "tslint --project tsconfig.json", 17 | "check-formatting": "prettier --check \"src/**\" \"README.md\"", 18 | "format": "prettier --write \"src/**\" \"README.md\"" 19 | }, 20 | "devDependencies": { 21 | "@types/jest": "^27.4.0", 22 | "@types/mock-fs": "4.13.0", 23 | "@types/node-fetch": "^2.5.10", 24 | "@types/serverless": "1.78.34", 25 | "jest": "^29.7.0", 26 | "jest-environment-node": "^26.6.2", 27 | "mock-fs": "4.13.0", 28 | "prettier": "^2.2.1", 29 | "serverless-step-functions": "^3.17.0", 30 | "ts-jest": "^29.1.1", 31 | "tslint": "^6.1.3", 32 | "typescript": "^4.1.3" 33 | }, 34 | "jest": { 35 | "verbose": true, 36 | "moduleFileExtensions": [ 37 | "ts", 38 | "tsx", 39 | "js" 40 | ], 41 | "transform": { 42 | ".(ts|tsx)": "ts-jest" 43 | }, 44 | "collectCoverage": true, 45 | "coverageReporters": [ 46 | "lcovonly", 47 | "text-summary" 48 | ], 49 | "testRegex": "(src\\/).*(\\.spec\\.ts)$", 50 | "testPathIgnorePatterns": [ 51 | "\\.snap$", 52 | "/node_modules/" 53 | ], 54 | "testEnvironment": "/testEnvironment.js", 55 | "clearMocks": true, 56 | "collectCoverageFrom": [ 57 | "src/**/*.ts" 58 | ] 59 | }, 60 | "dependencies": { 61 | "@datadog/datadog-ci": "^3.3.1", 62 | "node-fetch": "^2.6.1", 63 | "simple-git": "^3.16.0" 64 | }, 65 | "peerDependencies": { 66 | "serverless": "4.x || 3.x || 2.x || 1.x" 67 | }, 68 | "packageManager": "yarn@1.22.22+sha512.a6b2f7906b721bba3d67d4aff083df04dad64c399707841b7acf00f6b133b7ac24255f2652fa22ae3534329dc6180534e98d17432037ff6fd140556e2bb3137e" 69 | } 70 | -------------------------------------------------------------------------------- /scripts/check_layers_json.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # Unless explicitly stated otherwise all files in this repository are licensed 4 | # under the Apache License Version 2.0. 
5 | # This product includes software developed at Datadog (https://www.datadoghq.com/). 6 | # Copyright 2021 Datadog, Inc. 7 | 8 | set -e 9 | 10 | if [ ! -f "src/layers.json" ] 11 | then 12 | echo "Layers.json not set, please make sure to run generate_layers_json.sh before building" 13 | exit 1 14 | fi 15 | -------------------------------------------------------------------------------- /scripts/compare_snapshots.py: -------------------------------------------------------------------------------- 1 | import json 2 | import sys 3 | import logging 4 | 5 | 6 | log = logging.getLogger(__name__) 7 | 8 | 9 | def do_values_match(a, b, file_name_a, file_name_b, level=0): 10 | """Compare a and b recursively, logging info about any mismatch found""" 11 | aType = type(a) 12 | bType = type(b) 13 | if aType != bType: 14 | log.warning( 15 | "Mismatch level %s: Values have different types: %s vs %s", 16 | level, 17 | aType, 18 | bType, 19 | ) 20 | return False 21 | 22 | if aType == dict: 23 | if len(a) != len(b): 24 | log.warning( 25 | "Mismatch level %s: Dicts have different lengths (%s vs %s). 
a_keys=%s vs b_keys=%s", 26 | level, 27 | len(a), 28 | len(b), 29 | a.keys(), 30 | b.keys(), 31 | ) 32 | return False 33 | for key in a: 34 | a_value = a[key] 35 | b_value = b.get(key) 36 | 37 | if b_value is None: 38 | log.warning( 39 | "Mismatch level %s: Key %s present in %s but missing from %s dict with keys %s", 40 | level, 41 | key, 42 | file_name_a, 43 | file_name_b, 44 | b.keys(), 45 | ) 46 | return False 47 | 48 | if not do_values_match( 49 | a_value, b_value, file_name_a, file_name_b, level + 1 50 | ): 51 | log.warning( 52 | "Mismatch level %s: Dict values at key %s do not match", 53 | level, 54 | key, 55 | ) 56 | return False 57 | 58 | return True 59 | 60 | if aType == list: 61 | if len(a) != len(b): 62 | log.warning( 63 | "Mismatch level %s: Arrays have different lengths (%s vs %s):", 64 | level, 65 | len(a), 66 | len(b), 67 | ) 68 | log.warning(" %s: %s", file_name_a, a) 69 | log.warning(" %s: %s", file_name_b, b) 70 | 71 | return False 72 | 73 | for i in range(len(a)): 74 | aListItem = a[i] 75 | bListItem = b[i] 76 | 77 | if not do_values_match( 78 | aListItem, bListItem, file_name_a, file_name_b, level + 1 79 | ): 80 | log.warning( 81 | "Mismatch level %s: Array items at position %s do not match", 82 | level, 83 | i, 84 | ) 85 | return False 86 | 87 | return True 88 | 89 | are_values_equal = a == b 90 | if not are_values_equal: 91 | log.warning("Mismatch level %s: Values %s and %s do not match", level, a, b) 92 | return are_values_equal 93 | 94 | 95 | def do_json_files_match(file_name_a, file_name_b): 96 | with open(file_name_a) as fileA: 97 | file_a_data = json.load(fileA) 98 | 99 | with open(file_name_b) as fileB: 100 | file_b_data = json.load(fileB) 101 | 102 | return do_values_match(file_a_data, file_b_data, file_name_a, file_name_b, 0) 103 | 104 | 105 | def main(): 106 | if len(sys.argv) != 3: 107 | raise ValueError("Must provide 2 file names to compare") 108 | 109 | if not do_json_files_match(sys.argv[1], sys.argv[2]): 110 | sys.exit(1) 111 | 
112 | # Exit successfully if no mismatch found 113 | sys.exit(0) 114 | 115 | 116 | if __name__ == "__main__": 117 | main() 118 | -------------------------------------------------------------------------------- /scripts/generate_layers_json.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # Unless explicitly stated otherwise all files in this repository are licensed 4 | # under the Apache License Version 2.0. 5 | # This product includes software developed at Datadog (https://www.datadoghq.com/). 6 | # Copyright 2021 Datadog, Inc. 7 | 8 | # Writes layer info to easily readable json file 9 | 10 | # Call: ./scripts/generate_layers_json [-g] 11 | # Opts: 12 | # -g: generate govcloud file 13 | 14 | set -e 15 | 16 | LAYER_NAMES=( 17 | "Datadog-Node16-x" 18 | "Datadog-Node18-x" 19 | "Datadog-Node20-x" 20 | "Datadog-Node22-x" 21 | "Datadog-Python37" 22 | "Datadog-Python38" 23 | "Datadog-Python38-ARM" 24 | "Datadog-Python39" 25 | "Datadog-Python39-ARM" 26 | "Datadog-Python310" 27 | "Datadog-Python310-ARM" 28 | "Datadog-Python311" 29 | "Datadog-Python311-ARM" 30 | "Datadog-Python312" 31 | "Datadog-Python312-ARM" 32 | "Datadog-Python313" 33 | "Datadog-Python313-ARM" 34 | "Datadog-Ruby3-2" 35 | "Datadog-Ruby3-2-ARM" 36 | "Datadog-Ruby3-3" 37 | "Datadog-Ruby3-3-ARM" 38 | "Datadog-Extension" 39 | "Datadog-Extension-ARM" 40 | "Datadog-Extension-FIPS" 41 | "Datadog-Extension-ARM-FIPS" 42 | "dd-trace-dotnet" 43 | "dd-trace-dotnet-ARM" 44 | "dd-trace-java" 45 | ) 46 | 47 | JSON_LAYER_NAMES=( 48 | "nodejs16.x" 49 | "nodejs18.x" 50 | "nodejs20.x" 51 | "nodejs22.x" 52 | "python3.7" 53 | "python3.8" 54 | "python3.8-arm" 55 | "python3.9" 56 | "python3.9-arm" 57 | "python3.10" 58 | "python3.10-arm" 59 | "python3.11" 60 | "python3.11-arm" 61 | "python3.12" 62 | "python3.12-arm" 63 | "python3.13" 64 | "python3.13-arm" 65 | "ruby3.2" 66 | "ruby3.2-arm" 67 | "ruby3.3" 68 | "ruby3.3-arm" 69 | "extension" 70 | "extension-arm" 71 | 
"extension-fips" 72 | "extension-arm-fips" 73 | "dotnet" 74 | "dotnet-arm" 75 | "java" 76 | ) 77 | 78 | AVAILABLE_REGIONS=$(aws ec2 describe-regions | jq -r '.[] | .[] | .RegionName') 79 | 80 | FILE_NAME="src/layers.json" 81 | 82 | INPUT_JSON="{\"regions\":{}}" 83 | 84 | if [ "$1" = "-g" ]; then 85 | FILE_NAME="src/layers-gov.json" 86 | fi 87 | 88 | # Fetch the layers for each region in parallel 89 | echo "Fetching layers for each region" 90 | rm -rf layers 91 | mkdir layers 92 | for region in $AVAILABLE_REGIONS; do 93 | { 94 | aws lambda list-layers --region "$region" | jq -c '[.Layers[] | {LayerName, LastLayerArn: .LatestMatchingVersion.LayerVersionArn}]' > layers/$region.json 95 | } & 96 | done 97 | wait # Wait for all parallel jobs to complete 98 | 99 | echo "Generating layers json" 100 | for region in $AVAILABLE_REGIONS 101 | do 102 | for ((i=0;i<${#LAYER_NAMES[@]};++i)); 103 | do 104 | 105 | layer_name=${LAYER_NAMES[i]} 106 | json_layer_name=${JSON_LAYER_NAMES[i]} 107 | 108 | last_layer_arn=$(cat layers/$region.json | jq -r --arg layer_name $layer_name '.[] | select(.LayerName == $layer_name) .LastLayerArn') 109 | 110 | if [ -z $last_layer_arn ]; then 111 | >&2 echo "No layer found for $region, $layer_name" 112 | else 113 | echo $last_layer_arn 114 | INPUT_JSON=$(jq -r ".regions . \"$region\" . \"$json_layer_name\" = \"$last_layer_arn\"" <<< $INPUT_JSON) 115 | fi 116 | done 117 | done 118 | echo "Writing to ${FILE_NAME}" 119 | jq '.' <<< $INPUT_JSON > $FILE_NAME 120 | 121 | rm -rf layers 122 | -------------------------------------------------------------------------------- /scripts/generate_third_party_license_file.py: -------------------------------------------------------------------------------- 1 | ''' 2 | Unless explicitly stated otherwise all files in this repository are licensed 3 | under the Apache License Version 2.0. 4 | 5 | This product includes software developed at Datadog (https://www.datadoghq.com/). 6 | Copyright 2021 Datadog, Inc. 
7 | ''' 8 | 9 | import csv 10 | import json 11 | import re 12 | import shlex 13 | import subprocess 14 | 15 | REPO_EXCEPTIONS = {"eyes": "https://github.com/cloudhead/eyes.js"} 16 | 17 | COPYRIGHT_EXCEPTIONS = ["aws-sdk", "datadog-lambda-layer-js"] 18 | 19 | """ 20 | Exceptions to this scripts assumptions about the formatting of LICENSE files: 21 | 22 | querystring contains the whole license 23 | colors.js doesn't have "Copyright" starting at the beginning of the line (https://github.com/Marak/colors.js/blob/master/LICENSE) 24 | cycles uses the JSON license (https://github.com/dscape/cycle) 25 | aws-xray-sdk puts the copyright string in NOTICE.txt instead of in LICENSE 26 | datadog-lambda-js doesn't put a copyright string in the LICENSE file 27 | aws-sdk-js uses the same format ^ 28 | base64-js misses the name after the copyright year (github.com/beatgammit/base64-js) 29 | querystring puts the whole license in one line 30 | """ 31 | 32 | def get_repo_url(dep_name, dep_meta): 33 | repo_url = dep_meta.get("repository", REPO_EXCEPTIONS.get(dep_name, "NO REPO")) 34 | if repo_url.startswith("https"): 35 | return re.search(r"https:\/\/(.*)", repo_url).group(1) 36 | return repo_url 37 | 38 | 39 | if __name__ == "__main__": 40 | raw_output = subprocess.check_output( 41 | shlex.split("license-checker --json --production --start ..") 42 | ) 43 | deps = json.loads(raw_output) 44 | alphabetized_dep_names = sorted(deps.keys()) 45 | 46 | formatted_deps = [] 47 | for dep in alphabetized_dep_names: 48 | dep_meta = deps[dep] 49 | dep_name = re.search(r"([\w-]+)@", dep).group(1) 50 | repo_url = get_repo_url(dep_name, dep_meta) 51 | license = dep_meta.get("licenses", "LICENSE NOT FOUND") 52 | 53 | if "Custom" in license: 54 | print("Custom license for {}".format(dep_name)) 55 | 56 | # Extract the "Copyright ..." line from the license file. 57 | # Naively handles multi-line copyrights starting with "Copyright" 58 | # and ending with two newlines. 
59 | license_file = dep_meta.get("licenseFile", None) 60 | dep_copyright = "" 61 | if license_file: 62 | with open(license_file) as f: 63 | contents = f.read() 64 | # https://stackoverflow.com/a/52347904 65 | matches = re.findall(r"(Copyright.*(\n\S.*)*)", contents) 66 | if len(matches) > 0: 67 | dep_copyright = matches[0][0].replace("\n", " ") 68 | else: 69 | print("No license file for {}".format(dep_name)) 70 | 71 | formatted_deps.append( 72 | { 73 | "Component": dep_name, 74 | "Origin": repo_url, 75 | "License": license, 76 | "Copyright": dep_copyright, 77 | } 78 | ) 79 | 80 | with open("../LICENSE-3rdparty.csv", "w") as csv_file: 81 | fieldnames = ["Component", "Origin", "License", "Copyright"] 82 | writer = csv.DictWriter(csv_file, fieldnames=fieldnames) 83 | writer.writeheader() 84 | for dep in formatted_deps: 85 | writer.writerow(dep) -------------------------------------------------------------------------------- /scripts/publish_prod.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # Unless explicitly stated otherwise all files in this repository are licensed 4 | # under the Apache License Version 2.0. 5 | # This product includes software developed at Datadog (https://www.datadoghq.com/). 6 | # Copyright 2021 Datadog, Inc. 
7 | 8 | # Usage - run commands from repo root: 9 | # To publish a new version: 10 | # ./scripts/publish_prod.sh 11 | # To publish a new version without updating the layer versions: 12 | # UPDATE_LAYERS=false ./scripts/publish_prod.sh 13 | 14 | set -e 15 | 16 | # Ensure on main, and pull the latest 17 | BRANCH=$(git rev-parse --abbrev-ref HEAD) 18 | if [ $BRANCH != "main" ]; then 19 | echo "Not on main, aborting" 20 | exit 1 21 | else 22 | echo "Updating main branch" 23 | git pull origin main 24 | fi 25 | 26 | # Ensure no uncommitted changes 27 | if [ -n "$(git status --porcelain)" ]; then 28 | echo "Detected uncommitted changes, aborting" 29 | exit 1 30 | fi 31 | 32 | # Check we have merged our changes 33 | echo "Checking changes have been merged to main branch" 34 | LAST_MERGED_COMMIT="$(git log --oneline -1)" 35 | read -p "The most recent commit to the main branch was ${LAST_MERGED_COMMIT}. Was this your most recent change? (y/n): " CONT 36 | if [ "$CONT" != "y" ]; then 37 | echo "Please merge your changes before finishing the release!" 38 | echo "Exiting" 39 | exit 1 40 | fi 41 | 42 | # Read the current version 43 | CURRENT_VERSION=$(node -pe "require('./package.json').version") 44 | 45 | # Read the desired version 46 | if [ -z "$1" ]; then 47 | echo "Must specify a desired version number" 48 | exit 1 49 | elif [[ ! $1 =~ [0-9]+\.[0-9]+\.[0-9]+ ]]; then 50 | echo "Must use a semantic version, e.g., 3.1.4" 51 | exit 1 52 | else 53 | VERSION=$1 54 | fi 55 | 56 | # Confirm to proceed 57 | read -p "About to bump the version from ${CURRENT_VERSION} to ${VERSION}, and publish. Continue? (y/n)" CONT 58 | if [ "$CONT" != "y" ]; then 59 | echo "Exiting" 60 | exit 1 61 | fi 62 | 63 | if [ "$UPDATE_LAYERS" != "false" ]; then 64 | read -p "About to update layer versions to the latest available from AWS. Continue? 
(y/n)" CONT 65 | if [ "$CONT" != "y" ]; then 66 | echo "Exiting" 67 | exit 1 68 | fi 69 | fi 70 | 71 | if [ "$UPDATE_LAYERS" != "false" ]; then 72 | echo "If an SSO authorization link is printed below, please make sure to authorize it with your GovCloud account." 73 | aws-vault exec sso-govcloud-us1-fed-engineering -- aws sts get-caller-identity 74 | 75 | echo "If an SSO authorization link is printed below, please make sure to authorize it with your datadoghq.com account." 76 | aws-vault exec sso-prod-engineering -- aws sts get-caller-identity 77 | 78 | echo "Updating layer versions for GovCloud AWS accounts" 79 | aws-vault exec sso-govcloud-us1-fed-engineering -- ./scripts/generate_layers_json.sh -g 80 | 81 | echo "Updating layer versions for commercial AWS accounts" 82 | aws-vault exec sso-prod-engineering -- ./scripts/generate_layers_json.sh 83 | 84 | # Commit layer updates if needed 85 | if [[ $(git status --porcelain) == *"src/layers"* ]]; then 86 | echo "Layers updated, committing changes" 87 | git commit src/layers.json src/layers-gov.json -m "Update layer versions" 88 | fi 89 | fi 90 | 91 | echo 92 | echo "Bumping the version number and committing the changes" 93 | yarn version --new-version "$VERSION" 94 | 95 | yarn build 96 | 97 | echo 98 | echo 'Pushing updates to GitHub' 99 | git push origin main 100 | git push origin "refs/tags/v$VERSION" 101 | 102 | echo 103 | echo "DONE! Please create a new release using the link below. It will trigger a GitHub action to publish to npm." 
104 | echo "https://github.com/DataDog/serverless-plugin-datadog/releases/new?tag=v$VERSION&title=v$VERSION" 105 | -------------------------------------------------------------------------------- /scripts/run_integration_tests.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # Usage - run commands from repo root: 4 | # To check if new changes to the plugin cause changes to any snapshots: 5 | # ./scripts/run_integration_tests.sh 6 | # To regenerate snapshots: 7 | # UPDATE_SNAPSHOTS=true ./scripts/run_integration_tests.sh 8 | 9 | set -e 10 | 11 | # To add new tests create a new yml file in the 'integration_tests' directory, append it to the SERVERLESS_CONFIGS array as well as creating a name for the 12 | # snapshots that will be compared in your test. Add those snapshot names to the TEST_SNAPSHOTS and CORRECT_SNAPSHOTS arrays. 13 | # Note: Each yml config, test, and correct snapshot file should be at the same index in their own array. e.g. All the files for the forwarder test are at index 0. 14 | # In order for this script to work correctly these arrays should have the same amount of elements. 
15 | SERVERLESS_CONFIGS=("./serverless-forwarder.yml" "./serverless-extension.yml" "./serverless-extension-apigateway.yml") 16 | TEST_SNAPSHOTS=("test_forwarder_snapshot.json" "test_extension_snapshot.json" "test_extension_apigateway.json") 17 | CORRECT_SNAPSHOTS=("correct_forwarder_snapshot.json" "correct_extension_snapshot.json" "correct_extension_apigateway_snapshot.json") 18 | 19 | script_path=${BASH_SOURCE[0]} 20 | scripts_dir=$(dirname $script_path) 21 | repo_dir=$(dirname $scripts_dir) 22 | root_dir=$(pwd) 23 | if [[ "$root_dir" =~ .*"serverless-plugin-datadog/scripts".* ]]; then 24 | echo "Make sure to run this script from the root $(serverless-plugin-datadog) directory, aborting" 25 | exit 1 26 | fi 27 | 28 | integration_tests_dir="$root_dir/integration_tests" 29 | if [ "$UPDATE_SNAPSHOTS" = "true" ]; then 30 | echo "Overwriting snapshots in this execution" 31 | fi 32 | 33 | yarn 34 | yarn build 35 | 36 | echo "Serverless Framework version:" 37 | serverless --version 38 | 39 | cd $integration_tests_dir 40 | RAW_CFN_TEMPLATE=".serverless/cloudformation-template-update-stack.json" 41 | for ((i = 0; i < ${#SERVERLESS_CONFIGS[@]}; i++)); do 42 | echo "Running 'sls package' with ${SERVERLESS_CONFIGS[i]}" 43 | serverless package --config ${SERVERLESS_CONFIGS[i]} 44 | # Normalize S3Key timestamps 45 | perl -p -i -e 's/("serverless\/dd-sls-plugin-integration-test\/dev\/.*\/dd-sls-plugin-integration-test.zip")/"serverless\/dd-sls-plugin-integration-test\/dev\/XXXXXXXXXXXXX-XXXX-XX-XXXXX:XX:XX.XXXX\/dd-sls-plugin-integration-test.zip"/g' ${RAW_CFN_TEMPLATE} 46 | perl -p -i -e 's/("serverless\/dd-sls-plugin-integration-test\/dev\/.*\/custom-resources.zip")/"serverless\/dd-sls-plugin-integration-test\/dev\/XXXXXXXXXXXXX-XXXX-XX-XXXXX:XX:XX.XXXX\/custom-resources.zip"/g' ${RAW_CFN_TEMPLATE} 47 | # Normalize LambdaVersion ID's 48 | perl -p -i -e 's/(LambdaVersion.*")/LambdaVersionXXXX"/g' ${RAW_CFN_TEMPLATE} 49 | # Normalize SHA256 hashes 50 | perl -p -i -e 
's/("CodeSha256":.*)/"CodeSha256": "XXXX"/g' ${RAW_CFN_TEMPLATE} 51 | # Normalize dd_sls_plugin version tag value 52 | perl -p -i -e 's/(v\d+.\d+.\d+)/vX.XX.X/g' ${RAW_CFN_TEMPLATE} 53 | # Normalize Datadog Layer Arn versions 54 | perl -p -i -e 's/(arn:aws:lambda:sa-east-1:464622532012:layer:(Datadog-(Python37|Python38|Python39|Ruby2-7|Ruby3-2|Node12-x|Node14-x|Node16-x|Node18-x|Node20-x|Extension)|dd-trace-(dotnet|java))(-ARM)?:\d+)/arn:aws:lambda:sa-east-1:464622532012:layer:\2:XXX/g' ${RAW_CFN_TEMPLATE} 55 | # Normalize API Gateway timestamps 56 | perl -p -i -e 's/("ApiGatewayDeployment.*")/"ApiGatewayDeploymentxxxx"/g' ${RAW_CFN_TEMPLATE} 57 | # Normalize layer timestamps 58 | perl -p -i -e 's/("serverless\/dd-sls-plugin-integration-test\/dev\/.*\/ProviderLevelLayer.zip")/"serverless\/dd-sls-plugin-integration-test\/dev\/XXXXXXXXXXXXX-XXXX-XX-XXXXX:XX:XX.XXXX\/ProviderLevelLayer.zip"/g' ${RAW_CFN_TEMPLATE} 59 | perl -p -i -e 's/("serverless\/dd-sls-plugin-integration-test\/dev\/.*\/FunctionLevelLayer.zip")/"serverless\/dd-sls-plugin-integration-test\/dev\/XXXXXXXXXXXXX-XXXX-XX-XXXXX:XX:XX.XXXX\/FunctionLevelLayer.zip"/g' ${RAW_CFN_TEMPLATE} 60 | cp ${RAW_CFN_TEMPLATE} ${TEST_SNAPSHOTS[i]} 61 | echo "====================================" 62 | if [ "$UPDATE_SNAPSHOTS" = "true" ]; then 63 | echo "Overriding ${CORRECT_SNAPSHOTS[i]}" 64 | cp ${TEST_SNAPSHOTS[i]} ${CORRECT_SNAPSHOTS[i]} 65 | fi 66 | 67 | echo "Performing diff of ${TEST_SNAPSHOTS[i]} against ${CORRECT_SNAPSHOTS[i]}" 68 | set +e # Dont exit right away if there is a diff in snapshots 69 | cd .. 70 | 71 | python $scripts_dir/compare_snapshots.py $integration_tests_dir/${TEST_SNAPSHOTS[i]} $integration_tests_dir/${CORRECT_SNAPSHOTS[i]} 72 | return_code=$? 
73 | set -e 74 | if [ $return_code -eq 0 ]; then 75 | echo "SUCCESS: There were no differences between the ${TEST_SNAPSHOTS[i]} and ${CORRECT_SNAPSHOTS[i]}" 76 | else 77 | echo "FAILURE: There were differences between the ${TEST_SNAPSHOTS[i]} and ${CORRECT_SNAPSHOTS[i]}. Review the diff output above." 78 | echo "If you expected the ${TEST_SNAPSHOTS[i]} to be different generate new snapshots by running this command from a development branch on your local repository: 'UPDATE_SNAPSHOTS=true ./scripts/run_integration_tests.sh'" 79 | exit 1 80 | fi 81 | cd $integration_tests_dir 82 | echo "====================================" 83 | done 84 | exit 0 85 | -------------------------------------------------------------------------------- /src/env.ts: -------------------------------------------------------------------------------- 1 | /* 2 | * Unless explicitly stated otherwise all files in this repository are licensed 3 | * under the Apache License Version 2.0. 4 | * 5 | * This product includes software developed at Datadog (https://www.datadoghq.com/). 6 | * Copyright 2021 Datadog, Inc. 7 | */ 8 | 9 | import Service from "serverless/classes/Service"; 10 | import { ExtendedFunctionDefinition, FunctionInfo, runtimeLookup, RuntimeType } from "./layer"; 11 | import { logMessage } from "./output"; 12 | 13 | export interface Configuration { 14 | // Whether Datadog is enabled. Defaults to true. 
  enabled?: boolean;
  // Whether to add the lambda library layers, or expect the user's to bring their own
  addLayers: boolean;
  // Datadog API Key, only necessary when using metrics without log forwarding
  apiKey?: string;
  // Datadog App Key used for enabling monitor configuration through plugin; separate from the apiKey that is deployed with your function
  appKey?: string;
  // Deprecated: old DATADOG_API_KEY used to deploy monitors
  monitorsApiKey?: string;
  // Deprecated: old DATADOG_APP_KEY used to deploy monitors
  monitorsAppKey?: string;
  // The ARN of the secret in AWS Secrets Manager containing the Datadog API key.
  apiKeySecretArn?: string;
  // Datadog API Key encrypted using KMS, only necessary when using metrics without log forwarding
  apiKMSKey?: string;
  // Whether to capture and store the payload and response of a lambda invocation
  captureLambdaPayload?: boolean;
  // Which Datadog site to send to (for example, datadoghq.com or datadoghq.eu)
  site: string;
  // The subdomain to use for app url links that are printed to output. Defaults to app
  subdomain: string;
  // The log level, (set to DEBUG for extended logging)
  logLevel: string | undefined;
  // Whether the log forwarder integration is enabled by default
  flushMetricsToLogs: boolean;
  // Enable tracing on Lambda functions and API Gateway integrations using X-Ray. Defaults to true
  enableXrayTracing: boolean;
  // Enable tracing on Lambda function using dd-trace, datadog's APM library.
  enableDDTracing: boolean;
  // Enable ASM on Lambda functions. Requires both the extension and dd-trace to be enabled.
  enableASM?: boolean;
  // Enable forwarding Logs
  enableDDLogs: boolean;
  // Enable profiling
  enableProfiling?: boolean;
  // Whether to add the Datadog Lambda Extension to send data without the need of the Datadog Forwarder.
  addExtension: boolean;

  // When either is set, the plugin will subscribe the lambdas to the forwarder with the given arn.
  forwarderArn?: string;
  forwarder?: string;

  // Set this to true when you are running the Serverless Plugin's integration tests or want to bypass
  // site check during manual testing. This prevents the plugin from validating the Forwarder ARN, prevents
  // the plugin from adding Datadog Monitor output links, and bypasses the site check. Defaults to false.
  testingMode?: boolean;

  // Deprecated: renamed to testingMode
  integrationTesting?: boolean;

  // When set, the plugin will try to automatically tag customers' lambda functions with service and env,
  // but will not override existing tags set on function or provider levels. Defaults to true
  enableTags: boolean;
  // When set, the lambda layer will automatically patch console.log with Datadog's tracing ids.
  injectLogContext: boolean;
  // When set, the plugin will automatically tag the function with git.commit.sha and git.repository_url.
  enableSourceCodeIntegration: boolean;
  // When set, if enableSourceCodeIntegration is true, the plugin will upload git metadata to Datadog.
  // Set this to false if you have the Datadog github integration setup (as then git metadata does not
  // need to be uploaded).
  uploadGitMetadata: boolean;

  // When set, this plugin will not try to redirect the handlers of these specified functions;
  exclude: string[];
  // When set, this plugin will configure the specified monitors for the function
  monitors?: { [id: string]: { [key: string]: any } }[];

  // When set, this plugin will fail a deployment if monitors can't be created
  failOnError: boolean;
  // API Gateway Access logging
  subscribeToAccessLogs: boolean;
  // API Gateway Execution logging - handles rest and websocket. Http not supported as of Sept.21
  subscribeToExecutionLogs: boolean;
  // Step Function logging
  subscribeToStepFunctionLogs: boolean; // deprecated in favor of enableStepFunctionsTracing
  // Skip populating the Cloudformation Outputs
  skipCloudformationOutputs: boolean;

  // When set, this plugin will configure the specified handler for the functions
  customHandler?: string;

  // Cold Start Tracing is enabled by default
  enableColdStartTracing?: boolean;
  // minimum duration to trace a module load span
  minColdStartTraceDuration?: number;
  // User specified list of libraries for Cold Start Tracing to ignore
  coldStartTraceSkipLibs?: string;

  // Whether to encode the tracing context in the lambda authorizer's response data. Default true
  encodeAuthorizerContext?: boolean;
  // Whether to parse and use the encoded tracing context from lambda authorizers. Default true
  decodeAuthorizerContext?: boolean;

  // Determine when to submit spans before a timeout occurs.
  // When the remaining time in a Lambda invocation is less than `apmFlushDeadline`, the tracer will
  // attempt to submit the current active spans and all finished spans.
  apmFlushDeadline?: string | number;

  // Whether the plugin should look for Datadog Lambda layers in the given AWS account to use
  useLayersFromAccount?: string;

  // Step Functions Tracing
  enableStepFunctionsTracing?: boolean;
  mergeStepFunctionAndLambdaTraces?: boolean;
  propagateTraceContext?: boolean; // Deprecated: This was added by mistake. Use propagateUpstreamTrace instead.
  propagateUpstreamTrace?: boolean;

  // Disables handler redirection when set to false.
  // Used for testing or for someone exclusively forwarding logs
  // or including the library only for metrics.
  redirectHandlers?: boolean;

  // When set to `true`, a FIPS-compliant lambda extension layer will be used.
  // Only works if `addExtension` is `true`.
  isFIPSEnabled?: boolean;
}

// Plugin name used to detect whether serverless-webpack is active in the service.
const webpackPluginName = "serverless-webpack";

// Environment variable names consumed by the Datadog Lambda layer / extension.
const apiKeyEnvVar = "DD_API_KEY";
const apiKeyKMSEnvVar = "DD_KMS_API_KEY";
const apiKeySecretArnEnvVar = "DD_API_KEY_SECRET_ARN";
const siteURLEnvVar = "DD_SITE";
const logLevelEnvVar = "DD_LOG_LEVEL";
const logForwardingEnvVar = "DD_FLUSH_TO_LOG";
const ddTracingEnabledEnvVar = "DD_TRACE_ENABLED";
const ddASMEnabledEnvVar = "DD_SERVERLESS_APPSEC_ENABLED";
const ddMergeXrayTracesEnvVar = "DD_MERGE_XRAY_TRACES";
const logInjectionEnvVar = "DD_LOGS_INJECTION";
const ddLogsEnabledEnvVar = "DD_SERVERLESS_LOGS_ENABLED";
const ddCaptureLambdaPayloadEnvVar = "DD_CAPTURE_LAMBDA_PAYLOAD";
const ddColdStartTracingEnabledEnvVar = "DD_COLD_START_TRACING";
const ddMinColdStartDurationEnvVar = "DD_MIN_COLD_START_DURATION";
const ddColdStartTracingSkipLibsEnvVar = "DD_COLD_START_TRACE_SKIP_LIB";
const ddProfilingEnabledEnvVar = "DD_PROFILING_ENABLED";
const ddEncodeAuthorizerContextEnvVar = "DD_ENCODE_AUTHORIZER_CONTEXT";
const ddDecodeAuthorizerContextEnvVar = "DD_DECODE_AUTHORIZER_CONTEXT";
const ddApmFlushDeadlineMillisecondsEnvVar = "DD_APM_FLUSH_DEADLINE_MILLISECONDS";
const ddUseLayersFromAccount = "DD_USE_LAYERS_FROM_ACCOUNT";

// Unified service tagging variables, exported so other modules can reuse them.
export const ddServiceEnvVar = "DD_SERVICE";
export const ddEnvEnvVar = "DD_ENV";
export const ddVersionEnvVar = "DD_VERSION";
export const ddTagsEnvVar = "DD_TAGS";

// The Universal instrumentation wrapper which configures several env variables.
159 | // Currently it is only used for Java and .NET 160 | const AWS_LAMBDA_EXEC_WRAPPER_VAR = "AWS_LAMBDA_EXEC_WRAPPER"; 161 | const AWS_LAMBDA_EXEC_WRAPPER = "/opt/datadog_wrapper"; 162 | 163 | export const defaultConfiguration: Configuration = { 164 | addLayers: true, 165 | flushMetricsToLogs: true, 166 | logLevel: undefined, 167 | site: "datadoghq.com", 168 | subdomain: "app", 169 | enableXrayTracing: false, 170 | enableDDTracing: true, 171 | addExtension: true, 172 | enableTags: true, 173 | injectLogContext: true, 174 | enableSourceCodeIntegration: true, 175 | uploadGitMetadata: true, 176 | exclude: [], 177 | testingMode: false, 178 | integrationTesting: false, 179 | subscribeToAccessLogs: true, 180 | subscribeToExecutionLogs: false, 181 | subscribeToStepFunctionLogs: false, 182 | enableDDLogs: true, 183 | captureLambdaPayload: false, 184 | failOnError: false, 185 | skipCloudformationOutputs: false, 186 | mergeStepFunctionAndLambdaTraces: false, 187 | propagateTraceContext: false, 188 | propagateUpstreamTrace: false, 189 | enableStepFunctionsTracing: false, 190 | redirectHandlers: true, 191 | }; 192 | 193 | export function setEnvConfiguration(config: Configuration, handlers: FunctionInfo[]): void { 194 | handlers.forEach(({ handler, type }) => { 195 | handler.environment ??= {}; 196 | const environment = handler.environment as any; 197 | const functionName = handler.name ?? ""; 198 | if ( 199 | process.env.DATADOG_API_KEY !== undefined && 200 | environment[apiKeyEnvVar] === undefined && 201 | // Only set this from the environment if all other methods of authentication 202 | // are not in use. This will set DATADOG_API_KEY on the lambda from the environment 203 | // variable directly if they haven't set one of the below three options 204 | // in the configuration. 
205 | config.apiKMSKey === undefined && 206 | config.apiKey === undefined && 207 | config.apiKeySecretArn === undefined 208 | ) { 209 | environment[apiKeyEnvVar] = process.env.DATADOG_API_KEY; 210 | logMessage("Using DATADOG_API_KEY environment variable for authentication"); 211 | } 212 | if (config.apiKey !== undefined && environment[apiKeyEnvVar] === undefined) { 213 | environment[apiKeyEnvVar] = config.apiKey; 214 | } 215 | if (config.apiKMSKey !== undefined && environment[apiKeyKMSEnvVar] === undefined) { 216 | environment[apiKeyKMSEnvVar] = config.apiKMSKey; 217 | } 218 | if (config.apiKeySecretArn !== undefined && environment[apiKeySecretArnEnvVar] === undefined) { 219 | const isNode = runtimeLookup[handler.runtime!] === RuntimeType.NODE; 220 | const isSendingSynchronousMetrics = !config.addExtension && !config.flushMetricsToLogs; 221 | if (isSendingSynchronousMetrics && isNode) { 222 | throw new Error( 223 | "`apiKeySecretArn` is not supported for Node runtimes when using Synchronous Metrics. 
Set DATADOG_API_KEY in your environment, or use `apiKmsKey` in the configuration.", 224 | ); 225 | } 226 | environment[apiKeySecretArnEnvVar] = config.apiKeySecretArn; 227 | } 228 | if (environment[siteURLEnvVar] === undefined) { 229 | environment[siteURLEnvVar] = config.site; 230 | } 231 | if (environment[logLevelEnvVar] === undefined) { 232 | environment[logLevelEnvVar] = config.logLevel; 233 | } 234 | if (environment[logForwardingEnvVar] === undefined && config.addExtension === false) { 235 | environment[logForwardingEnvVar] = config.flushMetricsToLogs; 236 | } 237 | if (config.enableDDTracing !== undefined && environment[ddTracingEnabledEnvVar] === undefined) { 238 | environment[ddTracingEnabledEnvVar] = config.enableDDTracing; 239 | } 240 | if (config.enableASM !== undefined && config.enableASM) { 241 | if ((config.enableASM && !config.enableDDTracing) || (config.enableASM && !config.addExtension)) { 242 | throw new Error("`enableASM` requires the extension to be present, and `enableDDTracing` to be enabled"); 243 | } 244 | environment[AWS_LAMBDA_EXEC_WRAPPER_VAR] ??= AWS_LAMBDA_EXEC_WRAPPER; 245 | environment[ddASMEnabledEnvVar] ??= config.enableASM; 246 | } 247 | if (config.enableXrayTracing !== undefined && environment[ddMergeXrayTracesEnvVar] === undefined) { 248 | environment[ddMergeXrayTracesEnvVar] = config.enableXrayTracing; 249 | } 250 | if (config.addExtension) { 251 | environment[logInjectionEnvVar] = false; 252 | } else if (config.injectLogContext !== undefined && environment[logInjectionEnvVar] === undefined) { 253 | environment[logInjectionEnvVar] = config.injectLogContext; 254 | } 255 | if (config.enableDDLogs !== undefined && environment[ddLogsEnabledEnvVar] === undefined) { 256 | environment[ddLogsEnabledEnvVar] = config.enableDDLogs; 257 | } 258 | if (environment[ddCaptureLambdaPayloadEnvVar] === undefined) { 259 | environment[ddCaptureLambdaPayloadEnvVar] = config.captureLambdaPayload; 260 | } 261 | if (config.enableColdStartTracing !== 
undefined && environment[ddColdStartTracingEnabledEnvVar] === undefined) { 262 | environment[ddColdStartTracingEnabledEnvVar] = config.enableColdStartTracing; 263 | } 264 | if (config.minColdStartTraceDuration !== undefined && environment[ddMinColdStartDurationEnvVar] === undefined) { 265 | environment[ddMinColdStartDurationEnvVar] = config.minColdStartTraceDuration; 266 | } 267 | if (config.coldStartTraceSkipLibs !== undefined && environment[ddColdStartTracingSkipLibsEnvVar] === undefined) { 268 | environment[ddColdStartTracingSkipLibsEnvVar] = config.coldStartTraceSkipLibs; 269 | } 270 | if (config.enableProfiling !== undefined && environment[ddProfilingEnabledEnvVar] === undefined) { 271 | environment[ddProfilingEnabledEnvVar] = config.enableProfiling; 272 | } 273 | if (config.encodeAuthorizerContext !== undefined && environment[ddEncodeAuthorizerContextEnvVar] === undefined) { 274 | environment[ddEncodeAuthorizerContextEnvVar] = config.encodeAuthorizerContext; 275 | } 276 | if (config.decodeAuthorizerContext !== undefined && environment[ddDecodeAuthorizerContextEnvVar] === undefined) { 277 | environment[ddDecodeAuthorizerContextEnvVar] = config.decodeAuthorizerContext; 278 | } 279 | if (config.apmFlushDeadline !== undefined && environment[ddApmFlushDeadlineMillisecondsEnvVar] === undefined) { 280 | environment[ddApmFlushDeadlineMillisecondsEnvVar] = config.apmFlushDeadline; 281 | } 282 | if (config.useLayersFromAccount !== undefined && environment[ddUseLayersFromAccount] === undefined) { 283 | environment[ddUseLayersFromAccount] = config.useLayersFromAccount; 284 | } 285 | if (type === RuntimeType.DOTNET || type === RuntimeType.JAVA) { 286 | if (environment[AWS_LAMBDA_EXEC_WRAPPER_VAR] === undefined) { 287 | environment[AWS_LAMBDA_EXEC_WRAPPER_VAR] = AWS_LAMBDA_EXEC_WRAPPER; 288 | } else if (environment[AWS_LAMBDA_EXEC_WRAPPER_VAR] !== AWS_LAMBDA_EXEC_WRAPPER) { 289 | throwEnvVariableError("AWS_LAMBDA_EXEC_WRAPPER", AWS_LAMBDA_EXEC_WRAPPER, functionName); 290 | 
} 291 | } 292 | }); 293 | } 294 | 295 | export function setSourceCodeIntegrationEnvVar( 296 | handler: ExtendedFunctionDefinition, 297 | gitHash: string, 298 | gitRemote: string, 299 | ): void { 300 | handler.environment ??= {}; 301 | if (handler.environment[ddTagsEnvVar] !== undefined) { 302 | handler.environment[ddTagsEnvVar] += `,`; 303 | } else { 304 | handler.environment[ddTagsEnvVar] = ""; 305 | } 306 | handler.environment[ddTagsEnvVar] += `git.commit.sha:${gitHash},git.repository_url:${gitRemote}`; 307 | } 308 | 309 | function throwEnvVariableError(variable: string, value: string, functionName: string): void { 310 | throw new Error(`Environment variable ${variable} should be set to ${value} for function ${functionName}`); 311 | } 312 | 313 | export function getConfig(service: Service): Configuration { 314 | let custom = service.custom as any; 315 | if (custom === undefined) { 316 | custom = {}; 317 | } 318 | 319 | let datadog = custom.datadog as Partial | undefined; 320 | if (datadog === undefined) { 321 | datadog = {}; 322 | } 323 | 324 | // These values are deprecated but will supersede everything if set 325 | if (custom?.datadog?.monitorsApiKey) { 326 | datadog.apiKey = custom?.datadog?.monitorsApiKey ?? datadog.apiKey; 327 | } 328 | 329 | if (custom?.datadog?.monitorsAppKey) { 330 | datadog.appKey = custom?.datadog?.monitorsAppKey ?? 
datadog.appKey; 331 | } 332 | 333 | const config: Configuration = { 334 | ...defaultConfiguration, 335 | ...datadog, 336 | }; 337 | 338 | return config; 339 | } 340 | 341 | export function forceExcludeDepsFromWebpack(service: Service): void { 342 | const includeModules = getPropertyFromPath(service, ["custom", "webpack", "includeModules"]); 343 | if (includeModules === undefined) { 344 | return; 345 | } 346 | let forceExclude = includeModules.forceExclude as string[] | undefined; 347 | if (forceExclude === undefined) { 348 | forceExclude = []; 349 | includeModules.forceExclude = forceExclude; 350 | } 351 | if (!forceExclude.includes("datadog-lambda-js")) { 352 | forceExclude.push("datadog-lambda-js"); 353 | } 354 | if (!forceExclude.includes("dd-trace")) { 355 | forceExclude.push("dd-trace"); 356 | } 357 | } 358 | 359 | function getPropertyFromPath(obj: any, path: string[]): any { 360 | for (const part of path) { 361 | let prop = obj[part]; 362 | if (prop === undefined || prop === true) { 363 | prop = {}; 364 | obj[part] = prop; 365 | } 366 | if (prop === false) { 367 | return; 368 | } 369 | obj = prop; 370 | } 371 | return obj; 372 | } 373 | 374 | export function hasWebpackPlugin(service: Service): boolean { 375 | const plugins: string[] | undefined = (service as any).plugins; 376 | if (plugins === undefined) { 377 | return false; 378 | } 379 | if (Array.isArray(plugins)) { 380 | // We have a normal plugin array 381 | return plugins.find((plugin) => plugin === webpackPluginName) !== undefined; 382 | } 383 | // We have an enhanced plugins object 384 | const modules: string[] | undefined = (service as any).plugins.modules; 385 | if (modules === undefined) { 386 | return false; 387 | } 388 | return modules.find((plugin) => plugin === webpackPluginName) !== undefined; 389 | } 390 | -------------------------------------------------------------------------------- /src/forwarder.ts: -------------------------------------------------------------------------------- 1 | 
import Service from "serverless/classes/Service"; 2 | import { FunctionInfo } from "./layer"; 3 | import { version } from "../package.json"; 4 | import Aws = require("serverless/plugins/aws/provider/awsProvider"); 5 | 6 | const logGroupKey = "AWS::Logs::LogGroup"; 7 | const logGroupSubscriptionKey = "AWS::Logs::SubscriptionFilter"; 8 | const maxAllowableLogGroupSubscriptions: number = 2; 9 | 10 | class DatadogForwarderNotFoundError extends Error { 11 | constructor(message: string) { 12 | super(...message); 13 | this.name = "DatadogForwarderNotFoundError"; 14 | this.message = message; 15 | } 16 | } 17 | 18 | interface LogGroupResource { 19 | Type: typeof logGroupKey; 20 | Properties: { 21 | LogGroupName: string; 22 | }; 23 | } 24 | 25 | interface ForwarderConfigs { 26 | AddExtension: boolean; 27 | TestingMode: boolean | undefined; 28 | IntegrationTesting: boolean | undefined; 29 | SubToAccessLogGroups: boolean; 30 | SubToExecutionLogGroups: boolean; 31 | } 32 | interface SubscriptionFilter { 33 | creationTime: number; 34 | destinationArn: string; 35 | distribution: string; 36 | filterName: string; 37 | filterPattern: string; 38 | logGroupName: string; 39 | roleArn: string; 40 | } 41 | interface DescribeSubscriptionFiltersResponse { 42 | subscriptionFilters: SubscriptionFilter[]; 43 | } 44 | 45 | type SubLogsConfig = 46 | | boolean 47 | | { 48 | accessLogging: boolean | undefined; 49 | executionLogging: boolean | undefined; 50 | } 51 | | undefined; 52 | 53 | type LogsConfig = 54 | | { 55 | restApi: SubLogsConfig; 56 | httpApi: SubLogsConfig; 57 | websocket: SubLogsConfig; 58 | } 59 | | undefined; 60 | 61 | const REST_EXECUTION_LOG_GROUP_KEY = "RestExecutionLogGroup"; 62 | const REST_EXECUTION_SUBSCRIPTION_KEY = "RestExecutionLogGroupSubscription"; 63 | const WEBSOCKETS_EXECUTION_LOG_GROUP_KEY = "WebsocketsExecutionLogGroup"; 64 | const WEBSOCKETS_EXECUTION_SUBSCRIPTION_KEY = "WebsocketsExecutionLogGroupSubscription"; 65 | 66 | // When users define ARN with 
CloudFormation functions, the ARN takes this type instead of a string.
export interface CloudFormationObjectArn {
  "Fn::Sub"?: string;
  "arn:aws"?: string;
}

// Type guard for AWS::Logs::LogGroup resources in the compiled template.
function isLogGroup(value: any): value is LogGroupResource {
  return value.Type === logGroupKey;
}

/**
 * Validates whether Lambda forwarder exists in the account
 * @param aws Serverless framework provided AWS client
 * @param functionArn The forwarder ARN to be validated
 * @throws DatadogForwarderNotFoundError when GetFunction fails for any reason.
 */
async function validateForwarderArn(aws: Aws, functionArn: CloudFormationObjectArn | string): Promise<void> {
  try {
    await aws.request("Lambda", "getFunction", { FunctionName: functionArn });
  } catch (err) {
    throw new DatadogForwarderNotFoundError(`Could not perform GetFunction on ${functionArn}.`);
  }
}

/**
 * Creates API Gateway execution log groups (REST and WebSocket) and subscribes
 * the forwarder to them, based on the provider-level `logs` configuration.
 */
export async function addExecutionLogGroupsAndSubscriptions(
  service: Service,
  aws: Aws,
  functionArn: CloudFormationObjectArn | string,
): Promise<void> {
  const extendedProvider = (service.provider as any)?.logs;

  if (!isLogsConfig(extendedProvider)) {
    return;
  }

  // NOTE(review): assumes the compiled template exists by the time this runs;
  // `resources` would be undefined (and indexing would throw) if called earlier.
  const resources = service.provider.compiledCloudFormationTemplate?.Resources;
  if (restExecutionLoggingIsEnabled(extendedProvider)) {
    // create log group
    const logGroupName = await createRestExecutionLogGroupName(aws);
    const executionLogGroupName = addExecutionLogGroup(logGroupName);
    resources[REST_EXECUTION_LOG_GROUP_KEY] = executionLogGroupName;
    // add subscription
    const executionSubscription = subscribeToExecutionLogGroup(functionArn, REST_EXECUTION_LOG_GROUP_KEY);
    resources[REST_EXECUTION_SUBSCRIPTION_KEY] = executionSubscription;
  }

  if (websocketExecutionLoggingIsEnabled(extendedProvider)) {
    // create log group
    const logGroupName = await createWebsocketExecutionLogGroupName(aws);
    const executionLogGroupName = addExecutionLogGroup(logGroupName);
    // add subscription
    resources[WEBSOCKETS_EXECUTION_LOG_GROUP_KEY] = executionLogGroupName;
    const executionSubscription = subscribeToExecutionLogGroup(functionArn, WEBSOCKETS_EXECUTION_LOG_GROUP_KEY);
    resources[WEBSOCKETS_EXECUTION_SUBSCRIPTION_KEY] = executionSubscription;
  }
}

/**
 * Creates a vended log group for a step function and points the step function's
 * loggingConfig at it. IAM policy creation for the log group is handled by the
 * serverless-step-functions plugin.
 */
export async function addStepFunctionLogGroup(aws: Aws, resources: any, stepFunction: any): Promise<void> {
  const stepFunctionName = stepFunction.name;
  const logGroupName = `/aws/vendedlogs/states/${stepFunctionName}-Logs-${aws.getStage()}`;
  const logGroupResourceName = `${normalizeResourceName(stepFunctionName)}LogGroup`;

  // create log group and add it to compiled CloudFormation template
  resources[logGroupResourceName] = {
    Type: logGroupKey,
    Properties: {
      LogGroupName: logGroupName,
      Tags: [{ Key: "dd_sls_plugin", Value: `v${version}` }],
    },
  };

  // add logging config to step function in serverless.yaml using newly created log group
  // the serverless-step-functions plugin handles the IAM policy creation for the adding logs to the log group
  stepFunction.loggingConfig = {
    level: "ALL",
    includeExecutionData: true,
    destinations: [{ "Fn::GetAtt": [logGroupResourceName, "Arn"] }],
  };
}

// Tags a state machine resource with the plugin version (no-op if Tags is absent).
export function addDdSlsPluginTag(stateMachineObj: any): void {
  stateMachineObj.Properties?.Tags?.push({
    Key: "dd_sls_plugin",
    Value: `v${version}`,
  });
}

// Tags a state machine with DD_TRACE_ENABLED when step functions tracing is on.
export function addDdTraceEnabledTag(stateMachineObj: any, enableStepFunctionsTracing: undefined | boolean): void {
  if (!enableStepFunctionsTracing) {
    return;
  }
  stateMachineObj.Properties?.Tags?.push({
    Key: "DD_TRACE_ENABLED",
    Value: `true`,
  });
}

/**
 * Subscribes the forwarder to a step function's log group, deriving the log
 * group name from the ARN stored in the step function's loggingConfig.
 */
export async function addStepFunctionLogGroupSubscription(
  resources: any,
  stepFunction: any,
  functionArn: CloudFormationObjectArn | string,
): Promise<void> {
  const logGroupSubscriptionResourceName = `${normalizeResourceName(stepFunction.name)}LogGroupSubscription`;

  // parse log group name out of arn in logging config destination
  resources[logGroupSubscriptionResourceName] = {
    Type: logGroupSubscriptionKey,
    Properties: {
      DestinationArn: functionArn,
      FilterPattern: "",
      LogGroupName: {
        "Fn::Select": [
          6,
          {
            "Fn::Split": [":", stepFunction.loggingConfig.destinations[0]],
          },
        ],
      },
    },
  };
}

/**
 * Subscribes the Datadog Forwarder to every eligible log group in the compiled
 * CloudFormation template.
 *
 * @returns A list of human-readable warnings for groups that were skipped;
 *          empty when every eligible group was subscribed.
 * @throws DatadogForwarderNotFoundError when ARN validation fails.
 */
export async function addCloudWatchForwarderSubscriptions(
  service: Service,
  aws: Aws,
  functionArn: CloudFormationObjectArn | string,
  forwarderConfigs: ForwarderConfigs,
  handlers: FunctionInfo[],
): Promise<string[]> {
  const resources = service.provider.compiledCloudFormationTemplate?.Resources;
  if (resources === undefined) {
    return ["No cloudformation stack available. Skipping subscribing Datadog forwarder."];
  }
  const errors: string[] = [];
  if (typeof functionArn !== "string") {
    errors.push("Skipping forwarder ARN validation because forwarder string defined with CloudFormation function.");
  } else if (forwarderConfigs.TestingMode === true || forwarderConfigs.IntegrationTesting === true) {
    errors.push("Skipping forwarder ARN validation because 'testingMode' is set to true");
  } else {
    await validateForwarderArn(aws, functionArn);
  }
  for (const [name, resource] of Object.entries(resources)) {
    if (!shouldSubscribe(name, resource, forwarderConfigs, handlers, service)) {
      continue;
    }
    const logGroupName = (resource as any).Properties.LogGroupName;
    const scopedSubName = `${name}Subscription`;

    // Subscription filter names are prefixed with the stack name when available.
    let expectedSubName = `${service.getServiceName()}-${aws.getStage()}-${scopedSubName}-`;

    const stackName = aws.naming.getStackName();
    if (stackName) {
      expectedSubName = `${stackName}-${scopedSubName}-`;
    }

    const canSub = await canSubscribeLogGroup(aws, logGroupName, expectedSubName);
    if (!canSub) {
      errors.push(
        `Could not subscribe Datadog Forwarder due to too many existing subscription filter(s) for ${logGroupName}.`,
      );
      continue;
    }
    // Create subscriptions for each log group
    const subscription = subscribeToLogGroup(functionArn, name);
    resources[scopedSubName] = subscription;
  }

  return errors;
}

/**
 * True when the log group either already carries a Datadog subscription filter
 * (ours will replace it) or still has room under the AWS filter limit.
 */
export async function canSubscribeLogGroup(aws: Aws, logGroupName: string, expectedSubName: string): Promise<boolean> {
  const subscriptionFilters = await describeSubscriptionFilters(aws, logGroupName);
  const numberOfActiveSubscriptionFilters: number = subscriptionFilters.length;
  let foundDatadogSubscriptionFilter: boolean = false;
  for (const subscription of subscriptionFilters) {
    const filterName = subscription.filterName;
if (filterName.startsWith(expectedSubName)) { 242 | foundDatadogSubscriptionFilter = true; 243 | } 244 | } 245 | if (!foundDatadogSubscriptionFilter && numberOfActiveSubscriptionFilters >= maxAllowableLogGroupSubscriptions) { 246 | return false; 247 | } else { 248 | return true; 249 | } 250 | } 251 | 252 | export async function describeSubscriptionFilters(aws: Aws, logGroupName: string): Promise { 253 | try { 254 | const result: DescribeSubscriptionFiltersResponse = await aws.request( 255 | "CloudWatchLogs", 256 | "describeSubscriptionFilters", 257 | { 258 | logGroupName, 259 | }, 260 | ); 261 | return result.subscriptionFilters; 262 | } catch (err) { 263 | // An error will occur if the log group doesn't exist, so we swallow this and return an empty list. 264 | return []; 265 | } 266 | } 267 | 268 | // Helper functions to validate we have a particular log group and if we should subscribe to it 269 | function validateRestApiSubscription(resource: any, subscribe: boolean, extendedProvider: any): boolean { 270 | return ( 271 | restAccessLoggingIsEnabled(extendedProvider) && 272 | resource.Properties.LogGroupName.startsWith("/aws/api-gateway/") && 273 | subscribe 274 | ); 275 | } 276 | function validateHttpApiSubscription(resource: any, subscribe: boolean, extendedProvider: any): boolean { 277 | return ( 278 | httpAccessLoggingIsEnabled(extendedProvider) && 279 | resource.Properties.LogGroupName.startsWith("/aws/http-api/") && 280 | subscribe 281 | ); 282 | } 283 | function validateWebsocketSubscription(resource: any, subscribe: boolean, extendedProvider: any): boolean { 284 | return ( 285 | websocketAccessLoggingIsEnabled(extendedProvider) && 286 | resource.Properties.LogGroupName.startsWith("/aws/websocket/") && 287 | subscribe 288 | ); 289 | } 290 | 291 | function shouldSubscribe( 292 | resourceName: string, 293 | resource: any, 294 | forwarderConfigs: ForwarderConfigs, 295 | handlers: FunctionInfo[], 296 | service: Service, 297 | ): boolean { 298 | const 
extendedProvider = (service.provider as any)?.logs; 299 | if (!isLogGroup(resource)) { 300 | return false; 301 | } 302 | // we don't want to run the shouldSubscribe validation on execution log groups since we manually add those. 303 | if (typeof resource.Properties.LogGroupName !== "string") { 304 | return false; 305 | } 306 | /* 307 | Step function log groups created as custom resources in serverless.yml need to be subscribed to using the log group in 308 | the step function loggingConfig since custom resources are not in the complied cloudformation template until a later lifecycle event. 309 | 310 | Step function log groups created outside of serverless.yml need to be subscribed to using the log group in 311 | the step function loggingConfig since these log groups will never be in the compiled cloudformation template. 312 | 313 | Step function log groups created by this plugin are also subscribed to using the log group in the step function loggingConfig 314 | for consistency with step function log groups created with the above methods. 
315 | */ 316 | if (resource.Properties.LogGroupName.startsWith("/aws/vendedlogs/states/")) { 317 | return false; 318 | } 319 | // if the extension is enabled, we don't want to subscribe to lambda log groups 320 | if ( 321 | forwarderConfigs.AddExtension && 322 | !( 323 | validateRestApiSubscription(resource, forwarderConfigs.SubToAccessLogGroups, extendedProvider) || 324 | validateHttpApiSubscription(resource, forwarderConfigs.SubToAccessLogGroups, extendedProvider) || 325 | validateWebsocketSubscription(resource, forwarderConfigs.SubToAccessLogGroups, extendedProvider) 326 | ) 327 | ) { 328 | return false; 329 | } 330 | // if the extension is disabled, we should subscribe to lambda log groups 331 | if ( 332 | !( 333 | resource.Properties.LogGroupName.startsWith("/aws/lambda/") || 334 | validateRestApiSubscription(resource, forwarderConfigs.SubToAccessLogGroups, extendedProvider) || 335 | validateHttpApiSubscription(resource, forwarderConfigs.SubToAccessLogGroups, extendedProvider) || 336 | validateWebsocketSubscription(resource, forwarderConfigs.SubToAccessLogGroups, extendedProvider) 337 | ) 338 | ) { 339 | return true; 340 | } 341 | 342 | // If the log group does not belong to our list of handlers, we don't want to subscribe to it 343 | if ( 344 | resource.Properties.LogGroupName.startsWith("/aws/lambda/") && 345 | !handlers.some(({ name }) => getLogGroupLogicalId(name) === resourceName) 346 | ) { 347 | return false; 348 | } 349 | 350 | return true; 351 | } 352 | 353 | function subscribeToLogGroup(functionArn: string | CloudFormationObjectArn, name: string) { 354 | const subscription = { 355 | Type: logGroupSubscriptionKey, 356 | Properties: { 357 | DestinationArn: functionArn, 358 | FilterPattern: "", 359 | LogGroupName: { Ref: name }, 360 | }, 361 | }; 362 | return subscription; 363 | } 364 | 365 | async function createRestExecutionLogGroupName(aws: Aws) { 366 | return { 367 | "Fn::Join": ["", ["API-Gateway-Execution-Logs_", { Ref: "ApiGatewayRestApi" }, "/", 
aws.getStage()]], 368 | }; 369 | } 370 | 371 | async function createWebsocketExecutionLogGroupName(aws: Aws) { 372 | return { 373 | "Fn::Join": ["", ["/aws/apigateway/", { Ref: "WebsocketsApi" }, "/", aws.getStage()]], 374 | }; 375 | } 376 | 377 | function addExecutionLogGroup(logGroupName: any) { 378 | // Create the Execution log group for API Gateway REST logging manually 379 | const executionLogGroup = { 380 | Type: "AWS::Logs::LogGroup", 381 | Properties: { 382 | LogGroupName: logGroupName, 383 | }, 384 | }; 385 | return executionLogGroup; 386 | } 387 | 388 | function subscribeToExecutionLogGroup(functionArn: string | CloudFormationObjectArn, executionLogGroupKey: string) { 389 | const executionSubscription = { 390 | Type: logGroupSubscriptionKey, 391 | Properties: { 392 | DestinationArn: functionArn, 393 | FilterPattern: "", 394 | LogGroupName: { Ref: executionLogGroupKey }, 395 | }, 396 | }; 397 | return executionSubscription; 398 | } 399 | 400 | export function isLogsConfig(obj: any): obj is LogsConfig { 401 | if (typeof obj !== "object") { 402 | return false; 403 | } 404 | 405 | if (obj.hasOwnProperty("restApi")) { 406 | if (!isSubLogsConfig(obj.restApi)) { 407 | return false; 408 | } 409 | } 410 | 411 | if (obj.hasOwnProperty("httpApi")) { 412 | if (!isSubLogsConfig(obj.httpApi)) { 413 | return false; 414 | } 415 | } 416 | 417 | if (obj.hasOwnProperty("websocket")) { 418 | if (!isSubLogsConfig(obj.websocket)) { 419 | return false; 420 | } 421 | } 422 | return true; 423 | } 424 | 425 | function isSubLogsConfig(obj: any): obj is SubLogsConfig { 426 | if (typeof obj === "boolean") { 427 | return true; 428 | } 429 | if (typeof obj !== "object") { 430 | return false; 431 | } 432 | if (obj.hasOwnProperty("accessLogging")) { 433 | if (typeof obj.accessLogging !== "boolean" && typeof obj.accessLogging !== undefined) { 434 | return false; 435 | } 436 | } 437 | if (obj.hasOwnProperty("executionLogging")) { 438 | if (typeof obj.executionLogging !== "boolean" && 
typeof obj.executionLogging !== undefined) { 439 | return false; 440 | } 441 | } 442 | return true; 443 | } 444 | 445 | function restAccessLoggingIsEnabled(obj: LogsConfig): boolean { 446 | if (obj?.restApi === false) { 447 | return false; 448 | } 449 | return obj?.restApi === true || obj?.restApi?.accessLogging === true; 450 | } 451 | function restExecutionLoggingIsEnabled(obj: LogsConfig): boolean { 452 | if (obj?.restApi === false) { 453 | return false; 454 | } 455 | return obj?.restApi === true || obj?.restApi?.executionLogging === true; 456 | } 457 | function httpAccessLoggingIsEnabled(obj: LogsConfig): boolean { 458 | if (obj?.httpApi === false) { 459 | return false; 460 | } 461 | return obj?.httpApi === true || obj?.httpApi?.accessLogging === true; 462 | } 463 | 464 | function websocketAccessLoggingIsEnabled(obj: LogsConfig): boolean { 465 | if (obj?.websocket === false) { 466 | return false; 467 | } 468 | return obj?.websocket === true || obj?.websocket?.accessLogging === true; 469 | } 470 | 471 | function websocketExecutionLoggingIsEnabled(obj: LogsConfig): boolean { 472 | if (obj?.websocket === false) { 473 | return false; 474 | } 475 | return obj?.websocket === true || obj?.websocket?.executionLogging === true; 476 | } 477 | 478 | // Created from https://github.com/serverless/serverless/blob/master/lib/plugins/aws/lib/naming.js#L125-L127 479 | // Skipped lodash because Lambda Function Names can't include unicode chars or symbols 480 | function getLogGroupLogicalId(functionName: string): string { 481 | if (!functionName) { 482 | return ""; 483 | } 484 | const uppercasedFirst = functionName[0].toUpperCase(); 485 | const rest = functionName.slice(1); 486 | const upperCasedFunctionName = uppercasedFirst + rest; 487 | const normalizedFunctionName = upperCasedFunctionName.replace(/-/g, "Dash").replace(/_/g, "Underscore"); 488 | return `${normalizedFunctionName}LogGroup`; 489 | } 490 | 491 | // Resource names in CloudFormation Templates can only have 
alphanumeric characters 492 | function normalizeResourceName(resourceName: string): string { 493 | return resourceName.replace(/[^0-9a-z]/gi, ""); 494 | } 495 | -------------------------------------------------------------------------------- /src/git.ts: -------------------------------------------------------------------------------- 1 | import * as simpleGit from "simple-git"; 2 | 3 | // Returns a configured SimpleGit. 4 | export const newSimpleGit = async (): Promise => { 5 | const options = { 6 | baseDir: process.cwd(), 7 | binary: "git", 8 | maxConcurrentProcesses: 1, 9 | }; 10 | try { 11 | // Attempt to set the baseDir to the root of the repository so the 'git ls-files' command 12 | // returns the tracked files paths relative to the root of the repository. 13 | const git = simpleGit.gitP(options); 14 | const root = await git.revparse("--show-toplevel"); 15 | options.baseDir = root; 16 | } catch { 17 | return undefined; 18 | } 19 | 20 | return simpleGit.gitP(options); 21 | }; 22 | -------------------------------------------------------------------------------- /src/layer.ts: -------------------------------------------------------------------------------- 1 | /* 2 | * Unless explicitly stated otherwise all files in this repository are licensed 3 | * under the Apache License Version 2.0. 4 | * 5 | * This product includes software developed at Datadog (https://www.datadoghq.com/). 6 | * Copyright 2021 Datadog, Inc. 
7 | */ 8 | import { FunctionDefinition, FunctionDefinitionHandler } from "serverless"; 9 | import Service from "serverless/classes/Service"; 10 | import { Configuration } from "./env"; 11 | 12 | export enum RuntimeType { 13 | NODE = "node", 14 | PYTHON = "python", 15 | DOTNET = "dotnet", 16 | CUSTOM = "custom", 17 | JAVA = "java", 18 | RUBY = "ruby", 19 | GO = "go", 20 | UNSUPPORTED = "unsupported", 21 | } 22 | 23 | // .NET can only be used with the extension and Java requires 24 | // some code changes 25 | const RUNTIMES_TO_ADD_FOR_EXTENSION_ONLY = [RuntimeType.DOTNET, RuntimeType.JAVA]; 26 | 27 | export interface FunctionInfo { 28 | name: string; 29 | type: RuntimeType; 30 | handler: ExtendedFunctionDefinition; 31 | runtime?: string; 32 | } 33 | 34 | const X86_64_ARCHITECTURE = "x86_64"; 35 | const ARM64_ARCHITECTURE = "arm64"; 36 | const DEFAULT_ARCHITECTURE = X86_64_ARCHITECTURE; 37 | 38 | const DEFAULT_REGION = "us-east-1"; 39 | 40 | const US_GOV_REGION_PREFIX = "us-gov-"; 41 | 42 | // Separate interface since DefinitelyTyped currently doesn't include tags or env 43 | export interface ExtendedFunctionDefinition extends FunctionDefinition { 44 | architecture?: string; 45 | } 46 | 47 | export interface LayerJSON { 48 | regions: { 49 | [region: string]: 50 | | { 51 | [runtime: string]: string | undefined; 52 | } 53 | | undefined; 54 | }; 55 | } 56 | 57 | export const runtimeLookup: { [key: string]: RuntimeType } = { 58 | "nodejs16.x": RuntimeType.NODE, 59 | "nodejs18.x": RuntimeType.NODE, 60 | "nodejs20.x": RuntimeType.NODE, 61 | "nodejs22.x": RuntimeType.NODE, 62 | "python3.7": RuntimeType.PYTHON, 63 | "python3.8": RuntimeType.PYTHON, 64 | "python3.9": RuntimeType.PYTHON, 65 | "python3.10": RuntimeType.PYTHON, 66 | "python3.11": RuntimeType.PYTHON, 67 | "python3.12": RuntimeType.PYTHON, 68 | "python3.13": RuntimeType.PYTHON, 69 | dotnet6: RuntimeType.DOTNET, 70 | dotnet8: RuntimeType.DOTNET, 71 | java11: RuntimeType.JAVA, 72 | java17: RuntimeType.JAVA, 73 | 
java21: RuntimeType.JAVA, 74 | "java8.al2": RuntimeType.JAVA, 75 | java8: RuntimeType.JAVA, 76 | "provided.al2": RuntimeType.CUSTOM, 77 | "provided.al2023": RuntimeType.CUSTOM, 78 | provided: RuntimeType.CUSTOM, 79 | "ruby3.2": RuntimeType.RUBY, 80 | "go1.x": RuntimeType.GO, 81 | }; 82 | 83 | // Map from x86 runtime keys in layers.json to the corresponding ARM runtime keys 84 | export const ARM_RUNTIME_KEYS: { [key: string]: string } = { 85 | "python3.8": "python3.8-arm", 86 | "python3.9": "python3.9-arm", 87 | "python3.10": "python3.10-arm", 88 | "python3.11": "python3.11-arm", 89 | "python3.12": "python3.12-arm", 90 | "python3.13": "python3.13-arm", 91 | "ruby3.2": "ruby3.2-arm", 92 | extension: "extension-arm", 93 | dotnet: "dotnet-arm", 94 | // The same Node layers work for both x86 and ARM 95 | "nodejs16.x": "nodejs16.x", 96 | "nodejs18.x": "nodejs18.x", 97 | "nodejs20.x": "nodejs20.x", 98 | "nodejs22.x": "nodejs22.x", 99 | // The same Java layer works for both x86 and ARM 100 | java: "java", 101 | }; 102 | 103 | export function findHandlers(service: Service, exclude: string[], defaultRuntime?: string): FunctionInfo[] { 104 | return Object.entries(service.functions) 105 | .map(([name, handler]) => { 106 | let { runtime } = handler; 107 | if (runtime === undefined) { 108 | runtime = defaultRuntime; 109 | } 110 | if (runtime !== undefined && runtime in runtimeLookup) { 111 | return { type: runtimeLookup[runtime], runtime, name, handler } as FunctionInfo; 112 | } 113 | return { type: RuntimeType.UNSUPPORTED, runtime, name, handler } as FunctionInfo; 114 | }) 115 | .filter((result) => result !== undefined) 116 | .filter( 117 | (result) => exclude === undefined || (exclude !== undefined && !exclude.includes(result.name)), 118 | ) as FunctionInfo[]; 119 | } 120 | 121 | /** 122 | * Normalize the runtime in the yml to match our layers.json keys 123 | * For most runtimes the key in layers.json is the same as the string set in the 124 | * serverless.yml, but for dotnet 
and java they are not 125 | * 126 | * @param runtimeSetting string set in serverless.yml ex: "dotnet6", "nodejs18.x" 127 | * @returns normalized runtime key 128 | */ 129 | export function normalizeRuntimeKey(runtimeSetting: string): string { 130 | if (runtimeSetting.startsWith("dotnet")) { 131 | return "dotnet"; 132 | } 133 | if (runtimeSetting.startsWith("java")) { 134 | return "java"; 135 | } 136 | return runtimeSetting; 137 | } 138 | 139 | /** 140 | * Add library layers for the given runtime and architecture 141 | * 142 | * @param service Serverless framework service 143 | * @param handlers Lambda functions to add layers to 144 | * @param layers layers.json file read into an object 145 | * @param accountId optional account ID that the layers live in - undefined 146 | * unless the customer sets a value for useLayersFromAccount in yaml 147 | * @param isUsingExtension whether to install the Datadog Lambda Extension as a layer 148 | */ 149 | export function applyLambdaLibraryLayers( 150 | service: Service, 151 | handlers: FunctionInfo[], 152 | layers: LayerJSON, 153 | accountId?: string, 154 | isUsingExtension = true, 155 | ): void { 156 | const { region } = service.provider; 157 | // It's possible a local account layer is being used in a region we have not published to so we use a default region's ARNs 158 | const shouldUseDefaultRegion = layers.regions[region] === undefined && accountId !== undefined; 159 | const regionRuntimes = shouldUseDefaultRegion ? 
layers.regions[DEFAULT_REGION] : layers.regions[region]; 160 | if (regionRuntimes === undefined) { 161 | return; 162 | } 163 | 164 | for (const handler of handlers) { 165 | if (handler.type === RuntimeType.UNSUPPORTED) { 166 | continue; 167 | } 168 | 169 | const { runtime } = handler; 170 | if (runtime === undefined) { 171 | continue; 172 | } 173 | 174 | if (!isUsingExtension && RUNTIMES_TO_ADD_FOR_EXTENSION_ONLY.includes(handler.type)) { 175 | continue; 176 | } 177 | 178 | const x86RuntimeKey = normalizeRuntimeKey(runtime); 179 | const armRuntimeKey = ARM_RUNTIME_KEYS[x86RuntimeKey]; 180 | 181 | let x86LayerArn = regionRuntimes[x86RuntimeKey]; 182 | let armLayerArn = regionRuntimes[armRuntimeKey]; 183 | 184 | if (accountId && x86LayerArn) { 185 | x86LayerArn = buildLocalLambdaLayerARN(x86LayerArn, accountId, region); 186 | } 187 | if (accountId && armLayerArn) { 188 | armLayerArn = buildLocalLambdaLayerARN(armLayerArn, accountId, region); 189 | } 190 | 191 | const architecture = 192 | handler.handler?.architecture ?? (service.provider as any).architecture ?? DEFAULT_ARCHITECTURE; 193 | const isArm64 = architecture === ARM64_ARCHITECTURE; 194 | 195 | // Use the ARM layer if customer's handler is using ARM 196 | let layerARN = isArm64 ? 
armLayerArn : x86LayerArn; 197 | 198 | // Fall back to the x86 layer if no ARM layer is available 199 | if (isArm64 && layerARN === undefined) { 200 | layerARN = x86LayerArn; 201 | } 202 | 203 | if (accountId && layerARN) { 204 | layerARN = buildLocalLambdaLayerARN(layerARN, accountId, region); 205 | } 206 | 207 | if (isArm64 && layerARN !== undefined && x86LayerArn !== undefined) { 208 | // Remove the x86 layer if the customer is using ARM 209 | removePreviousLayer(service, handler, x86LayerArn); 210 | } 211 | if (!isArm64 && layerARN !== undefined && armLayerArn !== undefined) { 212 | // Remove the ARM layer if the customer is using x86 213 | removePreviousLayer(service, handler, armLayerArn); 214 | } 215 | 216 | if (layerARN) { 217 | addLayer(service, handler, layerARN); 218 | } 219 | } 220 | } 221 | 222 | export function applyExtensionLayer( 223 | service: Service, 224 | handlers: FunctionInfo[], 225 | layers: LayerJSON, 226 | accountId?: string, 227 | isFIPSEnabled: boolean = false, 228 | ): void { 229 | const { region } = service.provider; 230 | // It's possible a local account layer is being used in a region we have not published to so we use a default region's ARNs 231 | const shouldUseDefaultRegion = layers.regions[region] === undefined && accountId !== undefined; 232 | const regionRuntimes = shouldUseDefaultRegion ? layers.regions[DEFAULT_REGION] : layers.regions[region]; 233 | if (regionRuntimes === undefined) { 234 | return; 235 | } 236 | 237 | for (const handler of handlers) { 238 | if (handler.type === RuntimeType.UNSUPPORTED) { 239 | continue; 240 | } 241 | const architecture = 242 | (handler.handler as any).architecture ?? (service.provider as any).architecture ?? DEFAULT_ARCHITECTURE; 243 | let extensionLayerKey: string = "extension"; 244 | 245 | if (architecture === ARM64_ARCHITECTURE) { 246 | const prevExtensionARN = 247 | accountId !== undefined 248 | ? 
buildLocalLambdaLayerARN(regionRuntimes[extensionLayerKey], accountId, region) 249 | : regionRuntimes[extensionLayerKey]; 250 | removePreviousLayer(service, handler, prevExtensionARN); 251 | extensionLayerKey = ARM_RUNTIME_KEYS[extensionLayerKey]; 252 | } 253 | 254 | if (isFIPSEnabled) { 255 | extensionLayerKey += "-fips"; 256 | } 257 | 258 | let extensionARN = regionRuntimes[extensionLayerKey]; 259 | if (accountId && extensionARN) { 260 | extensionARN = buildLocalLambdaLayerARN(extensionARN, accountId, region); 261 | } 262 | 263 | if (extensionARN) { 264 | addLayer(service, handler, extensionARN); 265 | } 266 | } 267 | } 268 | 269 | export function pushLayerARN(layerARN: string, currentLayers: string[]): string[] { 270 | const layerSet = new Set(currentLayers); 271 | layerSet.add(layerARN); 272 | return Array.from(layerSet); 273 | } 274 | 275 | export function isFunctionDefinitionHandler(funcDef: FunctionDefinition): funcDef is FunctionDefinitionHandler { 276 | return typeof (funcDef as any).handler === "string"; 277 | } 278 | 279 | /** 280 | * The isFIPSEnabled flag defaults to `true` if `addExtension` is `true` and region 281 | * starts with "us-gov-". It defaults to `false` otherwise. 
282 | */ 283 | export function getDefaultIsFIPSEnabledFlag(config: Configuration, region: string): boolean { 284 | return config.addExtension && region.startsWith(US_GOV_REGION_PREFIX); 285 | } 286 | 287 | function addLayer(service: Service, handler: FunctionInfo, layerArn: string): void { 288 | setLayers(handler, pushLayerARN(layerArn, getLayers(service, handler))); 289 | } 290 | 291 | function getLayers(service: Service, handler: FunctionInfo): string[] { 292 | const functionLayersList = ((handler.handler as any).layers as string[] | string[]) || []; 293 | const serviceLayersList = ((service.provider as any).layers as string[] | string[]) || []; 294 | // Function-level layers override service-level layers 295 | // Append to the function-level layers if other function-level layers are present 296 | // If service-level layers are present 297 | // Set them at the function level, as our layers are runtime-dependent and could vary 298 | // between functions in the same project 299 | if (functionLayersList.length > 0 || serviceLayersList.length === 0) { 300 | return functionLayersList; 301 | } else { 302 | return serviceLayersList; 303 | } 304 | } 305 | 306 | function removePreviousLayer(service: Service, handler: FunctionInfo, previousLayer: string | undefined): void { 307 | let layersList = getLayers(service, handler); 308 | if (new Set(layersList).has(previousLayer!)) { 309 | layersList = layersList?.filter((layer) => layer !== previousLayer); 310 | } 311 | setLayers(handler, layersList); 312 | } 313 | 314 | function setLayers(handler: FunctionInfo, layers: string[]): void { 315 | (handler.handler as any).layers = layers; 316 | } 317 | 318 | function buildLocalLambdaLayerARN(layerARN: string | undefined, accountId: string, region: string): string | undefined { 319 | if (layerARN === undefined) { 320 | return; 321 | } 322 | // Rebuild the layer ARN to use the given account's region and partition 323 | const [layerName, layerVersion] = layerARN.split(":").slice(6, 8); 
324 | const partition = getAwsPartitionByRegion(region); 325 | const localLayerARN = `arn:${partition}:lambda:${region}:${accountId}:layer:${layerName}:${layerVersion}`; 326 | return localLayerARN; 327 | } 328 | 329 | function getAwsPartitionByRegion(region: string): string { 330 | if (region.startsWith(US_GOV_REGION_PREFIX)) { 331 | return "aws-us-gov"; 332 | } 333 | if (region.startsWith("cn-")) { 334 | return "aws-cn"; 335 | } 336 | return "aws"; 337 | } 338 | -------------------------------------------------------------------------------- /src/layers-gov.json: -------------------------------------------------------------------------------- 1 | { 2 | "regions": { 3 | "us-gov-west-1": { 4 | "nodejs16.x": "arn:aws-us-gov:lambda:us-gov-west-1:002406178527:layer:Datadog-Node16-x:120", 5 | "nodejs18.x": "arn:aws-us-gov:lambda:us-gov-west-1:002406178527:layer:Datadog-Node18-x:125", 6 | "nodejs20.x": "arn:aws-us-gov:lambda:us-gov-west-1:002406178527:layer:Datadog-Node20-x:125", 7 | "nodejs22.x": "arn:aws-us-gov:lambda:us-gov-west-1:002406178527:layer:Datadog-Node22-x:125", 8 | "python3.7": "arn:aws-us-gov:lambda:us-gov-west-1:002406178527:layer:Datadog-Python37:85", 9 | "python3.8": "arn:aws-us-gov:lambda:us-gov-west-1:002406178527:layer:Datadog-Python38:110", 10 | "python3.8-arm": "arn:aws-us-gov:lambda:us-gov-west-1:002406178527:layer:Datadog-Python38-ARM:110", 11 | "python3.9": "arn:aws-us-gov:lambda:us-gov-west-1:002406178527:layer:Datadog-Python39:110", 12 | "python3.9-arm": "arn:aws-us-gov:lambda:us-gov-west-1:002406178527:layer:Datadog-Python39-ARM:110", 13 | "python3.10": "arn:aws-us-gov:lambda:us-gov-west-1:002406178527:layer:Datadog-Python310:110", 14 | "python3.10-arm": "arn:aws-us-gov:lambda:us-gov-west-1:002406178527:layer:Datadog-Python310-ARM:110", 15 | "python3.11": "arn:aws-us-gov:lambda:us-gov-west-1:002406178527:layer:Datadog-Python311:110", 16 | "python3.11-arm": "arn:aws-us-gov:lambda:us-gov-west-1:002406178527:layer:Datadog-Python311-ARM:110", 17 
| "python3.12": "arn:aws-us-gov:lambda:us-gov-west-1:002406178527:layer:Datadog-Python312:110", 18 | "python3.12-arm": "arn:aws-us-gov:lambda:us-gov-west-1:002406178527:layer:Datadog-Python312-ARM:110", 19 | "python3.13": "arn:aws-us-gov:lambda:us-gov-west-1:002406178527:layer:Datadog-Python313:110", 20 | "python3.13-arm": "arn:aws-us-gov:lambda:us-gov-west-1:002406178527:layer:Datadog-Python313-ARM:110", 21 | "ruby3.2": "arn:aws-us-gov:lambda:us-gov-west-1:002406178527:layer:Datadog-Ruby3-2:25", 22 | "ruby3.2-arm": "arn:aws-us-gov:lambda:us-gov-west-1:002406178527:layer:Datadog-Ruby3-2-ARM:25", 23 | "ruby3.3": "arn:aws-us-gov:lambda:us-gov-west-1:002406178527:layer:Datadog-Ruby3-3:25", 24 | "ruby3.3-arm": "arn:aws-us-gov:lambda:us-gov-west-1:002406178527:layer:Datadog-Ruby3-3-ARM:25", 25 | "extension": "arn:aws-us-gov:lambda:us-gov-west-1:002406178527:layer:Datadog-Extension:80", 26 | "extension-arm": "arn:aws-us-gov:lambda:us-gov-west-1:002406178527:layer:Datadog-Extension-ARM:80", 27 | "extension-fips": "arn:aws-us-gov:lambda:us-gov-west-1:002406178527:layer:Datadog-Extension-FIPS:80", 28 | "extension-arm-fips": "arn:aws-us-gov:lambda:us-gov-west-1:002406178527:layer:Datadog-Extension-ARM-FIPS:80", 29 | "dotnet": "arn:aws-us-gov:lambda:us-gov-west-1:002406178527:layer:dd-trace-dotnet:20", 30 | "dotnet-arm": "arn:aws-us-gov:lambda:us-gov-west-1:002406178527:layer:dd-trace-dotnet-ARM:20", 31 | "java": "arn:aws-us-gov:lambda:us-gov-west-1:002406178527:layer:dd-trace-java:21" 32 | }, 33 | "us-gov-east-1": { 34 | "nodejs16.x": "arn:aws-us-gov:lambda:us-gov-east-1:002406178527:layer:Datadog-Node16-x:120", 35 | "nodejs18.x": "arn:aws-us-gov:lambda:us-gov-east-1:002406178527:layer:Datadog-Node18-x:125", 36 | "nodejs20.x": "arn:aws-us-gov:lambda:us-gov-east-1:002406178527:layer:Datadog-Node20-x:125", 37 | "nodejs22.x": "arn:aws-us-gov:lambda:us-gov-east-1:002406178527:layer:Datadog-Node22-x:125", 38 | "python3.7": 
"arn:aws-us-gov:lambda:us-gov-east-1:002406178527:layer:Datadog-Python37:85", 39 | "python3.8": "arn:aws-us-gov:lambda:us-gov-east-1:002406178527:layer:Datadog-Python38:110", 40 | "python3.8-arm": "arn:aws-us-gov:lambda:us-gov-east-1:002406178527:layer:Datadog-Python38-ARM:110", 41 | "python3.9": "arn:aws-us-gov:lambda:us-gov-east-1:002406178527:layer:Datadog-Python39:110", 42 | "python3.9-arm": "arn:aws-us-gov:lambda:us-gov-east-1:002406178527:layer:Datadog-Python39-ARM:110", 43 | "python3.10": "arn:aws-us-gov:lambda:us-gov-east-1:002406178527:layer:Datadog-Python310:110", 44 | "python3.10-arm": "arn:aws-us-gov:lambda:us-gov-east-1:002406178527:layer:Datadog-Python310-ARM:110", 45 | "python3.11": "arn:aws-us-gov:lambda:us-gov-east-1:002406178527:layer:Datadog-Python311:110", 46 | "python3.11-arm": "arn:aws-us-gov:lambda:us-gov-east-1:002406178527:layer:Datadog-Python311-ARM:110", 47 | "python3.12": "arn:aws-us-gov:lambda:us-gov-east-1:002406178527:layer:Datadog-Python312:110", 48 | "python3.12-arm": "arn:aws-us-gov:lambda:us-gov-east-1:002406178527:layer:Datadog-Python312-ARM:110", 49 | "python3.13": "arn:aws-us-gov:lambda:us-gov-east-1:002406178527:layer:Datadog-Python313:110", 50 | "python3.13-arm": "arn:aws-us-gov:lambda:us-gov-east-1:002406178527:layer:Datadog-Python313-ARM:110", 51 | "ruby3.2": "arn:aws-us-gov:lambda:us-gov-east-1:002406178527:layer:Datadog-Ruby3-2:25", 52 | "ruby3.2-arm": "arn:aws-us-gov:lambda:us-gov-east-1:002406178527:layer:Datadog-Ruby3-2-ARM:25", 53 | "ruby3.3": "arn:aws-us-gov:lambda:us-gov-east-1:002406178527:layer:Datadog-Ruby3-3:25", 54 | "ruby3.3-arm": "arn:aws-us-gov:lambda:us-gov-east-1:002406178527:layer:Datadog-Ruby3-3-ARM:25", 55 | "extension": "arn:aws-us-gov:lambda:us-gov-east-1:002406178527:layer:Datadog-Extension:80", 56 | "extension-arm": "arn:aws-us-gov:lambda:us-gov-east-1:002406178527:layer:Datadog-Extension-ARM:80", 57 | "extension-fips": 
"arn:aws-us-gov:lambda:us-gov-east-1:002406178527:layer:Datadog-Extension-FIPS:80", 58 | "extension-arm-fips": "arn:aws-us-gov:lambda:us-gov-east-1:002406178527:layer:Datadog-Extension-ARM-FIPS:80", 59 | "dotnet": "arn:aws-us-gov:lambda:us-gov-east-1:002406178527:layer:dd-trace-dotnet:20", 60 | "dotnet-arm": "arn:aws-us-gov:lambda:us-gov-east-1:002406178527:layer:dd-trace-dotnet-ARM:20", 61 | "java": "arn:aws-us-gov:lambda:us-gov-east-1:002406178527:layer:dd-trace-java:21" 62 | } 63 | } 64 | } 65 | -------------------------------------------------------------------------------- /src/monitor-api-requests.ts: -------------------------------------------------------------------------------- 1 | import fetch, { Response } from "node-fetch"; 2 | import * as Serverless from "serverless"; 3 | import { MonitorParams, ServerlessMonitor, replaceCriticalThreshold } from "./monitors"; 4 | 5 | export class InvalidAuthenticationError extends Error { 6 | constructor(message: string) { 7 | // Pass the message string directly; spreading it (`super(...message)`) would split it into characters and set only the first one 8 | super(message); 9 | this.name = "Invalid Authentication Error"; 10 | this.message = message; 11 | } 12 | } 13 | 14 | interface QueriedMonitor { 15 | query: string; 16 | id: number; 17 | name: string; 18 | tags: string[]; 19 | } 20 | 21 | export interface TemplateVariable { 22 | name: string; 23 | defaults: string[]; 24 | } 25 | 26 | export interface RecommendedMonitorParams { 27 | id: string; 28 | attributes: { 29 | query: string; 30 | message: string; 31 | description: string; 32 | type: string; 33 | options: { 34 | thresholds: { [key: string]: any }; 35 | }; 36 | name: string; 37 | template_variables?: TemplateVariable[]; 38 | tags: string[]; 39 | }; 40 | } 41 | 42 | export async function createMonitor( 43 | site: string, 44 | monitorParams: MonitorParams, 45 | monitorsApiKey: string, 46 | monitorsAppKey: string, 47 | ): Promise { 48 | const response: Response = await fetch(`https://api.${site}/api/v1/monitor`, { 49 | method: "POST", 50 | headers: { 51 | "DD-API-KEY": monitorsApiKey, 52 |
"DD-APPLICATION-KEY": monitorsAppKey, 52 | "Content-Type": "application/json", 53 | }, 54 | body: JSON.stringify(monitorParams), 55 | }); 56 | return response; 57 | } 58 | 59 | export async function updateMonitor( 60 | site: string, 61 | monitorId: number, 62 | monitorParams: MonitorParams, 63 | monitorsApiKey: string, 64 | monitorsAppKey: string, 65 | ): Promise { 66 | const response: Response = await fetch(`https://api.${site}/api/v1/monitor/${monitorId}`, { 67 | method: "PUT", 68 | headers: { 69 | "DD-API-KEY": monitorsApiKey, 70 | "DD-APPLICATION-KEY": monitorsAppKey, 71 | "Content-Type": "application/json", 72 | }, 73 | body: JSON.stringify(monitorParams), 74 | }); 75 | 76 | return response; 77 | } 78 | 79 | export async function deleteMonitor( 80 | site: string, 81 | monitorId: number, 82 | monitorsApiKey: string, 83 | monitorsAppKey: string, 84 | ): Promise { 85 | const response: Response = await fetch(`https://api.${site}/api/v1/monitor/${monitorId}`, { 86 | method: "DELETE", 87 | headers: { 88 | "DD-API-KEY": monitorsApiKey, 89 | "DD-APPLICATION-KEY": monitorsAppKey, 90 | "Content-Type": "application/json", 91 | }, 92 | }); 93 | 94 | return response; 95 | } 96 | 97 | export async function searchMonitors( 98 | site: string, 99 | queryTag: string, 100 | monitorsApiKey: string, 101 | monitorsAppKey: string, 102 | ): Promise { 103 | let monitors: QueriedMonitor[] = []; 104 | let page = 0; 105 | let pageCount = 1; 106 | do { 107 | const query = `tag:"${queryTag}"`; 108 | const response: Response = await fetch(`https://api.${site}/api/v1/monitor/search?query=${query}&page=${page}`, { 109 | method: "GET", 110 | headers: { 111 | "DD-API-KEY": monitorsApiKey, 112 | "DD-APPLICATION-KEY": monitorsAppKey, 113 | "Content-Type": "application/json", 114 | }, 115 | }); 116 | 117 | if (response.status !== 200) { 118 | throw new Error(`Can't fetch monitors. Status code: ${response.status}. 
Message: ${response.statusText}`); 119 | } 120 | 121 | const json = await response.json(); 122 | monitors = monitors.concat(json.monitors); 123 | pageCount = json.metadata.page_count; 124 | page += 1; 125 | } while (page < pageCount); 126 | 127 | return monitors; 128 | } 129 | 130 | export async function getCloudFormationStackId(serverless: Serverless): Promise { 131 | const stackName = serverless.getProvider("aws").naming.getStackName(); 132 | const describeStackOutput = await serverless 133 | .getProvider("aws") 134 | .request( 135 | "CloudFormation", 136 | "describeStacks", 137 | { StackName: stackName }, 138 | { region: serverless.getProvider("aws").getRegion() }, 139 | ) 140 | .catch(() => { 141 | // Ignore any request exceptions, fail silently and skip output logging 142 | }); 143 | const cloudFormationStackId: string = describeStackOutput ? describeStackOutput.Stacks[0].StackId : ""; 144 | return cloudFormationStackId; 145 | } 146 | 147 | export async function getExistingMonitors( 148 | site: string, 149 | cloudFormationStackId: string, 150 | monitorsApiKey: string, 151 | monitorsAppKey: string, 152 | ): Promise<{ [key: string]: number }> { 153 | const existingMonitors = await searchMonitors( 154 | site, 155 | `aws_cloudformation_stack-id:${cloudFormationStackId}`, 156 | monitorsApiKey, 157 | monitorsAppKey, 158 | ); 159 | const serverlessMonitorIdByMonitorId: { [key: string]: number } = {}; 160 | for (const existingMonitor of existingMonitors) { 161 | for (const tag of existingMonitor.tags) { 162 | if (tag.startsWith("serverless_monitor_id:") || tag.startsWith("serverless_id:")) { 163 | const serverlessMonitorId = tag.substring(tag.indexOf(":") + 1); 164 | serverlessMonitorIdByMonitorId[serverlessMonitorId] = existingMonitor.id; 165 | } 166 | } 167 | } 168 | return serverlessMonitorIdByMonitorId; 169 | } 170 | 171 | export async function getRecommendedMonitors( 172 | site: string, 173 | monitorsApiKey: string, 174 | monitorsAppKey: string, 175 | ): 
Promise<{ 176 | [key: string]: ServerlessMonitor; 177 | }> { 178 | const recommendedMonitors: { [key: string]: ServerlessMonitor } = {}; 179 | // Setting a count of 50 in the hope that all can be fetched at once. The default is 10 per page. 180 | const endpoint = `https://api.${site}/api/v2/monitor/recommended?count=50&start=0&search=tag%3A%22product%3Aserverless%22%20AND%20tag%3A%22integration%3Aamazon-lambda%22`; 181 | const response: Response = await fetch(endpoint, { 182 | method: "GET", 183 | headers: { 184 | "DD-API-KEY": monitorsApiKey, 185 | "DD-APPLICATION-KEY": monitorsAppKey, 186 | "Content-Type": "application/json", 187 | }, 188 | }); 189 | if (response.status !== 200) { 190 | throw new Error(`Can't fetch monitor params. Status code: ${response.status}. Message: ${response.statusText}`); 191 | } 192 | 193 | const json = await response.json(); 194 | const recommendedMonitorsData = json.data; 195 | recommendedMonitorsData.forEach((recommendedMonitorParam: RecommendedMonitorParams) => { 196 | const recommendedMonitorId = parseRecommendedMonitorServerlessId(recommendedMonitorParam); 197 | if (recommendedMonitorId === undefined) { 198 | return; 199 | } 200 | 201 | const recommendedMonitor: ServerlessMonitor = { 202 | name: recommendedMonitorParam.attributes.name, 203 | threshold: recommendedMonitorParam.attributes.options.thresholds.critical, 204 | message: recommendedMonitorParam.attributes.message, 205 | type: recommendedMonitorParam.attributes.type, 206 | query: (cloudFormationStackId: string, criticalThreshold: number) => { 207 | let query = recommendedMonitorParam.attributes.query; 208 | // replace $scope with cloudformation_stack_id 209 | query = query.replace(/\$scope/g, `aws_cloudformation_stack-id:${cloudFormationStackId}`); 210 | 211 | if (criticalThreshold !== recommendedMonitorParam.attributes.options.thresholds.critical) { 212 | query = replaceCriticalThreshold(query, criticalThreshold); 213 | } 214 | return query; 215 | }, 216 | 
templateVariables: recommendedMonitorParam.attributes.template_variables, 217 | }; 218 | recommendedMonitors[recommendedMonitorId] = recommendedMonitor; 219 | }); 220 | 221 | return recommendedMonitors; 222 | } 223 | 224 | export function parseRecommendedMonitorServerlessId( 225 | recommendedMonitorParams: RecommendedMonitorParams, 226 | ): string | undefined { 227 | for (const tag of recommendedMonitorParams.attributes.tags) { 228 | if (tag.startsWith("serverless_id:")) { 229 | return tag.substring(tag.indexOf(":") + 1); 230 | } 231 | } 232 | return undefined; 233 | } 234 | -------------------------------------------------------------------------------- /src/monitors.spec.ts: -------------------------------------------------------------------------------- 1 | import { 2 | createMonitor, 3 | deleteMonitor, 4 | getExistingMonitors, 5 | updateMonitor, 6 | getRecommendedMonitors, 7 | } from "./monitor-api-requests"; 8 | import { Monitor, RecommendedMonitors, setMonitors, buildMonitorParams } from "./monitors"; 9 | 10 | jest.mock("./monitor-api-requests", () => ({ 11 | createMonitor: jest.fn(), 12 | updateMonitor: jest.fn(), 13 | deleteMonitor: jest.fn(), 14 | getExistingMonitors: jest.fn(), 15 | getRecommendedMonitors: jest.fn(), 16 | })); 17 | 18 | const CUSTOM_MONITOR_1: Monitor = { 19 | custom_monitor_1: { 20 | name: "Custom Monitor 1", 21 | query: 22 | "max(next_1w):forecast(avg:system.load.1{*}, 'linear', 1, interval='60m', history='1w', model='default') >= 3", 23 | }, 24 | }; 25 | 26 | const CUSTOM_MONITOR_2: Monitor = { 27 | custom_monitor_2: { 28 | name: "Custom Monitor 2", 29 | query: "avg(last_15m):anomalies(avg:system.load.1{*}, 'basic', 2, direction='both') >= 1", 30 | tags: ["custom_monitor_2"], 31 | message: "This is a custom monitor", 32 | options: { 33 | renotify_interval: 0, 34 | timeout_h: 0, 35 | thresholds: { critical: 1 }, 36 | notify_no_data: false, 37 | no_data_timeframe: 2, 38 | notify_audit: false, 39 | require_full_window: true, 40 | }, 41 | 
}, 42 | }; 43 | 44 | const UPDATED_CUSTOM_MONITOR_2: Monitor = { 45 | custom_monitor_2: { 46 | name: "Updated Custom Monitor 2", 47 | query: "avg(last_15m):anomalies(avg:system.load.1{*}, 'basic', 2, direction='both') >= 1", 48 | message: "This is a custom monitor", 49 | }, 50 | }; 51 | 52 | const INCREASED_COST_MONITOR: Monitor = { 53 | increased_cost: { 54 | name: "Increased Cost", 55 | query: "", 56 | message: "This is an increased cost monitor", 57 | options: { 58 | renotify_interval: 0, 59 | timeout_h: 0, 60 | thresholds: { warning: 1, critical: 25 }, // custom critical threshold value 61 | notify_no_data: false, 62 | no_data_timeframe: 2, 63 | notify_audit: false, 64 | require_full_window: true, 65 | }, 66 | }, 67 | }; 68 | 69 | const TIMEOUT_MONITOR: Monitor = { 70 | timeout: { 71 | name: "Modified Timeout Monitor", 72 | query: "avg(last_15m):anomalies(avg:system.load.1{*}, 'basic', 2, direction='both') >= 1", 73 | }, 74 | }; 75 | 76 | const DEFAULT_TIMEOUT_MONITOR: Monitor = { 77 | timeout: {}, 78 | }; 79 | 80 | const NO_TEMPLATE_VARIABLE_MONITOR: Monitor = { 81 | test_no_template_variable: {}, 82 | }; 83 | 84 | const CUSTOM_MONITOR_1_PARAMS = { 85 | name: "Custom Monitor 1", 86 | query: "max(next_1w):forecast(avg:system.load.1{*}, 'linear', 1, interval='60m', history='1w', model='default') >= 3", 87 | tags: [ 88 | "serverless_monitor_type:single_function", 89 | "serverless_monitor_id:custom_monitor_1", 90 | "aws_cloudformation_stack-id:cloud_formation_id", 91 | "created_by:dd_sls_plugin", 92 | "env:env", 93 | "service:service", 94 | ], 95 | options: {}, 96 | type: "metric alert", 97 | }; 98 | const CUSTOM_MONITOR_2_PARAMS = { 99 | name: "Custom Monitor 2", 100 | query: "avg(last_15m):anomalies(avg:system.load.1{*}, 'basic', 2, direction='both') >= 1", 101 | tags: [ 102 | "custom_monitor_2", 103 | "serverless_monitor_type:single_function", 104 | "serverless_monitor_id:custom_monitor_2", 105 | "aws_cloudformation_stack-id:cloud_formation_id", 106 | 
"created_by:dd_sls_plugin", 107 | "env:env", 108 | "service:service", 109 | ], 110 | message: "This is a custom monitor", 111 | options: { 112 | renotify_interval: 0, 113 | timeout_h: 0, 114 | thresholds: { critical: 1 }, 115 | notify_no_data: false, 116 | no_data_timeframe: 2, 117 | notify_audit: false, 118 | require_full_window: true, 119 | }, 120 | type: "metric alert", 121 | }; 122 | const UPDATED_CUSTOM_MONITOR_2_PARAMS = { 123 | name: "Updated Custom Monitor 2", 124 | query: "avg(last_15m):anomalies(avg:system.load.1{*}, 'basic', 2, direction='both') >= 1", 125 | tags: [ 126 | "serverless_monitor_type:single_function", 127 | "serverless_monitor_id:custom_monitor_2", 128 | "aws_cloudformation_stack-id:cloud_formation_id", 129 | "created_by:dd_sls_plugin", 130 | "env:env", 131 | "service:service", 132 | ], 133 | message: "This is a custom monitor", 134 | options: {}, 135 | type: "metric alert", 136 | }; 137 | const INCREASED_COST_MONITOR_PARAMS = { 138 | name: "Increased Cost", 139 | query: 140 | "pct_change(avg(last_5m),last_5m):avg:aws.lambda.enhanced.estimated_cost{aws_cloudformation_stack-id:cloud_formation_id} > 25", 141 | message: "This is an increased cost monitor", 142 | options: { 143 | renotify_interval: 0, 144 | timeout_h: 0, 145 | thresholds: { warning: 1, critical: 25 }, 146 | notify_no_data: false, 147 | no_data_timeframe: 2, 148 | notify_audit: false, 149 | require_full_window: true, 150 | }, 151 | tags: [ 152 | "serverless_monitor_type:single_function", 153 | "serverless_monitor_id:increased_cost", 154 | "aws_cloudformation_stack-id:cloud_formation_id", 155 | "created_by:dd_sls_plugin", 156 | "env:env", 157 | "service:service", 158 | ], 159 | type: "metric alert", 160 | }; 161 | const TIMEOUT_MONITOR_PARAMS = { 162 | name: "Modified Timeout Monitor", 163 | query: 164 | "avg(last_15m):sum:aws.lambda.duration.maximum{aws_cloudformation_stack-id:cloud_formation_id} by {aws_account,functionname,region}.as_count() / 
(sum:aws.lambda.timeout{aws_cloudformation_stack-id:cloud_formation_id} by {aws_account,functionname,region}.as_count() * 1000) >= 1", 165 | tags: [ 166 | "serverless_monitor_type:single_function", 167 | "serverless_monitor_id:timeout", 168 | "aws_cloudformation_stack-id:cloud_formation_id", 169 | "created_by:dd_sls_plugin", 170 | "env:env", 171 | "service:service", 172 | ], 173 | options: {}, 174 | type: "metric alert", 175 | message: 176 | "At least one invocation in the selected time range timed out. This occurs when your function runs for longer than the configured timeout or the global Lambda timeout. Resolution: [Distributed tracing](https://docs.datadoghq.com/serverless/distributed_tracing) can help you pinpoint slow requests to APIs and other microservices. You can also consider increasing the timeout of your function. Note that this could affect your AWS bill.", 177 | }; 178 | const NO_TEMPLATE_VARIABLE_MONITOR_PARAMS = { 179 | name: "This is to ensure that serverless plugin works properly when a recommended monitor has no template variable", 180 | query: "false", 181 | tags: [ 182 | "serverless_monitor_type:single_function", 183 | "serverless_monitor_id:test_no_template_variable", 184 | "aws_cloudformation_stack-id:cloud_formation_id", 185 | "created_by:dd_sls_plugin", 186 | "env:env", 187 | "service:service", 188 | ], 189 | options: {}, 190 | type: "metric alert", 191 | message: "This alert is not supposed to be triggered.", 192 | }; 193 | const DEFAULT_TIMEOUT_MONITOR_PARAMS = { 194 | name: "Timeout on {{functionname.name}} in {{region.name}} for {{aws_account.name}} with $varNoDefault", 195 | query: 196 | "avg(last_15m):sum:aws.lambda.duration.maximum{aws_cloudformation_stack-id:cloud_formation_id} by {aws_account,functionname,region}.as_count() / (sum:aws.lambda.timeout{aws_cloudformation_stack-id:cloud_formation_id} by {aws_account,functionname,region}.as_count() * 1000) >= 1", 197 | tags: [ 198 | "serverless_monitor_type:single_function", 199 | 
"serverless_monitor_id:timeout", 200 | "aws_cloudformation_stack-id:cloud_formation_id", 201 | "created_by:dd_sls_plugin", 202 | "env:env", 203 | "service:service", 204 | ], 205 | options: {}, 206 | type: "metric alert", 207 | message: 208 | "At least one invocation in the selected time range timed out. This occurs when your function runs for longer than the configured timeout or the global Lambda timeout. Resolution: [Distributed tracing](https://docs.datadoghq.com/serverless/distributed_tracing) can help you pinpoint slow requests to APIs and other microservices. You can also consider increasing the timeout of your function. Note that this could affect your AWS bill.", 209 | }; 210 | 211 | const TEMPLATE_VARIABLES = [ 212 | { 213 | name: "functionName", 214 | defaults: ["{{functionname.name}}"], 215 | prefix: "", 216 | available_values: [], 217 | }, 218 | { 219 | name: "regionName", 220 | defaults: ["{{region.name}}"], 221 | prefix: "", 222 | available_values: [], 223 | }, 224 | { 225 | name: "awsAccount", 226 | defaults: ["{{aws_account.name}}"], 227 | prefix: "", 228 | available_values: [], 229 | }, 230 | { 231 | name: "scope", 232 | defaults: ["*"], 233 | prefix: "", 234 | available_values: [], 235 | }, 236 | // A template variable with no default value. If it exists in the name of 237 | // a recommended monitor, then interpolation code will do nothing, i.e. 238 | // it will leave "$varNoDefault" as it is in the name string. 
239 | { 240 | name: "varNoDefault", 241 | defaults: [], 242 | prefix: "", 243 | available_values: [], 244 | }, 245 | ]; 246 | 247 | const RECOMMENDED_MONITORS: RecommendedMonitors = { 248 | increased_cost: { 249 | name: "Increased Cost on $functionName in $regionName for $awsAccount", 250 | threshold: 0.2, 251 | message: "Estimated cost of invocations have increased more than 20%", 252 | query: (cloudFormationStackId: string, criticalThreshold: number) => { 253 | return `pct_change(avg(last_5m),last_5m):avg:aws.lambda.enhanced.estimated_cost{aws_cloudformation_stack-id:${cloudFormationStackId}} > ${criticalThreshold}`; 254 | }, 255 | templateVariables: TEMPLATE_VARIABLES, 256 | }, 257 | timeout: { 258 | name: "Timeout on $functionName in $regionName for $awsAccount with $varNoDefault", 259 | threshold: 1, 260 | message: 261 | "At least one invocation in the selected time range timed out. This occurs when your function runs for longer than the configured timeout or the global Lambda timeout. Resolution: [Distributed tracing](https://docs.datadoghq.com/serverless/distributed_tracing) can help you pinpoint slow requests to APIs and other microservices. You can also consider increasing the timeout of your function. 
Note that this could affect your AWS bill.", 262 | type: "query alert", 263 | query: (cloudFormationStackId: string, criticalThreshold: number) => { 264 | return `avg(last_15m):sum:aws.lambda.duration.maximum{aws_cloudformation_stack-id:${cloudFormationStackId}} by {aws_account,functionname,region}.as_count() / (sum:aws.lambda.timeout{aws_cloudformation_stack-id:${cloudFormationStackId}} by {aws_account,functionname,region}.as_count() * 1000) >= ${criticalThreshold}`; 265 | }, 266 | templateVariables: TEMPLATE_VARIABLES, 267 | }, 268 | test_no_template_variable: { 269 | name: "This is to ensure that serverless plugin works properly when a recommended monitor has no template variable", 270 | threshold: 0, 271 | message: "This alert is not supposed to be triggered.", 272 | query: () => { 273 | return "false"; 274 | }, 275 | }, 276 | }; 277 | 278 | const MONITOR_SET_1 = [CUSTOM_MONITOR_1, CUSTOM_MONITOR_2, INCREASED_COST_MONITOR]; 279 | const MONITOR_SET_2 = [CUSTOM_MONITOR_1, UPDATED_CUSTOM_MONITOR_2, TIMEOUT_MONITOR]; 280 | const MONITOR_SET_3 = [CUSTOM_MONITOR_1, INCREASED_COST_MONITOR]; 281 | 282 | describe("buildMonitorParams", () => { 283 | it("returns valid monitor params for a custom monitor", async () => { 284 | const monitorParams = buildMonitorParams( 285 | CUSTOM_MONITOR_1, 286 | "cloud_formation_id", 287 | "service", 288 | "env", 289 | RECOMMENDED_MONITORS, 290 | ); 291 | expect(monitorParams).toEqual(CUSTOM_MONITOR_1_PARAMS); 292 | }); 293 | it("returns valid monitor params for a custom monitor", async () => { 294 | const monitorParams = buildMonitorParams( 295 | CUSTOM_MONITOR_2, 296 | "cloud_formation_id", 297 | "service", 298 | "env", 299 | RECOMMENDED_MONITORS, 300 | ); 301 | expect(monitorParams).toEqual(CUSTOM_MONITOR_2_PARAMS); 302 | }); 303 | it("returns valid monitor params for an updated custom monitor", async () => { 304 | const monitorParams = buildMonitorParams( 305 | UPDATED_CUSTOM_MONITOR_2, 306 | "cloud_formation_id", 307 | "service", 308 
| "env", 309 | RECOMMENDED_MONITORS, 310 | ); 311 | expect(monitorParams).toEqual(UPDATED_CUSTOM_MONITOR_2_PARAMS); 312 | }); 313 | it("returns valid monitor params for Increased Cost monitor", async () => {}); 314 | it("returns valid monitor params for the Timeout monitor", async () => { 315 | const monitorParams = buildMonitorParams( 316 | TIMEOUT_MONITOR, 317 | "cloud_formation_id", 318 | "service", 319 | "env", 320 | RECOMMENDED_MONITORS, 321 | ); 322 | expect(monitorParams).toEqual(TIMEOUT_MONITOR_PARAMS); 323 | }); 324 | it("returns valid monitor params for a minotor which has no template variable", async () => { 325 | const monitorParams = buildMonitorParams( 326 | NO_TEMPLATE_VARIABLE_MONITOR, 327 | "cloud_formation_id", 328 | "service", 329 | "env", 330 | RECOMMENDED_MONITORS, 331 | ); 332 | expect(monitorParams).toEqual(NO_TEMPLATE_VARIABLE_MONITOR_PARAMS); 333 | }); 334 | it("interpolates template variables in the name of a recommended monitor", async () => { 335 | const monitorParams = buildMonitorParams( 336 | DEFAULT_TIMEOUT_MONITOR, 337 | "cloud_formation_id", 338 | "service", 339 | "env", 340 | RECOMMENDED_MONITORS, 341 | ); 342 | expect(monitorParams).toEqual(DEFAULT_TIMEOUT_MONITOR_PARAMS); 343 | }); 344 | }); 345 | 346 | describe("setMonitors", () => { 347 | afterEach(() => { 348 | (createMonitor as unknown as jest.Mock).mockRestore(); 349 | (updateMonitor as unknown as jest.Mock).mockRestore(); 350 | (deleteMonitor as unknown as jest.Mock).mockRestore(); 351 | (getExistingMonitors as unknown as jest.Mock).mockRestore(); 352 | (getRecommendedMonitors as unknown as jest.Mock).mockRestore(); 353 | }); 354 | 355 | it("returns 'Successfully created custom_monitor_1'", async () => { 356 | (getRecommendedMonitors as unknown as jest.Mock).mockReturnValue(RECOMMENDED_MONITORS); 357 | (getExistingMonitors as unknown as jest.Mock).mockReturnValue({}); 358 | (createMonitor as unknown as jest.Mock).mockReturnValue({ status: 200 }); 359 | const logStatements 
= await setMonitors( 360 | "app", 361 | "datadoghq.com", 362 | [CUSTOM_MONITOR_1], 363 | "apikey", 364 | "appkey", 365 | "cloud_formation_id", 366 | "service", 367 | "env", 368 | ); 369 | expect(logStatements).toEqual(["Successfully created custom_monitor_1"]); 370 | expect(createMonitor as unknown as jest.Mock).toHaveBeenCalledWith( 371 | "datadoghq.com", 372 | CUSTOM_MONITOR_1_PARAMS, 373 | "apikey", 374 | "appkey", 375 | ); 376 | }); 377 | it("returns 'Successfully updated custom_monitor_1', 'Successfully created custom_monitor_2, increased_cost'", async () => { 378 | (getRecommendedMonitors as unknown as jest.Mock).mockReturnValue(RECOMMENDED_MONITORS); 379 | (getExistingMonitors as unknown as jest.Mock).mockReturnValue({ custom_monitor_1: 123456 }); 380 | (createMonitor as unknown as jest.Mock).mockReturnValue({ status: 200 }); 381 | (updateMonitor as unknown as jest.Mock).mockReturnValue({ status: 200 }); 382 | const logStatements = await setMonitors( 383 | "app", 384 | "datadoghq.com", 385 | MONITOR_SET_1, 386 | "apikey", 387 | "appkey", 388 | "cloud_formation_id", 389 | "service", 390 | "env", 391 | ); 392 | expect(logStatements).toEqual([ 393 | "Successfully updated custom_monitor_1", 394 | "Successfully created custom_monitor_2, increased_cost", 395 | ]); 396 | expect(createMonitor as unknown as jest.Mock).toHaveBeenCalledWith( 397 | "datadoghq.com", 398 | CUSTOM_MONITOR_2_PARAMS, 399 | "apikey", 400 | "appkey", 401 | ); 402 | expect(createMonitor as unknown as jest.Mock).toHaveBeenCalledWith( 403 | "datadoghq.com", 404 | INCREASED_COST_MONITOR_PARAMS, 405 | "apikey", 406 | "appkey", 407 | ); 408 | }); 409 | it("returns 'Successfully updated custom_monitor_1, custom_monitor_2', 'Successfully created timeout', 'Successfully deleted increased_cost'", async () => { 410 | (getRecommendedMonitors as unknown as jest.Mock).mockReturnValue(RECOMMENDED_MONITORS); 411 | (getExistingMonitors as unknown as jest.Mock).mockReturnValue({ 412 | custom_monitor_1: 123456, 
413 | custom_monitor_2: 123456, 414 | increased_cost: 123456, 415 | }); 416 | (createMonitor as unknown as jest.Mock).mockReturnValue({ status: 200 }); 417 | (updateMonitor as unknown as jest.Mock).mockReturnValue({ status: 200 }); 418 | (deleteMonitor as unknown as jest.Mock).mockReturnValue({ status: 200 }); 419 | const logStatements = await setMonitors( 420 | "app", 421 | "datadoghq.com", 422 | MONITOR_SET_2, 423 | "apikey", 424 | "appkey", 425 | "cloud_formation_id", 426 | "service", 427 | "env", 428 | ); 429 | expect(logStatements).toEqual([ 430 | "Successfully updated custom_monitor_1, custom_monitor_2", 431 | "Successfully created timeout", 432 | "Successfully deleted increased_cost", 433 | ]); 434 | expect(updateMonitor as unknown as jest.Mock).toHaveBeenCalledWith( 435 | "datadoghq.com", 436 | 123456, 437 | CUSTOM_MONITOR_1_PARAMS, 438 | "apikey", 439 | "appkey", 440 | ); 441 | expect(updateMonitor as unknown as jest.Mock).toHaveBeenCalledWith( 442 | "datadoghq.com", 443 | 123456, 444 | UPDATED_CUSTOM_MONITOR_2_PARAMS, 445 | "apikey", 446 | "appkey", 447 | ); //make sure to use the UPDATED_Monitors? 
448 | expect(createMonitor as unknown as jest.Mock).toHaveBeenCalledWith( 449 | "datadoghq.com", 450 | TIMEOUT_MONITOR_PARAMS, 451 | "apikey", 452 | "appkey", 453 | ); 454 | expect(deleteMonitor as unknown as jest.Mock).toHaveBeenCalledWith("datadoghq.com", 123456, "apikey", "appkey"); 455 | }); 456 | it("returns 'Succcessfully updated custom_monitor_1, 'Successfully created increased_cost', 'Successfully deleted timeout'", async () => { 457 | (getRecommendedMonitors as unknown as jest.Mock).mockReturnValue(RECOMMENDED_MONITORS); 458 | (getExistingMonitors as unknown as jest.Mock).mockReturnValue({ 459 | timeout: 123456, 460 | custom_monitor_1: 123456, 461 | custom_monitor_2: 123456, 462 | }); 463 | (createMonitor as unknown as jest.Mock).mockReturnValue({ status: 200 }); 464 | (updateMonitor as unknown as jest.Mock).mockReturnValue({ status: 200 }); 465 | (deleteMonitor as unknown as jest.Mock).mockReturnValue({ status: 200 }); 466 | const logStatements = await setMonitors( 467 | "app", 468 | "datadoghq.com", 469 | MONITOR_SET_3, 470 | "apikey", 471 | "appkey", 472 | "cloud_formation_id", 473 | "service", 474 | "env", 475 | ); 476 | expect(logStatements).toEqual([ 477 | "Successfully updated custom_monitor_1", 478 | "Successfully created increased_cost", 479 | "Successfully deleted timeout, custom_monitor_2", 480 | ]); 481 | expect(updateMonitor as unknown as jest.Mock).toHaveBeenCalledWith( 482 | "datadoghq.com", 483 | 123456, 484 | CUSTOM_MONITOR_1_PARAMS, 485 | "apikey", 486 | "appkey", 487 | ); 488 | expect(deleteMonitor as unknown as jest.Mock).toHaveBeenCalledWith("datadoghq.com", 123456, "apikey", "appkey"); 489 | expect(deleteMonitor as unknown as jest.Mock).toHaveBeenCalledWith("datadoghq.com", 123456, "apikey", "appkey"); 490 | expect(createMonitor as unknown as jest.Mock).toHaveBeenCalledWith( 491 | "datadoghq.com", 492 | INCREASED_COST_MONITOR_PARAMS, 493 | "apikey", 494 | "appkey", 495 | ); 496 | }); 497 | }); 498 | 
-------------------------------------------------------------------------------- /src/monitors.ts: -------------------------------------------------------------------------------- 1 | import { 2 | updateMonitor, 3 | createMonitor, 4 | deleteMonitor, 5 | getExistingMonitors, 6 | getRecommendedMonitors, 7 | TemplateVariable, 8 | } from "./monitor-api-requests"; 9 | import { Response } from "node-fetch"; 10 | 11 | export interface MonitorParams { 12 | [key: string]: any; 13 | } 14 | export interface Monitor { 15 | [key: string]: MonitorParams; 16 | } 17 | 18 | export interface ServerlessMonitor { 19 | name: string; 20 | threshold: number; 21 | query: (cloudFormationStackId: string, criticalThreshold: number) => string; 22 | message: string; 23 | type?: string; 24 | templateVariables?: TemplateVariable[]; 25 | } 26 | 27 | export interface RecommendedMonitors { 28 | [key: string]: ServerlessMonitor; 29 | } 30 | /** 31 | * Adds the appropriate tags and required parameters that will be passed as part of the request body for creating and updating monitors 32 | * @param monitor - the Monitor object that is defined in the serverless.yml file 33 | * @param cloudFormationStackId - the CloudFormation stack ID 34 | * @param service - the Service 35 | * @param env - the Environment 36 | * @param recommendedMonitors - recommended monitors 37 | * @returns valid monitor parameters 38 | */ 39 | export function buildMonitorParams( 40 | monitor: Monitor, 41 | cloudFormationStackId: string, 42 | service: string, 43 | env: string, 44 | recommendedMonitors: RecommendedMonitors, 45 | ): { [x: string]: any } { 46 | const serverlessMonitorId = Object.keys(monitor)[0]; 47 | 48 | if (!monitor[serverlessMonitorId]) { 49 | monitor[serverlessMonitorId] = {}; 50 | } 51 | 52 | const monitorParams = { ...monitor[serverlessMonitorId] }; 53 | 54 | if (!monitorParams.tags) { 55 | monitorParams.tags = []; 56 | } 57 | if (!monitorParams.options) { 58 | monitorParams.options = {}; 59 | } 60 | if 
(monitorParams.type === undefined) { 61 | monitorParams.type = "metric alert"; 62 | } 63 | 64 | monitorParams.tags = [ 65 | ...monitorParams.tags, 66 | "serverless_monitor_type:single_function", 67 | `serverless_monitor_id:${serverlessMonitorId}`, 68 | `aws_cloudformation_stack-id:${cloudFormationStackId}`, 69 | "created_by:dd_sls_plugin", 70 | `env:${env}`, 71 | `service:${service}`, 72 | ]; 73 | 74 | if (isRecommendedMonitor(serverlessMonitorId, recommendedMonitors)) { 75 | const recommendedMonitor = recommendedMonitors[serverlessMonitorId]; 76 | let criticalThreshold = recommendedMonitor.threshold; 77 | 78 | if (monitorParams.options?.thresholds?.critical !== undefined) { 79 | criticalThreshold = monitorParams.options.thresholds.critical; 80 | } 81 | 82 | monitorParams.query = recommendedMonitor.query(cloudFormationStackId, criticalThreshold); 83 | 84 | if (!monitorParams.message) { 85 | monitorParams.message = recommendedMonitor.message; 86 | } 87 | if (!monitorParams.name) { 88 | monitorParams.name = getInterpolatedMonitorName(recommendedMonitor); 89 | } 90 | } 91 | 92 | return monitorParams; 93 | } 94 | 95 | /** 96 | * Checks to see if the given monitor is a serverless recommended monitor 97 | * @param serverlessMonitorId - Unique ID string defined for each monitor 98 | * @param recommendedMonitors - recommended monitors 99 | * @returns true if a given monitor is a serverless recommended monitor 100 | */ 101 | function isRecommendedMonitor(serverlessMonitorId: string, recommendedMonitors: RecommendedMonitors): boolean { 102 | return recommendedMonitors[serverlessMonitorId] !== undefined; 103 | } 104 | 105 | /** 106 | * Checks to see if the monitor already exists 107 | * @param serverlessMonitorId - Unique ID string defined for each serverless monitor 108 | * @param existingMonitors - Monitors that have already been created 109 | * @returns true if given monitor already exists 110 | */ 111 | function doesMonitorExist(serverlessMonitorId: string, 
existingMonitors: { [key: string]: number }): boolean { 112 | return Object.keys(existingMonitors).includes(serverlessMonitorId); 113 | } 114 | 115 | /** 116 | * Deletes the monitors that have been removed from the plugin 117 | * @param site Which Datadog site to send data to, e.g. datadoghq.com 118 | * @param pluginMonitors Monitors that are currently defined in the plugin 119 | * @param existingMonitors Monitors that have already been created 120 | * @param monitorsApiKey API Key 121 | * @param monitorsAppKey Application Key 122 | * @returns an array of successfully deleted monitors 123 | */ 124 | async function deleteRemovedMonitors( 125 | site: string, 126 | pluginMonitors: Monitor[], 127 | existingMonitors: { [key: string]: number }, 128 | monitorsApiKey: string, 129 | monitorsAppKey: string, 130 | ): Promise { 131 | const successfullyDeletedMonitors: string[] = []; 132 | const currentMonitorIds: string[] = []; 133 | pluginMonitors.forEach((currentMonitor) => currentMonitorIds.push(Object.keys(currentMonitor)[0])); 134 | for (const pluginMonitorId of Object.keys(existingMonitors)) { 135 | if (!currentMonitorIds.includes(pluginMonitorId)) { 136 | const response = await deleteMonitor(site, existingMonitors[pluginMonitorId], monitorsApiKey, monitorsAppKey); 137 | const successfullyDeleted = handleMonitorsApiResponse(response, pluginMonitorId); 138 | if (successfullyDeleted) { 139 | successfullyDeletedMonitors.push(` ${pluginMonitorId}`); 140 | } 141 | } 142 | } 143 | return successfullyDeletedMonitors; 144 | } 145 | 146 | /** 147 | * Handles the Monitor API response and logs the appropriate error 148 | * @param response Monitor API Response 149 | * @param serverlessMonitorId Serverless Monitor ID 150 | * @param subdomain Subdomain to use for app URLs, e.g. "app" 151 | * @param site Which Datadog site to send data to, e.g. datadoghq.com 152 | * @returns true if the response is 200 OK. Throw an error for other HTTP status codes. 
153 | */ 154 | export function handleMonitorsApiResponse( 155 | response: Response, 156 | serverlessMonitorId?: string, 157 | subdomain?: string, 158 | site?: string, 159 | ): boolean { 160 | if (response.status === 200) { 161 | return true; 162 | } else if (response.status === 400) { 163 | throw new Error( 164 | `400 Bad Request: This could be due to incorrect syntax or a missing required tag for ${serverlessMonitorId}. Have you looked at your monitor tag policies? https://${subdomain}.${site}/monitors/settings/policies`, 165 | ); 166 | } else { 167 | throw new Error(`${response.status} ${response.statusText}`); 168 | } 169 | } 170 | 171 | /** 172 | * Creates, updates, and deletes the appropriate monitor configurations as defined in the serverless.yml file 173 | * @param subdomain - Subdomain to use for app URLs, e.g. "app" 174 | * @param site - Which Datadog site to send data to, e.g. datadoghq.com 175 | * @param monitors - Monitors defined in the serverless.yml file 176 | * @param monitorsApiKey - the API Key 177 | * @param monitorsAppKey - the Application Key 178 | * @param cloudFormationStackId - the CloudFormation stack ID 179 | * @param service - the Service 180 | * @param env - the Environment 181 | * @returns Log statements showing the monitors that have been successfully 182 | * created, updated, and deleted according to the configuration 183 | * defined in the plugin 184 | */ 185 | export async function setMonitors( 186 | subdomain: string, 187 | site: string, 188 | monitors: Monitor[], 189 | monitorsApiKey: string, 190 | monitorsAppKey: string, 191 | cloudFormationStackId: string, 192 | service: string, 193 | env: string, 194 | ): Promise { 195 | const recommendedMonitors = await getRecommendedMonitors(site, monitorsApiKey, monitorsAppKey); 196 | const serverlessMonitorIdByMonitorId = await getExistingMonitors( 197 | site, 198 | cloudFormationStackId, 199 | monitorsApiKey, 200 | monitorsAppKey, 201 | ); 202 | const successfullyUpdatedMonitors: string[] 
= []; 203 | const successfullyCreatedMonitors: string[] = []; 204 | 205 | for (const monitor of monitors) { 206 | const serverlessMonitorId = Object.keys(monitor)[0]; 207 | const monitorIdNumber = serverlessMonitorIdByMonitorId[serverlessMonitorId]; 208 | const monitorParams = buildMonitorParams(monitor, cloudFormationStackId, service, env, recommendedMonitors); 209 | const monitorExists = await doesMonitorExist(serverlessMonitorId, serverlessMonitorIdByMonitorId); 210 | if (monitorExists) { 211 | const response = await updateMonitor(site, monitorIdNumber, monitorParams, monitorsApiKey, monitorsAppKey); 212 | const successfullyCreated = handleMonitorsApiResponse(response, serverlessMonitorId, subdomain, site); 213 | if (successfullyCreated) { 214 | successfullyUpdatedMonitors.push(` ${serverlessMonitorId}`); 215 | } 216 | } else { 217 | const response = await createMonitor(site, monitorParams, monitorsApiKey, monitorsAppKey); 218 | const successfullyUpdated = handleMonitorsApiResponse(response, serverlessMonitorId, subdomain, site); 219 | if (successfullyUpdated) { 220 | successfullyCreatedMonitors.push(` ${serverlessMonitorId}`); 221 | } 222 | } 223 | } 224 | const successfullyDeletedMonitors = await deleteRemovedMonitors( 225 | site, 226 | monitors, 227 | serverlessMonitorIdByMonitorId, 228 | monitorsApiKey, 229 | monitorsAppKey, 230 | ); 231 | const logStatements: string[] = []; 232 | if (successfullyUpdatedMonitors.length > 0) { 233 | logStatements.push(`Successfully updated${successfullyUpdatedMonitors}`); 234 | } 235 | if (successfullyCreatedMonitors.length > 0) { 236 | logStatements.push(`Successfully created${successfullyCreatedMonitors}`); 237 | } 238 | if (successfullyDeletedMonitors.length > 0) { 239 | logStatements.push(`Successfully deleted${successfullyDeletedMonitors}`); 240 | } 241 | return logStatements; 242 | } 243 | 244 | /** Helper function that replaces the default threshold included in the query string with the new critical threshold 
configured by the customer 245 | * @param query - the query string 246 | * @param criticalThreshold - new critical threshold as defined by the customer 247 | * @returns the query string where the critical threshold is already replaced 248 | */ 249 | export function replaceCriticalThreshold(query: string, criticalThreshold: number): string { 250 | const thresholdComparison = /(>=|>)(.*)$/; 251 | const newQuery = query.replace(thresholdComparison, `$1 ${criticalThreshold}`); 252 | 253 | return newQuery; 254 | } 255 | 256 | /** 257 | * Helper function that interpolates template variables (if any) into the name 258 | * of a recommended monitor, e.g. replaces template variable `$functionName` 259 | * with its default value `{{functionname.name}}`. The interpolation result is 260 | * returned. The original monitor params are not altered. 261 | * Template variables with no default value will be ignored. 262 | * @param recommendedMonitors - recommended monitors 263 | * @returns Interpolation result, e.g. 
"High Error Rate on {{functionname.name}} in {{region.name}} for {{aws_account.name}}" 264 | */ 265 | function getInterpolatedMonitorName(recommendedMonitor: ServerlessMonitor): string { 266 | let interpolatedName = recommendedMonitor.name; 267 | recommendedMonitor.templateVariables?.forEach((templateVariable) => { 268 | if (templateVariable.defaults.length > 0) { 269 | interpolatedName = interpolatedName.replace("$" + templateVariable.name, templateVariable.defaults[0]); 270 | } 271 | }); 272 | return interpolatedName; 273 | } 274 | -------------------------------------------------------------------------------- /src/output.ts: -------------------------------------------------------------------------------- 1 | import * as Serverless from "serverless"; 2 | import { FunctionInfo } from "./layer"; 3 | 4 | const yellowFont = "\x1b[33m"; 5 | const underlineFont = "\x1b[4m"; 6 | const endFont = "\x1b[0m"; 7 | const outputPrefix = "DatadogMonitor"; 8 | 9 | /** 10 | * Builds the CloudFormation Outputs containing the alphanumeric key, description, 11 | * and value (URL) to the function in Datadog 12 | */ 13 | export async function addOutputLinks( 14 | serverless: Serverless, 15 | site: string, 16 | subdomain: string, 17 | handlers: FunctionInfo[], 18 | ): Promise { 19 | const awsAccount = await serverless.getProvider("aws").getAccountId(); 20 | const region = serverless.service.provider.region; 21 | const outputs = serverless.service.provider.compiledCloudFormationTemplate?.Outputs; 22 | if (outputs === undefined) { 23 | return; 24 | } 25 | 26 | handlers.forEach(({ name, handler }) => { 27 | const functionName = handler.name; 28 | const key = `${outputPrefix}${name}`.replace(/[^a-z0-9]/gi, ""); 29 | outputs[key] = { 30 | Description: `See ${name} in Datadog`, 31 | Value: `https://${subdomain}.${site}/functions?cloud=aws&entity_view=lambda_functions&selection=aws-lambda-functions%2B${functionName?.toLowerCase()}%2B${region}%2B${awsAccount}`, 32 | }; 33 | }); 34 | } 35 | 36 
/**
 * Queries the deployed CloudFormation stack and prints the Datadog function
 * links plus a link to the service's serverless monitors.
 */
export async function printOutputs(
  serverless: Serverless,
  site: string,
  subdomain: string,
  service: string,
  env: string,
): Promise<void> {
  const aws = serverless.getProvider("aws");
  const stackName = aws.naming.getStackName();
  const describeStackOutput = await aws
    .request("CloudFormation", "describeStacks", { StackName: stackName }, { region: aws.getRegion() })
    .catch(() => {
      // Ignore any request exceptions, fail silently and skip output logging
    });
  if (describeStackOutput === undefined) {
    return;
  }

  logHeader("Datadog Monitoring", true);
  logHeader("functions");

  for (const output of describeStackOutput.Stacks[0].Outputs) {
    if (!output.OutputKey.startsWith(outputPrefix)) {
      continue;
    }
    const key = output.OutputKey.substring(outputPrefix.length);
    logMessage(`${key}: ${output.OutputValue}`);
  }
  logHeader("View Serverless Monitors", true);
  logMessage(
    `https://${subdomain}.${site}/monitors/manage?q=tag%3A%28%22env%3A${env}%22%20AND%20%22service%3A${service}%22%29`,
  );
}

/** Prints a yellow, optionally underlined, section header. */
function logHeader(message: string, underline = false): void {
  const startFont = underline ? yellowFont + underlineFont : yellowFont;
  console.log(`${startFont}${message}${endFont}`);
}

/** Prints an indented message line. */
export function logMessage(message: string): void {
  console.log(` ${message}`);
}

--------------------------------------------------------------------------------
/src/span-link.spec.ts:
--------------------------------------------------------------------------------
// tslint:disable-next-line:no-var-requires
const stepFunctionsHelper = require("./step-functions-helper");
stepFunctionsHelper.updateDefinitionString = jest.fn().mockImplementation();

import Service from "serverless/classes/Service";
import Serverless from "serverless";
import { mergeStepFunctionAndLambdaTraces } from "./span-link";

describe("mergeStepFunctionAndLambdaTraces option related tests", () => {
  // Builds a minimal Service whose compiled CloudFormation template contains
  // the given resources.
  function serviceWithResources(resources?: Record<string, any>, serviceName = "my-service"): Service {
    const service = {
      getServiceName: () => serviceName,
      serverless: {
        cli: {
          log: () => "",
        },
      },
      provider: {
        name: "",
        stage: "",
        region: "",
        versionFunctions: true,
        compiledCloudFormationTemplate: {
          Resources: resources as any,
          Outputs: {},
        },
        logs: {
          restApi: true,
          httpApi: true,
          websocket: true,
        },
      },
    };
    return service as any;
  }
  describe("test mergeStepFunctionAndLambdaTraces", () => {
    it("have no state machine in the resources", async () => {
      const resources = {
        "a-lambda-resource": {
          Type: "AWS::Lambda::Function",
        },
      };
      const service = serviceWithResources();
      const serverless: Serverless = service.serverless;
      mergeStepFunctionAndLambdaTraces(resources, serverless);
      expect(stepFunctionsHelper.updateDefinitionString).toBeCalledTimes(0);
    });

    it("have one state machine in the resources", async () => {
      const resources = {
"unit-test-state-machine": {
          Type: "AWS::StepFunctions::StateMachine",
          Properties: {
            DefinitionString: {
              "Fn::Sub": ["real-definition-string", {}],
            },
          },
        },
        "another-resource": {
          Type: "AWS::Lambda::Function",
        },
      };
      const service = serviceWithResources();
      const serverless: Serverless = service.serverless;
      mergeStepFunctionAndLambdaTraces(resources, serverless);
      expect(stepFunctionsHelper.updateDefinitionString).toBeCalledTimes(1);
    });

    it("can handle a steate machine with a string DefinitionString", async () => {
      const resources = {
        "unit-test-state-machine": {
          Type: "AWS::StepFunctions::StateMachine",
          Properties: {
            DefinitionString:
              '{"Comment":"Some comment","StartAt":"agocsTest1","States":{"agocsTest1":{"Type":"Task","Resource":"arn:aws:states:::states:startExecution.sync:2","Parameters":{"StateMachineArn":"arn:aws:states:::states:startExecution.sync:2","Input":{"foo":"bar"}},"End":true}}}',
          },
        },
        "another-resource": {
          Type: "AWS::Lambda::Function",
        },
      };
      const service = serviceWithResources();
      const serverless: Serverless = service.serverless;
      mergeStepFunctionAndLambdaTraces(resources, serverless);
      expect(stepFunctionsHelper.updateDefinitionString).toBeCalledTimes(1);
    });

    it("have two state machine in the resources", async () => {
      const resources = {
        "unit-test-state-machine": {
          Type: "AWS::StepFunctions::StateMachine",
          Properties: {
            DefinitionString: {
              "Fn::Sub": ["real-definition-string", {}],
            },
          },
        },
        "unit-test-state-machine2": {
          Type: "AWS::StepFunctions::StateMachine",
          Properties: {
            DefinitionString: {
              "Fn::Sub": ["real-definition-string", {}],
            },
          },
        },
        "another-resource": {
          Type: "AWS::Lambda::Function",
        },
      };
      const service = serviceWithResources();
      const serverless: Serverless = service.serverless;
      mergeStepFunctionAndLambdaTraces(resources, serverless);
      expect(stepFunctionsHelper.updateDefinitionString).toBeCalledTimes(2);
    });
  });
});

--------------------------------------------------------------------------------
/src/span-link.ts:
--------------------------------------------------------------------------------
import { GeneralResource, updateDefinitionString } from "./step-functions-helper";
import * as Serverless from "serverless";

/**
 * Rewrites the DefinitionString of every Step Function state machine in the
 * compiled CloudFormation resources so that trace context is injected into
 * Lambda invocations and nested Step Function invocations.
 */
export function mergeStepFunctionAndLambdaTraces(
  resources: { [key: string]: GeneralResource },
  serverless: Serverless,
): void {
  for (const [resourceName, resourceObj] of Object.entries(resources)) {
    if (resourceObj.Type !== "AWS::StepFunctions::StateMachine" || !resourceObj.Properties) {
      continue;
    }
    const definitionString = resourceObj.Properties.DefinitionString;
    // Fix: the previous non-null assertion (`DefinitionString!`) passed
    // undefined through to updateDefinitionString, which logged a spurious
    // parse error and wrote undefined back. Skip such resources instead.
    if (definitionString === undefined) {
      continue;
    }
    const newDefString = updateDefinitionString(definitionString, serverless, resourceName);
    resourceObj.Properties.DefinitionString = newDefString;
  }
}

--------------------------------------------------------------------------------
/src/step-functions-helper.spec.ts:
--------------------------------------------------------------------------------
import {
  isLambdaApiStep,
  StateMachineDefinition,
  updateDefinitionString,
  updateDefinitionForStepFunctionInvocationStep,
  StepFunctionInput,
} from "./step-functions-helper";

import Service from "serverless/classes/Service";

function serviceWithResources(resources?: Record<string, any>, serviceName = "my-service"): Service {
  const service = {
    getServiceName: () => serviceName,
    serverless: {
      cli: {
        log: () => "",
      },
    },
    provider: {
      name: "",
      stage: "",
      region: "",
      versionFunctions: true,
      compiledCloudFormationTemplate: {
        Resources: resources as any,
        Outputs:
{},
      },
      logs: {
        restApi: true,
        httpApi: true,
        websocket: true,
      },
    },
  };
  return service as any;
}

describe("test updateDefinitionString", () => {
  const serverless = serviceWithResources().serverless;
  const stateMachineName = "fake-state-machine-name";

  it("test lambda step with non-object Parameters field", async () => {
    const definitionString = {
      "Fn::Sub": [
        '{"Comment":"fake comment","StartAt":"InvokeLambda","States":{"InvokeLambda":{"Type":"Task","Parameters":"Just a string!","Resource":"arn:aws:states:::lambda:invoke","End":true}}}',
        {},
      ],
    };
    updateDefinitionString(definitionString, serverless, stateMachineName);

    // Non-object Parameters must be left untouched.
    const updatedDefinition: StateMachineDefinition = JSON.parse(definitionString["Fn::Sub"][0] as string);
    expect(updatedDefinition.States?.InvokeLambda?.Parameters).toBe("Just a string!");
  });

  it("Case 4.1: test lambda step with default payload of '$'", async () => {
    const definitionString = {
      "Fn::Sub": [
        '{"Comment":"fake comment","StartAt":"InvokeLambda","States":{"InvokeLambda":{"Type":"Task","Parameters":{"FunctionName":"fake-function-name","Payload.$":"$"},"Resource":"arn:aws:states:::lambda:invoke","End":true}}}',
        {},
      ],
    };
    updateDefinitionString(definitionString, serverless, stateMachineName);

    const updatedDefinition: StateMachineDefinition = JSON.parse(definitionString["Fn::Sub"][0] as string);
    expect(updatedDefinition.States?.InvokeLambda?.Parameters?.["Payload.$"]).toBe(
      "States.JsonMerge($$, $, false)",
    );
  });

  it("Case 4.3: test lambda step with empty payload", async () => {
    const definitionString = {
      "Fn::Sub": [
        '{"Comment":"fake comment","StartAt":"InvokeLambda","States":{"InvokeLambda":{"Type":"Task","Parameters":{"FunctionName":"fake-function-name","Payload.$":{}},"Resource":"arn:aws:states:::lambda:invoke","End":true}}}',
        {},
      ],
    };
    updateDefinitionString(definitionString, serverless, stateMachineName);

    const updatedDefinition: StateMachineDefinition = JSON.parse(definitionString["Fn::Sub"][0] as string);
    expect(updatedDefinition.States?.InvokeLambda?.Parameters?.["Payload.$"]).toStrictEqual({});
  });

  it("Case 4.3: test lambda step with custom payload", async () => {
    const definitionString = {
      "Fn::Sub": [
        '{"Comment":"fake comment","StartAt":"InvokeLambda","States":{"InvokeLambda":{"Type":"Task","Parameters":{"FunctionName":"fake-function-name","Payload.$":"$$.State"},"Resource":"arn:aws:states:::lambda:invoke","End":true}}}',
        {},
      ],
    };
    updateDefinitionString(definitionString, serverless, stateMachineName);

    const updatedDefinition: StateMachineDefinition = JSON.parse(definitionString["Fn::Sub"][0] as string);
    expect(updatedDefinition.States?.InvokeLambda?.Parameters?.["Payload.$"]).toBe("$$.State");
  });

  it("updates the definitionstring of a StepFunction with a string definitionString", async () => {
    const definitionString =
      '{"Comment":"Some comment","StartAt":"agocsTest1","States":{"agocsTest1":{"Type":"Task","Resource":"arn:aws:states:::states:startExecution.sync:2","Parameters":{"StateMachineArn":"arn:aws:states:::states:startExecution.sync:2","Input":{"foo":"bar"}},"End":true}}}';
    const newDefString = updateDefinitionString(definitionString, serverless, stateMachineName);

    expect(typeof newDefString === "string").toBeTruthy();
    expect(newDefString).toContain("CONTEXT");
  });

  it("Case 1: test lambda step without Payload or Payload.$", async () => {
    const definitionString = {
      "Fn::Sub": [
        '{"Comment":"fake comment","StartAt":"InvokeLambda","States":{"InvokeLambda":{"Type":"Task","Parameters":{"FunctionName":"fake-function-name"},"Resource":"arn:aws:states:::lambda:invoke","End":true}}}',
        {},
      ],
    };
    updateDefinitionString(definitionString, serverless, stateMachineName);

    const updatedDefinition: StateMachineDefinition = JSON.parse(definitionString["Fn::Sub"][0] as string);
    expect(updatedDefinition.States?.InvokeLambda?.Parameters?.["Payload.$"]).toBe(
      "$$['Execution', 'State', 'StateMachine']",
    );
  });

  it("Case 3: test lambda step when Payload is not an object", async () => {
    const definitionString = {
      "Fn::Sub": [
        '{"Comment":"fake comment","StartAt":"InvokeLambda","States":{"InvokeLambda":{"Type":"Task","Parameters":{"FunctionName":"fake-function-name","Payload":"Just a string!"},"Resource":"arn:aws:states:::lambda:invoke","End":true}}}',
        {},
      ],
    };
    updateDefinitionString(definitionString, serverless, stateMachineName);

    const updatedDefinition: StateMachineDefinition = JSON.parse(definitionString["Fn::Sub"][0] as string);
    expect(updatedDefinition.States?.InvokeLambda?.Parameters?.["Payload"]).toBe("Just a string!");
  });

  it("Case 2.1: test lambda step when Execution, State and StateMachine are already injected into Payload", async () => {
    const definitionString = {
      "Fn::Sub": [
        '{"Comment":"fake comment","StartAt":"InvokeLambda","States":{"InvokeLambda":{"Type":"Task","Parameters":{"FunctionName":"fake-function-name","Payload":{"Execution.$":"$$.Execution","State.$":"$$.State","StateMachine.$":"$$.StateMachine"}},"Resource":"arn:aws:states:::lambda:invoke","End":true}}}',
        {},
      ],
    };
    updateDefinitionString(definitionString, serverless, stateMachineName);

    const updatedDefinition: StateMachineDefinition = JSON.parse(definitionString["Fn::Sub"][0] as string);
    expect(updatedDefinition.States?.InvokeLambda?.Parameters?.["Payload"]).toStrictEqual({
      "Execution.$": "$$.Execution",
      "State.$": "$$.State",
      "StateMachine.$": "$$.StateMachine",
    });
  });

  it("Case 2.2: test lambda step when some of Execution, State or StateMachine field but conject injection is not set up completely", async () => {
    const definitionString = {
      "Fn::Sub": [
        '{"Comment":"fake comment","StartAt":"InvokeLambda","States":{"InvokeLambda":{"Type":"Task","Parameters":{"FunctionName":"fake-function-name","Payload":{"Execution":"$$.Execution"}},"Resource":"arn:aws:states:::lambda:invoke","End":true}}}',
        {},
      ],
    };
    updateDefinitionString(definitionString, serverless, stateMachineName);

    const updatedDefinition: StateMachineDefinition = JSON.parse(definitionString["Fn::Sub"][0] as string);
    expect(updatedDefinition.States?.InvokeLambda?.Parameters?.["Payload"]).toStrictEqual({
      Execution: "$$.Execution",
    });
  });

  it("Case 2.3: test lambda step when none of Execution, State, or StateMachine is in Payload", async () => {
    const definitionString = {
      "Fn::Sub": [
        '{"Comment":"fake comment","StartAt":"InvokeLambda","States":{"InvokeLambda":{"Type":"Task","Parameters":{"FunctionName":"fake-function-name","Payload":{"CustomerId":42}},"Resource":"arn:aws:states:::lambda:invoke","End":true}}}',
        {},
      ],
    };
    updateDefinitionString(definitionString, serverless, stateMachineName);

    const updatedDefinition: StateMachineDefinition = JSON.parse(definitionString["Fn::Sub"][0] as string);
    expect(updatedDefinition.States?.InvokeLambda?.Parameters?.["Payload"]).toStrictEqual({
      CustomerId: 42,
      "Execution.$": "$$.Execution",
      "State.$": "$$.State",
      "StateMachine.$": "$$.StateMachine",
    });
  });

  it(`Case 4.2: test lambda step already has context injection set up using "Payload.$": "States.JsonMerge($$, $, false)"`, async () => {
    const definitionString = {
      "Fn::Sub": [
        '{"Comment":"fake comment","StartAt":"InvokeLambda","States":{"InvokeLambda":{"Type":"Task","Parameters":{"FunctionName":"fake-function-name","Payload.$":"States.JsonMerge($$, $, false)"},"Resource":"arn:aws:states:::lambda:invoke","End":true}}}',
        {},
      ],
    };
    updateDefinitionString(definitionString, serverless, stateMachineName);

    const updatedDefinition: StateMachineDefinition = JSON.parse(definitionString["Fn::Sub"][0] as string);
    expect(updatedDefinition.States?.InvokeLambda?.Parameters?.["Payload.$"]).toBe(
      "States.JsonMerge($$, $, false)",
    );
  });

  it(`Case 4.2: test lambda step already has context injection set up using "Payload.$": "$$['Execution', 'State', 'StateMachine']"`, async () => {
    const definitionString = {
      "Fn::Sub": [
        `{"Comment":"fake comment","StartAt":"InvokeLambda","States":{"InvokeLambda":{"Type":"Task","Parameters":{"FunctionName":"fake-function-name","Payload.$":"$$['Execution', 'State', 'StateMachine']"},"Resource":"arn:aws:states:::lambda:invoke","End":true}}}`,
        {},
      ],
    };
    updateDefinitionString(definitionString, serverless, stateMachineName);

    const updatedDefinition: StateMachineDefinition = JSON.parse(definitionString["Fn::Sub"][0] as string);
    expect(updatedDefinition.States?.InvokeLambda?.Parameters?.["Payload.$"]).toBe(
      `$$['Execution', 'State', 'StateMachine']`,
    );
  });

  it("Case 4.3: test lambda step has custom Payload.$ do nothing", async () => {
    const definitionString = {
      "Fn::Sub": [
        '{"Comment":"fake comment","StartAt":"InvokeLambda","States":{"InvokeLambda":{"Type":"Task","Parameters":{"FunctionName":"fake-function-name","Payload.$":"something-customized"},"Resource":"arn:aws:states:::lambda:invoke","End":true}}}',
        {},
      ],
    };
    updateDefinitionString(definitionString, serverless, stateMachineName);

    const updatedDefinition: StateMachineDefinition = JSON.parse(definitionString["Fn::Sub"][0] as string);
    expect(updatedDefinition.States?.InvokeLambda?.Parameters?.["Payload.$"]).toBe("something-customized");
  });

  it("test lambda legacy integration with undefined parameters do nothing", async () => {
    const definitionString = {
      "Fn::Sub": [
        '{"Comment":"fake comment","StartAt":"InvokeLambda","States":{"InvokeLambda":{"Type":"Task","Resource":"arn:aws:lambda:sa-east-1:601427271234:function:unit-test-function-name","End":true}}}',
        {},
      ],
    };
    updateDefinitionString(definitionString, serverless, stateMachineName);

    const updatedDefinition: StateMachineDefinition = JSON.parse(definitionString["Fn::Sub"][0] as string);
    expect(updatedDefinition.States?.InvokeLambda?.Parameters?.["Payload.$"]).toBe(undefined);
  });

  it("test non-lambda steps do nothing", async () => {
    const definitionString = {
      "Fn::Sub": [
        '{"Comment":"fake comment","StartAt":"InvokeLambda","States":{"InvokeDynamodb":{"Type":"Task","Parameters":{"someKey":"someValue"},"Resource":"arn:aws:states:::dynamodb:updateItem","End":true}}}',
        {},
      ],
    };
    updateDefinitionString(definitionString, serverless, stateMachineName);

    const updatedDefinition: StateMachineDefinition = JSON.parse(definitionString["Fn::Sub"][0] as string);
    expect(updatedDefinition.States?.InvokeDynamodb).toStrictEqual({
      End: true,
      Parameters: { someKey: "someValue" },
      Resource: "arn:aws:states:::dynamodb:updateItem",
      Type: "Task",
    });
  });

  it("test legacy lambda context is injected", async () => {
    const definitionString = {
      "Fn::Sub": [
        '{"Comment":"fake comment","StartAt":"InvokeLambda","States":{"InvokeLambda":{"Type":"Task","Parameters":{"FunctionName":"fake-function-name","Payload.$":"$"},"Resource":"arn:aws:lambda:sa-east-1:601427271234:function:unit-test-function-name","End":true}}}',
        {},
      ],
    };
    updateDefinitionString(definitionString, serverless, stateMachineName);

    const updatedDefinition: StateMachineDefinition = JSON.parse(definitionString["Fn::Sub"][0] as string);
    expect(updatedDefinition.States?.InvokeLambda).toStrictEqual({
      End: true,
      Parameters: {
        FunctionName: "fake-function-name",
        "Payload.$": "States.JsonMerge($$, $, false)",
      },
      Resource: "arn:aws:lambda:sa-east-1:601427271234:function:unit-test-function-name",
      Type: "Task",
    });
  });

  it("test empty Fn::Sub", async () => {
    const definitionString = {
      "Fn::Sub": [],
    };
    updateDefinitionString(definitionString, serverless, stateMachineName);

    expect(definitionString["Fn::Sub"].length).toBe(0);
  });

  it("test step function invocation", async () => {
    const definitionString = {
      "Fn::Sub": [
        '{"Comment": "A description of my state machine", "StartAt": "Step Functions StartExecution", "States": {"Step Functions StartExecution": {"Type": "Task", "Resource": "arn:aws:states:::states:startExecution", "Parameters": {"StateMachineArn": "arn:aws:states:us-east-1:425362996713:stateMachine:agocs-test-noop-state-machine-2", "Input": {"StatePayload": "Hello from Step Functions!", "AWS_STEP_FUNCTIONS_STARTED_BY_EXECUTION_ID.$": "$$.Execution.Id" }}, "End": true }}}',
        {},
      ],
    };
    updateDefinitionString(definitionString, serverless, stateMachineName);

    const updatedDefinition: StateMachineDefinition = JSON.parse(definitionString["Fn::Sub"][0] as string);
    const input = updatedDefinition.States["Step Functions StartExecution"]?.Parameters?.Input as StepFunctionInput;
    expect(input["CONTEXT.$"]).toBe("$$['Execution', 'State', 'StateMachine']");
  });

  it("test step function invocation without input", async () => {
    const definitionString = {
      "Fn::Sub": [
        '{"Comment": "A description of my state machine", "StartAt": "Step Functions StartExecution", "States": {"Step Functions StartExecution": {"Type": "Task", "Resource": "arn:aws:states:::states:startExecution", "Parameters": {"StateMachineArn": "arn:aws:states:us-east-1:425362996713:stateMachine:agocs-test-noop-state-machine-2"}, "End": true }}}',
        {},
      ],
    };
    updateDefinitionString(definitionString, serverless, stateMachineName);

    const updatedDefinition: StateMachineDefinition = JSON.parse(definitionString["Fn::Sub"][0] as string);
    const input = updatedDefinition.States["Step Functions StartExecution"]?.Parameters?.Input as StepFunctionInput;
    expect(input["CONTEXT.$"]).toBe("States.JsonMerge($$, $, false)");
  });

  it("test step function invocation with pre-exisitng context object", async () => {
    const definitionString = {
      "Fn::Sub": [
        '{"Comment": "A description of my state machine", "StartAt": "Step Functions StartExecution", "States": {"Step Functions StartExecution": {"Type": "Task", "Resource": "arn:aws:states:::states:startExecution", "Parameters": {"StateMachineArn": "arn:aws:states:us-east-1:425362996713:stateMachine:agocs-test-noop-state-machine-2", "Input": {"StatePayload": "Hello from Step Functions!", "AWS_STEP_FUNCTIONS_STARTED_BY_EXECUTION_ID.$": "$$.Execution.Id", "CONTEXT.$": "something else"}}, "End": true }}}',
        {},
      ],
    };
    updateDefinitionString(definitionString, serverless, stateMachineName);

    const updatedDefinition: StateMachineDefinition = JSON.parse(definitionString["Fn::Sub"][0] as string);
    const input = updatedDefinition.States["Step Functions StartExecution"]?.Parameters?.Input as StepFunctionInput;
    expect(input["CONTEXT.$"]).toBe("something else");
  });
});

describe("test updateDefinitionForStepFunctionInvocationStep", () => {
  const stepName = "Step Functions StartExecution";
  const serverless = serviceWithResources().serverless;
  const stateMachineName = "fake-state-machine-name";

  it("Case 0.2: Input field not set in parameters", async () => {
    const parameters = { FunctionName: "bla" };
    const step = { Parameters: parameters };
    expect(updateDefinitionForStepFunctionInvocationStep(stepName, step, serverless, stateMachineName)).toBeTruthy();
  });

  it("Case 1: Input field empty", async () => {
    const parameters = { FunctionName: "bla", Input: {} };
    const step = { Parameters: parameters };
    expect(updateDefinitionForStepFunctionInvocationStep(stepName, step, serverless, stateMachineName)).toBeTruthy();
  });

  it("Case 0.3: Input field is not an object", async () => {
    const parameters = { FunctionName: "bla", Input: "foo" };
    const step = { Parameters: parameters };
    expect(updateDefinitionForStepFunctionInvocationStep(stepName, step, serverless, stateMachineName)).toBeFalsy();
  });

  it('Case 0.4: Parameters field has "Input.$" field', async () => {
    const parameters = { FunctionName: "bla", "Input.$": "$" };
    const step = { Parameters: parameters };
    expect(updateDefinitionForStepFunctionInvocationStep(stepName, step, serverless, stateMachineName)).toBeFalsy();
  });

  it('Case 1: Input field has stuff in it but no "CONTEXT" or "CONTEXT.$"', async () => {
    const parameters = { FunctionName: "bla", Input: { foo: "bar" } };
    const step = { Parameters: parameters };
    expect(updateDefinitionForStepFunctionInvocationStep(stepName, step, serverless, stateMachineName)).toBeTruthy();
  });

  it('Case 2: Input field has "CONTEXT" field', async () => {
    const parameters = { FunctionName: "bla", Input: { CONTEXT: "foo" } };
    const step = { Parameters: parameters };
    expect(updateDefinitionForStepFunctionInvocationStep(stepName, step, serverless, stateMachineName)).toBeFalsy();
  });

  it("Case 3.1: Context injection already set up using States.JsonMerge($$, $, false)", async () => {
    const parameters = { FunctionName: "bla", Input: { "CONTEXT.$": "States.JsonMerge($$, $, false)" } };
    const step = { Parameters: parameters };
    expect(updateDefinitionForStepFunctionInvocationStep(stepName, step, serverless, stateMachineName)).toBeFalsy();
  });

  it("Case 3.1: Context injection already set up using $$['Execution', 'State', 'StateMachine']", async () => {
    const parameters = { FunctionName: "bla", Input: { "CONTEXT.$": "$$['Execution', 'State', 'StateMachine']" } };
    const step = { Parameters: parameters };
    expect(updateDefinitionForStepFunctionInvocationStep(stepName, step, serverless, stateMachineName)).toBeFalsy();
  });

  it("Case 3.2: Input field has a custom CONTEXT.$ field", async () => {
    const parameters = { FunctionName: "bla", Input: { "CONTEXT.$": "something else" } };
    const step = { Parameters: parameters };
    expect(updateDefinitionForStepFunctionInvocationStep(stepName, step, serverless, stateMachineName)).toBeFalsy();
  });
});

describe("test isLambdaApiStep", () => {
  it("resource is default lambda", async () => {
    const resource = "arn:aws:states:::lambda:invoke";
    expect(isLambdaApiStep(resource)).toBeTruthy();
  });

  it("resource is lambda arn for legacy lambda api", async () => {
    const resource =
"arn:aws:lambda:sa-east-1:601427271234:function:hello-function";
    expect(isLambdaApiStep(resource)).toBeTruthy();
  });

  it("resource of dynamodb", async () => {
    const resource = "arn:aws:states:::dynamodb:updateItem";
    expect(isLambdaApiStep(resource)).toBeFalsy();
  });

  it("resource of empty string", async () => {
    const resource = "";
    expect(isLambdaApiStep(resource)).toBeFalsy();
  });

  it("resource of undefined", async () => {
    const resource = undefined;
    expect(isLambdaApiStep(resource)).toBeFalsy();
  });
});

--------------------------------------------------------------------------------
/src/step-functions-helper.ts:
--------------------------------------------------------------------------------
import Serverless from "serverless";

// A CloudFormation resource; only state machines carry a DefinitionString.
export interface GeneralResource {
  Type: string;
  Properties?: {
    DefinitionString?:
      | string
      | {
          "Fn::Sub": any[];
        };
  };
}

export interface StateMachineDefinition {
  States: { [key: string]: StateMachineStep };
}

export type PayloadObject = {
  "Execution.$"?: any;
  Execution?: any;
  "State.$"?: any;
  State?: any;
  "StateMachine.$"?: any;
  StateMachine?: any;
};

export type StepFunctionInput = {
  "CONTEXT.$"?: string;
  CONTEXT?: string;
  [key: string]: unknown;
};

export interface StateMachineStep {
  Resource?: string;
  Parameters?: {
    FunctionName?: string;
    "Payload.$"?: string;
    Payload?: string | PayloadObject;
    Input?: string | StepFunctionInput;
  };
  Next?: string;
  End?: boolean;
}

/**
 * Returns true when the step resource is a Lambda invocation.
 * Allow for either the standard or legacy definitions of a lambda step.
 */
export function isLambdaApiStep(resource: string | undefined): boolean {
  if (resource === undefined) {
    return false;
  }
  return resource.startsWith("arn:aws:states:::lambda:invoke") || resource.startsWith("arn:aws:lambda");
}

/** Returns true when the step resource starts a nested Step Function execution. */
export function isStepFunctionInvocation(resource: string | undefined): boolean {
  return resource !== undefined && resource.startsWith("arn:aws:states:::states:startExecution");
}

/** Extracts the parsed state-machine definition from an {"Fn::Sub": [...]} wrapper. */
function parseDefinitionObject(definitionString: { "Fn::Sub": (string | object)[] }): StateMachineDefinition {
  const hasSubEntries =
    typeof definitionString === "object" && "Fn::Sub" in definitionString && definitionString["Fn::Sub"].length > 0;
  if (!hasSubEntries) {
    throw new Error("unexpected definitionString");
  }
  const unparsedDefinition = definitionString["Fn::Sub"] ? definitionString["Fn::Sub"][0] : ""; // index 0 should always be a string of step functions definition
  if (unparsedDefinition === "") {
    throw new Error("no definition string found in DefinitionString");
  }
  return JSON.parse(unparsedDefinition as string) as StateMachineDefinition;
}

// Updates the definitionString of a step function to include trace context as appropriate for a Lambda invocation or a nested step function invocation.
75 | // definitionString can either be an object or a naked string, so we need to return the same and explicitly modify the Resource in span-link.ts 76 | export function updateDefinitionString( 77 | definitionString: string | { "Fn::Sub": (string | object)[] }, 78 | serverless: Serverless, 79 | stateMachineName: string, 80 | ): string | { "Fn::Sub": (string | object)[] } { 81 | // Step 1: Parse definition object from definition string 82 | let definitionObj: StateMachineDefinition; 83 | if (typeof definitionString !== "string") { 84 | // definitionString is a {"Fn::Sub": (string | object)[]} 85 | try { 86 | definitionObj = parseDefinitionObject(definitionString); 87 | } catch (error) { 88 | serverless.cli.log("Unable to update StepFunction definition. " + error); 89 | return definitionString; 90 | } 91 | } else { 92 | definitionObj = JSON.parse(definitionString); 93 | } 94 | 95 | // Step 2: Mutate the definition object 96 | const states = definitionObj.States; 97 | for (const [stepName, step] of Object.entries(states)) { 98 | // only inject context into Lambda API steps and Step Function invocation steps 99 | if (isLambdaApiStep(step?.Resource)) { 100 | updateDefinitionForDefaultLambdaApiStep(stepName, step, serverless, stateMachineName); 101 | } else if (isStepFunctionInvocation(step?.Resource)) { 102 | updateDefinitionForStepFunctionInvocationStep(stepName, step, serverless, stateMachineName); 103 | } 104 | } 105 | 106 | // Step 3: Convert definition object back into definition string 107 | if (typeof definitionString !== "string") { 108 | definitionString["Fn::Sub"][0] = JSON.stringify(definitionObj); // writing back to the original JSON created by Serverless framework 109 | } else { 110 | definitionString = JSON.stringify(definitionObj); 111 | } 112 | return definitionString; // return the definitionString so it can be written to the Resource in span-link.ts 113 | } 114 | 115 | // Truth table 116 | // Case | Input | Will update 117 | // 
// -----|----------------------------------------------------------|-------------
// 1    | No "Payload" or "Payload.$"                              | true
// 2.1  | "Payload" is object, already injected                    | false
// 2.2  | "Payload" object has Execution, State or StateMachine    | false
// 2.3  | "Payload" object has no Execution, State or StateMachine | true
// 3    | "Payload" is not object                                  | false
// 4.1  | "Payload.$": "$" (default payload)                       | true
// 4.2  | "Payload.$": "States.JsonMerge($$, $, false)" or         | false
//      | "Payload.$": "$$['Execution', 'State', 'StateMachine']"  |
// 4.3  | Custom "Payload.$"                                       | false
function updateDefinitionForDefaultLambdaApiStep(
  stepName: string,
  step: StateMachineStep,
  serverless: Serverless,
  stateMachineName: string,
): void {
  if (typeof step.Parameters !== "object") {
    serverless.cli.log(
      `[Warn] Parameters field is not a JSON object. Merging traces failed for step: ${stepName} of state machine: ${stateMachineName}. \
Your Step Functions trace will not be merged with downstream Lambda traces. To manually merge these traces, check out \
https://docs.datadoghq.com/serverless/step_functions/troubleshooting/`,
    );
    return;
  }

  // Case 2 & 3: Parameters has "Payload" field
  if (step.Parameters.hasOwnProperty("Payload")) {
    const payload = step.Parameters.Payload;

    // Case 3: payload is not a JSON object
    if (typeof payload !== "object") {
      serverless.cli.log(
        `[Warn] Payload field is not a JSON object. Merging traces failed for step: ${stepName} of state machine: ${stateMachineName}. \
Your Step Functions trace will not be merged with downstream Lambda traces. To manually merge these traces, check out \
https://docs.datadoghq.com/serverless/step_functions/troubleshooting/`,
      );
      return;
    }

    // Case 2: payload is a JSON object
    if (
      payload["Execution.$"] === "$$.Execution" &&
      payload["State.$"] === "$$.State" &&
      payload["StateMachine.$"] === "$$.StateMachine"
    ) {
      // Case 2.1: already injected into "Payload"
      serverless.cli.log(
        `Context injection is already set up. Skipping merging traces for step: ${stepName} of state machine: ${stateMachineName}.\n`,
      );

      return;
    }

    // Case 2.2: "Payload" object has Execution, State or StateMachine field but context injection is not set up completely
    if (
      payload.hasOwnProperty("Execution.$") ||
      payload.hasOwnProperty("Execution") ||
      payload.hasOwnProperty("State.$") ||
      payload.hasOwnProperty("State") ||
      payload.hasOwnProperty("StateMachine.$") ||
      payload.hasOwnProperty("StateMachine")
    ) {
      serverless.cli
        .log(`[Warn] Step ${stepName} of state machine: ${stateMachineName} may be using custom Execution, State or StateMachine field. \
Step Functions Context Object injection skipped. Your Step Functions trace will not be merged with downstream Lambda traces. To manually \
merge these traces, check out https://docs.datadoghq.com/serverless/step_functions/troubleshooting/\n`);

      return;
    }

    // Case 2.3: "Payload" object has no Execution, State or StateMachine field
    payload["Execution.$"] = "$$.Execution";
    payload["State.$"] = "$$.State";
    payload["StateMachine.$"] = "$$.StateMachine";

    return;
  }

  // Case 4: Parameters has "Payload.$" field
  if (step.Parameters.hasOwnProperty("Payload.$")) {
    // Case 4.1: default "Payload.$"
    if (step.Parameters["Payload.$"] === "$") {
      step.Parameters!["Payload.$"] = "States.JsonMerge($$, $, false)";
      serverless.cli.log(
        `JsonMerge Step Functions context object with payload in step: ${stepName} of state machine: ${stateMachineName}.`,
      );
      return;
    }

    // Case 4.2: context injection is already set up using "Payload.$"
    if (
      step.Parameters["Payload.$"] === "States.JsonMerge($$, $, false)" ||
      step.Parameters["Payload.$"] === "$$['Execution', 'State', 'StateMachine']"
    ) {
      serverless.cli.log(
        `Step ${stepName} of state machine ${stateMachineName}: Context injection is already set up. Skipping context injection.\n`,
      );

      return;
    }

    // Case 4.3: custom "Payload.$"
    serverless.cli.log(
      `[Warn] Step ${stepName} of state machine ${stateMachineName} has a custom Payload field. Step Functions Context Object injection \
skipped. Your Step Functions trace will not be merged with downstream Lambda traces. To manually merge these traces, \
check out https://docs.datadoghq.com/serverless/step_functions/troubleshooting/\n`,
    );
    return;
  }

  // Case 1: No "Payload" or "Payload.$"
  step.Parameters!["Payload.$"] = "$$['Execution', 'State', 'StateMachine']";
  serverless.cli.log(`Merging traces for step: ${stepName} of state machine: ${stateMachineName}.`);
}

// Truth table
// NOTE(review): this first table appears to describe an older/simpler predicate;
// the table below documents the current behavior of the function that follows.
// Input                    | Expected
// -------------------------|---------
// Empty object             | true
// undefined                | true
// not object               | false
// object without CONTEXT.$ | true
// object with CONTEXT.$    | false

// Truth table
// Case | Input                                                    | Expected
// -----|----------------------------------------------------------|---------
// 0.1  | Parameters field is not an object                        | false
// 0.2  | Parameters field has no Input field                      | true
// 0.3  | Parameters.Input is not an object                        | false
// 0.4  | Parameters field has "Input.$" field                     | false
// 1    | No "CONTEXT" or "CONTEXT.$"                              | true
// 2    | Has "CONTEXT"                                            | false
// 3.1  | "CONTEXT.$": "States.JsonMerge($$, $, false)" or         | false
//      | "CONTEXT.$": "$$['Execution', 'State', 'StateMachine']"  |
// 3.2  | Custom "CONTEXT.$"                                       | false
export function updateDefinitionForStepFunctionInvocationStep(
  stepName: string,
  step: StateMachineStep,
  serverless: Serverless,
  stateMachineName: string,
): boolean {
  const parameters = step?.Parameters;

  // Case 0.1: Parameters field is not an object
  if (typeof parameters !== "object") {
    return false;
  }

  // Case 0.4: Parameters field has "Input.$" field
  if (parameters.hasOwnProperty("Input.$")) {
    serverless.cli
      .log(`[Warn] Step ${stepName} of state machine ${stateMachineName} has custom "Input.$" field. Step Functions Context \
Object injection skipped. \
Your Step Functions trace will not be merged with downstream Step Function traces. To manually \
merge these traces, check out https://docs.datadoghq.com/serverless/step_functions/troubleshooting/ and \
https://github.com/DataDog/serverless-plugin-datadog/issues/584\n`);
    return false;
  }

  // Case 0.2: Parameters field has no Input field
  if (!parameters.hasOwnProperty("Input")) {
    parameters.Input = { "CONTEXT.$": "States.JsonMerge($$, $, false)" };
    return true;
  }

  // Case 0.3: Parameters.Input is not an object
  if (typeof parameters.Input !== "object") {
    return false;
  }

  // Case 1: No "CONTEXT" or "CONTEXT.$"
  if (!parameters.Input.hasOwnProperty("CONTEXT") && !parameters.Input.hasOwnProperty("CONTEXT.$")) {
    parameters.Input["CONTEXT.$"] = "$$['Execution', 'State', 'StateMachine']";
    return true;
  }

  // Case 2: Has 'CONTEXT' field.
  // This case should be rare, so we don't support context injection for this case for now.
  if (parameters.Input.hasOwnProperty("CONTEXT")) {
    serverless.cli
      .log(`[Warn] Step ${stepName} of state machine ${stateMachineName} has custom CONTEXT field. Step Functions Context \
Object injection skipped. Your Step Functions trace will not be merged with downstream Step Function traces. To manually \
merge these traces, check out https://docs.datadoghq.com/serverless/step_functions/troubleshooting/\n`);

    return false;
  }

  // Case 3.1 context injection is already set up
  if (
    parameters.Input["CONTEXT.$"] === "States.JsonMerge($$, $, false)" ||
    parameters.Input["CONTEXT.$"] === `$$['Execution', 'State', 'StateMachine']`
  ) {
    serverless.cli.log(
      `Step ${stepName} of state machine ${stateMachineName}: Context injection is already set up. Skipping context injection.\n`,
    );

    return false;
  }

  // Case 3.2 custom CONTEXT.$ field
  serverless.cli
    .log(`[Warn] Step ${stepName} of state machine ${stateMachineName}: Parameters.Input field has a custom CONTEXT.$ field. Step \
Functions Context Object injection skipped. Your Step Functions trace will not be merged with downstream Step Function traces. To \
manually merge these traces, check out https://docs.datadoghq.com/serverless/step_functions/troubleshooting/\n`);

  return false;
}

// Logs a recommendation when serverless-step-functions state machines exist in the
// config but Datadog Step Functions instrumentation has not been enabled.
export function inspectAndRecommendStepFunctionsInstrumentation(serverless: Serverless): void {
  const stepFunctions = Object.values((serverless.service as any).stepFunctions?.stateMachines || {});
  if (stepFunctions.length !== 0) {
    serverless.cli.log(
      `Uninstrumented Step Functions detected in your serverless.yml file. If you would like to see Step Functions traces, please see details of 'enableStepFunctionsTracing' and 'mergeStepFunctionAndLambdaTraces' variables in the README (https://github.com/DataDog/serverless-plugin-datadog/)`,
    );
  }
}
--------------------------------------------------------------------------------
/src/tracing.ts:
--------------------------------------------------------------------------------
/*
 * Unless explicitly stated otherwise all files in this repository are licensed
 * under the Apache License Version 2.0.
 *
 * This product includes software developed at Datadog (https://www.datadoghq.com/).
 * Copyright 2021 Datadog, Inc.
 */

import Service from "serverless/classes/Service";
import { Provider } from "serverless/plugins/aws/provider/awsProvider";
import { FunctionInfo } from "./layer";

const ddTraceEnabledEnvVar = "DD_TRACE_ENABLED";
const ddMergeXrayTracesEnvVar = "DD_MERGE_XRAY_TRACES";

export enum TracingMode {
  XRAY,
  DD_TRACE,
  HYBRID,
  NONE,
}

// Enables X-Ray tracing on the provider (API Gateway + Lambda) when the tracing mode
// requires it, and disables DD_MERGE_XRAY_TRACES on any handler that has explicitly
// disabled DD_TRACE_ENABLED, since merging requires tracing to be on.
export function enableTracing(service: Service, tracingMode: TracingMode, handlers: FunctionInfo[]): void {
  const provider = service.provider as Provider;
  if (tracingMode === TracingMode.XRAY || tracingMode === TracingMode.HYBRID) {
    provider.tracing = {
      apiGateway: provider.apiGateway?.restApiId
        ? (undefined as any) // Current type definition does not allow undefined however it is a valid option.
        : true,
      lambda: true,
    };
  }
  handlers.forEach(({ handler }) => {
    handler.environment ??= {};
    const environment = handler.environment as any;
    // if tracing is not enabled, merge x-ray cannot be enabled
    if (environment[ddTraceEnabledEnvVar] === false || environment[ddTraceEnabledEnvVar] === "false") {
      environment[ddMergeXrayTracesEnvVar] = false;
    }
  });
}
--------------------------------------------------------------------------------
/src/wrapper.spec.ts:
--------------------------------------------------------------------------------
/*
 * Unless explicitly stated otherwise all files in this repository are licensed
 * under the Apache License Version 2.0.
 *
 * This product includes software developed at Datadog (https://www.datadoghq.com/).
 * Copyright 2021 Datadog, Inc.
 */

import { redirectHandlers } from "./wrapper";
import { datadogHandlerEnvVar, jsHandler, jsHandlerWithLayers, pythonHandler } from "./wrapper";
import { RuntimeType } from "./layer";
import mock from "mock-fs";

// Unit tests for wrapper.redirectHandlers: handler redirection per runtime,
// DD_LAMBDA_HANDLER env propagation, and image-based function pass-through.
describe("redirectHandlers", () => {
  afterAll(() => {
    mock.restore();
  });

  it("redirects js handlers correctly when addLayers is true", async () => {
    mock({});
    const handler = {
      name: "my-lambda",
      package: {} as any,
      handler: "mydir/func.myhandler",
      events: [],
    };
    redirectHandlers(
      [
        {
          name: "my-lambda",
          type: RuntimeType.NODE,
          handler: handler,
        },
      ],
      true,
    );
    expect(handler.handler).toEqual(jsHandlerWithLayers);
  });

  it("redirects js handlers correctly when addLayers is false", async () => {
    mock({});
    const handler = {
      name: "my-lambda",
      package: {} as any,
      handler: "mydir/func.myhandler",
      events: [],
    };
    redirectHandlers(
      [
        {
          name: "my-lambda",
          type: RuntimeType.NODE,
          handler: handler,
        },
      ],
      false,
    );
    expect(handler.handler).toEqual(jsHandler);
  });

  it("does not push duplicate versions of redirected handler", async () => {
    mock({});
    const handler1 = {
      name: "my-lambda",
      package: {} as any,
      handler: "mydir/func.myhandler",
      events: [],
    };
    const handler2 = {
      name: "second-lambda",
      package: {} as any,
      handler: "mydir/func.secondhandler",
      events: [],
    };
    redirectHandlers(
      [
        {
          name: "my-lambda",
          type: RuntimeType.PYTHON,
          handler: handler1,
        },
        {
          name: "second-lambda",
          type: RuntimeType.PYTHON,
          handler: handler2,
        },
      ],
      true,
    );
    expect(handler1.handler).toEqual(pythonHandler);
    expect(handler2.handler).toEqual(pythonHandler);
  });

  it("redirects handler and sets env variable to original handler", async () => {
    mock({});
    const handler = {
      name: "my-lambda",
      package: {} as any,
      handler: "mydir/func.myhandler",
      events: [],
    };
    redirectHandlers(
      [
        {
          name: "my-lambda",
          type: RuntimeType.NODE,
          handler: handler,
        },
      ],
      false,
    );
    expect(handler).toEqual({
      name: "my-lambda",
      package: { include: [] },
      handler: jsHandler,
      events: [],
      environment: { [datadogHandlerEnvVar]: "mydir/func.myhandler" },
    });
  });

  it("does not modify image based function definitions", async () => {
    mock({});
    const funcDef = {
      name: "my-lambda",
      package: {} as any,
      image: "my.oci.image",
      events: [],
    };
    const origFuncDef = {
      ...funcDef,
    };
    redirectHandlers(
      [
        {
          name: "my-lambda",
          type: RuntimeType.NODE,
          handler: funcDef,
        },
      ],
      true,
    );
    expect(funcDef).toEqual(origFuncDef);
  });

  it("sets custom handlers correctly for nodejs", async () => {
    mock({});
    const customHandler = "/src/custom-handler.handler";
    const handler = {
      name: "my-lambda",
      package: {} as any,
      handler: "mydir/func.myhandler",
      events: [],
    };
    redirectHandlers(
      [
        {
          name: "my-lambda",
          type: RuntimeType.NODE,
          handler: handler,
        },
      ],
      false,
      customHandler,
    );
    expect(handler.handler).toEqual(customHandler);
  });

  it("sets custom handlers correctly for python", async () => {
    mock({});
    const customHandler = "/src/custom-handler.handler";
    const handler = {
      name: "my-lambda",
      package: {} as any,
      handler: "mydir/func.myhandler",
      events: [],
    };
    redirectHandlers(
      [
        {
          name: "my-lambda",
          type: RuntimeType.PYTHON,
          handler: handler,
        },
      ],
      false,
      customHandler,
    );
    expect(handler.handler).toEqual(customHandler);
  });
});
--------------------------------------------------------------------------------
/src/wrapper.ts:
--------------------------------------------------------------------------------
/*
 * Unless explicitly stated otherwise all files in this repository are licensed
 * under the Apache License Version 2.0.
 *
 * This product includes software developed at Datadog (https://www.datadoghq.com/).
 * Copyright 2021 Datadog, Inc.
 */

import { FunctionDefinitionHandler } from "serverless";
import { FunctionInfo, isFunctionDefinitionHandler, RuntimeType } from "./layer";

export const datadogHandlerEnvVar = "DD_LAMBDA_HANDLER";
export const pythonHandler = "datadog_lambda.handler.handler";
export const jsHandlerWithLayers = "/opt/nodejs/node_modules/datadog-lambda-js/handler.handler";
export const jsHandler = "node_modules/datadog-lambda-js/dist/handler.handler";

/**
 * For each lambda function, redirects handler to the Datadog handler for the given runtime,
 * and sets Datadog environment variable `DD_LAMBDA_HANDLER` to the original handler.
 */
export function redirectHandlers(funcs: FunctionInfo[], addLayers: boolean, customHandler?: string): void {
  funcs.forEach((func) => {
    const handler = getDDHandler(func.type, addLayers, customHandler);
    if (handler === undefined) {
      // Unknown runtime: leave the function untouched.
      return;
    }
    const funcDef = func.handler;
    if (!isFunctionDefinitionHandler(funcDef)) {
      // Image-based function definitions have no handler string to redirect.
      return;
    }
    setEnvDatadogHandler(funcDef);

    funcDef.handler = handler;
    if (func.handler.package === undefined) {
      func.handler.package = {
        exclude: [],
        include: [],
      };
    }
    if (func.handler.package.include === undefined) {
      func.handler.package.include = [];
    }
  });
}

// Resolves the Datadog wrapper handler string for the runtime; a custom handler,
// when provided, takes precedence over the runtime defaults.
function getDDHandler(
  lambdaRuntime: RuntimeType | undefined,
  addLayers: boolean,
  customHandler?: string,
): string | undefined {
  if (lambdaRuntime === undefined) {
    return;
  }
  if (customHandler) {
    return customHandler;
  }
  switch (lambdaRuntime) {
    case RuntimeType.NODE:
      return addLayers ? jsHandlerWithLayers : jsHandler;
    case RuntimeType.PYTHON:
      return pythonHandler;
  }
}

// Stores the original handler in DD_LAMBDA_HANDLER so the Datadog wrapper can
// locate and invoke it at runtime.
function setEnvDatadogHandler(func: FunctionDefinitionHandler) {
  const originalHandler = func.handler;
  const environment = func.environment ?? {};
  environment[datadogHandlerEnvVar] = originalHandler;
  func.environment = environment;
}
--------------------------------------------------------------------------------
/testEnvironment.js:
--------------------------------------------------------------------------------
module.exports = require("jest-environment-node");
--------------------------------------------------------------------------------
/tsconfig.json:
--------------------------------------------------------------------------------
{
  "compilerOptions": {
    /* Basic Options */
    // "incremental": true, /* Enable incremental compilation */
    "target": "es6" /* Specify ECMAScript target version: 'ES3' (default), 'ES5', 'ES2015', 'ES2016', 'ES2017', 'ES2018', 'ES2019' or 'ESNEXT'. */,
    "module": "commonjs" /* Specify module code generation: 'none', 'commonjs', 'amd', 'system', 'umd', 'es2015', or 'ESNext'. */,
    "lib": ["es2015"] /* Specify library files to be included in the compilation. */,
    // "allowJs": true, /* Allow javascript files to be compiled. */
    // "checkJs": true, /* Report errors in .js files. */
    // "jsx": "preserve", /* Specify JSX code generation: 'preserve', 'react-native', or 'react'. */
    "declaration": true /* Generates corresponding '.d.ts' file. */,
    "declarationMap": true /* Generates a sourcemap for each corresponding '.d.ts' file. */,
    "sourceMap": true /* Generates corresponding '.map' file. */,
    // "outFile": "./", /* Concatenate and emit output to single file. */
    "outDir": "./dist" /* Redirect output structure to the directory. */,
    // "rootDir": "./", /* Specify the root directory of input files. Use to control the output directory structure with --outDir. */
    // "composite": true, /* Enable project compilation */
    // "tsBuildInfoFile": "./", /* Specify file to store incremental compilation information */
    // "removeComments": true, /* Do not emit comments to output. */
    // "noEmit": true, /* Do not emit outputs. */
    // "importHelpers": true, /* Import emit helpers from 'tslib'. */
    "downlevelIteration": true /* Provide full support for iterables in 'for-of', spread, and destructuring when targeting 'ES5' or 'ES3'. */,
    // "isolatedModules": true, /* Transpile each file as a separate module (similar to 'ts.transpileModule'). */

    /* Strict Type-Checking Options */
    "strict": true /* Enable all strict type-checking options. */,
    // "noImplicitAny": true, /* Raise error on expressions and declarations with an implied 'any' type. */
    // "strictNullChecks": true, /* Enable strict null checks. */
    // "strictFunctionTypes": true, /* Enable strict checking of function types. */
    // "strictBindCallApply": true, /* Enable strict 'bind', 'call', and 'apply' methods on functions. */
    // "strictPropertyInitialization": true, /* Enable strict checking of property initialization in classes. */
    // "noImplicitThis": true, /* Raise error on 'this' expressions with an implied 'any' type. */
    // "alwaysStrict": true, /* Parse in strict mode and emit "use strict" for each source file. */

    /* Additional Checks */
    "noUnusedLocals": true, /* Report errors on unused locals. */
    "noUnusedParameters": true, /* Report errors on unused parameters. */
    // "noImplicitReturns": true, /* Report error when not all code paths in function return a value. */
    // "noFallthroughCasesInSwitch": true, /* Report errors for fallthrough cases in switch statement. */

    /* Module Resolution Options */
    "moduleResolution": "node" /* Specify module resolution strategy: 'node' (Node.js) or 'classic' (TypeScript pre-1.6). */,
    "baseUrl": "./src" /* Base directory to resolve non-absolute module names. */,
    // "paths": {}, /* A series of entries which re-map imports to lookup locations relative to the 'baseUrl'. */
    // "rootDirs": [], /* List of root folders whose combined content represents the structure of the project at runtime. */
    "typeRoots": ["node_modules/@types", "types"] /* List of folders to include type definitions from. */,
    // "types": [], /* Type declaration files to be included in compilation. */
    // "allowSyntheticDefaultImports": true, /* Allow default imports from modules with no default export. This does not affect code emit, just typechecking. */
    "esModuleInterop": true /* Enables emit interoperability between CommonJS and ES Modules via creation of namespace objects for all imports. Implies 'allowSyntheticDefaultImports'. */,
    // "preserveSymlinks": true, /* Do not resolve the real path of symlinks. */
    // "allowUmdGlobalAccess": true, /* Allow accessing UMD globals from modules. */

    /* Source Map Options */
    // "sourceRoot": "", /* Specify the location where debugger should locate TypeScript files instead of source locations. */
    // "mapRoot": "", /* Specify the location where debugger should locate map files instead of generated locations. */
    // "inlineSourceMap": true, /* Emit a single file with source maps instead of having a separate file. */
    // "inlineSources": true, /* Emit the source alongside the sourcemaps within a single file; requires '--inlineSourceMap' or '--sourceMap' to be set. */

    /* Experimental Options */
    // "experimentalDecorators": true, /* Enables experimental support for ES7 decorators. */
    // "emitDecoratorMetadata": true, /* Enables experimental support for emitting type metadata for decorators. */

    "resolveJsonModule": true
  },
  "include": ["src/**/*"],
  "exclude": ["node_modules", "**/*.spec.ts"]
}
--------------------------------------------------------------------------------
/tslint.json:
--------------------------------------------------------------------------------
{
  "extends": "tslint:recommended",
  "rules": {
    "interface-name": false,
    "variable-name": {
      "options": ["allow-leading-underscore"]
    },
    "no-floating-promises": true,
    "no-console": false,
    "no-empty": false,
    "trailing-comma": [
      true,
      {
        "multiline": {
          "imports": "ignore",
          "exports": "ignore",
          "properties": "always",
          "functions": "always",
          "objects": "always",
          "typeLiterals": "always"
        }
      }
    ]
  }
}
--------------------------------------------------------------------------------
/webpack.config.js:
--------------------------------------------------------------------------------
// Bundles the plugin entry point (src/index.ts) into a single CommonJS file for
// the Node.js target, compiling TypeScript via ts-loader.
const path = require("path");

module.exports = {
  entry: "./src/index.ts",
  target: "node",
  module: {
    rules: [
      {
        test: /\.tsx?$/,
        use: "ts-loader",
        exclude: /node_modules/,
      },
    ],
  },
  resolve: {
    extensions: [".tsx", ".ts", ".js"],
  },
  output: {
    filename: "index.js",
    path: path.resolve(__dirname, "dist-layer"),
  },
};
--------------------------------------------------------------------------------