├── .eslintrc ├── .github └── workflows │ └── codeql-analysis.yml ├── .gitignore ├── .prettierignore ├── .prettierrc ├── CODE_OF_CONDUCT.md ├── CONTRIBUTING.md ├── LICENSE ├── NOTICE ├── README.md ├── RELEASE_NOTES.md ├── bin ├── deploy-canary.sh ├── publish-package.sh ├── run-integ-tests.sh ├── start-agent.sh └── utils.sh ├── examples ├── README.md ├── agent │ ├── index.js │ ├── package-lock.json │ ├── package.json │ └── run.sh ├── ecs-firelens │ ├── .gitignore │ ├── Dockerfile │ ├── app.js │ ├── container-definitions.template.json │ ├── fluent-bit.conf │ ├── package-lock.json │ ├── package.json │ └── publish.sh ├── eks │ ├── Dockerfile │ ├── app.js │ ├── kubernetes │ │ ├── configmap.yaml │ │ ├── deployment.yaml │ │ └── service.yaml │ ├── package-lock.json │ └── package.json ├── lambda │ ├── .gitignore │ ├── deploy │ │ └── deploy-lambda.sh │ └── src │ │ ├── index.js │ │ ├── package-lock.json │ │ └── package.json └── testing │ ├── jestconfig.json │ ├── package-lock.json │ ├── package.json │ ├── src │ └── module.js │ └── tests │ ├── module.jest.test.js │ └── module.local.test.js ├── jestconfig.integ.json ├── jestconfig.json ├── package-lock.json ├── package.json ├── src ├── Constants.ts ├── config │ ├── Configuration.ts │ ├── EnvironmentConfigurationProvider.ts │ ├── IConfiguration.ts │ └── __tests__ │ │ └── EnvironmentConfigurationProvider.test.ts ├── environment │ ├── DefaultEnvironment.ts │ ├── EC2Environment.ts │ ├── ECSEnvironment.ts │ ├── EnvironmentDetector.ts │ ├── Environments.ts │ ├── IEnvironment.ts │ ├── LambdaEnvironment.ts │ ├── LocalEnvironment.ts │ └── __tests__ │ │ ├── DefaultEnvironment.test.ts │ │ ├── ECSEnvironment.test.ts │ │ ├── EnvironmentDetector.test.ts │ │ ├── LambdaEnvironment.test.ts │ │ └── LocalEnvironment.test.ts ├── exceptions │ ├── DimensionSetExceededError.ts │ ├── InvalidDimensionError.ts │ ├── InvalidMetricError.ts │ ├── InvalidNamespaceError.ts │ └── InvalidTimestampError.ts ├── index.ts ├── logger │ ├── MetricScope.ts │ ├── MetricValues.ts │ ├── MetricsContext.ts │ ├── MetricsLogger.ts │ ├── MetricsLoggerFactory.ts │ ├── StorageResolution.ts │ ├── Unit.ts │ └── __tests__ │ │ ├── MetricScope.test.ts │ │ ├── MetricsContext.test.ts │ │ ├── MetricsLogger.test.ts │ │ └── MetricsLoggerFactory.test.ts ├── serializers │ ├── LogSerializer.ts │ ├── Serializer.ts │ ├── SerializerFactory.ts │ └── __tests__ │ │ └── LogSerializer.test.ts ├── sinks │ ├── AgentSink.ts │ ├── ConsoleSink.ts │ ├── Sink.ts │ ├── __tests__ │ │ ├── AgentSink.test.ts │ │ ├── ConsoleSink.test.ts │ │ └── TcpClient.test.ts │ └── connections │ │ ├── IEndpoint.ts │ │ ├── ISocketClient.ts │ │ ├── TcpClient.ts │ │ └── UdpClient.ts └── utils │ ├── Fetch.ts │ ├── Logger.ts │ ├── Time.ts │ └── Validator.ts ├── test ├── canary │ └── agent │ │ ├── Dockerfile │ │ ├── container-definitions.json │ │ ├── index.js │ │ └── package.json ├── integ │ └── agent │ │ ├── .aws │ │ ├── .gitignore │ │ └── amazon-cloudwatch-agent.json │ │ ├── Dockerfile │ │ └── end-to-end.integ.ts └── utils │ ├── Sleep.ts │ └── TestSink.ts ├── tsconfig.eslint.json └── tsconfig.json /.eslintrc: -------------------------------------------------------------------------------- 1 | { 2 | "parser": "@typescript-eslint/parser", 3 | "parserOptions": { 4 | "project": "./tsconfig.eslint.json" 5 | }, 6 | "extends": [ 7 | "eslint:recommended", 8 | "plugin:@typescript-eslint/eslint-recommended", 9 | "plugin:@typescript-eslint/recommended", 10 | "plugin:@typescript-eslint/recommended-requiring-type-checking", 11 | "prettier" 12 | ], 13 | "rules": { 
14 | "@typescript-eslint/interface-name-prefix": "off" 15 | }, 16 | // Be more lenient in tests for some rules 17 | "overrides": [ 18 | { 19 | "files": ["./test/**/*", "**/__tests__/*"], 20 | "rules": { 21 | "@typescript-eslint/ban-ts-ignore": "off", 22 | "@typescript-eslint/explicit-function-return-type": "off", 23 | "@typescript-eslint/no-explicit-any": "off", 24 | "@typescript-eslint/no-non-null-assertion": "off", 25 | "@typescript-eslint/no-use-before-define": "off", 26 | "@typescript-eslint/unbound-method": "off", 27 | "@typescript-eslint/no-unsafe-member-access": "off", 28 | "@typescript-eslint/no-unsafe-argument": "off", 29 | "@typescript-eslint/no-unsafe-assignment": "off", 30 | "@typescript-eslint/no-unsafe-call": "off", 31 | "@typescript-eslint/no-unsafe-return": "off", 32 | "@typescript-eslint/no-floating-promises": "off", 33 | "@typescript-eslint/no-var-requires": "off", 34 | "@typescript-eslint/ban-ts-comment": "off" 35 | } 36 | } 37 | ] 38 | } 39 | -------------------------------------------------------------------------------- /.github/workflows/codeql-analysis.yml: -------------------------------------------------------------------------------- 1 | # For most projects, this workflow file will not need changing; you simply need 2 | # to commit it to your repository. 3 | # 4 | # You may wish to alter this file to override the set of languages analyzed, 5 | # or to provide custom queries or build logic. 6 | # 7 | # ******** NOTE ******** 8 | # We have attempted to detect the languages in your repository. Please check 9 | # the `language` matrix defined below to confirm you have the correct set of 10 | # supported CodeQL languages. 11 | # ******** NOTE ******** 12 | 13 | name: "CodeQL" 14 | 15 | on: 16 | push: 17 | branches: [ master ] 18 | pull_request: 19 | # The branches below must be a subset of the branches above 20 | branches: [ master ] 21 | schedule: 22 | - cron: '18 22 * * 4' 23 | 24 | jobs: 25 | analyze: 26 | name: Analyze 27 | runs-on: ubuntu-latest 28 | 29 | strategy: 30 | fail-fast: false 31 | matrix: 32 | language: [ 'javascript' ] 33 | # CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python' ] 34 | # Learn more... 35 | # https://docs.github.com/en/github/finding-security-vulnerabilities-and-errors-in-your-code/configuring-code-scanning#overriding-automatic-language-detection 36 | 37 | steps: 38 | - name: Checkout repository 39 | uses: actions/checkout@v2 40 | 41 | # Initializes the CodeQL tools for scanning. 42 | - name: Initialize CodeQL 43 | uses: github/codeql-action/init@v1 44 | with: 45 | languages: ${{ matrix.language }} 46 | # If you wish to specify custom queries, you can do so here or in a config file. 47 | # By default, queries listed here will override any specified in a config file. 48 | # Prefix the list here with "+" to use these queries and those in the config file. 49 | # queries: ./path/to/local/query, your-org/your-repo/queries@main 50 | 51 | # Autobuild attempts to build any compiled languages (C/C++, C#, or Java). 52 | # If this step fails, then you should remove it and run the build manually (see below) 53 | - name: Autobuild 54 | uses: github/codeql-action/autobuild@v1 55 | 56 | # ℹ️ Command-line programs to run using the OS shell. 
57 | # 📚 https://git.io/JvXDl 58 | 59 | # ✏️ If the Autobuild fails above, remove it and uncomment the following three lines 60 | # and modify them (or add more) to build your code if your project 61 | # uses a compiled language 62 | 63 | #- run: | 64 | # make bootstrap 65 | # make release 66 | 67 | - name: Perform CodeQL Analysis 68 | uses: github/codeql-action/analyze@v1 69 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | node_modules 2 | lib 3 | .buildutils 4 | temp 5 | *.bak 6 | *.log 7 | .DS_Store 8 | *.temp 9 | *.zip 10 | *.tgz -------------------------------------------------------------------------------- /.prettierignore: -------------------------------------------------------------------------------- 1 | *.md -------------------------------------------------------------------------------- /.prettierrc: -------------------------------------------------------------------------------- 1 | { 2 | "printWidth": 120, 3 | "trailingComma": "all", 4 | "singleQuote": true 5 | } 6 | -------------------------------------------------------------------------------- /CODE_OF_CONDUCT.md: -------------------------------------------------------------------------------- 1 | ## Code of Conduct 2 | 3 | This project has adopted the [Amazon Open Source Code of Conduct](https://aws.github.io/code-of-conduct). 4 | For more information see the [Code of Conduct FAQ](https://aws.github.io/code-of-conduct-faq) or contact 5 | opensource-codeofconduct@amazon.com with any additional questions or comments. 6 | -------------------------------------------------------------------------------- /CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | # Contributing Guidelines 2 | 3 | Thank you for your interest in contributing to our project. Whether it's a bug report, new feature, correction, or additional 4 | documentation, we greatly value feedback and contributions from our community. 5 | 6 | Please read through this document before submitting any issues or pull requests to ensure we have all the necessary 7 | information to effectively respond to your bug report or contribution. 8 | 9 | ## Reporting Bugs/Feature Requests 10 | 11 | We welcome you to use the GitHub issue tracker to report bugs or suggest features. 12 | 13 | When filing an issue, please check existing open, or recently closed, issues to make sure somebody else hasn't already 14 | reported the issue. Please try to include as much information as you can. Details like these are incredibly useful: 15 | 16 | - A reproducible test case or series of steps 17 | - The version of our code being used 18 | - Any modifications you've made relevant to the bug 19 | - Anything unusual about your environment or deployment 20 | 21 | ## Contributing via Pull Requests 22 | 23 | Contributions via pull requests are much appreciated. Before sending us a pull request, please ensure that: 24 | 25 | 1. You are working against the latest source on the _master_ branch. 26 | 2. You check existing open, and recently merged, pull requests to make sure someone else hasn't addressed the problem already. 27 | 3. You open an issue to discuss any significant work - we would hate for your time to be wasted. 28 | 29 | To send us a pull request, please: 30 | 31 | 1. Fork the repository. 32 | 2. Modify the source; please focus on the specific change you are contributing. 
If you also reformat all the code, it will be hard for us to focus on your change. 33 | 3. Ensure local tests pass. 34 | 4. Commit to your fork using clear commit messages. 35 | 5. Send us a pull request, answering any default questions in the pull request interface. 36 | 6. Pay attention to any automated CI failures reported in the pull request, and stay involved in the conversation. 37 | 38 | GitHub provides additional document on [forking a repository](https://help.github.com/articles/fork-a-repo/) and 39 | [creating a pull request](https://help.github.com/articles/creating-a-pull-request/). 40 | 41 | ## Finding contributions to work on 42 | 43 | Looking at the existing issues is a great way to find something to contribute on. As our projects, by default, use the default GitHub issue labels (enhancement/bug/duplicate/help wanted/invalid/question/wontfix), looking at any 'help wanted' issues is a great place to start. 44 | 45 | ## Code of Conduct 46 | 47 | This project has adopted the [Amazon Open Source Code of Conduct](https://aws.github.io/code-of-conduct). 48 | For more information see the [Code of Conduct FAQ](https://aws.github.io/code-of-conduct-faq) or contact 49 | opensource-codeofconduct@amazon.com with any additional questions or comments. 50 | 51 | ## Security issue notifications 52 | 53 | If you discover a potential security issue in this project we ask that you notify AWS/Amazon Security via our [vulnerability reporting page](http://aws.amazon.com/security/vulnerability-reporting/). Please do **not** create a public github issue. 54 | 55 | ## Licensing 56 | 57 | See the [LICENSE](LICENSE) file for our project's licensing. We will ask you to confirm the licensing of your contribution. 58 | 59 | We may ask you to sign a [Contributor License Agreement (CLA)](http://en.wikipedia.org/wiki/Contributor_License_Agreement) for larger changes. 60 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | 2 | Apache License 3 | Version 2.0, January 2004 4 | http://www.apache.org/licenses/ 5 | 6 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 7 | 8 | 1. Definitions. 9 | 10 | "License" shall mean the terms and conditions for use, reproduction, 11 | and distribution as defined by Sections 1 through 9 of this document. 12 | 13 | "Licensor" shall mean the copyright owner or entity authorized by 14 | the copyright owner that is granting the License. 15 | 16 | "Legal Entity" shall mean the union of the acting entity and all 17 | other entities that control, are controlled by, or are under common 18 | control with that entity. For the purposes of this definition, 19 | "control" means (i) the power, direct or indirect, to cause the 20 | direction or management of such entity, whether by contract or 21 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 22 | outstanding shares, or (iii) beneficial ownership of such entity. 23 | 24 | "You" (or "Your") shall mean an individual or Legal Entity 25 | exercising permissions granted by this License. 26 | 27 | "Source" form shall mean the preferred form for making modifications, 28 | including but not limited to software source code, documentation 29 | source, and configuration files. 
30 | 31 | "Object" form shall mean any form resulting from mechanical 32 | transformation or translation of a Source form, including but 33 | not limited to compiled object code, generated documentation, 34 | and conversions to other media types. 35 | 36 | "Work" shall mean the work of authorship, whether in Source or 37 | Object form, made available under the License, as indicated by a 38 | copyright notice that is included in or attached to the work 39 | (an example is provided in the Appendix below). 40 | 41 | "Derivative Works" shall mean any work, whether in Source or Object 42 | form, that is based on (or derived from) the Work and for which the 43 | editorial revisions, annotations, elaborations, or other modifications 44 | represent, as a whole, an original work of authorship. For the purposes 45 | of this License, Derivative Works shall not include works that remain 46 | separable from, or merely link (or bind by name) to the interfaces of, 47 | the Work and Derivative Works thereof. 48 | 49 | "Contribution" shall mean any work of authorship, including 50 | the original version of the Work and any modifications or additions 51 | to that Work or Derivative Works thereof, that is intentionally 52 | submitted to Licensor for inclusion in the Work by the copyright owner 53 | or by an individual or Legal Entity authorized to submit on behalf of 54 | the copyright owner. For the purposes of this definition, "submitted" 55 | means any form of electronic, verbal, or written communication sent 56 | to the Licensor or its representatives, including but not limited to 57 | communication on electronic mailing lists, source code control systems, 58 | and issue tracking systems that are managed by, or on behalf of, the 59 | Licensor for the purpose of discussing and improving the Work, but 60 | excluding communication that is conspicuously marked or otherwise 61 | designated in writing by the copyright owner as "Not a Contribution." 62 | 63 | "Contributor" shall mean Licensor and any individual or Legal Entity 64 | on behalf of whom a Contribution has been received by Licensor and 65 | subsequently incorporated within the Work. 66 | 67 | 2. Grant of Copyright License. Subject to the terms and conditions of 68 | this License, each Contributor hereby grants to You a perpetual, 69 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 70 | copyright license to reproduce, prepare Derivative Works of, 71 | publicly display, publicly perform, sublicense, and distribute the 72 | Work and such Derivative Works in Source or Object form. 73 | 74 | 3. Grant of Patent License. Subject to the terms and conditions of 75 | this License, each Contributor hereby grants to You a perpetual, 76 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 77 | (except as stated in this section) patent license to make, have made, 78 | use, offer to sell, sell, import, and otherwise transfer the Work, 79 | where such license applies only to those patent claims licensable 80 | by such Contributor that are necessarily infringed by their 81 | Contribution(s) alone or by combination of their Contribution(s) 82 | with the Work to which such Contribution(s) was submitted. 
If You 83 | institute patent litigation against any entity (including a 84 | cross-claim or counterclaim in a lawsuit) alleging that the Work 85 | or a Contribution incorporated within the Work constitutes direct 86 | or contributory patent infringement, then any patent licenses 87 | granted to You under this License for that Work shall terminate 88 | as of the date such litigation is filed. 89 | 90 | 4. Redistribution. You may reproduce and distribute copies of the 91 | Work or Derivative Works thereof in any medium, with or without 92 | modifications, and in Source or Object form, provided that You 93 | meet the following conditions: 94 | 95 | (a) You must give any other recipients of the Work or 96 | Derivative Works a copy of this License; and 97 | 98 | (b) You must cause any modified files to carry prominent notices 99 | stating that You changed the files; and 100 | 101 | (c) You must retain, in the Source form of any Derivative Works 102 | that You distribute, all copyright, patent, trademark, and 103 | attribution notices from the Source form of the Work, 104 | excluding those notices that do not pertain to any part of 105 | the Derivative Works; and 106 | 107 | (d) If the Work includes a "NOTICE" text file as part of its 108 | distribution, then any Derivative Works that You distribute must 109 | include a readable copy of the attribution notices contained 110 | within such NOTICE file, excluding those notices that do not 111 | pertain to any part of the Derivative Works, in at least one 112 | of the following places: within a NOTICE text file distributed 113 | as part of the Derivative Works; within the Source form or 114 | documentation, if provided along with the Derivative Works; or, 115 | within a display generated by the Derivative Works, if and 116 | wherever such third-party notices normally appear. The contents 117 | of the NOTICE file are for informational purposes only and 118 | do not modify the License. You may add Your own attribution 119 | notices within Derivative Works that You distribute, alongside 120 | or as an addendum to the NOTICE text from the Work, provided 121 | that such additional attribution notices cannot be construed 122 | as modifying the License. 123 | 124 | You may add Your own copyright statement to Your modifications and 125 | may provide additional or different license terms and conditions 126 | for use, reproduction, or distribution of Your modifications, or 127 | for any such Derivative Works as a whole, provided Your use, 128 | reproduction, and distribution of the Work otherwise complies with 129 | the conditions stated in this License. 130 | 131 | 5. Submission of Contributions. Unless You explicitly state otherwise, 132 | any Contribution intentionally submitted for inclusion in the Work 133 | by You to the Licensor shall be under the terms and conditions of 134 | this License, without any additional terms or conditions. 135 | Notwithstanding the above, nothing herein shall supersede or modify 136 | the terms of any separate license agreement you may have executed 137 | with Licensor regarding such Contributions. 138 | 139 | 6. Trademarks. This License does not grant permission to use the trade 140 | names, trademarks, service marks, or product names of the Licensor, 141 | except as required for reasonable and customary use in describing the 142 | origin of the Work and reproducing the content of the NOTICE file. 143 | 144 | 7. Disclaimer of Warranty. 
Unless required by applicable law or 145 | agreed to in writing, Licensor provides the Work (and each 146 | Contributor provides its Contributions) on an "AS IS" BASIS, 147 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 148 | implied, including, without limitation, any warranties or conditions 149 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 150 | PARTICULAR PURPOSE. You are solely responsible for determining the 151 | appropriateness of using or redistributing the Work and assume any 152 | risks associated with Your exercise of permissions under this License. 153 | 154 | 8. Limitation of Liability. In no event and under no legal theory, 155 | whether in tort (including negligence), contract, or otherwise, 156 | unless required by applicable law (such as deliberate and grossly 157 | negligent acts) or agreed to in writing, shall any Contributor be 158 | liable to You for damages, including any direct, indirect, special, 159 | incidental, or consequential damages of any character arising as a 160 | result of this License or out of the use or inability to use the 161 | Work (including but not limited to damages for loss of goodwill, 162 | work stoppage, computer failure or malfunction, or any and all 163 | other commercial damages or losses), even if such Contributor 164 | has been advised of the possibility of such damages. 165 | 166 | 9. Accepting Warranty or Additional Liability. While redistributing 167 | the Work or Derivative Works thereof, You may choose to offer, 168 | and charge a fee for, acceptance of support, warranty, indemnity, 169 | or other liability obligations and/or rights consistent with this 170 | License. However, in accepting such obligations, You may act only 171 | on Your own behalf and on Your sole responsibility, not on behalf 172 | of any other Contributor, and only if You agree to indemnify, 173 | defend, and hold each Contributor harmless for any liability 174 | incurred by, or claims asserted against, such Contributor by reason 175 | of your accepting any such warranty or additional liability. -------------------------------------------------------------------------------- /NOTICE: -------------------------------------------------------------------------------- 1 | Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. -------------------------------------------------------------------------------- /RELEASE_NOTES.md: -------------------------------------------------------------------------------- 1 | # Release Notes 2 | 3 | See the repository's [Releases list](https://github.com/awslabs/aws-embedded-metrics-node/releases). 
4 | -------------------------------------------------------------------------------- /bin/deploy-canary.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | rootdir=$(git rev-parse --show-toplevel) 4 | rootdir=${rootdir:-$(pwd)} # in case we are not in a git repository (Code Pipelines) 5 | source $rootdir/bin/utils.sh 6 | 7 | LIB_PATH=$rootdir 8 | CANARY_PATH=$LIB_PATH/test/canary/agent 9 | NODE_MODULES_PATH=$CANARY_PATH/node_modules 10 | ACCOUNT_ID=863722843142 11 | REGION=us-west-2 12 | EMF_LANGUAGE=node 13 | IMAGE_NAME=emf-$EMF_LANGUAGE-canary 14 | ECS_CLUSTER_NAME=emf-canary-cluster 15 | ECS_TASK_FAMILY=emf-canary-$EMF_LANGUAGE-tasks 16 | ECS_SERVICE_NAME=emf-canary-$EMF_LANGUAGE-service 17 | ECR_ENDPOINT=$ACCOUNT_ID.dkr.ecr.$REGION.amazonaws.com 18 | ECR_REMOTE=$ECR_ENDPOINT/$IMAGE_NAME 19 | 20 | cp -r $LIB_PATH/node_modules $NODE_MODULES_PATH/ 21 | cp -r $LIB_PATH/lib $NODE_MODULES_PATH/aws-embedded-metrics 22 | cp -r $LIB_PATH/package.json $NODE_MODULES_PATH/aws-embedded-metrics/package.json 23 | 24 | pushd $CANARY_PATH 25 | echo 'BUILDING THE EXAMPLE DOCKER IMAGE' 26 | aws ecr get-login-password --region $REGION | docker login --username AWS --password-stdin $ECR_ENDPOINT 27 | docker build . -t $IMAGE_NAME:latest 28 | check_exit 29 | 30 | echo 'PUSHING THE EXAMPLE DOCKER IMAGE TO ECR' 31 | imageid=$(docker images -q $IMAGE_NAME:latest) 32 | docker tag $imageid $ECR_REMOTE 33 | docker push $ECR_REMOTE 34 | check_exit 35 | 36 | echo 'UPDATING THE ECS SERVICE' 37 | aws ecs update-service \ 38 | --region $REGION \ 39 | --cluster $ECS_CLUSTER_NAME \ 40 | --service $ECS_SERVICE_NAME \ 41 | --force-new-deployment \ 42 | --task-definition $(aws ecs register-task-definition \ 43 | --network-mode awsvpc \ 44 | --requires-compatibilities FARGATE \ 45 | --task-role arn:aws:iam::$ACCOUNT_ID:role/ECSCanaryTaskExecutionRole \ 46 | --execution-role-arn "arn:aws:iam::$ACCOUNT_ID:role/ECSCanaryTaskExecutionRole" \ 47 | --region $REGION \ 48 | --memory 512 \ 49 | --cpu '.25 vcpu' \ 50 | --family $ECS_TASK_FAMILY \ 51 | --container-definitions "$(cat container-definitions.json)" \ 52 | | jq --raw-output '.taskDefinition.taskDefinitionArn' | awk -F '/' '{ print $2 }') 53 | 54 | popd 55 | -------------------------------------------------------------------------------- /bin/publish-package.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | rootdir=$(git rev-parse --show-toplevel) 4 | rootdir=${rootdir:-$(pwd)} # in case we are not in a git repository (Code Pipelines) 5 | 6 | cd $rootdir 7 | source $rootdir/bin/utils.sh 8 | 9 | new_version=$(cat package.json | jq '.version' --raw-output) 10 | echo "Deploying $new_version to NPM" 11 | 12 | # we need to do this so that eslint doesn't try to stat the directory 13 | # and blow up on too many symlinks 14 | rm -rf examples/agent/node_modules/ 15 | rm -rf examples/ecs-firelens/node_modules/ 16 | rm -rf examples/eks/node_modules/ 17 | rm -rf examples/lambda/src/node_modules/ 18 | rm -rf examples/testing/node_modules/ 19 | 20 | echo "//registry.npmjs.org/:_authToken=$NPM_TOKEN" >> ~/.npmrc 21 | npm whoami 22 | 23 | # if the version is a pre-release, tag it with "next" 24 | if [[ $new_version == *"-"* ]]; then 25 | echo "New version is a pre-release, tagging as next" 26 | npm publish --tag next 27 | check_exit 28 | else 29 | echo "New version is NOT a pre-release, tagging as latest." 
30 | npm publish 31 | check_exit 32 | fi -------------------------------------------------------------------------------- /bin/run-integ-tests.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | # 3 | # Run integration tests against a CW Agent. 4 | # We first create the necessary 5 | # 6 | # usage: 7 | # export AWS_ACCESS_KEY_ID= 8 | # export AWS_SECRET_ACCESS_KEY= 9 | # export AWS_REGION=us-west-2 10 | # ./run-integ-tests.sh 11 | 12 | rootdir=$(git rev-parse --show-toplevel) 13 | rootdir=${rootdir:-$(pwd)} # in case we are not in a git repository (Code Pipelines) 14 | 15 | tempfile="$rootdir/test/integ/agent/.temp" 16 | 17 | ################################### 18 | # Configure and start the agent 19 | ################################### 20 | 21 | $rootdir/bin/start-agent.sh 22 | 23 | ################################### 24 | # Wait for the agent to boot 25 | ################################### 26 | 27 | echo "Waiting for agent to start." 28 | tail -f $tempfile | sed '/Loaded outputs: cloudwatchlogs/ q' 29 | containerId=$(docker ps -q) 30 | echo "Agent started in container: $containerId." 31 | 32 | ################################### 33 | # Run tests 34 | ################################### 35 | 36 | cd $rootdir 37 | npm run exec-integ 38 | status_code=$? 39 | 40 | ################################### 41 | # Cleanup 42 | ################################### 43 | 44 | cat $tempfile 45 | docker stop $containerId 46 | rm -rf $tempfile 47 | rm -rf ./.aws/credentials 48 | rm -rf ./.aws/config 49 | 50 | exit $status_code -------------------------------------------------------------------------------- /bin/start-agent.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | # 3 | # Run integration tests against a CW Agent. 4 | # We first create the necessary 5 | # 6 | # usage: 7 | # export AWS_ACCESS_KEY_ID= 8 | # export AWS_SECRET_ACCESS_KEY= 9 | # export AWS_REGION=us-west-2 10 | # ./start-agent.sh 11 | 12 | rootdir=$(git rev-parse --show-toplevel) 13 | rootdir=${rootdir:-$(pwd)} # in case we are not in a git repository (Code Pipelines) 14 | 15 | tempfile="$rootdir/test/integ/agent/.temp" 16 | 17 | ################################### 18 | # Configure and start the agent 19 | ################################### 20 | 21 | # Check if IAM user credentials exist 22 | if [ -z "$AWS_ACCESS_KEY_ID" ] || [ -z "$AWS_SECRET_ACCESS_KEY" ]; then 23 | echo "No IAM user credentials found, assuming we are running on CodeBuild pipeline, falling back to IAM role.." 24 | 25 | # Store the AWS STS assume-role output and extract credentials 26 | CREDS=$(aws sts assume-role \ 27 | --role-arn $Code_Build_Execution_Role_ARN \ 28 | --role-session-name "session-$(uuidgen)" \ 29 | --query 'Credentials.[AccessKeyId,SecretAccessKey,SessionToken]' \ 30 | --output text \ 31 | --duration-seconds 3600) 32 | 33 | # Parse the output into separate variables 34 | read AWS_ACCESS_KEY_ID AWS_SECRET_ACCESS_KEY AWS_SESSION_TOKEN <<< $CREDS 35 | 36 | # Export the variables 37 | export AWS_ACCESS_KEY_ID AWS_SECRET_ACCESS_KEY AWS_SESSION_TOKEN 38 | 39 | CREDENTIALS_CONTENT="[AmazonCloudWatchAgent] 40 | aws_access_key_id = $AWS_ACCESS_KEY_ID 41 | aws_secret_access_key = $AWS_SECRET_ACCESS_KEY 42 | aws_session_token = $AWS_SESSION_TOKEN" 43 | else 44 | echo "Using provided IAM user credentials..." 
45 | CREDENTIALS_CONTENT="[AmazonCloudWatchAgent] 46 | aws_access_key_id = $AWS_ACCESS_KEY_ID 47 | aws_secret_access_key = $AWS_SECRET_ACCESS_KEY" 48 | fi 49 | 50 | pushd $rootdir/test/integ/agent 51 | echo "$CREDENTIALS_CONTENT" > ./.aws/credentials 52 | 53 | echo "[profile AmazonCloudWatchAgent] 54 | region = $AWS_REGION 55 | " > ./.aws/config 56 | 57 | docker build -t agent:latest . 58 | docker run -p 25888:25888/udp -p 25888:25888/tcp \ 59 | -e AWS_REGION=$AWS_REGION \ 60 | -e AWS_ACCESS_KEY_ID=$AWS_ACCESS_KEY_ID \ 61 | -e AWS_SECRET_ACCESS_KEY=$AWS_SECRET_ACCESS_KEY \ 62 | agent:latest &> $tempfile & 63 | popd 64 | -------------------------------------------------------------------------------- /bin/utils.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env zsh 2 | 3 | function check_exit() { 4 | last_exit_code=$? 5 | 6 | if [ $last_exit_code -ne 0 ]; 7 | then 8 | echo "Last command failed with exit code: $last_exit_code." 9 | echo "Exiting." 10 | exit $last_exit_code; 11 | fi 12 | } 13 | -------------------------------------------------------------------------------- /examples/README.md: -------------------------------------------------------------------------------- 1 | # Examples 2 | 3 | ## Lambda 4 | 5 | You can deploy the Lambda example by running: 6 | 7 | ```sh 8 | export AWS_REGION=us-west-2 9 | export LAMBDA_ARN="arn:aws:lambda:$AWS_REGION::function:" 10 | ./examples/lambda/deploy/deploy-lambda.sh $LAMBDA_ARN $AWS_REGION 11 | ``` 12 | 13 | ## Agent 14 | 15 | In order to run this example you will need the CloudWatch Agent running locally. 16 | The easiest way to do this is by running it in a Docker container using the following script. 17 | Alternatively, you can find installation instructions [here](https://docs.aws.amazon.com/AmazonCloudWatch/latest/monitoring/install-CloudWatch-Agent-on-EC2-Instance.html). 18 | 19 | ```sh 20 | export AWS_ACCESS_KEY_ID= 21 | export AWS_SECRET_ACCESS_KEY= 22 | export AWS_REGION=us-west-2 23 | ./bin/start-agent.sh 24 | ``` 25 | 26 | Run the example: 27 | 28 | ``` 29 | ./examples/agent/run.sh 30 | ``` 31 | 32 | ## Docker 33 | 34 | With Docker images, using the `awslogs` log driver will send your container logs to CloudWatch Logs. All you have to do is write to STDOUT and your EMF logs will be processed. 35 | 36 | [Official Docker documentation for `awslogs` driver](https://docs.docker.com/config/containers/logging/awslogs/) 37 | 38 | ## ECS and Fargate 39 | 40 | With ECS and Fargate, you can use the `awsfirelens` (recommended) or `awslogs` log driver to have your logs sent to CloudWatch Logs on your behalf. After configuring the options for your preferred log driver, you may write your EMF logs to STDOUT and they will be processed. 41 | 42 | [`awsfirelens` documentation](https://github.com/aws/amazon-cloudwatch-logs-for-fluent-bit) 43 | 44 | [ECS documentation on `awslogs` log driver](https://docs.aws.amazon.com/AmazonECS/latest/developerguide/using_awslogs.html) 45 | 46 | ## Fluent Bit and Fluentd 47 | 48 | Fluent Bit can be used to collect logs and push them to CloudWatch Logs. After configuring the Amazon CloudWatch Logs output plugin, you may write your EMF logs to STDOUT and they will be processed. 
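Whichever of the drivers or forwarders above you use, the application code stays the same: the library serializes each flush into a single EMF JSON line on stdout. Below is a minimal, illustrative sketch (not one of the shipped examples) that writes one payload to stdout by forcing the `Local` environment override — the same override used in `examples/testing/tests/module.local.test.js`; the dimension, metric, and property values are placeholders taken from the examples.

```js
// Force the Local environment so output goes to stdout rather than the agent.
// This must be set before the library is loaded, because environment
// detection starts as soon as the aws-embedded-metrics module is imported.
process.env.AWS_EMF_ENVIRONMENT = 'Local';

const { metricScope, Unit } = require('aws-embedded-metrics');

const doWork = metricScope(metrics => async () => {
  // Illustrative values only
  metrics.putDimensions({ ServiceName: 'example', ServiceType: 'AWS::ECS::Container' });
  metrics.putMetric('ProcessingTime', 5, Unit.Milliseconds);
  metrics.setProperty('RequestId', '422b1569-16f6-4a03-b8f0-fe3fd9b100f8');
});

doWork();
```

Each completed scope emits one JSON document in the shape shown under "Example Metrics" below, which the configured log driver or Fluent Bit then forwards to CloudWatch Logs.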
49 | 50 | [Getting Started with Fluent Bit](https://docs.fluentbit.io/manual/installation/getting-started-with-fluent-bit) 51 | 52 | [Amazon CloudWatch output plugin for Fluent Bit](https://docs.fluentbit.io/manual/pipeline/outputs/cloudwatch) 53 | 54 | ## FireLens on ECS 55 | 56 | You can deploy the example by running the following: 57 | 58 | ```sh 59 | # create an ECR repository for the example image 60 | aws ecr create-repository --repository-name --region 61 | 62 | # create an S3 bucket for the Fluent-Bit configuration 63 | aws s3api create-bucket --bucket --region 64 | 65 | # create ECS cluster 66 | # create ECS task definition 67 | # create ECS service 68 | 69 | # deploy 70 | ./examples/ecs-firelens/publish.sh \ 71 | \ 72 | \ 73 | \ 74 | \ 75 | \ 76 | \ 77 | 78 | ``` 79 | 80 | ### Example Metrics 81 | 82 | ```json 83 | { 84 | "_aws": { 85 | "Timestamp": 1583902595342, 86 | "CloudWatchMetrics": [ 87 | { 88 | "Dimensions": [[ "ServiceName", "ServiceType" ]], 89 | "Metrics": [{ "Name": "ProcessingTime", "Unit": "Milliseconds" }], 90 | "Namespace": "aws-embedded-metrics" 91 | } 92 | ] 93 | }, 94 | "ServiceName": "example", 95 | "ServiceType": "AWS::ECS::Container", 96 | "Method": "GET", 97 | "Url": "/test", 98 | "containerId": "702e4bcf1345", 99 | "createdAt": "2020-03-11T04:54:24.981207801Z", 100 | "startedAt": "2020-03-11T04:54:25.594413051Z", 101 | "image": ".dkr.ecr..amazonaws.com/emf-examples:latest", 102 | "cluster": "emf-example", 103 | "taskArn": "arn:aws:ecs:::task/2fe946f6-8a2e-41a4-8fec-c4983bad8f74", 104 | "ProcessingTime": 5 105 | } 106 | ``` 107 | 108 | ## EKS 109 | ### Install dependencies 110 | ```sh 111 | npm i 112 | ``` 113 | 114 | ### Deployment 115 | 116 | 1. Create a new EKS cluster using [eksctl](https://docs.aws.amazon.com/eks/latest/userguide/eksctl.html). 117 | ```sh 118 | eksctl create cluster --name eks-demo 119 | ``` 120 | 121 | 2. Build and push the Docker image. You will want to use your own Docker Hub repository. 122 | ```sh 123 | docker login 124 | docker build . -t /eks-demo:latest --platform linux/amd64 125 | docker push /eks-demo 126 | ``` 127 | 128 | 3. Set the container image name in `kubernetes/deployment.yaml` and apply the configuration to your cluster. 129 | ```sh 130 | kubectl apply -f kubernetes/ 131 | kubectl get deployment eks-demo 132 | ``` 133 | 134 | 4. Add CloudWatch permissions to the worker nodes. 135 | ```sh 136 | # Get the IAM role name for the worker nodes (NodeRole): 137 | aws eks describe-nodegroup --cluster-name eks-demo --nodegroup-name 138 | 139 | # Attach the CloudWatchAgentServerPolicy to the role: 140 | aws iam attach-role-policy --role-name --policy-arn arn:aws:iam::aws:policy/CloudWatchAgentServerPolicy 141 | ``` 142 | 143 | 144 | 5. To test, navigate to your ELB endpoint for the cluster. This will generate EMF logs in your AWS account. 145 | ```sh 146 | # Get endpoint 147 | kubectl get svc eks-demo 148 | 149 | # Ping the endpoint 150 | curl http://..elb.amazonaws.com/ping 151 | ``` 152 | 153 | 6. To update, re-build, push changes, and delete the running pod. 154 | 155 | ```sh 156 | docker build .
-t /eks-demo:latest --platform linux/amd64 157 | docker push /eks-demo 158 | kubectl delete pod 159 | ``` 160 | -------------------------------------------------------------------------------- /examples/agent/index.js: -------------------------------------------------------------------------------- 1 | const { metricScope, Unit, StorageResolution } = require('aws-embedded-metrics'); 2 | 3 | const doWork = metricScope(metrics => async event => { 4 | metrics.putDimensions({ Operation: 'Agent' }); 5 | metrics.putMetric('ExampleMetric', 100, Unit.Milliseconds); 6 | metrics.putMetric('ExampleHighResolutionMetric', 10, Unit.Milliseconds, StorageResolution.High); 7 | metrics.setProperty('RequestId', '422b1569-16f6-4a03-b8f0-fe3fd9b100f8'); 8 | }); 9 | 10 | doWork(); 11 | -------------------------------------------------------------------------------- /examples/agent/package-lock.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "aws-embedded-metrics-agent-demo", 3 | "version": "1.0.1", 4 | "lockfileVersion": 2, 5 | "requires": true, 6 | "packages": { 7 | "": { 8 | "name": "aws-embedded-metrics-agent-demo", 9 | "version": "1.0.1", 10 | "license": "ISC", 11 | "dependencies": { 12 | "aws-embedded-metrics": "^3.0.0" 13 | } 14 | }, 15 | "node_modules/@datastructures-js/heap": { 16 | "version": "4.1.0", 17 | "resolved": "https://registry.npmjs.org/@datastructures-js/heap/-/heap-4.1.0.tgz", 18 | "integrity": "sha512-PFKWpDhd1BEdKw2FErddES8bLT6R2/FiivT42eByshG+9vNUpsGuOr7tsKcqRo1S3XyfBnQT711HVQt2k6xRQw==" 19 | }, 20 | "node_modules/aws-embedded-metrics": { 21 | "version": "3.0.0", 22 | "resolved": "https://registry.npmjs.org/aws-embedded-metrics/-/aws-embedded-metrics-3.0.0.tgz", 23 | "integrity": "sha512-4SsOynlnrdT9C8NzQFLqyIz/5g/sYPYsKF1yh+VcnIIMliXHt1CHZS4Gw0Gd0IDZLPS+sWrL6jyAzPvsaog7sg==", 24 | "dependencies": { 25 | "@datastructures-js/heap": "^4.0.2" 26 | }, 27 | "engines": { 28 | "node": ">=10.0.0" 29 | } 30 | } 31 | }, 32 | "dependencies": { 33 | "@datastructures-js/heap": { 34 | "version": "4.1.0", 35 | "resolved": "https://registry.npmjs.org/@datastructures-js/heap/-/heap-4.1.0.tgz", 36 | "integrity": "sha512-PFKWpDhd1BEdKw2FErddES8bLT6R2/FiivT42eByshG+9vNUpsGuOr7tsKcqRo1S3XyfBnQT711HVQt2k6xRQw==" 37 | }, 38 | "aws-embedded-metrics": { 39 | "version": "3.0.0", 40 | "resolved": "https://registry.npmjs.org/aws-embedded-metrics/-/aws-embedded-metrics-3.0.0.tgz", 41 | "integrity": "sha512-4SsOynlnrdT9C8NzQFLqyIz/5g/sYPYsKF1yh+VcnIIMliXHt1CHZS4Gw0Gd0IDZLPS+sWrL6jyAzPvsaog7sg==", 42 | "requires": { 43 | "@datastructures-js/heap": "^4.0.2" 44 | } 45 | } 46 | } 47 | } 48 | -------------------------------------------------------------------------------- /examples/agent/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "aws-embedded-metrics-agent-demo", 3 | "version": "1.0.1", 4 | "description": "", 5 | "main": "index.js", 6 | "scripts": { 7 | "test": "echo \"Error: no test specified\" && exit 1", 8 | "start": "node index.js" 9 | }, 10 | "author": "", 11 | "license": "ISC", 12 | "dependencies": { 13 | "aws-embedded-metrics": "^3.0.0" 14 | }, 15 | "volta": { 16 | "node": "16.16.0" 17 | } 18 | } 19 | -------------------------------------------------------------------------------- /examples/agent/run.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | rootdir=$(git rev-parse --show-toplevel) 4 | 5 | pushd 
$rootdir/examples/agent 6 | export AWS_EMF_ENABLE_DEBUG_LOGGING=true 7 | export AWS_EMF_LOG_GROUP_NAME=AgentDemo 8 | export AWS_EMF_LOG_STREAM_NAME=local 9 | export AWS_EMF_SERVICE_NAME=Demo 10 | export AWS_EMF_SERVICE_TYPE=local 11 | export AWS_EMF_AGENT_ENDPOINT=tcp://0.0.0.0:25888 12 | node index.js 13 | popd -------------------------------------------------------------------------------- /examples/ecs-firelens/.gitignore: -------------------------------------------------------------------------------- 1 | container-definitions.json -------------------------------------------------------------------------------- /examples/ecs-firelens/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM node:10.16.0-alpine AS base 2 | RUN mkdir -p /app/src 3 | WORKDIR /app/src 4 | 5 | COPY package.json ./ 6 | # install packages but copy the local version of the package in directly 7 | RUN npm i && rm -rf node_modules/aws-embedded-metrics 8 | COPY node_modules/aws-embedded-metrics ./node_modules/aws-embedded-metrics 9 | 10 | # copy the source files over 11 | COPY . . 12 | 13 | CMD [ "node", "app" ] 14 | -------------------------------------------------------------------------------- /examples/ecs-firelens/app.js: -------------------------------------------------------------------------------- 1 | const Koa = require('koa'); 2 | const app = new Koa(); 3 | 4 | const { metricScope, Unit, StorageResolution } = require('aws-embedded-metrics'); 5 | 6 | app.use( 7 | metricScope(metrics => async (ctx, next) => { 8 | const start = Date.now(); 9 | 10 | await next(); 11 | 12 | ctx.body = `Hello World ... ${ctx.method} ${ctx.url}\n`; 13 | 14 | metrics.setProperty('Method', ctx.method); 15 | metrics.setProperty('Url', ctx.url); 16 | metrics.putMetric('ProcessingTime', Date.now() - start, Unit.Milliseconds); 17 | metrics.putMetric('ProcessingLatency', 100, Unit.Milliseconds, StorageResolution.High); 18 | 19 | // send application logs to stdout, FireLens will send this to a different LogGroup 20 | console.log('Completed Request'); 21 | }), 22 | ); 23 | 24 | app.listen(3000); 25 | -------------------------------------------------------------------------------- /examples/ecs-firelens/container-definitions.template.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "name": "example", 4 | "image": ".dkr.ecr..amazonaws.com/:latest", 5 | "portMappings": [{ "containerPort": 3000, "protocol": "tcp" }], 6 | "essential": true, 7 | "logConfiguration": { 8 | "logDriver": "awsfirelens", 9 | "options": { 10 | "Name": "cloudwatch", 11 | "region": "", 12 | "log_group_name": "aws-emf-ecs-firelens-example-logs", 13 | "auto_create_group": "true", 14 | "log_stream_prefix": "from-fluent-bit" 15 | } 16 | }, 17 | "links": ["fluent-bit"] 18 | }, 19 | { 20 | "name": "fluent-bit", 21 | "image": "906394416424.dkr.ecr..amazonaws.com/aws-for-fluent-bit:latest", 22 | "essential": true, 23 | "firelensConfiguration": { 24 | "type": "fluentbit", 25 | "options": { 26 | "config-file-type": "s3", 27 | "config-file-value": "arn:aws:s3:::/fluent-bit.conf" 28 | } 29 | } 30 | } 31 | ] 32 | -------------------------------------------------------------------------------- /examples/ecs-firelens/fluent-bit.conf: -------------------------------------------------------------------------------- 1 | # TCP input used for EMF payloads 2 | [INPUT] 3 | Name tcp 4 | Listen 0.0.0.0 5 | Port 25888 6 | Chunk_Size 32 7 | Buffer_Size 64 8 | Format none 9 | Tag emf-${HOSTNAME} 
10 | # This tag is used by the output plugin to determine the LogStream 11 | # including the HOSTNAME is a way to increase the number of LogStreams 12 | # proportional to the number of instances. The maximum throughput on a 13 | # single LogStream is 5 MB/s (max 1 MB at max 5 TPS). 14 | 15 | # Output for EMF over TCP -> CloudWatch 16 | [OUTPUT] 17 | Name cloudwatch 18 | Match emf-* 19 | region us-east-1 20 | log_key log 21 | log_group_name aws-emf-ecs-firelens-example-metrics 22 | log_stream_prefix from-fluent-bit- 23 | auto_create_group true 24 | log_format json/emf 25 | -------------------------------------------------------------------------------- /examples/ecs-firelens/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "ecs-firelens", 3 | "version": "1.0.1", 4 | "description": "", 5 | "main": "index.js", 6 | "scripts": { 7 | "test": "echo \"Error: no test specified\" && exit 1", 8 | "serve-local": "AWS_EMF_ENVIRONMENT=Local node app" 9 | }, 10 | "keywords": [], 11 | "author": "", 12 | "license": "ISC", 13 | "dependencies": { 14 | "aws-embedded-metrics": "^3.0.0", 15 | "koa": "^2.11.0" 16 | } 17 | } 18 | -------------------------------------------------------------------------------- /examples/ecs-firelens/publish.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | # Usage: 3 | # ./examples/ecs-firelens/publish.sh \ 4 | # \ 5 | # \ 6 | # \ 7 | # \ 8 | # \ 9 | # \ 10 | # 11 | 12 | rootdir=$(git rev-parse --show-toplevel) 13 | 14 | ACCOUNT_ID=$1 15 | REGION=$2 16 | IMAGE_NAME=$3 # emf-ecs-firelens 17 | FLUENT_BIT_S3_CONFIG_BUCKET=$4 # aws-emf-ecs-firelens-configurations 18 | CLUSTER_NAME=$5 # emf-example 19 | ECS_TASK_FAMILY=$6 # aws-emf-ecs-koa-example 20 | ECS_SERVICE_NAME=$7 # aws-emf-ecs-firelens-ec2 21 | 22 | LIB_PATH=$rootdir 23 | EXAMPLE_DIR=$rootdir/examples/ecs-firelens 24 | NODE_MODULES_PATH=$EXAMPLE_DIR/node_modules 25 | ECR_REMOTE=$ACCOUNT_ID.dkr.ecr.$REGION.amazonaws.com/$IMAGE_NAME 26 | 27 | function check_exit() { 28 | last_exit_code=$? 29 | if [ $last_exit_code -ne 0 ]; 30 | then 31 | echo "Last command failed with exit code: $last_exit_code." 32 | echo "Exiting." 33 | exit $last_exit_code; 34 | fi 35 | } 36 | 37 | echo 'BUILDING THE LOCAL PROJECT' 38 | pushd $rootdir 39 | npm i && npm run build 40 | check_exit 41 | popd 42 | 43 | pushd $EXAMPLE_DIR 44 | pwd 45 | 46 | echo 'COPYING LOCAL PROJECT TO EXAMPLE node_modules' 47 | rm -rf $NODE_MODULES_PATH/aws-embedded-metrics 48 | cp -r $LIB_PATH/lib $NODE_MODULES_PATH/aws-embedded-metrics 49 | 50 | echo 'UPDATING CONTAINER DEFINITIONS' 51 | sed "s//$ACCOUNT_ID/g" $EXAMPLE_DIR/container-definitions.template.json \ 52 | | sed "s//$REGION/g" \ 53 | | sed "s//$FLUENT_BIT_S3_CONFIG_BUCKET/g" \ 54 | | sed "s//$IMAGE_NAME/g" \ 55 | > $EXAMPLE_DIR/container-definitions.json 56 | check_exit 57 | 58 | echo 'PUSHING FLUENT BIT CONFIG TO S3' 59 | aws s3 cp fluent-bit.conf s3://$FLUENT_BIT_S3_CONFIG_BUCKET --region $REGION 60 | check_exit 61 | 62 | echo 'BUILDING THE EXAMPLE DOCKER IMAGE' 63 | `aws ecr get-login --no-include-email --region $REGION` 64 | docker build . 
-t $IMAGE_NAME:latest 65 | check_exit 66 | 67 | echo 'PUSHING THE EXAMPLE DOCKER IMAGE TO ECR' 68 | imageid=$(docker images -q $IMAGE_NAME:latest) 69 | docker tag $imageid $ECR_REMOTE 70 | docker push $ECR_REMOTE 71 | check_exit 72 | 73 | echo 'UPDATING THE ECS SERVICE' 74 | aws ecs update-service \ 75 | --region $REGION \ 76 | --cluster $CLUSTER_NAME \ 77 | --service $ECS_SERVICE_NAME \ 78 | --force-new-deployment \ 79 | --task-definition $(aws ecs register-task-definition \ 80 | --network-mode bridge \ 81 | --requires-compatibilities EC2 \ 82 | --task-role arn:aws:iam::$ACCOUNT_ID:role/ecsTaskExecutionRole \ 83 | --execution-role-arn "arn:aws:iam::$ACCOUNT_ID:role/ecsTaskExecutionRole" \ 84 | --region $REGION \ 85 | --memory 256 \ 86 | --cpu '1 vcpu' \ 87 | --family $ECS_TASK_FAMILY \ 88 | --container-definitions "$(cat container-definitions.json)" \ 89 | | jq --raw-output '.taskDefinition.taskDefinitionArn' | awk -F '/' '{ print $2 }') 90 | 91 | popd -------------------------------------------------------------------------------- /examples/eks/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM node:10.16.0-alpine AS base 2 | RUN mkdir -p /app/src 3 | WORKDIR /app/src 4 | 5 | COPY package.json ./ 6 | # install packages but copy the local version of the package in directly 7 | RUN npm i && rm -rf node_modules/aws-embedded-metrics 8 | COPY node_modules/aws-embedded-metrics ./node_modules/aws-embedded-metrics 9 | 10 | # copy the source files over 11 | COPY . . 12 | 13 | CMD [ "node", "app" ] 14 | -------------------------------------------------------------------------------- /examples/eks/app.js: -------------------------------------------------------------------------------- 1 | const Koa = require('koa'); 2 | const app = new Koa(); 3 | 4 | const { metricScope, Configuration, Unit, StorageResolution } = require('aws-embedded-metrics'); 5 | 6 | Configuration.serviceName = 'EKS-Demo'; 7 | Configuration.serviceType = 'AWS::EKS::Cluster'; 8 | Configuration.logStreamName = process.env.HOSTNAME; 9 | 10 | app.use( 11 | metricScope(metrics => async (ctx, next) => { 12 | const start = Date.now(); 13 | 14 | await next(); 15 | 16 | ctx.body = `Hello World ... 
${ctx.method} ${ctx.url}\n`; 17 | 18 | metrics.setProperty('Method', ctx.method); 19 | metrics.setProperty('Url', ctx.url); 20 | metrics.putMetric('ProcessingTime', Date.now() - start, Unit.Milliseconds); 21 | metrics.putMetric('ProcessingLatency', 100, Unit.Milliseconds, StorageResolution.High); 22 | }), 23 | ); 24 | 25 | app.listen(3000); 26 | -------------------------------------------------------------------------------- /examples/eks/kubernetes/configmap.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: v1 2 | data: 3 | # Any changes here must not break the JSON format 4 | cwagentconfig.json: | 5 | { 6 | "logs": { 7 | "metrics_collected": { 8 | "emf": { } 9 | } 10 | } 11 | } 12 | kind: ConfigMap 13 | metadata: 14 | name: cw-agent-config 15 | namespace: default 16 | -------------------------------------------------------------------------------- /examples/eks/kubernetes/deployment.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: apps/v1 2 | kind: Deployment 3 | metadata: 4 | name: eks-demo 5 | labels: 6 | app: eks-demo 7 | namespace: default 8 | spec: 9 | replicas: 1 10 | selector: 11 | matchLabels: 12 | app: eks-demo 13 | strategy: 14 | rollingUpdate: 15 | maxSurge: 25% 16 | maxUnavailable: 25% 17 | type: RollingUpdate 18 | template: 19 | metadata: 20 | labels: 21 | app: eks-demo 22 | spec: 23 | containers: 24 | - image: /eks-demo:latest 25 | imagePullPolicy: Always 26 | name: eks-demo 27 | ports: 28 | - containerPort: 3000 29 | protocol: TCP 30 | env: 31 | - name: AWS_EMF_AGENT_ENDPOINT 32 | value: "tcp://127.0.0.1:25888" 33 | - name: AWS_EMF_ENABLE_DEBUG_LOGGING 34 | value: "true" 35 | - image: amazon/cloudwatch-agent:latest 36 | name: cloudwatch-agent 37 | imagePullPolicy: Always 38 | resources: 39 | limits: 40 | cpu: 200m 41 | memory: 100Mi 42 | requests: 43 | cpu: 200m 44 | memory: 100Mi 45 | volumeMounts: 46 | - name: cwagentconfig 47 | mountPath: /etc/cwagentconfig 48 | ports: 49 | - protocol: TCP 50 | hostPort: 25888 51 | containerPort: 25888 52 | volumes: 53 | - name: cwagentconfig 54 | configMap: 55 | name: cw-agent-config 56 | -------------------------------------------------------------------------------- /examples/eks/kubernetes/service.yaml: -------------------------------------------------------------------------------- 1 | 2 | apiVersion: v1 3 | kind: Service 4 | metadata: 5 | name: eks-demo 6 | spec: 7 | selector: 8 | app: eks-demo 9 | type: LoadBalancer 10 | ports: 11 | - protocol: TCP 12 | port: 80 13 | targetPort: 3000 14 | -------------------------------------------------------------------------------- /examples/eks/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "eks-example", 3 | "version": "1.0.2", 4 | "description": "", 5 | "main": "index.js", 6 | "scripts": { 7 | "test": "echo \"Error: no test specified\" && exit 1" 8 | }, 9 | "keywords": [], 10 | "author": "", 11 | "license": "ISC", 12 | "dependencies": { 13 | "aws-embedded-metrics": "^4.1.0", 14 | "koa": "^2.11.0" 15 | } 16 | } 17 | -------------------------------------------------------------------------------- /examples/lambda/.gitignore: -------------------------------------------------------------------------------- 1 | *.bak -------------------------------------------------------------------------------- /examples/lambda/deploy/deploy-lambda.sh: -------------------------------------------------------------------------------- 1 | 
#!/usr/bin/env bash 2 | # 3 | # usage: 4 | # ./deploy/deploy-lambda.sh { LAMBDA_ARN } { region } 5 | 6 | rootdir=$(git rev-parse --show-toplevel) 7 | 8 | LIB_PATH=$rootdir 9 | ZIP_PATH=$rootdir/examples/lambda/artifacts 10 | SRC_PATH=$rootdir/examples/lambda/src 11 | NODE_MODULES_PATH=$rootdir/examples/lambda/src/node_modules 12 | AWS_LAMBDA_ARN=$1 13 | REGION=$2 14 | 15 | rm "$ZIP_PATH.zip" 16 | 17 | ################################### 18 | # Copy current version over to node_modules 19 | ################################### 20 | 21 | mkdir $NODE_MODULES_PATH 22 | cp -r $LIB_PATH/lib $NODE_MODULES_PATH/aws-embedded-metrics 23 | 24 | ################################### 25 | # Compress the bundle 26 | ################################### 27 | 28 | pushd $SRC_PATH 29 | pwd 30 | zip -r $ZIP_PATH . 31 | popd 32 | 33 | ################################### 34 | # Deploy Lambda 35 | ################################### 36 | 37 | echo "Updating function code with archive at $ZIP_PATH.zip..." 38 | aws lambda update-function-code \ 39 | --function-name $AWS_LAMBDA_ARN \ 40 | --region $REGION \ 41 | --zip-file fileb://$ZIP_PATH.zip 42 | 43 | ################################### 44 | # Cleanup temp files 45 | ################################### 46 | rm -rf $NODE_MODULES_PATH 47 | rm "$ZIP_PATH.zip" 48 | -------------------------------------------------------------------------------- /examples/lambda/src/index.js: -------------------------------------------------------------------------------- 1 | const { metricScope, StorageResolution } = require('aws-embedded-metrics'); 2 | 3 | const aggregator = metricScope(metrics => async event => { 4 | console.log('received message'); 5 | metrics.putDimensions({ Service: 'Aggregator' }); 6 | metrics.putMetric('ProcessingLatency', 100, 'Milliseconds'); 7 | metrics.putMetric('CPU Utilization', 87, 'Percent', StorageResolution.High); 8 | metrics.setProperty('AccountId', '123456789012'); 9 | metrics.setProperty('RequestId', '422b1569-16f6-4a03-b8f0-fe3fd9b100f8'); 10 | metrics.setProperty('DeviceId', '61270781-c6ac-46f1-baf7-22c808af8162'); 11 | metrics.setProperty('Payload', { 12 | sampleTime: 123456789, 13 | temperature: 273.0, 14 | pressure: 101.3, 15 | }); 16 | console.log('completed aggregation successfully.'); 17 | }); 18 | 19 | exports.handler = aggregator; 20 | -------------------------------------------------------------------------------- /examples/lambda/src/package-lock.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "aws-embedded-metrics-lambda-demo", 3 | "version": "1.0.1", 4 | "lockfileVersion": 2, 5 | "requires": true, 6 | "packages": { 7 | "": { 8 | "name": "aws-embedded-metrics-lambda-demo", 9 | "version": "1.0.1", 10 | "license": "ISC", 11 | "dependencies": { 12 | "aws-embedded-metrics": "^3.0.0" 13 | } 14 | }, 15 | "node_modules/@datastructures-js/heap": { 16 | "version": "4.1.0", 17 | "resolved": "https://registry.npmjs.org/@datastructures-js/heap/-/heap-4.1.0.tgz", 18 | "integrity": "sha512-PFKWpDhd1BEdKw2FErddES8bLT6R2/FiivT42eByshG+9vNUpsGuOr7tsKcqRo1S3XyfBnQT711HVQt2k6xRQw==" 19 | }, 20 | "node_modules/aws-embedded-metrics": { 21 | "version": "3.0.0", 22 | "resolved": "https://registry.npmjs.org/aws-embedded-metrics/-/aws-embedded-metrics-3.0.0.tgz", 23 | "integrity": "sha512-4SsOynlnrdT9C8NzQFLqyIz/5g/sYPYsKF1yh+VcnIIMliXHt1CHZS4Gw0Gd0IDZLPS+sWrL6jyAzPvsaog7sg==", 24 | "dependencies": { 25 | "@datastructures-js/heap": "^4.0.2" 26 | }, 27 | "engines": { 28 | "node": ">=10.0.0" 29 | } 30 | } 
31 | }, 32 | "dependencies": { 33 | "@datastructures-js/heap": { 34 | "version": "4.1.0", 35 | "resolved": "https://registry.npmjs.org/@datastructures-js/heap/-/heap-4.1.0.tgz", 36 | "integrity": "sha512-PFKWpDhd1BEdKw2FErddES8bLT6R2/FiivT42eByshG+9vNUpsGuOr7tsKcqRo1S3XyfBnQT711HVQt2k6xRQw==" 37 | }, 38 | "aws-embedded-metrics": { 39 | "version": "3.0.0", 40 | "resolved": "https://registry.npmjs.org/aws-embedded-metrics/-/aws-embedded-metrics-3.0.0.tgz", 41 | "integrity": "sha512-4SsOynlnrdT9C8NzQFLqyIz/5g/sYPYsKF1yh+VcnIIMliXHt1CHZS4Gw0Gd0IDZLPS+sWrL6jyAzPvsaog7sg==", 42 | "requires": { 43 | "@datastructures-js/heap": "^4.0.2" 44 | } 45 | } 46 | } 47 | } 48 | -------------------------------------------------------------------------------- /examples/lambda/src/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "aws-embedded-metrics-lambda-demo", 3 | "version": "1.0.1", 4 | "description": "", 5 | "main": "index.js", 6 | "scripts": { 7 | "test": "echo \"Error: no test specified\" && exit 1" 8 | }, 9 | "author": "", 10 | "license": "ISC", 11 | "dependencies": { 12 | "aws-embedded-metrics": "^3.0.0" 13 | }, 14 | "volta": { 15 | "node": "16.16.0" 16 | } 17 | } 18 | -------------------------------------------------------------------------------- /examples/testing/jestconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "testRegex": "\\.test\\.js$", 3 | "moduleFileExtensions": [ "js" ] 4 | } 5 | -------------------------------------------------------------------------------- /examples/testing/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "testing", 3 | "version": "1.0.1", 4 | "description": "", 5 | "main": "index.js", 6 | "scripts": { 7 | "test": "jest --config jestconfig.json" 8 | }, 9 | "keywords": [], 10 | "author": "", 11 | "license": "ISC", 12 | "dependencies": { 13 | "aws-embedded-metrics": "^3.0.0" 14 | }, 15 | "devDependencies": { 16 | "jest": "^28.1.3", 17 | "node-notifier": ">=8.0.1" 18 | } 19 | } 20 | -------------------------------------------------------------------------------- /examples/testing/src/module.js: -------------------------------------------------------------------------------- 1 | const { metricScope, createMetricsLogger, Unit } = require('aws-embedded-metrics'); 2 | 3 | const usingScope = metricScope(metrics => async (event, ctx) => { 4 | metrics.putDimensions({ Service: 'Aggregator' }); 5 | metrics.putMetric('EventCount', event.count, Unit.Count); 6 | metrics.setProperty('AccountId', event.accountId); 7 | metrics.setProperty('RequestId', ctx.requestId); 8 | }); 9 | 10 | const usingCreateLogger = async (event, ctx) => { 11 | const metrics = createMetricsLogger(); 12 | metrics.putDimensions({ Service: 'Aggregator' }); 13 | metrics.putMetric('EventCount', event.count, Unit.Count); 14 | metrics.setProperty('AccountId', event.accountId); 15 | metrics.setProperty('RequestId', ctx.requestId); 16 | await metrics.flush(); 17 | }; 18 | 19 | module.exports = { usingScope, usingCreateLogger }; 20 | -------------------------------------------------------------------------------- /examples/testing/tests/module.jest.test.js: -------------------------------------------------------------------------------- 1 | // this test demonstrates how other packages could mock 2 | // the aws-embedded-metrics modules when using the 3 | // jest testing framework: https://github.com/facebook/jest 4 | 5 | // import the 
modules that are being tested 6 | // each of the methods being tested use a variation 7 | // of the aws-embedded-metrics logger 8 | const { usingScope, usingCreateLogger } = require('../src/module'); 9 | 10 | // setup our mocks 11 | jest.mock('aws-embedded-metrics', () => { 12 | // here we're pulling the actual Units in since our methods under 13 | // test depend on it and there's no reason to mock this type since 14 | // it's just a POJO 15 | const { Unit } = jest.requireActual('aws-embedded-metrics'); 16 | 17 | // here we're mocking the actual logger that our methods under test use 18 | const mockLogger = { 19 | putMetric: jest.fn(), 20 | putDimensions: jest.fn(), 21 | setProperty: jest.fn(), 22 | flush: jest.fn(), 23 | }; 24 | 25 | // return the mocked module 26 | return { 27 | // by returning the actual mock logger instance, 28 | // our tests can make assertions about which metrics 29 | // were logged if desired 30 | mockLogger, 31 | metricScope: fn => fn(mockLogger), 32 | createMetricsLogger: () => mockLogger, 33 | Unit, 34 | }; 35 | }); 36 | 37 | // import the mock logger that we setup above 38 | // we need to do it this way since Jest won't allow us to 39 | // share variables with the mocked modules since mocks 40 | // get hoisted above everything else 41 | const { mockLogger } = require('aws-embedded-metrics'); 42 | 43 | // now we'll begin testing our actual methods 44 | test('usingScope records latency with metadata', async () => { 45 | // arrange 46 | const count = Math.random() * 100; 47 | const accountId = Math.random() * 100; 48 | const requestId = '1e2171fa-fe92-4b20-b44d-43f908beda14'; 49 | 50 | // act 51 | await usingScope({ accountId, count }, { requestId }); 52 | 53 | // assert 54 | expect(mockLogger.putMetric).toBeCalledWith('EventCount', count, 'Count'); 55 | expect(mockLogger.putDimensions).toBeCalledWith({ Service: 'Aggregator' }); 56 | expect(mockLogger.setProperty).toBeCalledWith('RequestId', requestId); 57 | expect(mockLogger.setProperty).toBeCalledWith('AccountId', accountId); 58 | }); 59 | 60 | test('usingCreateLogger records latency with metadata', async () => { 61 | // arrange 62 | const count = Math.random() * 100; 63 | const accountId = Math.random() * 100; 64 | const requestId = 'de126e0c-4ca0-484b-88f8-7bde08f2ae46'; 65 | 66 | // act 67 | await usingCreateLogger({ accountId, count }, { requestId }); 68 | 69 | // assert 70 | expect(mockLogger.putMetric).toBeCalledWith('EventCount', count, 'Count'); 71 | expect(mockLogger.putDimensions).toBeCalledWith({ Service: 'Aggregator' }); 72 | expect(mockLogger.setProperty).toBeCalledWith('RequestId', requestId); 73 | expect(mockLogger.setProperty).toBeCalledWith('AccountId', accountId); 74 | expect(mockLogger.flush).toBeCalled(); 75 | }); 76 | -------------------------------------------------------------------------------- /examples/testing/tests/module.local.test.js: -------------------------------------------------------------------------------- 1 | // this test demonstrates how you can use local environment override 2 | // to avoid mocking entirely 3 | // you can use this if you're not interested in making assertions 4 | // about the metrics being logged 5 | // the metrics will be serialized and sent to stdout 6 | 7 | // set the environment override to Local which just logs to stdout 8 | // this needs to be done prior to importing any modules 9 | // because environment detection starts as soon as the 10 | // aws-embedded-metrics module is loaded 11 | process.env.AWS_EMF_ENVIRONMENT = 'Local'; 12 | 13 | const { 
usingScope } = require('../src/module'); 14 | 15 | // now we'll begin testing our actual methods 16 | test('usingScope test', async () => { 17 | // arrange 18 | const count = Math.random() * 100; 19 | const accountId = Math.random() * 100; 20 | const requestId = '1e2171fa-fe92-4b20-b44d-43f908beda14'; 21 | 22 | // act 23 | await usingScope({ accountId, count }, { requestId }); 24 | 25 | // assert 26 | // I don't have anything I need to assert... 27 | }); 28 | -------------------------------------------------------------------------------- /jestconfig.integ.json: -------------------------------------------------------------------------------- 1 | { 2 | "transform": { 3 | "^.+\\.(t|j)sx?$": "ts-jest" 4 | }, 5 | "testRegex": "\\.integ\\.(jsx?|tsx?)$", 6 | "moduleFileExtensions": ["ts", "tsx", "js", "jsx", "json", "node"] 7 | } 8 | -------------------------------------------------------------------------------- /jestconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "transform": { 3 | "^.+\\.(t|j)sx?$": "ts-jest" 4 | }, 5 | "testRegex": "(/__tests__/.*|(\\.|/)(test))\\.ts$", 6 | "moduleFileExtensions": ["ts", "tsx", "js", "jsx", "json", "node"] 7 | } 8 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "aws-embedded-metrics", 3 | "version": "4.2.0", 4 | "description": "AWS Embedded Metrics Client Library", 5 | "main": "lib/index.js", 6 | "types": "lib/index.d.ts", 7 | "repository": { 8 | "type": "git", 9 | "url": "https://github.com/awslabs/aws-embedded-metrics-node.git" 10 | }, 11 | "engines": { 12 | "node": ">=10.0.0" 13 | }, 14 | "scripts": { 15 | "test": "jest --runInBand --detectOpenHandles --config jestconfig.json", 16 | "integ": "./bin/run-integ-tests.sh", 17 | "exec-integ": "jest --config jestconfig.integ.json", 18 | "watch": "npm run test -- --watch", 19 | "build": "tsc", 20 | "format": "prettier --write \"src/**/*.ts\"", 21 | "lint": "eslint \"{src,test}/**/*.ts\"", 22 | "fix-lint": "npm run format && eslint --fix \"{src,test}/**/*.ts\"", 23 | "prepare": "npm run build", 24 | "prepublishOnly": "npm test && npm run lint", 25 | "preversion": "npm run lint", 26 | "version": "npm run format" 27 | }, 28 | "keywords": [], 29 | "author": "Amazon Web Services", 30 | "contributors": [ 31 | "Jared Nance ", 32 | "Mark Kuhn " 33 | ], 34 | "license": "Apache-2.0", 35 | "dependencies": { 36 | "@datastructures-js/heap": "^4.0.2" 37 | }, 38 | "devDependencies": { 39 | "@aws-sdk/client-cloudwatch": "^3.437.0", 40 | "@faker-js/faker": "^7.5.0", 41 | "@types/faker": "^4.1.5", 42 | "@types/jest": "^26.0.22", 43 | "@types/node": "^12.0.8", 44 | "@typescript-eslint/eslint-plugin": "^5.36.1", 45 | "@typescript-eslint/parser": "^5.36.1", 46 | "eslint": "^8.23.0", 47 | "eslint-config-prettier": "^8.5.0", 48 | "eslint-plugin-prettier": "^4.2.1", 49 | "jest": "^29.7.0", 50 | "node-notifier": ">=8.0.1", 51 | "npm-pack-zip": "^1.3.0", 52 | "prettier": "^2.7.1", 53 | "ts-jest": "^29.1.5", 54 | "typescript": "^4.8.2", 55 | "y18n": ">=4.0.1" 56 | }, 57 | "files": [ 58 | "lib/**/*" 59 | ], 60 | "volta": { 61 | "node": "16.16.0" 62 | } 63 | } 64 | -------------------------------------------------------------------------------- /src/Constants.ts: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2019 Amazon.com, Inc. or its affiliates. 
3 | * Licensed under the Apache License, Version 2.0 (the 4 | * "License"); you may not use this file except in compliance 5 | * with the License. You may obtain a copy of the License at 6 | * 7 | * http://www.apache.org/licenses/LICENSE-2.0 8 | * 9 | * Unless required by applicable law or agreed to in writing, software 10 | * distributed under the License is distributed on an "AS IS" BASIS, 11 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | * See the License for the specific language governing permissions and 13 | * limitations under the License. 14 | */ 15 | 16 | export enum Constants { 17 | MAX_DIMENSION_SET_SIZE = 30, 18 | MAX_DIMENSION_NAME_LENGTH = 250, 19 | MAX_DIMENSION_VALUE_LENGTH = 1024, 20 | MAX_METRIC_NAME_LENGTH = 1024, 21 | MAX_NAMESPACE_LENGTH = 256, 22 | VALID_NAMESPACE_REGEX = '^[a-zA-Z0-9._#:/-]+$', 23 | VALID_DIMENSION_REGEX = '^[\x00-\x7F]+$', 24 | MAX_TIMESTAMP_PAST_AGE = 1209600000, // 2 weeks 25 | MAX_TIMESTAMP_FUTURE_AGE = 7200000, // 2 hours 26 | 27 | DEFAULT_NAMESPACE = 'aws-embedded-metrics', 28 | MAX_METRICS_PER_EVENT = 100, 29 | MAX_VALUES_PER_METRIC = 100, 30 | DEFAULT_AGENT_HOST = '0.0.0.0', 31 | DEFAULT_AGENT_PORT = 25888, 32 | } 33 | -------------------------------------------------------------------------------- /src/config/Configuration.ts: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2019 Amazon.com, Inc. or its affiliates. 3 | * Licensed under the Apache License, Version 2.0 (the 4 | * "License"); you may not use this file except in compliance 5 | * with the License. You may obtain a copy of the License at 6 | * 7 | * http://www.apache.org/licenses/LICENSE-2.0 8 | * 9 | * Unless required by applicable law or agreed to in writing, software 10 | * distributed under the License is distributed on an "AS IS" BASIS, 11 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | * See the License for the specific language governing permissions and 13 | * limitations under the License. 14 | */ 15 | 16 | import { EnvironmentConfigurationProvider } from './EnvironmentConfigurationProvider'; 17 | const Configuration = new EnvironmentConfigurationProvider().getConfiguration(); 18 | export default Configuration; 19 | -------------------------------------------------------------------------------- /src/config/EnvironmentConfigurationProvider.ts: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2019 Amazon.com, Inc. or its affiliates. 3 | * Licensed under the Apache License, Version 2.0 (the 4 | * "License"); you may not use this file except in compliance 5 | * with the License. You may obtain a copy of the License at 6 | * 7 | * http://www.apache.org/licenses/LICENSE-2.0 8 | * 9 | * Unless required by applicable law or agreed to in writing, software 10 | * distributed under the License is distributed on an "AS IS" BASIS, 11 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | * See the License for the specific language governing permissions and 13 | * limitations under the License. 
14 | */ 15 | 16 | import { IConfiguration } from './IConfiguration'; 17 | import { Constants } from '../Constants'; 18 | import Environments from '../environment/Environments'; 19 | 20 | const ENV_VAR_PREFIX = 'AWS_EMF'; 21 | 22 | enum ConfigKeys { 23 | LOG_GROUP_NAME = 'LOG_GROUP_NAME', 24 | LOG_STREAM_NAME = 'LOG_STREAM_NAME', 25 | ENABLE_DEBUG_LOGGING = 'ENABLE_DEBUG_LOGGING', 26 | SERVICE_NAME = 'SERVICE_NAME', 27 | SERVICE_TYPE = 'SERVICE_TYPE', 28 | AGENT_ENDPOINT = 'AGENT_ENDPOINT', 29 | ENVIRONMENT_OVERRIDE = 'ENVIRONMENT', 30 | NAMESPACE = 'NAMESPACE', 31 | } 32 | 33 | export class EnvironmentConfigurationProvider { 34 | public getConfiguration(): IConfiguration { 35 | return { 36 | agentEndpoint: this.getEnvVariable(ConfigKeys.AGENT_ENDPOINT), 37 | debuggingLoggingEnabled: this.tryGetEnvVariableAsBoolean(ConfigKeys.ENABLE_DEBUG_LOGGING, false), 38 | logGroupName: this.getEnvVariable(ConfigKeys.LOG_GROUP_NAME), 39 | logStreamName: this.getEnvVariable(ConfigKeys.LOG_STREAM_NAME), 40 | serviceName: 41 | this.getEnvVariable(ConfigKeys.SERVICE_NAME) || this.getEnvVariableWithoutPrefix(ConfigKeys.SERVICE_NAME), 42 | serviceType: 43 | this.getEnvVariable(ConfigKeys.SERVICE_TYPE) || this.getEnvVariableWithoutPrefix(ConfigKeys.SERVICE_TYPE), 44 | environmentOverride: this.getEnvironmentOverride(), 45 | namespace: this.getEnvVariable(ConfigKeys.NAMESPACE) || Constants.DEFAULT_NAMESPACE, 46 | }; 47 | } 48 | 49 | private getEnvVariableWithoutPrefix(configKey: string): string | undefined { 50 | return process.env[configKey]; 51 | } 52 | 53 | private getEnvVariable(configKey: string): string | undefined { 54 | return process.env[`${ENV_VAR_PREFIX}_${configKey}`]; 55 | } 56 | 57 | private tryGetEnvVariableAsBoolean(configKey: string, fallback: boolean): boolean { 58 | const configValue = this.getEnvVariable(configKey); 59 | return !configValue ? fallback : configValue.toLowerCase() === 'true'; 60 | } 61 | 62 | getEnvironmentOverride(): Environments { 63 | const overrideValue = this.getEnvVariable(ConfigKeys.ENVIRONMENT_OVERRIDE); 64 | const environment = Environments[overrideValue as keyof typeof Environments]; 65 | if (environment === undefined) { 66 | return Environments.Unknown; 67 | } 68 | return environment; 69 | } 70 | } 71 | -------------------------------------------------------------------------------- /src/config/IConfiguration.ts: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2019 Amazon.com, Inc. or its affiliates. 3 | * Licensed under the Apache License, Version 2.0 (the 4 | * "License"); you may not use this file except in compliance 5 | * with the License. You may obtain a copy of the License at 6 | * 7 | * http://www.apache.org/licenses/LICENSE-2.0 8 | * 9 | * Unless required by applicable law or agreed to in writing, software 10 | * distributed under the License is distributed on an "AS IS" BASIS, 11 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | * See the License for the specific language governing permissions and 13 | * limitations under the License. 14 | */ 15 | 16 | import Environments from '../environment/Environments'; 17 | 18 | export interface IConfiguration { 19 | /** 20 | * Whether or not internal logging should be enabled. 21 | */ 22 | debuggingLoggingEnabled: boolean; 23 | 24 | /** 25 | * The name of the service to use in the default dimensions. 26 | */ 27 | serviceName: string | undefined; 28 | 29 | /** 30 | * The type of the service to use in the default dimensions. 
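 * e.g. 'AWS::ECS::Container'. When unset, the detected environment supplies a value, falling back to 'Unknown'.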
31 | */ 32 | serviceType: string | undefined; 33 | 34 | /** 35 | * The LogGroup name to use. This will be ignored when using the 36 | * Lambda scope. 37 | */ 38 | logGroupName: string | undefined; 39 | 40 | /** 41 | * The LogStream name to use. This will be ignored when using the 42 | * Lambda scope. 43 | */ 44 | logStreamName: string | undefined; 45 | 46 | /** 47 | * The endpoint to use to connect to the CloudWatch Agent 48 | */ 49 | agentEndpoint: string | undefined; 50 | 51 | /** 52 | * Environment override. This will short circuit auto-environment detection. 53 | * Valid values include: 54 | * - Local: no decoration and sends over stdout 55 | * - Lambda: decorates logs with Lambda metadata and sends over stdout 56 | * - Agent: no decoration and sends over TCP 57 | * - EC2: decorates logs with EC2 metadata and sends over TCP 58 | */ 59 | environmentOverride: Environments | undefined; 60 | 61 | /** 62 | * Sets the CloudWatch namespace that extracted metrics should be published to. 63 | */ 64 | namespace: string; 65 | } 66 | -------------------------------------------------------------------------------- /src/config/__tests__/EnvironmentConfigurationProvider.test.ts: -------------------------------------------------------------------------------- 1 | import { faker } from '@faker-js/faker'; 2 | import Environments from '../../environment/Environments'; 3 | 4 | beforeEach(() => { 5 | jest.resetModules(); 6 | }); 7 | 8 | const getConfig = () => require('../Configuration').default; 9 | 10 | test('can set LogGroup name from environment', () => { 11 | // arrange 12 | const expectedValue = faker.random.word(); 13 | process.env.AWS_EMF_LOG_GROUP_NAME = expectedValue; 14 | 15 | // act 16 | const config = getConfig(); 17 | 18 | // assert 19 | const result = config.logGroupName; 20 | expect(result).toBe(expectedValue); 21 | }); 22 | 23 | test('can set LogStream name from environment', () => { 24 | // arrange 25 | const expectedValue = faker.random.word(); 26 | process.env.AWS_EMF_LOG_STREAM_NAME = expectedValue; 27 | 28 | // act 29 | const config = getConfig(); 30 | 31 | // assert 32 | const result = config.logStreamName; 33 | expect(result).toBe(expectedValue); 34 | }); 35 | 36 | test('can enable debug logging from environment', () => { 37 | // arrange 38 | const expectedValue = true; 39 | process.env.AWS_EMF_ENABLE_DEBUG_LOGGING = expectedValue.toString(); 40 | 41 | // act 42 | const config = getConfig(); 43 | 44 | // assert 45 | const result = config.debuggingLoggingEnabled; 46 | expect(result).toBe(expectedValue); 47 | }); 48 | 49 | test('can set ServiceName from environment', () => { 50 | // arrange 51 | const expectedValue = faker.random.word(); 52 | process.env.AWS_EMF_SERVICE_NAME = expectedValue; 53 | delete process.env.SERVICE_NAME; 54 | 55 | // act 56 | const config = getConfig(); 57 | 58 | // assert 59 | const result = config.serviceName; 60 | expect(result).toBe(expectedValue); 61 | }); 62 | 63 | test('can set ServiceName from environment w/o prefix', () => { 64 | // arrange 65 | const expectedValue = faker.random.word(); 66 | delete process.env.AWS_EMF_SERVICE_NAME; 67 | process.env.SERVICE_NAME = expectedValue; 68 | 69 | // act 70 | const config = getConfig(); 71 | 72 | // assert 73 | const result = config.serviceName; 74 | expect(result).toBe(expectedValue); 75 | }); 76 | 77 | test('ServiceName w/ prefix takes precendence over w/o prefix', () => { 78 | // arrange 79 | const expectedValue = faker.random.word(); 80 | process.env.AWS_EMF_SERVICE_NAME = expectedValue; 81 | 
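// the un-prefixed SERVICE_NAME set below should be ignored in favor of the AWS_EMF_-prefixed value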
process.env.SERVICE_NAME = faker.random.word(); 82 | 83 | // act 84 | const config = getConfig(); 85 | 86 | // assert 87 | const result = config.serviceName; 88 | expect(result).toBe(expectedValue); 89 | }); 90 | 91 | test('can manually set ServiceName', () => { 92 | // arrange 93 | const expectedValue = faker.random.word(); 94 | process.env.AWS_EMF_SERVICE_NAME = faker.random.word(); 95 | process.env.SERVICE_NAME = faker.random.word(); 96 | const config = getConfig(); 97 | 98 | // act 99 | config.serviceName = expectedValue; 100 | 101 | // assert 102 | const result = config.serviceName; 103 | expect(result).toBe(expectedValue); 104 | }); 105 | 106 | test('can set ServiceType from environment', () => { 107 | // arrange 108 | const expectedValue = faker.random.word(); 109 | process.env.AWS_EMF_SERVICE_TYPE = expectedValue; 110 | delete process.env.SERVICE_TYPE; 111 | 112 | // act 113 | const config = getConfig(); 114 | 115 | // assert 116 | const result = config.serviceType; 117 | expect(result).toBe(expectedValue); 118 | }); 119 | 120 | test('can set ServiceType from environment w/o prefix', () => { 121 | // arrange 122 | const expectedValue = faker.random.word(); 123 | delete process.env.AWS_EMF_SERVICE_TYPE; 124 | process.env.SERVICE_TYPE = expectedValue; 125 | 126 | // act 127 | const config = getConfig(); 128 | 129 | // assert 130 | const result = config.serviceType; 131 | expect(result).toBe(expectedValue); 132 | }); 133 | 134 | test('ServiceType w/ prefix takes precendence over w/o prefix', () => { 135 | // arrange 136 | const expectedValue = faker.random.word(); 137 | process.env.AWS_EMF_SERVICE_TYPE = expectedValue; 138 | process.env.SERVICE_TYPE = faker.random.word(); 139 | 140 | // act 141 | const config = getConfig(); 142 | 143 | // assert 144 | const result = config.serviceType; 145 | expect(result).toBe(expectedValue); 146 | }); 147 | 148 | test('can manually set ServiceType', () => { 149 | // arrange 150 | const expectedValue = faker.random.word(); 151 | process.env.AWS_EMF_SERVICE_TYPE = faker.random.word(); 152 | process.env.SERVICE_TYPE = faker.random.word(); 153 | const config = getConfig(); 154 | 155 | // act 156 | config.serviceType = expectedValue; 157 | 158 | // assert 159 | const result = config.serviceType; 160 | expect(result).toBe(expectedValue); 161 | }); 162 | 163 | test('can set agent endpoint from environment', () => { 164 | // arrange 165 | const expectedValue = faker.internet.url(); 166 | process.env.AWS_EMF_AGENT_ENDPOINT = expectedValue; 167 | 168 | // act 169 | const config = getConfig(); 170 | 171 | // assert 172 | const result = config.agentEndpoint; 173 | expect(result).toBe(expectedValue); 174 | }); 175 | 176 | test('can set environment override from environment', () => { 177 | // arrange 178 | const expectedValue = 'Local'; 179 | process.env.AWS_EMF_ENVIRONMENT = expectedValue; 180 | 181 | // act 182 | const config = getConfig(); 183 | 184 | // assert 185 | const result = config.environmentOverride; 186 | expect(result).toBe(Environments.Local); 187 | }); 188 | 189 | test('if environment override is not set, default to unknown', () => { 190 | // arrange 191 | process.env.AWS_EMF_ENVIRONMENT = ''; 192 | // act 193 | const config = getConfig(); 194 | 195 | // assert 196 | const result = config.environmentOverride; 197 | expect(result).toBe(Environments.Unknown); 198 | }); 199 | 200 | test('if environment override cannot be parsed, default to unknown', () => { 201 | // arrange 202 | process.env.AWS_EMF_ENVIRONMENT = faker.random.alphaNumeric(); 203 | // 
act 204 | const config = getConfig(); 205 | 206 | // assert 207 | const result = config.environmentOverride; 208 | expect(result).toBe(Environments.Unknown); 209 | }); 210 | 211 | test('namespace defaults to aws-embedded-metrics', () => { 212 | // act 213 | const config = getConfig(); 214 | 215 | // assert 216 | const result = config.namespace; 217 | expect(result).toBe('aws-embedded-metrics'); 218 | }); 219 | 220 | test('can set namespace from environment', () => { 221 | // arrange 222 | const expectedValue = faker.random.word(); 223 | process.env.AWS_EMF_NAMESPACE = expectedValue; 224 | 225 | // act 226 | const config = getConfig(); 227 | 228 | // assert 229 | const result = config.namespace; 230 | expect(result).toBe(expectedValue); 231 | }); 232 | 233 | test('can manually set namespace', () => { 234 | // arrange 235 | const expectedValue = faker.random.word(); 236 | process.env.AWS_EMF_NAMESPACE = faker.random.word(); 237 | const config = getConfig(); 238 | 239 | // act 240 | config.namespace = expectedValue; 241 | 242 | // assert 243 | const result = config.namespace; 244 | expect(result).toBe(expectedValue); 245 | }); 246 | -------------------------------------------------------------------------------- /src/environment/DefaultEnvironment.ts: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2019 Amazon.com, Inc. or its affiliates. 3 | * Licensed under the Apache License, Version 2.0 (the 4 | * "License"); you may not use this file except in compliance 5 | * with the License. You may obtain a copy of the License at 6 | * 7 | * http://www.apache.org/licenses/LICENSE-2.0 8 | * 9 | * Unless required by applicable law or agreed to in writing, software 10 | * distributed under the License is distributed on an "AS IS" BASIS, 11 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | * See the License for the specific language governing permissions and 13 | * limitations under the License. 14 | */ 15 | 16 | import config from '../config/Configuration'; 17 | import { AgentSink } from '../sinks/AgentSink'; 18 | import { ISink } from '../sinks/Sink'; 19 | import { LOG } from '../utils/Logger'; 20 | import { IEnvironment } from './IEnvironment'; 21 | 22 | export class DefaultEnvironment implements IEnvironment { 23 | private sink: ISink | undefined; 24 | 25 | public probe(): Promise { 26 | return Promise.resolve(true); 27 | } 28 | 29 | public getName(): string { 30 | if (!config.serviceName) { 31 | LOG('Unknown ServiceName.'); 32 | return 'Unknown'; 33 | } 34 | return config.serviceName; 35 | } 36 | 37 | public getType(): string { 38 | if (!config.serviceType) { 39 | LOG('Unknown ServiceType.'); 40 | return 'Unknown'; 41 | } 42 | return config.serviceType; 43 | } 44 | 45 | public getLogGroupName(): string { 46 | // if the caller explicitly overrides logGroupName to 47 | // be empty, we should honor that rather than providing 48 | // the default behavior. 49 | if (config.logGroupName === '') { 50 | return ''; 51 | } 52 | return config.logGroupName ? 
config.logGroupName : `${this.getName()}-metrics`; 53 | } 54 | 55 | public configureContext(): void { 56 | // no-op 57 | } 58 | 59 | public getSink(): ISink { 60 | if (!this.sink) { 61 | this.sink = new AgentSink(this.getLogGroupName(), config.logStreamName); 62 | } 63 | return this.sink; 64 | } 65 | } 66 | -------------------------------------------------------------------------------- /src/environment/EC2Environment.ts: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2019 Amazon.com, Inc. or its affiliates. 3 | * Licensed under the Apache License, Version 2.0 (the 4 | * "License"); you may not use this file except in compliance 5 | * with the License. You may obtain a copy of the License at 6 | * 7 | * http://www.apache.org/licenses/LICENSE-2.0 8 | * 9 | * Unless required by applicable law or agreed to in writing, software 10 | * distributed under the License is distributed on an "AS IS" BASIS, 11 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | * See the License for the specific language governing permissions and 13 | * limitations under the License. 14 | */ 15 | 16 | import config from '../config/Configuration'; 17 | import { MetricsContext } from '../logger/MetricsContext'; 18 | import { AgentSink } from '../sinks/AgentSink'; 19 | import { ISink } from '../sinks/Sink'; 20 | import { fetchJSON, fetchString } from '../utils/Fetch'; 21 | import { LOG } from '../utils/Logger'; 22 | import { IEnvironment } from './IEnvironment'; 23 | import { RequestOptions } from 'http'; 24 | 25 | // Documentation for configuring instance metadata can be found here: 26 | // https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/configuring-instance-metadata-service.html 27 | const host = '169.254.169.254'; 28 | const tokenPath = '/latest/api/token'; 29 | const tokenRequestHeaderKey = 'X-aws-ec2-metadata-token-ttl-seconds'; 30 | const tokenRequestHeaderValue = '21600'; 31 | const metadataPath = '/latest/dynamic/instance-identity/document'; 32 | const metadataRequestTokenHeaderKey = 'X-aws-ec2-metadata-token'; 33 | 34 | interface IEC2MetadataResponse { 35 | imageId: string; 36 | availabilityZone: string; 37 | privateIp: string; 38 | instanceId: string; 39 | instanceType: string; 40 | } 41 | 42 | export class EC2Environment implements IEnvironment { 43 | private metadata: IEC2MetadataResponse | undefined; 44 | private sink: ISink | undefined; 45 | private token: string | undefined; 46 | 47 | public async probe(): Promise { 48 | try { 49 | const options: RequestOptions = { 50 | host, 51 | path: tokenPath, 52 | method: 'PUT', 53 | headers: { [tokenRequestHeaderKey]: tokenRequestHeaderValue }, 54 | }; 55 | this.token = await fetchString(options); 56 | } catch (e) { 57 | LOG(e); 58 | return false; 59 | } 60 | 61 | try { 62 | const metadataOptions: RequestOptions = { 63 | host, 64 | path: metadataPath, 65 | method: 'GET', 66 | headers: { [metadataRequestTokenHeaderKey]: this.token }, 67 | }; 68 | this.metadata = await fetchJSON(metadataOptions); 69 | return !!this.metadata; 70 | } catch (e) { 71 | LOG(e); 72 | return false; 73 | } 74 | } 75 | 76 | public getName(): string { 77 | if (!config.serviceName) { 78 | LOG('Unknown ServiceName.'); 79 | return 'Unknown'; 80 | } 81 | return config.serviceName; 82 | } 83 | 84 | public getType(): string { 85 | if (this.metadata) { 86 | return 'AWS::EC2::Instance'; 87 | } 88 | 89 | // this will only happen if probe() is not called first 90 | return 'Unknown'; 91 | } 92 | 93 | public 
getLogGroupName(): string { 94 | return config.logGroupName ? config.logGroupName : `${this.getName()}-metrics`; 95 | } 96 | 97 | public configureContext(context: MetricsContext): void { 98 | if (this.metadata) { 99 | context.setProperty('imageId', this.metadata.imageId); 100 | context.setProperty('instanceId', this.metadata.instanceId); 101 | context.setProperty('instanceType', this.metadata.instanceType); 102 | context.setProperty('privateIP', this.metadata.privateIp); 103 | context.setProperty('availabilityZone', this.metadata.availabilityZone); 104 | } 105 | } 106 | 107 | public getSink(): ISink { 108 | if (!this.sink) { 109 | this.sink = new AgentSink(this.getLogGroupName(), config.logStreamName); 110 | } 111 | return this.sink; 112 | } 113 | } 114 | -------------------------------------------------------------------------------- /src/environment/ECSEnvironment.ts: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2019 Amazon.com, Inc. or its affiliates. 3 | * Licensed under the Apache License, Version 2.0 (the 4 | * "License"); you may not use this file except in compliance 5 | * with the License. You may obtain a copy of the License at 6 | * 7 | * http://www.apache.org/licenses/LICENSE-2.0 8 | * 9 | * Unless required by applicable law or agreed to in writing, software 10 | * distributed under the License is distributed on an "AS IS" BASIS, 11 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | * See the License for the specific language governing permissions and 13 | * limitations under the License. 14 | */ 15 | 16 | import config from '../config/Configuration'; 17 | import { MetricsContext } from '../logger/MetricsContext'; 18 | import { AgentSink } from '../sinks/AgentSink'; 19 | import { ISink } from '../sinks/Sink'; 20 | import { IEnvironment } from './IEnvironment'; 21 | import { fetchJSON } from '../utils/Fetch'; 22 | import { LOG } from '../utils/Logger'; 23 | import * as os from 'os'; 24 | import { Constants } from '../Constants'; 25 | 26 | interface IECSMetadataResponse { 27 | Name: string; 28 | DockerId: string; 29 | DockerName: string; 30 | Image: string; 31 | FormattedImageName: string; 32 | ImageID: string; 33 | Ports: string; 34 | Labels: IECSMetadataLabels; 35 | CreatedAt: string; 36 | StartedAt: string; 37 | Networks: IECSMetadataNetworks[]; 38 | } 39 | 40 | interface IECSMetadataLabels { 41 | 'com.amazonaws.ecs.cluster': string; 42 | 'com.amazonaws.ecs.container-name': string; 43 | 'com.amazonaws.ecs.task-arn': string; 44 | 'com.amazonaws.ecs.task-definition-family': string; 45 | 'com.amazonaws.ecs.task-definition-version': string; 46 | } 47 | 48 | interface IECSMetadataNetworks { 49 | NetworkMode: string; 50 | IPv4Addresses: string[]; 51 | } 52 | 53 | // formats image names into something more readable for a metric name 54 | // e.g. 
.dkr.ecr..amazonaws.com/:latest -> :latest 55 | const formatImageName = (imageName: string): string => { 56 | if (imageName) { 57 | const splitImageName = imageName.split('/'); 58 | return splitImageName[splitImageName.length - 1]; 59 | } 60 | return imageName; 61 | }; 62 | 63 | export class ECSEnvironment implements IEnvironment { 64 | private sink: ISink | undefined; 65 | private metadata: IECSMetadataResponse | undefined; 66 | private fluentBitEndpoint: string | undefined; 67 | 68 | public async probe(): Promise { 69 | if (!process.env.ECS_CONTAINER_METADATA_URI) { 70 | return Promise.resolve(false); 71 | } 72 | 73 | if (process.env.FLUENT_HOST && !config.agentEndpoint) { 74 | this.fluentBitEndpoint = `tcp://${process.env.FLUENT_HOST}:${Constants.DEFAULT_AGENT_PORT}`; 75 | config.agentEndpoint = this.fluentBitEndpoint; 76 | LOG(`Using FluentBit configuration. Endpoint: ${this.fluentBitEndpoint}`); 77 | } 78 | 79 | try { 80 | const options = new URL(process.env.ECS_CONTAINER_METADATA_URI); 81 | this.metadata = await fetchJSON(options); 82 | if (this.metadata) { 83 | this.metadata.FormattedImageName = formatImageName(this.metadata.Image); 84 | LOG(`Successfully collected ECS Container metadata.`); 85 | } 86 | } catch (e) { 87 | LOG('Failed to collect ECS Container Metadata.'); 88 | LOG(e); 89 | } 90 | 91 | // return true regardless of whether or not metadata collection 92 | // succeeded. we know that this is supposed to be an ECS environment 93 | // just from the environment variable 94 | return true; 95 | } 96 | 97 | public getName(): string { 98 | if (config.serviceName) { 99 | return config.serviceName; 100 | } 101 | 102 | return this.metadata?.FormattedImageName ? this.metadata.FormattedImageName : 'Unknown'; 103 | } 104 | 105 | public getType(): string { 106 | return 'AWS::ECS::Container'; 107 | } 108 | 109 | public getLogGroupName(): string { 110 | // FireLens / fluent-bit does not need the log group to be included 111 | // since configuration of the LogGroup is handled by the 112 | // fluent bit config file 113 | if (this.fluentBitEndpoint) { 114 | return ''; 115 | } 116 | 117 | return config.logGroupName || this.getName(); 118 | } 119 | 120 | public configureContext(context: MetricsContext): void { 121 | this.addProperty(context, 'containerId', os.hostname()); 122 | this.addProperty(context, 'createdAt', this.metadata?.CreatedAt); 123 | this.addProperty(context, 'startedAt', this.metadata?.StartedAt); 124 | this.addProperty(context, 'image', this.metadata?.Image); 125 | this.addProperty(context, 'cluster', this.metadata?.Labels['com.amazonaws.ecs.cluster']); 126 | this.addProperty(context, 'taskArn', this.metadata?.Labels['com.amazonaws.ecs.task-arn']); 127 | 128 | // we override the standard default dimensions here because in the 129 | // FireLens / fluent-bit case, we don't need the LogGroup 130 | if (this.fluentBitEndpoint) { 131 | context.setDefaultDimensions({ 132 | ServiceName: config.serviceName || this.getName(), 133 | ServiceType: config.serviceType || this.getType(), 134 | }); 135 | } 136 | } 137 | 138 | public getSink(): ISink { 139 | if (!this.sink) { 140 | const logGroupName = this.fluentBitEndpoint ? 
'' : this.getLogGroupName(); 141 | this.sink = new AgentSink(logGroupName); 142 | } 143 | return this.sink; 144 | } 145 | 146 | private addProperty(context: MetricsContext, key: string, value: string | undefined): void { 147 | if (value) { 148 | context.setProperty(key, value); 149 | } 150 | } 151 | } 152 | -------------------------------------------------------------------------------- /src/environment/EnvironmentDetector.ts: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2019 Amazon.com, Inc. or its affiliates. 3 | * Licensed under the Apache License, Version 2.0 (the 4 | * "License"); you may not use this file except in compliance 5 | * with the License. You may obtain a copy of the License at 6 | * 7 | * http://www.apache.org/licenses/LICENSE-2.0 8 | * 9 | * Unless required by applicable law or agreed to in writing, software 10 | * distributed under the License is distributed on an "AS IS" BASIS, 11 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | * See the License for the specific language governing permissions and 13 | * limitations under the License. 14 | */ 15 | 16 | import { LOG } from '../utils/Logger'; 17 | import { DefaultEnvironment } from './DefaultEnvironment'; 18 | import { ECSEnvironment } from './ECSEnvironment'; 19 | import { EC2Environment } from './EC2Environment'; 20 | import { LambdaEnvironment } from './LambdaEnvironment'; 21 | import { IEnvironment } from './IEnvironment'; 22 | import config from '../config/Configuration'; 23 | import Environments from './Environments'; 24 | import { LocalEnvironment } from './LocalEnvironment'; 25 | 26 | type EnvironmentProvider = () => Promise; 27 | 28 | const lambdaEnvironment = new LambdaEnvironment(); 29 | const ecsEnvironment = new ECSEnvironment(); 30 | const ec2Environment = new EC2Environment(); 31 | const defaultEnvironment = new DefaultEnvironment(); 32 | 33 | // ordering of this array matters 34 | // both Lambda and ECS can be determined from environment variables 35 | // making the entire detection process fast and cheap 36 | // EC2 can only be determined by making a remote HTTP request 37 | const environments = [lambdaEnvironment, ecsEnvironment, ec2Environment]; 38 | 39 | let environment: IEnvironment | undefined = undefined; 40 | 41 | const getEnvironmentFromOverride = (): IEnvironment | undefined => { 42 | // short-circuit environment detection and use override 43 | switch (config.environmentOverride) { 44 | case Environments.Agent: 45 | return defaultEnvironment; 46 | case Environments.EC2: 47 | return ec2Environment; 48 | case Environments.Lambda: 49 | return lambdaEnvironment; 50 | case Environments.ECS: 51 | return ecsEnvironment; 52 | case Environments.Local: 53 | return new LocalEnvironment(); 54 | case Environments.Unknown: 55 | default: 56 | return undefined; 57 | } 58 | }; 59 | 60 | const discoverEnvironment = async (): Promise => { 61 | LOG(`Discovering environment`); 62 | for (const envUnderTest of environments) { 63 | LOG(`Testing: ${envUnderTest.constructor.name}`); 64 | 65 | try { 66 | if (await envUnderTest.probe()) { 67 | return envUnderTest; 68 | } 69 | } catch (e) { 70 | LOG(`Failed probe: ${envUnderTest.constructor.name}`); 71 | } 72 | } 73 | return defaultEnvironment; 74 | }; 75 | 76 | const _resolveEnvironment: EnvironmentProvider = async (): Promise => { 77 | LOG('Resolving environment'); 78 | if (environment) { 79 | return environment; 80 | } 81 | 82 | if (config.environmentOverride) { 83 |
LOG('Environment override supplied', config.environmentOverride); 84 | // this will be falsy if an invalid configuration value is provided 85 | environment = getEnvironmentFromOverride(); 86 | if (environment) { 87 | return environment; 88 | } else { 89 | LOG('Invalid environment provided. Falling back to auto-discovery.', config.environmentOverride); 90 | } 91 | } 92 | 93 | environment = await discoverEnvironment(); // eslint-disable-line require-atomic-updates 94 | return environment; 95 | }; 96 | 97 | // pro-actively begin resolving the environment 98 | // this will allow us to kick off any async tasks 99 | // at module load time to reduce any blocking that 100 | // may occur on the initial flush() 101 | const environmentPromise = _resolveEnvironment(); 102 | const resolveEnvironment: EnvironmentProvider = async (): Promise => { 103 | return environmentPromise; 104 | }; 105 | 106 | // this method is used for testing to bypass the cached environmentPromise result 107 | const cleanResolveEnvironment = async (): Promise => { 108 | await environmentPromise; 109 | environment = undefined; 110 | return await _resolveEnvironment(); 111 | }; 112 | 113 | export { EnvironmentProvider, resolveEnvironment, cleanResolveEnvironment }; 114 | -------------------------------------------------------------------------------- /src/environment/Environments.ts: -------------------------------------------------------------------------------- 1 | enum Environments { 2 | Local = 'Local', 3 | Lambda = 'Lambda', 4 | Agent = 'Agent', 5 | EC2 = 'EC2', 6 | ECS = 'ECS', 7 | Unknown = '', 8 | } 9 | 10 | export default Environments; 11 | -------------------------------------------------------------------------------- /src/environment/IEnvironment.ts: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2019 Amazon.com, Inc. or its affiliates. 3 | * Licensed under the Apache License, Version 2.0 (the 4 | * "License"); you may not use this file except in compliance 5 | * with the License. You may obtain a copy of the License at 6 | * 7 | * http://www.apache.org/licenses/LICENSE-2.0 8 | * 9 | * Unless required by applicable law or agreed to in writing, software 10 | * distributed under the License is distributed on an "AS IS" BASIS, 11 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | * See the License for the specific language governing permissions and 13 | * limitations under the License. 14 | */ 15 | 16 | import { MetricsContext } from '../logger/MetricsContext'; 17 | import { ISink } from '../sinks/Sink'; 18 | 19 | /** 20 | * A runtime environment (e.g. Lambda, EKS, ECS, EC2) 21 | */ 22 | export interface IEnvironment { 23 | /** 24 | * Determines whether or not we are executing in this environment 25 | */ 26 | probe(): Promise; 27 | 28 | /** 29 | * Get the environment name. This will be used to set the ServiceName dimension. 30 | */ 31 | getName(): string; 32 | 33 | /** 34 | * Get the environment type. This will be used to set the ServiceType dimension. 35 | */ 36 | getType(): string; 37 | 38 | /** 39 | * Get log group name. This will be used to set the LogGroup dimension. 40 | */ 41 | getLogGroupName(): string; 42 | 43 | /** 44 | * Configure the context with environment properties. 45 | * 46 | * @param context 47 | */ 48 | configureContext(context: MetricsContext): void; 49 | 50 | /** 51 | * Create the appropriate sink for this environment. 
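 * e.g. an AgentSink (sends over TCP/UDP to the CloudWatch Agent) or a ConsoleSink (writes to stdout).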
52 | */ 53 | getSink(): ISink; 54 | } 55 | -------------------------------------------------------------------------------- /src/environment/LambdaEnvironment.ts: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2019 Amazon.com, Inc. or its affiliates. 3 | * Licensed under the Apache License, Version 2.0 (the 4 | * "License"); you may not use this file except in compliance 5 | * with the License. You may obtain a copy of the License at 6 | * 7 | * http://www.apache.org/licenses/LICENSE-2.0 8 | * 9 | * Unless required by applicable law or agreed to in writing, software 10 | * distributed under the License is distributed on an "AS IS" BASIS, 11 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | * See the License for the specific language governing permissions and 13 | * limitations under the License. 14 | */ 15 | 16 | import { MetricsContext } from '../logger/MetricsContext'; 17 | import { ConsoleSink } from '../sinks/ConsoleSink'; 18 | import { ISink } from '../sinks/Sink'; 19 | import { IEnvironment } from './IEnvironment'; 20 | 21 | export class LambdaEnvironment implements IEnvironment { 22 | private sink: ISink | undefined; 23 | 24 | public probe(): Promise { 25 | return Promise.resolve(process.env.AWS_LAMBDA_FUNCTION_NAME ? true : false); 26 | } 27 | 28 | public getName(): string { 29 | return process.env.AWS_LAMBDA_FUNCTION_NAME || 'Unknown'; 30 | } 31 | 32 | public getType(): string { 33 | return 'AWS::Lambda::Function'; 34 | } 35 | 36 | public getLogGroupName(): string { 37 | return this.getName(); 38 | } 39 | 40 | public configureContext(context: MetricsContext): void { 41 | this.addProperty(context, 'executionEnvironment', process.env.AWS_EXECUTION_ENV); 42 | this.addProperty(context, 'memorySize', process.env.AWS_LAMBDA_FUNCTION_MEMORY_SIZE); 43 | this.addProperty(context, 'functionVersion', process.env.AWS_LAMBDA_FUNCTION_VERSION); 44 | this.addProperty(context, 'logStreamId', process.env.AWS_LAMBDA_LOG_STREAM_NAME); 45 | 46 | const trace = this.getSampledTrace(); 47 | if (trace) { 48 | this.addProperty(context, 'traceId', trace); 49 | } 50 | } 51 | 52 | public getSink(): ISink { 53 | if (!this.sink) { 54 | this.sink = new ConsoleSink(); 55 | } 56 | return this.sink; 57 | } 58 | 59 | private addProperty(context: MetricsContext, key: string, value: string | undefined): void { 60 | if (value) { 61 | context.setProperty(key, value); 62 | } 63 | } 64 | 65 | private getSampledTrace(): string | void { 66 | // only collect traces which have been sampled 67 | if (process.env._X_AMZN_TRACE_ID && process.env._X_AMZN_TRACE_ID.includes('Sampled=1')) { 68 | return process.env._X_AMZN_TRACE_ID; 69 | } 70 | } 71 | } 72 | -------------------------------------------------------------------------------- /src/environment/LocalEnvironment.ts: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2019 Amazon.com, Inc. or its affiliates. 3 | * Licensed under the Apache License, Version 2.0 (the 4 | * "License"); you may not use this file except in compliance 5 | * with the License. You may obtain a copy of the License at 6 | * 7 | * http://www.apache.org/licenses/LICENSE-2.0 8 | * 9 | * Unless required by applicable law or agreed to in writing, software 10 | * distributed under the License is distributed on an "AS IS" BASIS, 11 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
12 | * See the License for the specific language governing permissions and 13 | * limitations under the License. 14 | */ 15 | 16 | import config from '../config/Configuration'; 17 | import { ISink } from '../sinks/Sink'; 18 | import { LOG } from '../utils/Logger'; 19 | import { IEnvironment } from './IEnvironment'; 20 | import { ConsoleSink } from '../sinks/ConsoleSink'; 21 | 22 | export class LocalEnvironment implements IEnvironment { 23 | private sink: ISink | undefined; 24 | 25 | public probe(): Promise { 26 | // probe is not intended to be used in the LocalEnvironment 27 | // To use the local environment you should set the environment 28 | // override 29 | return Promise.resolve(false); 30 | } 31 | 32 | public getName(): string { 33 | if (!config.serviceName) { 34 | LOG('Unknown ServiceName.'); 35 | return 'Unknown'; 36 | } 37 | return config.serviceName; 38 | } 39 | 40 | public getType(): string { 41 | if (!config.serviceType) { 42 | LOG('Unknown ServiceType.'); 43 | return 'Unknown'; 44 | } 45 | return config.serviceType; 46 | } 47 | 48 | public getLogGroupName(): string { 49 | return config.logGroupName ? config.logGroupName : `${this.getName()}-metrics`; 50 | } 51 | 52 | public configureContext(): void { 53 | // no-op 54 | } 55 | 56 | public getSink(): ISink { 57 | if (!this.sink) { 58 | this.sink = new ConsoleSink(); 59 | } 60 | return this.sink; 61 | } 62 | } 63 | -------------------------------------------------------------------------------- /src/environment/__tests__/DefaultEnvironment.test.ts: -------------------------------------------------------------------------------- 1 | import { faker } from '@faker-js/faker'; 2 | import config from '../../config/Configuration'; 3 | import { DefaultEnvironment } from '../DefaultEnvironment'; 4 | 5 | test('probe() always returns true', async () => { 6 | // arrange 7 | const env = new DefaultEnvironment(); 8 | 9 | // act 10 | const result = await env.probe(); 11 | 12 | // assert 13 | expect(result).toBe(true); 14 | }); 15 | 16 | test('getName() returns "Unknown" if not specified', () => { 17 | // arrange 18 | const env = new DefaultEnvironment(); 19 | 20 | // act 21 | const result = env.getName(); 22 | 23 | // assert 24 | expect(result).toBe('Unknown'); 25 | }); 26 | 27 | test('getType() returns "Unknown" if not specified', () => { 28 | // arrange 29 | const env = new DefaultEnvironment(); 30 | 31 | // act 32 | const result = env.getType(); 33 | 34 | // assert 35 | expect(result).toBe('Unknown'); 36 | }); 37 | 38 | test('getName() returns name if configured', () => { 39 | // arrange 40 | const expectedName = faker.random.word(); 41 | config.serviceName = expectedName; 42 | const env = new DefaultEnvironment(); 43 | 44 | // act 45 | const result = env.getName(); 46 | 47 | // assert 48 | expect(result).toBe(expectedName); 49 | }); 50 | 51 | test('getType() returns type if configured', () => { 52 | // arrange 53 | const expectedType = faker.random.word(); 54 | config.serviceType = expectedType; 55 | const env = new DefaultEnvironment(); 56 | 57 | // act 58 | const result = env.getType(); 59 | 60 | // assert 61 | expect(result).toBe(expectedType); 62 | }); 63 | 64 | test('getLogGroupName() returns logGroup if configured', () => { 65 | // arrange 66 | const name = faker.random.word(); 67 | config.logGroupName = name; 68 | const env = new DefaultEnvironment(); 69 | 70 | // act 71 | const result = env.getLogGroupName(); 72 | 73 | // assert 74 | expect(result).toBe(name); 75 | }); 76 | 77 | test('getLogGroupName() returns -metrics if not 
configured', () => { 78 | // arrange 79 | const serviceName = faker.random.word(); 80 | config.logGroupName = undefined; 81 | config.serviceName = serviceName; 82 | const env = new DefaultEnvironment(); 83 | 84 | // act 85 | const result = env.getLogGroupName(); 86 | 87 | // assert 88 | expect(result).toBe(`${serviceName}-metrics`); 89 | }); 90 | 91 | test('getLogGroupName() returns empty if explicitly set to empty', () => { 92 | // arrange 93 | config.logGroupName = ''; 94 | const env = new DefaultEnvironment(); 95 | 96 | // act 97 | const result = env.getLogGroupName(); 98 | 99 | // assert 100 | expect(result).toBe(``); 101 | }); 102 | 103 | test('getSink() creates an AgentSink', () => { 104 | // arrange 105 | const expectedSink = 'AgentSink'; 106 | const env = new DefaultEnvironment(); 107 | config.logGroupName = faker.random.word(); 108 | config.logStreamName = faker.random.word(); 109 | 110 | // act 111 | const sink = env.getSink(); 112 | 113 | // assert 114 | expect(sink.name).toBe(expectedSink); 115 | }); 116 | 117 | test('getSink() uses service name if LogGroup is not configured', () => { 118 | // arrange 119 | const env = new DefaultEnvironment(); 120 | const expectedName = faker.random.word(); 121 | config.serviceName = expectedName; 122 | config.logGroupName = undefined; 123 | 124 | // act 125 | const sink: any = env.getSink(); 126 | 127 | // assert 128 | expect(sink.logGroupName).toBe(`${expectedName}-metrics`); 129 | }); 130 | -------------------------------------------------------------------------------- /src/environment/__tests__/ECSEnvironment.test.ts: -------------------------------------------------------------------------------- 1 | import { faker } from '@faker-js/faker'; 2 | import config from '../../config/Configuration'; 3 | import { ECSEnvironment } from '../ECSEnvironment'; 4 | import { MetricsContext } from '../../logger/MetricsContext'; 5 | import { fetchJSON } from '../../utils/Fetch'; 6 | 7 | import * as os from 'os'; 8 | import { Constants } from '../../Constants'; 9 | jest.mock('../../utils/Fetch', () => ({ 10 | fetchJSON: jest.fn(), 11 | })); 12 | 13 | beforeEach(() => { 14 | config.agentEndpoint = undefined; 15 | config.serviceName = undefined; 16 | config.logGroupName = undefined; 17 | process.env = { 18 | ECS_CONTAINER_METADATA_URI: faker.internet.url(), 19 | }; 20 | }); 21 | 22 | describe('probe', () => { 23 | test('returns false if ECS_CONTAINER_METADATA_URI not set', async () => { 24 | // arrange 25 | process.env.ECS_CONTAINER_METADATA_URI = ''; 26 | const env = new ECSEnvironment(); 27 | 28 | // act 29 | const result = await env.probe(); 30 | 31 | // assert 32 | expect(result).toBe(false); 33 | }); 34 | 35 | test('returns true if ECS_CONTAINER_METADATA_URI set', async () => { 36 | // arrange -- env var is set in beforeEach 37 | const env = new ECSEnvironment(); 38 | 39 | // act 40 | const result = await env.probe(); 41 | 42 | // assert 43 | expect(result).toBe(true); 44 | }); 45 | 46 | test('configures endpoint to use fluent bit if set', async () => { 47 | // arrange 48 | process.env.FLUENT_HOST = faker.internet.ip(); 49 | const expectedEndpoint = `tcp://${process.env.FLUENT_HOST}:${Constants.DEFAULT_AGENT_PORT}`; 50 | 51 | const env = new ECSEnvironment(); 52 | 53 | // act 54 | await env.probe(); 55 | 56 | // assert 57 | expect(config.agentEndpoint).toBe(expectedEndpoint); 58 | }); 59 | 60 | test('does not configure fluent bit endpoint if override specified', async () => { 61 | // arrange 62 | const configuredOverride = faker.internet.ip(); 63 | 
config.agentEndpoint = configuredOverride; 64 | process.env.FLUENT_HOST = faker.internet.ip(); 65 | 66 | const env = new ECSEnvironment(); 67 | 68 | // act 69 | await env.probe(); 70 | 71 | // assert 72 | expect(config.agentEndpoint).toBe(configuredOverride); 73 | }); 74 | }); 75 | 76 | describe('configureContext()', () => { 77 | test('uses ECS metadata', async () => { 78 | // arrange 79 | const expected = getRandomECSMetadata(); 80 | 81 | // @ts-ignore 82 | fetchJSON.mockImplementation(() => expected); 83 | const env = new ECSEnvironment(); 84 | await env.probe(); 85 | 86 | // act 87 | const ctx = MetricsContext.empty(); 88 | env.configureContext(ctx); 89 | 90 | // assert 91 | expect(ctx.properties.containerId).toBe(os.hostname()); 92 | expect(ctx.properties.createdAt).toBe(expected.CreatedAt); 93 | expect(ctx.properties.startedAt).toBe(expected.StartedAt); 94 | expect(ctx.properties.image).toBe(expected.Image); 95 | expect(ctx.properties.cluster).toBe(expected.Labels['com.amazonaws.ecs.cluster']); 96 | expect(ctx.properties.taskArn).toBe(expected.Labels['com.amazonaws.ecs.task-arn']); 97 | }); 98 | 99 | test('does not set default dimensions if fluent bit is not set', async () => { 100 | // arrange 101 | const expected = getRandomECSMetadata(); 102 | 103 | // @ts-ignore 104 | fetchJSON.mockImplementation(() => expected); 105 | const env = new ECSEnvironment(); 106 | await env.probe(); 107 | 108 | // act 109 | const ctx = MetricsContext.empty(); 110 | env.configureContext(ctx); 111 | 112 | // assert 113 | expect(ctx.getDimensions().length).toBe(0); 114 | }); 115 | 116 | test('sets default dimensions without LogGroup if fluent bit is set', async () => { 117 | // arrange 118 | process.env.FLUENT_HOST = faker.internet.ip(); 119 | const expected = getRandomECSMetadata(); 120 | 121 | // @ts-ignore 122 | fetchJSON.mockImplementation(() => expected); 123 | const env = new ECSEnvironment(); 124 | await env.probe(); 125 | 126 | // act 127 | const ctx = MetricsContext.empty(); 128 | env.configureContext(ctx); 129 | 130 | // assert 131 | expect(ctx.getDimensions().length).toBe(1); 132 | const dimensionSet = ctx.getDimensions()[0]; 133 | expect(Object.keys(dimensionSet).length).toBe(2); 134 | expect(dimensionSet['ServiceName']).toBe(expected.Image); 135 | expect(dimensionSet['ServiceType']).toBe('AWS::ECS::Container'); 136 | expect(dimensionSet['LogGroup']).toBe(undefined); 137 | }); 138 | }); 139 | 140 | describe('getLogGroupName()', () => { 141 | test('is empty if FluentBit configured', async () => { 142 | // arrange 143 | process.env.FLUENT_HOST = faker.internet.ip(); 144 | 145 | const env = new ECSEnvironment(); 146 | await env.probe(); 147 | 148 | // act 149 | const result = env.getLogGroupName(); 150 | 151 | // assert 152 | expect(result).toBe(''); 153 | }); 154 | 155 | test('getLogGroupName() return override if configured', async () => { 156 | // arrange 157 | config.logGroupName = faker.random.alphaNumeric(10); 158 | 159 | const env = new ECSEnvironment(); 160 | await env.probe(); 161 | 162 | // act 163 | const result = env.getLogGroupName(); 164 | 165 | // assert 166 | expect(result).toBe(config.logGroupName); 167 | }); 168 | 169 | test('getLogGroupName() returns service name if not configured', async () => { 170 | // arrange 171 | config.serviceName = faker.random.alphaNumeric(10); 172 | 173 | const env = new ECSEnvironment(); 174 | await env.probe(); 175 | 176 | // act 177 | const result = env.getLogGroupName(); 178 | 179 | // assert 180 | expect(result).toBe(config.serviceName); 181 | }); 
182 | }); 183 | 184 | describe('getName()', () => { 185 | test('returns service name if configured', async () => { 186 | // arrange 187 | config.serviceName = faker.random.alphaNumeric(10); 188 | 189 | const env = new ECSEnvironment(); 190 | await env.probe(); 191 | 192 | // act 193 | const result = env.getName(); 194 | 195 | // assert 196 | expect(result).toBe(config.serviceName); 197 | }); 198 | 199 | test('returns formatted image name if available', async () => { 200 | // arrange 201 | const formattedImageName = `${faker.lorem.word()}:latest`; 202 | const fullImageName = `${faker.datatype.number({ 203 | min: 0, 204 | max: 999999999999, 205 | })}.dkr.ecr..amazonaws.com/${formattedImageName}`; 206 | const metadata = { 207 | Image: fullImageName, 208 | Labels: {}, 209 | }; 210 | 211 | // @ts-ignore 212 | fetchJSON.mockImplementation(() => metadata); 213 | const env = new ECSEnvironment(); 214 | await env.probe(); 215 | 216 | // act 217 | const result = env.getName(); 218 | 219 | // assert 220 | expect(result).toBe(formattedImageName); 221 | }); 222 | 223 | test('returns Unknown if image name is undefined', async () => { 224 | // arrange 225 | const expectedName = `Unknown`; 226 | const metadata = { 227 | Image: undefined, 228 | Labels: {}, 229 | }; 230 | 231 | // @ts-ignore 232 | fetchJSON.mockImplementation(() => metadata); 233 | const env = new ECSEnvironment(); 234 | await env.probe(); 235 | 236 | // act 237 | const result = env.getName(); 238 | 239 | // assert 240 | expect(result).toBe(expectedName); 241 | }); 242 | 243 | test('returns Unknown if image not available', async () => { 244 | // arrange 245 | // @ts-ignore 246 | fetchJSON.mockImplementation(() => { 247 | return {}; 248 | }); 249 | const env = new ECSEnvironment(); 250 | await env.probe(); 251 | 252 | // act 253 | const result = env.getName(); 254 | 255 | // assert 256 | expect(result).toBe('Unknown'); 257 | }); 258 | }); 259 | 260 | describe('getType()', () => { 261 | test('returns AWS::ECS::Container', () => { 262 | // arrange 263 | const env = new ECSEnvironment(); 264 | 265 | // act 266 | const result = env.getType(); 267 | 268 | // assert 269 | expect(result).toBe('AWS::ECS::Container'); 270 | }); 271 | }); 272 | 273 | describe('getSink()', () => { 274 | test('returns AgentSink without LogGroup if FluentBit configured', async () => { 275 | // arrange 276 | process.env.FLUENT_HOST = faker.internet.ip(); 277 | 278 | const env = new ECSEnvironment(); 279 | await env.probe(); 280 | 281 | // act 282 | const result: any = env.getSink(); 283 | 284 | // assert 285 | expect(result.name).toBe('AgentSink'); 286 | expect(result.logGroupName).toBe(''); 287 | }); 288 | 289 | test('returns AgentSink with LogGroup if FluentBit not configured', async () => { 290 | // arrange 291 | config.logGroupName = faker.lorem.word(); 292 | 293 | const env = new ECSEnvironment(); 294 | await env.probe(); 295 | 296 | // act 297 | const result: any = env.getSink(); 298 | 299 | // assert 300 | expect(result.name).toBe('AgentSink'); 301 | expect(result.logGroupName).toBe(config.logGroupName); 302 | }); 303 | }); 304 | 305 | const getRandomECSMetadata = (): any => { 306 | return { 307 | CreatedAt: faker.date.past(), 308 | StartedAt: faker.date.past(), 309 | Image: faker.lorem.word(), 310 | Labels: { 311 | 'com.amazonaws.ecs.cluster': faker.random.alphaNumeric(), 312 | 'com.amazonaws.ecs.task-arn': faker.random.alphaNumeric(), 313 | }, 314 | }; 315 | }; 316 | -------------------------------------------------------------------------------- 
/src/environment/__tests__/EnvironmentDetector.test.ts: -------------------------------------------------------------------------------- 1 | import { cleanResolveEnvironment } from '../EnvironmentDetector'; 2 | import config from '../../config/Configuration'; 3 | import Environments from '../Environments'; 4 | import { DefaultEnvironment } from '../DefaultEnvironment'; 5 | import { ECSEnvironment } from '../ECSEnvironment'; 6 | import { EC2Environment } from '../EC2Environment'; 7 | import { LambdaEnvironment } from '../LambdaEnvironment'; 8 | 9 | const envs = [LambdaEnvironment, ECSEnvironment, EC2Environment, DefaultEnvironment]; 10 | const setEnvironment = (env: any) => { 11 | envs.forEach(e => { 12 | const probeResult = env === e; 13 | e.prototype.probe = jest.fn(() => Promise.resolve(probeResult)); 14 | }); 15 | }; 16 | 17 | test('resolveEnvironment() returns LambdaEnvironment if AWS_LAMBDA_FUNCTION_NAME specified', async () => { 18 | // arrange 19 | setEnvironment(LambdaEnvironment); 20 | 21 | // act 22 | const result = await cleanResolveEnvironment(); 23 | 24 | // assert 25 | expect(result.constructor.name).toBe('LambdaEnvironment'); 26 | }); 27 | 28 | test('resolveEnvironment() returns ECSEnvironment if probe returns true', async () => { 29 | // arrange 30 | setEnvironment(ECSEnvironment); 31 | 32 | // act 33 | const result = await cleanResolveEnvironment(); 34 | 35 | // assert 36 | expect(result.constructor.name).toBe('ECSEnvironment'); 37 | }); 38 | 39 | test('resolveEnvironment() returns DefaultEnvironment if nothing else was detected', async () => { 40 | // arrange 41 | setEnvironment(undefined); 42 | 43 | // act 44 | const result = await cleanResolveEnvironment(); 45 | 46 | // assert 47 | expect(result.constructor.name).toBe('DefaultEnvironment'); 48 | }); 49 | 50 | test('resolveEnvironment() honors configured override', async () => { 51 | // arrange 52 | config.environmentOverride = Environments.Local; 53 | setEnvironment(ECSEnvironment); 54 | 55 | // act 56 | const result = await cleanResolveEnvironment(); 57 | 58 | // assert 59 | expect(result.constructor.name).toBe('LocalEnvironment'); 60 | }); 61 | 62 | test('resolveEnvironment() ignores invalid override and falls back to discovery', async () => { 63 | // arrange 64 | // @ts-ignore 65 | config.environmentOverride = 'Invalid'; 66 | setEnvironment(LambdaEnvironment); 67 | 68 | // act 69 | const result = await cleanResolveEnvironment(); 70 | 71 | // assert 72 | expect(result.constructor.name).toBe('LambdaEnvironment'); 73 | }); 74 | -------------------------------------------------------------------------------- /src/environment/__tests__/LambdaEnvironment.test.ts: -------------------------------------------------------------------------------- 1 | import { faker } from '@faker-js/faker'; 2 | import { MetricsContext } from '../../logger/MetricsContext'; 3 | import { LambdaEnvironment } from '../LambdaEnvironment'; 4 | 5 | test('probe() returns true if function name provided in environment', async () => { 6 | // arrange 7 | process.env.AWS_LAMBDA_FUNCTION_NAME = faker.random.word(); 8 | const env = new LambdaEnvironment(); 9 | 10 | // act 11 | const result = await env.probe(); 12 | 13 | // assert 14 | expect(result).toBe(true); 15 | }); 16 | 17 | test('getName() returns function name', () => { 18 | // arrange 19 | const env = new LambdaEnvironment(); 20 | const expectedName = faker.random.word(); 21 | process.env.AWS_LAMBDA_FUNCTION_NAME = expectedName; 22 | 23 | // act 24 | const result = env.getName(); 25 | 26 | // assert 
27 | expect(result).toBe(expectedName); 28 | }); 29 | 30 | test('getType() returns "AWS::Lambda::Function"', () => { 31 | // arrange 32 | const env = new LambdaEnvironment(); 33 | 34 | // act 35 | const result = env.getType(); 36 | 37 | // assert 38 | expect(result).toBe('AWS::Lambda::Function'); 39 | }); 40 | 41 | test('getLogGroupName() returns function name', () => { 42 | // arrange 43 | const env = new LambdaEnvironment(); 44 | const expectedName = faker.random.word(); 45 | process.env.AWS_LAMBDA_FUNCTION_NAME = expectedName; 46 | 47 | // act 48 | const result = env.getLogGroupName(); 49 | 50 | // assert 51 | expect(result).toBe(expectedName); 52 | }); 53 | 54 | test('createSink() creates a ConsoleSink', () => { 55 | // arrange 56 | const expectedSink = 'ConsoleSink'; 57 | const env = new LambdaEnvironment(); 58 | 59 | // act 60 | const sink = env.getSink(); 61 | 62 | // assert 63 | expect(sink.name).toBe(expectedSink); 64 | }); 65 | 66 | test('configureContex() adds default properties', () => { 67 | // arrange 68 | const env = new LambdaEnvironment(); 69 | const context = MetricsContext.empty(); 70 | 71 | const executionEnvironment = faker.random.word(); 72 | const memorySize = faker.random.word(); 73 | const functionVersion = faker.random.word(); 74 | const logStreamId = faker.random.word(); 75 | 76 | process.env.AWS_EXECUTION_ENV = executionEnvironment; 77 | process.env.AWS_LAMBDA_FUNCTION_MEMORY_SIZE = memorySize; 78 | process.env.AWS_LAMBDA_FUNCTION_VERSION = functionVersion; 79 | process.env.AWS_LAMBDA_LOG_STREAM_NAME = logStreamId; 80 | 81 | // act 82 | env.configureContext(context); 83 | 84 | // assert 85 | expect(context.properties.executionEnvironment).toBe(executionEnvironment); 86 | expect(context.properties.memorySize).toBe(memorySize); 87 | expect(context.properties.functionVersion).toBe(functionVersion); 88 | expect(context.properties.logStreamId).toBe(logStreamId); 89 | }); 90 | 91 | test('configureContex() sets trace id if sampled', () => { 92 | // arrange 93 | const expectedTraceId = 'Root=1-5da0c3a1-0494a318bc08973b890cafed;Parent=236fcb7c7ff79f19;Sampled=1'; 94 | process.env._X_AMZN_TRACE_ID = expectedTraceId; 95 | const context = MetricsContext.empty(); 96 | const env = new LambdaEnvironment(); 97 | 98 | // act 99 | env.configureContext(context); 100 | 101 | // assert 102 | expect(context.properties.traceId).toBe(expectedTraceId); 103 | }); 104 | 105 | test('configureContex() does not set trace id if not sampled', () => { 106 | // arrange 107 | const expectedTraceId = 'Root=1-5da0c3a1-0494a318bc08973b890cafed;Parent=236fcb7c7ff79f19;Sampled=0'; 108 | process.env._X_AMZN_TRACE_ID = expectedTraceId; 109 | const context = MetricsContext.empty(); 110 | const env = new LambdaEnvironment(); 111 | 112 | // act 113 | env.configureContext(context); 114 | 115 | // assert 116 | expect(context.properties.traceId).toBe(undefined); 117 | }); 118 | -------------------------------------------------------------------------------- /src/environment/__tests__/LocalEnvironment.test.ts: -------------------------------------------------------------------------------- 1 | import { faker } from '@faker-js/faker'; 2 | import config from '../../config/Configuration'; 3 | import { LocalEnvironment } from '../LocalEnvironment'; 4 | 5 | test('probe() always returns false', async () => { 6 | // arrange 7 | const env = new LocalEnvironment(); 8 | 9 | // act 10 | const result = await env.probe(); 11 | 12 | // assert 13 | expect(result).toBe(false); 14 | }); 15 | 16 | test('getName() returns 
"Unknown" if not specified', () => { 17 | // arrange 18 | const env = new LocalEnvironment(); 19 | 20 | // act 21 | const result = env.getName(); 22 | 23 | // assert 24 | expect(result).toBe('Unknown'); 25 | }); 26 | 27 | test('getType() returns "Unknown" if not specified', () => { 28 | // arrange 29 | const env = new LocalEnvironment(); 30 | 31 | // act 32 | const result = env.getType(); 33 | 34 | // assert 35 | expect(result).toBe('Unknown'); 36 | }); 37 | 38 | test('getName() returns name if configured', () => { 39 | // arrange 40 | const expectedName = faker.random.word(); 41 | config.serviceName = expectedName; 42 | const env = new LocalEnvironment(); 43 | 44 | // act 45 | const result = env.getName(); 46 | 47 | // assert 48 | expect(result).toBe(expectedName); 49 | }); 50 | 51 | test('getType() returns type if configured', () => { 52 | // arrange 53 | const expectedType = faker.random.word(); 54 | config.serviceType = expectedType; 55 | const env = new LocalEnvironment(); 56 | 57 | // act 58 | const result = env.getType(); 59 | 60 | // assert 61 | expect(result).toBe(expectedType); 62 | }); 63 | 64 | test('getLogGroupName() returns logGroup if configured', () => { 65 | // arrange 66 | const name = faker.random.word(); 67 | config.logGroupName = name; 68 | const env = new LocalEnvironment(); 69 | 70 | // act 71 | const result = env.getLogGroupName(); 72 | 73 | // assert 74 | expect(result).toBe(name); 75 | }); 76 | 77 | test('getLogGroupName() returns -metrics if not configured', () => { 78 | // arrange 79 | const serviceName = faker.random.word(); 80 | config.logGroupName = undefined; 81 | config.serviceName = serviceName; 82 | const env = new LocalEnvironment(); 83 | 84 | // act 85 | const result = env.getLogGroupName(); 86 | 87 | // assert 88 | expect(result).toBe(`${serviceName}-metrics`); 89 | }); 90 | 91 | test('getSink() creates a ConsoleSink', () => { 92 | // arrange 93 | const expectedSink = 'ConsoleSink'; 94 | const env = new LocalEnvironment(); 95 | 96 | // act 97 | const sink = env.getSink(); 98 | 99 | // assert 100 | expect(sink.name).toBe(expectedSink); 101 | }); 102 | -------------------------------------------------------------------------------- /src/exceptions/DimensionSetExceededError.ts: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2019 Amazon.com, Inc. or its affiliates. 3 | * Licensed under the Apache License, Version 2.0 (the 4 | * "License"); you may not use this file except in compliance 5 | * with the License. You may obtain a copy of the License at 6 | * 7 | * http://www.apache.org/licenses/LICENSE-2.0 8 | * 9 | * Unless required by applicable law or agreed to in writing, software 10 | * distributed under the License is distributed on an "AS IS" BASIS, 11 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | * See the License for the specific language governing permissions and 13 | * limitations under the License. 14 | */ 15 | 16 | export class DimensionSetExceededError extends Error { 17 | constructor(msg: string) { 18 | super(msg); 19 | 20 | // Set the prototype explicitly. 21 | Object.setPrototypeOf(this, DimensionSetExceededError.prototype); 22 | } 23 | } 24 | -------------------------------------------------------------------------------- /src/exceptions/InvalidDimensionError.ts: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2019 Amazon.com, Inc. or its affiliates. 
3 | * Licensed under the Apache License, Version 2.0 (the 4 | * "License"); you may not use this file except in compliance 5 | * with the License. You may obtain a copy of the License at 6 | * 7 | * http://www.apache.org/licenses/LICENSE-2.0 8 | * 9 | * Unless required by applicable law or agreed to in writing, software 10 | * distributed under the License is distributed on an "AS IS" BASIS, 11 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | * See the License for the specific language governing permissions and 13 | * limitations under the License. 14 | */ 15 | 16 | export class InvalidDimensionError extends Error { 17 | constructor(msg: string) { 18 | super(msg); 19 | 20 | // Set the prototype explicitly. 21 | Object.setPrototypeOf(this, InvalidDimensionError.prototype); 22 | } 23 | } 24 | -------------------------------------------------------------------------------- /src/exceptions/InvalidMetricError.ts: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2019 Amazon.com, Inc. or its affiliates. 3 | * Licensed under the Apache License, Version 2.0 (the 4 | * "License"); you may not use this file except in compliance 5 | * with the License. You may obtain a copy of the License at 6 | * 7 | * http://www.apache.org/licenses/LICENSE-2.0 8 | * 9 | * Unless required by applicable law or agreed to in writing, software 10 | * distributed under the License is distributed on an "AS IS" BASIS, 11 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | * See the License for the specific language governing permissions and 13 | * limitations under the License. 14 | */ 15 | 16 | export class InvalidMetricError extends Error { 17 | constructor(msg: string) { 18 | super(msg); 19 | 20 | // Set the prototype explicitly. 21 | Object.setPrototypeOf(this, InvalidMetricError.prototype); 22 | } 23 | } 24 | -------------------------------------------------------------------------------- /src/exceptions/InvalidNamespaceError.ts: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2019 Amazon.com, Inc. or its affiliates. 3 | * Licensed under the Apache License, Version 2.0 (the 4 | * "License"); you may not use this file except in compliance 5 | * with the License. You may obtain a copy of the License at 6 | * 7 | * http://www.apache.org/licenses/LICENSE-2.0 8 | * 9 | * Unless required by applicable law or agreed to in writing, software 10 | * distributed under the License is distributed on an "AS IS" BASIS, 11 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | * See the License for the specific language governing permissions and 13 | * limitations under the License. 14 | */ 15 | 16 | export class InvalidNamespaceError extends Error { 17 | constructor(msg: string) { 18 | super(msg); 19 | 20 | // Set the prototype explicitly. 21 | Object.setPrototypeOf(this, InvalidNamespaceError.prototype); 22 | } 23 | } 24 | -------------------------------------------------------------------------------- /src/exceptions/InvalidTimestampError.ts: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2019 Amazon.com, Inc. or its affiliates. 3 | * Licensed under the Apache License, Version 2.0 (the 4 | * "License"); you may not use this file except in compliance 5 | * with the License. 
You may obtain a copy of the License at 6 | * 7 | * http://www.apache.org/licenses/LICENSE-2.0 8 | * 9 | * Unless required by applicable law or agreed to in writing, software 10 | * distributed under the License is distributed on an "AS IS" BASIS, 11 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | * See the License for the specific language governing permissions and 13 | * limitations under the License. 14 | */ 15 | 16 | export class InvalidTimestampError extends Error { 17 | constructor(msg: string) { 18 | super(msg); 19 | 20 | // Set the prototype explicitly. 21 | Object.setPrototypeOf(this, InvalidTimestampError.prototype); 22 | } 23 | } 24 | -------------------------------------------------------------------------------- /src/index.ts: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2019 Amazon.com, Inc. or its affiliates. 3 | * Licensed under the Apache License, Version 2.0 (the 4 | * "License"); you may not use this file except in compliance 5 | * with the License. You may obtain a copy of the License at 6 | * 7 | * http://www.apache.org/licenses/LICENSE-2.0 8 | * 9 | * Unless required by applicable law or agreed to in writing, software 10 | * distributed under the License is distributed on an "AS IS" BASIS, 11 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | * See the License for the specific language governing permissions and 13 | * limitations under the License. 14 | */ 15 | 16 | export { MetricsLogger } from './logger/MetricsLogger'; 17 | export { ConsoleSink as LocalSink } from './sinks/ConsoleSink'; 18 | export { AgentSink } from './sinks/AgentSink'; 19 | export { metricScope } from './logger/MetricScope'; 20 | export { createMetricsLogger } from './logger/MetricsLoggerFactory'; 21 | export { Unit } from './logger/Unit'; 22 | export { StorageResolution } from './logger/StorageResolution'; 23 | 24 | import Configuration from './config/Configuration'; 25 | export { Configuration }; 26 | -------------------------------------------------------------------------------- /src/logger/MetricScope.ts: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2019 Amazon.com, Inc. or its affiliates. 3 | * Licensed under the Apache License, Version 2.0 (the 4 | * "License"); you may not use this file except in compliance 5 | * with the License. You may obtain a copy of the License at 6 | * 7 | * http://www.apache.org/licenses/LICENSE-2.0 8 | * 9 | * Unless required by applicable law or agreed to in writing, software 10 | * distributed under the License is distributed on an "AS IS" BASIS, 11 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | * See the License for the specific language governing permissions and 13 | * limitations under the License. 14 | */ 15 | 16 | import { LOG } from '../utils/Logger'; 17 | import { MetricsLogger } from './MetricsLogger'; 18 | import { createMetricsLogger } from './MetricsLoggerFactory'; 19 | 20 | /** 21 | * An asynchronous wrapper that provides a metrics instance. 
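 *
 * A usage sketch (hypothetical handler, property, and metric names):
 *
 *   export const handler = metricScope(metrics => async (event: { requestId?: string }) => {
 *     metrics.setProperty('RequestId', event.requestId);
 *     metrics.putMetric('ProcessedRecords', 1, 'Count');
 *   });
 *
 * The wrapped function is always returned as an async function, and metrics.flush()
 * is invoked in a finally block even if the handler throws.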
22 | */ 23 | const metricScope = ( 24 | handler: (m: MetricsLogger) => (...args: U) => T | Promise, 25 | ): ((...args: U) => Promise) => { 26 | const wrappedHandler = async (...args: U): Promise => { 27 | const metrics = createMetricsLogger(); 28 | try { 29 | return await handler(metrics)(...args); 30 | } finally { 31 | try { 32 | await metrics.flush(); 33 | } catch (e) { 34 | LOG('Failed to flush metrics', e); 35 | } 36 | } 37 | }; 38 | return wrappedHandler; 39 | }; 40 | 41 | export { metricScope }; 42 | -------------------------------------------------------------------------------- /src/logger/MetricValues.ts: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2019 Amazon.com, Inc. or its affiliates. 3 | * Licensed under the Apache License, Version 2.0 (the 4 | * "License"); you may not use this file except in compliance 5 | * with the License. You may obtain a copy of the License at 6 | * 7 | * http://www.apache.org/licenses/LICENSE-2.0 8 | * 9 | * Unless required by applicable law or agreed to in writing, software 10 | * distributed under the License is distributed on an "AS IS" BASIS, 11 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | * See the License for the specific language governing permissions and 13 | * limitations under the License. 14 | */ 15 | 16 | import { Unit } from '..'; 17 | import { StorageResolution } from './StorageResolution'; 18 | 19 | export class MetricValues { 20 | public values: number[]; 21 | public unit: string; 22 | public storageResolution: number; 23 | 24 | constructor(value: number, unit?: Unit | string, storageResolution?: StorageResolution | number) { 25 | this.values = [value]; 26 | this.unit = unit || 'None'; 27 | this.storageResolution = storageResolution || StorageResolution.Standard; 28 | } 29 | 30 | /** 31 | * Appends the provided value to the current metric 32 | * @param value 33 | */ 34 | public addValue(value: number): void { 35 | this.values.push(value); 36 | } 37 | } 38 | -------------------------------------------------------------------------------- /src/logger/MetricsContext.ts: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2019 Amazon.com, Inc. or its affiliates. 3 | * Licensed under the Apache License, Version 2.0 (the 4 | * "License"); you may not use this file except in compliance 5 | * with the License. You may obtain a copy of the License at 6 | * 7 | * http://www.apache.org/licenses/LICENSE-2.0 8 | * 9 | * Unless required by applicable law or agreed to in writing, software 10 | * distributed under the License is distributed on an "AS IS" BASIS, 11 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | * See the License for the specific language governing permissions and 13 | * limitations under the License. 14 | */ 15 | 16 | import Configuration from '../config/Configuration'; 17 | import { LOG } from '../utils/Logger'; 18 | import { validateNamespace, validateTimestamp, validateDimensionSet, validateMetric } from '../utils/Validator'; 19 | import { MetricValues } from './MetricValues'; 20 | import { StorageResolution } from './StorageResolution'; 21 | import { Unit } from './Unit'; 22 | 23 | interface IProperties { 24 | [s: string]: unknown; 25 | } 26 | 27 | type Metrics = Map; 28 | 29 | export class MetricsContext { 30 | /** 31 | * Use this to create a new, empty context. 
32 | */ 33 | public static empty(): MetricsContext { 34 | return new MetricsContext(); 35 | } 36 | 37 | public namespace: string; 38 | public properties: IProperties; 39 | public metrics: Metrics = new Map(); 40 | public meta: Record = {}; 41 | private dimensions: Array>; 42 | private defaultDimensions: Record; 43 | private shouldUseDefaultDimensions = true; 44 | private timestamp: Date | number | undefined; 45 | private metricNameAndResolutionMap: Map = new Map(); 46 | 47 | /** 48 | * Constructor used to create child instances. 49 | * You should not use this constructor directly. 50 | * Instead, use createCopyWithContext() or empty(). 51 | * 52 | * The reason for this is to avoid unexpected behavior when creating 53 | * MetricsContexts with defaultDimensions and existing dimensions. 54 | * 55 | * @param properties 56 | * @param dimensions 57 | */ 58 | private constructor( 59 | namespace?: string, 60 | properties?: IProperties, 61 | dimensions?: Array>, 62 | defaultDimensions?: Record, 63 | shouldUseDefaultDimensions?: boolean, 64 | timestamp?: Date | number, 65 | ) { 66 | this.namespace = namespace || Configuration.namespace; 67 | this.properties = properties || {}; 68 | this.dimensions = dimensions || []; 69 | this.timestamp = timestamp; 70 | this.meta.Timestamp = MetricsContext.resolveMetaTimestamp(timestamp); 71 | this.defaultDimensions = defaultDimensions || {}; 72 | if (shouldUseDefaultDimensions != undefined) { 73 | this.shouldUseDefaultDimensions = shouldUseDefaultDimensions; 74 | } 75 | } 76 | 77 | private static resolveMetaTimestamp(timestamp?: Date | number): number { 78 | if (timestamp instanceof Date) { 79 | return timestamp.getTime(); 80 | } else if (timestamp) { 81 | return timestamp; 82 | } else { 83 | return Date.now(); 84 | } 85 | } 86 | 87 | public setNamespace(value: string): void { 88 | validateNamespace(value); 89 | this.namespace = value; 90 | } 91 | 92 | public setProperty(key: string, value: unknown): void { 93 | this.properties[key] = value; 94 | } 95 | 96 | public setTimestamp(timestamp: Date | number): void { 97 | validateTimestamp(timestamp); 98 | this.timestamp = timestamp; 99 | this.meta.Timestamp = MetricsContext.resolveMetaTimestamp(timestamp); 100 | } 101 | 102 | /** 103 | * Sets default dimensions for the Context. 104 | * A dimension set will be created with just the default dimensions 105 | * and all calls to putDimensions will be prepended with the defaults. 106 | */ 107 | public setDefaultDimensions(dimensions: Record): void { 108 | LOG(`Received default dimensions`, dimensions); 109 | this.defaultDimensions = dimensions; 110 | } 111 | 112 | /** 113 | * Adds a new set of dimensions. Any time a new dimensions set 114 | * is added, the set is first prepended by the default dimensions. 115 | * 116 | * @param dimensions 117 | */ 118 | public putDimensions(incomingDimensionSet: Record): void { 119 | validateDimensionSet(incomingDimensionSet); 120 | 121 | // Duplicate dimensions sets are removed before being added to the end of the collection. 122 | // This ensures the latest dimension key-value is used as a target member on the root EMF node. 
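// For example (illustrative key/values): calling putDimensions({ Region: 'us-east-1' }) and later
// putDimensions({ Region: 'us-west-2' }) leaves a single dimension set whose Region is 'us-west-2',
// because an existing set with an identical key set is dropped before the new set is appended.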
123 | // This operation is O(n^2), but acceptable given sets are capped at 10 dimensions 124 | const incomingDimensionSetKeys = Object.keys(incomingDimensionSet); 125 | this.dimensions = this.dimensions.filter((existingDimensionSet) => { 126 | const existingDimensionSetKeys = Object.keys(existingDimensionSet); 127 | if (existingDimensionSetKeys.length !== incomingDimensionSetKeys.length) { 128 | return true; 129 | } 130 | return !existingDimensionSetKeys.every((existingDimensionSetKey) => 131 | incomingDimensionSetKeys.includes(existingDimensionSetKey), 132 | ); 133 | }); 134 | 135 | this.dimensions.push(incomingDimensionSet); 136 | } 137 | 138 | /** 139 | * Overwrite all dimensions. 140 | * 141 | * @param dimensionSets 142 | */ 143 | public setDimensions(dimensionSets: Array>, useDefault = false): void { 144 | dimensionSets.forEach((dimensionSet) => validateDimensionSet(dimensionSet)); 145 | this.shouldUseDefaultDimensions = useDefault; 146 | this.dimensions = dimensionSets; 147 | } 148 | 149 | /** 150 | * Reset all custom dimensions 151 | * @param useDefault Indicates whether default dimensions should be used 152 | */ 153 | public resetDimensions(useDefault: boolean): void { 154 | this.shouldUseDefaultDimensions = useDefault; 155 | this.dimensions = []; 156 | } 157 | 158 | /** 159 | * Get the current dimensions. 160 | */ 161 | public getDimensions(): Array> { 162 | // caller has explicitly called setDimensions 163 | if (this.shouldUseDefaultDimensions === false) { 164 | return this.dimensions; 165 | } 166 | 167 | // if there are no default dimensions, return the custom dimensions 168 | if (Object.keys(this.defaultDimensions).length === 0) { 169 | return this.dimensions; 170 | } 171 | 172 | // if default dimensions have been provided, but no custom dimensions, use the defaults 173 | if (this.dimensions.length === 0) { 174 | return [this.defaultDimensions]; 175 | } 176 | 177 | // otherwise, merge the dimensions 178 | // we do this on the read path because default dimensions 179 | // may get updated asynchronously by environment detection 180 | return this.dimensions.map((custom) => { 181 | return { ...this.defaultDimensions, ...custom }; 182 | }); 183 | } 184 | 185 | public putMetric( 186 | key: string, 187 | value: number, 188 | unit?: Unit | string, 189 | storageResolution?: StorageResolution | number, 190 | ): void { 191 | validateMetric(key, value, unit, storageResolution, this.metricNameAndResolutionMap); 192 | 193 | const currentMetric = this.metrics.get(key); 194 | if (currentMetric) { 195 | currentMetric.addValue(value); 196 | } else { 197 | this.metrics.set(key, new MetricValues(value, unit, storageResolution)); 198 | } 199 | this.metricNameAndResolutionMap?.set(key, storageResolution || StorageResolution.Standard); 200 | } 201 | 202 | /** 203 | * Creates an independently flushable context. 204 | * Custom dimensions are preserved by default unless preserveDimensions parameter is set. 205 | * @param preserveDimensions Indicates whether custom dimensions should be preserved 206 | */ 207 | public createCopyWithContext(preserveDimensions = true): MetricsContext { 208 | return new MetricsContext( 209 | this.namespace, 210 | Object.assign({}, this.properties), 211 | preserveDimensions ? 
Object.assign([], this.dimensions) : [], 212 | this.defaultDimensions, 213 | this.shouldUseDefaultDimensions, 214 | this.timestamp, 215 | ); 216 | } 217 | } 218 | -------------------------------------------------------------------------------- /src/logger/MetricsLogger.ts: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2019 Amazon.com, Inc. or its affiliates. 3 | * Licensed under the Apache License, Version 2.0 (the 4 | * "License"); you may not use this file except in compliance 5 | * with the License. You may obtain a copy of the License at 6 | * 7 | * http://www.apache.org/licenses/LICENSE-2.0 8 | * 9 | * Unless required by applicable law or agreed to in writing, software 10 | * distributed under the License is distributed on an "AS IS" BASIS, 11 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | * See the License for the specific language governing permissions and 13 | * limitations under the License. 14 | */ 15 | 16 | import Configuration from '../config/Configuration'; 17 | import { EnvironmentProvider } from '../environment/EnvironmentDetector'; 18 | import { IEnvironment } from '../environment/IEnvironment'; 19 | import { MetricsContext } from './MetricsContext'; 20 | import { Unit } from './Unit'; 21 | import { StorageResolution } from './StorageResolution'; 22 | 23 | /** 24 | * An async metrics logger. 25 | * Use this interface to publish logs to CloudWatch Logs 26 | * and extract metrics to CloudWatch Metrics asynchronously. 27 | */ 28 | export class MetricsLogger { 29 | private context: MetricsContext; 30 | private resolveEnvironment: EnvironmentProvider; 31 | public flushPreserveDimensions: boolean; 32 | 33 | constructor(resolveEnvironment: EnvironmentProvider, context?: MetricsContext) { 34 | this.resolveEnvironment = resolveEnvironment; 35 | this.context = context || MetricsContext.empty(); 36 | this.flushPreserveDimensions = true; 37 | } 38 | 39 | /** 40 | * Flushes the current context state to the configured sink. 41 | */ 42 | public async flush(): Promise { 43 | // resolve the environment and get the sink 44 | // MOST of the time this will run synchonrously 45 | // This only runs asynchronously if executing for the 46 | // first time in a non-lambda environment 47 | const environment = await this.resolveEnvironment(); 48 | 49 | this.configureContextForEnvironment(this.context, environment); 50 | const sink = environment.getSink(); 51 | 52 | // accept and reset the context 53 | await sink.accept(this.context); 54 | this.context = this.context.createCopyWithContext(this.flushPreserveDimensions); 55 | } 56 | 57 | /** 58 | * Set a property on the published metrics. 59 | * This is stored in the emitted log data and you are not 60 | * charged for this data by CloudWatch Metrics. 61 | * These values can be values that are useful for searching on, 62 | * but have too high cardinality to emit as dimensions to 63 | * CloudWatch Metrics. 64 | * 65 | * @param key Property name 66 | * @param value Property value 67 | */ 68 | public setProperty(key: string, value: unknown): MetricsLogger { 69 | this.context.setProperty(key, value); 70 | return this; 71 | } 72 | 73 | /** 74 | * Adds a dimension. 75 | * This is generally a low cardinality key-value pair that is part of the metric identity. 76 | * CloudWatch treats each unique combination of dimensions as a separate metric, even if the metrics have the same metric name. 
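 * For example (an illustrative key-value pair): logger.putDimensions({ Service: 'Aggregator' });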
77 | * 78 | * @param dimension 79 | * @param value 80 | * @see [CloudWatch Dimensions](https://docs.aws.amazon.com/AmazonCloudWatch/latest/monitoring/cloudwatch_concepts.html#Dimension) 81 | */ 82 | public putDimensions(dimensions: Record): MetricsLogger { 83 | this.context.putDimensions(dimensions); 84 | return this; 85 | } 86 | 87 | /** 88 | * Overwrite all dimensions on this MetricsLogger instance. 89 | * @see [CloudWatch Dimensions](https://docs.aws.amazon.com/AmazonCloudWatch/latest/monitoring/cloudwatch_concepts.html#Dimension) 90 | * 91 | * @param {Array> | Record} dimensionSetOrSets Dimension sets to overwrite with 92 | * @param {boolean} [useDefault=false] whether to use default dimensions 93 | */ 94 | public setDimensions(dimensionSet: Record, useDefault: boolean): MetricsLogger; 95 | public setDimensions(dimensionSet: Record): MetricsLogger; 96 | public setDimensions(dimensionSets: Array>, useDefault: boolean): MetricsLogger; 97 | public setDimensions(dimensionSets: Array>): MetricsLogger; 98 | public setDimensions( 99 | dimensionSetOrSets: Array> | Record, 100 | useDefault = false, 101 | ): MetricsLogger { 102 | if (Array.isArray(dimensionSetOrSets)) { 103 | this.context.setDimensions(dimensionSetOrSets, useDefault); 104 | } else { 105 | this.context.setDimensions([dimensionSetOrSets], useDefault); 106 | } 107 | 108 | return this; 109 | } 110 | 111 | /** 112 | * Clear all custom dimensions on this MetricsLogger instance 113 | * 114 | * @param useDefault indicates whether default dimensions should be used 115 | */ 116 | public resetDimensions(useDefault: boolean): MetricsLogger { 117 | this.context.resetDimensions(useDefault); 118 | return this; 119 | } 120 | 121 | /** 122 | * Put a metric value. 123 | * This value will be emitted to CloudWatch Metrics asyncronously and does not contribute to your 124 | * account TPS limits. The value will also be available in your CloudWatch Logs 125 | * @param key 126 | * @param value 127 | * @param unit 128 | * @param storageResolution 129 | */ 130 | public putMetric( 131 | key: string, 132 | value: number, 133 | unit?: Unit | string, 134 | storageResolution?: StorageResolution | number, 135 | ): MetricsLogger { 136 | this.context.putMetric(key, value, unit, storageResolution); 137 | return this; 138 | } 139 | 140 | /** 141 | * Set the CloudWatch namespace that metrics should be published to. 142 | * @param value 143 | */ 144 | public setNamespace(value: string): MetricsLogger { 145 | this.context.setNamespace(value); 146 | return this; 147 | } 148 | 149 | /** 150 | * Set the timestamp of metrics emitted in this context. 151 | * 152 | * If not set, the timestamp will default to new Date() at the point 153 | * the context is constructed. 154 | * 155 | * If set, timestamp will preserved across calls to flush(). 156 | * 157 | * @param timestamp 158 | */ 159 | public setTimestamp(timestamp: Date | number): MetricsLogger { 160 | this.context.setTimestamp(timestamp); 161 | return this; 162 | } 163 | 164 | /** 165 | * Creates a new logger using the same contextual data as 166 | * the previous logger. This allows you to flush the instances 167 | * independently. 
168 | */ 169 | public new(): MetricsLogger { 170 | return new MetricsLogger(this.resolveEnvironment, this.context.createCopyWithContext()); 171 | } 172 | 173 | private configureContextForEnvironment = (context: MetricsContext, environment: IEnvironment): void => { 174 | const defaultDimensions = { 175 | // LogGroup name will entirely depend on the environment since there 176 | // are some cases where the LogGroup cannot be configured (e.g. Lambda) 177 | LogGroup: environment.getLogGroupName(), 178 | ServiceName: Configuration.serviceName || environment.getName(), 179 | ServiceType: Configuration.serviceType || environment.getType(), 180 | }; 181 | context.setDefaultDimensions(defaultDimensions); 182 | environment.configureContext(context); 183 | }; 184 | } 185 | -------------------------------------------------------------------------------- /src/logger/MetricsLoggerFactory.ts: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2019 Amazon.com, Inc. or its affiliates. 3 | * Licensed under the Apache License, Version 2.0 (the 4 | * "License"); you may not use this file except in compliance 5 | * with the License. You may obtain a copy of the License at 6 | * 7 | * http://www.apache.org/licenses/LICENSE-2.0 8 | * 9 | * Unless required by applicable law or agreed to in writing, software 10 | * distributed under the License is distributed on an "AS IS" BASIS, 11 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | * See the License for the specific language governing permissions and 13 | * limitations under the License. 14 | */ 15 | 16 | import { MetricsLogger } from '..'; 17 | import { resolveEnvironment } from '../environment/EnvironmentDetector'; 18 | import { MetricsContext } from './MetricsContext'; 19 | 20 | const createMetricsLogger = (): MetricsLogger => { 21 | const context = MetricsContext.empty(); 22 | return new MetricsLogger(resolveEnvironment, context); 23 | }; 24 | 25 | export { createMetricsLogger }; 26 | -------------------------------------------------------------------------------- /src/logger/StorageResolution.ts: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2019 Amazon.com, Inc. or its affiliates. 3 | * Licensed under the Apache License, Version 2.0 (the 4 | * "License"); you may not use this file except in compliance 5 | * with the License. You may obtain a copy of the License at 6 | * 7 | * http://www.apache.org/licenses/LICENSE-2.0 8 | * 9 | * Unless required by applicable law or agreed to in writing, software 10 | * distributed under the License is distributed on an "AS IS" BASIS, 11 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | * See the License for the specific language governing permissions and 13 | * limitations under the License. 14 | */ 15 | 16 | export enum StorageResolution { 17 | High = 1, 18 | Standard = 60, 19 | } 20 | -------------------------------------------------------------------------------- /src/logger/Unit.ts: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2019 Amazon.com, Inc. or its affiliates. 3 | * Licensed under the Apache License, Version 2.0 (the 4 | * "License"); you may not use this file except in compliance 5 | * with the License. 
You may obtain a copy of the License at 6 | * 7 | * http://www.apache.org/licenses/LICENSE-2.0 8 | * 9 | * Unless required by applicable law or agreed to in writing, software 10 | * distributed under the License is distributed on an "AS IS" BASIS, 11 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | * See the License for the specific language governing permissions and 13 | * limitations under the License. 14 | */ 15 | 16 | export enum Unit { 17 | Seconds = 'Seconds', 18 | Microseconds = 'Microseconds', 19 | Milliseconds = 'Milliseconds', 20 | Bytes = 'Bytes', 21 | Kilobytes = 'Kilobytes', 22 | Megabytes = 'Megabytes', 23 | Gigabytes = 'Gigabytes', 24 | Terabytes = 'Terabytes', 25 | Bits = 'Bits', 26 | Kilobits = 'Kilobits', 27 | Megabits = 'Megabits', 28 | Gigabits = 'Gigabits', 29 | Terabits = 'Terabits', 30 | Percent = 'Percent', 31 | Count = 'Count', 32 | BytesPerSecond = 'Bytes/Second', 33 | KilobytesPerSecond = 'Kilobytes/Second', 34 | MegabytesPerSecond = 'Megabytes/Second', 35 | GigabytesPerSecond = 'Gigabytes/Second', 36 | TerabytesPerSecond = 'Terabytes/Second', 37 | BitsPerSecond = 'Bits/Second', 38 | KilobitsPerSecond = 'Kilobits/Second', 39 | MegabitsPerSecond = 'Megabits/Second', 40 | GigabitsPerSecond = 'Gigabits/Second', 41 | TerabitsPerSecond = 'Terabits/Second', 42 | CountPerSecond = 'Count/Second', 43 | None = 'None', 44 | } 45 | -------------------------------------------------------------------------------- /src/logger/__tests__/MetricScope.test.ts: -------------------------------------------------------------------------------- 1 | import sleep from '../../../test/utils/Sleep'; 2 | import { metricScope } from '../MetricScope'; 3 | import { MetricsLogger } from '../MetricsLogger'; 4 | 5 | const mockEnvironment = jest.fn(); 6 | jest.mock('../../logger/MetricsLoggerFactory', () => { 7 | return { 8 | createMetricsLogger: () => new MetricsLogger(mockEnvironment), 9 | }; 10 | }); 11 | 12 | test('async scope executes handler function', async () => { 13 | // arrange 14 | let wasInvoked = false; 15 | 16 | const handler = metricScope(() => async () => { 17 | await sleep(1); 18 | wasInvoked = true; 19 | }); 20 | 21 | // act 22 | await handler(); 23 | 24 | // assert 25 | expect(wasInvoked).toBe(true); 26 | }); 27 | 28 | test('sync scope executes handler function', async () => { 29 | // arrange 30 | let a = false; 31 | 32 | const handler = metricScope(() => () => { 33 | a = true; 34 | }); 35 | 36 | // act 37 | // the customer can pass in a synchronous function, but we will still return 38 | // an async function back to the Lambda caller 39 | await handler(); 40 | 41 | // assert 42 | expect(a).toBe(true); 43 | }); 44 | 45 | test('async scope passes arguments', async () => { 46 | // arrange 47 | let arg1 = false; 48 | let arg2 = ''; 49 | 50 | const handler = metricScope(() => async (input1: boolean, input2: string) => { 51 | await sleep(1); 52 | arg1 = input1; 53 | arg2 = input2; 54 | }); 55 | 56 | // act 57 | // the customer can pass in a synchronous function, but we will still return 58 | // an async function back to the Lambda caller 59 | await handler(true, 'success'); 60 | 61 | // assert 62 | expect(arg1).toBe(true); 63 | expect(arg2).toBe('success'); 64 | }); 65 | 66 | test('async scope returns child function return value', async () => { 67 | // arrange 68 | const expected = true; 69 | 70 | const handler = metricScope(() => async () => { 71 | return await Promise.resolve(expected); 72 | }); 73 | 74 | // act 75 | // the customer can pass in a 
synchronous function, but we will still return 76 | // an async function back to the Lambda caller 77 | const result = await handler(); 78 | 79 | // assert 80 | expect(result).toBe(expected); 81 | }); 82 | 83 | test('sync scope passes arguments', async () => { 84 | // arrange 85 | let arg1 = false; 86 | let arg2 = ''; 87 | 88 | const handler = metricScope(() => (input1: boolean, input2: string) => { 89 | arg1 = input1; 90 | arg2 = input2; 91 | }); 92 | 93 | // act 94 | // the customer can pass in a synchronous function, but we will still return 95 | // an async function back to the Lambda caller 96 | await handler(true, 'success'); 97 | 98 | // assert 99 | expect(arg1).toBe(true); 100 | expect(arg2).toBe('success'); 101 | }); 102 | 103 | test('sync scope returns child function return value', async () => { 104 | // arrange 105 | const expected = true; 106 | 107 | const handler = metricScope(() => () => { 108 | return expected; 109 | }); 110 | 111 | // act 112 | // the customer can pass in a synchronous function, but we will still return 113 | // an async function back to the Lambda caller 114 | const result = await handler(); 115 | 116 | // assert 117 | expect(result).toBe(expected); 118 | }); 119 | 120 | test('async scope rejects with child function reject value', async () => { 121 | // arrange 122 | const expected = true; 123 | 124 | const handler = metricScope(() => async () => { 125 | return await Promise.reject(expected); 126 | }); 127 | 128 | // act 129 | const result = handler(); 130 | 131 | // assert 132 | await expect(result).rejects.toBe(expected); 133 | }); 134 | 135 | test('sync scope rejects with child function error', async () => { 136 | // arrange 137 | const expected = true; 138 | 139 | const handler = metricScope(() => () => { 140 | throw expected; 141 | }); 142 | 143 | // act 144 | // the customer can pass in a synchronous function, but we will still return 145 | // an async function back to the Lambda caller 146 | const result = handler(); 147 | 148 | // assert 149 | await expect(result).rejects.toBe(expected); 150 | }); 151 | 152 | test('async scope flush is still called when child function rejects', async () => { 153 | // arrange 154 | mockEnvironment.mockReset(); 155 | const handler = metricScope(() => async () => { 156 | return await Promise.reject('error'); 157 | }); 158 | 159 | // act 160 | try { 161 | await handler(); 162 | } catch (e) { 163 | // ignored 164 | } 165 | 166 | // assert 167 | expect(mockEnvironment).toHaveBeenCalled(); 168 | }); 169 | 170 | test('sync scope flush is still called when child function throws', async () => { 171 | // arrange 172 | mockEnvironment.mockReset(); 173 | const handler = metricScope(() => () => { 174 | throw 'error'; 175 | }); 176 | 177 | // act 178 | // the customer can pass in a synchronous function, but we will still return 179 | // an async function back to the Lambda caller 180 | try { 181 | await handler(); 182 | } catch (e) { 183 | // ignored 184 | } 185 | 186 | // assert 187 | expect(mockEnvironment).toHaveBeenCalled(); 188 | }); 189 | -------------------------------------------------------------------------------- /src/logger/__tests__/MetricsLoggerFactory.test.ts: -------------------------------------------------------------------------------- 1 | const createMetricsLogger = () => { 2 | // environment detection happens at module load time which is why 3 | // this needs to be inlined during test execution 4 | // eslint-disable-next-line @typescript-eslint/no-var-requires 5 | const { createMetricsLogger } = 
require('../MetricsLoggerFactory'); 6 | return createMetricsLogger(); 7 | }; 8 | 9 | test('createMetricsLogger() creates a logger', () => { 10 | // arrange 11 | // act 12 | const logger = createMetricsLogger(); 13 | 14 | // assert 15 | expect(logger).toBeTruthy(); 16 | expect(logger.constructor.name).toBe('MetricsLogger'); 17 | }); 18 | -------------------------------------------------------------------------------- /src/serializers/LogSerializer.ts: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2019 Amazon.com, Inc. or its affiliates. 3 | * Licensed under the Apache License, Version 2.0 (the 4 | * "License"); you may not use this file except in compliance 5 | * with the License. You may obtain a copy of the License at 6 | * 7 | * http://www.apache.org/licenses/LICENSE-2.0 8 | * 9 | * Unless required by applicable law or agreed to in writing, software 10 | * distributed under the License is distributed on an "AS IS" BASIS, 11 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | * See the License for the specific language governing permissions and 13 | * limitations under the License. 14 | */ 15 | import { MaxHeap } from '@datastructures-js/heap'; 16 | 17 | import { Constants } from '../Constants'; 18 | import { DimensionSetExceededError } from '../exceptions/DimensionSetExceededError'; 19 | import { MetricsContext } from '../logger/MetricsContext'; 20 | import { StorageResolution } from '../logger/StorageResolution'; 21 | import { ISerializer } from './Serializer'; 22 | 23 | interface MetricProgress { 24 | // Name of the metric 25 | name: string; 26 | // Number of metric values remained to be processed 27 | numLeft: number; 28 | } 29 | 30 | /** 31 | * Serializes the provided context to the CWL Structured 32 | * Logs format with Embedded Metric Filters. 
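 *
 * Each serialized event is a JSON object shaped roughly as follows (names and values are illustrative):
 *
 *   {
 *     "_aws": {
 *       "Timestamp": 1577836800000,
 *       "CloudWatchMetrics": [{
 *         "Namespace": "aws-embedded-metrics",
 *         "Dimensions": [["ServiceName"]],
 *         "Metrics": [{ "Name": "Latency", "Unit": "Milliseconds" }]
 *       }]
 *     },
 *     "ServiceName": "MyService",
 *     "Latency": [12, 15]
 *   }
 *
 * Metrics are batched so that no single event carries more than MAX_METRICS_PER_EVENT metric
 * definitions or more than MAX_VALUES_PER_METRIC values for any one metric.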
33 | */ 34 | export class LogSerializer implements ISerializer { 35 | /** 36 | * Retrieve the current context as a JSON string 37 | */ 38 | public serialize(context: MetricsContext): string[] { 39 | const dimensionKeys: string[][] = []; 40 | let dimensionProperties = {}; 41 | 42 | context.getDimensions().forEach((dimensionSet) => { 43 | const keys = Object.keys(dimensionSet); 44 | 45 | if (keys.length > Constants.MAX_DIMENSION_SET_SIZE) { 46 | const errMsg = 47 | `Maximum number of dimensions allowed are ${Constants.MAX_DIMENSION_SET_SIZE}.` + 48 | `Account for default dimensions if not using set_dimensions.`; 49 | throw new DimensionSetExceededError(errMsg); 50 | } 51 | 52 | dimensionKeys.push(keys); 53 | dimensionProperties = { ...dimensionProperties, ...dimensionSet }; 54 | }); 55 | 56 | // eslint-disable-next-line @typescript-eslint/no-explicit-any 57 | const createBody = (): any => { 58 | return { 59 | ...dimensionProperties, 60 | ...context.properties, 61 | _aws: { 62 | ...context.meta, 63 | CloudWatchMetrics: [ 64 | { 65 | Dimensions: dimensionKeys, 66 | Metrics: [], 67 | Namespace: context.namespace, 68 | }, 69 | ], 70 | }, 71 | }; 72 | }; 73 | 74 | const eventBatches: string[] = []; 75 | // eslint-disable-next-line @typescript-eslint/no-unsafe-assignment 76 | let currentBody = createBody(); 77 | 78 | // eslint-disable-next-line 79 | const currentMetricsInBody = (): number => currentBody._aws.CloudWatchMetrics[0].Metrics.length; 80 | const hasMaxMetrics = (): boolean => currentMetricsInBody() === Constants.MAX_METRICS_PER_EVENT; 81 | 82 | // converts the body to JSON and pushes it into the batches 83 | const serializeCurrentBody = (): void => { 84 | eventBatches.push(JSON.stringify(currentBody)); 85 | // eslint-disable-next-line @typescript-eslint/no-unsafe-assignment 86 | currentBody = createBody(); 87 | }; 88 | 89 | const remainingMetrics = MaxHeap.heapify( 90 | Array.from(context.metrics, ([key, value]) => { 91 | return { name: key, numLeft: value.values.length }; 92 | }), 93 | (metric) => metric.numLeft, 94 | ); 95 | let processedMetrics: MetricProgress[] = []; 96 | 97 | // Batches the metrics with the most number of values first, such that each metric has no more 98 | // than 100 values, and each batch has no more than 100 metric definitions. 99 | while (!remainingMetrics.isEmpty()) { 100 | const metricProgress = remainingMetrics.extractRoot()!; 101 | const metric = context.metrics.get(metricProgress.name); 102 | if (metric) { 103 | const startIndex = metric.values.length - metricProgress.numLeft; 104 | // if there is only one metric value, unwrap it to make querying easier 105 | const metricValue = 106 | metricProgress.numLeft === 1 107 | ? metric.values[startIndex] 108 | : // eslint-disable-next-line @typescript-eslint/no-unsafe-member-access 109 | metric.values.slice(startIndex, startIndex + (Constants.MAX_VALUES_PER_METRIC as number)); 110 | // eslint-disable-next-line @typescript-eslint/no-unsafe-member-access 111 | currentBody[metricProgress.name] = metricValue; 112 | const metricBody: { [key: string]: any } = { 113 | Name: metricProgress.name, 114 | Unit: metric.unit, 115 | ...(metric.storageResolution == StorageResolution.High ? 
{ StorageResolution: StorageResolution.High } : {}), 116 | }; 117 | // eslint-disable-next-line 118 | currentBody._aws.CloudWatchMetrics[0].Metrics.push(metricBody); 119 | metricProgress.numLeft -= Constants.MAX_VALUES_PER_METRIC; 120 | if (metricProgress.numLeft > 0) { 121 | processedMetrics.push(metricProgress); 122 | } 123 | 124 | if (hasMaxMetrics() || remainingMetrics.isEmpty()) { 125 | serializeCurrentBody(); 126 | // inserts these metrics back in the heap to be processed in the next iteration. 127 | processedMetrics.forEach((processingMetric) => remainingMetrics.insert(processingMetric)); 128 | processedMetrics = []; 129 | } 130 | } 131 | } 132 | 133 | if (eventBatches.length === 0 || currentMetricsInBody() > 0) { 134 | serializeCurrentBody(); 135 | } 136 | 137 | return eventBatches; 138 | } 139 | } 140 | -------------------------------------------------------------------------------- /src/serializers/Serializer.ts: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2019 Amazon.com, Inc. or its affiliates. 3 | * Licensed under the Apache License, Version 2.0 (the 4 | * "License"); you may not use this file except in compliance 5 | * with the License. You may obtain a copy of the License at 6 | * 7 | * http://www.apache.org/licenses/LICENSE-2.0 8 | * 9 | * Unless required by applicable law or agreed to in writing, software 10 | * distributed under the License is distributed on an "AS IS" BASIS, 11 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | * See the License for the specific language governing permissions and 13 | * limitations under the License. 14 | */ 15 | 16 | import { MetricsContext } from '../logger/MetricsContext'; 17 | 18 | export interface ISerializer { 19 | /** 20 | * Serialize the provided metrics context to a JSON string. 21 | * 22 | * @param context The MetricsContext 23 | */ 24 | serialize(context: MetricsContext): string[]; 25 | } 26 | -------------------------------------------------------------------------------- /src/serializers/SerializerFactory.ts: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2019 Amazon.com, Inc. or its affiliates. 3 | * Licensed under the Apache License, Version 2.0 (the 4 | * "License"); you may not use this file except in compliance 5 | * with the License. You may obtain a copy of the License at 6 | * 7 | * http://www.apache.org/licenses/LICENSE-2.0 8 | * 9 | * Unless required by applicable law or agreed to in writing, software 10 | * distributed under the License is distributed on an "AS IS" BASIS, 11 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | * See the License for the specific language governing permissions and 13 | * limitations under the License. 
14 | */ 15 | 16 | import { LogSerializer } from './LogSerializer'; 17 | 18 | enum FormatVersion { 19 | ZERO = '0', 20 | } 21 | 22 | const createSerializer = (version: FormatVersion): LogSerializer => { 23 | switch (version) { 24 | case FormatVersion.ZERO: 25 | return new LogSerializer(); 26 | default: 27 | throw new Error(`Unsupported version.`); 28 | } 29 | }; 30 | 31 | export default { createSerializer, FormatVersion }; 32 | -------------------------------------------------------------------------------- /src/serializers/__tests__/LogSerializer.test.ts: -------------------------------------------------------------------------------- 1 | import { faker } from '@faker-js/faker'; 2 | import { Constants } from '../../Constants'; 3 | import { MetricsContext } from '../../logger/MetricsContext'; 4 | import { LogSerializer } from '../LogSerializer'; 5 | import { DimensionSetExceededError } from '../../exceptions/DimensionSetExceededError'; 6 | import { Unit, StorageResolution } from '../..'; 7 | 8 | test('serializes dimensions', () => { 9 | // arrange 10 | const expectedKey = faker.random.word(); 11 | const expectedValue = faker.random.word(); 12 | const dimensions: any = {}; 13 | dimensions[expectedKey] = expectedValue; 14 | 15 | const expected: any = { ...getEmptyPayload(), ...dimensions }; 16 | expected._aws.CloudWatchMetrics[0].Dimensions.push([expectedKey]); 17 | 18 | const context = getContext(); 19 | context.putDimensions(dimensions); 20 | // act 21 | const resultJson = serializer.serialize(context)[0]; 22 | 23 | // assert 24 | assertJsonEquality(resultJson, expected); 25 | }); 26 | 27 | test('serializes properties', () => { 28 | // arrange 29 | const expectedKey = faker.random.word(); 30 | const expectedValue = faker.random.word(); 31 | 32 | const expected: any = { ...getEmptyPayload() }; 33 | expected[expectedKey] = expectedValue; 34 | 35 | const context = getContext(); 36 | context.setProperty(expectedKey, expectedValue); 37 | 38 | // act 39 | const resultJson = serializer.serialize(context)[0]; 40 | 41 | // assert 42 | assertJsonEquality(resultJson, expected); 43 | }); 44 | 45 | test('serializes metrics with single datapoint', () => { 46 | // arrange 47 | const expectedKey = faker.random.word(); 48 | const expectedValue = faker.datatype.number(); 49 | const expectedMetricDefinition = { 50 | Name: expectedKey, 51 | Unit: 'None', 52 | }; 53 | const expected: any = { ...getEmptyPayload() }; 54 | expected[expectedKey] = expectedValue; 55 | expected._aws.CloudWatchMetrics[0].Metrics.push(expectedMetricDefinition); 56 | 57 | const context = getContext(); 58 | context.putMetric(expectedKey, expectedValue); 59 | 60 | // act 61 | const resultJson = serializer.serialize(context)[0]; 62 | 63 | // assert 64 | assertJsonEquality(resultJson, expected); 65 | }); 66 | 67 | test('serializes metrics with multiple datapoints', () => { 68 | // arrange 69 | const expectedKey = faker.random.word(); 70 | const expectedValues = [faker.datatype.number(), faker.datatype.number()]; 71 | const expectedMetricDefinition = { 72 | Name: expectedKey, 73 | Unit: 'None', 74 | }; 75 | const expected: any = { ...getEmptyPayload() }; 76 | expected[expectedKey] = expectedValues; 77 | expected._aws.CloudWatchMetrics[0].Metrics.push(expectedMetricDefinition); 78 | 79 | const context = getContext(); 80 | context.putMetric(expectedKey, expectedValues[0]); 81 | context.putMetric(expectedKey, expectedValues[1]); 82 | 83 | // act 84 | const resultJson = serializer.serialize(context)[0]; 85 | 86 | // assert 87 | 
assertJsonEquality(resultJson, expected); 88 | }); 89 | 90 | test('serialize high resolution metrics', () => { 91 | // arrange 92 | const expectedKey = faker.random.word(); 93 | const expectedValue = faker.datatype.number(); 94 | const expectedUnit = Unit.Bits; 95 | const expectedStorageResolution = StorageResolution.High; 96 | const expectedMetricDefinition = { 97 | Name: expectedKey, 98 | Unit: 'Bits', 99 | StorageResolution: 1, 100 | }; 101 | const expected: any = { ...getEmptyPayload() }; 102 | expected[expectedKey] = expectedValue; 103 | expected._aws.CloudWatchMetrics[0].Metrics.push(expectedMetricDefinition); 104 | 105 | const context = getContext(); 106 | context.putMetric(expectedKey, expectedValue, expectedUnit, expectedStorageResolution); 107 | 108 | // act 109 | const resultJson = serializer.serialize(context)[0]; 110 | 111 | // assert 112 | assertJsonEquality(resultJson, expected); 113 | }); 114 | 115 | test('serialize standard resolution metrics', () => { 116 | // arrange 117 | const expectedKey = faker.random.word(); 118 | const expectedValue = faker.datatype.number(); 119 | const expectedUnit = Unit.Bits; 120 | const expectedStorageResolution = StorageResolution.Standard; 121 | const expectedMetricDefinition = { 122 | Name: expectedKey, 123 | Unit: 'Bits', 124 | }; 125 | const expected: any = { ...getEmptyPayload() }; 126 | expected[expectedKey] = expectedValue; 127 | expected._aws.CloudWatchMetrics[0].Metrics.push(expectedMetricDefinition); 128 | 129 | const context = getContext(); 130 | context.putMetric(expectedKey, expectedValue, expectedUnit, expectedStorageResolution); 131 | 132 | // act 133 | const resultJson = serializer.serialize(context)[0]; 134 | 135 | // assert 136 | assertJsonEquality(resultJson, expected); 137 | }); 138 | 139 | test('serializes more than 100 metrics into multiple events', () => { 140 | // arrange 141 | const expectedValue = 1; 142 | const metrics = 275; 143 | const expectedBatches = 3; 144 | 145 | const context = getContext(); 146 | for (let index = 0; index < metrics; index++) { 147 | const expectedKey = `Metric-${index}`; 148 | context.putMetric(expectedKey, expectedValue); 149 | } 150 | 151 | // act 152 | const results = serializer.serialize(context); 153 | 154 | // assert 155 | const resultObjs = results.map(resultJson => JSON.parse(resultJson)); 156 | expect(resultObjs.length).toBe(expectedBatches); 157 | for (let batchIndex = 0; batchIndex < expectedBatches; batchIndex++) { 158 | const expectedMetricCount = batchIndex === expectedBatches - 1 ? 
metrics % 100 : 100; 159 | 160 | const resultObj = resultObjs[batchIndex]; 161 | expect(resultObj._aws.CloudWatchMetrics[0].Metrics.length).toBe(expectedMetricCount); 162 | } 163 | 164 | const mergedResult = Object.assign({}, ...resultObjs); 165 | for (let index = 0; index < metrics; index++) { 166 | expect(mergedResult[`Metric-${index}`]).toBe(expectedValue); 167 | } 168 | }); 169 | 170 | test('serializes metrics with more than 100 values each into multiple events', () => { 171 | // arrange 172 | const metrics = 128; 173 | const valuesMultiplier = 3; 174 | const expectedBatches = Math.max( 175 | Math.ceil(metrics / Constants.MAX_METRICS_PER_EVENT), 176 | Math.ceil((metrics * valuesMultiplier) / Constants.MAX_VALUES_PER_METRIC), 177 | ); 178 | 179 | const context = getContext(); 180 | for (let i = 1; i <= metrics; i++) { 181 | const expectedKey = `Metric-${i}`; 182 | for (let j = 0; j < i * valuesMultiplier; j++) { 183 | context.putMetric(expectedKey, j); 184 | } 185 | } 186 | 187 | // act 188 | const results = serializer.serialize(context); 189 | 190 | // assert 191 | const resultObjs = results.map(resultJson => JSON.parse(resultJson)); 192 | expect(resultObjs.length).toBe(expectedBatches); 193 | for (const resultObj of resultObjs) { 194 | expect(resultObj._aws.CloudWatchMetrics[0].Metrics.length).toBeLessThanOrEqual(Constants.MAX_METRICS_PER_EVENT); 195 | } 196 | 197 | for (let index = 1; index <= metrics; index++) { 198 | let metricValues: number[] = []; 199 | for (const resultObj of resultObjs) { 200 | const metricValue = resultObj[`Metric-${index}`]; 201 | if (metricValue) { 202 | if (Array.isArray(metricValue)) { 203 | expect(metricValue.length).toBeLessThanOrEqual(Constants.MAX_VALUES_PER_METRIC); 204 | metricValues = metricValues.concat(metricValue); 205 | } else { 206 | metricValues.push(metricValue); 207 | } 208 | } 209 | } 210 | expect(metricValues.sort()).toEqual(Array.from({ length: index * valuesMultiplier }, (v, i) => i).sort()); 211 | } 212 | }); 213 | 214 | test('cannot serialize more than 30 dimensions', () => { 215 | // arrange 216 | const context = MetricsContext.empty(); 217 | const defaultDimensionKey = faker.random.word(); 218 | const defaultDimensionValue = faker.random.word(); 219 | const numOfCustomDimensions = 30; 220 | const dimensionSet: Record = {}; 221 | 222 | for (let i = 0; i < numOfCustomDimensions; i++) { 223 | const expectedKey = `${i}`; 224 | dimensionSet[expectedKey] = faker.random.word(); 225 | } 226 | 227 | // act 228 | context.setDefaultDimensions({ [defaultDimensionKey]: defaultDimensionValue }); 229 | context.putDimensions(dimensionSet); 230 | 231 | // assert 232 | expect(() => { 233 | serializer.serialize(context); 234 | }).toThrow(DimensionSetExceededError); 235 | }); 236 | 237 | const assertJsonEquality = (resultJson: string, expectedObj: any) => { 238 | const actual = JSON.parse(resultJson); 239 | expect(actual).toStrictEqual(expectedObj); 240 | }; 241 | 242 | const getEmptyPayload = () => { 243 | return Object.assign( 244 | {}, 245 | { 246 | _aws: { 247 | CloudWatchMetrics: [ 248 | { 249 | Dimensions: [], 250 | Metrics: [], 251 | Namespace: 'aws-embedded-metrics', 252 | }, 253 | ], 254 | Timestamp: 0, 255 | }, 256 | }, 257 | ); 258 | }; 259 | 260 | const serializer = new LogSerializer(); 261 | const getContext = () => { 262 | const context = MetricsContext.empty(); 263 | context.meta.Timestamp = 0; 264 | return context; 265 | }; 266 | -------------------------------------------------------------------------------- /src/sinks/AgentSink.ts: 
-------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2019 Amazon.com, Inc. or its affiliates. 3 | * Licensed under the Apache License, Version 2.0 (the 4 | * "License"); you may not use this file except in compliance 5 | * with the License. You may obtain a copy of the License at 6 | * 7 | * http://www.apache.org/licenses/LICENSE-2.0 8 | * 9 | * Unless required by applicable law or agreed to in writing, software 10 | * distributed under the License is distributed on an "AS IS" BASIS, 11 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | * See the License for the specific language governing permissions and 13 | * limitations under the License. 14 | */ 15 | 16 | import url = require('url'); 17 | 18 | import Configuration from '../config/Configuration'; 19 | import { MetricsContext } from '../logger/MetricsContext'; 20 | import { LogSerializer } from '../serializers/LogSerializer'; 21 | import { ISerializer } from '../serializers/Serializer'; 22 | import { LOG } from '../utils/Logger'; 23 | import { IEndpoint } from './connections/IEndpoint'; 24 | import { ISocketClient } from './connections/ISocketClient'; 25 | import { TcpClient } from './connections/TcpClient'; 26 | import { UdpClient } from './connections/UdpClient'; 27 | import { ISink } from './Sink'; 28 | 29 | const TCP = 'tcp:'; 30 | const UDP = 'udp:'; 31 | 32 | const defaultTcpEndpoint = { 33 | host: '0.0.0.0', 34 | port: 25888, 35 | protocol: TCP, 36 | }; 37 | 38 | const parseEndpoint = (endpoint: string | undefined): IEndpoint => { 39 | try { 40 | if (!endpoint) { 41 | return defaultTcpEndpoint; 42 | } 43 | 44 | const parsedUrl = url.parse(endpoint); 45 | if (!parsedUrl.hostname || !parsedUrl.port || !parsedUrl.protocol) { 46 | LOG(`Failed to parse the provided agent endpoint. Falling back to the default TCP endpoint.`, parsedUrl); 47 | return defaultTcpEndpoint; 48 | } 49 | 50 | if (parsedUrl.protocol !== TCP && parsedUrl.protocol !== UDP) { 51 | LOG( 52 | `The provided agent endpoint protocol '${parsedUrl.protocol}' is not supported. Please use TCP or UDP. Falling back to the default TCP endpoint.`, 53 | parsedUrl, 54 | ); 55 | return defaultTcpEndpoint; 56 | } 57 | 58 | return { 59 | host: parsedUrl.hostname, 60 | port: Number(parsedUrl.port), 61 | protocol: parsedUrl.protocol, 62 | }; 63 | } catch (e) { 64 | LOG('Failed to parse the provided agent endpoint', e); 65 | return defaultTcpEndpoint; 66 | } 67 | }; 68 | 69 | /** 70 | * A sink that flushes to the CW Agent. 71 | * This sink instance should be re-used to avoid 72 | * leaking connections. 
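 *
 * A minimal usage sketch (the log group name and metric name below are illustrative placeholders,
 * not values from this repository):
 * @example
 * const sink = new AgentSink('MyServiceLogs');
 * const context = MetricsContext.empty();
 * context.putMetric('Latency', 42);
 * await sink.accept(context); // serializes the context and writes the EMF events to the agent socket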
73 | */ 74 | export class AgentSink implements ISink { 75 | public readonly name: string = 'AgentSink'; 76 | private readonly serializer: ISerializer; 77 | private readonly endpoint: IEndpoint; 78 | private readonly logGroupName: string; 79 | private readonly logStreamName: string | undefined; 80 | private readonly socketClient: ISocketClient; 81 | 82 | constructor(logGroupName: string, logStreamName?: string, serializer?: ISerializer) { 83 | this.logGroupName = logGroupName; 84 | this.logStreamName = logStreamName; 85 | this.serializer = serializer || new LogSerializer(); 86 | this.endpoint = parseEndpoint(Configuration.agentEndpoint); 87 | this.socketClient = this.getSocketClient(this.endpoint); 88 | LOG('Using socket client', this.socketClient.constructor.name); 89 | } 90 | 91 | public async accept(context: MetricsContext): Promise { 92 | if (this.logGroupName) { 93 | context.meta.LogGroupName = this.logGroupName; 94 | } 95 | 96 | if (this.logStreamName) { 97 | context.meta.LogStreamName = this.logStreamName; 98 | } 99 | 100 | const events = this.serializer.serialize(context); 101 | 102 | LOG(`Sending {} events to socket.`, events.length); 103 | 104 | for (let index = 0; index < events.length; index++) { 105 | const event = events[index]; 106 | const message = event + '\n'; 107 | const bytes = Buffer.from(message); 108 | await this.socketClient.sendMessage(bytes); 109 | } 110 | } 111 | 112 | private getSocketClient(endpoint: IEndpoint): ISocketClient { 113 | LOG('Getting socket client for connection.', endpoint); 114 | const client = endpoint.protocol === TCP ? new TcpClient(endpoint) : new UdpClient(endpoint); 115 | // eslint-disable-next-line @typescript-eslint/no-floating-promises 116 | client.warmup(); 117 | return client; 118 | } 119 | } 120 | -------------------------------------------------------------------------------- /src/sinks/ConsoleSink.ts: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2019 Amazon.com, Inc. or its affiliates. 3 | * Licensed under the Apache License, Version 2.0 (the 4 | * "License"); you may not use this file except in compliance 5 | * with the License. You may obtain a copy of the License at 6 | * 7 | * http://www.apache.org/licenses/LICENSE-2.0 8 | * 9 | * Unless required by applicable law or agreed to in writing, software 10 | * distributed under the License is distributed on an "AS IS" BASIS, 11 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | * See the License for the specific language governing permissions and 13 | * limitations under the License. 14 | */ 15 | 16 | import { Console } from 'console'; 17 | import { MetricsContext } from '../logger/MetricsContext'; 18 | import { LogSerializer } from '../serializers/LogSerializer'; 19 | import { ISerializer } from '../serializers/Serializer'; 20 | import { ISink } from './Sink'; 21 | 22 | /** 23 | * A sink that flushes log data to stdout. 24 | * This is the preferred sink for Lambda functions. 
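 *
 * A minimal usage sketch (the metric name and value are illustrative):
 * @example
 * const sink = new ConsoleSink();
 * const context = MetricsContext.empty();
 * context.putMetric('Invocations', 1);
 * await sink.accept(context); // each serialized EMF event is written to stdout via console.log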
25 | */ 26 | export class ConsoleSink implements ISink { 27 | public readonly name: string = 'ConsoleSink'; 28 | 29 | private serializer: ISerializer; 30 | public readonly console: Console; 31 | private static readonly AWS_LAMBDA_LOG_FORMAT = 'AWS_LAMBDA_LOG_FORMAT'; 32 | 33 | constructor(serializer?: ISerializer) { 34 | this.serializer = serializer || new LogSerializer(); 35 | 36 | // To avoid escaping EMF when using Lambda JSON log format we need to use Console() instead of console 37 | this.console = 38 | process.env[ConsoleSink.AWS_LAMBDA_LOG_FORMAT] === 'JSON' ? new Console(process.stdout, process.stderr) : console; 39 | } 40 | 41 | public accept(context: MetricsContext): Promise { 42 | // tslint:disable-next-line 43 | const events = this.serializer.serialize(context); 44 | events.forEach((event) => this.console.log(event)); 45 | return Promise.resolve(); 46 | } 47 | } 48 | -------------------------------------------------------------------------------- /src/sinks/Sink.ts: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2019 Amazon.com, Inc. or its affiliates. 3 | * Licensed under the Apache License, Version 2.0 (the 4 | * "License"); you may not use this file except in compliance 5 | * with the License. You may obtain a copy of the License at 6 | * 7 | * http://www.apache.org/licenses/LICENSE-2.0 8 | * 9 | * Unless required by applicable law or agreed to in writing, software 10 | * distributed under the License is distributed on an "AS IS" BASIS, 11 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | * See the License for the specific language governing permissions and 13 | * limitations under the License. 14 | */ 15 | 16 | import { MetricsContext } from '../logger/MetricsContext'; 17 | 18 | /** 19 | * An interface used to emit metric logs. 20 | */ 21 | export interface ISink { 22 | /** 23 | * The name of the sink. 24 | */ 25 | readonly name: string; 26 | 27 | /** 28 | * Flushes the metrics context to the sink. 
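 * Implementations may write synchronously (e.g. ConsoleSink) or over a socket (e.g. AgentSink).
 * A minimal in-memory implementation, sketched after the TestSink used in this repository's tests
 * (the class name here is illustrative):
 * @example
 * class InMemorySink implements ISink {
 *   public readonly name: string = 'InMemorySink';
 *   public events: MetricsContext[] = [];
 *   public accept(context: MetricsContext): Promise<void> {
 *     this.events.push(context);
 *     return Promise.resolve();
 *   }
 * }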
29 | * @param context 30 | */ 31 | accept(context: MetricsContext): Promise; 32 | } 33 | -------------------------------------------------------------------------------- /src/sinks/__tests__/AgentSink.test.ts: -------------------------------------------------------------------------------- 1 | import { faker } from '@faker-js/faker'; 2 | import Configuration from '../../config/Configuration'; 3 | import { MetricsContext } from '../../logger/MetricsContext'; 4 | import { AgentSink } from '../AgentSink'; 5 | import { TcpClient } from '../connections/TcpClient'; 6 | 7 | test('default endpoint is tcp', () => { 8 | // arrange 9 | const logGroupName = faker.random.word(); 10 | 11 | // act 12 | const sink = new AgentSink(logGroupName); 13 | 14 | // assert 15 | // @ts-ignore 16 | expect(sink.endpoint.protocol).toBe('tcp:'); 17 | // @ts-ignore 18 | expect(sink.endpoint.host).toBe('0.0.0.0'); 19 | // @ts-ignore 20 | expect(sink.endpoint.port).toBe(25888); 21 | }); 22 | 23 | test('can parse udp endpoints', () => { 24 | // arrange 25 | Configuration.agentEndpoint = 'udp://127.0.0.1:1000'; 26 | const logGroupName = faker.random.word(); 27 | 28 | // act 29 | const sink = new AgentSink(logGroupName); 30 | 31 | // assert 32 | // @ts-ignore 33 | expect(sink.endpoint.protocol).toBe('udp:'); 34 | // @ts-ignore 35 | expect(sink.endpoint.host).toBe('127.0.0.1'); 36 | // @ts-ignore 37 | expect(sink.endpoint.port).toBe(1000); 38 | }); 39 | 40 | test('handles tcp connection error', async () => { 41 | // arrange 42 | const noProcessPort = 6553; 43 | Configuration.agentEndpoint = `tcp://127.0.0.1:${noProcessPort}`; 44 | const context = MetricsContext.empty(); 45 | const logGroupName = faker.random.word(); 46 | const sink = new AgentSink(logGroupName); 47 | 48 | // assert 49 | await expect(sink.accept(context)).rejects.toThrowError(/ECONNREFUSED/); 50 | }); 51 | 52 | test('LogGroup is stored in metadata', async () => { 53 | // arrange 54 | const expectedLogGroup = faker.random.alphaNumeric(); 55 | const context = MetricsContext.empty(); 56 | 57 | let receivedMessage: any = {}; 58 | TcpClient.prototype.sendMessage = (message: Buffer): Promise => { 59 | receivedMessage = JSON.parse(message.toString('utf8')); 60 | return Promise.resolve(); 61 | }; 62 | 63 | const sink = new AgentSink(expectedLogGroup); 64 | 65 | // act 66 | await sink.accept(context); 67 | 68 | // assert 69 | expect(receivedMessage._aws.LogGroupName).toBe(expectedLogGroup); 70 | }); 71 | 72 | test('empty LogGroup is not stored in metadata', async () => { 73 | // arrange 74 | const context = MetricsContext.empty(); 75 | 76 | let receivedMessage: any = {}; 77 | TcpClient.prototype.sendMessage = (message: Buffer): Promise => { 78 | receivedMessage = JSON.parse(message.toString('utf8')); 79 | return Promise.resolve(); 80 | }; 81 | 82 | const sink = new AgentSink(''); 83 | 84 | // act 85 | await sink.accept(context); 86 | 87 | // assert 88 | expect(receivedMessage._aws.LogGroupName).toBe(undefined); 89 | }); 90 | 91 | test('more than max number of metrics will send multiple messages', async () => { 92 | // arrange 93 | const context = MetricsContext.empty(); 94 | const expectedMetrics = 101; 95 | const expectedMessages = 2; 96 | for (let index = 0; index < expectedMetrics; index++) { 97 | context.putMetric(`${index}`, 1); 98 | } 99 | 100 | let sentMessages = 0; 101 | TcpClient.prototype.sendMessage = (): Promise => { 102 | sentMessages++; 103 | return Promise.resolve(); 104 | }; 105 | 106 | const sink = new AgentSink(''); 107 | 108 | // act 109 | await 
sink.accept(context); 110 | 111 | // assert 112 | expect(sentMessages).toBe(expectedMessages); 113 | }); 114 | -------------------------------------------------------------------------------- /src/sinks/__tests__/ConsoleSink.test.ts: -------------------------------------------------------------------------------- 1 | import { faker } from '@faker-js/faker'; 2 | import { MetricsContext } from '../../logger/MetricsContext'; 3 | import { ConsoleSink } from '../ConsoleSink'; 4 | 5 | beforeEach(() => { 6 | console.log = jest.fn(); 7 | }); 8 | 9 | afterEach(() => { 10 | delete process.env.AWS_LAMBDA_LOG_FORMAT; 11 | }); 12 | 13 | test('accept serializes and writes result to stdout', () => { 14 | // arrange 15 | const expected = faker.random.alphaNumeric(20); 16 | const serializer: any = { 17 | serialize: jest.fn(() => [expected]), 18 | }; 19 | 20 | const sink = new ConsoleSink(serializer); 21 | 22 | // act 23 | sink.accept(MetricsContext.empty()); 24 | 25 | // assert 26 | expect(sink.console).toBe(console); 27 | expect(console.log).toBeCalledWith(expected); 28 | }); 29 | 30 | test('accept serialized and writes result to stdout when lambda log format is JSON', () => { 31 | // arrange 32 | process.env.AWS_LAMBDA_LOG_FORMAT = 'JSON'; 33 | const expected = faker.random.alphaNumeric(20); 34 | const serializer: any = { 35 | serialize: jest.fn(() => [expected]), 36 | }; 37 | 38 | const sink = new ConsoleSink(serializer); 39 | const spy = jest.spyOn(sink.console, 'log'); 40 | 41 | // act 42 | sink.accept(MetricsContext.empty()); 43 | 44 | // assert 45 | expect(sink.console).not.toBe(console); 46 | expect(spy).toBeCalledWith(expected); 47 | }); 48 | 49 | test('accept writes multiple messages to stdout', () => { 50 | // arrange 51 | const expectedMessages = faker.datatype.number({ min: 2, max: 100 }); 52 | const expected = new Array(expectedMessages).fill(null).map(() => faker.random.alphaNumeric(20)); 53 | const serializer: any = { 54 | serialize: jest.fn(() => expected), 55 | }; 56 | 57 | const sink = new ConsoleSink(serializer); 58 | 59 | // act 60 | sink.accept(MetricsContext.empty()); 61 | 62 | // assert 63 | expect(sink.console).toBe(console); 64 | expect(console.log).toBeCalledTimes(expectedMessages); 65 | expected.forEach((e) => expect(console.log).toBeCalledWith(e)); 66 | }); 67 | 68 | test('accept writes multiple messages to stdout when lambda log format is JSON', () => { 69 | // arrange 70 | process.env.AWS_LAMBDA_LOG_FORMAT = 'JSON'; 71 | const expectedMessages = faker.datatype.number({ min: 2, max: 100 }); 72 | const expected = new Array(expectedMessages).fill(null).map(() => faker.random.alphaNumeric(20)); 73 | const serializer: any = { 74 | serialize: jest.fn(() => expected), 75 | }; 76 | 77 | const sink = new ConsoleSink(serializer); 78 | const spy = jest.spyOn(sink.console, 'log'); 79 | 80 | // act 81 | sink.accept(MetricsContext.empty()); 82 | 83 | // assert 84 | expect(sink.console).not.toBe(console); 85 | expect(spy).toBeCalledTimes(expectedMessages); 86 | expected.forEach((e) => expect(spy).toBeCalledWith(e)); 87 | }); 88 | -------------------------------------------------------------------------------- /src/sinks/__tests__/TcpClient.test.ts: -------------------------------------------------------------------------------- 1 | import { faker } from '@faker-js/faker'; 2 | import { TcpClient } from '../connections/TcpClient'; 3 | import sleep from '../../../test/utils/Sleep'; 4 | import net = require('net'); 5 | 6 | test('handles tcp client errors', async () => { 7 | // arrange 8 | const 
client = new TcpClient({ 9 | host: '0.0.0.0', 10 | port: 65535, 11 | protocol: 'tcp', 12 | }); 13 | 14 | // assert 15 | return expect(client.sendMessage(Buffer.from([]))).rejects.toThrowError(/ECONNREFUSED/); 16 | }); 17 | 18 | test('handles server disconnect', async () => { 19 | // arrange 20 | const port = 9999; 21 | const successSends = faker.datatype.number({ min: 1, max: 100 }); 22 | const failedSends = faker.datatype.number({ min: 1, max: 100 }); 23 | const successSendsReconnect = faker.datatype.number({ min: 1, max: 100 }); 24 | 25 | let receivedMessages = 0; 26 | const server = net.createServer(socket => socket.on('data', () => receivedMessages++)).listen(port, '0.0.0.0'); 27 | 28 | const client = new TcpClient({ host: '0.0.0.0', port: port, protocol: 'tcp' }); 29 | const killServer = async () => { 30 | await new Promise(resolve => server.close(resolve)); 31 | server.unref(); 32 | }; 33 | 34 | let failedCount = 0; 35 | const sendMessages = async (count: number) => { 36 | for (let index = 0; index < count; index++) { 37 | try { 38 | await client.sendMessage(Buffer.from('test\n')); 39 | // allow kernel + server time to get request 40 | await sleep(20); 41 | } catch (_) { 42 | failedCount++; 43 | } 44 | } 45 | }; 46 | 47 | // act 48 | await sendMessages(successSends); 49 | await killServer(); 50 | await sendMessages(failedSends); 51 | server.listen(port, '0.0.0.0'); 52 | await sendMessages(successSendsReconnect); 53 | 54 | // assert 55 | expect(failedCount).toBe(failedSends); 56 | expect(receivedMessages).toBe(successSends + successSendsReconnect); 57 | 58 | // cleanup 59 | // @ts-ignore 60 | client.disconnect('cleanup'); 61 | await killServer(); 62 | }, 10000); 63 | 64 | test('does not leak event listeners on failed sends', async () => { 65 | // arrange 66 | const runCount = 100; 67 | const client = new TcpClient({ 68 | host: '0.0.0.0', 69 | port: 65535, 70 | protocol: 'tcp', 71 | }); 72 | 73 | // act 74 | let failedCount = 0; 75 | for (let index = 0; index < runCount; index++) { 76 | try { 77 | await client.sendMessage(Buffer.from([])); 78 | } catch (_) { 79 | failedCount++; 80 | } 81 | } 82 | 83 | // assert 84 | expect(failedCount).toBe(runCount); 85 | 86 | // @ts-ignore 87 | const socket = client.socket; 88 | 89 | expect(socket.listeners('error').length).toBe(0); 90 | expect(socket.listeners('connect').length).toBe(1); 91 | expect(socket.listeners('timeout').length).toBe(0); 92 | }); 93 | 94 | test('does not leak event listeners on successful sends', async () => { 95 | // arrange 96 | const port = 9999; 97 | const server = net.createServer(socket => socket.pipe(socket)).listen(port, '0.0.0.0'); 98 | const client = new TcpClient({ 99 | host: '0.0.0.0', 100 | port: port, 101 | protocol: 'tcp', 102 | }); 103 | 104 | // act 105 | for (let index = 0; index < 100; index++) { 106 | await client.sendMessage(Buffer.from([])); 107 | } 108 | 109 | server.close(); 110 | server.unref(); 111 | 112 | // assert 113 | // @ts-ignore 114 | const socket = client.socket; 115 | 116 | expect(socket.listeners('error').length).toBe(0); 117 | expect(socket.listeners('connect').length).toBe(0); 118 | expect(socket.listeners('timeout').length).toBe(1); 119 | }); 120 | -------------------------------------------------------------------------------- /src/sinks/connections/IEndpoint.ts: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2019 Amazon.com, Inc. or its affiliates. 
3 | * Licensed under the Apache License, Version 2.0 (the 4 | * "License"); you may not use this file except in compliance 5 | * with the License. You may obtain a copy of the License at 6 | * 7 | * http://www.apache.org/licenses/LICENSE-2.0 8 | * 9 | * Unless required by applicable law or agreed to in writing, software 10 | * distributed under the License is distributed on an "AS IS" BASIS, 11 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | * See the License for the specific language governing permissions and 13 | * limitations under the License. 14 | */ 15 | 16 | export interface IEndpoint { 17 | host: string; 18 | port: number; 19 | protocol: string; 20 | } 21 | -------------------------------------------------------------------------------- /src/sinks/connections/ISocketClient.ts: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2019 Amazon.com, Inc. or its affiliates. 3 | * Licensed under the Apache License, Version 2.0 (the 4 | * "License"); you may not use this file except in compliance 5 | * with the License. You may obtain a copy of the License at 6 | * 7 | * http://www.apache.org/licenses/LICENSE-2.0 8 | * 9 | * Unless required by applicable law or agreed to in writing, software 10 | * distributed under the License is distributed on an "AS IS" BASIS, 11 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | * See the License for the specific language governing permissions and 13 | * limitations under the License. 14 | */ 15 | 16 | export interface ISocketClient { 17 | warmup(): Promise; 18 | sendMessage(message: Buffer): Promise; 19 | } 20 | -------------------------------------------------------------------------------- /src/sinks/connections/TcpClient.ts: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2019 Amazon.com, Inc. or its affiliates. 3 | * Licensed under the Apache License, Version 2.0 (the 4 | * "License"); you may not use this file except in compliance 5 | * with the License. You may obtain a copy of the License at 6 | * 7 | * http://www.apache.org/licenses/LICENSE-2.0 8 | * 9 | * Unless required by applicable law or agreed to in writing, software 10 | * distributed under the License is distributed on an "AS IS" BASIS, 11 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | * See the License for the specific language governing permissions and 13 | * limitations under the License. 14 | */ 15 | 16 | import net = require('net'); 17 | import { LOG } from '../../utils/Logger'; 18 | import { IEndpoint } from './IEndpoint'; 19 | import { ISocketClient } from './ISocketClient'; 20 | 21 | export class TcpClient implements ISocketClient { 22 | private endpoint: IEndpoint; 23 | private socket: net.Socket; 24 | 25 | constructor(endpoint: IEndpoint) { 26 | this.endpoint = endpoint; 27 | this.socket = new net.Socket({ allowHalfOpen: true }) 28 | .setEncoding('utf8') 29 | .setKeepAlive(true) 30 | .setTimeout(5000) // idle timeout 31 | .on('timeout', () => this.disconnect('idle timeout')) 32 | .on('end', () => this.disconnect('end')) 33 | .on('data', data => LOG('TcpClient received data.', data)); 34 | 35 | // Used to create an initial connection on the socket right after creation to avoid socket failures. 
36 | // eslint-disable-next-line @typescript-eslint/no-floating-promises 37 | this.initialConnect.apply(this); 38 | } 39 | 40 | public async initialConnect(): Promise { 41 | return new Promise((resolve, reject) => { 42 | this.socket.connect(this.endpoint.port, this.endpoint.host, (err?: Error) => { 43 | if (err) reject(err); 44 | else resolve(); 45 | }); 46 | }); 47 | } 48 | 49 | public async warmup(): Promise { 50 | try { 51 | await this.establishConnection(); 52 | } catch (err) { 53 | LOG('Failed to connect', err); 54 | } 55 | } 56 | 57 | public async sendMessage(message: Buffer): Promise { 58 | // ensure the socket is open and writable 59 | await this.waitForOpenConnection(); 60 | 61 | await new Promise((resolve, reject) => { 62 | const onSendError = (err: Error): void => { 63 | LOG('Failed to write', err); 64 | reject(err); 65 | }; 66 | 67 | const wasFlushedToKernel = this.socket.write(message, (err?: Error) => { 68 | if (!err) { 69 | LOG('Write succeeded'); 70 | resolve(); 71 | } else { 72 | onSendError(err); 73 | } 74 | }); 75 | 76 | if (!wasFlushedToKernel) { 77 | LOG('TcpClient data was not flushed to kernel buffer and was queued in memory.'); 78 | } 79 | }); 80 | } 81 | 82 | private disconnect(eventName: string): void { 83 | LOG('TcpClient disconnected due to:', eventName); 84 | this.socket.removeAllListeners(); 85 | this.socket.destroy(); 86 | this.socket.unref(); 87 | } 88 | 89 | private async waitForOpenConnection(): Promise { 90 | if (!this.socket.writable || this.socket.readyState !== 'open') { 91 | await this.establishConnection(); 92 | } 93 | } 94 | 95 | private async establishConnection(): Promise { 96 | await new Promise((resolve, reject) => { 97 | const onError = (e: Error): void => { 98 | // socket is already open, no need to connect 99 | if (e.message.includes('EISCONN')) { 100 | resolve(); 101 | return; 102 | } 103 | LOG('TCP Client received error', e); 104 | this.disconnect(e.message); 105 | reject(e); 106 | }; 107 | 108 | const onConnect = (): void => { 109 | this.socket.removeListener('error', onError); 110 | LOG('TcpClient connected.', this.endpoint); 111 | resolve(); 112 | }; 113 | 114 | // TODO: convert this to a proper state machine 115 | switch (this.socket.readyState) { 116 | case 'open': 117 | resolve(); 118 | break; 119 | case 'opening': 120 | // the socket is currently opening, we will resolve 121 | // or fail the current promise on the connect or 122 | // error events 123 | this.socket.once('connect', onConnect); 124 | this.socket.once('error', onError); 125 | break; 126 | default: 127 | LOG('opening connection with socket in state: ', this.socket.readyState); 128 | this.socket.connect(this.endpoint.port, this.endpoint.host, onConnect).once('error', onError); 129 | break; 130 | } 131 | }); 132 | } 133 | } 134 | -------------------------------------------------------------------------------- /src/sinks/connections/UdpClient.ts: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2019 Amazon.com, Inc. or its affiliates. 3 | * Licensed under the Apache License, Version 2.0 (the 4 | * "License"); you may not use this file except in compliance 5 | * with the License. You may obtain a copy of the License at 6 | * 7 | * http://www.apache.org/licenses/LICENSE-2.0 8 | * 9 | * Unless required by applicable law or agreed to in writing, software 10 | * distributed under the License is distributed on an "AS IS" BASIS, 11 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
12 | * See the License for the specific language governing permissions and 13 | * limitations under the License. 14 | */ 15 | 16 | import dgram = require('dgram'); 17 | import { LOG } from '../../utils/Logger'; 18 | import { IEndpoint } from './IEndpoint'; 19 | import { ISocketClient } from './ISocketClient'; 20 | 21 | export class UdpClient implements ISocketClient { 22 | private endpoint: IEndpoint; 23 | 24 | constructor(endpoint: IEndpoint) { 25 | this.endpoint = endpoint; 26 | } 27 | 28 | // No warm up for UDP 29 | public warmup(): Promise { 30 | return Promise.resolve(); 31 | } 32 | 33 | public async sendMessage(message: Buffer): Promise { 34 | const client = dgram.createSocket('udp4'); 35 | client.send(message, this.endpoint.port, this.endpoint.host, (error: unknown) => { 36 | if (error) { 37 | LOG(error); 38 | } 39 | client.close(); 40 | }); 41 | 42 | return Promise.resolve(); 43 | } 44 | } 45 | -------------------------------------------------------------------------------- /src/utils/Fetch.ts: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2019 Amazon.com, Inc. or its affiliates. 3 | * Licensed under the Apache License, Version 2.0 (the 4 | * "License"); you may not use this file except in compliance 5 | * with the License. You may obtain a copy of the License at 6 | * 7 | * http://www.apache.org/licenses/LICENSE-2.0 8 | * 9 | * Unless required by applicable law or agreed to in writing, software 10 | * distributed under the License is distributed on an "AS IS" BASIS, 11 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | * See the License for the specific language governing permissions and 13 | * limitations under the License. 14 | */ 15 | 16 | import { IncomingMessage, RequestOptions, request as httpRequest } from 'http'; 17 | 18 | const SOCKET_TIMEOUT = 1000; 19 | 20 | /** 21 | * Fetch data from a remote HTTP endpoint with the provided headers. 22 | * 23 | * @param options - HTTP request options 24 | */ 25 | const fetch = (options: RequestOptions): Promise => { 26 | return new Promise((resolve, reject) => { 27 | const request = httpRequest(options, (response: IncomingMessage) => { 28 | if (!response.statusCode) { 29 | reject( 30 | `Received undefined response status code from '${options.host || 'unknown'}/${options.path || 'unknown'}'`, 31 | ); 32 | return; 33 | } 34 | 35 | if (response.statusCode < 200 || response.statusCode > 299) { 36 | reject(new Error(`Failed to load page, status code: ${response.statusCode}`)); 37 | return; 38 | } 39 | 40 | // using similar approach to node-fetch 41 | // https://github.com/bitinn/node-fetch/blob/6a5d192034a0f438551dffb6d2d8df2c00921d16/src/body.js#L217 42 | const body: Uint8Array[] = []; 43 | let bodyBytes = 0; 44 | response.on('data', (chunk: Uint8Array) => { 45 | bodyBytes += chunk.length; 46 | body.push(chunk); 47 | }); 48 | 49 | response.on('end', () => { 50 | const buffer: Buffer = Buffer.concat(body, bodyBytes); 51 | resolve(buffer); 52 | }); 53 | }).on('error', (err: unknown) => { 54 | reject(err); 55 | }); 56 | 57 | request.on('socket', socket => { 58 | socket.on('timeout', () => { 59 | request.abort(); 60 | reject(`Socket timeout while connecting to '${options.host || 'unknown'}/${options.path || 'unknown'}'`); 61 | }); 62 | socket.setTimeout(SOCKET_TIMEOUT); 63 | }); 64 | 65 | request.end(); 66 | }); 67 | }; 68 | 69 | /** 70 | * Fetch a string from a remote HTTP endpoint with the provided headers. 
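 *
 * A usage sketch (the host, port, and path are placeholders, and the typed fetchJSON call
 * assumes the caller supplies the expected response shape as T):
 * @example
 * const text = await fetchString({ host: '127.0.0.1', port: 8080, path: '/status' });
 * const data = await fetchJSON<{ status: string }>({ host: '127.0.0.1', port: 8080, path: '/status.json' });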
71 | * 72 | * @param options - HTTP request options 73 | */ 74 | const fetchString = async (options: RequestOptions): Promise => { 75 | const buffer = await fetch(options); 76 | return buffer.toString(); 77 | }; 78 | 79 | /** 80 | * Fetch JSON data from a remote HTTP endpoint with the provided headers and de-serialize to the provided type. 81 | * There are no guarantees the response will conform to the contract defined by T. 82 | * It is up to the consumer to ensure the provided T captures all possible response types 83 | * from the provided endpoint. 84 | * 85 | * @param options - HTTP request options 86 | */ 87 | const fetchJSON = async (options: RequestOptions): Promise => { 88 | const responseString = await fetchString(options); 89 | return JSON.parse(responseString) as Promise; 90 | }; 91 | 92 | export { fetch, fetchJSON, fetchString }; 93 | -------------------------------------------------------------------------------- /src/utils/Logger.ts: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2019 Amazon.com, Inc. or its affiliates. 3 | * Licensed under the Apache License, Version 2.0 (the 4 | * "License"); you may not use this file except in compliance 5 | * with the License. You may obtain a copy of the License at 6 | * 7 | * http://www.apache.org/licenses/LICENSE-2.0 8 | * 9 | * Unless required by applicable law or agreed to in writing, software 10 | * distributed under the License is distributed on an "AS IS" BASIS, 11 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | * See the License for the specific language governing permissions and 13 | * limitations under the License. 14 | */ 15 | 16 | import config from '../config/Configuration'; 17 | 18 | const LOG = (...args: unknown[]): void => { 19 | if (config.debuggingLoggingEnabled) { 20 | console.log(...args); 21 | } 22 | }; 23 | 24 | export { LOG }; 25 | -------------------------------------------------------------------------------- /src/utils/Time.ts: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2019 Amazon.com, Inc. or its affiliates. 3 | * Licensed under the Apache License, Version 2.0 (the 4 | * "License"); you may not use this file except in compliance 5 | * with the License. You may obtain a copy of the License at 6 | * 7 | * http://www.apache.org/licenses/LICENSE-2.0 8 | * 9 | * Unless required by applicable law or agreed to in writing, software 10 | * distributed under the License is distributed on an "AS IS" BASIS, 11 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | * See the License for the specific language governing permissions and 13 | * limitations under the License. 14 | */ 15 | 16 | import { performance } from 'perf_hooks'; 17 | 18 | /** 19 | * Returns the current high resolution millisecond timestamp, 20 | * measured in Unix time 21 | */ 22 | const now = (): number => { 23 | return performance.timeOrigin + performance.now(); 24 | }; 25 | 26 | export { now }; 27 | -------------------------------------------------------------------------------- /src/utils/Validator.ts: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2019 Amazon.com, Inc. or its affiliates. 3 | * Licensed under the Apache License, Version 2.0 (the 4 | * "License"); you may not use this file except in compliance 5 | * with the License. 
You may obtain a copy of the License at 6 | * 7 | * http://www.apache.org/licenses/LICENSE-2.0 8 | * 9 | * Unless required by applicable law or agreed to in writing, software 10 | * distributed under the License is distributed on an "AS IS" BASIS, 11 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | * See the License for the specific language governing permissions and 13 | * limitations under the License. 14 | */ 15 | 16 | import { Constants } from '../Constants'; 17 | import { Unit } from '../logger/Unit'; 18 | import { StorageResolution } from '../logger/StorageResolution'; 19 | import { DimensionSetExceededError } from '../exceptions/DimensionSetExceededError'; 20 | import { InvalidDimensionError } from '../exceptions/InvalidDimensionError'; 21 | import { InvalidMetricError } from '../exceptions/InvalidMetricError'; 22 | import { InvalidNamespaceError } from '../exceptions/InvalidNamespaceError'; 23 | import { InvalidTimestampError } from '../exceptions/InvalidTimestampError'; 24 | 25 | /** 26 | * Validates dimension set. 27 | * @see [CloudWatch Dimensions](https://docs.aws.amazon.com/AmazonCloudWatch/latest/APIReference/API_Dimension.html) 28 | * 29 | * @param dimensionSet 30 | * @throws {DimensionSetExceededError} Dimension set must not exceed 30 dimensions. 31 | * @throws {InvalidDimensionError} Dimension name and value must be valid. 32 | */ 33 | const validateDimensionSet = (dimensionSet: Record): void => { 34 | // Validates dimension set length 35 | if (Object.keys(dimensionSet).length > Constants.MAX_DIMENSION_SET_SIZE) 36 | throw new DimensionSetExceededError( 37 | `Maximum number of dimensions per dimension set allowed are ${Constants.MAX_DIMENSION_SET_SIZE}`, 38 | ); 39 | 40 | // Validate dimension key and value 41 | Object.entries(dimensionSet).forEach(([key, value]) => { 42 | dimensionSet[key] = value = String(value); 43 | 44 | if (!new RegExp(Constants.VALID_DIMENSION_REGEX).test(key)) { 45 | throw new InvalidDimensionError(`Dimension key ${key} has invalid characters`); 46 | } 47 | if (!new RegExp(Constants.VALID_DIMENSION_REGEX).test(value)) { 48 | throw new InvalidDimensionError(`Dimension value ${value} has invalid characters`); 49 | } 50 | if (key.trim().length == 0) { 51 | throw new InvalidDimensionError(`Dimension key ${key} must include at least one non-whitespace character`); 52 | } 53 | 54 | if (value.trim().length == 0) { 55 | throw new InvalidDimensionError(`Dimension value ${value} must include at least one non-whitespace character`); 56 | } 57 | 58 | if (key.length > Constants.MAX_DIMENSION_NAME_LENGTH) { 59 | throw new InvalidDimensionError( 60 | `Dimension key ${key} must not exceed maximum length ${Constants.MAX_DIMENSION_NAME_LENGTH}`, 61 | ); 62 | } 63 | 64 | if (value.length > Constants.MAX_DIMENSION_VALUE_LENGTH) { 65 | throw new InvalidDimensionError( 66 | `Dimension value ${value} must not exceed maximum length ${Constants.MAX_DIMENSION_VALUE_LENGTH}`, 67 | ); 68 | } 69 | 70 | if (key.startsWith(':')) { 71 | throw new InvalidDimensionError(`Dimension key ${key} cannot start with ':'`); 72 | } 73 | }); 74 | }; 75 | 76 | /** 77 | * Validates metric. 78 | * @see [CloudWatch Metric](https://docs.aws.amazon.com/AmazonCloudWatch/latest/APIReference/API_MetricDatum.html) 79 | * 80 | * @param key 81 | * @param value 82 | * @param unit 83 | * @param storageResolution 84 | * 85 | * @throws {InvalidMetricError} Metric name must be valid. 
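 *
 * For example (metric names and values are illustrative):
 * @example
 * validateMetric('PayloadSize', 100, Unit.Bits, StorageResolution.High); // passes
 * validateMetric('PayloadSize', Infinity); // throws InvalidMetricError (value is not finite)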
86 | */ 87 | const validateMetric = ( 88 | key: string, 89 | value: number, 90 | unit?: Unit | string, 91 | storageResolution?: StorageResolution, 92 | metricNameAndResolutionMap?: Map, 93 | ): void => { 94 | if (key.trim().length == 0) { 95 | throw new InvalidMetricError(`Metric key ${key} must include at least one non-whitespace character`); 96 | } 97 | 98 | if (key.length > Constants.MAX_METRIC_NAME_LENGTH) { 99 | throw new InvalidMetricError( 100 | `Metric key ${key} must not exceed maximum length ${Constants.MAX_METRIC_NAME_LENGTH}`, 101 | ); 102 | } 103 | 104 | if (!Number.isFinite(value)) { 105 | throw new InvalidMetricError(`Metric value ${value} is not a number`); 106 | } 107 | 108 | if (value > Number.MAX_SAFE_INTEGER) { 109 | throw new InvalidMetricError(`Metric value ${value} must not exceed maximum value ${Number.MAX_SAFE_INTEGER}}`); 110 | } 111 | 112 | if (value < -Number.MAX_SAFE_INTEGER) { 113 | throw new InvalidMetricError( 114 | `Metric value ${value} must not be less than minimum value ${-Number.MAX_SAFE_INTEGER}`, 115 | ); 116 | } 117 | 118 | if ( 119 | unit !== undefined && 120 | !Object.values(Unit) 121 | .map((u) => String(u)) 122 | .includes(unit) 123 | ) { 124 | throw new InvalidMetricError(`Metric unit ${unit} is not valid`); 125 | } 126 | 127 | if ( 128 | storageResolution !== undefined && 129 | !Object.values(StorageResolution) 130 | .map((s) => s) 131 | .includes(storageResolution) 132 | ) { 133 | throw new InvalidMetricError(`Metric resolution ${storageResolution} is not valid`); 134 | } 135 | 136 | if ( 137 | metricNameAndResolutionMap?.has(key) && 138 | metricNameAndResolutionMap.get(key) !== (storageResolution ? storageResolution : StorageResolution.Standard) 139 | ) { 140 | throw new InvalidMetricError( 141 | `Resolution for metrics ${key} is already set. A single log event cannot have a metric with two different resolutions.`, 142 | ); 143 | } 144 | }; 145 | 146 | /** 147 | * Validates metric namespace. 148 | * @see [CloudWatch Namespace](https://docs.aws.amazon.com/AmazonCloudWatch/latest/monitoring/cloudwatch_concepts.html#Namespace) 149 | * 150 | * @param namespace 151 | * @throws {InvalidNamespaceError} Namespace must be of valid length. 152 | */ 153 | const validateNamespace = (namespace: string): void => { 154 | if (namespace.trim().length == 0) { 155 | throw new InvalidNamespaceError(`Namespace must include at least one non-whitespace character`); 156 | } 157 | 158 | if (namespace.length > Constants.MAX_NAMESPACE_LENGTH) { 159 | throw new InvalidNamespaceError(`Namespace must not exceed maximum length ${Constants.MAX_NAMESPACE_LENGTH}`); 160 | } 161 | 162 | if (!new RegExp(Constants.VALID_NAMESPACE_REGEX).test(namespace)) { 163 | throw new InvalidNamespaceError(`Namespace ${namespace} has invalid characters`); 164 | } 165 | }; 166 | 167 | /** 168 | * Validates timestamp. 
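 * Accepts either a Date or a numeric millisecond timestamp. For example (illustrative):
 * @example
 * validateTimestamp(Date.now());  // passes
 * validateTimestamp(new Date(0)); // throws InvalidTimestampError (older than MAX_TIMESTAMP_PAST_AGE allows)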
169 | * @see [CloudWatch Timestamp](https://docs.aws.amazon.com/AmazonCloudWatch/latest/monitoring/cloudwatch_concepts.html#about_timestamp) 170 | * 171 | * @param timestamp 172 | */ 173 | const validateTimestamp = (timestamp: Date | number): void => { 174 | if (!isDate(timestamp)) { 175 | throw new InvalidTimestampError(`Timestamp ${String(timestamp)} is invalid`); 176 | } 177 | 178 | timestamp = new Date(timestamp); 179 | 180 | if (timestamp < new Date(Date.now() - Constants.MAX_TIMESTAMP_PAST_AGE)) { 181 | throw new InvalidTimestampError( 182 | `Timestamp ${String(timestamp)} must not be older than ${Constants.MAX_TIMESTAMP_PAST_AGE} milliseconds`, 183 | ); 184 | } 185 | 186 | if (timestamp > new Date(Date.now() + (Constants.MAX_TIMESTAMP_FUTURE_AGE as number))) { 187 | throw new InvalidTimestampError( 188 | `Timestamp ${String(timestamp)} must not be newer than ${Constants.MAX_TIMESTAMP_FUTURE_AGE} milliseconds`, 189 | ); 190 | } 191 | }; 192 | 193 | const isDate = (timestamp: Date | number): boolean => { 194 | return (timestamp instanceof Date && !isNaN(new Date(timestamp).getTime())) || new Date(timestamp).getTime() > 0; 195 | }; 196 | 197 | export { validateDimensionSet, validateMetric, validateNamespace, validateTimestamp }; 198 | -------------------------------------------------------------------------------- /test/canary/agent/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM node:16.17.0-alpine AS base 2 | RUN mkdir -p /app/src 3 | WORKDIR /app/src 4 | 5 | COPY package.json ./ 6 | RUN rm -rf node_modules/aws-embedded-metrics 7 | COPY node_modules/aws-embedded-metrics ./node_modules/aws-embedded-metrics 8 | COPY . . 9 | 10 | CMD [ "node", "index" ] -------------------------------------------------------------------------------- /test/canary/agent/container-definitions.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "logConfiguration": { 4 | "logDriver": "awslogs", 5 | "options": { 6 | "awslogs-group": "/ecs/emf-node-canary", 7 | "awslogs-region": "us-west-2", 8 | "awslogs-stream-prefix": "ecs" 9 | } 10 | }, 11 | "dependsOn": [ 12 | { 13 | "containerName": "cloudwatch-agent", 14 | "condition": "START" 15 | } 16 | ], 17 | "environment": [ 18 | { 19 | "name": "AWS_EMF_AGENT_ENDPOINT", 20 | "value": "tcp://127.0.0.1:25888" 21 | } 22 | ], 23 | "image": "863722843142.dkr.ecr.us-west-2.amazonaws.com/emf-node-canary", 24 | "name": "emf-node-canary" 25 | }, 26 | { 27 | "name": "cloudwatch-agent", 28 | "image": "public.ecr.aws/cloudwatch-agent/cloudwatch-agent:latest", 29 | "logConfiguration": { 30 | "logDriver": "awslogs", 31 | "options": { 32 | "awslogs-group": "/ecs/emf-node-canary", 33 | "awslogs-region": "us-west-2", 34 | "awslogs-stream-prefix": "ecs" 35 | } 36 | }, 37 | "environment": [ 38 | { 39 | "name": "CW_CONFIG_CONTENT", 40 | "value": "{\"agent\":{\"omit_hostname\":true,\"debug\":true},\"logs\":{\"metrics_collected\":{\"emf\":{}}}}" 41 | } 42 | ] 43 | } 44 | ] 45 | -------------------------------------------------------------------------------- /test/canary/agent/index.js: -------------------------------------------------------------------------------- 1 | const { metricScope, Unit, Configuration, StorageResolution } = require('aws-embedded-metrics'); 2 | 3 | let version = ''; 4 | try { 5 | const json = require('./node_modules/aws-embedded-metrics/package.json'); 6 | version = json.version || 'Unknown'; 7 | } catch (_) {} 8 | 9 | let init = true; 10 | 11 | 
Configuration.logGroupName = '/Canary/NodeJS/CloudWatchAgent/Metrics'; 12 | 13 | const recordMetric = metricScope(metrics => () => { 14 | if (init) { 15 | metrics.putMetric('Init', 1, Unit.Count); 16 | init = false; 17 | } 18 | 19 | const memoryUsage = process.memoryUsage(); 20 | 21 | metrics.setNamespace('Canary'); 22 | metrics.setDimensions({ Runtime: 'NodeJS', Platform: 'ECS', Agent: 'CloudWatchAgent', Version: version }); 23 | metrics.putMetric('Invoke', 1, Unit.Count); 24 | 25 | metrics.putMetric('Memory.HeapTotal', memoryUsage.heapTotal, Unit.Bytes); 26 | metrics.putMetric('Memory.HeapUsed', memoryUsage.heapUsed, Unit.Bytes, StorageResolution.High); 27 | metrics.putMetric('Memory.RSS', memoryUsage.rss, Unit.Bytes); 28 | }); 29 | 30 | // delay the start so that the CW Agent has time to startup 31 | // the better option would be to configure a health check 32 | // on the agent and link it to the ECS dependsOn config 33 | // additionally, we should implement a circular buffer 34 | // in the AgentSink to allow retries on transient failures 35 | setTimeout(() => 36 | setInterval(() => 37 | recordMetric(), 38 | 100), 39 | 5000); 40 | -------------------------------------------------------------------------------- /test/canary/agent/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "agent", 3 | "version": "1.0.0", 4 | "description": "", 5 | "main": "index.js", 6 | "scripts": { 7 | "local": "AWS_EMF_ENVIRONMENT=Local node index" 8 | }, 9 | "keywords": [], 10 | "author": "", 11 | "license": "ISC" 12 | } 13 | -------------------------------------------------------------------------------- /test/integ/agent/.aws/.gitignore: -------------------------------------------------------------------------------- 1 | credentials 2 | config -------------------------------------------------------------------------------- /test/integ/agent/.aws/amazon-cloudwatch-agent.json: -------------------------------------------------------------------------------- 1 | { 2 | "agent": { 3 | "omit_hostname": true, 4 | "debug": true 5 | }, 6 | "logs": { 7 | "metrics_collected": { 8 | "emf": {} 9 | } 10 | } 11 | } 12 | -------------------------------------------------------------------------------- /test/integ/agent/Dockerfile: -------------------------------------------------------------------------------- 1 | # TODO: Fix the NO_PUBKEY error for jammy and update to use the :latest tag 2 | FROM public.ecr.aws/lts/ubuntu:20.04 3 | 4 | RUN apt-get update && \ 5 | apt-get install -y ca-certificates curl && \ 6 | rm -rf /var/lib/apt/lists/* 7 | 8 | RUN curl -O https://amazon-cloud-watch-agent.s3.amazonaws.com/debian/amd64/1.237768.0/amazon-cloudwatch-agent.deb && \ 9 | dpkg -i -E amazon-cloudwatch-agent.deb && \ 10 | rm -rf /tmp/* && \ 11 | rm -rf /opt/aws/amazon-cloudwatch-agent/bin/amazon-cloudwatch-agent-config-wizard && \ 12 | rm -rf /opt/aws/amazon-cloudwatch-agent/bin/amazon-cloudwatch-agent-ctl && \ 13 | rm -rf /opt/aws/amazon-cloudwatch-agent/bin/config-downloader 14 | 15 | COPY ./.aws/amazon-cloudwatch-agent.json /opt/aws/amazon-cloudwatch-agent/bin/default_linux_config.json 16 | COPY ./.aws/config /root/.aws/config 17 | COPY ./.aws/credentials /root/.aws/credentials 18 | 19 | ENV RUN_IN_CONTAINER="True" 20 | ENTRYPOINT ["/opt/aws/amazon-cloudwatch-agent/bin/start-amazon-cloudwatch-agent"] -------------------------------------------------------------------------------- /test/integ/agent/end-to-end.integ.ts: 
-------------------------------------------------------------------------------- 1 | import { metricScope } from '../../../src/logger/MetricScope'; 2 | import Sleep from '../../utils/Sleep'; 3 | import Configuration from '../../../src/config/Configuration'; 4 | import { CloudWatch, GetMetricStatisticsCommandInput } from '@aws-sdk/client-cloudwatch'; 5 | import { hostname } from 'os'; 6 | const cwmClient = new CloudWatch(); 7 | 8 | const now = () => new Date().getTime(); 9 | const startTime = new Date(); 10 | const timeoutMillis = 120_000; 11 | 12 | const serviceName = `IntegrationTests-${hostname()}`; 13 | const serviceType = 'AutomatedTest'; 14 | const logGroupName = 'aws-emf-node-integ'; 15 | 16 | Configuration.serviceName = serviceName; 17 | Configuration.serviceType = serviceType; 18 | Configuration.logGroupName = logGroupName; 19 | Configuration.debuggingLoggingEnabled = true; 20 | 21 | const dimensionKey = 'Operation'; 22 | const dimensionValue = 'Integ-Test-Agent'; 23 | const dimensions: Record = {}; 24 | dimensions[dimensionKey] = dimensionValue; 25 | 26 | test( 27 | 'end to end integration test with agent over UDP', 28 | async () => { 29 | // arrange 30 | const metricName = 'UDP-SingleFlush'; 31 | const expectedSamples = 1; 32 | Configuration.agentEndpoint = 'udp://0.0.0.0:25888'; 33 | 34 | const doWork = metricScope(metrics => () => { 35 | metrics.putDimensions(dimensions); 36 | metrics.putMetric(metricName, 100, 'Milliseconds'); 37 | }); 38 | 39 | // act 40 | doWork(); 41 | 42 | // assert 43 | await waitForMetricExistence(metricName, expectedSamples); 44 | }, 45 | timeoutMillis, 46 | ); 47 | 48 | test( 49 | 'multiple flushes over TCP connection that cross over idle timeout', 50 | async () => { 51 | // arrange 52 | const idleTimeout = 500; 53 | const metricName = 'TCP-MultipleFlushes'; 54 | const expectedSamples = 3; 55 | 56 | Configuration.agentEndpoint = 'tcp://0.0.0.0:25888'; 57 | 58 | const doWork = metricScope(metrics => () => { 59 | metrics.putDimensions(dimensions); 60 | metrics.putMetric(metricName, 100, 'Milliseconds'); 61 | metrics.setProperty('RequestId', '422b1569-16f6-4a03-b8f0-fe3fd9b100f8'); 62 | }); 63 | 64 | // act 65 | doWork(); 66 | doWork(); 67 | await Sleep(idleTimeout); 68 | doWork(); 69 | 70 | // assert 71 | await waitForMetricExistence(metricName, expectedSamples); 72 | }, 73 | timeoutMillis, 74 | ); 75 | 76 | const metricExists = async (metricName: string, expectedSampleCount: number): Promise => { 77 | const request: GetMetricStatisticsCommandInput = { 78 | Namespace: 'aws-embedded-metrics', 79 | MetricName: metricName, 80 | Dimensions: [ 81 | { Name: 'ServiceName', Value: serviceName }, 82 | { Name: 'ServiceType', Value: serviceType }, 83 | { Name: 'LogGroup', Value: logGroupName }, 84 | { Name: dimensionKey, Value: dimensionValue }, 85 | ], 86 | Period: 60, 87 | StartTime: new Date(startTime.getTime() - 5000), 88 | EndTime: new Date(now()), 89 | Statistics: ['SampleCount'], 90 | }; 91 | 92 | const result = await cwmClient.getMetricStatistics(request); 93 | 94 | if (result && result.Datapoints && result.Datapoints.length > 0) { 95 | const samples = result.Datapoints.map(dataPoint => dataPoint.SampleCount || 0).reduce((total, i) => total + i); 96 | console.log(`Received ${samples} samples.`); 97 | return samples === expectedSampleCount; 98 | } 99 | 100 | return false; 101 | }; 102 | 103 | const waitForMetricExistence = async (metricName: string, expectedSampleCount: number): Promise => { 104 | let attempts = 0; 105 | while (!(await 
metricExists(metricName, expectedSampleCount))) {
106 | console.log('No metrics yet. Sleeping before trying again. Attempt #', attempts++);
107 | await Sleep(2000);
108 | }
109 | };
110 |
--------------------------------------------------------------------------------
/test/utils/Sleep.ts:
--------------------------------------------------------------------------------
1 | export default function(ms: number): Promise<void> {
2 | return new Promise(resolve => setTimeout(resolve, ms));
3 | }
4 |
--------------------------------------------------------------------------------
/test/utils/TestSink.ts:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2019 Amazon.com, Inc. or its affiliates.
3 | * Licensed under the Apache License, Version 2.0 (the
4 | * "License"); you may not use this file except in compliance
5 | * with the License. You may obtain a copy of the License at
6 | *
7 | * http://www.apache.org/licenses/LICENSE-2.0
8 | *
9 | * Unless required by applicable law or agreed to in writing, software
10 | * distributed under the License is distributed on an "AS IS" BASIS,
11 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | * See the License for the specific language governing permissions and
13 | * limitations under the License.
14 | */
15 |
16 | import { MetricsContext } from '../../src/logger/MetricsContext';
17 | import { ISink } from '../../src/sinks/Sink';
18 |
19 | /**
20 | * A sink that holds the metrics contexts it receives in memory
21 | * so that tests can assert on what was flushed.
22 | */
23 | export class TestSink implements ISink {
24 | public readonly name: string = 'TestSink';
25 |
26 | public events: MetricsContext[] = [];
27 |
28 | public forceAcceptRejects: boolean;
29 |
30 | constructor(forceAcceptRejects: boolean) {
31 | this.forceAcceptRejects = forceAcceptRejects;
32 | }
33 |
34 | public accept(context: MetricsContext): Promise<void> {
35 | this.events.push(context);
36 | return this.forceAcceptRejects ? Promise.reject() : Promise.resolve();
37 | }
38 | }
39 |
--------------------------------------------------------------------------------
/tsconfig.eslint.json:
--------------------------------------------------------------------------------
1 | // As of typescript-eslint 2.0, when project is specified within parserOptions, it will now hard fail when
2 | // parsing files that are not included within the provided tsconfig(s). Because of this, we need a dedicated
3 | // tsconfig for linting that includes tests that we want linted
4 | // See here for more details https://github.com/typescript-eslint/typescript-eslint/releases/tag/v2.0.0
5 | {
6 | "extends": "./tsconfig.json",
7 | "include": ["src", "test"],
8 | "exclude": ["node_modules"]
9 | }
10 |
--------------------------------------------------------------------------------
/tsconfig.json:
--------------------------------------------------------------------------------
1 | {
2 | "compilerOptions": {
3 | "target": "es6",
4 | "module": "commonjs",
5 | "declaration": true,
6 | "outDir": "./lib",
7 | "strict": true,
8 | "esModuleInterop": true,
9 | "moduleResolution": "Node"
10 | },
11 | "include": ["src"],
12 | "exclude": ["node_modules", "test", "**/__tests__/*"]
13 | }
14 |
--------------------------------------------------------------------------------