├── .editorconfig ├── .eslintrc.json ├── .github ├── dependabot.yml └── workflows │ └── ci.yml ├── .gitignore ├── CONTRIBUTING.md ├── LICENSE ├── README.md ├── examples └── metrics.js ├── header.png ├── index.js ├── lib ├── aggregators.js ├── errors.js ├── loggers.js ├── logging.js ├── metrics.js └── reporters.js ├── package.json ├── test-other ├── integration_check.js ├── lint_text.sh └── types_check.ts ├── test ├── aggregators_tests.js ├── loggers_tests.js ├── metrics_tests.js ├── module_tests.js └── reporters_tests.js └── tsconfig.json /.editorconfig: -------------------------------------------------------------------------------- 1 | [*] 2 | indent_style = space 3 | insert_final_newline = true 4 | trim_trailing_whitespace = true 5 | indent_size = 4 6 | 7 | [*.js] 8 | max_line_length = 110 9 | 10 | [package*.json] 11 | indent_size = 2 12 | 13 | [*.{yaml,yml}] 14 | indent_size = 2 15 | -------------------------------------------------------------------------------- /.eslintrc.json: -------------------------------------------------------------------------------- 1 | { 2 | "extends": "eslint:recommended", 3 | "env": { 4 | "es2019": true, 5 | "node": true 6 | }, 7 | "parserOptions": { 8 | "ecmaVersion": 2019, 9 | "sourceType": "script" 10 | }, 11 | "rules": { 12 | "semi": ["error", "always"], 13 | "quotes": ["error", "single"], 14 | "eqeqeq": ["error", "always", { "null": "ignore" }], 15 | "guard-for-in": ["error"], 16 | "no-extend-native": ["error"], 17 | "wrap-iife": ["error"], 18 | "indent": ["error", 4], 19 | "no-use-before-define": ["error", { "functions": false }], 20 | "new-cap": ["error"], 21 | "no-caller": ["error"], 22 | "strict": ["error"], 23 | "no-trailing-spaces": ["error"], 24 | "max-params": ["error", 10], 25 | "max-depth": ["error", 5], 26 | "max-statements": ["error", 25], 27 | "max-len": ["error", 110], 28 | "no-var": ["error"], 29 | // Allow unused vars if prefixed with "_" 30 | "no-unused-vars": ["error", { "argsIgnorePattern": "^_" }], 31 | "no-multi-str": ["error"], 32 | "camelcase": ["error"], 33 | "space-unary-ops": ["error"], 34 | "comma-spacing": ["error"], 35 | "eol-last": ["error"], 36 | "space-before-blocks": ["error", "always"], 37 | "keyword-spacing": ["error"], 38 | "linebreak-style": ["error", "unix"], 39 | "no-confusing-arrow": ["error"], 40 | "object-curly-spacing": ["error", "always"] 41 | }, 42 | "overrides": [ 43 | { 44 | "files": ["test/**/*.js"], 45 | "env": { 46 | "mocha": true 47 | }, 48 | "rules": { 49 | "max-statements": ["off"] 50 | } 51 | } 52 | ] 53 | } 54 | -------------------------------------------------------------------------------- /.github/dependabot.yml: -------------------------------------------------------------------------------- 1 | version: 2 2 | updates: 3 | - package-ecosystem: 'github-actions' 4 | directory: '/' 5 | schedule: 6 | interval: 'monthly' 7 | -------------------------------------------------------------------------------- /.github/workflows/ci.yml: -------------------------------------------------------------------------------- 1 | on: 2 | pull_request: {} 3 | push: 4 | branches: 5 | - main 6 | 7 | name: Continuous Integration 8 | 9 | env: 10 | default_node_version: 22 11 | 12 | jobs: 13 | test: 14 | runs-on: ubuntu-latest 15 | strategy: 16 | matrix: 17 | # Test supported release Node.js versions (even numbers) plus current 18 | # development version. 19 | node_version: [12, 14, 16, 18, 20, 22, 23] 20 | 21 | steps: 22 | - uses: actions/checkout@v4 23 | 24 | - name: Install Node.js 25 | uses: actions/setup-node@v4 26 | with: 27 | node-version: ${{ matrix.node_version }} 28 | cache: 'npm' 29 | cache-dependency-path: '**/package.json' 30 | 31 | - name: Install dependencies 32 | run: npm install 33 | 34 | - name: Run loader tests 35 | run: | 36 | npm test 37 | 38 | test-integration: 39 | runs-on: ubuntu-latest 40 | steps: 41 | - uses: actions/checkout@v4 42 | 43 | - name: Install Node.js 44 | uses: actions/setup-node@v4 45 | with: 46 | node-version: ${{ env.default_node_version }} 47 | cache: 'npm' 48 | cache-dependency-path: '**/package.json' 49 | 50 | - name: Install dependencies 51 | run: npm install 52 | 53 | - name: Check Integration 54 | env: 55 | DATADOG_API_KEY: ${{ secrets.DATADOG_API_KEY }} 56 | DATADOG_APP_KEY: ${{ secrets.DATADOG_APP_KEY }} 57 | DATADOG_SITE: ${{ secrets.DATADOG_API_HOST }} 58 | run: | 59 | npm run check-integration 60 | 61 | lint: 62 | runs-on: ubuntu-latest 63 | steps: 64 | - uses: actions/checkout@v4 65 | 66 | - name: Install Node.js 67 | uses: actions/setup-node@v4 68 | with: 69 | node-version: ${{ env.default_node_version }} 70 | cache: 'npm' 71 | cache-dependency-path: '**/package.json' 72 | 73 | - name: Install dependencies 74 | run: npm install 75 | 76 | - name: Lint Code 77 | run: | 78 | npm run check-codestyle 79 | 80 | - name: Lint Text 81 | run: npm run check-text 82 | 83 | types: 84 | name: Build/Check Types 85 | runs-on: ubuntu-latest 86 | steps: 87 | - uses: actions/checkout@v4 88 | 89 | - name: Install Node.js 90 | uses: actions/setup-node@v4 91 | with: 92 | node-version: ${{ env.default_node_version }} 93 | cache: 'npm' 94 | cache-dependency-path: '**/package.json' 95 | 96 | - name: Install dependencies 97 | run: npm install 98 | 99 | - name: Build Types 100 | run: | 101 | npm run build-types 102 | 103 | - name: Check Types 104 | run: | 105 | npm run check-types 106 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | .DS_Store 2 | 3 | # Logs 4 | logs 5 | *.log 6 | 7 | # IDE 8 | .idea 9 | .vscode 10 | 11 | # Runtime data 12 | pids 13 | *.pid 14 | *.seed 15 | 16 | # Directory for instrumented libs generated by jscoverage/JSCover 17 | lib-cov 18 | 19 | # Coverage directory used by tools like istanbul 20 | coverage 21 | 22 | # Grunt intermediate storage (http://gruntjs.com/creating-plugins#storing-task-files) 23 | .grunt 24 | 25 | # Compiled binary addons (http://nodejs.org/api/addons.html) 26 | build/Release 27 | 28 | # Type Definitions 29 | dist 30 | 31 | # Dependency directory 32 | # Commenting this out is preferred by some people, see 33 | # https://www.npmjs.org/doc/misc/npm-faq.html#should-i-check-my-node_modules-folder-into-git- 34 | node_modules 35 | package-lock.json 36 | 37 | # Node versioning files for NVM, Nodenv, etc. This is a library that supports 38 | # multiple versions of Node.js so whatever a user has set locally should not be 39 | # shared with others. 40 | .node-version 41 | .nvmrc 42 | 43 | # Users Environment Variables 44 | .lock-wscript 45 | .env 46 | 47 | # Scratch working files 48 | scratch.* 49 | -------------------------------------------------------------------------------- /CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | # Contributing 2 | 3 | Contributions are welcome, and they are greatly appreciated! Every little bit helps, and credit will always be given. 4 | 5 | 6 | ## Ways to Contribute 7 | 8 | ### Report Bugs 9 | 10 | Report bugs by [filing an issue][issues]. Please check to make sure someone hasn’t already raised the issue you are concerned about. When filing a new issue, please include: 11 | - Any details about your local setup that might be helpful in troubleshooting. 12 | - Detailed steps or a working code sample to reproduce the bug. 13 | 14 | 15 | ### Fix Bugs or Implement New Features 16 | 17 | Look through the [issues][] for any that you might be able to help with. **Issues tagged with “bug” or “help wanted” are high priority** and we’d love help from anyone who has time to work on them. The “enhancement” tag indicates new features that would helpful to have, and we are happy to accept PRs for those, too. 18 | 19 | Follow the [local development](#local-development) steps below to get started, then file a [pull request][pulls] with your work. Unless you have a compelling reason not to, please make sure to include new tests or update existing tests for your changes. 20 | 21 | 22 | ### Write Documentation 23 | 24 | We could always use improvements to the documentation, whether to the README, this guide, the JSDoc strings in the code (which show up in some editors), or or even on the web in blog posts, articles, and such. 25 | 26 | For changes to this repo, follow the [local development](#local-development) steps below to get started, then file a [pull request][pulls] with your work. 27 | 28 | 29 | ## Local Development 30 | 31 | 1. If you don't have commit rights to this repo, [fork it][fork]. 32 | 33 | 2. Install Node.js 12 or newer. 34 | 35 | 3. Clone your fork (or this repo if you have commit rights) to your local development machine: 36 | 37 | ```sh 38 | git clone 39 | ``` 40 | 41 | 4. Switch to the cloned directory and install dependencies: 42 | 43 | ```sh 44 | cd node-datadog-metrics 45 | npm install 46 | ``` 47 | 48 | 5. Start a new branch to work in: 49 | 50 | ```sh 51 | git checkout -b name-of-your-branch 52 | ``` 53 | 54 | 6. Make your changes to teh relevant files. 55 | 56 | 7. Run tests and other checks before committing! 57 | 58 | - Run tests: `npm test` 59 | - Check code style: `npm run check-codestyle && npm run check-text` 60 | - Build and check TypeScript types: `npm run build-types && npm run check-types` 61 | - **(Not required!)** If you have a Datadog account you can push test metrics to, run a complete, live integration check: 62 | 63 | ```sh 64 | export DATADOG_API_KEY='' 65 | export DATADOG_APP_KEY='' 66 | export DATADOG_SITE='' 67 | npm run check-integration 68 | ``` 69 | 70 | Most of these checks will also run automatically when you create a PR, but it can be good to run some of the quick checks yourself before pushing your code to get quicker feedback. 71 | 72 | 8. Commit and push your changes. 73 | 74 | 9. [Open a pull request][pulls] with your changes. A maintainer will 75 | 76 | 77 | ## Building/Publishing Releases 78 | 79 | **Maintainers only.** We current publish new releases manually. To create a new release, follow these steps: 80 | 81 | 1. Pull and check out the latest `main` branch (or whatever branch is relevant if publishing a patch for a previous release). 82 | 83 | 2. Prepare for the release. 84 | - Most checks will have run in CI, but you may want to run them again locally if you’ve changed anything notable. (See [local development](#local-development) notes above.) 85 | 86 | - Update the version number in `package.json`. 87 | 88 | - Finalize the “release history” section of `README.md`: 89 | - Replace the “In Development:” heading with the version number and current date, e.g. “0.12.0 (2024-12-01)” 90 | 91 | - Do a quick review of the notes for this release and clean up any typos or reword anything that’s not clear. Remove any sections that are blank or just have “TBD” listed. 92 | 93 | - Replace the “view diff” link with a compare link for the new version number: 94 | 95 | ``` 96 | https://github.com/dbader/node-datadog-metrics/compare/v...v 97 | ``` 98 | 99 | For example, if the new version number is 0.12.0 and the previous was 0.11.4: 100 | 101 | ``` 102 | https://github.com/dbader/node-datadog-metrics/compare/v0.11.4...v0.12.0 103 | ``` 104 | 105 | 3. Commit and tag. 106 | - Commit your changes. The commit message should be something like `Prepare v`. 107 | - Tag the commit as `v`: 108 | 109 | ```sh 110 | git tag v 111 | ``` 112 | 113 | - Push the commit and tags to GitHub: 114 | 115 | ```sh 116 | git push 117 | git push --tags 118 | ``` 119 | 120 | 4. Publish to NPM! 121 | 122 | Make sure to choose an appropriate `--tag` value: 123 | - `latest` for a new current release. 124 | - `next` for a pre-release. 125 | - `v.x` for patches to non-current versions (e.g. if the current release is v0.12.1 but you are publishing v0.11.5, tag it as `v0.11.x`). 126 | 127 | Do a dry run with `--dry-run` before publishing just to make sure everything is good: 128 | 129 | ```sh 130 | npm run clean 131 | npm publish --dry-run --tag 132 | ``` 133 | 134 | If the results look good, go ahead and publish! 135 | 136 | ```sh 137 | npm publish --tag 138 | ``` 139 | 140 | 5. Create a release on GitHub at https://github.com/dbader/node-datadog-metrics/releases/new. 141 | - Choose the tag you pushed in step 3. 142 | - The title should be `Version `. 143 | - Paste the notes from the “release history” section of the README as the description. Make sure to indent properly (they are indented one level as a bulleted list item in the README). 144 | - Attach the tarball of the published package from NPM. (You can find the URL for it by running `npm view datadog-metrics`.) 145 | 146 | 6. Prepare for future development: 147 | - Add a new “in development” section to the top of the release history in `README.md`: 148 | 149 | ```markdown 150 | ### In Development: 151 | 152 | **Breaking Changes:** 153 | 154 | TBD 155 | 156 | **New Features:** 157 | 158 | TBD 159 | 160 | **Deprecations:** 161 | 162 | TBD 163 | 164 | **Bug Fixes:** 165 | 166 | TBD 167 | 168 | **Maintenance:** 169 | 170 | TBD 171 | 172 | [View diff](https://github.com/dbader/node-datadog-metrics/compare/v...main) 173 | ``` 174 | 175 | - Update the version number in `package.json` to be `-dev`, e.g. `0.12.1-dev` if you just published `0.12.0`. 176 | 177 | - Commit your changes and push to GitHub. 178 | 179 | 180 | [issues]: https://github.com/dbader/node-datadog-metrics/issues 181 | [pulls]: https://github.com/dbader/node-datadog-metrics/pulls 182 | [fork]: https://github.com/dbader/node-datadog-metrics/fork 183 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | The MIT License (MIT) 2 | 3 | Copyright (c) 2014 Daniel Bader 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | 23 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # datadog-metrics 2 | > Buffered metrics reporting via the Datadog HTTP API. 3 | 4 | [![NPM Version][npm-image]][npm-url] 5 | [![Build Status][ci-status-image]][ci-status-url] 6 | [![Downloads Stats][npm-downloads]][npm-url] 7 | 8 | Datadog-metrics lets you collect application metrics through Datadog's HTTP API. Using the HTTP API has the benefit that you **don't need to install the Datadog Agent (StatsD)**. Just get an API key, install the module and you're ready to go. 9 | 10 | The downside of using the HTTP API is that it can negatively affect your app's performance. Datadog-metrics **solves this issue by buffering metrics locally and periodically flushing them** to Datadog. 11 | 12 | ## Installation 13 | 14 | Datadog-metrics is compatible with Node.js v12 and later. You can install it with NPM: 15 | 16 | ```sh 17 | npm install datadog-metrics --save 18 | ``` 19 | 20 | ## Example 21 | 22 | ![](header.png) 23 | 24 | Save the following into a file named `example_app.js`: 25 | ```js 26 | var metrics = require('datadog-metrics'); 27 | metrics.init({ host: 'myhost', prefix: 'myapp.' }); 28 | 29 | function collectMemoryStats() { 30 | var memUsage = process.memoryUsage(); 31 | metrics.gauge('memory.rss', memUsage.rss); 32 | metrics.gauge('memory.heapTotal', memUsage.heapTotal); 33 | metrics.gauge('memory.heapUsed', memUsage.heapUsed); 34 | }; 35 | 36 | setInterval(collectMemoryStats, 5000); 37 | ``` 38 | 39 | Run it: 40 | ```sh 41 | DATADOG_API_KEY=YOUR_KEY DEBUG=metrics node example_app.js 42 | ``` 43 | 44 | ## Tutorial 45 | 46 | There's also a longer [tutorial](https://dbader.org/blog/monitoring-your-nodejs-app-with-datadog) that walks you through setting up a monitoring dashboard on Datadog using datadog-metrics. 47 | 48 | 49 | ## Usage 50 | 51 | ### Datadog API key 52 | 53 | Make sure the `DATADOG_API_KEY` or `DD_API_KEY` environment variable is set to your Datadog API key (you can also set it via the `apiKey` option in code). You can find the API key under [Integrations > APIs](https://app.datadoghq.com/account/settings#api). *Please note the API key is different from an **application key**. For more details, see [Datadog’s “API and Application Keys” docs](https://docs.datadoghq.com/account_management/api-app-keys/).* 54 | 55 | ### Module setup 56 | 57 | There are three ways to use this module to instrument an application. 58 | They differ in the level of control that they provide. 59 | 60 | #### Use case #1: Just let me track some metrics already! 61 | 62 | Just require datadog-metrics and you're ready to go. After that you can call 63 | `gauge`, `increment` and `histogram` to start reporting metrics. 64 | 65 | ```js 66 | var metrics = require('datadog-metrics'); 67 | metrics.gauge('mygauge', 42); 68 | ``` 69 | 70 | #### Use case #2: I want some control over this thing! 71 | 72 | If you want more control you can configure the module with a call to `init`. 73 | Make sure you call this before you use the `gauge`, `increment` and `histogram` 74 | functions. See the documentation for `init` below to learn more. 75 | 76 | ```js 77 | var metrics = require('datadog-metrics'); 78 | metrics.init({ host: 'myhost', prefix: 'myapp.' }); 79 | metrics.gauge('mygauge', 42); 80 | ``` 81 | 82 | 83 | #### Use case #3: Must. Control. Everything. 84 | 85 | If you need even more control you can create one or more `BufferedMetricsLogger` instances and manage them yourself: 86 | 87 | ```js 88 | var metrics = require('datadog-metrics'); 89 | var metricsLogger = new metrics.BufferedMetricsLogger({ 90 | site: 'datadoghq.eu', 91 | apiKey: 'TESTKEY', 92 | host: 'myhost', 93 | prefix: 'myapp.', 94 | flushIntervalSeconds: 15, 95 | defaultTags: ['env:staging', 'region:us-east-1'], 96 | onError (error) { 97 | console.error('There was an error auto-flushing metrics:', error); 98 | } 99 | }); 100 | metricsLogger.gauge('mygauge', 42); 101 | ``` 102 | 103 | ## API 104 | 105 | ### Initialization 106 | 107 | `metrics.init(options)` 108 | 109 | Where `options` is an object and can contain the following: 110 | 111 | * `host`: Sets the hostname reported with each metric. (optional) 112 | * Setting a hostname is useful when you're running the same application 113 | on multiple machines and you want to track them separately in Datadog. 114 | * `prefix`: Sets a default prefix for all metrics. (optional) 115 | * Use this to namespace your metrics. 116 | * `flushIntervalSeconds`: How often to send metrics to Datadog. (optional) 117 | * This defaults to 15 seconds. Set it to `0` to disable auto-flushing (which 118 | means you must call `flush()` manually). 119 | * `site`: Sets the Datadog "site", or server where metrics are sent. (optional) 120 | * Defaults to `datadoghq.com`. 121 | * See more details on setting your site at: 122 | https://docs.datadoghq.com/getting_started/site/#access-the-datadog-site 123 | * You can also set this via the `DATADOG_SITE` or `DD_SITE` environment variable. 124 | * Ignored if you set the `reporter` option. 125 | * `apiKey`: Sets the Datadog API key. (optional) 126 | * It's usually best to keep this in an environment variable. 127 | Datadog-metrics looks for the API key in the `DATADOG_API_KEY` or 128 | `DD_API_KEY` environment variable by default. 129 | * You must either set this option or the environment variable. An API key 130 | is required to send metrics. 131 | * Make sure not to confuse this with your _application_ key! For more 132 | details, see: https://docs.datadoghq.com/account_management/api-app-keys/ 133 | * Ignored if you set the `reporter` option. 134 | * `appKey`: ⚠️ Deprecated. This does nothing and will be removed in an upcoming 135 | release. 136 | 137 | Sets the Datadog _application_ key. This is not actually needed for sending 138 | metrics or distributions, and you probably shouldn’t set it. Do not confuse 139 | this with your _API_ key! For more, see: 140 | https://docs.datadoghq.com/account_management/api-app-keys/ 141 | * `defaultTags`: Default tags used for all metric reporting. (optional) 142 | * Set tags that are common to all metrics. 143 | * `onError`: A function to call when there are asynchronous errors seding 144 | buffered metrics to Datadog. It takes one argument (the error). (optional) 145 | * If this option is not set, the error will be logged to stderr. 146 | * `histogram`: An object with default options for all histograms. This has the 147 | same properties as the options object on the `histogram()` method. Options 148 | specified when calling the method are layered on top of this object. 149 | (optional) 150 | * `retries`: How many times to retry failed metric submissions to Datadog’s API. 151 | * Defaults to `2`. 152 | * Ignored if you set the `reporter` option. 153 | * `retryBackoff`: How long to wait before retrying a failed Datadog API call. 154 | Subsequent retries multiply this delay by 2^(retry count). For example, if 155 | this is set to `1`, retries will happen after 1, then 2, then 4 seconds. 156 | * Defaults to `1`. 157 | * Ignored if you set the `reporter` option. 158 | * `reporter`: An object that actually sends the buffered metrics. (optional) 159 | * There are two built-in reporters you can use: 160 | 1. `reporters.DatadogReporter` sends metrics to Datadog’s API, and is 161 | the default. 162 | 2. `reporters.NullReporter` throws the metrics away. It’s useful for 163 | tests or temporarily disabling your metrics. 164 | 165 | Example: 166 | 167 | ```js 168 | metrics.init({ host: 'myhost', prefix: 'myapp.' }); 169 | ``` 170 | 171 | Disabling metrics using `NullReporter`: 172 | 173 | ```js 174 | metrics.init({ host: 'myhost', reporter: metrics.NullReporter() }); 175 | ``` 176 | 177 | Send metrics to a totally different service instead of Datadog: 178 | 179 | ```js 180 | metrics.init({ 181 | reporter: { 182 | async report(series) { 183 | // `series` is an array of metrics objects, formatted basically how the 184 | // Datadog v1 metrics API and v1 distributions API want them. 185 | const response = await fetch('https://my-datadog-like-api.com/series', { 186 | method: 'POST', 187 | body: JSON.stringify({ series }) 188 | }); 189 | return await response.json(); 190 | } 191 | } 192 | }); 193 | ``` 194 | 195 | 196 | ### Gauges 197 | 198 | `metrics.gauge(key, value[, tags[, timestamp]])` 199 | 200 | Record the current *value* of a metric. The most recent value since the last 201 | flush will be recorded. Optionally, specify a set of tags to associate with the 202 | metric. This should be used for sum values such as total hard disk space, 203 | process uptime, total number of active users, or number of rows in a database 204 | table. The optional timestamp is in milliseconds since 1 Jan 1970 00:00:00 UTC, 205 | e.g. from `Date.now()`. 206 | 207 | Example: 208 | 209 | ```js 210 | metrics.gauge('test.mem_free', 23); 211 | ``` 212 | 213 | ### Counters 214 | 215 | `metrics.increment(key[, value[, tags[, timestamp]]])` 216 | 217 | Increment the counter by the given *value* (or `1` by default). Optionally, 218 | specify a list of *tags* to associate with the metric. This is useful for 219 | counting things such as incrementing a counter each time a page is requested. 220 | The optional timestamp is in milliseconds since 1 Jan 1970 00:00:00 UTC, 221 | e.g. from `Date.now()`. 222 | 223 | Example: 224 | 225 | ```js 226 | metrics.increment('test.requests_served'); 227 | metrics.increment('test.awesomeness_factor', 10); 228 | ``` 229 | 230 | ### Histograms 231 | 232 | `metrics.histogram(key, value[, tags[, timestamp[, options]]])` 233 | 234 | Sample a histogram value. Histograms will produce metrics that 235 | describe the distribution of the recorded values, namely the minimum, 236 | maximum, average, median, count and the 75th, 85th, 95th and 99th percentiles. 237 | Optionally, specify a list of *tags* to associate with the metric. 238 | The optional timestamp is in milliseconds since 1 Jan 1970 00:00:00 UTC, 239 | e.g. from `Date.now()`. 240 | 241 | Example: 242 | 243 | ```js 244 | metrics.histogram('test.service_time', 0.248); 245 | ``` 246 | 247 | You can also specify an options object to adjust which aggregations and 248 | percentiles should be calculated. For example, to only calculate an average, 249 | count, and 99th percentile: 250 | 251 | ```js 252 | metrics.histogram('test.service_time', 0.248, ['tag:value'], Date.now(), { 253 | // Aggregates can include 'max', 'min', 'sum', 'avg', 'median', or 'count'. 254 | aggregates: ['avg', 'count'], 255 | // Percentiles can include any decimal between 0 and 1. 256 | percentiles: [0.99] 257 | }); 258 | ``` 259 | 260 | ### Distributions 261 | 262 | `metrics.distribution(key, value[, tags[, timestamp]])` 263 | 264 | Send a distribution value. Distributions are similar to histograms (they create 265 | several metrics for count, average, percentiles, etc.), but they are calculated 266 | server-side on Datadog’s systems. This is much higher-overhead than histograms, 267 | and the individual calculations made from it have to be configured on the 268 | Datadog website instead of in the options for this package. 269 | 270 | You should use this in environments where you have many instances of your 271 | application running in parallel, or instances constantly starting and stopping 272 | with different hostnames or identifiers and tagging each one separately is not 273 | feasible. AWS Lambda or serverless functions are a great example of this. In 274 | such environments, you also might want to use a distribution instead of 275 | `increment` or `gauge` (if you have two instances of your app sending those 276 | metrics at the same second, and they are not tagged differently or have 277 | different `host` names, one will overwrite the other — distributions will not). 278 | 279 | Example: 280 | 281 | ```js 282 | metrics.distribution('test.service_time', 0.248); 283 | ``` 284 | 285 | ### Flushing 286 | 287 | By default, datadog-metrics will automatically flush, or send accumulated 288 | metrics to Datadog, at regular intervals, and, in environments that support it, 289 | before your program exits. (However, if you call `process.exit()` to cause a 290 | hard exit, datadog-metrics doesn’t get a chance to flush. In this case, you may 291 | want to call `await metrics.stop()` first.) 292 | 293 | You can adjust the interval by using the `flushIntervalSeconds` option. Setting 294 | it to `0` will disable auto-flushing entirely: 295 | 296 | ```js 297 | // Set auto-flush interval to 10 seconds. 298 | metrics.init({ flushIntervalSeconds: 10 }); 299 | ``` 300 | 301 | You can also send accumulated metrics manually at any time by calling 302 | `metrics.flush()`. 303 | 304 | Please note that, when calling the `BufferedMetricsLogger` constructor directly, 305 | `flushIntervalSeconds` defaults to `0` instead. When constructing your own 306 | logger this way, you must expicitly opt-in to auto-flushing by setting a 307 | positive value. 308 | 309 | 310 | #### `metrics.flush()` 311 | 312 | Sends any buffered metrics to Datadog and returns a promise. By default, 313 | `flush()` will be called for you automatically unless you set 314 | `flushIntervalSeconds` to `0` (see above for more details). 315 | 316 | ⚠️ This method used to take two callback arguments for handling successes and 317 | errors. That form is deprecated and will be removed in a future update: 318 | 319 | ```js 320 | // Deprecated: 321 | metrics.flush( 322 | () => console.log('Flush suceeded!'), 323 | (error) => console.log('Flush error:', error) 324 | ); 325 | 326 | // Current, using `await`: 327 | try { 328 | await metrics.flush(); 329 | console.log('Flush suceeded!'); 330 | } catch (error) { 331 | console.log('Flush error:', error); 332 | } 333 | 334 | // Or, using Promise callbacks: 335 | metrics.flush() 336 | .then(() => console.log('Flush succeeded')) 337 | .catch((error) => console.log('Flush error:', error)) ; 338 | ``` 339 | 340 | #### `metrics.stop(options)` 341 | 342 | Stops auto-flushing (if enabled) and flushes any currently buffered metrics. 343 | This is mainly useful if you want to manually clean up and send remaining 344 | metrics before hard-quitting your program (usually by calling `process.exit()`). 345 | Returns a promise for the result of the flush. 346 | 347 | Takes an optional object with properties: 348 | * `flush` (boolean) Whether to flush any remaining metrics after stopping. 349 | Defaults to `true`. 350 | 351 | 352 | ## Logging 353 | 354 | Datadog-metrics uses the [debug](https://github.com/visionmedia/debug) 355 | library for logging at runtime. You can enable debug logging by setting 356 | the `DEBUG` environment variable when you run your app. 357 | 358 | Example: 359 | 360 | ```sh 361 | DEBUG=metrics node app.js 362 | ``` 363 | 364 | ## Contributing 365 | 366 | Contributions are always welcome! For more info on how to contribute or develop locally, please see [`CONTRIBUTING.md`](./CONTRIBUTING.md). 367 | 368 | ## Release History 369 | 370 | ### In Development: 371 | 372 | **Breaking Changes:** 373 | 374 | TBD 375 | 376 | **New Features:** 377 | 378 | TBD 379 | 380 | **Deprecations:** 381 | 382 | TBD 383 | 384 | **Bug Fixes:** 385 | 386 | TBD 387 | 388 | **Maintenance:** 389 | 390 | TBD 391 | 392 | [View diff](https://github.com/dbader/node-datadog-metrics/compare/v0.12.1...main) 393 | 394 | 395 | ### 0.12.1 (2024-12-18) 396 | 397 | **New Features:** 398 | 399 | * When auto-flushing is enabled, metrics are now also flushed before the process exits. In previous versions, you needed to do this manually by calling `metrics.flush()` at the every end of your program. (#141) 400 | 401 | You will still need to flush manually if you set `flushIntervalSeconds` to `0` or `stop()` (see below) if you are quitting your program by calling `process.exit()` [(which interrupts a variety of operations)](https://nodejs.org/docs/latest/api/process.html#processexitcode). 402 | 403 | * A new `stop()` method disables auto-flushing and flushes any currently buffered metrics (you can leave metrics in the buffer instead with the `flush` option: `stop({flush: false})`). (#141) 404 | 405 | [View diff](https://github.com/dbader/node-datadog-metrics/compare/v0.12.0...v0.12.1) 406 | 407 | 408 | ### 0.12.0 (2024-12-05) 409 | 410 | Datadog-metrics now automatically retries failed metric submissions and uses promises for asynchronous actions! There are a handful of other deprecations and small improvements. 411 | 412 | **Breaking Changes:** 413 | 414 | * The `DatadogReporter` constructor now takes an options object instead of positional arguments. Using this constructor directly is pretty rare, so this won’t affect most users. (#138) 415 | 416 | **New Features:** 417 | 418 | * Promises: asynchronous actions now use promises instead of callbacks. Callbacks are deprecated, but still work — they’ll be fully removed in v0.13.0. This affects: 419 | 420 | * The `flush()` method now returns a promise. 421 | * The `report(series)` method on any custom reporters should now return a promise. For now, datadog-metrics will use the old callback-based behavior if the method signature has callbacks listed after `series` argument. 422 | 423 | (See #125) 424 | 425 | * Retries: flushes to Datadog’s API are now retried automatically. This can help you work around intermittent network issues or rate limits. To adjust retries, use the `retries` and `retryBackoff` options. 426 | 427 | ```js 428 | metrics.init({ 429 | // How many times to retry. To disable retries, set this to 0. 430 | retries: 2, 431 | // How long (in seconds) to wait between retries. Subsequent retries 432 | // wait exponentially longer. 433 | retryBackoff: 1 434 | }); 435 | ``` 436 | 437 | (See #138) 438 | 439 | * Environment variables: you can now use *either* `DATADOG_` or `DD_` prefixes for environment variables (previously, only `DATADOG_` worked). For example, you can set your API key via `DATADOG_API_KEY` or `DD_API_KEY`. (#137) 440 | 441 | **Deprecations:** 442 | 443 | * The `appKey` option is no longer supported. Application keys (as opposed to API keys) are not actually needed for sending metrics or distributions to the Datadog API. Including it in your configuration adds no benefits, but risks exposing a sensitive credential. (#127) 444 | 445 | * The `DATADOG_API_HOST` environment variable is now deprecated. Please use `DATADOG_SITE` or `DD_SITE` instead (the `apiHost` option was renamed to `site` in v0.11.0, but the `DATADOG_API_HOST` environment variable was accidentally left as-is). (#134) 446 | 447 | [View diff](https://github.com/dbader/node-datadog-metrics/compare/v0.11.4...v0.12.0) 448 | 449 | 450 | ### 0.11.4 (2024-11-10) 451 | 452 | This release updates the TypeScript types for this project, and doesn’t include any changes to functionality. There are also no changes since v0.11.4-a.1. 453 | 454 | **Bug Fixes:** 455 | 456 | * `BufferedMetricsLogger` is now an actual class & type when you import it in TypeScript. That is, you can now do: 457 | 458 | ```typescript 459 | import { BufferedMetricsLogger } from 'datadog-metrics'; 460 | 461 | function useLogger(logger: BufferedMetricsLogger) { 462 | // ... 463 | } 464 | ``` 465 | 466 | Previously, you would have had to declare the type for `logger` as `typeof BufferedMetricsLogger.prototype`. (#120) 467 | 468 | [View diff](https://github.com/dbader/node-datadog-metrics/compare/v0.11.3...v0.11.4) 469 | 470 | 471 | ### 0.11.4-a.1 (2024-10-31) 472 | 473 | This pre-release is meant for testing a fix for #119. 474 | 475 | **Bug Fixes:** 476 | 477 | * Typings: Ensure `BufferedMetricsLogger` is seen as an actual class & type when importing in TypeScript. (#120) 478 | 479 | [View diff](https://github.com/dbader/node-datadog-metrics/compare/v0.11.3...v0.11.4-a.1) 480 | 481 | 482 | ### 0.11.3 (2024-10-31) 483 | 484 | ⚠️ No changes in this release since v0.11.2. This fixes a publishing error with v0.11.3a1. 485 | 486 | 487 | ### 0.11.3a1 (2024-10-31) 488 | 489 | ⛔️ **Do not use this release.** 490 | 491 | 492 | ### 0.11.2 (2024-06-25) 493 | 494 | **Fixes & Maintenance:** 495 | 496 | * Fix types and documentation for the `aggregates` option for histograms and the `histogram.aggregates` option for the library as a whole. It was previously listed as `aggregations`, which was incorrect. (Thanks to @Calyhre in #117.) 497 | 498 | * Improve documentation and add a more detailed error message about API keys vs. application keys. (#118) 499 | 500 | [View diff](https://github.com/dbader/node-datadog-metrics/compare/v0.11.1...v0.11.2) 501 | 502 | 503 | ### 0.11.1 (2023-09-28) 504 | 505 | **Fixes & Maintenance:** 506 | 507 | * Resolve a deprecation warning from the underlying datadog-api-client library. This also updates the minimum required version of that library. (Thanks to @acatalucci-synth & @fcsonline in #112.) 508 | 509 | [View diff](https://github.com/dbader/node-datadog-metrics/compare/v0.11.0...v0.11.1) 510 | 511 | 512 | ### 0.11.0 (2022-02-21) 513 | 514 | **New Features:** 515 | 516 | * Built-in TypeScript definitions. If you use TypeScript, you no longer need to install separate type definitions from `@types/datadog-metrics` — they’re now built-in. Please make sure to remove `@types/datadog-metrics` from your dev dependencies. 517 | 518 | Even if you’re writing regular JavaScript, you should now see better autocomplete suggestions and documentation in editors that support TypeScript definitions (e.g. VisualStudio Code, WebStorm). 519 | 520 | **Breaking Changes:** 521 | 522 | * datadog-metrics now uses modern `class` syntax internally. In most cases, you shouldn’t need to change anything. However, if you are calling `BufferedMetricsLogger.apply(...)` or `BufferedMetricsLogger.call(...)`, you’ll need to change your code to use `new BufferedMetricsLogger(...)` instead. 523 | 524 | **Deprecated Features:** 525 | 526 | * The `apiHost` option has been renamed to `site` so that it matches up with Datadog docs and official packages. The old `apiHost` name still works for now, but will be removed in the future. 527 | 528 | * The `reporters.DataDogReporter` class has been renamed to `reporters.DatadogReporter` (lower-case D in "dog") so that it correctly matches Datadog’s actual name. The old name still works, but will be removed in the future. 529 | 530 | [View diff](https://github.com/dbader/node-datadog-metrics/compare/v0.10.2...v0.11.0) 531 | 532 | 533 | ### 0.10.2 (2022-10-14) 534 | 535 | This release includes several new features and bugfixes! 536 | 537 | **New Features:** 538 | * Support for distribution metrics. You can now send distributions to Datadog by doing: 539 | 540 | ```js 541 | const metrics = require('datadog-metrics'); 542 | metrics.distribution('my.metric.name', 3.8, ['tags:here']); 543 | ``` 544 | 545 | Distributions are similar to histograms (they create several metrics for count, average, percentiles, etc.), but they are calculated server-side on Datadog’s systems. For more details and guidance on when to use them, see: 546 | * The documentation in this project’s README 547 | * Datadog’s documentation at https://docs.datadoghq.com/metrics/distributions/ 548 | 549 | (Thanks to @Mr0grog.) 550 | 551 | * Add an `onError` option for handling asynchronous errors while flushing buffered metrics. You can use this to get details on an error or to send error info to a tracking service like Sentry.io: 552 | 553 | ```js 554 | const metrics = require('datadog-metrics'); 555 | metrics.init({ 556 | onError (error) { 557 | console.error('There was an error sending to Datadog:', error); 558 | } 559 | }); 560 | ``` 561 | 562 | * The built-in reporter classes are now available for you to use. If you need to disable the metrics library for some reason, you can now do so with: 563 | 564 | ```js 565 | const metrics = require('datadog-metrics'); 566 | metrics.init({ 567 | reporter: new metrics.reporters.NullReporter(), 568 | }); 569 | ``` 570 | 571 | (Thanks to @Mr0grog.) 572 | 573 | * Add an option for setting histogram defaults. In v0.10.0, the `histogram()` function gained the ability to set what aggregations and percentiles it generates with a final `options` argument. You can now specify a `histogram` option for `init()` or `BufferedMetricsLogger` in order to set default options for all calls to `histogram()`. Any options you set in the actual `histogram()` call will layer on top of the defaults: 574 | 575 | ```js 576 | const metrics = require('datadog-metrics'); 577 | metrics.init({ 578 | histogram: { 579 | aggregates: ['sum', 'avg'], 580 | percentiles: [0.99] 581 | } 582 | }); 583 | 584 | // Acts as if the options had been set to: 585 | // { aggregates: ['sum', 'avg'], percentiles: [0.99] } 586 | metrics.histogram('my.metric.name', 3.8); 587 | 588 | // Acts as if the options had been set to: 589 | // { aggregates: ['sum', 'avg'], percentiles: [0.5, 0.95] } 590 | metrics.histogram('my.metric.name', 3.8, [], Date.now(), { 591 | percentiles: [0.5, 0.95] 592 | }); 593 | ``` 594 | 595 | (Thanks to @Mr0grog.) 596 | 597 | * Add a `.median` aggregation for histograms. When you log a histogram metric, it ultimately creates several metrics that track the minimum value, average value, maximum value, etc. There is now one that tracks the median value. StatsD creates the same metric from histograms, so you may find this useful if transitioning from StatsD. (Thanks to @Mr0grog.) 598 | 599 | * This package no longer locks specific versions of its dependencies (instead, your package manager can choose any version that is compatible). This may help when deduplicating packages for faster installs or smaller bundles. (Thanks to @Mr0grog.) 600 | 601 | **Bug Fixes:** 602 | 603 | * Don’t use `unref()` on timers in non-Node.js environments. This is a step towards browser compatibility, although we are not testing browser-based usage yet. (Thanks to @Mr0grog.) 604 | * The `apiHost` option was broken in v0.10.0 and now works again. (Thanks to @Mr0grog and @npeters.) 605 | * Creating a second instance of `BufferedMetricsLogger` will not longer change the credentials used by previously created `BufferedMetricsLogger` instances. (Thanks to @Mr0grog.) 606 | 607 | **Internal Updates:** 608 | 609 | * Renamed the default branch in this repo to `main`. (Thanks to @dbader.) 610 | * Use GitHub actions for continuous integration. (Thanks to @Mr0grog.) 611 | * Code style cleanup. (Thanks to @Mr0grog.) 612 | * When flushing, send each metric with its own list of tags. This helps mitigate subtle errors where a change to one metric’s tags may affect others. (Thanks to @Mr0grog.) 613 | 614 | [View diff](https://github.com/dbader/node-datadog-metrics/compare/v0.10.1...v0.10.2) 615 | 616 | 617 | ### 0.10.1 (2022-09-11) 618 | 619 | * FIX: bug in 0.10.0 where `@datadog/datadog-api-client` was not used correctly. (Thanks to @gquinteros93) 620 | 621 | [View diff](https://github.com/dbader/node-datadog-metrics/compare/4c29447bbde00565d5258e722b147601f3cc014c...023acfa3a2c5d8dd3f5bbb48c8c02467b2519559) 622 | 623 | 624 | ### 0.10.0 (2022-09-08) 625 | 626 | * **Breaking change:** we now use Datadog’s official `@datadog/datadog-api-client` package to send metrics to Datadog. This makes `datadog-metrics` usable with Webpack, but removes the `agent` option. If you were using this option and the new library does not provide a way to meet your needs, please let us know by filing an issue! (Thanks to @thatguychrisw) 627 | * You can now customize what metrics are generated by a histogram. When logging a histogram metric, the 5th argument is an optional object with information about which aggregations and percentiles to create metrics for: 628 | 629 | ```js 630 | const metrics = require('datadog-metrics'); 631 | metrics.histogram('my.metric.name', 3.8, [], Date.now(), { 632 | // Aggregates can include 'max', 'min', 'sum', 'avg', or 'count'. 633 | aggregates: ['max', 'min', 'sum', 'avg', 'count'], 634 | // Percentiles can include any decimal between 0 and 1. 635 | percentiles: [0.75, 0.85, 0.95, 0.99] 636 | }); 637 | ``` 638 | 639 | (Thanks to @gquinteros93.) 640 | 641 | * INTERNAL: Clean up continuous integration on TravisCI. (Thanks to @ErikBoesen.) 642 | 643 | [View diff](https://github.com/dbader/node-datadog-metrics/compare/99bdc97cead1d3fabe12bf594e945c64cba5d643...4c29447bbde00565d5258e722b147601f3cc014c) 644 | 645 | 646 | ### 0.9.3 (2021-03-22) 647 | 648 | * INTERNAL: Update `dogapi` and `jshint` to their latest versions. (Thanks to @ErikBoesen.) 649 | 650 | [View diff](https://github.com/dbader/node-datadog-metrics/compare/6f27d31c7931d3d68cd888a0237c87e3ed4feea1...99bdc97cead1d3fabe12bf594e945c64cba5d643) 651 | 652 | 653 | ### 0.9.2 (2021-03-14) 654 | 655 | * Expose new `apiHost` option on `init()` and `BufferedMetricsLogger` constructor. This makes it possible to actually configure the Datadog site to submit metrics to. For example, you can now submit metrics to Datadog’s Europe servers with: 656 | 657 | ```js 658 | const metrics = require('datadog-metrics'); 659 | metrics.init({ 660 | apiHost: 'datadoghq.eu' 661 | }); 662 | ``` 663 | 664 | (Thanks to @ErikBoesen.) 665 | 666 | [View diff](https://github.com/dbader/node-datadog-metrics/compare/8adaeb9f7bff15f620337a0fb882921925d63116...6f27d31c7931d3d68cd888a0237c87e3ed4feea1) 667 | 668 | 669 | ### 0.9.1 (2021-02-19) 670 | 671 | * FIX: Add default Datadog site. (Thanks to @ErikBoesen.) 672 | 673 | [View diff](https://github.com/dbader/node-datadog-metrics/compare/e58b13055b803a9c4f4c7b2426e3784b8fd4e0ae...8adaeb9f7bff15f620337a0fb882921925d63116) 674 | 675 | 676 | ### 0.9.0 (2021-02-10) 677 | 678 | * Clean up continuous integration tooling on TravisCI. (Thanks to @rpelliard.) 679 | * Correct “Datadog” throughout the documentation. It turns out there’s not supposed to be a captial D in the middle. (Thanks to @dbenamy.) 680 | * INTERNAL: Add internal support for submitting metrics to different Datadog sites (e.g. `datadoghq.eu` for Europe). (Thanks to @fermelone.) 681 | 682 | [View diff](https://github.com/dbader/node-datadog-metrics/compare/ebb4bf701f700841e8b5c5325165f13397249b51...e58b13055b803a9c4f4c7b2426e3784b8fd4e0ae) 683 | 684 | 685 | ### 0.8.2 (2020-11-16) 686 | 687 | * Added @ErikBoesen as a maintainer! 688 | * INTERNAL: Update `dogapi` version. 689 | * INTERNAL: Validate the `onSuccess` callback in `NullReporter`. (Thanks to @dkMorlok.) 690 | 691 | [View diff](https://github.com/dbader/node-datadog-metrics/compare/294e36e411e805c664a2bf614c083ed862860ce1...ebb4bf701f700841e8b5c5325165f13397249b51) 692 | 693 | 694 | ### 0.8.1 695 | 696 | * FIX: don't increment count when value is 0 (Thanks to @haspriyank) 697 | 698 | 699 | ### 0.8.0 700 | 701 | * allow passing in custom https agent (Thanks to @flovilmart) 702 | 703 | 704 | ### 0.7.0 705 | 706 | * update metric type `counter` to `count` as `counter` is deprecated by Datadog (Thanks to @dustingibbs) 707 | 708 | 709 | ### 0.6.1 710 | 711 | * FIX: bump debug to 3.1.0 to fix [NSP Advisory #534](https://nodesecurity.io/advisories/534) (Thanks to @kirkstrobeck) 712 | 713 | 714 | ### 0.6.0 715 | 716 | * FIX: call onSuccess on flush even if buffer is empty (Thanks to @mousavian) 717 | 718 | 719 | ### 0.5.0 720 | 721 | * ADD: ability to set custom timestamps (Thanks to @ronny) 722 | * FIX: 0 as valid option for flushIntervalSeconds (thanks to @dkMorlok) 723 | 724 | 725 | ### 0.4.0 726 | 727 | * ADD: Initialize with a default set of tags (thanks to @spence) 728 | 729 | 730 | ### 0.3.0 731 | 732 | * FIX: Don't overwrite metrics with the same key but different tags when aggregating them (Thanks @akrylysov and @RavivIsraeli!) 733 | * ADD: Add success/error callbacks to `metrics.flush()` (Thanks @akrylysov!) 734 | * ADD: Allow Datadog APP key to be configured (Thanks @gert-fresh!) 735 | * Bump dependencies to latest 736 | * Update docs 737 | 738 | 739 | ### 0.2.1 740 | 741 | * Update docs (module code remains unchanged) 742 | 743 | 744 | ### 0.2.0 745 | 746 | * API redesign 747 | * Remove `setDefaultXYZ()` and added `init()` 748 | 749 | 750 | ### 0.1.1 751 | 752 | * Allow `increment` to be called with a default value of 1 753 | 754 | 755 | ### 0.1.0 756 | 757 | * The first proper release 758 | * Rename `counter` to `increment` 759 | 760 | 761 | ### 0.0.0 762 | 763 | * Work in progress 764 | 765 | 766 | ## Meta 767 | 768 | This module is heavily inspired by the Python [dogapi module](https://github.com/DataDog/dogapi). 769 | 770 | Daniel Bader – [@dbader_org](https://twitter.com/dbader_org) – mail@dbader.org 771 | 772 | Distributed under the MIT license. See ``LICENSE`` for more information. 773 | 774 | Your contributions are always welcome! See [`CONTRIBUTING.md`](./CONTRIBUTING.md) for more. 775 | 776 | [https://github.com/dbader/node-datadog-metrics](https://github.com/dbader/node-datadog-metrics) 777 | 778 | [npm-image]: https://img.shields.io/npm/v/datadog-metrics.svg?style=flat-square 779 | [npm-url]: https://npmjs.org/package/datadog-metrics 780 | [npm-downloads]: https://img.shields.io/npm/dm/datadog-metrics.svg?style=flat-square 781 | [ci-status-image]: https://github.com/dbader/node-datadog-metrics/actions/workflows/ci.yml/badge.svg?branch=main 782 | [ci-status-url]: https://github.com/dbader/node-datadog-metrics/actions/workflows/ci.yml?query=branch%3Amain 783 | -------------------------------------------------------------------------------- /examples/metrics.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | const promisfy = require('util').promisify; 4 | const metrics = require('../index'); 5 | 6 | metrics.init({ 7 | apiKey: '', 8 | flushIntervalSeconds: 0, 9 | }); 10 | 11 | metrics.flush = promisfy(metrics.flush) 12 | 13 | ;(async () => { 14 | metrics.increment('node-datadog-metrics.test'); 15 | 16 | await metrics.flush(); 17 | })(); 18 | -------------------------------------------------------------------------------- /header.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/dbader/node-datadog-metrics/cbf67ee5cd09f8a99a5507d739e9f0c7732bb4ca/header.png -------------------------------------------------------------------------------- /index.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | const { BufferedMetricsLogger } = require('./lib/loggers'); 3 | const reporters = require('./lib/reporters'); 4 | 5 | /** @typedef {import("./lib/loggers").BufferedMetricsLoggerOptions} BufferedMetricsLoggerOptions */ 6 | 7 | let sharedLogger = null; 8 | 9 | /** 10 | * Configure the datadog-metrics library. 11 | * 12 | * Any settings used here will apply to the top-level metrics functions (e.g. 13 | * `increment()`, `gauge()`). If you need multiple separate configurations, use 14 | * the `BufferedMetricsLogger` class. 15 | * @param {BufferedMetricsLoggerOptions} [opts] 16 | */ 17 | function init(opts) { 18 | opts = opts || {}; 19 | if (!opts.flushIntervalSeconds && opts.flushIntervalSeconds !== 0) { 20 | opts.flushIntervalSeconds = 15; 21 | } 22 | sharedLogger = new BufferedMetricsLogger(opts); 23 | } 24 | 25 | /** 26 | * Wrap a function so that it gets called as a method of `sharedLogger`. If 27 | * `sharedLogger` does not exist when the function is called, it will be 28 | * created with default values. 29 | * @template {Function} T 30 | * @param {T} func The function to wrap. 31 | * @returns {T} 32 | */ 33 | function callOnSharedLogger(func) { 34 | // @ts-expect-error Can't find a good way to prove to the TypeScript 35 | // compiler that this satisfies the types. :( 36 | return (...args) => { 37 | if (sharedLogger === null) { 38 | // Special case: don't make a new logger just to stop it. 39 | // @ts-expect-error TypeScript compiler can't figure this one out. 40 | if (func === BufferedMetricsLogger.prototype.stop) { 41 | return Promise.resolve(undefined); 42 | } 43 | 44 | init(); 45 | } 46 | return func.apply(sharedLogger, args); 47 | }; 48 | } 49 | 50 | module.exports = { 51 | init, 52 | flush: callOnSharedLogger(BufferedMetricsLogger.prototype.flush), 53 | stop: callOnSharedLogger(BufferedMetricsLogger.prototype.stop), 54 | gauge: callOnSharedLogger(BufferedMetricsLogger.prototype.gauge), 55 | increment: callOnSharedLogger(BufferedMetricsLogger.prototype.increment), 56 | histogram: callOnSharedLogger(BufferedMetricsLogger.prototype.histogram), 57 | distribution: callOnSharedLogger(BufferedMetricsLogger.prototype.distribution), 58 | 59 | BufferedMetricsLogger, 60 | 61 | reporters 62 | }; 63 | -------------------------------------------------------------------------------- /lib/aggregators.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | class Aggregator { 4 | /** 5 | * Create an aggregator to group and buffer metric objects. 6 | * @param {string[]} defaultTags 7 | */ 8 | constructor(defaultTags) { 9 | this.buffer = new Map(); 10 | this.defaultTags = defaultTags || []; 11 | } 12 | 13 | /** @protected */ 14 | makeBufferKey(key, tags) { 15 | tags = tags || ['']; 16 | return key + '#' + tags.concat().sort().join('.'); 17 | } 18 | 19 | addPoint(Type, key, value, tags, host, timestampInMillis, options) { 20 | const bufferKey = this.makeBufferKey(key, tags); 21 | if (!this.buffer.has(bufferKey)) { 22 | this.buffer.set(bufferKey, new Type(key, tags, host, options)); 23 | } 24 | 25 | this.buffer.get(bufferKey).addPoint(value, timestampInMillis); 26 | } 27 | 28 | flush() { 29 | let series = []; 30 | for (const item of this.buffer.values()) { 31 | series.push(...item.flush()); 32 | } 33 | 34 | // Add default tags 35 | if (this.defaultTags) { 36 | for (const metric of series) { 37 | metric.tags.unshift(...this.defaultTags); 38 | } 39 | } 40 | 41 | this.buffer.clear(); 42 | 43 | return series; 44 | } 45 | } 46 | 47 | module.exports = { 48 | Aggregator 49 | }; 50 | -------------------------------------------------------------------------------- /lib/errors.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | /** 4 | * Represents an authorization failure response from the Datadog API, usually 5 | * because of an invalid API key. 6 | * 7 | * @property {'DATADOG_AUTHORIZATION_ERROR'} code 8 | * @property {number} status 9 | */ 10 | class AuthorizationError extends Error { 11 | /** 12 | * Create an `AuthorizationError`. 13 | * @param {string} message 14 | * @param {object} [options] 15 | * @param {Error} [options.cause] 16 | */ 17 | constructor(message, options = {}) { 18 | // @ts-expect-error the ECMAScript version we target with TypeScript 19 | // does not include `error.cause` (new in ES 2022), but all versions of 20 | // Node.js we support do. 21 | super(message, { cause: options.cause }); 22 | this.code = 'DATADOG_AUTHORIZATION_ERROR'; 23 | this.status = 403; 24 | } 25 | } 26 | 27 | module.exports = { AuthorizationError }; 28 | -------------------------------------------------------------------------------- /lib/loggers.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | const { logDebug, logDeprecation, logError } = require('./logging'); 3 | const Aggregator = require('./aggregators').Aggregator; 4 | const { DatadogReporter } = require('./reporters'); 5 | const Gauge = require('./metrics').Gauge; 6 | const Counter = require('./metrics').Counter; 7 | const Histogram = require('./metrics').Histogram; 8 | const Distribution = require('./metrics').Distribution; 9 | 10 | const supportsProcessExit = typeof process !== 'undefined' 11 | && typeof process.once === 'function'; 12 | 13 | /** 14 | * @typedef {object} AggregatorType Buffers metrics to send. 15 | * @property {( 16 | * type: Function, 17 | * key: string, 18 | * value: number, 19 | * tags: string[], 20 | * host: string, 21 | * timestampInMillis: number, 22 | * options: any 23 | * ) => void} addPoint Call this function to add a data point to the 24 | * aggregator. The `type` parameter is a metric class to use. 25 | * @property {() => any[]} flush Returns an array of API-formatted metric 26 | * objects ready to be sent to a reporter. 27 | */ 28 | 29 | /** 30 | * @typedef {object} CallbackReporterType 31 | * @property {(series: any[], onSuccess?: Function, onError?: Function) => void} report 32 | */ 33 | 34 | /** 35 | * @typedef {object} PromiseReporterType 36 | * @property {(series: any[]) => Promise} report 37 | */ 38 | 39 | /** 40 | * @typedef {PromiseReporterType|CallbackReporterType} ReporterType 41 | */ 42 | 43 | /** 44 | * @typedef {object} BufferedMetricsLoggerOptions 45 | * @property {string} [apiKey] Datadog API key. Ignored if you set the 46 | * `reporter` option. 47 | * @property {string} [appKey] DEPRECATED: App keys aren't actually used for 48 | * metrics and are no longer supported. 49 | * @property {string} [host] Default host for all reported metrics 50 | * @property {string} [prefix] Default key prefix for all metrics 51 | * @property {string} [site] Sets the Datadog "site", or server where metrics 52 | * are sent. Ignored if you set the `reporter` option. 53 | * For details and options, see: 54 | * https://docs.datadoghq.com/getting_started/site/#access-the-datadog-site 55 | * @property {string} [apiHost] DEPRECATED: Please use `site` instead. 56 | * @property {number} [flushIntervalSeconds] How often to send metrics to 57 | * Datadog (in seconds). 58 | * @property {string[]} [defaultTags] Default tags used for all metrics. 59 | * @property {object} [histogram] Default options for histograms. 60 | * @property {string[]} [histogram.aggregates] A list of aggregations to 61 | * to create metrics for on histograms. Values can be any of: 62 | * 'max', 'min', 'sum', 'avg', 'count', or 'median'. 63 | * @property {number[]} [histogram.percentiles] A list of percentiles to create 64 | * metrics for on histograms. Each value must be a number between 0 65 | * and 1. For example, to create 50th and 90th percentile metrics for 66 | * each histogram, set this option to `[0.5, 0.9]`. 67 | * @property {(error: any) => void} [onError] A function to call when there are 68 | * asynchronous errors sending metrics. It takes one argument -- 69 | * the error. 70 | * @property {AggregatorType} [aggregator] An aggregator instance for buffering 71 | * metrics between flushes. 72 | * @property {ReporterType} [reporter] An object that actually sends the 73 | * buffered metrics. 74 | * @property {number} [retries] How many times to retry failed attempts to send 75 | * metrics to Datadog's API. Ignored if you set the `reporter` option. 76 | * @property {number} [retryBackoff] How many seconds to wait before retrying a 77 | * failed API request. Subsequent retries will multiply this delay. 78 | * Ignored if you set the `reporter` option. 79 | */ 80 | 81 | /** 82 | * BufferedMetricsLogger manages the buffering and sending of metrics to Datadog 83 | * and provides convenience methods for logging those metrics. 84 | */ 85 | class BufferedMetricsLogger { 86 | /** 87 | * BufferedMetricsLogger manages the buffering and sending of metrics to Datadog 88 | * and provides convenience methods for logging those metrics. 89 | * 90 | * Because you don't want to send an HTTP request for each data point, this 91 | * buffers all metrics in a given time period before sending them to Datadog 92 | * in one batch (you can adjust this with the `flushIntervalSeconds` option). 93 | * 94 | * For more about the API, see: http://docs.datadoghq.com/guides/metrics/ 95 | * @param {BufferedMetricsLoggerOptions} [opts] 96 | */ 97 | constructor (opts) { 98 | if (opts.apiHost || process.env.DATADOG_API_HOST) { 99 | logDeprecation( 100 | 'The `apiHost` option (and `DATADOG_API_HOST` environment ' + 101 | 'variable) for `init()` and `BufferedMetricsLogger` has been ' + 102 | 'deprecated and will be removed in a future release. Please ' + 103 | 'use the `site` option (or `DATADOG_SITE` environment ' + 104 | 'variable) instead.' 105 | ); 106 | opts.site = opts.site || opts.apiHost; 107 | } 108 | 109 | this.performAutoFlush = this.performAutoFlush.bind(this); 110 | this.handleProcessExit = this.handleProcessExit.bind(this); 111 | 112 | /** @private */ 113 | this.aggregator = opts.aggregator || new Aggregator(opts.defaultTags); 114 | /** @private @type {ReporterType} */ 115 | this.reporter = opts.reporter || new DatadogReporter({ 116 | apiKey: opts.apiKey, 117 | site: opts.site, 118 | retries: opts.retries, 119 | retryBackoff: opts.retryBackoff 120 | }); 121 | /** @private */ 122 | this.host = opts.host; 123 | /** @private */ 124 | this.prefix = opts.prefix || ''; 125 | /** @private */ 126 | this.histogramOptions = opts.histogram; 127 | 128 | /** @private */ 129 | this.onError = null; 130 | if (typeof opts.onError === 'function') { 131 | this.onError = opts.onError; 132 | } else if (opts.onError != null) { 133 | throw new TypeError('The `onError` option must be a function'); 134 | } 135 | 136 | /** @private */ 137 | this.flushTimer = null; 138 | /** @private */ 139 | this.flushIntervalSeconds = 0; 140 | if (opts.flushIntervalSeconds < 0) { 141 | throw new TypeError(`flushIntervalSeconds must be >= 0 (got: ${opts.flushIntervalSeconds})`); 142 | } else { 143 | this.flushIntervalSeconds = opts.flushIntervalSeconds; 144 | } 145 | 146 | this.start(); 147 | } 148 | 149 | /** 150 | * Prepend the global key prefix and set the default host. 151 | * @private 152 | */ 153 | addPoint(Type, key, value, tags, timestampInMillis, options) { 154 | this.aggregator.addPoint(Type, this.prefix + key, value, tags, this.host, timestampInMillis, options); 155 | } 156 | 157 | /** 158 | * Record the current *value* of a metric. When flushed, a gauge reports only 159 | * the most recent value, ignoring any other values recorded since the last 160 | * flush. 161 | * 162 | * Optionally, specify a set of tags to associate with the metric. This should 163 | * be continuously varying values such as total hard disk space, process uptime, 164 | * total number of active users, or number of rows in a database table. The 165 | * optional timestamp is in milliseconds since 1 Jan 1970 00:00:00 UTC, 166 | * e.g. from `Date.now()`. 167 | * 168 | * @param {string} key 169 | * @param {number} value 170 | * @param {string[]} [tags] 171 | * @param {number} [timestampInMillis] 172 | * 173 | * @example 174 | * metrics.gauge('test.mem_free', 23); 175 | */ 176 | gauge(key, value, tags, timestampInMillis) { 177 | this.addPoint(Gauge, key, value, tags, timestampInMillis); 178 | } 179 | 180 | /** 181 | * Increment the counter by the given *value* (or `1` by default). Optionally, 182 | * specify a list of *tags* to associate with the metric. This is useful for 183 | * counting things such as incrementing a counter each time a page is requested. 184 | * The optional timestamp is in milliseconds since 1 Jan 1970 00:00:00 UTC, 185 | * e.g. from `Date.now()`. 186 | * @param {string} key 187 | * @param {number} [value] 188 | * @param {string[]} [tags] 189 | * @param {number} [timestampInMillis] 190 | * 191 | * @example 192 | * metrics.increment('test.requests_served'); 193 | * metrics.increment('test.awesomeness_factor', 10); 194 | */ 195 | increment(key, value, tags, timestampInMillis) { 196 | if (value === undefined || value === null) { 197 | this.addPoint(Counter, key, 1, tags, timestampInMillis); 198 | } else { 199 | this.addPoint(Counter, key, value, tags, timestampInMillis); 200 | } 201 | } 202 | 203 | /** 204 | * Sample a histogram value. Histograms will produce metrics that 205 | * describe the distribution of the recorded values, namely the minimum, 206 | * maximum, average, median, count and the 75th, 85th, 95th and 99th percentiles. 207 | * Optionally, specify a list of *tags* to associate with the metric. 208 | * The optional timestamp is in milliseconds since 1 Jan 1970 00:00:00 UTC, 209 | * e.g. from `Date.now()`. 210 | * @param {string} key 211 | * @param {number} value 212 | * @param {string[]} [tags] 213 | * @param {number} [timestampInMillis] 214 | * @param {any} [options] 215 | * 216 | * @example 217 | * metrics.histogram('test.service_time', 0.248); 218 | * 219 | * @example 220 | * // Set custom options: 221 | * metrics.histogram('test.service_time', 0.248, ['tag:value'], Date.now(), { 222 | * // Aggregates can include 'max', 'min', 'sum', 'avg', 'median', or 'count'. 223 | * aggregates: ['avg', 'count'], 224 | * // Percentiles can include any decimal between 0 and 1. 225 | * percentiles: [0.99] 226 | * }); 227 | */ 228 | histogram(key, value, tags, timestampInMillis, options = {}) { 229 | this.addPoint(Histogram, key, value, tags, timestampInMillis, { 230 | ...this.histogramOptions, 231 | ...options 232 | }); 233 | } 234 | 235 | /** 236 | * Send a distribution value. Distributions are similar to histograms (they create 237 | * several metrics for count, average, percentiles, etc.), but they are calculated 238 | * server-side on Datadog’s systems. This is much higher-overhead than histograms, 239 | * and the individual calculations made from it have to be configured on the 240 | * Datadog website instead of in the options for this package. 241 | * 242 | * You should use this in environments where you have many instances of your 243 | * application running in parallel, or instances constantly starting and stopping 244 | * with different hostnames or identifiers and tagging each one separately is not 245 | * feasible. AWS Lambda or serverless functions are a great example of this. In 246 | * such environments, you also might want to use a distribution instead of 247 | * `increment` or `gauge` (if you have two instances of your app sending those 248 | * metrics at the same second, and they are not tagged differently or have 249 | * different `host` names, one will overwrite the other — distributions will not). 250 | * @param {string} key 251 | * @param {number} value 252 | * @param {string[]} [tags] 253 | * @param {number} [timestampInMillis] 254 | * 255 | * @example 256 | * metrics.distribution('test.service_time', 0.248); 257 | */ 258 | distribution(key, value, tags, timestampInMillis) { 259 | this.addPoint(Distribution, key, value, tags, timestampInMillis); 260 | } 261 | 262 | /** 263 | * Send buffered metrics to Datadog. 264 | * 265 | * Unless you've set `flushIntervalSeconds` to 0, this will be called 266 | * automatically for you. However, you may want to call it manually when 267 | * your application quits to send any remaining metrics. 268 | * 269 | * Returns a promise indicating when sending has completed. Older support 270 | * for success and error callbacks as arguments to this method is deprecated 271 | * and will be removed in the future. Please switch to promises! 272 | * 273 | * @param {() => void} [onSuccess] DEPRECATED! This argument will be removed 274 | * soon. Please use the returned promise instead. 275 | * @param {(error: Error) => void} [onError] DEPRECATED! This argument will 276 | * be removed soon. Please use the returned promise instead. 277 | * @returns {Promise} 278 | */ 279 | flush(onSuccess, onError) { 280 | const result = new Promise((resolve, reject) => { 281 | const series = this.aggregator.flush(); 282 | if (series.length > 0) { 283 | logDebug('Flushing %d metrics to Datadog', series.length); 284 | 285 | if (this.reporter.report.length > 1) { 286 | logDeprecation( 287 | 'Callback arguments on the `report()` method of a ' + 288 | 'reporter are deprecated and will stop working in a ' + 289 | 'future release. Please update your reporter to ' + 290 | 'return a promise instead.' 291 | ); 292 | this.reporter.report(series, resolve, reject); 293 | } else { 294 | // @ts-expect-error TS can't figure out we have a promise here. 295 | this.reporter.report(series).then(resolve, reject); 296 | } 297 | } else { 298 | logDebug('Nothing to flush'); 299 | resolve(); 300 | } 301 | }); 302 | 303 | if (onSuccess || onError) { 304 | logDeprecation( 305 | 'The `onSuccess` and `onError` callback arguments for ' + 306 | 'BufferedMetricsLogger.flush() are deprecated and will be ' + 307 | 'removed in a future release. Please use the promise object that ' + 308 | '`flush()` returns instead.' 309 | ); 310 | result.then( 311 | typeof onSuccess === 'function' ? onSuccess : () => null, 312 | typeof onError === 'function' ? onError : () => null 313 | ); 314 | } 315 | 316 | // Notify global handler *and* ensure a simple call to `logger.flush()` 317 | // without error handling doesn't throw an unhandled rejection error. 318 | result.catch((error) => { 319 | if (this.onError) { 320 | this.onError(error); 321 | } else { 322 | logError('failed to send metrics (err=%s)', error); 323 | } 324 | }); 325 | 326 | return result; 327 | } 328 | 329 | /** 330 | * Start auto-flushing metrics. 331 | */ 332 | start() { 333 | if (this.flushTimer) { 334 | logDebug('Auto-flushing is already enabled'); 335 | } else if (this.flushIntervalSeconds > 0) { 336 | logDebug('Auto-flushing every %d seconds', this.flushIntervalSeconds); 337 | if (supportsProcessExit) { 338 | process.once('beforeExit', this.handleProcessExit); 339 | } 340 | this.performAutoFlush(); 341 | } else { 342 | logDebug('Auto-flushing is disabled'); 343 | } 344 | } 345 | 346 | /** 347 | * Stop auto-flushing metrics. By default, this will also flush any 348 | * currently buffered metrics. You can leave them in the buffer and not 349 | * flush by setting the `flush` option to `false`. 350 | * @param {Object} [options] 351 | * @param {boolean} [options.flush] Whether to flush before returning. 352 | * Defaults to true. 353 | * @returns {Promise} 354 | */ 355 | async stop(options) { 356 | clearTimeout(this.flushTimer); 357 | this.flushTimer = null; 358 | if (supportsProcessExit) { 359 | process.off('beforeExit', this.handleProcessExit); 360 | } 361 | if (!options || options.flush) { 362 | await this.flush(); 363 | } 364 | } 365 | 366 | /** @private */ 367 | performAutoFlush() { 368 | this.flush(); 369 | if (this.flushIntervalSeconds) { 370 | const interval = this.flushIntervalSeconds * 1000; 371 | this.flushTimer = setTimeout(this.performAutoFlush, interval); 372 | // Let the event loop exit if this is the only active timer. 373 | if (this.flushTimer.unref) this.flushTimer.unref(); 374 | } 375 | } 376 | 377 | /** @private */ 378 | async handleProcessExit() { 379 | logDebug('Auto-flushing before process exits...'); 380 | this.flush(); 381 | } 382 | } 383 | 384 | module.exports = { 385 | BufferedMetricsLogger 386 | }; 387 | -------------------------------------------------------------------------------- /lib/logging.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | const util = require('util'); 4 | const debug = require('debug'); 5 | 6 | const prefix = 'metrics'; 7 | const deprecationMessages = new Set(); 8 | 9 | /** 10 | * A prefixed instance of the `debug` logger. You can call this directly, or 11 | * call `extend()` on it to create a nested logger. 12 | * @type {debug.Debugger} 13 | */ 14 | const logDebug = debug(prefix); 15 | 16 | /** 17 | * Logs an error object or message to stderr. Unlike `logDebug()`, this will 18 | * always print output, so should only be used for significant failures users 19 | * *need* to know about. 20 | * @param {string|Error} error The error to log. 21 | */ 22 | function logError(error, ...extra) { 23 | if (typeof error === 'string') { 24 | const message = util.format(error, ...extra); 25 | console.error(`${prefix}: ERROR: ${message}`); 26 | } else { 27 | console.error(`${prefix}:`, error); 28 | } 29 | } 30 | 31 | /** 32 | * Logs a deprecation warning to stderr once. If this is called multiple times 33 | * with the same message, it will only log once. 34 | * @param {string} message Deprecation message to log. 35 | */ 36 | function logDeprecation(message) { 37 | if (!deprecationMessages.has(message)) { 38 | // We always want to log here, so don't use `logDebug`. 39 | console.warn(`${prefix}: ${message}`); 40 | deprecationMessages.add(message); 41 | } 42 | } 43 | 44 | module.exports = { 45 | logDebug, 46 | logDeprecation, 47 | logError 48 | }; 49 | -------------------------------------------------------------------------------- /lib/metrics.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | const DEFAULT_HISTOGRAM_AGGREGATES = ['max', 'min', 'sum', 'avg', 'count', 'median']; 4 | const DEFAULT_HISTOGRAM_PERCENTILES = [0.75, 0.85, 0.95, 0.99]; 5 | 6 | 7 | /** Base class for all metric types. */ 8 | class Metric { 9 | /** 10 | * Create a new metric object. Metric objects model each unique metric name 11 | * and tag combination, keep track of each relevant data point, and 12 | * calculate any derivative metrics (e.g. averages, percentiles, etc.). 13 | * @param {string} key 14 | * @param {string[]} [tags] 15 | * @param {string} [host] 16 | */ 17 | constructor(key, tags, host) { 18 | this.key = key; 19 | this.tags = tags || []; 20 | this.host = host || ''; 21 | } 22 | 23 | addPoint() { 24 | return null; 25 | } 26 | 27 | flush() { 28 | return null; 29 | } 30 | 31 | /** @protected */ 32 | posixTimestamp(timestampInMillis) { 33 | // theoretically, 0 is a valid timestamp, albeit unlikely 34 | const timestamp = timestampInMillis === undefined ? Date.now() : timestampInMillis; 35 | return Math.round(timestamp / 1000); 36 | } 37 | 38 | /** @protected */ 39 | updateTimestamp(timestampInMillis) { 40 | this.timestamp = this.posixTimestamp(timestampInMillis); 41 | } 42 | 43 | /** @protected */ 44 | serializeMetric(value, type, key) { 45 | return { 46 | metric: key || this.key, 47 | points: [[this.timestamp, value]], 48 | type: type, 49 | host: this.host, 50 | tags: this.tags.slice() 51 | }; 52 | } 53 | } 54 | 55 | 56 | class Gauge extends Metric { 57 | /** 58 | * Record the current *value* of a metric. They most recent value in 59 | * a given flush interval will be recorded. Optionally, specify a set of 60 | * tags to associate with the metric. This should be used for sum values 61 | * such as total hard disk space, process uptime, total number of active 62 | * users, or number of rows in a database table. 63 | * @param {string} key 64 | * @param {string[]} [tags] 65 | * @param {string} [host] 66 | */ 67 | constructor(key, tags, host) { 68 | super(key, tags, host); 69 | this.value = 0; 70 | } 71 | 72 | addPoint(val, timestampInMillis) { 73 | this.value = val; 74 | this.updateTimestamp(timestampInMillis); 75 | } 76 | 77 | flush() { 78 | return [this.serializeMetric(this.value, 'gauge')]; 79 | } 80 | } 81 | 82 | 83 | class Counter extends Metric { 84 | /** 85 | * Increment the counter by the given *value*. Optionally, specify a list of 86 | * *tags* to associate with the metric. This is useful for counting things 87 | * such as incrementing a counter each time a page is requested. 88 | * @param {string} key 89 | * @param {string[]} [tags] 90 | * @param {string} [host] 91 | */ 92 | constructor(key, tags, host) { 93 | super(key, tags, host); 94 | this.value = 0; 95 | } 96 | 97 | addPoint(val, timestampInMillis) { 98 | this.value += val; 99 | this.updateTimestamp(timestampInMillis); 100 | } 101 | 102 | flush() { 103 | return [this.serializeMetric(this.value, 'count')]; 104 | } 105 | } 106 | 107 | 108 | class Histogram extends Metric { 109 | /** 110 | * Sample a histogram value. Histograms will produce metrics that 111 | * describe the distribution of the recorded values, namely the minimum, 112 | * maximum, average, count and the 75th, 85th, 95th and 99th percentiles. 113 | * Optionally, specify a list of *tags* to associate with the metric. 114 | * @param {string} key 115 | * @param {string[]} [tags] 116 | * @param {string} [host] 117 | */ 118 | constructor(key, tags, host, options = {}) { 119 | super(key, tags, host); 120 | this.min = Infinity; 121 | this.max = -Infinity; 122 | this.sum = 0; 123 | this.count = 0; 124 | this.samples = []; 125 | this.aggregates = options.aggregates || DEFAULT_HISTOGRAM_AGGREGATES; 126 | this.percentiles = options.percentiles || DEFAULT_HISTOGRAM_PERCENTILES; 127 | } 128 | 129 | addPoint(val, timestampInMillis) { 130 | this.updateTimestamp(timestampInMillis); 131 | 132 | this.min = Math.min(val, this.min); 133 | this.max = Math.max(val, this.max); 134 | this.sum += val; 135 | this.count += 1; 136 | 137 | // The number of samples recorded is unbounded at the moment. 138 | // If this becomes a problem we might want to limit how many 139 | // samples we keep. 140 | this.samples.push(val); 141 | } 142 | 143 | flush() { 144 | let points = []; 145 | if (this.aggregates.includes('min')) { 146 | points.push(this.serializeMetric(this.min, 'gauge', this.key + '.min')); 147 | } 148 | if (this.aggregates.includes('max')) { 149 | points.push(this.serializeMetric(this.max, 'gauge', this.key + '.max')); 150 | } 151 | if (this.aggregates.includes('sum')) { 152 | points.push(this.serializeMetric(this.sum, 'gauge', this.key + '.sum')); 153 | } 154 | if (this.aggregates.includes('count')) { 155 | points.push(this.serializeMetric(this.count, 'count', this.key + '.count')); 156 | } 157 | if (this.aggregates.includes('avg')) { 158 | points.push( 159 | this.serializeMetric(this.average(), 'gauge', this.key + '.avg') 160 | ); 161 | } 162 | 163 | // Careful, calling samples.sort() will sort alphabetically giving 164 | // the wrong result. We must define our own compare function. 165 | this.samples.sort((a, b) => a - b); 166 | 167 | if (this.aggregates.includes('median')) { 168 | points.push( 169 | this.serializeMetric(this.median(this.samples), 'gauge', this.key + '.median') 170 | ); 171 | } 172 | 173 | const percentiles = this.percentiles.map((p) => { 174 | const val = this.samples[Math.round(p * this.samples.length) - 1]; 175 | const suffix = '.' + Math.floor(p * 100) + 'percentile'; 176 | return this.serializeMetric(val, 'gauge', this.key + suffix); 177 | }); 178 | return points.concat(percentiles); 179 | } 180 | 181 | average() { 182 | if (this.count === 0) { 183 | return 0; 184 | } else { 185 | return this.sum / this.count; 186 | } 187 | } 188 | 189 | median(sortedSamples) { 190 | if (this.count === 0) { 191 | return 0; 192 | } else if (this.count % 2 === 1) { 193 | return sortedSamples[(this.count - 1) / 2]; 194 | } else { 195 | return (sortedSamples[this.count / 2 - 1] + sortedSamples[this.count / 2]) / 2; 196 | } 197 | } 198 | } 199 | 200 | 201 | class Distribution extends Metric { 202 | /** 203 | * Similar to a histogram, but sends every point to Datadog and does the 204 | * calculations server-side. 205 | * 206 | * This is higher overhead than Counter or Histogram, but is particularly useful 207 | * for serverless functions or other environments where many instances of your 208 | * application may be running concurrently or constantly starting and stopping, 209 | * and it does not make sense to tag each of them separately so metrics from 210 | * each don't overwrite each other. 211 | * 212 | * See more documentation of use cases and how distribution work at: 213 | * https://docs.datadoghq.com/metrics/types/?tab=distribution#metric-types 214 | * @param {string} key 215 | * @param {string[]} [tags] 216 | * @param {string} [host] 217 | */ 218 | constructor(key, tags, host) { 219 | super(key, tags, host); 220 | this.points = []; 221 | } 222 | 223 | addPoint(val, timestampInMillis) { 224 | const lastTimestamp = this.timestamp; 225 | this.updateTimestamp(timestampInMillis); 226 | if (lastTimestamp === this.timestamp) { 227 | this.points[this.points.length - 1][1].push(val); 228 | } else { 229 | this.points.push([this.timestamp, [val]]); 230 | } 231 | } 232 | 233 | flush() { 234 | return [this.serializeMetric(this.points, 'distribution')]; 235 | } 236 | 237 | serializeMetric(points, type, key) { 238 | return { 239 | metric: key || this.key, 240 | points: points || this.points, 241 | type: type, 242 | host: this.host, 243 | tags: this.tags.slice() 244 | }; 245 | } 246 | } 247 | 248 | module.exports = { 249 | Metric, 250 | Gauge, 251 | Counter, 252 | Histogram, 253 | Distribution 254 | }; 255 | -------------------------------------------------------------------------------- /lib/reporters.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | const datadogApiClient = require('@datadog/datadog-api-client'); 3 | const { AuthorizationError } = require('./errors'); 4 | const { logDebug, logDeprecation } = require('./logging'); 5 | 6 | const RETRYABLE_ERROR_CODES = new Set([ 7 | 'ECONNREFUSED', 8 | 'ECONNRESET', 9 | 'ENOTFOUND', 10 | 'EPIPE', 11 | 'ETIMEDOUT' 12 | ]); 13 | 14 | async function sleep(milliseconds) { 15 | await new Promise((r) => setTimeout(r, milliseconds)); 16 | } 17 | 18 | /** 19 | * A Reporter that throws away metrics instead of sending them to Datadog. This 20 | * is useful for disabling metrics in your application and for tests. 21 | */ 22 | class NullReporter { 23 | async report(_series) { 24 | // Do nothing. 25 | } 26 | } 27 | 28 | /** 29 | * @private 30 | * A custom HTTP implementation for Datadog that retries failed requests. 31 | * Datadog has retries built in, but they don't handle network errors (just 32 | * HTTP errors), and we want to retry in both cases. This inherits from the 33 | * built-in HTTP library since we want to use the same fetch implementation 34 | * Datadog uses instead of adding another dependency. 35 | */ 36 | class RetryHttp extends datadogApiClient.client.IsomorphicFetchHttpLibrary { 37 | constructor(options = {}) { 38 | super(options); 39 | 40 | // HACK: ensure enableRetry is always `false` so the base class logic 41 | // does not actually retry (since we manage retries here). 42 | Object.defineProperty(this, 'enableRetry', { 43 | get () { return false; }, 44 | set () {}, 45 | }); 46 | } 47 | 48 | async send(request) { 49 | let i = 0; 50 | while (true) { // eslint-disable-line no-constant-condition 51 | let response, error; 52 | try { 53 | response = await super.send(request); 54 | } catch (e) { 55 | error = e; 56 | } 57 | 58 | if (this.isRetryable(response || error, i)) { 59 | await sleep(this.retryDelay(response || error, i)); 60 | } else if (response) { 61 | return response; 62 | } else { 63 | throw error; 64 | } 65 | 66 | i++; 67 | } 68 | } 69 | 70 | /** 71 | * @private 72 | * @param {any} response HTTP response or error object 73 | * @returns {boolean} 74 | */ 75 | isRetryable(response, tryCount) { 76 | return tryCount < this.maxRetries && ( 77 | RETRYABLE_ERROR_CODES.has(response.code) 78 | || response.httpStatusCode === 429 79 | || response.httpStatusCode >= 500 80 | ); 81 | } 82 | 83 | /** 84 | * @private 85 | * @param {any} response HTTP response or error object 86 | * @param {number} tryCount 87 | * @returns {number} 88 | */ 89 | retryDelay(response, tryCount) { 90 | if (response.httpStatusCode === 429) { 91 | // Datadog's official client supports just the 'x-ratelimit-reset' 92 | // header, so we support that here in addition to the standardized 93 | // 'retry-after' heaer. 94 | // There is also an upcoming IETF standard for 'ratelimit', but it 95 | // has moved away from the syntax used in 'x-ratelimit-reset'. This 96 | // stuff might change in the future. 97 | // https://datatracker.ietf.org/doc/draft-ietf-httpapi-ratelimit-headers/ 98 | const delayHeader = response.headers['retry-after'] 99 | || response.headers['x-ratelimit-reset']; 100 | const delayValue = parseInt(delayHeader, 10); 101 | if (!isNaN(delayValue) && delayValue > 0) { 102 | return delayValue * 1000; 103 | } 104 | } 105 | 106 | return this.backoffMultiplier ** tryCount * this.backoffBase * 1000; 107 | } 108 | } 109 | 110 | /** 111 | * @typedef {Object} DatadogReporterOptions 112 | * @property {string} [apiKey] Datadog API key. 113 | * @property {string} [appKey] DEPRECATED! This option does nothing. 114 | * @property {string} [site] The Datadog "site" to send metrics to. 115 | * @property {number} [retries] Retry failed requests up to this many times. 116 | * @property {number} [retryBackoff] Delay before retries. Subsequent retries 117 | * wait this long multiplied by 2^(retry count). 118 | */ 119 | 120 | /** @type {WeakMap} */ 121 | const datadogClients = new WeakMap(); 122 | 123 | /** 124 | * Create a reporter that sends metrics to Datadog's API. 125 | */ 126 | class DatadogReporter { 127 | /** 128 | * Create a reporter that sends metrics to Datadog's API. 129 | * @param {DatadogReporterOptions} [options] 130 | */ 131 | constructor(options = {}) { 132 | if (typeof options !== 'object') { 133 | throw new TypeError('DatadogReporter takes an options object, not multiple string arguments.'); 134 | } 135 | 136 | if (options.appKey) { 137 | logDeprecation( 138 | 'The `appKey` option is no longer supported since it is ' + 139 | 'not used for submitting metrics, distributions, events, ' + 140 | 'or logs.' 141 | ); 142 | } 143 | 144 | const apiKey = options.apiKey || process.env.DATADOG_API_KEY || process.env.DD_API_KEY; 145 | this.site = options.site 146 | || process.env.DATADOG_SITE 147 | || process.env.DD_SITE 148 | || process.env.DATADOG_API_HOST; 149 | 150 | if (!apiKey) { 151 | throw new Error( 152 | 'Datadog API key not found. You must specify one via the ' + 153 | '`apiKey` configuration option or the DATADOG_API_KEY or ' + 154 | 'DD_API_KEY environment variable.' 155 | ); 156 | } 157 | 158 | const configuration = datadogApiClient.client.createConfiguration({ 159 | authMethods: { 160 | apiKeyAuth: apiKey, 161 | }, 162 | httpApi: new RetryHttp(), 163 | maxRetries: options.retries >= 0 ? options.retries : 2, 164 | }); 165 | 166 | // HACK: Specify backoff here rather than in configration options to 167 | // support values less than 2 (mainly for faster tests). 168 | const backoff = options.retryBackoff >= 0 ? options.retryBackoff : 1; 169 | configuration.httpApi.backoffBase = backoff; 170 | 171 | if (this.site) { 172 | // Strip leading `app.` from the site in case someone copy/pasted the 173 | // URL from their web browser. More details on correct configuration: 174 | // https://docs.datadoghq.com/getting_started/site/#access-the-datadog-site 175 | this.site = this.site.replace(/^app\./i, ''); 176 | configuration.setServerVariables({ 177 | site: this.site 178 | }); 179 | } 180 | 181 | datadogClients.set(this, new datadogApiClient.v1.MetricsApi(configuration)); 182 | } 183 | 184 | /** 185 | * Send an array of serialized metrics to Datadog. 186 | * @param {any[]} series 187 | * @returns {Promise} 188 | */ 189 | async report(series) { 190 | logDebug('Calling report with %j', series); 191 | 192 | // Distributions must be submitted via a different method than other 193 | // metrics, so split them up. 194 | const metrics = []; 195 | const distributions = []; 196 | for (const metric of series) { 197 | if (metric.type === 'distribution') { 198 | distributions.push(metric); 199 | } else { 200 | metrics.push(metric); 201 | } 202 | } 203 | 204 | const metricsApi = datadogClients.get(this); 205 | 206 | let submissions = []; 207 | if (metrics.length) { 208 | submissions.push(metricsApi.submitMetrics({ 209 | body: { series: metrics } 210 | })); 211 | } 212 | if (distributions.length) { 213 | submissions.push(metricsApi.submitDistributionPoints({ 214 | body: { series: distributions } 215 | })); 216 | } 217 | 218 | try { 219 | await Promise.all(submissions); 220 | logDebug('sent metrics successfully'); 221 | } catch (error) { 222 | if (error.code === 403) { 223 | throw new AuthorizationError( 224 | 'Your Datadog API key is not authorized to send ' + 225 | 'metrics. Check to make sure the DATADOG_API_KEY or ' + 226 | 'DD_API_KEY environment variable or the `apiKey` init ' + 227 | 'option is set to a valid API key for your Datadog ' + 228 | 'account, and that it is not an *application* key. ' + 229 | 'For more, see: ' + 230 | 'https://docs.datadoghq.com/account_management/api-app-keys/', 231 | { cause: error } 232 | ); 233 | } 234 | 235 | throw error; 236 | } 237 | } 238 | } 239 | 240 | /** 241 | * @deprecated Please use `DatadogReporter` instead. 242 | */ 243 | class DataDogReporter extends DatadogReporter { 244 | /** 245 | * Create a reporter that sends metrics to Datadog's API. 246 | * @deprecated 247 | * @param {string} [apiKey] 248 | * @param {string} [appKey] 249 | * @param {string} [site] 250 | */ 251 | constructor(apiKey, appKey, site) { 252 | logDeprecation( 253 | 'DataDogReporter has been renamed to DatadogReporter (lower-case ' + 254 | 'D in "dog"); the old name will be removed in a future release.' 255 | ); 256 | super({ apiKey, appKey, site }); 257 | } 258 | } 259 | 260 | module.exports = { 261 | NullReporter, 262 | DatadogReporter, 263 | DataDogReporter 264 | }; 265 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "datadog-metrics", 3 | "version": "0.12.2-dev", 4 | "description": "Buffered metrics reporting via the Datadog HTTP API", 5 | "main": "index.js", 6 | "types": "dist/index.d.ts", 7 | "repository": { 8 | "type": "git", 9 | "url": "git@github.com:dbader/node-datadog-metrics.git" 10 | }, 11 | "bugs": { 12 | "url": "https://github.com/dbader/node-datadog-metrics/issues" 13 | }, 14 | "scripts": { 15 | "prepack": "npm run clean && npm run build-types && npm run check-types", 16 | "test": "mocha --reporter spec", 17 | "check-integration": "node test-other/integration_check.js", 18 | "check-codestyle": "npx eslint .", 19 | "check-text": "test-other/lint_text.sh", 20 | "build-types": "tsc --build", 21 | "check-types": "tsc --noEmit --strict test-other/types_check.ts", 22 | "clean": "tsc --build --clean" 23 | }, 24 | "keywords": [ 25 | "datadog", 26 | "metrics", 27 | "stats" 28 | ], 29 | "author": "Daniel Bader (http://dbader.org/)", 30 | "license": "MIT", 31 | "devDependencies": { 32 | "@types/node": "^12.20.55", 33 | "chai": "4.3.6", 34 | "chai-as-promised": "^7.1.2", 35 | "chai-string": "1.5.0", 36 | "eslint": "^8.24.0", 37 | "mocha": "9.2.2", 38 | "nock": "^13.2.9", 39 | "typescript": "^4.8.4" 40 | }, 41 | "dependencies": { 42 | "@datadog/datadog-api-client": "^1.17.0", 43 | "debug": "^4.1.0" 44 | }, 45 | "engines": { 46 | "node": ">=12.0.0" 47 | }, 48 | "files": [ 49 | "README.md", 50 | "LICENSE", 51 | "index.js", 52 | "lib/**", 53 | "dist/**", 54 | "!dist/tsconfig.tsbuildinfo" 55 | ] 56 | } 57 | -------------------------------------------------------------------------------- /test-other/integration_check.js: -------------------------------------------------------------------------------- 1 | /** 2 | * A basic test of our complete integration with Datadog. This check sends some 3 | * metrics, then queries to make sure they actually got ingested correctly by 4 | * Datadog and will show up as expected. 5 | */ 6 | 7 | 'use strict'; 8 | 9 | const { client, v1 } = require('@datadog/datadog-api-client'); 10 | const datadogMetrics = require('..'); 11 | 12 | function floorTo(value, points) { 13 | const factor = 10 ** points; 14 | return Math.round(value * factor) / factor; 15 | } 16 | 17 | // Remove when upgrading to Node.js 16; this is built-in (node:times/promises). 18 | function sleep(milliseconds) { 19 | return new Promise(r => setTimeout(r, milliseconds)); 20 | } 21 | 22 | // Make timestamps round seconds for ease of comparison. 23 | const NOW = floorTo(Date.now(), -3); 24 | const MINUTE = 60 * 1000; 25 | 26 | // How long to keep querying for the metric before giving up. 27 | const MAX_WAIT_TIME = 2.5 * MINUTE; 28 | // How long to wait between checks. 29 | const CHECK_INTERVAL_SECONDS = 15; 30 | 31 | const testPoints = [ 32 | [NOW - 60 * 1000, floorTo(10 * Math.random(), 1)], 33 | [NOW - 30 * 1000, floorTo(10 * Math.random(), 1)], 34 | ]; 35 | 36 | const testMetrics = [ 37 | { 38 | type: 'gauge', 39 | name: 'node.datadog.metrics.test.gauge', 40 | tags: ['test-tag-1'], 41 | }, 42 | { 43 | type: 'distribution', 44 | name: 'node.datadog.metrics.test.dist', 45 | tags: ['test-tag-2'], 46 | }, 47 | ]; 48 | 49 | async function main() { 50 | datadogMetrics.init({ flushIntervalSeconds: 0 }); 51 | 52 | for (const metric of testMetrics) { 53 | await sendMetric(metric); 54 | } 55 | 56 | await sleep(5000); 57 | 58 | for (const metric of testMetrics) { 59 | const result = await waitForSentMetric(metric); 60 | 61 | if (!result) { 62 | process.exitCode = 1; 63 | } 64 | } 65 | } 66 | 67 | async function sendMetric(metric) { 68 | console.log(`Sending random points for ${metric.type} "${metric.name}"`); 69 | 70 | for (const [timestamp, value] of testPoints) { 71 | datadogMetrics[metric.type](metric.name, value, metric.tags, timestamp); 72 | await new Promise((resolve, reject) => { 73 | datadogMetrics.flush(resolve, reject); 74 | }); 75 | } 76 | } 77 | 78 | async function queryMetric(metric) { 79 | const configuration = client.createConfiguration({ 80 | authMethods: { 81 | apiKeyAuth: process.env.DATADOG_API_KEY, 82 | appKeyAuth: process.env.DATADOG_APP_KEY, 83 | }, 84 | }); 85 | configuration.setServerVariables({ site: process.env.DATADOG_SITE }); 86 | const metricsApi = new v1.MetricsApi(configuration); 87 | 88 | // NOTE: Query timestamps are seconds, but result points are milliseconds. 89 | const data = await metricsApi.queryMetrics({ 90 | from: Math.floor((NOW - 5 * MINUTE) / 1000), 91 | to: Math.ceil(Date.now() / 1000), 92 | query: `${metric.name}{${metric.tags[0]}}`, 93 | }); 94 | 95 | return data.series && data.series[0]; 96 | } 97 | 98 | async function waitForSentMetric(metric) { 99 | const endTime = Date.now() + MAX_WAIT_TIME; 100 | while (Date.now() < endTime) { 101 | console.log(`Querying Datadog for sent points in ${metric.type} "${metric.name}"...`); 102 | const series = await queryMetric(metric); 103 | 104 | if (series) { 105 | const found = testPoints.every(([timestamp, value]) => { 106 | return series.pointlist.some(([remoteTimestamp, remoteValue]) => { 107 | // Datadog may round values differently or place them into 108 | // time intervals based on the metric's configuration. Look 109 | // for timestamp/value combinations that are close enough. 110 | return ( 111 | Math.abs(remoteTimestamp - timestamp) < 10000 && 112 | Math.abs(remoteValue - value) < 0.1 113 | ); 114 | }); 115 | }); 116 | 117 | if (found) { 118 | console.log('✔︎ Found sent points! Test passed.'); 119 | return true; 120 | } else { 121 | console.log(' Found series, but with no matching points.'); 122 | console.log(` Looking for: ${JSON.stringify(testPoints)}`); 123 | console.log(' Found:', JSON.stringify(series, null, 2)); 124 | } 125 | } 126 | 127 | console.log(` Nothing found, waiting ${CHECK_INTERVAL_SECONDS}s before trying again.`); 128 | await sleep(CHECK_INTERVAL_SECONDS * 1000); 129 | } 130 | 131 | console.log('✘ Nothing found and gave up waiting. Test failed!'); 132 | return false; 133 | } 134 | 135 | if (require.main === module) { 136 | main().catch(error => { 137 | process.exitCode = 1; 138 | console.error(error); 139 | }); 140 | } 141 | -------------------------------------------------------------------------------- /test-other/lint_text.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | set -euo pipefail 3 | 4 | # Find and log lines with "DataDog" or "dataDog", but allow "DataDogReporter" 5 | # and "github.com/DataDog", etc. This uses grep to find all lines with 6 | # "DataDog", then awk to filter out the lines that only have allowed patterns 7 | # (but not the lines that have both allowed patterns and "DataDog"). 8 | # 9 | # This could be done in one step with negative lookahead assertions in grep, 10 | # but that requires PCRE2, and many people have grep without PCRE2 support. 11 | bad_lines=$( 12 | grep \ 13 | -r \ 14 | --exclude lint_text.sh \ 15 | --exclude-dir node_modules \ 16 | --exclude-dir .git \ 17 | --line-number \ 18 | --extended-regexp \ 19 | '[dD]ataDog' \ 20 | . \ 21 | | awk ' 22 | # Store the raw line then remove allowed patterns before processing. 23 | {raw_line=$0; gsub(/DataDogReporter|github\.com\/DataDog/,"")} 24 | # Print every line that has DataDog in it. 25 | /[dD]ataDog/ {print raw_line} 26 | ' 27 | ) 28 | 29 | if [ -n "${bad_lines}" ]; then 30 | echo 'The correct spelling of "Datadog" does not capitalize the second "D".' 31 | echo 'Please fix these lines ("DataDogReporter" is allowed for now):' 32 | echo '' 33 | echo "${bad_lines}" 34 | echo '' 35 | 36 | line_count=$(echo "${bad_lines}" | wc -l | sed 's/[[:space:]]//g') 37 | echo "$line_count errors." 38 | 39 | exit 1 40 | fi 41 | -------------------------------------------------------------------------------- /test-other/types_check.ts: -------------------------------------------------------------------------------- 1 | // This file is used to check the typings! It is not mean to be executed. 2 | import { 3 | BufferedMetricsLogger, 4 | reporters, 5 | init, 6 | flush, 7 | stop, 8 | gauge, 9 | increment, 10 | histogram, 11 | distribution 12 | } from '..'; 13 | import type { BufferedMetricsLoggerOptions } from '..'; 14 | 15 | function useLogger(logger: BufferedMetricsLogger) { 16 | logger.gauge('gauge.key', 0); 17 | logger.increment('increment.key'); 18 | logger.histogram('histogram.key', 11); 19 | logger.distribution('distribution.key', 11); 20 | logger.flush(); 21 | logger.stop(); 22 | logger.stop({ flush: false }); 23 | } 24 | 25 | useLogger(new BufferedMetricsLogger()); 26 | 27 | init({ 28 | apiKey: 'abc123', 29 | appKey: 'xyz456', 30 | apiHost: 'datadoghq.eu', 31 | site: 'datadoghq.eu', 32 | prefix: 'key.prefix', 33 | host: 'some-name-for-this-machine', 34 | flushIntervalSeconds: 5, 35 | defaultTags: ['tag'], 36 | histogram: { 37 | aggregates: ['sum', 'avg'], 38 | percentiles: [0.99] 39 | }, 40 | onError (error) { console.error(error); }, 41 | aggregator: { 42 | addPoint (type: Function, key: string, value: number, tags: string[], host: string, timestampInMillis: number, options: any) { 43 | console.log("Adding a point!"); 44 | }, 45 | flush () { 46 | return []; 47 | } 48 | }, 49 | reporter: new reporters.NullReporter() 50 | }); 51 | 52 | gauge('gauge.key', 0); 53 | increment('increment.key'); 54 | histogram('histogram.key', 11); 55 | distribution('distribution.key', 11); 56 | flush(); 57 | stop(); 58 | stop({ flush: false }); 59 | -------------------------------------------------------------------------------- /test/aggregators_tests.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | const chai = require('chai'); 4 | chai.use(require('chai-string')); 5 | 6 | chai.should(); 7 | 8 | const aggregators = require('../lib/aggregators'); 9 | const metrics = require('../lib/metrics'); 10 | 11 | describe('Aggregator', function() { 12 | it('should flush correctly when empty', function() { 13 | const agg = new aggregators.Aggregator(); 14 | agg.flush().should.have.lengthOf(0); 15 | }); 16 | 17 | it('should flush a single metric correctly', function() { 18 | const agg = new aggregators.Aggregator(); 19 | agg.addPoint(metrics.Gauge, 'mykey', 23, ['mytag'], 'myhost'); 20 | agg.flush().should.have.lengthOf(1); 21 | }); 22 | 23 | it('should flush multiple metrics correctly', function() { 24 | const agg = new aggregators.Aggregator(); 25 | agg.addPoint(metrics.Gauge, 'mykey', 23, ['mytag'], 'myhost'); 26 | agg.addPoint(metrics.Gauge, 'mykey2', 42, ['mytag'], 'myhost'); 27 | agg.flush().should.have.lengthOf(2); 28 | }); 29 | 30 | it('should flush multiple metrics correctly even if only the tag differs', function() { 31 | const agg = new aggregators.Aggregator(); 32 | agg.addPoint(metrics.Gauge, 'mykey', 23, ['mytag1'], 'myhost'); 33 | agg.addPoint(metrics.Gauge, 'mykey', 23, ['mytag2'], 'myhost'); 34 | agg.flush().should.have.lengthOf(2); 35 | }); 36 | 37 | it('should clear the buffer after flushing', function() { 38 | const agg = new aggregators.Aggregator(); 39 | agg.addPoint(metrics.Gauge, 'mykey', 23, ['mytag'], 'myhost'); 40 | agg.flush().should.have.lengthOf(1); 41 | agg.flush().should.have.lengthOf(0); 42 | }); 43 | 44 | it('should update an existing metric correctly', function() { 45 | const agg = new aggregators.Aggregator(); 46 | agg.addPoint(metrics.Counter, 'test.mykey', 2, ['mytag'], 'myhost'); 47 | agg.addPoint(metrics.Counter, 'test.mykey', 3, ['mytag'], 'myhost'); 48 | const f = agg.flush(); 49 | f.should.have.lengthOf(1); 50 | f[0].should.have.nested.property('points[0][1]', 5); 51 | f[0].points.should.have.lengthOf(1); 52 | }); 53 | 54 | it('should aggregate by key + tag', function() { 55 | const agg = new aggregators.Aggregator(); 56 | agg.addPoint(metrics.Counter, 'test.mykey', 2, ['mytag1'], 'myhost'); 57 | agg.addPoint(metrics.Counter, 'test.mykey', 3, ['mytag2'], 'myhost'); 58 | const f = agg.flush(); 59 | f.should.have.lengthOf(2); 60 | f[0].should.have.nested.property('points[0][1]', 2); 61 | f[1].should.have.nested.property('points[0][1]', 3); 62 | }); 63 | 64 | it('should treat all empty tags definitions the same', function() { 65 | const agg = new aggregators.Aggregator(); 66 | agg.addPoint(metrics.Gauge, 'noTagsKey', 1, null, 'myhost'); 67 | agg.addPoint(metrics.Gauge, 'noTagsKey', 2, undefined, 'myhost'); 68 | agg.addPoint(metrics.Gauge, 'noTagsKey', 3, [], 'myhost'); 69 | const f = agg.flush(); 70 | f.should.have.lengthOf(1); 71 | f[0].should.have.nested.property('points[0][1]', 3); 72 | }); 73 | 74 | it('should normalize the tag order', function() { 75 | const agg = new aggregators.Aggregator(); 76 | agg.addPoint(metrics.Gauge, 'mykey', 1, ['t1', 't2', 't3'], 'myhost'); 77 | agg.addPoint(metrics.Gauge, 'mykey', 2, ['t3', 't2', 't1'], 'myhost'); 78 | const f = agg.flush(); 79 | f.should.have.lengthOf(1); 80 | f[0].should.have.nested.property('points[0][1]', 2); 81 | }); 82 | 83 | it('should report default tags', function() { 84 | const defaultTags = ['one', 'two']; 85 | const agg = new aggregators.Aggregator(defaultTags); 86 | agg.addPoint(metrics.Counter, 'test.mykey', 2, ['mytag1'], 'myhost'); 87 | agg.addPoint(metrics.Counter, 'test.mykey', 3, ['mytag2'], 'myhost'); 88 | const f = agg.flush(); 89 | f.should.have.lengthOf(2); 90 | f[0].tags.should.eql(['one', 'two', 'mytag1']); 91 | f[1].tags.should.eql(['one', 'two', 'mytag2']); 92 | }); 93 | 94 | it('should add default tags for compound metrics', function() { 95 | const defaultTags = ['one', 'two']; 96 | const agg = new aggregators.Aggregator(defaultTags); 97 | agg.addPoint(metrics.Histogram, 'test.mykey', 2, ['mytag1'], 'myhost'); 98 | const f = agg.flush(); 99 | 100 | for (const flushed of f) { 101 | flushed.tags.should.eql(['one', 'two', 'mytag1']); 102 | } 103 | }); 104 | }); 105 | 106 | -------------------------------------------------------------------------------- /test/loggers_tests.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | const chai = require('chai'); 4 | chai.use(require('chai-string')); 5 | chai.use(require('chai-as-promised')); 6 | 7 | chai.should(); 8 | const { BufferedMetricsLogger } = require('../lib/loggers'); 9 | const { NullReporter } = require('../lib/reporters'); 10 | 11 | class MockReporter { 12 | constructor() { 13 | this.calls = []; 14 | this.error = null; 15 | } 16 | 17 | async report(metrics) { 18 | this.calls.push(metrics); 19 | if (!metrics || metrics.length === 0) { 20 | throw new Error('No metrics were sent to the reporter!'); 21 | } else if (this.error) { 22 | throw this.error; 23 | } 24 | } 25 | } 26 | 27 | describe('BufferedMetricsLogger', function() { 28 | let warnLogs = []; 29 | let errorLogs = []; 30 | const originalWarn = console.warn; 31 | const originalError = console.error; 32 | 33 | this.beforeEach(() => { 34 | console.warn = (...args) => warnLogs.push(args); 35 | console.error = (...args) => errorLogs.push(args); 36 | }); 37 | 38 | this.afterEach(() => { 39 | console.warn = originalWarn; 40 | console.error = originalError; 41 | warnLogs = []; 42 | errorLogs = []; 43 | }); 44 | 45 | it('should have a gauge() metric', function() { 46 | const l = new BufferedMetricsLogger({ 47 | reporter: new NullReporter() 48 | }); 49 | l.aggregator = { 50 | addPoint (Type, key, value, tags, host, timestampInMillis) { 51 | key.should.equal('test.gauge'); 52 | value.should.equal(23); 53 | tags.should.eql(['a:a']); 54 | timestampInMillis.should.eql(1234567890); 55 | } 56 | }; 57 | l.gauge('test.gauge', 23, ['a:a'], 1234567890); 58 | }); 59 | 60 | it('should have an increment() metric', function() { 61 | const l = new BufferedMetricsLogger({ 62 | reporter: new NullReporter() 63 | }); 64 | 65 | l.aggregator = { 66 | addPoint (_Type, key, value, _tags, _host) { 67 | key.should.equal('test.counter'); 68 | value.should.equal(1); 69 | } 70 | }; 71 | l.increment('test.counter'); 72 | 73 | l.aggregator = { 74 | addPoint (_Type, key, value, _tags, _host) { 75 | key.should.equal('test.counter2'); 76 | value.should.equal(0); 77 | } 78 | }; 79 | l.increment('test.counter2', 0); 80 | 81 | l.aggregator = { 82 | addPoint (_Type, key, value, _tags, _host) { 83 | key.should.equal('test.counter3'); 84 | value.should.equal(1); 85 | } 86 | }; 87 | l.increment('test.counter3', null); 88 | 89 | l.aggregator = { 90 | addPoint (Type, key, value, tags, host, timestampInMillis) { 91 | key.should.equal('test.counter4'); 92 | value.should.equal(23); 93 | tags.should.eql(['z:z', 'a:a']); 94 | timestampInMillis.should.equal(1234567890); 95 | } 96 | }; 97 | l.increment('test.counter4', 23, ['z:z', 'a:a'], 1234567890); 98 | }); 99 | 100 | it('should have a histogram() metric', function() { 101 | const l = new BufferedMetricsLogger({ 102 | reporter: new NullReporter() 103 | }); 104 | l.aggregator = { 105 | addPoint (Type, key, value, tags, host, timestampInMillis) { 106 | key.should.equal('test.histogram'); 107 | value.should.equal(23); 108 | tags.should.eql(['a:a']); 109 | timestampInMillis.should.eql(1234567890); 110 | } 111 | }; 112 | l.histogram('test.histogram', 23, ['a:a'], 1234567890); 113 | }); 114 | 115 | it('should support setting options for histograms', function() { 116 | const l = new BufferedMetricsLogger({ 117 | reporter: new NullReporter() 118 | }); 119 | l.histogram('test.histogram', 23, ['a:a'], 1234567890, { 120 | percentiles: [0.5] 121 | }); 122 | 123 | const f = l.aggregator.flush(); 124 | const percentiles = f.filter(x => x.metric.endsWith('percentile')); 125 | percentiles.should.have.lengthOf(1); 126 | percentiles.should.have.nested.property( 127 | '[0].metric', 128 | 'test.histogram.50percentile' 129 | ); 130 | }); 131 | 132 | it('should support the `histogram` option', function() { 133 | const l = new BufferedMetricsLogger({ 134 | reporter: new NullReporter(), 135 | histogram: { 136 | percentiles: [0.5], 137 | aggregates: ['sum'] 138 | } 139 | }); 140 | l.histogram('test.histogram', 23); 141 | 142 | const f = l.aggregator.flush(); 143 | const percentiles = f.filter(x => x.metric.endsWith('percentile')); 144 | percentiles.should.have.lengthOf(1); 145 | percentiles.should.have.nested.property( 146 | '[0].metric', 147 | 'test.histogram.50percentile' 148 | ); 149 | const aggregates = f.filter(x => /\.histogram\.[a-z]/.test(x.metric)); 150 | aggregates.should.have.length(1); 151 | aggregates.should.have.nested.property( 152 | '[0].metric', 153 | 'test.histogram.sum' 154 | ); 155 | }); 156 | 157 | it('should allow setting a default host', function() { 158 | const l = new BufferedMetricsLogger({ 159 | reporter: new NullReporter(), 160 | host: 'myhost' 161 | }); 162 | l.aggregator = { 163 | addPoint (Type, key, value, tags, host) { 164 | host.should.equal('myhost'); 165 | } 166 | }; 167 | l.gauge('test.gauge', 23); 168 | l.increment('test.counter', 23); 169 | l.histogram('test.histogram', 23); 170 | }); 171 | 172 | it('should allow setting a default key prefix', function() { 173 | const l = new BufferedMetricsLogger({ 174 | reporter: new NullReporter(), 175 | prefix: 'mynamespace.' 176 | }); 177 | l.aggregator = { 178 | addPoint (Type, key, _value, _tags, _host) { 179 | key.should.startWith('mynamespace.test.'); 180 | } 181 | }; 182 | l.gauge('test.gauge', 23); 183 | l.increment('test.counter', 23); 184 | l.histogram('test.histogram', 23); 185 | }); 186 | 187 | it('should allow setting default tags', function() { 188 | const l = new BufferedMetricsLogger({ 189 | reporter: new NullReporter(), 190 | defaultTags: ['one', 'two'] 191 | }); 192 | l.aggregator.defaultTags.should.deep.equal(['one', 'two']); 193 | }); 194 | 195 | it('should allow setting site', function() { 196 | const l = new BufferedMetricsLogger({ 197 | apiKey: 'abc123', 198 | site: 'datadoghq.eu' 199 | }); 200 | l.reporter.should.have.property('site', 'datadoghq.eu'); 201 | }); 202 | 203 | it('should allow setting site with "app.*" URLs', function() { 204 | const l = new BufferedMetricsLogger({ 205 | apiKey: 'abc123', 206 | site: 'app.datadoghq.eu' 207 | }); 208 | l.reporter.should.have.property('site', 'datadoghq.eu'); 209 | }); 210 | 211 | it('should allow deprecated `apiHost` option', function() { 212 | const l = new BufferedMetricsLogger({ 213 | apiKey: 'abc123', 214 | apiHost: 'datadoghq.eu' 215 | }); 216 | l.reporter.should.have.property('site', 'datadoghq.eu'); 217 | 218 | const apiHostWarnings = warnLogs.filter(x => x[0].includes('apiHost')); 219 | apiHostWarnings.should.have.lengthOf(1); 220 | }); 221 | 222 | describe('flush()', function () { 223 | let reporter; 224 | 225 | function standardFlushTests() { 226 | let logger; 227 | 228 | beforeEach(function () { 229 | logger = new BufferedMetricsLogger({ apiKey: 'abc', reporter }); 230 | logger.gauge('test.gauge', 23); 231 | }); 232 | 233 | describe('on success', function () { 234 | it('should resolve the promise', async function () { 235 | await logger.flush().should.be.fulfilled; 236 | }); 237 | 238 | it('should call the success callback', (done) => { 239 | logger.flush( 240 | () => done(), 241 | (error) => done(error || new Error('Error handler called with no error object.')) 242 | ); 243 | }); 244 | }); 245 | 246 | describe('on error', function () { 247 | beforeEach(() => { 248 | reporter.error = new Error('test error'); 249 | }); 250 | 251 | it('should reject the promise with the reporter error', async () => { 252 | await logger.flush().should.be.rejectedWith(reporter.error); 253 | }); 254 | 255 | it('should call the flush error handler with the reporter error', (done) => { 256 | logger.flush( 257 | () => done(new Error('The success handler was called!')), 258 | (error) => { 259 | if (error === reporter.error) { 260 | done(); 261 | } else { 262 | done(new Error('Error was not the reporter error')); 263 | } 264 | } 265 | ); 266 | }); 267 | 268 | it('should call the `onError` init option if set', async () => { 269 | let onErrorCalled = false; 270 | let onErrorValue = null; 271 | 272 | logger = new BufferedMetricsLogger({ 273 | apiKey: 'abc', 274 | onError (error) { 275 | onErrorCalled = true; 276 | onErrorValue = error; 277 | }, 278 | reporter 279 | }); 280 | logger.gauge('test.gauge', 23); 281 | 282 | await logger.flush().should.be.rejected; 283 | onErrorCalled.should.equal(true); 284 | onErrorValue.should.equal(reporter.error); 285 | }); 286 | 287 | it('should log if `onError` init option is not set', async () => { 288 | await logger.flush().catch(() => null); 289 | 290 | errorLogs.should.have.lengthOf(1); 291 | }); 292 | }); 293 | } 294 | 295 | describe('with a promise-based reporter', function() { 296 | beforeEach(() => { 297 | reporter = new MockReporter(); 298 | }); 299 | 300 | standardFlushTests(); 301 | }); 302 | 303 | describe('[deprecated] with a callback-based reporter', function() { 304 | beforeEach(() => { 305 | reporter = new MockReporter(); 306 | reporter.report = function(metrics, onSuccess, onError) { 307 | return this.__proto__.report.call(this, metrics) 308 | .then(onSuccess, onError); 309 | }; 310 | }); 311 | 312 | standardFlushTests(); 313 | }); 314 | }); 315 | 316 | describe('stop()', function () { 317 | beforeEach(function () { 318 | this.reporter = new MockReporter(); 319 | this.logger = new BufferedMetricsLogger({ 320 | flushIntervalSeconds: 0.1, 321 | reporter: this.reporter 322 | }); 323 | this.logger.gauge('test.gauge', 23); 324 | }); 325 | 326 | it('flushes by default', async function () { 327 | this.reporter.calls.should.have.lengthOf(0); 328 | await this.logger.stop(); 329 | this.reporter.calls.should.have.lengthOf(1); 330 | }); 331 | 332 | it('does not flush if `flush: false`', async function () { 333 | this.reporter.calls.should.have.lengthOf(0); 334 | await this.logger.stop({ flush: false }); 335 | this.reporter.calls.should.have.lengthOf(0); 336 | }); 337 | 338 | it('stops auto-flushing', async function () { 339 | await this.logger.stop({ flush: false }); 340 | this.reporter.calls.should.have.lengthOf(0); 341 | 342 | await new Promise(r => setTimeout(r, 125)); 343 | this.reporter.calls.should.have.lengthOf(0); 344 | }); 345 | 346 | it('stops auto-flushing on exit', async function () { 347 | await this.logger.stop({ flush: false }); 348 | this.reporter.calls.should.have.lengthOf(0); 349 | 350 | process.emit('beforeExit', 0); 351 | this.reporter.calls.should.have.lengthOf(0); 352 | }); 353 | }); 354 | 355 | describe('option: flushIntervalSeconds', function () { 356 | beforeEach(function () { 357 | this.reporter = new MockReporter(); 358 | }); 359 | 360 | it('flushes after the specified number of seconds', async function () { 361 | this.logger = new BufferedMetricsLogger({ 362 | flushIntervalSeconds: 0.1, 363 | reporter: this.reporter 364 | }); 365 | this.logger.gauge('test.gauge', 23); 366 | 367 | this.reporter.calls.should.have.lengthOf(0); 368 | await new Promise(r => setTimeout(r, 125)); 369 | this.reporter.calls.should.have.lengthOf(1); 370 | }); 371 | 372 | it('flushes before exiting if auto-flushing', async function () { 373 | this.logger = new BufferedMetricsLogger({ 374 | flushIntervalSeconds: 0.1, 375 | reporter: this.reporter 376 | }); 377 | this.logger.gauge('test.gauge', 23); 378 | 379 | this.reporter.calls.should.have.lengthOf(0); 380 | process.emit('beforeExit', 0); 381 | this.reporter.calls.should.have.lengthOf(1); 382 | }); 383 | 384 | it('does not auto-flush if set to 0', async function () { 385 | this.logger = new BufferedMetricsLogger({ 386 | flushIntervalSeconds: 0, 387 | reporter: this.reporter 388 | }); 389 | this.logger.gauge('test.gauge', 23); 390 | this.reporter.calls.should.have.lengthOf(0); 391 | 392 | await new Promise(r => setTimeout(r, 125)); 393 | this.reporter.calls.should.have.lengthOf(0); 394 | 395 | process.emit('beforeExit', 0); 396 | this.reporter.calls.should.have.lengthOf(0); 397 | }); 398 | 399 | it('throws if set to a negative value', async function () { 400 | (() => { 401 | this.logger = new BufferedMetricsLogger({ 402 | flushIntervalSeconds: -1, 403 | reporter: this.reporter 404 | }); 405 | }).should.throw(/flushIntervalSeconds/); 406 | }); 407 | }); 408 | }); 409 | -------------------------------------------------------------------------------- /test/metrics_tests.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | const chai = require('chai'); 4 | chai.use(require('chai-string')); 5 | 6 | chai.should(); 7 | 8 | const metrics = require('../lib/metrics'); 9 | 10 | describe('Metric', function() { 11 | it('should set its initial state correctly', function() { 12 | const m = new metrics.Metric('the.key', ['tag1'], 'myhost'); 13 | m.key.should.equal('the.key'); 14 | m.tags.should.deep.equal(['tag1']); 15 | m.host.should.equal('myhost'); 16 | }); 17 | 18 | it('should update the timestamp with Date.now when a data point is added', function() { 19 | const m = new metrics.Metric(); 20 | const now = Date.now(); 21 | m.updateTimestamp(); 22 | const diff = (m.timestamp * 1000) - now; 23 | Math.abs(diff).should.lessThan(1000); // within one second 24 | }); 25 | it('should update the timestamp when a data point is added with a timestamp in ms', function() { 26 | const m = new metrics.Metric(); 27 | m.updateTimestamp(123000); 28 | m.timestamp.should.equal(123); 29 | }); 30 | }); 31 | 32 | describe('Gauge', function() { 33 | it('should extend Metric', function() { 34 | const g = new metrics.Gauge(); 35 | g.updateTimestamp.should.be.a('function'); 36 | }); 37 | 38 | it('should flush correctly', function() { 39 | const g = new metrics.Gauge('the.key', ['mytag'], 'myhost'); 40 | g.addPoint(1); 41 | const f = g.flush(); 42 | f.should.have.lengthOf(1); 43 | f[0].should.have.property('metric', 'the.key'); 44 | f[0].should.have.deep.property('tags', ['mytag']); 45 | f[0].should.have.property('host', 'myhost'); 46 | f[0].should.have.property('type', 'gauge'); 47 | f[0].should.have.deep.property('points', [[ g.timestamp, 1 ]]); 48 | }); 49 | 50 | it('should flush correctly when given timestamp', function() { 51 | const g = new metrics.Gauge('the.key', ['mytag'], 'myhost'); 52 | g.addPoint(1, 123000); 53 | const f = g.flush(); 54 | f.should.have.lengthOf(1); 55 | f[0].should.have.property('metric', 'the.key'); 56 | f[0].should.have.deep.property('tags', ['mytag']); 57 | f[0].should.have.property('host', 'myhost'); 58 | f[0].should.have.property('type', 'gauge'); 59 | f[0].should.have.deep.property('points', [[ 123, 1 ]]); 60 | }); 61 | }); 62 | 63 | describe('Counter', function() { 64 | it('should extend Metric', function() { 65 | const g = new metrics.Counter(); 66 | g.updateTimestamp.should.be.a('function'); 67 | }); 68 | 69 | it('should flush correctly', function() { 70 | const g = new metrics.Counter('the.key', ['mytag'], 'myhost'); 71 | g.addPoint(1); 72 | const f = g.flush(); 73 | f.should.have.lengthOf(1); 74 | f[0].should.have.property('metric', 'the.key'); 75 | f[0].should.have.deep.property('tags', ['mytag']); 76 | f[0].should.have.property('host', 'myhost'); 77 | f[0].should.have.property('type', 'count'); 78 | f[0].should.have.deep.property('points', [[ g.timestamp, 1 ]]); 79 | }); 80 | 81 | it('should flush correctly when given a timestamp', function() { 82 | const g = new metrics.Counter('the.key', ['mytag'], 'myhost'); 83 | g.addPoint(1, 123000); 84 | const f = g.flush(); 85 | f.should.have.lengthOf(1); 86 | f[0].should.have.property('metric', 'the.key'); 87 | f[0].should.have.deep.property('tags', ['mytag']); 88 | f[0].should.have.property('host', 'myhost'); 89 | f[0].should.have.property('type', 'count'); 90 | f[0].should.have.deep.property('points', [[ 123, 1 ]]); 91 | }); 92 | }); 93 | 94 | describe('Histogram', function() { 95 | it('should extend Metric', function() { 96 | const h = new metrics.Histogram(); 97 | h.updateTimestamp.should.be.a('function'); 98 | }); 99 | 100 | it('should report the min and max of all values', function() { 101 | const h = new metrics.Histogram('hist'); 102 | let f = h.flush(); 103 | 104 | f.should.have.nested.property('[0].metric', 'hist.min'); 105 | f.should.have.nested.deep.property('[0].points', [[ h.timestamp, Infinity ]]); 106 | f.should.have.nested.property('[1].metric', 'hist.max'); 107 | f.should.have.nested.deep.property('[1].points', [[ h.timestamp, -Infinity ]]); 108 | 109 | h.addPoint(23); 110 | 111 | f = h.flush(); 112 | f.should.have.nested.property('[0].metric', 'hist.min'); 113 | f.should.have.nested.deep.property('[0].points', [[ h.timestamp, 23 ]]); 114 | f.should.have.nested.property('[1].metric', 'hist.max'); 115 | f.should.have.nested.deep.property('[0].points', [[ h.timestamp, 23 ]]); 116 | }); 117 | 118 | it('should report a sum of all values', function() { 119 | const h = new metrics.Histogram('hist'); 120 | let f = h.flush(); 121 | 122 | f.should.have.nested.property('[2].metric', 'hist.sum'); 123 | f.should.have.nested.deep.property('[2].points', [[ h.timestamp, 0 ]]); 124 | 125 | h.addPoint(2); 126 | h.addPoint(3); 127 | 128 | f = h.flush(); 129 | f.should.have.nested.property('[2].metric', 'hist.sum'); 130 | f.should.have.nested.deep.property('[2].points', [[ h.timestamp, 5 ]]); 131 | }); 132 | 133 | it('should report the number of samples (count)', function() { 134 | const h = new metrics.Histogram('hist'); 135 | let f = h.flush(); 136 | 137 | f.should.have.nested.property('[3].metric', 'hist.count'); 138 | f.should.have.nested.deep.property('[3].points', [[ h.timestamp, 0 ]]); 139 | 140 | h.addPoint(2); 141 | h.addPoint(3); 142 | 143 | f = h.flush(); 144 | f.should.have.nested.property('[3].metric', 'hist.count'); 145 | f.should.have.nested.deep.property('[3].points', [[ h.timestamp, 2 ]]); 146 | }); 147 | 148 | it('should report the average', function() { 149 | const h = new metrics.Histogram('hist'); 150 | let f = h.flush(); 151 | 152 | f.should.have.nested.property('[4].metric', 'hist.avg'); 153 | f.should.have.nested.deep.property('[4].points', [[ h.timestamp, 0 ]]); 154 | 155 | h.addPoint(2); 156 | h.addPoint(3); 157 | 158 | f = h.flush(); 159 | f.should.have.nested.property('[4].metric', 'hist.avg'); 160 | f.should.have.nested.deep.property('[4].points', [[ h.timestamp, 2.5 ]]); 161 | }); 162 | 163 | it('should report the median', function() { 164 | const h = new metrics.Histogram('hist'); 165 | let f = h.flush(); 166 | 167 | f.should.have.nested.property('[5].metric', 'hist.median'); 168 | f.should.have.nested.deep.property('[5].points', [[ h.timestamp, 0 ]]); 169 | 170 | h.addPoint(2); 171 | h.addPoint(3); 172 | h.addPoint(10); 173 | 174 | f = h.flush(); 175 | f.should.have.nested.property('[5].metric', 'hist.median'); 176 | f.should.have.nested.deep.property('[5].points', [[ h.timestamp, 3 ]]); 177 | 178 | h.addPoint(4); 179 | 180 | f = h.flush(); 181 | f.should.have.nested.property('[5].metric', 'hist.median'); 182 | f.should.have.nested.deep.property('[5].points', [[ h.timestamp, 3.5 ]]); 183 | }); 184 | 185 | it('should report the correct percentiles', function() { 186 | const h = new metrics.Histogram('hist'); 187 | h.addPoint(1); 188 | let f = h.flush(); 189 | 190 | f.should.have.nested.property('[6].metric', 'hist.75percentile'); 191 | f.should.have.nested.deep.property('[6].points', [[ h.timestamp, 1 ]]); 192 | f.should.have.nested.property('[7].metric', 'hist.85percentile'); 193 | f.should.have.nested.deep.property('[7].points', [[ h.timestamp, 1 ]]); 194 | f.should.have.nested.property('[8].metric', 'hist.95percentile'); 195 | f.should.have.nested.deep.property('[8].points', [[ h.timestamp, 1 ]]); 196 | f.should.have.nested.property('[9].metric', 'hist.99percentile'); 197 | f.should.have.nested.deep.property('[9].points', [[ h.timestamp, 1 ]]); 198 | 199 | // Create 100 samples from [1..100] so we can 200 | // verify the calculated percentiles. 201 | for (let i = 2; i <= 100; i++) { 202 | h.addPoint(i); 203 | } 204 | f = h.flush(); 205 | 206 | f.should.have.nested.property('[6].metric', 'hist.75percentile'); 207 | f.should.have.nested.deep.property('[6].points', [[ h.timestamp, 75 ]]); 208 | f.should.have.nested.property('[7].metric', 'hist.85percentile'); 209 | f.should.have.nested.deep.property('[7].points', [[ h.timestamp, 85 ]]); 210 | f.should.have.nested.property('[8].metric', 'hist.95percentile'); 211 | f.should.have.nested.deep.property('[8].points', [[ h.timestamp, 95 ]]); 212 | f.should.have.nested.property('[9].metric', 'hist.99percentile'); 213 | f.should.have.nested.deep.property('[9].points', [[ h.timestamp, 99 ]]); 214 | }); 215 | 216 | it('should use custom percentiles and aggregates', function() { 217 | const aggregates = ['avg']; 218 | const percentiles = [0.85]; 219 | const h = new metrics.Histogram('hist', [], 'myhost', { aggregates, percentiles }); 220 | h.addPoint(1); 221 | let f = h.flush(); 222 | 223 | f.should.have.nested.property('[0].metric', 'hist.avg'); 224 | f.should.have.nested.deep.property('[0].points', [[ h.timestamp, 1 ]]); 225 | 226 | f.should.have.nested.property('[1].metric', 'hist.85percentile'); 227 | f.should.have.nested.deep.property('[1].points', [[ h.timestamp, 1 ]]); 228 | 229 | // Create 100 samples from [1..100] so we can 230 | // verify the calculated percentiles. 231 | for (let i = 2; i <= 100; i++) { 232 | h.addPoint(i); 233 | } 234 | f = h.flush(); 235 | 236 | f.should.have.nested.property('[1].metric', 'hist.85percentile'); 237 | f.should.have.nested.deep.property('[1].points', [[ h.timestamp, 85 ]]); 238 | }); 239 | }); 240 | 241 | describe('Distribution', function() { 242 | it('should extend Metric', function() { 243 | const g = new metrics.Distribution(); 244 | g.updateTimestamp.should.be.a('function'); 245 | }); 246 | 247 | it('should flush correctly', function() { 248 | const g = new metrics.Distribution('the.key', ['mytag'], 'myhost'); 249 | g.addPoint(1); 250 | const f = g.flush(); 251 | f.should.have.lengthOf(1); 252 | f[0].should.have.property('metric', 'the.key'); 253 | f[0].should.have.deep.property('tags', ['mytag']); 254 | f[0].should.have.property('host', 'myhost'); 255 | f[0].should.have.property('type', 'distribution'); 256 | f[0].should.have.deep.property('points', [[ g.timestamp, [1] ]]); 257 | }); 258 | 259 | it('should flush correctly when given timestamp', function() { 260 | const g = new metrics.Distribution('the.key', ['mytag'], 'myhost'); 261 | g.addPoint(1, 123000); 262 | const f = g.flush(); 263 | f.should.have.lengthOf(1); 264 | f[0].should.have.property('metric', 'the.key'); 265 | f[0].should.have.deep.property('tags', ['mytag']); 266 | f[0].should.have.property('host', 'myhost'); 267 | f[0].should.have.property('type', 'distribution'); 268 | f[0].should.have.deep.property('points', [[ 123, [1] ]]); 269 | }); 270 | 271 | it('should format multiple points from different times', function () { 272 | const g = new metrics.Distribution('the.key', ['mytag'], 'myhost'); 273 | g.addPoint(1, 123000); 274 | g.addPoint(2, 125000); 275 | g.addPoint(3, 121000); 276 | 277 | const f = g.flush(); 278 | f.should.have.lengthOf(1); 279 | f[0].points.should.eql([ 280 | [123, [1]], 281 | [125, [2]], 282 | [121, [3]] 283 | ]); 284 | }); 285 | 286 | it('should format multiple points from the same time', function () { 287 | const g = new metrics.Distribution('the.key', ['mytag'], 'myhost'); 288 | g.addPoint(1, 123000); 289 | g.addPoint(2, 125000); 290 | g.addPoint(3, 125000); 291 | 292 | const f = g.flush(); 293 | f.should.have.lengthOf(1); 294 | f[0].points.should.eql([ 295 | [123, [1]], 296 | [125, [2, 3]] 297 | ]); 298 | }); 299 | }); 300 | -------------------------------------------------------------------------------- /test/module_tests.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | const chai = require('chai'); 4 | const reporters = require('../lib/reporters.js'); 5 | 6 | chai.should(); 7 | 8 | /** @type {import("..") DotadogMetrics} */ 9 | let metrics = null; 10 | 11 | // Force-reload the module before every test so we 12 | // can realistically test all the scenarios. 13 | beforeEach(function() { 14 | delete require.cache[require.resolve('../index.js')]; 15 | metrics = require('../index.js'); 16 | }); 17 | 18 | afterEach(async function() { 19 | await metrics.stop({ flush: false }); 20 | }); 21 | 22 | describe('datadog-metrics', function() { 23 | it('should let me create a metrics logger instance', function() { 24 | metrics.BufferedMetricsLogger.should.be.a('function'); 25 | const logger = new metrics.BufferedMetricsLogger({ 26 | reporter: new reporters.NullReporter() 27 | }); 28 | logger.gauge('test.gauge', 23); 29 | }); 30 | 31 | it('should let me configure a shared metrics logger instance', async function() { 32 | metrics.init.should.be.a('function'); 33 | metrics.init({ 34 | flushIntervalSeconds: 0, 35 | reporter: { 36 | async report (series) { 37 | series.should.have.lengthOf(12); // 3 + 9 for the histogram. 38 | series[0].should.have.nested.property('points[0][1]', 23); 39 | series[0].should.have.property('metric', 'test.gauge'); 40 | series[0].tags.should.have.lengthOf(0); 41 | } 42 | } 43 | }); 44 | metrics.gauge('test.gauge', 23); 45 | metrics.increment('test.counter'); 46 | metrics.increment('test.counter', 23); 47 | metrics.histogram('test.histogram', 23); 48 | await metrics.flush(); 49 | }); 50 | 51 | it('should report gauges with the same name but different tags separately', async function() { 52 | metrics.init.should.be.a('function'); 53 | metrics.init({ 54 | flushIntervalSeconds: 0, 55 | reporter: { 56 | async report (series) { 57 | series.should.have.lengthOf(2); 58 | series[0].should.have.nested.property('points[0][1]', 1); 59 | series[0].should.have.property('metric', 'test.gauge'); 60 | series[0].should.have.deep.property('tags', ['tag1']); 61 | series[1].should.have.nested.property('points[0][1]', 2); 62 | series[1].should.have.property('metric', 'test.gauge'); 63 | series[1].should.have.deep.property('tags', ['tag2']); 64 | } 65 | } 66 | }); 67 | metrics.gauge('test.gauge', 1, ['tag1']); 68 | metrics.gauge('test.gauge', 2, ['tag2']); 69 | await metrics.flush(); 70 | }); 71 | 72 | it('should lazily provide a shared metrics logger instance', function() { 73 | process.env.DATADOG_API_KEY = 'TESTKEY'; 74 | metrics.gauge('test.gauge', 23); 75 | metrics.increment('test.counter'); 76 | metrics.increment('test.counter', 23); 77 | metrics.histogram('test.histogram', 23); 78 | delete process.env.DATADOG_API_KEY; 79 | }); 80 | 81 | it('should publicly export built-in reporters', function() { 82 | metrics.reporters.should.have.property('DatadogReporter', reporters.DatadogReporter); 83 | metrics.reporters.should.have.property('NullReporter', reporters.NullReporter); 84 | }); 85 | }); 86 | -------------------------------------------------------------------------------- /test/reporters_tests.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | const chai = require('chai'); 4 | const nock = require('nock'); 5 | 6 | const { DatadogReporter, NullReporter } = require('../lib/reporters'); 7 | const { AuthorizationError } = require('../lib/errors'); 8 | 9 | chai.use(require('chai-as-promised')); 10 | chai.should(); 11 | 12 | const mockMetric = { 13 | metric: 'test.gauge', 14 | points: [[Math.floor(Date.now() / 1000), 1]], 15 | type: 'gauge', 16 | }; 17 | 18 | describe('NullReporter', function() { 19 | it('should always resolve', async function() { 20 | const reporter = new NullReporter(); 21 | await reporter.report([mockMetric]); 22 | }); 23 | }); 24 | 25 | describe('DatadogReporter', function() { 26 | afterEach(() => { 27 | nock.cleanAll(); 28 | }); 29 | 30 | describe('constructor', function() { 31 | let originalEnv = Object.entries(process.env); 32 | 33 | afterEach(() => { 34 | process.env = Object.fromEntries(originalEnv); 35 | }); 36 | 37 | it('creates a DatadogReporter', () => { 38 | const instance = new DatadogReporter({ 39 | apiKey: 'abc', 40 | site: 'datadoghq.eu' 41 | }); 42 | instance.should.be.an.instanceof(DatadogReporter); 43 | }); 44 | 45 | it('reads the API key from DATADOG_API_KEY environment if not specified', () => { 46 | process.env.DATADOG_API_KEY = 'abc'; 47 | const instance = new DatadogReporter(); 48 | instance.should.be.an.instanceof(DatadogReporter); 49 | }); 50 | 51 | it('reads the API key from DD_API_KEY environment if not specified', () => { 52 | process.env.DD_API_KEY = 'abc'; 53 | const instance = new DatadogReporter(); 54 | instance.should.be.an.instanceof(DatadogReporter); 55 | }); 56 | 57 | it('throws if no API key is set', () => { 58 | (() => new DatadogReporter()).should.throw(/DATADOG_API_KEY/); 59 | }); 60 | }); 61 | 62 | describe('report', function() { 63 | let reporter; 64 | 65 | beforeEach(() => { 66 | reporter = new DatadogReporter({ 67 | apiKey: 'abc', 68 | retryBackoff: 0.01 69 | }); 70 | }); 71 | 72 | it('should resolve on success', async function () { 73 | nock('https://api.datadoghq.com') 74 | .post('/api/v1/series') 75 | .reply(202, { errors: [] }); 76 | 77 | await reporter.report([mockMetric]).should.be.fulfilled; 78 | }); 79 | 80 | it('should reject on http error', async function () { 81 | nock('https://api.datadoghq.com') 82 | .post('/api/v1/series') 83 | .times(3) 84 | .reply(500, { errors: ['Unknown!'] }); 85 | 86 | await reporter.report([mockMetric]).should.be.rejected; 87 | }); 88 | 89 | it('should retry on http error', async function () { 90 | nock('https://api.datadoghq.com') 91 | .post('/api/v1/series') 92 | .times(1) 93 | .reply(500, { errors: ['Unknown!'] }) 94 | .post('/api/v1/series') 95 | .times(1) 96 | .reply(202, { errors: [] }); 97 | 98 | await reporter.report([mockMetric]).should.be.fulfilled; 99 | }); 100 | 101 | it('should respect the `Retry-After` header', async function () { 102 | const callTimes = []; 103 | 104 | nock('https://api.datadoghq.com') 105 | .post('/api/v1/series') 106 | .times(1) 107 | .reply(() => { 108 | callTimes.push(Date.now()); 109 | return [429, { errors: ['Uhoh'] }, { 'Retry-After': '1' }]; 110 | }) 111 | .post('/api/v1/series') 112 | .times(1) 113 | .reply(() => { 114 | callTimes.push(Date.now()); 115 | return [202, { errors: [] }]; 116 | }); 117 | 118 | await reporter.report([mockMetric]).should.be.fulfilled; 119 | 120 | const timeDelta = callTimes[1] - callTimes[0]; 121 | timeDelta.should.be.within(980, 1020); 122 | }); 123 | 124 | it('should respect the `X-RateLimit-Reset` header', async function () { 125 | const callTimes = []; 126 | 127 | nock('https://api.datadoghq.com') 128 | .post('/api/v1/series') 129 | .times(1) 130 | .reply(() => { 131 | callTimes.push(Date.now()); 132 | return [429, { errors: ['Uhoh'] }, { 'X-RateLimit-Reset': '1' }]; 133 | }) 134 | .post('/api/v1/series') 135 | .times(1) 136 | .reply(() => { 137 | callTimes.push(Date.now()); 138 | return [202, { errors: [] }]; 139 | }); 140 | 141 | await reporter.report([mockMetric]).should.be.fulfilled; 142 | 143 | const timeDelta = callTimes[1] - callTimes[0]; 144 | timeDelta.should.be.within(980, 1020); 145 | }); 146 | 147 | it('should reject on network error', async function () { 148 | nock('https://api.datadoghq.com') 149 | .post('/api/v1/series') 150 | .times(3) 151 | .replyWithError({ 152 | message: 'connect ECONNREFUSED', 153 | code: 'ECONNREFUSED' 154 | }); 155 | 156 | await reporter.report([mockMetric]).should.be.rejected; 157 | }); 158 | 159 | it('should retry on network error', async function () { 160 | nock('https://api.datadoghq.com') 161 | .post('/api/v1/series') 162 | .times(1) 163 | .replyWithError({ 164 | message: 'connect ECONNREFUSED', 165 | code: 'ECONNREFUSED' 166 | }) 167 | .post('/api/v1/series') 168 | .times(1) 169 | .reply(202, { errors: [] }); 170 | 171 | await reporter.report([mockMetric]).should.be.fulfilled; 172 | }); 173 | 174 | it('should not retry on unknown errors', async function () { 175 | nock('https://api.datadoghq.com') 176 | .post('/api/v1/series') 177 | .times(1) 178 | .replyWithError({ message: 'Oh no!' }); 179 | 180 | await reporter.report([mockMetric]).should.be.rejectedWith('Oh no!'); 181 | }); 182 | 183 | it('rejects with AuthorizationError when the API key is invalid', async function() { 184 | nock('https://api.datadoghq.com') 185 | .post('/api/v1/series') 186 | .reply(403, { errors: ['Forbidden'] }); 187 | 188 | await reporter.report([mockMetric]).should.be.rejectedWith(AuthorizationError); 189 | }); 190 | }); 191 | 192 | it('should allow two instances to use different credentials', async function() { 193 | const apiKeys = ['abc', 'xyz']; 194 | let receivedKeys = []; 195 | 196 | nock('https://api.datadoghq.com') 197 | .matchHeader('dd-api-key', (values) => { 198 | receivedKeys.push(values[0]); 199 | return true; 200 | }) 201 | .post('/api/v1/series') 202 | .times(apiKeys.length) 203 | .reply(202, { errors: [] }); 204 | 205 | const reporters = apiKeys.map(apiKey => new DatadogReporter({ apiKey })); 206 | await Promise.all(reporters.map(r => r.report([mockMetric]))); 207 | 208 | receivedKeys.should.deep.equal(apiKeys); 209 | }); 210 | }); 211 | -------------------------------------------------------------------------------- /tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | "lib": ["es2019"], 4 | "target": "es2019", 5 | 6 | "module": "commonjs", 7 | "esModuleInterop": true, 8 | "allowSyntheticDefaultImports": true, 9 | "moduleResolution": "node", 10 | "allowJs": true, 11 | "checkJs": true, 12 | "composite": true, 13 | 14 | "outDir": "dist", 15 | "declarationMap": true, 16 | "declaration": true, 17 | "emitDeclarationOnly": true 18 | }, 19 | "include": ["./index.js", "./lib/**/*"] 20 | } 21 | --------------------------------------------------------------------------------