├── .babelrc ├── .circleci └── config.yml ├── .eslintignore ├── .eslintrc.yaml ├── .github ├── ISSUE_TEMPLATE │ ├── bug_report.md │ ├── config.yml │ └── feature_request.md ├── pull_request_template.md └── workflows │ └── stale.yml ├── .gitignore ├── .hound.yml ├── .ldrelease └── config.yml ├── .prettierrc ├── CHANGELOG.md ├── CODEOWNERS ├── CONTRIBUTING.md ├── LICENSE.txt ├── README.md ├── SECURITY.md ├── attribute_reference.js ├── big_segments.js ├── caching_store_wrapper.js ├── configuration.js ├── context.js ├── context_filter.js ├── contract-tests ├── README.md ├── index.js ├── log.js ├── package.json ├── sdkClientEntity.js └── testharness-suppressions.txt ├── diagnostic_events.js ├── docs └── typedoc.js ├── errors.js ├── evaluator.js ├── event_factory.js ├── event_processor.js ├── event_summarizer.js ├── feature_store.js ├── feature_store_event_wrapper.js ├── file_data_source.js ├── flags_state.js ├── index.d.ts ├── index.js ├── integrations.js ├── interfaces.js ├── loggers.js ├── messages.js ├── operators.js ├── package.json ├── polling.js ├── requestor.js ├── scripts └── better-audit.sh ├── sharedtest ├── big_segment_store_tests.js ├── feature_store_tests.js ├── persistent_feature_store_tests.js └── store_tests.js ├── streaming.js ├── test-types.ts ├── test ├── LDClient-big-segments-test.js ├── LDClient-end-to-end-test.js ├── LDClient-evaluation-all-flags-test.js ├── LDClient-evaluation-test.js ├── LDClient-events-test.js ├── LDClient-listeners-test.js ├── LDClient-test.js ├── LDClient-tls-test.js ├── attribute_reference-test.js ├── big_segments-test.js ├── caching_store_wrapper-test.js ├── configuration-test.js ├── context-test.js ├── context_filter-test.js ├── diagnostic_events-test.js ├── evaluator-big-segments-test.js ├── evaluator-bucketing-test.js ├── evaluator-clause-test.js ├── evaluator-flag-test.js ├── evaluator-pre-conditions-test.js ├── evaluator-rule-test.js ├── evaluator-segment-match-test.js ├── evaluator_helpers.js ├── event_processor-test.js 
├── event_summarizer-test.js ├── feature_store-test.js ├── feature_store_event_wrapper-test.js ├── feature_store_test_base.js ├── file_data_source-test.js ├── loggers-test.js ├── operators-test.js ├── polling-test.js ├── requestor-test.js ├── store_tests_big_segments-test.js ├── streaming-test.js ├── stubs.js ├── test_data-test.js └── update_queue-test.js ├── test_data.js ├── tsconfig.json ├── update_queue.js ├── utils ├── __tests__ │ ├── httpUtils-test.js │ └── wrapPromiseCallback-test.js ├── asyncUtils.js ├── httpUtils.js ├── stringifyAttrs.js └── wrapPromiseCallback.js └── versioned_data_kind.js /.babelrc: -------------------------------------------------------------------------------- 1 | { 2 | "env": { 3 | "test": { 4 | "presets": [ 5 | [ 6 | "@babel/preset-env", 7 | { 8 | "targets": { 9 | "node": "6" 10 | } 11 | } 12 | ] 13 | ] 14 | } 15 | } 16 | } -------------------------------------------------------------------------------- /.circleci/config.yml: -------------------------------------------------------------------------------- 1 | version: 2.1 2 | 3 | orbs: 4 | win: circleci/windows@2.4.0 5 | 6 | workflows: 7 | build-and-test-all: 8 | jobs: 9 | # CircleCI's current generation of Node images, cimg/node, allow you to leave the 10 | # patch version unpinned, but require you to specify the minor version. The one 11 | # exception is cimg/node:current, which will always give us the latest release in 12 | # the latest major version-- and the latest major version is where it's most likely 13 | # that there would be a new minor version, anyway. 
14 | - build-test-linux: 15 | name: latest Node version 16 | docker-image: cimg/node:current 17 | run-lint: true 18 | - build-test-linux: 19 | name: Node 17.9 20 | docker-image: cimg/node:17.9 21 | - build-test-linux: 22 | name: Node 16.14 23 | docker-image: cimg/node:16.14 24 | - build-test-linux: 25 | name: Node 15.14 26 | docker-image: cimg/node:15.14 27 | - build-test-linux: 28 | name: Node 14.19 29 | docker-image: cimg/node:14.19 30 | - build-test-linux: 31 | name: Node 13.14 32 | docker-image: cimg/node:13.14 33 | - build-test-linux: 34 | name: Node 12.22 35 | docker-image: cimg/node:12.22 36 | - build-test-windows: 37 | name: Windows 38 | 39 | jobs: 40 | build-test-linux: 41 | parameters: 42 | run-lint: 43 | type: boolean 44 | default: false 45 | docker-image: 46 | type: string 47 | run-contract-tests: 48 | type: boolean 49 | default: true 50 | docker: 51 | - image: <> 52 | steps: 53 | - checkout 54 | - run: echo "Node version:" `node --version` 55 | - run: npm install 56 | - run: 57 | command: npm test 58 | environment: 59 | JEST_JUNIT_OUTPUT: "reports/junit/js-test-results.xml" 60 | - run: npm run check-typescript 61 | - when: 62 | condition: <> 63 | steps: 64 | - run: npm run lint 65 | - when: 66 | condition: <> 67 | steps: 68 | - run: 69 | command: npm run contract-test-service 70 | background: true 71 | - run: mkdir -p reports/junit 72 | - run: TEST_HARNESS_PARAMS="-junit reports/junit/contract-test-results.xml -skip-from contract-tests/testharness-suppressions.txt" npm run contract-test-harness 73 | - run: 74 | name: dependency audit 75 | command: ./scripts/better-audit.sh 76 | - store_test_results: 77 | path: reports/junit 78 | - store_artifacts: 79 | path: reports/junit 80 | 81 | build-test-windows: 82 | executor: 83 | name: win/default 84 | shell: powershell.exe 85 | steps: 86 | - checkout 87 | - run: node --version 88 | - run: npm install 89 | - run: npm test 90 | -------------------------------------------------------------------------------- 
/.eslintignore: -------------------------------------------------------------------------------- 1 | node_modules/ 2 | docs/ 3 | test-types.js 4 | test/ 5 | test.js 6 | -------------------------------------------------------------------------------- /.eslintrc.yaml: -------------------------------------------------------------------------------- 1 | --- 2 | # parser: babel-eslint # we're not currently transpiling this SDK 3 | root: true 4 | extends: 5 | - eslint:recommended # https://eslint.org/docs/rules/ 6 | env: 7 | es6: true 8 | node: true 9 | parserOptions: 10 | ecmaVersion: 2018 11 | plugins: 12 | #- babel 13 | - prettier 14 | globals: 15 | VERSION: true 16 | describe: true 17 | it: true 18 | expect: true 19 | jest: true 20 | beforeEach: true 21 | afterEach: true 22 | window: false # don't allow references to "window" or "document" in the Node SDK 23 | document: false 24 | rules: 25 | # https://eslint.org/docs/rules/array-callback-return 26 | array-callback-return: error 27 | 28 | # https://eslint.org/docs/rules/arrow-body-style 29 | arrow-body-style: 30 | - error 31 | - as-needed 32 | 33 | # https://github.com/babel/eslint-plugin-babel 34 | #babel/semi: error 35 | 36 | # Deprecations are required to turn enforce this 37 | camelcase: warn 38 | 39 | # https://eslint.org/docs/rules/curly 40 | curly: 41 | - error 42 | - all 43 | 44 | # https://eslint.org/docs/rules/eqeqeq 45 | eqeqeq: error 46 | 47 | # https://eslint.org/docs/rules/no-array-constructor 48 | no-array-constructor: error 49 | 50 | # https://eslint.org/docs/rules/no-eval 51 | no-eval: error 52 | 53 | # https://eslint.org/docs/rules/no-implicit-coercion 54 | no-implicit-coercion: 55 | - 'off' 56 | - boolean: false 57 | number: true 58 | string: true 59 | allow: [] 60 | 61 | # https://eslint.org/docs/rules/no-implied-eval 62 | no-implied-eval: error 63 | 64 | # https://eslint.org/docs/rules/no-nested-ternary 65 | no-nested-ternary: error 66 | 67 | # https://eslint.org/docs/rules/no-new-object 68 | 
no-new-object: error 69 | 70 | # https://eslint.org/docs/rules/no-new-wrappers 71 | no-new-wrappers: error 72 | 73 | # https://eslint.org/docs/rules/no-param-reassign 74 | no-param-reassign: 75 | - error 76 | - props: true 77 | 78 | # https://eslint.org/docs/rules/no-restricted-syntax 79 | no-restricted-syntax: 80 | - error 81 | - selector: ForInStatement 82 | message: avoid for...in which can pick up properties from prototypes; use for...of, or plain for if you need array indices 83 | 84 | # https://eslint.org/docs/rules/no-return-assign 85 | no-return-assign: error 86 | 87 | # https://eslint.org/docs/rules/no-self-compare 88 | no-self-compare: error 89 | 90 | # https://eslint.org/docs/rules/no-use-before-define 91 | no-use-before-define: 92 | - error 93 | - functions: false 94 | 95 | # https://eslint.org/docs/rules/no-var 96 | no-var: error 97 | 98 | # https://eslint.org/docs/rules/prefer-arrow-callback 99 | prefer-arrow-callback: error 100 | 101 | # https://eslint.org/docs/rules/prefer-const 102 | prefer-const: error 103 | 104 | # https://github.com/prettier/eslint-plugin-prettier 105 | prettier/prettier: 106 | - error 107 | 108 | quotes: 109 | - error 110 | - single 111 | - avoidEscape: true 112 | 113 | # https://eslint.org/docs/rules/radix 114 | radix: error 115 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/bug_report.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Bug report 3 | about: Create a report to help us improve 4 | title: '' 5 | labels: '' 6 | assignees: '' 7 | 8 | --- 9 | 10 | **Is this a support request?** 11 | This issue tracker is maintained by LaunchDarkly SDK developers and is intended for feedback on the SDK code. 
If you're not sure whether the problem you are having is specifically related to the SDK, or to the LaunchDarkly service overall, it may be more appropriate to contact the LaunchDarkly support team; they can help to investigate the problem and will consult the SDK team if necessary. You can submit a support request by going [here](https://support.launchdarkly.com/hc/en-us/requests/new) or by emailing support@launchdarkly.com. 12 | 13 | Note that issues filed on this issue tracker are publicly accessible. Do not provide any private account information on your issues. If your problem is specific to your account, you should submit a support request as described above. 14 | 15 | **Describe the bug** 16 | A clear and concise description of what the bug is. 17 | 18 | **To reproduce** 19 | Steps to reproduce the behavior. 20 | 21 | **Expected behavior** 22 | A clear and concise description of what you expected to happen. 23 | 24 | **Logs** 25 | If applicable, add any log output related to your problem. 26 | 27 | **SDK version** 28 | The version of this SDK that you are using. 29 | 30 | **Language version, developer tools** 31 | For instance, Go 1.11 or Ruby 2.5.3. If you are using a language that requires a separate compiler, such as C, please include the name and version of the compiler too. 32 | 33 | **OS/platform** 34 | For instance, Ubuntu 16.04, Windows 10, or Android 4.0.3. If your code is running in a browser, please also include the browser type and version. 35 | 36 | **Additional context** 37 | Add any other context about the problem here. 
38 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/config.yml: -------------------------------------------------------------------------------- 1 | blank_issues_enabled: false 2 | contact_links: 3 | - name: Support request 4 | url: https://support.launchdarkly.com/hc/en-us/requests/new 5 | about: File your support requests with LaunchDarkly's support team 6 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/feature_request.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Feature request 3 | about: Suggest an idea for this project 4 | title: '' 5 | labels: '' 6 | assignees: '' 7 | 8 | --- 9 | 10 | **Is your feature request related to a problem? Please describe.** 11 | A clear and concise description of what the problem is. Ex. I would love to see the SDK [...does something new...] 12 | 13 | **Describe the solution you'd like** 14 | A clear and concise description of what you want to happen. 15 | 16 | **Describe alternatives you've considered** 17 | A clear and concise description of any alternative solutions or features you've considered. 18 | 19 | **Additional context** 20 | Add any other context about the feature request here. 21 | -------------------------------------------------------------------------------- /.github/pull_request_template.md: -------------------------------------------------------------------------------- 1 | **Requirements** 2 | 3 | - [ ] I have added test coverage for new or changed functionality 4 | - [ ] I have followed the repository's [pull request submission guidelines](../blob/master/CONTRIBUTING.md#submitting-pull-requests) 5 | - [ ] I have validated my changes against all supported platform versions 6 | 7 | **Related issues** 8 | 9 | Provide links to any issues in this repository or elsewhere relating to this pull request. 
10 | 11 | **Describe the solution you've provided** 12 | 13 | Provide a clear and concise description of what you expect to happen. 14 | 15 | **Describe alternatives you've considered** 16 | 17 | Provide a clear and concise description of any alternative solutions or features you've considered. 18 | 19 | **Additional context** 20 | 21 | Add any other context about the pull request here. 22 | -------------------------------------------------------------------------------- /.github/workflows/stale.yml: -------------------------------------------------------------------------------- 1 | name: 'Close stale issues and PRs' 2 | on: 3 | schedule: 4 | - cron: '30 1 * * *' 5 | 6 | jobs: 7 | stale: 8 | uses: launchdarkly/gh-actions/.github/workflows/sdk-stale.yml@main 9 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | package-lock.json 2 | /docs/build/ 3 | /local/ 4 | **/node_modules/ 5 | junit.xml 6 | npm-debug.log 7 | test-types.js 8 | .vscode 9 | coverage/ 10 | -------------------------------------------------------------------------------- /.hound.yml: -------------------------------------------------------------------------------- 1 | { 2 | asi: false, 3 | bitwise: true, 4 | browser: true, 5 | camelcase: false, 6 | curly: true, 7 | forin: true, 8 | immed: true, 9 | latedef: "nofunc", 10 | maxlen: 120, 11 | newcap: true, 12 | noarg: true, 13 | noempty: true, 14 | nonew: true, 15 | eqeqeq: true, 16 | predef: [ 17 | "$", 18 | "jQuery", 19 | "jasmine", 20 | "beforeEach", 21 | "describe", 22 | "expect", 23 | "it", 24 | "angular", 25 | "inject", 26 | "module", 27 | "require" 28 | ], 29 | quotmark: true, 30 | trailing: true, 31 | undef: true, 32 | unused: true 33 | } -------------------------------------------------------------------------------- /.ldrelease/config.yml: 
-------------------------------------------------------------------------------- 1 | version: 2 2 | 3 | repo: 4 | public: node-server-sdk 5 | private: node-server-sdk-private 6 | 7 | branches: 8 | - name: main 9 | description: 7.x 10 | - name: 6.x 11 | - name: 5.x 12 | 13 | publications: 14 | - url: https://www.npmjs.com/package/launchdarkly-node-server-sdk 15 | description: npm 16 | 17 | jobs: 18 | - docker: 19 | image: node:12-buster 20 | template: 21 | name: npm 22 | 23 | documentation: 24 | gitHubPages: true 25 | title: LaunchDarkly Server-Side Node SDK 26 | 27 | sdk: 28 | displayName: "Node" 29 | -------------------------------------------------------------------------------- /.prettierrc: -------------------------------------------------------------------------------- 1 | { 2 | "arrowParens": "avoid", 3 | "trailingComma": "es5", 4 | "singleQuote": true, 5 | "printWidth": 120 6 | } 7 | -------------------------------------------------------------------------------- /CODEOWNERS: -------------------------------------------------------------------------------- 1 | # Repository Maintainers 2 | * @launchdarkly/team-sdk 3 | -------------------------------------------------------------------------------- /CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | # Contributing to the LaunchDarkly Server-Side SDK for Node.js 2 | 3 | LaunchDarkly has published an [SDK contributor's guide](https://docs.launchdarkly.com/sdk/concepts/contributors-guide) that provides a detailed explanation of how our SDKs work. See below for additional information on how to contribute to this SDK. 4 | 5 | ## Submitting bug reports and feature requests 6 | 7 | The LaunchDarkly SDK team monitors the [issue tracker](https://github.com/launchdarkly/node-server-sdk/issues) in the SDK repository. Bug reports and feature requests specific to this SDK should be filed in this issue tracker. 
The SDK team will respond to all newly filed issues within two business days. 8 | 9 | ## Submitting pull requests 10 | 11 | We encourage pull requests and other contributions from the community. Before submitting pull requests, ensure that all temporary or unintended code is removed. Don't worry about adding reviewers to the pull request; the LaunchDarkly SDK team will add themselves. The SDK team will acknowledge all pull requests within two business days. 12 | 13 | ## Build instructions 14 | 15 | ### Prerequisites 16 | 17 | The project should be built and tested against the lowest compatible version, Node 12. It uses `npm`, which is bundled in all supported versions of Node. 18 | 19 | ### Setup 20 | 21 | To install project dependencies, from the project root directory: 22 | 23 | ``` 24 | npm install 25 | ``` 26 | 27 | ### Testing 28 | 29 | To run all unit tests: 30 | 31 | ``` 32 | npm test 33 | ``` 34 | 35 | To verify that the TypeScript declarations compile correctly (this involves compiling the file `test-types.ts`, so if you have changed any types or interfaces, you will want to update that code): 36 | 37 | ``` 38 | npm run check-typescript 39 | ``` 40 | 41 | To run the SDK contract test suite (see [`contract-tests/README.md`](./contract-tests/README.md)): 42 | 43 | ```bash 44 | npm run contract-tests 45 | ``` 46 | 47 | ### Auditing package dependencies 48 | 49 | The `npm audit` tool compares all dependencies and transitive dependencies to a database of package versions with known vulnerabilities. However, the output of this tool includes both runtime and development dependencies. 50 | 51 | Runtime dependencies can affect applications using the SDK; they can only be fixed by updating one of the explicit dependencies in `package.json`. Development dependencies cannot affect applications, but will still cause `npm audit` to flag the project; they can be fixed by running `npm audit fix` to add overrides for transitive dependencies in `package-lock.json`. 
52 | 53 | It is important _not_ to run `npm audit fix` if there are any bad _runtime_ dependencies, because it will hide the problem in our own build, without actually fixing the vulnerability when an application uses the SDK. 54 | 55 | The script `scripts/better-audit.sh`, which is run in the CI build and can also be run manually, processes the output of `npm audit` to eliminate all duplicate entries and then determines whether each entry is coming from a runtime dependency or a development dependency. If there are any runtime ones, it terminates with an error code so the build will fail. 56 | -------------------------------------------------------------------------------- /LICENSE.txt: -------------------------------------------------------------------------------- 1 | Copyright 2016 Catamorphic, Co. 2 | 3 | Licensed under the Apache License, Version 2.0 (the "License"); 4 | you may not use this file except in compliance with the License. 5 | You may obtain a copy of the License at 6 | 7 | http://www.apache.org/licenses/LICENSE-2.0 8 | 9 | Unless required by applicable law or agreed to in writing, software 10 | distributed under the License is distributed on an "AS IS" BASIS, 11 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | See the License for the specific language governing permissions and 13 | limitations under the License. -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # LaunchDarkly Server-Side SDK for Node.js 2 | 3 | # Use [@launchdarkly/node-server-sdk](https://www.npmjs.com/package/@launchdarkly/node-server-sdk) instead of this package. 4 | 5 | As mentioned in the [repository changelog](https://github.com/launchdarkly/node-server-sdk/blob/main/CHANGELOG.md), the `launchdarkly-node-server-sdk` project has been renamed to `@launchdarkly/node-server-sdk`. 
All future releases will be made from the [new repository](https://github.com/launchdarkly/js-core/tree/main/packages/sdk/server-node). Please consider upgrading and filing potential requests in that repository's [issue tracker](https://github.com/launchdarkly/js-core/issues?q=is%3Aissue+is%3Aopen+label%3A%22package%3A+sdk%2Fserver-node%22+sort%3Aupdated-desc). 6 | 7 | # ☝️☝️☝️☝️☝️☝️ 8 | 9 | ## v7.x readme 10 | 11 | [![NPM](https://img.shields.io/npm/v/launchdarkly-node-server-sdk.svg?style=flat-square)](https://www.npmjs.com/package/launchdarkly-node-server-sdk) 12 | [![CircleCI](https://circleci.com/gh/launchdarkly/node-server-sdk.svg?style=svg)](https://circleci.com/gh/launchdarkly/node-server-sdk) 13 | [![Documentation](https://img.shields.io/static/v1?label=GitHub+Pages&message=API+reference&color=00add8)](https://launchdarkly.github.io/node-server-sdk) 14 | 15 | The LaunchDarkly Server-Side SDK for Node.js is designed primarily for use in multi-user systems such as web servers and applications. It follows the server-side LaunchDarkly model for multi-user contexts. It is not intended for use in desktop and embedded systems applications. 16 | 17 | For using LaunchDarkly in *client-side* Node.js applications, refer to our [Client-side Node.js SDK](https://github.com/launchdarkly/node-client-sdk). 18 | 19 | ## LaunchDarkly overview 20 | 21 | [LaunchDarkly](https://www.launchdarkly.com) is a feature management platform that serves trillions of feature flags daily to help teams build better software, faster. [Get started](https://docs.launchdarkly.com/home/getting-started) using LaunchDarkly today! 22 | 23 | [![Twitter Follow](https://img.shields.io/twitter/follow/launchdarkly.svg?style=social&label=Follow&maxAge=2592000)](https://twitter.com/intent/follow?screen_name=launchdarkly) 24 | 25 | ## Supported Node versions 26 | 27 | This version of the LaunchDarkly SDK is compatible with Node.js versions 12 and above. 
28 | 29 | ## Getting started 30 | 31 | Refer to the [SDK reference guide](https://docs.launchdarkly.com/sdk/server-side/node-js) for instructions on getting started with using the SDK. 32 | 33 | ## Learn more 34 | 35 | Read our [documentation](http://docs.launchdarkly.com) for in-depth instructions on configuring and using LaunchDarkly. You can also head straight to the [complete reference guide for this SDK](https://docs.launchdarkly.com/sdk/server-side/node-js). 36 | 37 | The authoritative description of all properties and methods is in the [TypeScript documentation](https://launchdarkly.github.io/node-server-sdk/). 38 | 39 | ## Testing 40 | 41 | We run integration tests for all our SDKs using a centralized test harness. This approach gives us the ability to test for consistency across SDKs, as well as test networking behavior in a long-running application. These tests cover each method in the SDK, and verify that event sending, flag evaluation, stream reconnection, and other aspects of the SDK all behave correctly. 42 | 43 | ## Contributing 44 | 45 | We encourage pull requests and other contributions from the community. Check out our [contributing guidelines](CONTRIBUTING.md) for instructions on how to contribute to this SDK. 46 | 47 | ## About LaunchDarkly 48 | 49 | * LaunchDarkly is a continuous delivery platform that provides feature flags as a service and allows developers to iterate quickly and safely. We allow you to easily flag your features and manage them from the LaunchDarkly dashboard. With LaunchDarkly, you can: 50 | * Roll out a new feature to a subset of your users (like a group of users who opt-in to a beta tester group), gathering feedback and bug reports from real-world use cases. 51 | * Gradually roll out a feature to an increasing percentage of users, and track the effect that the feature has on key metrics (for instance, how likely is a user to complete a purchase if they have feature A versus feature B?). 
52 | * Turn off a feature that you realize is causing performance problems in production, without needing to re-deploy, or even restart the application with a changed configuration file. 53 | * Grant access to certain features based on user attributes, like payment plan (eg: users on the ‘gold’ plan get access to more features than users in the ‘silver’ plan). Disable parts of your application to facilitate maintenance, without taking everything offline. 54 | * LaunchDarkly provides feature flag SDKs for a wide variety of languages and technologies. Read [our documentation](https://docs.launchdarkly.com/sdk) for a complete list. 55 | * Explore LaunchDarkly 56 | * [launchdarkly.com](https://www.launchdarkly.com/ "LaunchDarkly Main Website") for more information 57 | * [docs.launchdarkly.com](https://docs.launchdarkly.com/ "LaunchDarkly Documentation") for our documentation and SDK reference guides 58 | * [apidocs.launchdarkly.com](https://apidocs.launchdarkly.com/ "LaunchDarkly API Documentation") for our API documentation 59 | * [blog.launchdarkly.com](https://blog.launchdarkly.com/ "LaunchDarkly Blog Documentation") for the latest product updates 60 | -------------------------------------------------------------------------------- /SECURITY.md: -------------------------------------------------------------------------------- 1 | # Reporting and Fixing Security Issues 2 | 3 | Please report all security issues to the LaunchDarkly security team by submitting a bug bounty report to our [HackerOne program](https://hackerone.com/launchdarkly?type=team). LaunchDarkly will triage and address all valid security issues following the response targets defined in our program policy. Valid security issues may be eligible for a bounty. 4 | 5 | Please do not open issues or pull requests for security issues. This makes the problem immediately visible to everyone, including potentially malicious actors. 
6 | -------------------------------------------------------------------------------- /attribute_reference.js: -------------------------------------------------------------------------------- 1 | /** 2 | * Take a key string and escape the characters to allow it to be used as a reference. 3 | * @param {string} key 4 | * @returns {string} The processed key. 5 | */ 6 | function processEscapeCharacters(key) { 7 | return key.replace(/~/g, '~0').replace(/\//g, '~1'); 8 | } 9 | 10 | /** 11 | * @param {string} reference The reference to get the components of. 12 | * @returns {string[]} The components of the reference. Escape characters will be converted to their representative values. 13 | */ 14 | function getComponents(reference) { 15 | const referenceWithoutPrefix = reference.startsWith('/') ? reference.substring(1) : reference; 16 | return referenceWithoutPrefix 17 | .split('/') 18 | .map(component => (component.indexOf('~') >= 0 ? component.replace(/~1/g, '/').replace(/~0/g, '~') : component)); 19 | } 20 | 21 | /** 22 | * @param {string} reference The reference to check if it is a literal. 23 | * @returns true if the reference is a literal. 24 | */ 25 | function isLiteral(reference) { 26 | return !reference.startsWith('/'); 27 | } 28 | 29 | /** 30 | * Get an attribute value from a literal. 31 | * @param {Object} target 32 | * @param {string} literal 33 | */ 34 | function getFromLiteral(target, literal) { 35 | if (target !== null && target !== undefined && Object.prototype.hasOwnProperty.call(target, literal)) { 36 | return target[literal]; 37 | } 38 | } 39 | 40 | /** 41 | * Gets the `target` object's value at the `reference`'s location. 42 | * 43 | * This method method follows the rules for accessing attributes for use 44 | * in evaluating clauses. 45 | * 46 | * Accessing the root of the target will always result in undefined. 47 | * 48 | * @param {Object} target 49 | * @param {string} reference 50 | * @returns The `target` object's value at the `reference`'s location. 
51 | * Undefined if the field does not exist or if the reference is not valid. 52 | */ 53 | function get(target, reference) { 54 | if (reference === '' || reference === '/') { 55 | return undefined; 56 | } 57 | 58 | if (isLiteral(reference)) { 59 | return getFromLiteral(target, reference); 60 | } 61 | 62 | const components = getComponents(reference); 63 | let current = target; 64 | for (const component of components) { 65 | if ( 66 | current !== null && 67 | current !== undefined && 68 | typeof current === 'object' && 69 | // We do not want to allow indexing into an array. 70 | !Array.isArray(current) && 71 | // For arrays and strings, in addition to objects, a hasOwnProperty check 72 | // will be true for indexes (as strings or numbers), which are present 73 | // in the object/string/array. 74 | Object.prototype.hasOwnProperty.call(current, component) 75 | ) { 76 | current = current[component]; 77 | } else { 78 | return undefined; 79 | } 80 | } 81 | 82 | return current; 83 | } 84 | 85 | /** 86 | * Compare two references and determine if they are equivalent. 87 | * @param {string} a 88 | * @param {string} b 89 | */ 90 | function compare(a, b) { 91 | const aIsLiteral = isLiteral(a); 92 | const bIsLiteral = isLiteral(b); 93 | if (aIsLiteral && bIsLiteral) { 94 | return a === b; 95 | } 96 | if (aIsLiteral) { 97 | const bComponents = getComponents(b); 98 | if (bComponents.length !== 1) { 99 | return false; 100 | } 101 | return a === bComponents[0]; 102 | } 103 | if (bIsLiteral) { 104 | const aComponents = getComponents(a); 105 | if (aComponents.length !== 1) { 106 | return false; 107 | } 108 | return b === aComponents[0]; 109 | } 110 | return a === b; 111 | } 112 | 113 | /** 114 | * @param {string} a 115 | * @param {string} b 116 | * @returns The two strings joined by '/'. 117 | */ 118 | function join(a, b) { 119 | return `${a}/${b}`; 120 | } 121 | 122 | /** 123 | * There are cases where a field could have been named with a preceeding '/'. 
124 | * If that attribute was private, then the literal would appear to be a reference. 125 | * This method can be used to convert a literal to a reference in such situations. 126 | * @param {string} literal The literal to convert to a reference. 127 | * @returns A literal which has been converted to a reference. 128 | */ 129 | function literalToReference(literal) { 130 | return `/${processEscapeCharacters(literal)}`; 131 | } 132 | 133 | /** 134 | * Clone an object excluding the values referenced by a list of references. 135 | * @param {Object} target The object to clone. 136 | * @param {string[]} references A list of references from the cloned object. 137 | * @returns {{cloned: Object, excluded: string[]}} The cloned object and a list of excluded values. 138 | */ 139 | function cloneExcluding(target, references) { 140 | const stack = []; 141 | const cloned = {}; 142 | const excluded = []; 143 | 144 | stack.push( 145 | ...Object.keys(target).map(key => ({ 146 | key, 147 | ptr: literalToReference(key), 148 | source: target, 149 | parent: cloned, 150 | visited: [target], 151 | })) 152 | ); 153 | 154 | while (stack.length) { 155 | const item = stack.pop(); 156 | if (!references.some(ptr => compare(ptr, item.ptr))) { 157 | const value = item.source[item.key]; 158 | 159 | // Handle null because it overlaps with object, which we will want to handle later. 160 | if (value === null) { 161 | item.parent[item.key] = value; 162 | } else if (Array.isArray(value)) { 163 | item.parent[item.key] = [...value]; 164 | } else if (typeof value === 'object') { 165 | //Arrays and null must already be handled. 166 | 167 | //Prevent cycles by not visiting the same object 168 | //with in the same branch. Parallel branches 169 | //may contain the same object. 
170 | if (item.visited.includes(value)) { 171 | continue; 172 | } 173 | 174 | item.parent[item.key] = {}; 175 | 176 | stack.push( 177 | ...Object.keys(value).map(key => ({ 178 | key, 179 | ptr: join(item.ptr, processEscapeCharacters(key)), 180 | source: value, 181 | parent: item.parent[item.key], 182 | visited: [...item.visited, value], 183 | })) 184 | ); 185 | } else { 186 | item.parent[item.key] = value; 187 | } 188 | } else { 189 | excluded.push(item.ptr); 190 | } 191 | } 192 | return { cloned, excluded: excluded.sort() }; 193 | } 194 | 195 | function isValidReference(reference) { 196 | return !reference.match(/\/\/|(^\/.*~[^0|^1])|~$/); 197 | } 198 | 199 | /** 200 | * Check if the given attribute reference is for the "kind" attribute. 201 | * @param {string} reference String containing an attribute reference. 202 | */ 203 | function isKind(reference) { 204 | // There are only 2 valid ways to specify the kind attribute, 205 | // so this just checks them. Given the current flow of evaluation 206 | // this is much less intense a process than doing full validation and parsing. 207 | return reference === 'kind' || reference === '/kind'; 208 | } 209 | 210 | module.exports = { 211 | cloneExcluding, 212 | compare, 213 | get, 214 | isValidReference, 215 | literalToReference, 216 | isKind, 217 | }; 218 | -------------------------------------------------------------------------------- /big_segments.js: -------------------------------------------------------------------------------- 1 | const { createHash } = require('crypto'); 2 | const { EventEmitter } = require('events'); 3 | const LRUCache = require('lru-cache'); 4 | 5 | const defaultStaleAfter = 120; 6 | const defaultStatusPollInterval = 5; 7 | const defaultUserCacheSize = 1000; 8 | const defaultUserCacheTime = 5; 9 | const emptyMembership = {}; 10 | 11 | function BigSegmentStoreManager(store, config, logger) { 12 | const staleTimeMs = (config.staleAfter > 0 ? 
/**
 * Manages Big Segment store state for the SDK: caches per-user membership
 * lookups and periodically polls the store's metadata to derive a status
 * ({ available, stale }) that is exposed through an EventEmitter.
 *
 * @param {Object} store Big Segment store implementation, or null/undefined
 *   when Big Segments are not configured.
 * @param {Object} config Big Segments configuration: staleAfter,
 *   statusPollInterval, userCacheTime (seconds) and userCacheSize.
 * @param {Object} logger Logger with debug/error methods.
 */
function BigSegmentStoreManager(store, config, logger) {
  const staleTimeMs = (config.staleAfter > 0 ? config.staleAfter : defaultStaleAfter) * 1000;
  const pollIntervalMs = (config.statusPollInterval > 0 ? config.statusPollInterval : defaultStatusPollInterval) * 1000;
  // Only poll and cache when a store is actually configured.
  const pollTask = store ? setInterval(() => pollStoreAndUpdateStatus(), pollIntervalMs) : null;
  const cache = store
    ? new LRUCache({
        max: config.userCacheSize || defaultUserCacheSize,
        maxAge: (config.userCacheTime || defaultUserCacheTime) * 1000,
      })
    : null;
  let lastStatus;

  const ret = {};

  // Stops the status poller and closes the underlying store, if any.
  ret.close = () => {
    clearInterval(pollTask);
    store && store.close && store.close();
  };

  const statusProvider = new EventEmitter();
  ret.statusProvider = statusProvider;
  statusProvider.getStatus = () => lastStatus;
  statusProvider.requireStatus = async () => {
    if (!lastStatus) {
      await pollStoreAndUpdateStatus();
    }
    return lastStatus;
  };

  // Called by the evaluator when it needs to get the Big Segment membership
  // state for a user.
  //
  // If there is a cached membership state for the user, it returns the cached
  // state. Otherwise, it converts the user key into the hash string used by
  // the BigSegmentStore, queries the store, and caches the result.
  //
  // The return value is a two-element array where the first element is the
  // membership object, and the second element is a status value ("HEALTHY",
  // "STALE", or "STORE_ERROR"). An undefined return value is equivalent to
  // [ null, "NOT_CONFIGURED" ].
  ret.getUserMembership = async userKey => {
    if (!store) {
      return undefined;
    }
    let membership = cache.get(userKey);
    if (!membership) {
      try {
        membership = await store.getUserMembership(hashForUserKey(userKey));
        if (membership === null || membership === undefined) {
          membership = emptyMembership;
        }
      } catch (e) {
        logger.error('Big Segment store membership query returned error: ' + e);
        return [null, 'STORE_ERROR'];
      }
      // Cache exactly once, after a successful query. (This was previously
      // done redundantly both inside and after the try block.)
      cache.set(userKey, membership);
    }
    if (!lastStatus) {
      await pollStoreAndUpdateStatus();
    }
    if (!lastStatus.available) {
      return [membership, 'STORE_ERROR'];
    }
    return [membership, lastStatus.stale ? 'STALE' : 'HEALTHY'];
  };

  // Queries the store's metadata, derives the new status, and emits a
  // 'change' event on the status provider when the status has changed.
  async function pollStoreAndUpdateStatus() {
    if (!store) {
      lastStatus = { available: false, stale: false };
      return;
    }
    logger.debug('Querying Big Segment store status');
    let newStatus;
    try {
      const metadata = await store.getMetadata();
      newStatus = { available: true, stale: !metadata || !metadata.lastUpToDate || isStale(metadata.lastUpToDate) };
    } catch (e) {
      logger.error('Big Segment store status query returned error: ' + e);
      newStatus = { available: false, stale: false };
    }
    if (!lastStatus || lastStatus.available !== newStatus.available || lastStatus.stale !== newStatus.stale) {
      logger.debug(
        'Big Segment store status changed from %s to %s',
        JSON.stringify(lastStatus),
        JSON.stringify(newStatus)
      );
      lastStatus = newStatus;
      statusProvider.emit('change', newStatus);
    }
  }

  // True if the store's lastUpToDate timestamp is older than the stale cutoff.
  function isStale(timestamp) {
    return new Date().getTime() - timestamp >= staleTimeMs;
  }

  return ret;
}

/**
 * Hashes a user/context key for Big Segment store lookups.
 * @param {string} userKey The key to hash.
 * @returns {string} Base64-encoded SHA-256 digest of the key.
 */
function hashForUserKey(userKey) {
  const hasher = createHash('sha256');
  hasher.update(userKey);
  return hasher.digest('base64');
}
createHash('sha256'); 110 | hasher.update(userKey); 111 | return hasher.digest('base64'); 112 | } 113 | 114 | module.exports = { 115 | BigSegmentStoreManager, 116 | hashForUserKey, 117 | }; 118 | -------------------------------------------------------------------------------- /context.js: -------------------------------------------------------------------------------- 1 | /** 2 | * Validate a context kind. 3 | * @param {string} kind 4 | * @returns true if the kind is valid. 5 | */ 6 | function validKind(kind) { 7 | return typeof kind === 'string' && kind !== 'kind' && kind.match(/^(\w|\.|-)+$/); 8 | } 9 | 10 | /** 11 | * Validate a context key. 12 | * @param {string} key 13 | * @returns true if the key is valid. 14 | */ 15 | function validKey(key) { 16 | return key !== undefined && key !== null && key !== '' && typeof key === 'string'; 17 | } 18 | 19 | /** 20 | * Perform a check of basic context requirements. 21 | * @param {Object} context 22 | * @param {boolean} allowLegacyKey If true, then a legacy user can have an 23 | * empty or non-string key. A legacy user is a context without a kind. 24 | * @returns true if the context meets basic requirements. 25 | */ 26 | function checkContext(context, allowLegacyKey) { 27 | if (context) { 28 | if (allowLegacyKey && (context.kind === undefined || context.kind === null)) { 29 | return context.key !== undefined && context.key !== null; 30 | } 31 | const key = context.key; 32 | const kind = context.kind === undefined ? 'user' : context.kind; 33 | const kindValid = validKind(kind); 34 | const keyValid = kind === 'multi' || validKey(key); 35 | if (kind === 'multi') { 36 | const kinds = Object.keys(context).filter(key => key !== 'kind'); 37 | return keyValid && kinds.every(key => validKind(key)) && kinds.every(key => validKey(context[key].key)); 38 | } 39 | return keyValid && kindValid; 40 | } 41 | return false; 42 | } 43 | 44 | /** 45 | * The partial URL encoding is needed because : is a valid character in context keys. 
46 | * 47 | * Partial encoding is the replacement of all colon (:) characters with the URL 48 | * encoded equivalent (%3A) and all percent (%) characters with the URL encoded 49 | * equivalent (%25). 50 | * @param {string} key The key to encode. 51 | * @returns {string} Partially URL encoded key. 52 | */ 53 | function encodeKey(key) { 54 | if (key.includes('%') || key.includes(':')) { 55 | return key.replace(/%/g, '%25').replace(/:/g, '%3A'); 56 | } 57 | return key; 58 | } 59 | 60 | /** 61 | * For a given context get a list of context kinds. 62 | * @param {Object} context 63 | * @returns A list of kinds in the context. 64 | */ 65 | function getContextKinds(context) { 66 | if (context) { 67 | if (context.kind === null || context.kind === undefined) { 68 | return ['user']; 69 | } 70 | if (context.kind !== 'multi') { 71 | return [context.kind]; 72 | } 73 | return Object.keys(context).filter(kind => kind !== 'kind'); 74 | } 75 | return []; 76 | } 77 | 78 | function getCanonicalKey(context) { 79 | if (context) { 80 | if ((context.kind === undefined || context.kind === null || context.kind === 'user') && context.key) { 81 | return context.key; 82 | } else if (context.kind !== 'multi' && context.key) { 83 | return `${context.kind}:${encodeKey(context.key)}`; 84 | } else if (context.kind === 'multi') { 85 | return Object.keys(context) 86 | .sort() 87 | .filter(key => key !== 'kind') 88 | .map(key => `${key}:${encodeKey(context[key].key)}`) 89 | .join(':'); 90 | } 91 | } 92 | } 93 | 94 | module.exports = { 95 | checkContext, 96 | getContextKinds, 97 | getCanonicalKey, 98 | }; 99 | -------------------------------------------------------------------------------- /context_filter.js: -------------------------------------------------------------------------------- 1 | const AttributeReference = require('./attribute_reference'); 2 | 3 | function ContextFilter(config) { 4 | const filter = {}; 5 | 6 | const allAttributesPrivate = config.allAttributesPrivate; 7 | const 
/**
 * Creates a filter that redacts private attributes from contexts before they
 * are sent in events.
 * @param {Object} config SDK configuration; allAttributesPrivate and
 *   privateAttributes are consulted.
 */
function ContextFilter(config) {
  const filter = {};

  const allAttributesPrivate = config.allAttributesPrivate;
  const privateAttributes = config.privateAttributes || [];

  // These attributes cannot be removed via a private attribute.
  const protectedAttributes = ['key', 'kind', '_meta', 'anonymous'];

  // Legacy user attributes that stay at the top level of the converted
  // single-kind context.
  const legacyTopLevelCopyAttributes = ['name', 'ip', 'firstName', 'lastName', 'email', 'avatar', 'country'];

  /**
   * For the given context and configuration get a list of attributes to
   * filter.
   * @param {Object} context
   * @returns {string[]} A list of the attributes to filter.
   */
  const getAttributesToFilter = context => {
    const candidates = allAttributesPrivate
      ? Object.keys(context)
      : [...privateAttributes, ...((context._meta && context._meta.privateAttributes) || [])];
    return candidates.filter(
      candidate => !protectedAttributes.some(protectedAttr => AttributeReference.compare(candidate, protectedAttr))
    );
  };

  /**
   * @param {Object} context
   * @returns {Object} A copy of the context with private attributes removed
   *   and the redactedAttributes meta populated; undefined for non-object
   *   input.
   */
  const filterSingleKind = context => {
    if (typeof context !== 'object' || context === null || Array.isArray(context)) {
      return undefined;
    }

    const { cloned, excluded } = AttributeReference.cloneExcluding(context, getAttributesToFilter(context));
    cloned.key = String(cloned.key);
    if (excluded.length) {
      cloned._meta = cloned._meta || {};
      cloned._meta.redactedAttributes = excluded;
    }
    if (cloned._meta) {
      delete cloned._meta['privateAttributes'];
      if (Object.keys(cloned._meta).length === 0) {
        delete cloned._meta;
      }
    }
    // Make sure anonymous is boolean if present. Null counts as present, and
    // would be falsy, which is the default.
    if (cloned.anonymous !== undefined) {
      cloned.anonymous = !!cloned.anonymous;
    }

    return cloned;
  };

  /**
   * @param {Object} context
   * @returns {Object} A copy of the multi-kind context with private
   *   attributes removed and the redactedAttributes meta populated for each
   *   sub-context.
   */
  const filterMultiKind = context => {
    const filtered = { kind: context.kind };
    for (const contextKey of Object.keys(context)) {
      if (contextKey === 'kind') {
        continue;
      }
      const filteredContext = filterSingleKind(context[contextKey]);
      if (filteredContext) {
        filtered[contextKey] = filteredContext;
      }
    }
    return filtered;
  };

  /**
   * Convert the LDUser object into an LDContext object.
   * @param {Object} user The LDUser to produce an LDContext for.
   * @returns {Object} A single kind context based on the provided user.
   */
  const legacyToSingleKind = user => {
    const filtered = {
      // Destructure custom items into the top level; duplicate keys will be
      // overridden by the top-level copy loop below.
      ...(user.custom || {}),

      // Implicitly a user kind.
      kind: 'user',

      key: user.key,
    };

    if (user.anonymous !== undefined) {
      filtered.anonymous = !!user.anonymous;
    }

    // Copy well-known top-level attributes, converting them to strings, and
    // remove any same-named keys that were destructured from `custom`.
    for (const name of legacyTopLevelCopyAttributes) {
      delete filtered[name];
      if (user[name] !== undefined && user[name] !== null) {
        filtered[name] = String(user[name]);
      }
    }

    if (user.privateAttributeNames !== undefined && user.privateAttributeNames !== null) {
      filtered._meta = filtered._meta || {};
      // If any private attributes started with '/' we need to convert them to
      // references, otherwise the '/' would cause the literal to incorrectly
      // be treated as a reference.
      filtered._meta.privateAttributes = user.privateAttributeNames.map(literal =>
        literal.startsWith('/') ? AttributeReference.literalToReference(literal) : literal
      );
    }

    return filtered;
  };

  filter.filter = context => {
    if (context.kind === undefined || context.kind === null) {
      return filterSingleKind(legacyToSingleKind(context));
    }
    if (context.kind === 'multi') {
      return filterMultiKind(context);
    }
    return filterSingleKind(context);
  };

  return filter;
}

// Guarded so the block also loads outside a CommonJS module environment.
if (typeof module !== 'undefined') {
  module.exports = ContextFilter;
}
6 | 7 | Or, to test against an in-progress local version of the test harness, run `npm run contract-test-service` from the SDK project root directory; then, in the root directory of the `sdk-test-harness` project, build the test harness and run it from the command line. 8 | -------------------------------------------------------------------------------- /contract-tests/index.js: -------------------------------------------------------------------------------- 1 | const express = require('express'); 2 | const bodyParser = require('body-parser'); 3 | 4 | const { Log } = require('./log'); 5 | const { newSdkClientEntity, badCommandError } = require('./sdkClientEntity'); 6 | 7 | const app = express(); 8 | let server = null; 9 | 10 | const port = 8000; 11 | 12 | let clientCounter = 0; 13 | const clients = {}; 14 | 15 | const mainLog = Log('service'); 16 | 17 | app.use(bodyParser.json()); 18 | 19 | app.get('/', (req, res) => { 20 | res.header('Content-Type', 'application/json'); 21 | res.json({ 22 | capabilities: [ 23 | 'server-side', 24 | 'all-flags-client-side-only', 25 | 'all-flags-details-only-for-tracked-flags', 26 | 'all-flags-with-reasons', 27 | 'tags', 28 | 'user-type', 29 | ], 30 | }); 31 | }); 32 | 33 | app.delete('/', (req, res) => { 34 | mainLog.info('Test service has told us to exit'); 35 | res.status(204); 36 | res.send(); 37 | 38 | // Defer the following actions till after the response has been sent 39 | setTimeout(() => { 40 | server.close(() => process.exit()); 41 | // We force-quit with process.exit because, even after closing the server, there could be some 42 | // scheduled tasks lingering if an SDK instance didn't get cleaned up properly, and we don't want 43 | // that to prevent us from quitting. 
44 | }, 1); 45 | }); 46 | 47 | app.post('/', async (req, res) => { 48 | const options = req.body; 49 | 50 | clientCounter += 1; 51 | const clientId = clientCounter.toString(); 52 | const resourceUrl = `/clients/${clientId}`; 53 | 54 | try { 55 | const client = await newSdkClientEntity(options); 56 | clients[clientId] = client; 57 | 58 | res.status(201); 59 | res.set('Location', resourceUrl); 60 | } catch (e) { 61 | res.status(500); 62 | const message = e.message || JSON.stringify(e); 63 | mainLog.error('Error creating client: ' + message); 64 | res.write(message); 65 | } 66 | res.send(); 67 | }); 68 | 69 | app.post('/clients/:id', async (req, res) => { 70 | const client = clients[req.params.id]; 71 | if (!client) { 72 | res.status(404); 73 | } else { 74 | try { 75 | const respValue = await client.doCommand(req.body); 76 | if (respValue) { 77 | res.status(200); 78 | res.write(JSON.stringify(respValue)); 79 | } else { 80 | res.status(204); 81 | } 82 | } catch (e) { 83 | const isBadRequest = e === badCommandError; 84 | res.status(isBadRequest ? 
400 : 500); 85 | res.write(e.message || JSON.stringify(e)); 86 | if (!isBadRequest && e.stack) { 87 | console.log(e.stack); 88 | } 89 | } 90 | } 91 | res.send(); 92 | }); 93 | 94 | app.delete('/clients/:id', async (req, res) => { 95 | const client = clients[req.params.id]; 96 | if (!client) { 97 | res.status(404); 98 | res.send(); 99 | } else { 100 | client.close(); 101 | delete clients[req.params.id]; 102 | res.status(204); 103 | res.send(); 104 | } 105 | }); 106 | 107 | server = app.listen(port, () => { 108 | console.log('Listening on port %d', port); 109 | }); 110 | -------------------------------------------------------------------------------- /contract-tests/log.js: -------------------------------------------------------------------------------- 1 | const ld = require('launchdarkly-node-server-sdk'); 2 | 3 | function Log(tag) { 4 | function doLog(level, message) { 5 | console.log(new Date().toISOString() + ` [${tag}] ${level}: ${message}`); 6 | } 7 | return { 8 | info: message => doLog('info', message), 9 | error: message => doLog('error', message), 10 | }; 11 | } 12 | 13 | function sdkLogger(tag) { 14 | return ld.basicLogger({ 15 | level: 'debug', 16 | destination: line => { 17 | console.log(new Date().toISOString() + ` [${tag}.sdk] ${line}`); 18 | }, 19 | }); 20 | } 21 | 22 | module.exports.Log = Log; 23 | module.exports.sdkLogger = sdkLogger; 24 | -------------------------------------------------------------------------------- /contract-tests/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "node-server-sdk-contract-tests", 3 | "version": "0.0.0", 4 | "main": "index.js", 5 | "scripts": { 6 | "start": "node index.js" 7 | }, 8 | "author": "", 9 | "license": "Apache-2.0", 10 | "dependencies": { 11 | "body-parser": "^1.19.0", 12 | "express": "^4.17.1", 13 | "launchdarkly-node-server-sdk": "file:.." 
14 | } 15 | } 16 | -------------------------------------------------------------------------------- /contract-tests/sdkClientEntity.js: -------------------------------------------------------------------------------- 1 | const ld = require('launchdarkly-node-server-sdk'); 2 | 3 | const { Log, sdkLogger } = require('./log'); 4 | 5 | const badCommandError = new Error('unsupported command'); 6 | 7 | function makeSdkConfig(options, tag) { 8 | const cf = { 9 | logger: sdkLogger(tag), 10 | }; 11 | const maybeTime = seconds => (seconds === undefined || seconds === null ? undefined : seconds / 1000); 12 | if (options.streaming) { 13 | cf.streamUri = options.streaming.baseUri; 14 | cf.streamInitialReconnectDelay = maybeTime(options.streaming.initialRetryDelayMs); 15 | } 16 | if (options.events) { 17 | cf.allAttributesPrivate = options.events.allAttributesPrivate; 18 | cf.eventsUri = options.events.baseUri; 19 | cf.capacity = options.events.capacity; 20 | cf.diagnosticOptOut = !options.events.enableDiagnostics; 21 | cf.flushInterval = maybeTime(options.events.flushIntervalMs); 22 | cf.privateAttributes = options.events.globalPrivateAttributes; 23 | } 24 | if (options.tags) { 25 | cf.application = { 26 | id: options.tags.applicationId, 27 | version: options.tags.applicationVersion, 28 | }; 29 | } 30 | return cf; 31 | } 32 | 33 | async function newSdkClientEntity(options) { 34 | const c = {}; 35 | const log = Log(options.tag); 36 | 37 | log.info('Creating client with configuration: ' + JSON.stringify(options.configuration)); 38 | const timeout = 39 | options.configuration.startWaitTimeMs !== null && options.configuration.startWaitTimeMs !== undefined 40 | ? 
options.configuration.startWaitTimeMs 41 | : 5000; 42 | const client = ld.init( 43 | options.configuration.credential || 'unknown-sdk-key', 44 | makeSdkConfig(options.configuration, options.tag) 45 | ); 46 | try { 47 | await Promise.race([client.waitForInitialization(), new Promise(resolve => setTimeout(resolve, timeout))]); 48 | } catch (_) { 49 | // if waitForInitialization() rejects, the client failed to initialize, see next line 50 | } 51 | if (!client.initialized() && !options.configuration.initCanFail) { 52 | client.close(); 53 | throw new Error('client initialization failed'); 54 | } 55 | 56 | c.close = () => { 57 | client.close(); 58 | log.info('Test ended'); 59 | }; 60 | 61 | c.doCommand = async params => { 62 | log.info('Received command: ' + params.command); 63 | switch (params.command) { 64 | case 'evaluate': { 65 | const pe = params.evaluate; 66 | if (pe.detail) { 67 | return await client.variationDetail(pe.flagKey, pe.context || pe.user, pe.defaultValue); 68 | } else { 69 | const value = await client.variation(pe.flagKey, pe.context || pe.user, pe.defaultValue); 70 | return { value }; 71 | } 72 | } 73 | 74 | case 'evaluateAll': { 75 | const pea = params.evaluateAll; 76 | const eao = { 77 | clientSideOnly: pea.clientSideOnly, 78 | detailsOnlyForTrackedFlags: pea.detailsOnlyForTrackedFlags, 79 | withReasons: pea.withReasons, 80 | }; 81 | return { state: await client.allFlagsState(pea.context || pea.user, eao) }; 82 | } 83 | 84 | case 'identifyEvent': 85 | client.identify(params.identifyEvent.context || params.identifyEvent.user); 86 | return undefined; 87 | 88 | case 'customEvent': { 89 | const pce = params.customEvent; 90 | client.track(pce.eventKey, pce.context || pce.user, pce.data, pce.metricValue); 91 | return undefined; 92 | } 93 | 94 | case 'flushEvents': 95 | client.flush(); 96 | return undefined; 97 | 98 | case 'getBigSegmentStoreStatus': 99 | return undefined; 100 | 101 | default: 102 | throw badCommandError; 103 | } 104 | }; 105 | 106 | 
return c; 107 | } 108 | 109 | module.exports.newSdkClientEntity = newSdkClientEntity; 110 | module.exports.badCommandError = badCommandError; 111 | -------------------------------------------------------------------------------- /contract-tests/testharness-suppressions.txt: -------------------------------------------------------------------------------- 1 | streaming/validation/drop and reconnect if stream event has malformed JSON 2 | streaming/validation/drop and reconnect if stream event has well-formed JSON not matching schema 3 | -------------------------------------------------------------------------------- /diagnostic_events.js: -------------------------------------------------------------------------------- 1 | const os = require('os'); 2 | const { v4: uuidv4 } = require('uuid'); 3 | const configuration = require('./configuration'); 4 | const packageJson = require('./package.json'); 5 | 6 | // An object that maintains information that will go into diagnostic events, and knows how to format 7 | // those events. It is instantiated by the SDK client, and shared with the event processor. 8 | function DiagnosticsManager(config, diagnosticId, startTime) { 9 | let dataSinceDate = startTime; 10 | let streamInits = []; 11 | const acc = {}; 12 | 13 | // Creates the initial event that is sent by the event processor when the SDK starts up. This will not 14 | // be repeated during the lifetime of the SDK client. 15 | acc.createInitEvent = () => ({ 16 | kind: 'diagnostic-init', 17 | id: diagnosticId, 18 | creationDate: startTime, 19 | sdk: makeSdkData(config), 20 | configuration: makeConfigData(config), 21 | platform: makePlatformData(), 22 | }); 23 | 24 | // Records a stream connection attempt (called by the stream processor). 25 | // timestamp: Time of the *beginning* of the connection attempt. 26 | // failed: True if the connection failed, or we got a read timeout before receiving a "put". 
27 | // durationMillis: Elapsed time between starting timestamp and when we either gave up/lost the 28 | // connection or received a successful "put". 29 | acc.recordStreamInit = (timestamp, failed, durationMillis) => { 30 | const item = { timestamp, failed, durationMillis }; 31 | streamInits.push(item); 32 | }; 33 | 34 | // Creates a periodic event containing time-dependent stats, and resets the state of the manager with 35 | // regard to those stats. 36 | // Note: the reason droppedEvents, deduplicatedUsers, and eventsInLastBatch are passed into this function, 37 | // instead of being properties of the DiagnosticsManager, is that the event processor is the one who's 38 | // calling this function and is also the one who's tracking those stats. 39 | acc.createStatsEventAndReset = (droppedEvents, deduplicatedUsers, eventsInLastBatch) => { 40 | const currentTime = new Date().getTime(); 41 | const ret = { 42 | kind: 'diagnostic', 43 | id: diagnosticId, 44 | creationDate: currentTime, 45 | dataSinceDate, 46 | droppedEvents, 47 | deduplicatedUsers, 48 | eventsInLastBatch, 49 | streamInits, 50 | }; 51 | dataSinceDate = currentTime; 52 | streamInits = []; 53 | return ret; 54 | }; 55 | 56 | return acc; 57 | } 58 | 59 | function DiagnosticId(sdkKey) { 60 | const ret = { 61 | diagnosticId: uuidv4(), 62 | }; 63 | if (sdkKey) { 64 | ret.sdkKeySuffix = sdkKey.length > 6 ? 
sdkKey.substring(sdkKey.length - 6) : sdkKey; 65 | } 66 | return ret; 67 | } 68 | 69 | function makeSdkData(config) { 70 | const sdkData = { 71 | name: 'node-server-sdk', 72 | version: packageJson.version, 73 | }; 74 | if (config.wrapperName) { 75 | sdkData.wrapperName = config.wrapperName; 76 | } 77 | if (config.wrapperVersion) { 78 | sdkData.wrapperVersion = config.wrapperVersion; 79 | } 80 | return sdkData; 81 | } 82 | 83 | function makeConfigData(config) { 84 | const defaults = configuration.defaults(); 85 | const secondsToMillis = sec => Math.trunc(sec * 1000); 86 | 87 | function getComponentDescription(component, defaultName) { 88 | if (component) { 89 | return component.description || 'custom'; 90 | } 91 | return defaultName; 92 | } 93 | 94 | const configData = { 95 | customBaseURI: config.baseUri !== defaults.baseUri, 96 | customStreamURI: config.streamUri !== defaults.streamUri, 97 | customEventsURI: config.eventsUri !== defaults.eventsUri, 98 | eventsCapacity: config.capacity, 99 | connectTimeoutMillis: secondsToMillis(config.timeout), 100 | socketTimeoutMillis: secondsToMillis(config.timeout), // Node doesn't distinguish between these two kinds of timeouts 101 | eventsFlushIntervalMillis: secondsToMillis(config.flushInterval), 102 | pollingIntervalMillis: secondsToMillis(config.pollInterval), 103 | // startWaitMillis: n/a (Node SDK does not have this feature) 104 | // samplingInterval: n/a (Node SDK does not have this feature) 105 | reconnectTimeMillis: secondsToMillis(config.streamInitialReconnectDelay), 106 | streamingDisabled: !config.stream, 107 | usingRelayDaemon: !!config.useLdd, 108 | offline: !!config.offline, 109 | allAttributesPrivate: !!config.allAttributesPrivate, 110 | contextKeysCapacity: config.contextKeysCapacity, 111 | contextKeysFlushIntervalMillis: secondsToMillis(config.contextKeysFlushInterval), 112 | usingProxy: !!(config.proxyAgent || config.proxyHost), 113 | usingProxyAuthenticator: !!config.proxyAuth, 114 | 
diagnosticRecordingIntervalMillis: secondsToMillis(config.diagnosticRecordingInterval), 115 | dataStoreType: getComponentDescription(config.featureStore, 'memory'), 116 | }; 117 | 118 | return configData; 119 | } 120 | 121 | function makePlatformData() { 122 | return { 123 | name: 'Node', 124 | osArch: os.arch(), 125 | osName: normalizePlatformName(os.platform()), 126 | osVersion: os.release(), 127 | // Note that os.release() is not the same OS version string that would be reported by other languages. 128 | // It's defined as being the value returned by "uname -r" (e.g. on Mac OS 10.14, this is "18.7.0"; on 129 | // Ubuntu 16.04, it is "4.4.0-1095-aws"), or GetVersionExW in Windows. 130 | nodeVersion: process.versions.node, 131 | }; 132 | } 133 | 134 | function normalizePlatformName(platformName) { 135 | // The following logic is based on how Node.js reports the platform name 136 | switch (platformName) { 137 | case 'darwin': 138 | return 'MacOS'; 139 | case 'win32': 140 | return 'Windows'; 141 | case 'linux': 142 | return 'Linux'; 143 | default: 144 | return platformName; 145 | } 146 | } 147 | 148 | module.exports = { 149 | DiagnosticsManager, 150 | DiagnosticId, 151 | }; 152 | -------------------------------------------------------------------------------- /docs/typedoc.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | out: '/tmp/project-releaser/project/docs/build/html', 3 | exclude: [ 4 | '**/node_modules/**', 5 | 'test-types.ts' 6 | ], 7 | name: "LaunchDarkly Server-Side Node SDK (7.0.0)", 8 | readme: 'none', // don't add a home page with a copy of README.md 9 | entryPoints: "/tmp/project-releaser/project/index.d.ts" 10 | }; 11 | -------------------------------------------------------------------------------- /errors.js: -------------------------------------------------------------------------------- 1 | function createCustomError(name) { 2 | function CustomError(message, code) { 3 | 
Error.captureStackTrace && Error.captureStackTrace(this, this.constructor); 4 | this.message = message; 5 | this.code = code; 6 | } 7 | 8 | CustomError.prototype = new Error(); 9 | CustomError.prototype.name = name; 10 | CustomError.prototype.constructor = CustomError; 11 | 12 | return CustomError; 13 | } 14 | 15 | exports.LDPollingError = createCustomError('LaunchDarklyPollingError'); 16 | exports.LDStreamingError = createCustomError('LaunchDarklyStreamingError'); 17 | exports.LDUnexpectedResponseError = createCustomError('LaunchDarklyUnexpectedResponseError'); 18 | exports.LDInvalidSDKKeyError = createCustomError('LaunchDarklyInvalidSDKKeyError'); 19 | exports.LDClientError = createCustomError('LaunchDarklyClientError'); 20 | 21 | exports.isHttpErrorRecoverable = function (status) { 22 | if (status >= 400 && status < 500) { 23 | return status === 400 || status === 408 || status === 429; 24 | } 25 | return true; 26 | }; 27 | -------------------------------------------------------------------------------- /event_factory.js: -------------------------------------------------------------------------------- 1 | function isExperiment(flag, reason) { 2 | if (reason) { 3 | // If the reason says we're in an experiment, we are. Otherwise, apply 4 | // the legacy rule exclusion logic. 
5 | if (reason.inExperiment) { 6 | return true; 7 | } 8 | switch (reason.kind) { 9 | case 'RULE_MATCH': { 10 | const index = reason.ruleIndex; 11 | if (index !== undefined) { 12 | const rules = flag.rules || []; 13 | return index >= 0 && index < rules.length && !!rules[index].trackEvents; 14 | } 15 | break; 16 | } 17 | case 'FALLTHROUGH': 18 | return !!flag.trackEventsFallthrough; 19 | } 20 | } 21 | return false; 22 | } 23 | 24 | function EventFactory(withReasons) { 25 | const ef = {}; 26 | 27 | ef.newEvalEvent = (flag, context, detail, defaultVal, prereqOfFlag) => { 28 | const addExperimentData = isExperiment(flag, detail.reason); 29 | const e = { 30 | kind: 'feature', 31 | creationDate: new Date().getTime(), 32 | key: flag.key, 33 | context, 34 | value: detail.value, 35 | variation: detail.variationIndex, 36 | default: defaultVal, 37 | version: flag.version, 38 | }; 39 | // the following properties are handled separately so we don't waste bandwidth on unused keys 40 | if (addExperimentData || flag.trackEvents) { 41 | e.trackEvents = true; 42 | } 43 | if (flag.debugEventsUntilDate) { 44 | e.debugEventsUntilDate = flag.debugEventsUntilDate; 45 | } 46 | if (prereqOfFlag) { 47 | e.prereqOf = prereqOfFlag.key; 48 | } 49 | if (addExperimentData || withReasons) { 50 | e.reason = detail.reason; 51 | } 52 | return e; 53 | }; 54 | 55 | ef.newDefaultEvent = (flag, context, detail) => { 56 | const e = { 57 | kind: 'feature', 58 | creationDate: new Date().getTime(), 59 | key: flag.key, 60 | context, 61 | value: detail.value, 62 | default: detail.value, 63 | version: flag.version, 64 | }; 65 | // the following properties are handled separately so we don't waste bandwidth on unused keys 66 | if (flag.trackEvents) { 67 | e.trackEvents = true; 68 | } 69 | if (flag.debugEventsUntilDate) { 70 | e.debugEventsUntilDate = flag.debugEventsUntilDate; 71 | } 72 | if (withReasons) { 73 | e.reason = detail.reason; 74 | } 75 | return e; 76 | }; 77 | 78 | ef.newUnknownFlagEvent = (key, 
context, detail) => { 79 | const e = { 80 | kind: 'feature', 81 | creationDate: new Date().getTime(), 82 | key: key, 83 | context, 84 | value: detail.value, 85 | default: detail.value, 86 | }; 87 | if (withReasons) { 88 | e.reason = detail.reason; 89 | } 90 | return e; 91 | }; 92 | 93 | ef.newIdentifyEvent = context => ({ 94 | kind: 'identify', 95 | creationDate: new Date().getTime(), 96 | context, 97 | }); 98 | 99 | ef.newCustomEvent = (eventName, context, data, metricValue) => { 100 | const e = { 101 | kind: 'custom', 102 | creationDate: new Date().getTime(), 103 | key: eventName, 104 | context, 105 | }; 106 | if (data !== null && data !== undefined) { 107 | e.data = data; 108 | } 109 | if (metricValue !== null && metricValue !== undefined) { 110 | e.metricValue = metricValue; 111 | } 112 | return e; 113 | }; 114 | 115 | return ef; 116 | } 117 | 118 | module.exports = { 119 | EventFactory, 120 | isExperiment, 121 | }; 122 | -------------------------------------------------------------------------------- /event_summarizer.js: -------------------------------------------------------------------------------- 1 | const { getContextKinds } = require('./context'); 2 | 3 | function getKinds(event) { 4 | if (event.context) { 5 | return getContextKinds(event.context); 6 | } 7 | if (event.contextKeys) { 8 | return Object.keys(event.contextKeys); 9 | } 10 | return []; 11 | } 12 | 13 | function EventSummarizer() { 14 | const es = {}; 15 | 16 | let startDate = 0, 17 | endDate = 0, 18 | counters = {}, 19 | contextKinds = {}; 20 | 21 | es.summarizeEvent = event => { 22 | if (event.kind === 'feature') { 23 | const counterKey = 24 | event.key + 25 | ':' + 26 | (event.variation !== null && event.variation !== undefined ? event.variation : '') + 27 | ':' + 28 | (event.version !== null && event.version !== undefined ? 
event.version : ''); 29 | const counterVal = counters[counterKey]; 30 | let kinds = contextKinds[event.key]; 31 | if (!kinds) { 32 | kinds = new Set(); 33 | contextKinds[event.key] = kinds; 34 | } 35 | getKinds(event).forEach(kind => kinds.add(kind)); 36 | 37 | if (counterVal) { 38 | counterVal.count = counterVal.count + 1; 39 | } else { 40 | counters[counterKey] = { 41 | count: 1, 42 | key: event.key, 43 | version: event.version, 44 | variation: event.variation, 45 | value: event.value, 46 | default: event.default, 47 | }; 48 | } 49 | if (startDate === 0 || event.creationDate < startDate) { 50 | startDate = event.creationDate; 51 | } 52 | if (event.creationDate > endDate) { 53 | endDate = event.creationDate; 54 | } 55 | } 56 | }; 57 | 58 | es.getSummary = () => { 59 | const flagsOut = {}; 60 | for (const c of Object.values(counters)) { 61 | let flag = flagsOut[c.key]; 62 | if (!flag) { 63 | flag = { 64 | default: c.default, 65 | counters: [], 66 | contextKinds: [...contextKinds[c.key]], 67 | }; 68 | flagsOut[c.key] = flag; 69 | } 70 | const counterOut = { 71 | value: c.value, 72 | count: c.count, 73 | }; 74 | if (c.variation !== undefined && c.variation !== null) { 75 | counterOut.variation = c.variation; 76 | } 77 | if (c.version !== undefined && c.version !== null) { 78 | counterOut.version = c.version; 79 | } else { 80 | counterOut.unknown = true; 81 | } 82 | flag.counters.push(counterOut); 83 | } 84 | return { 85 | startDate: startDate, 86 | endDate: endDate, 87 | features: flagsOut, 88 | }; 89 | }; 90 | 91 | es.clearSummary = () => { 92 | startDate = 0; 93 | endDate = 0; 94 | counters = {}; 95 | contextKinds = {}; 96 | }; 97 | 98 | return es; 99 | } 100 | 101 | module.exports = EventSummarizer; 102 | -------------------------------------------------------------------------------- /feature_store.js: -------------------------------------------------------------------------------- 1 | // The default in-memory implementation of a feature store, which holds 
feature flags and 2 | // other related data received from LaunchDarkly. 3 | // 4 | // Other implementations of the same interface can be used by passing them in the featureStore 5 | // property of the client configuration (that's why the interface here is async, even though 6 | // the in-memory store doesn't do anything asynchronous - because other implementations may 7 | // need to be async). The interface is defined by LDFeatureStore in index.d.ts. 8 | // 9 | // Additional implementations should use CachingStoreWrapper if possible. 10 | 11 | // Note that the contract for feature store methods does *not* require callbacks to be deferred 12 | // with setImmediate, process.nextTick, etc. It is both allowed and desirable to call them 13 | // directly whenever possible (i.e. if we don't actually have to do any I/O), since otherwise 14 | // feature flag retrieval is a major performance bottleneck. These methods are for internal use 15 | // by the SDK, and the SDK does not make any assumptions about whether a callback executes 16 | // before or after the next statement. 
17 | 18 | function InMemoryFeatureStore() { 19 | let allData = {}; 20 | let initCalled = false; 21 | 22 | const store = {}; 23 | 24 | function callbackResult(cb, result) { 25 | cb && cb(result); 26 | } 27 | 28 | store.get = (kind, key, cb) => { 29 | const items = allData[kind.namespace] || {}; 30 | if (Object.hasOwnProperty.call(items, key)) { 31 | const item = items[key]; 32 | 33 | if (!item || item.deleted) { 34 | callbackResult(cb, null); 35 | } else { 36 | callbackResult(cb, item); 37 | } 38 | } else { 39 | callbackResult(cb, null); 40 | } 41 | }; 42 | 43 | store.all = (kind, cb) => { 44 | const results = {}; 45 | const items = allData[kind.namespace] || {}; 46 | 47 | for (const [key, item] of Object.entries(items)) { 48 | if (item && !item.deleted) { 49 | results[key] = item; 50 | } 51 | } 52 | 53 | callbackResult(cb, results); 54 | }; 55 | 56 | store.init = (newData, cb) => { 57 | allData = newData; 58 | initCalled = true; 59 | callbackResult(cb); 60 | }; 61 | 62 | store.delete = (kind, key, version, cb) => { 63 | let items = allData[kind.namespace]; 64 | if (!items) { 65 | items = {}; 66 | allData[kind] = items; 67 | } 68 | const deletedItem = { version: version, deleted: true }; 69 | if (Object.hasOwnProperty.call(items, key)) { 70 | const old = items[key]; 71 | if (!old || old.version < version) { 72 | items[key] = deletedItem; 73 | } 74 | } else { 75 | items[key] = deletedItem; 76 | } 77 | 78 | callbackResult(cb); 79 | }; 80 | 81 | store.upsert = (kind, item, cb) => { 82 | const key = item.key; 83 | let items = allData[kind.namespace]; 84 | if (!items) { 85 | items = {}; 86 | allData[kind.namespace] = items; 87 | } 88 | 89 | if (Object.hasOwnProperty.call(items, key)) { 90 | const old = items[key]; 91 | if (old && old.version < item.version) { 92 | items[key] = clone(item); 93 | } 94 | } else { 95 | items[key] = clone(item); 96 | } 97 | 98 | callbackResult(cb); 99 | }; 100 | 101 | store.initialized = cb => { 102 | callbackResult(cb, initCalled === true); 
  };

  store.close = () => {
    // Close on the in-memory store is a no-op
  };

  store.description = 'memory';

  return store;
}

// Deep clone an object. Does not preserve any
// functions on the object
// (JSON round-trip: it also drops undefined-valued properties and converts
// non-JSON types such as Dates to their string forms).
function clone(obj) {
  return JSON.parse(JSON.stringify(obj));
}

module.exports = InMemoryFeatureStore;
--------------------------------------------------------------------------------
/file_data_source.js:
--------------------------------------------------------------------------------
const fs = require('fs'),
  dataKind = require('./versioned_data_kind'),
  loggers = require('./loggers');

// Lazily determined on first use: whether the optional "yaml" package is
// installed, and a reference to its parse function if so.
let yamlAvailable;
let yamlParser;

/*
FileDataSource provides a way to use local files as a source of feature flag state, instead of
connecting to LaunchDarkly. This would typically be used in a test environment.

See documentation in index.d.ts.
*/
function FileDataSource(options) {
  // Probe for the optional yaml dependency only once per process.
  if (yamlAvailable === undefined) {
    try {
      const yaml = require('yaml');
      yamlAvailable = true;
      yamlParser = yaml.parse;
    } catch (err) {
      yamlAvailable = false;
    }
  }
  // If the yaml package is available, we can use its parser for all files because
  // every valid JSON document is also a valid YAML document.
  const parseData = yamlAvailable ?
yamlParser : JSON.parse; 27 | 28 | const paths = (options && options.paths) || []; 29 | const autoUpdate = !!options.autoUpdate; 30 | 31 | return config => { 32 | const logger = options.logger || config.logger || loggers.nullLogger(); 33 | const featureStore = config.featureStore; 34 | const timestamps = {}; 35 | let watchers = []; 36 | let pendingUpdate = false; 37 | let inited = false; 38 | 39 | function getFileTimestampPromise(path) { 40 | return new Promise((resolve, reject) => { 41 | fs.stat(path, (err, stat) => { 42 | if (err) { 43 | reject(err); 44 | } else { 45 | resolve(stat.mtimeMs || stat.mtime); // mtimeMs isn't always available; either of these values will work for us 46 | } 47 | }); 48 | }); 49 | } 50 | 51 | function loadFilePromise(path, allDataIn) { 52 | const allData = allDataIn; 53 | return new Promise((resolve, reject) => 54 | fs.readFile(path, 'utf8', (err, data) => (err ? reject(err) : resolve(data))) 55 | ) 56 | .then(data => { 57 | const parsed = parseData(data) || {}; 58 | const addItem = (kind, item) => { 59 | if (!allData[kind.namespace]) { 60 | allData[kind.namespace] = {}; 61 | } 62 | if (allData[kind.namespace][item.key]) { 63 | throw new Error('found duplicate key: "' + item.key + '"'); 64 | } else { 65 | allData[kind.namespace][item.key] = item; 66 | } 67 | }; 68 | Object.keys(parsed.flags || {}).forEach(key => { 69 | addItem(dataKind.features, parsed.flags[key]); 70 | }); 71 | Object.keys(parsed.flagValues || {}).forEach(key => { 72 | addItem(dataKind.features, makeFlagWithValue(key, parsed.flagValues[key])); 73 | }); 74 | Object.keys(parsed.segments || {}).forEach(key => { 75 | addItem(dataKind.segments, parsed.segments[key]); 76 | }); 77 | logger.info('Loaded flags from ' + path); 78 | }) 79 | .then(() => getFileTimestampPromise(path)) 80 | .then(timestamp => { 81 | timestamps[path] = timestamp; 82 | }); 83 | } 84 | 85 | function loadAllPromise() { 86 | pendingUpdate = false; 87 | const allData = {}; 88 | let p = Promise.resolve(); 
89 | for (let i = 0; i < paths.length; i++) { 90 | (path => { 91 | p = p 92 | .then(() => loadFilePromise(path, allData)) 93 | .catch(e => { 94 | throw new Error('Unable to load flags: ' + e + ' [' + path + ']'); 95 | }); 96 | })(paths[i]); 97 | } 98 | return p.then(() => initStorePromise(allData)); 99 | } 100 | 101 | function initStorePromise(data) { 102 | return new Promise(resolve => 103 | featureStore.init(data, () => { 104 | inited = true; 105 | resolve(); 106 | }) 107 | ); 108 | } 109 | 110 | function makeFlagWithValue(key, value) { 111 | return { 112 | key: key, 113 | on: true, 114 | fallthrough: { variation: 0 }, 115 | variations: [value], 116 | }; 117 | } 118 | 119 | function maybeReloadForPath(path) { 120 | if (pendingUpdate) { 121 | return; // coalesce updates so we don't do multiple reloads if a whole set of files was just updated 122 | } 123 | const reload = () => { 124 | loadAllPromise() 125 | .then(() => { 126 | logger.warn('Reloaded flags from file data'); 127 | }) 128 | .catch(() => {}); 129 | }; 130 | getFileTimestampPromise(path) 131 | .then(timestamp => { 132 | // We do this check of the modified time because there's a known issue with fs.watch() 133 | // reporting multiple changes when really the file has only changed once. 134 | if (timestamp !== timestamps[path]) { 135 | pendingUpdate = true; 136 | setTimeout(reload, 10); 137 | // The 10ms delay above is arbitrary - we just don't want to have the number be zero, 138 | // because in a case where multiple fs.watch events are fired off one after another, 139 | // we want the reload to happen only after all of the event handlers have executed. 
140 | } 141 | }) 142 | .catch(() => { 143 | logger.warn('Unexpected error trying to get timestamp of file: ' + path); 144 | }); 145 | } 146 | 147 | function startWatching() { 148 | paths.forEach(path => { 149 | const watcher = fs.watch(path, { persistent: false }, () => { 150 | maybeReloadForPath(path); 151 | }); 152 | watchers.push(watcher); 153 | }); 154 | } 155 | 156 | function stopWatching() { 157 | watchers.forEach(w => w.close()); 158 | watchers = []; 159 | } 160 | 161 | const fds = {}; 162 | 163 | fds.start = fn => { 164 | const cb = fn || (() => {}); 165 | 166 | if (autoUpdate) { 167 | startWatching(); 168 | } 169 | 170 | loadAllPromise().then( 171 | () => cb(), 172 | err => cb(err) 173 | ); 174 | }; 175 | 176 | fds.stop = () => { 177 | if (autoUpdate) { 178 | stopWatching(); 179 | } 180 | }; 181 | 182 | fds.initialized = () => inited; 183 | 184 | fds.close = () => { 185 | fds.stop(); 186 | }; 187 | 188 | return fds; 189 | }; 190 | } 191 | 192 | module.exports = FileDataSource; 193 | -------------------------------------------------------------------------------- /flags_state.js: -------------------------------------------------------------------------------- 1 | function FlagsStateBuilder(valid, withReasons) { 2 | const builder = {}; 3 | const flagValues = {}; 4 | const flagMetadata = {}; 5 | 6 | builder.addFlag = (flag, value, variation, reason, trackEvents, trackReason, detailsOnlyIfTracked) => { 7 | flagValues[flag.key] = value; 8 | const meta = {}; 9 | if (variation !== undefined && variation !== null) { 10 | meta.variation = variation; 11 | } 12 | const omitDetails = 13 | detailsOnlyIfTracked && 14 | !trackEvents && 15 | !trackReason && 16 | (flag.debugEventsUntilDate === undefined || flag.debugEventsUntilDate === null); 17 | if (!omitDetails) { 18 | meta.version = flag.version; 19 | } 20 | if (reason && (trackReason || (withReasons && !omitDetails))) { 21 | meta.reason = reason; 22 | } 23 | if (trackEvents) { 24 | meta.trackEvents = true; 25 | } 26 | 
if (trackReason) { 27 | meta.trackReason = true; 28 | } 29 | if (flag.debugEventsUntilDate !== undefined && flag.debugEventsUntilDate !== null) { 30 | meta.debugEventsUntilDate = flag.debugEventsUntilDate; 31 | } 32 | flagMetadata[flag.key] = meta; 33 | }; 34 | 35 | builder.build = () => ({ 36 | valid: valid, 37 | allValues: () => flagValues, 38 | getFlagValue: key => flagValues[key], 39 | getFlagReason: key => (flagMetadata[key] ? flagMetadata[key].reason : null), 40 | toJSON: () => Object.assign({}, flagValues, { $flagsState: flagMetadata, $valid: valid }), 41 | }); 42 | 43 | return builder; 44 | } 45 | 46 | module.exports = FlagsStateBuilder; 47 | -------------------------------------------------------------------------------- /integrations.js: -------------------------------------------------------------------------------- 1 | const FileDataSource = require('./file_data_source'); 2 | const TestData = require('./test_data'); 3 | 4 | module.exports = { 5 | FileDataSource, 6 | TestData, 7 | }; 8 | -------------------------------------------------------------------------------- /interfaces.js: -------------------------------------------------------------------------------- 1 | // This file is currently only a placeholder to allow TypeScript packages to import the 2 | // interface types that are declared in index.d.ts for the "interfaces" submodule. 3 | -------------------------------------------------------------------------------- /loggers.js: -------------------------------------------------------------------------------- 1 | const util = require('util'); 2 | 3 | const logLevels = ['debug', 'info', 'warn', 'error', 'none']; 4 | 5 | /** 6 | * A simple logger that writes to stderr. 
See index.d.ts 7 | */ 8 | function basicLogger(options) { 9 | const destination = (options && options.destination) || console.error; 10 | if (typeof destination !== 'function') { 11 | throw new Error('destination for basicLogger was set to a non-function'); 12 | } 13 | 14 | let minLevel = 1; // default is 'info' 15 | if (options && options.level) { 16 | for (let i = 0; i < logLevels.length; i++) { 17 | if (logLevels[i] === options.level) { 18 | minLevel = i; 19 | } 20 | } 21 | } 22 | 23 | function write(prefix, args) { 24 | if (args.length < 1) { 25 | return; 26 | } 27 | let line; 28 | if (args.length === 1) { 29 | line = prefix + args[0]; 30 | } else { 31 | const tempArgs = [...args]; 32 | tempArgs[0] = prefix + tempArgs[0]; 33 | line = util.format(...tempArgs); 34 | } 35 | destination(line); 36 | } 37 | 38 | const logger = {}; 39 | for (let i = 0; i < logLevels.length; i++) { 40 | const levelName = logLevels[i]; 41 | if (levelName !== 'none') { 42 | if (i < minLevel) { 43 | logger[levelName] = () => {}; 44 | } else { 45 | const prefix = levelName + ': [LaunchDarkly] '; 46 | logger[levelName] = function () { 47 | // can't use arrow function with "arguments" 48 | write(prefix, arguments); 49 | }; 50 | } 51 | } 52 | } 53 | 54 | return logger; 55 | } 56 | 57 | /** 58 | * Returns a logger that does nothing. 59 | */ 60 | function nullLogger() { 61 | return { 62 | debug: () => {}, 63 | info: () => {}, 64 | warn: () => {}, 65 | error: () => {}, 66 | }; 67 | } 68 | 69 | // The safeLogger logic exists because we allow the application to pass in a custom logger, but 70 | // there is no guarantee that the logger works correctly and if it ever throws exceptions there 71 | // could be serious consequences (e.g. an uncaught exception within an error event handler, due 72 | // to the SDK trying to log the error, can terminate the application). 
An exception could result 73 | // from faulty logic in the logger implementation, or it could be that this is not a logger at 74 | // all but some other kind of object; the former is handled by a catch block that logs an error 75 | // message to the SDK's default logger, and we can at least partly guard against the latter by 76 | // checking for the presence of required methods at configuration time. 77 | 78 | /** 79 | * Asserts that the caller-supplied logger contains all required methods 80 | * and wraps it in an exception handler that falls back to the fallbackLogger. 81 | * @param {LDLogger} logger 82 | * @param {LDLogger} fallbackLogger 83 | */ 84 | function safeLogger(logger, fallbackLogger) { 85 | validateLogger(logger); 86 | 87 | const wrappedLogger = {}; 88 | logLevels.forEach(level => { 89 | if (level !== 'none') { 90 | wrappedLogger[level] = wrapLoggerLevel(logger, fallbackLogger, level); 91 | } 92 | }); 93 | 94 | return wrappedLogger; 95 | } 96 | 97 | function validateLogger(logger) { 98 | logLevels.forEach(level => { 99 | if (level !== 'none' && (!logger[level] || typeof logger[level] !== 'function')) { 100 | throw new Error('Provided logger instance must support logger.' + level + '(...) method'); 101 | // Note that the SDK normally does not throw exceptions to the application, but that rule 102 | // does not apply to LDClient.init() which will throw an exception if the parameters are so 103 | // invalid that we cannot proceed with creating the client. An invalid logger meets those 104 | // criteria since the SDK calls the logger during nearly all of its operations. 
105 | } 106 | }); 107 | } 108 | 109 | function wrapLoggerLevel(logger, fallbackLogger, level) { 110 | const logFn = logger[level]; 111 | return function wrappedLoggerMethod() { 112 | try { 113 | return logFn.apply(logger, arguments); 114 | } catch (err) { 115 | fallbackLogger.error('Error calling provided logger instance method ' + level + ': ' + err); 116 | fallbackLogger[level].apply(fallbackLogger, arguments); 117 | } 118 | }; 119 | } 120 | 121 | module.exports = { 122 | basicLogger, 123 | nullLogger, 124 | safeLogger, 125 | }; 126 | -------------------------------------------------------------------------------- /messages.js: -------------------------------------------------------------------------------- 1 | const errors = require('./errors'); 2 | 3 | exports.deprecated = (oldName, newName) => `"${oldName}" is deprecated, please use "${newName}"`; 4 | 5 | exports.httpErrorMessage = (err, context, retryMessage) => { 6 | let desc; 7 | if (err.status) { 8 | desc = `error ${err.status}${err.status === 401 ? ' (invalid SDK key)' : ''}`; 9 | } else { 10 | desc = `I/O error (${err.message || err})`; 11 | } 12 | const action = errors.isHttpErrorRecoverable(err.status) ? 
retryMessage : 'giving up permanently'; 13 | return `Received ${desc} for ${context} - ${action}`; 14 | }; 15 | 16 | exports.missingContextKeyNoEvent = () => 'User was unspecified or had no key; event will not be sent'; 17 | 18 | exports.optionBelowMinimum = (name, value, min) => 19 | `Config option "${name}" had invalid value of ${value}, using minimum of ${min} instead`; 20 | 21 | exports.unknownOption = name => `Ignoring unknown config option "${name}"`; 22 | 23 | exports.wrongOptionType = (name, expectedType, actualType) => 24 | `Config option "${name}" should be of type ${expectedType}, got ${actualType}, using default value`; 25 | 26 | exports.wrongOptionTypeBoolean = (name, actualType) => 27 | `Config option "${name}" should be a boolean, got ${actualType}, converting to boolean`; 28 | 29 | exports.invalidTagValue = name => `Config option "${name}" must only contain letters, numbers, ., _ or -.`; 30 | 31 | exports.tagValueTooLong = name => `Value of "${name}" was longer than 64 characters and was discarded.`; 32 | -------------------------------------------------------------------------------- /operators.js: -------------------------------------------------------------------------------- 1 | const semver = require('semver'); 2 | 3 | // Our reference SDK, Go, parses date/time strings with the time.RFC3339Nano format. This regex should match 4 | // strings that are valid in that format, and no others. 5 | // Acceptable: 2019-10-31T23:59:59Z, 2019-10-31T23:59:59.100Z, 2019-10-31T23:59:59-07, 2019-10-31T23:59:59-07:00, etc. 
6 | // Unacceptable: no "T", no time zone designation 7 | const dateRegex = new RegExp('^\\d\\d\\d\\d-\\d\\d-\\d\\dT\\d\\d:\\d\\d:\\d\\d(\\.\\d\\d*)?(Z|[-+]\\d\\d(:\\d\\d)?)'); 8 | 9 | function stringOperator(f) { 10 | return (userValue, clauseValue) => 11 | typeof userValue === 'string' && typeof clauseValue === 'string' && f(userValue, clauseValue); 12 | } 13 | 14 | function numericOperator(f) { 15 | return (userValue, clauseValue) => 16 | typeof userValue === 'number' && typeof clauseValue === 'number' && f(userValue, clauseValue); 17 | } 18 | 19 | function dateOperator(f) { 20 | return (userValue, clauseValue) => { 21 | const userValueNum = parseDate(userValue); 22 | const clauseValueNum = parseDate(clauseValue); 23 | return userValueNum !== null && clauseValueNum !== null && f(userValueNum, clauseValueNum); 24 | }; 25 | } 26 | 27 | function parseDate(input) { 28 | switch (typeof input) { 29 | case 'number': 30 | return input; 31 | case 'string': 32 | return dateRegex.test(input) ? Date.parse(input) : null; 33 | default: 34 | return null; 35 | } 36 | } 37 | 38 | function semVerOperator(fn) { 39 | return (a, b) => { 40 | const av = parseSemVer(a), 41 | bv = parseSemVer(b); 42 | return av && bv ? fn(av, bv) : false; 43 | }; 44 | } 45 | 46 | function parseSemVer(input) { 47 | if (typeof input !== 'string') { 48 | return null; 49 | } 50 | if (input.startsWith('v')) { 51 | // the semver library tolerates a leading "v", but the standard does not. 
52 | return null; 53 | } 54 | let ret = semver.parse(input); 55 | if (!ret) { 56 | const versionNumericComponents = new RegExp('^\\d+(\\.\\d+)?(\\.\\d+)?').exec(input); 57 | if (versionNumericComponents) { 58 | let transformed = versionNumericComponents[0]; 59 | for (let i = 1; i < versionNumericComponents.length; i++) { 60 | if (versionNumericComponents[i] === undefined) { 61 | transformed = transformed + '.0'; 62 | } 63 | } 64 | transformed = transformed + input.substring(versionNumericComponents[0].length); 65 | ret = semver.parse(transformed); 66 | } 67 | } 68 | return ret; 69 | } 70 | 71 | function safeRegexMatch(pattern, value) { 72 | try { 73 | return new RegExp(pattern).test(value); 74 | } catch (e) { 75 | // do not propagate this exception, just treat a bad regex as a non-match for consistency with other SDKs 76 | return false; 77 | } 78 | } 79 | 80 | const operators = { 81 | in: (a, b) => a === b, 82 | endsWith: stringOperator((a, b) => a.endsWith(b)), 83 | startsWith: stringOperator((a, b) => a.startsWith(b)), 84 | matches: stringOperator((a, b) => safeRegexMatch(b, a)), 85 | contains: stringOperator((a, b) => a.indexOf(b) > -1), 86 | lessThan: numericOperator((a, b) => a < b), 87 | lessThanOrEqual: numericOperator((a, b) => a <= b), 88 | greaterThan: numericOperator((a, b) => a > b), 89 | greaterThanOrEqual: numericOperator((a, b) => a >= b), 90 | before: dateOperator((a, b) => a < b), 91 | after: dateOperator((a, b) => a > b), 92 | semVerEqual: semVerOperator((a, b) => a.compare(b) === 0), 93 | semVerLessThan: semVerOperator((a, b) => a.compare(b) < 0), 94 | semVerGreaterThan: semVerOperator((a, b) => a.compare(b) > 0), 95 | }; 96 | 97 | const operatorNone = () => false; 98 | 99 | function fn(op) { 100 | return operators[op] || operatorNone; 101 | } 102 | 103 | module.exports = { 104 | operators: operators, 105 | fn: fn, 106 | }; 107 | -------------------------------------------------------------------------------- /package.json: 
-------------------------------------------------------------------------------- 1 | { 2 | "name": "launchdarkly-node-server-sdk", 3 | "version": "7.0.4", 4 | "description": "LaunchDarkly Server-Side SDK for Node.js", 5 | "main": "index.js", 6 | "scripts": { 7 | "test": "jest --ci --coverage --runInBand", 8 | "check-typescript": "node_modules/typescript/bin/tsc", 9 | "lint": "eslint --format 'node_modules/eslint-formatter-pretty' --ignore-path .eslintignore .", 10 | "lint-fix": "eslint --fix --format 'node_modules/eslint-formatter-pretty' --ignore-path .eslintignore .", 11 | "contract-test-service": "npm --prefix contract-tests install && npm --prefix contract-tests start", 12 | "contract-test-harness": "curl -s https://raw.githubusercontent.com/launchdarkly/sdk-test-harness/master/downloader/run.sh \\ | VERSION=v2 PARAMS=\"-url http://localhost:8000 -debug -stop-service-at-end $TEST_HARNESS_PARAMS\" sh", 13 | "contract-tests": "npm run contract-test-service & npm run contract-test-harness" 14 | }, 15 | "types": "./index.d.ts", 16 | "repository": { 17 | "type": "git", 18 | "url": "https://github.com/launchdarkly/node-server-sdk.git" 19 | }, 20 | "keywords": [ 21 | "launchdarkly", 22 | "analytics", 23 | "client" 24 | ], 25 | "license": "Apache-2.0", 26 | "bugs": { 27 | "url": "https://github.com/launchdarkly/node-server-sdk/issues" 28 | }, 29 | "homepage": "https://github.com/launchdarkly/node-server-sdk", 30 | "dependencies": { 31 | "async": "^3.2.4", 32 | "launchdarkly-eventsource": "1.4.4", 33 | "lru-cache": "^6.0.0", 34 | "node-cache": "^5.1.0", 35 | "semver": "^7.5.4", 36 | "tunnel": "0.0.6", 37 | "uuid": "^8.3.2" 38 | }, 39 | "engines": { 40 | "node": ">= 12.0.0" 41 | }, 42 | "devDependencies": { 43 | "@babel/core": "^7.14.6", 44 | "@babel/preset-env": "^7.14.5", 45 | "@types/jest": "^27.4.0", 46 | "@types/node": "^15.12.2", 47 | "babel-jest": "^27.0.2", 48 | "eslint": "^7.28.0", 49 | "eslint-config-prettier": "^8.3.0", 50 | "eslint-formatter-pretty": 
"^4.1.0", 51 | "eslint-plugin-prettier": "^3.4.0", 52 | "jest": "^27.0.4", 53 | "jest-junit": "^12.2.0", 54 | "launchdarkly-js-test-helpers": "^2.2.0", 55 | "prettier": "^2.3.1", 56 | "tmp": "^0.2.1", 57 | "typescript": "~4.4.4", 58 | "yaml": "^1.10.2" 59 | }, 60 | "jest": { 61 | "rootDir": ".", 62 | "testEnvironment": "node", 63 | "testMatch": [ 64 | "**/*-test.js" 65 | ], 66 | "testResultsProcessor": "jest-junit" 67 | } 68 | } 69 | -------------------------------------------------------------------------------- /polling.js: -------------------------------------------------------------------------------- 1 | const errors = require('./errors'); 2 | const messages = require('./messages'); 3 | const dataKind = require('./versioned_data_kind'); 4 | 5 | function PollingProcessor(config, requestor) { 6 | const processor = {}, 7 | featureStore = config.featureStore, 8 | intervalMs = config.pollInterval * 1000; 9 | 10 | let stopped = false; 11 | 12 | let pollTask; 13 | 14 | function poll(maybeCallback) { 15 | const cb = maybeCallback || function () {}; 16 | 17 | if (stopped) { 18 | return; 19 | } 20 | 21 | config.logger.debug('Polling LaunchDarkly for feature flag updates'); 22 | 23 | requestor.requestAllData((err, respBody) => { 24 | if (err) { 25 | if (err.status && !errors.isHttpErrorRecoverable(err.status)) { 26 | const message = messages.httpErrorMessage(err, 'polling request'); 27 | config.logger.error(message); 28 | cb(new errors.LDPollingError(message)); 29 | processor.stop(); 30 | } else { 31 | config.logger.warn(messages.httpErrorMessage(err, 'polling request', 'will retry')); 32 | } 33 | } else { 34 | if (respBody) { 35 | const allData = JSON.parse(respBody); 36 | const initData = {}; 37 | initData[dataKind.features.namespace] = allData.flags; 38 | initData[dataKind.segments.namespace] = allData.segments; 39 | featureStore.init(initData, () => { 40 | cb(); 41 | }); 42 | } 43 | // There wasn't an error but there wasn't any new data either, so just keep polling 
44 | } 45 | }); 46 | } 47 | 48 | processor.start = cb => { 49 | if (!pollTask && !stopped) { 50 | pollTask = setInterval(() => poll(cb), intervalMs); 51 | // setInterval always waits for the delay before firing the first time, but we want to do an initial poll right away 52 | poll(cb); 53 | } 54 | }; 55 | 56 | processor.stop = () => { 57 | stopped = true; 58 | if (pollTask) { 59 | clearInterval(pollTask); 60 | } 61 | }; 62 | 63 | processor.close = () => { 64 | processor.stop(); 65 | }; 66 | 67 | return processor; 68 | } 69 | 70 | module.exports = PollingProcessor; 71 | -------------------------------------------------------------------------------- /requestor.js: -------------------------------------------------------------------------------- 1 | const httpUtils = require('./utils/httpUtils'); 2 | 3 | /** 4 | * Creates a new Requestor object, which handles remote requests to fetch feature flags or segments for LaunchDarkly. 5 | * This is never called synchronously when requesting a feature flag for a user (e.g. via the variation method). 6 | * 7 | * It will be called at the configured polling interval in polling mode. Older versions of the SDK also 8 | * could use the Requestor to make a polling request even in streaming mode, for very large data sets, 9 | * but the LD infrastructure no longer uses that behavior. 
10 | * 11 | * @param {String} sdkKey the SDK key 12 | * @param {Object} config the LaunchDarkly client configuration object 13 | **/ 14 | function Requestor(sdkKey, config) { 15 | const requestor = {}; 16 | 17 | const headers = httpUtils.getDefaultHeaders(sdkKey, config); 18 | const requestWithETagCaching = httpUtils.httpWithETagCache(); 19 | 20 | function makeRequest(resource) { 21 | const url = config.baseUri + resource; 22 | const requestParams = { method: 'GET', headers }; 23 | return (cb, errCb) => { 24 | requestWithETagCaching(url, requestParams, null, config, (err, resp, body) => { 25 | if (err) { 26 | errCb(err); 27 | } else { 28 | cb(resp, body); 29 | } 30 | }); 31 | }; 32 | } 33 | 34 | function processResponse(cb) { 35 | return (response, body) => { 36 | if (response.statusCode !== 200 && response.statusCode !== 304) { 37 | const err = new Error('Unexpected status code: ' + response.statusCode); 38 | err.status = response.statusCode; 39 | cb(err, null); 40 | } else { 41 | cb(null, response.statusCode === 304 ? null : body); 42 | } 43 | }; 44 | } 45 | 46 | function processErrorResponse(cb) { 47 | return err => { 48 | cb(err, null); 49 | }; 50 | } 51 | 52 | // Note that requestAllData will pass (null, null) rather than (null, body) if it gets a 304 response; 53 | // this is deliberate so that we don't keep updating the data store unnecessarily if there are no changes. 
54 | requestor.requestAllData = cb => { 55 | const req = makeRequest('/sdk/latest-all'); 56 | req(processResponse(cb), processErrorResponse(cb)); 57 | }; 58 | 59 | return requestor; 60 | } 61 | 62 | module.exports = Requestor; 63 | -------------------------------------------------------------------------------- /scripts/better-audit.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # This script processes the output of "npm audit" to make it more useful, as follows: 4 | # - For each flagged vulnerability, it looks at the "path" field and extracts both the flagged 5 | # package (the last element in the path) and the topmost dependency that led to it (the first 6 | # element in the path). 7 | # - It sorts these and eliminates duplicates. 8 | # - It then compares each of the topmost dependencies to package.json to see if it is from 9 | # "dependencies", "peerDependencies", or "devDependencies". If it is either of the first two 10 | # then this is a real runtime vulnerability, and must be fixed by updating the topmost 11 | # dependency. If it is from devDependencies, then it can be safely fixed with "npm audit fix". 
12 | 13 | set -e 14 | 15 | function readPackages() { 16 | inCategory=$1 17 | jq -r ".${inCategory} | keys | .[]" package.json 2>/dev/null || true 18 | } 19 | 20 | function isInList() { 21 | item=$1 22 | shift 23 | for x in $@; do 24 | if [ "$item" == "$x" ]; then 25 | true 26 | return 27 | fi 28 | done 29 | false 30 | } 31 | 32 | dependencies=$(readPackages dependencies) 33 | devDependencies=$(readPackages devDependencies) 34 | peerDependencies=$(readPackages peerDependencies) 35 | 36 | function processItems() { 37 | flaggedRuntime=0 38 | flaggedDev=0 39 | while read -r badPackage topLevelDep; do 40 | echo -n "flagged package \"$badPackage\", referenced via \"$topLevelDep\" " 41 | for category in dependencies peerDependencies devDependencies; do 42 | if isInList $topLevelDep ${!category}; then 43 | if [ "$category" == "devDependencies" ]; then 44 | echo "-- from \"$category\"" 45 | flaggedDev=1 46 | else 47 | echo "-- from \"$category\" (RUNTIME) ***" 48 | flaggedRuntime=1 49 | fi 50 | break 51 | fi 52 | done 53 | done 54 | echo 55 | if [ "$flaggedRuntime" == "1" ]; then 56 | echo "*** At least one runtime dependency was flagged. These must be fixed by updating package.json." 57 | echo "Do not use 'npm audit fix'." 58 | exit 1 # return an error, causing the build to fail 59 | elif [ "$flaggedDev" == "1" ]; then 60 | echo "Only development dependencies were flagged. You may safely run 'npm audit fix', which will" 61 | echo "fix these by adding overrides to package-lock.json." 62 | else 63 | echo "Congratulations! No dependencies were flagged by 'npm audit'." 64 | fi 65 | } 66 | 67 | echo "Running npm audit..." 
68 | echo 69 | 70 | npm audit --json \ 71 | | grep '"path":' \ 72 | | sort | uniq \ 73 | | sed -n -e 's#.*"path": "\([^"]*\)".*#\1#p' \ 74 | | awk -F '>' '{ print $NF,$1 }' \ 75 | | sort | uniq \ 76 | | processItems 77 | -------------------------------------------------------------------------------- /sharedtest/big_segment_store_tests.js: -------------------------------------------------------------------------------- 1 | const { nullLogger } = require('../loggers'); 2 | 3 | const { withCloseable } = require('launchdarkly-js-test-helpers'); 4 | 5 | // See index.d.ts for interface documentation 6 | 7 | const fakeUserHash = 'userhash'; 8 | 9 | function runBigSegmentStoreTests(storeFactory, clearExistingData, setMetadata, setSegments) { 10 | function doAllTestsWithPrefix(prefix) { 11 | async function withStoreAndEmptyData(action) { 12 | await clearExistingData(prefix); 13 | await withCloseable(storeFactory(prefix, nullLogger()), action); 14 | } 15 | 16 | describe('getMetadata', () => { 17 | it('valid value', async () => { 18 | const expected = { lastUpToDate: 1234567890 }; 19 | await withStoreAndEmptyData(async store => { 20 | await setMetadata(prefix, expected); 21 | 22 | const meta = await store.getMetadata(); 23 | expect(meta).toEqual(expected); 24 | }); 25 | }); 26 | 27 | it('no value', async () => { 28 | await withStoreAndEmptyData(async store => { 29 | const meta = await store.getMetadata(); 30 | expect(meta).toEqual({ lastUpToDate: undefined }); 31 | }); 32 | }); 33 | }); 34 | 35 | describe('getUserMembership', () => { 36 | it('not found', async () => { 37 | await withStoreAndEmptyData(async store => { 38 | const membership = await store.getUserMembership(fakeUserHash); 39 | if (membership) { 40 | // either null/undefined or an empty membership would be acceptable 41 | expect(membership).toEqual({}); 42 | } 43 | }); 44 | }); 45 | 46 | it('includes only', async () => { 47 | await withStoreAndEmptyData(async store => { 48 | await setSegments(prefix, 
fakeUserHash, ['key1', 'key2'], []); 49 | 50 | const membership = await store.getUserMembership(fakeUserHash); 51 | expect(membership).toEqual({ key1: true, key2: true }); 52 | }); 53 | }); 54 | 55 | it('excludes only', async () => { 56 | await withStoreAndEmptyData(async store => { 57 | await setSegments(prefix, fakeUserHash, [], ['key1', 'key2']); 58 | 59 | const membership = await store.getUserMembership(fakeUserHash); 60 | expect(membership).toEqual({ key1: false, key2: false }); 61 | }); 62 | }); 63 | 64 | it('includes and excludes', async () => { 65 | await withStoreAndEmptyData(async store => { 66 | await setSegments(prefix, fakeUserHash, ['key1', 'key2'], ['key2', 'key3']); 67 | 68 | const membership = await store.getUserMembership(fakeUserHash); 69 | expect(membership).toEqual({ key1: true, key2: true, key3: false }); // include of key2 overrides exclude 70 | }); 71 | }); 72 | }); 73 | } 74 | 75 | describe('with non-empty prefix', () => { 76 | doAllTestsWithPrefix('testprefix'); 77 | }); 78 | 79 | describe('with empty prefix', () => { 80 | doAllTestsWithPrefix(undefined); 81 | }); 82 | } 83 | 84 | module.exports = { 85 | runBigSegmentStoreTests, 86 | }; 87 | -------------------------------------------------------------------------------- /sharedtest/feature_store_tests.js: -------------------------------------------------------------------------------- 1 | const dataKind = require('../versioned_data_kind'); 2 | 3 | const { promisifySingle, withCloseable, AsyncQueue } = require('launchdarkly-js-test-helpers'); 4 | 5 | function runFeatureStoreTests(createStore, clearExistingData) { 6 | const feature1 = { key: 'foo', version: 10 }; 7 | const feature2 = { key: 'bar', version: 10 }; 8 | 9 | async function withInitedStore(asyncAction) { 10 | if (clearExistingData) { 11 | await clearExistingData(); 12 | } 13 | const store = createStore(); 14 | await withCloseable(store, async () => { 15 | const initData = { 16 | [dataKind.features.namespace]: { 17 | foo: 
feature1, 18 | bar: feature2, 19 | }, 20 | [dataKind.segments.namespace]: {}, 21 | }; 22 | await promisifySingle(store.init)(initData); 23 | await asyncAction(store); 24 | }); 25 | } 26 | 27 | it('is initialized after calling init()', async () => { 28 | await withInitedStore(async store => { 29 | const result = await promisifySingle(store.initialized)(); 30 | expect(result).toBe(true); 31 | }); 32 | }); 33 | 34 | it('init() completely replaces previous data', async () => { 35 | await withInitedStore(async store => { 36 | const flags = { 37 | first: { key: 'first', version: 1 }, 38 | second: { key: 'second', version: 1 }, 39 | }; 40 | const segments = { first: { key: 'first', version: 2 } }; 41 | const initData1 = { 42 | [dataKind.features.namespace]: flags, 43 | [dataKind.segments.namespace]: segments, 44 | }; 45 | 46 | await promisifySingle(store.init)(initData1); 47 | const items1 = await promisifySingle(store.all)(dataKind.features); 48 | expect(items1).toEqual(flags); 49 | const items2 = await promisifySingle(store.all)(dataKind.segments); 50 | expect(items2).toEqual(segments); 51 | 52 | const newFlags = { first: { key: 'first', version: 3 } }; 53 | const newSegments = { first: { key: 'first', version: 4 } }; 54 | const initData2 = { 55 | [dataKind.features.namespace]: newFlags, 56 | [dataKind.segments.namespace]: newSegments, 57 | }; 58 | 59 | await promisifySingle(store.init)(initData2); 60 | const items3 = await promisifySingle(store.all)(dataKind.features); 61 | expect(items3).toEqual(newFlags); 62 | const items4 = await promisifySingle(store.all)(dataKind.segments); 63 | expect(items4).toEqual(newSegments); 64 | }); 65 | }); 66 | 67 | it('gets existing feature', async () => { 68 | await withInitedStore(async store => { 69 | const result = await promisifySingle(store.get)(dataKind.features, feature1.key); 70 | expect(result).toEqual(feature1); 71 | }); 72 | }); 73 | 74 | it('does not get nonexisting feature', async () => { 75 | await withInitedStore(async 
store => { 76 | const result = await promisifySingle(store.get)(dataKind.features, 'biz'); 77 | expect(result).toBe(null); 78 | }); 79 | }); 80 | 81 | it('gets all features', async () => { 82 | await withInitedStore(async store => { 83 | const result = await promisifySingle(store.all)(dataKind.features); 84 | expect(result).toEqual({ 85 | foo: feature1, 86 | bar: feature2, 87 | }); 88 | }); 89 | }); 90 | 91 | it('upserts with newer version', async () => { 92 | const newVer = { key: feature1.key, version: feature1.version + 1 }; 93 | await withInitedStore(async store => { 94 | await promisifySingle(store.upsert)(dataKind.features, newVer); 95 | const result = await promisifySingle(store.get)(dataKind.features, feature1.key); 96 | expect(result).toEqual(newVer); 97 | }); 98 | }); 99 | 100 | it('does not upsert with older version', async () => { 101 | const oldVer = { key: feature1.key, version: feature1.version - 1 }; 102 | await withInitedStore(async store => { 103 | await promisifySingle(store.upsert)(dataKind.features, oldVer); 104 | const result = await promisifySingle(store.get)(dataKind.features, feature1.key); 105 | expect(result).toEqual(feature1); 106 | }); 107 | }); 108 | 109 | it('upserts new feature', async () => { 110 | const newFeature = { key: 'biz', version: 99 }; 111 | await withInitedStore(async store => { 112 | await promisifySingle(store.upsert)(dataKind.features, newFeature); 113 | const result = await promisifySingle(store.get)(dataKind.features, newFeature.key); 114 | expect(result).toEqual(newFeature); 115 | }); 116 | }); 117 | 118 | it('handles upsert race condition within same client correctly', async () => { 119 | // Not sure if there is a way to do this one with async/await 120 | const ver1 = { key: feature1.key, version: feature1.version + 1 }; 121 | const ver2 = { key: feature1.key, version: feature1.version + 2 }; 122 | const calls = new AsyncQueue(); 123 | await withInitedStore(async store => { 124 | const callback = () => { 125 | 
calls.add(null); 126 | }; 127 | 128 | // Deliberately do not wait for the first upsert to complete before starting the second, 129 | // so their transactions will be interleaved unless we're correctly serializing updates 130 | store.upsert(dataKind.features, ver2, callback); 131 | store.upsert(dataKind.features, ver1, callback); 132 | 133 | // Now wait until both have completed 134 | await calls.take(); 135 | await calls.take(); 136 | const result = await promisifySingle(store.get)(dataKind.features, feature1.key); 137 | expect(result).toEqual(ver2); 138 | }); 139 | }); 140 | 141 | it('deletes with newer version', async () => { 142 | await withInitedStore(async store => { 143 | await promisifySingle(store.delete)(dataKind.features, feature1.key, feature1.version + 1); 144 | const result = await promisifySingle(store.get)(dataKind.features, feature1.key); 145 | expect(result).toBe(null); 146 | }); 147 | }); 148 | 149 | it('does not delete with older version', async () => { 150 | await withInitedStore(async store => { 151 | await promisifySingle(store.delete)(dataKind.features, feature1.key, feature1.version - 1); 152 | const result = await promisifySingle(store.get)(dataKind.features, feature1.key); 153 | expect(result).not.toBe(null); 154 | }); 155 | }); 156 | 157 | it('allows deleting unknown feature', async () => { 158 | await withInitedStore(async store => { 159 | await promisifySingle(store.delete)(dataKind.features, 'biz', 99); 160 | const result = await promisifySingle(store.get)(dataKind.features, 'biz'); 161 | expect(result).toBe(null); 162 | }); 163 | }); 164 | 165 | it('does not upsert older version after delete', async () => { 166 | await withInitedStore(async store => { 167 | await promisifySingle(store.delete)(dataKind.features, feature1.key, feature1.version + 1); 168 | await promisifySingle(store.upsert)(dataKind.features, feature1); 169 | const result = await promisifySingle(store.get)(dataKind.features, feature1.key); 170 | 
expect(result).toBe(null); 171 | }); 172 | }); 173 | } 174 | 175 | module.exports = { 176 | runFeatureStoreTests, 177 | }; 178 | -------------------------------------------------------------------------------- /sharedtest/persistent_feature_store_tests.js: -------------------------------------------------------------------------------- 1 | const { nullLogger } = require('../loggers'); 2 | const dataKind = require('../versioned_data_kind'); 3 | 4 | const { runFeatureStoreTests } = require('./feature_store_tests'); 5 | 6 | const { promisifySingle, withCloseable } = require('launchdarkly-js-test-helpers'); 7 | 8 | // See index.d.ts for interface documentation 9 | 10 | const cacheTime = 30; 11 | const logger = nullLogger(); 12 | 13 | function runPersistentFeatureStoreTests(createStore, clearExistingData, createStoreWithConcurrentUpdateHook) { 14 | function doAllTestsWithPrefix(prefix) { 15 | describe('without cache', () => { 16 | runFeatureStoreTests( 17 | () => createStore(prefix, 0, logger), 18 | () => clearExistingData(prefix) 19 | ); 20 | 21 | runPersistentFeatureStoreUncachedTests(prefix, createStore, clearExistingData); 22 | }); 23 | 24 | describe('with cache', () => { 25 | runFeatureStoreTests( 26 | () => createStore(prefix, cacheTime, logger), 27 | () => clearExistingData(prefix) 28 | ); 29 | 30 | // There are no special tests here that apply only when caching is enabled. 31 | // We are testing the cache behavior separately in caching_store_wrapper-test. 
32 | }); 33 | 34 | if (createStoreWithConcurrentUpdateHook) { 35 | describe('concurrent modification tests', () => { 36 | runPersistentFeatureStoreConcurrentUpdateTests( 37 | prefix, 38 | createStore, 39 | clearExistingData, 40 | createStoreWithConcurrentUpdateHook 41 | ); 42 | }); 43 | } 44 | } 45 | 46 | describe('with non-empty prefix', () => { 47 | doAllTestsWithPrefix('testprefix'); 48 | }); 49 | 50 | describe('with empty prefix', () => { 51 | doAllTestsWithPrefix(undefined); 52 | }); 53 | 54 | runPersistentFeatureStoreSeparatePrefixesTest(createStore, clearExistingData); 55 | } 56 | 57 | function runPersistentFeatureStoreUncachedTests(prefix, createStore, clearExistingData) { 58 | const feature1 = { key: 'foo', version: 10 }; 59 | 60 | async function testInitStateDetection(initData) { 61 | await clearExistingData(prefix); 62 | await withCloseable(createStore(prefix, 0, logger), async store1 => { 63 | await withCloseable(createStore(prefix, 0, logger), async store2 => { 64 | const result1 = await promisifySingle(store1.initialized)(); 65 | expect(result1).toBe(false); 66 | 67 | await promisifySingle(store2.init)(initData); 68 | const result2 = await promisifySingle(store1.initialized)(); 69 | expect(result2).toBe(true); 70 | }); 71 | }); 72 | } 73 | 74 | it('can detect if another instance has initialized the store', async () => { 75 | await testInitStateDetection({ features: { foo: feature1 } }); 76 | }); 77 | 78 | it('can detect if another instance has initialized the store, even with empty data', async () => { 79 | await testInitStateDetection({ features: {} }); 80 | }); 81 | } 82 | 83 | function runPersistentFeatureStoreSeparatePrefixesTest(createStore, clearExistingData) { 84 | it('is independent from other instances with different prefixes', async () => { 85 | const prefix1 = 'a'; 86 | const prefix2 = 'b'; 87 | const flag = { key: 'flag', version: 1 }; 88 | await clearExistingData(prefix1); 89 | await clearExistingData(prefix2); 90 | await 
withCloseable(createStore(prefix1, 0, logger), async storeA => { 91 | await promisifySingle(storeA.init)({ features: { flag: flag } }); 92 | await withCloseable(createStore(prefix2, 0, logger), async storeB => { 93 | await promisifySingle(storeB.init)({ features: {} }); 94 | // create another instance just to make sure we're not reading cached data 95 | await withCloseable(createStore(prefix2, 0, logger), async storeB1 => { 96 | const item1 = await promisifySingle(storeB1.get)(dataKind.features, 'flag'); 97 | expect(item1).toBe(null); 98 | const item2 = await promisifySingle(storeA.get)(dataKind.features, 'flag'); 99 | expect(item2).toEqual(flag); 100 | }); 101 | }); 102 | }); 103 | }); 104 | } 105 | 106 | function runPersistentFeatureStoreConcurrentUpdateTests( 107 | prefix, 108 | createStore, 109 | clearExistingData, 110 | createStoreWithConcurrentUpdateHook 111 | ) { 112 | const flagKey = 'flag'; 113 | const initialVersion = 1; 114 | 115 | function makeFlagWithVersion(v) { 116 | return { key: flagKey, version: v }; 117 | } 118 | 119 | async function initStore(store) { 120 | const allData = { features: {} }; 121 | allData['features'][flagKey] = makeFlagWithVersion(initialVersion); 122 | await promisifySingle(store.init)(allData); 123 | } 124 | 125 | function writeCompetingVersions(competingStore, flagVersionsToWrite) { 126 | let i = 0; 127 | return callback => { 128 | if (i < flagVersionsToWrite.length) { 129 | const newFlag = makeFlagWithVersion(flagVersionsToWrite[i]); 130 | i++; 131 | competingStore.upsert(dataKind.features, newFlag, callback); 132 | } else { 133 | callback(); 134 | } 135 | }; 136 | } 137 | 138 | it('handles upsert race condition against other client with lower version', async () => { 139 | await clearExistingData(prefix); 140 | await withCloseable(createStore(prefix, 0, logger), async competingStore => { 141 | const myDesiredVersion = 10; 142 | const competingStoreVersions = [2, 3, 4]; // proves that we can retry multiple times if necessary 
143 | 144 | const myStore = createStoreWithConcurrentUpdateHook( 145 | prefix, 146 | logger, 147 | writeCompetingVersions(competingStore, competingStoreVersions) 148 | ); 149 | await withCloseable(myStore, async myStore => { 150 | await initStore(myStore); 151 | await promisifySingle(myStore.upsert)(dataKind.features, makeFlagWithVersion(myDesiredVersion)); 152 | const result = await promisifySingle(myStore.get)(dataKind.features, flagKey); 153 | expect(result.version).toEqual(myDesiredVersion); 154 | }); 155 | }); 156 | }); 157 | 158 | it('handles upsert race condition against other client with higher version', async () => { 159 | await clearExistingData(prefix); 160 | await withCloseable(createStore(prefix, 0, logger), async competingStore => { 161 | const myDesiredVersion = 2; 162 | const competingStoreVersion = 3; 163 | 164 | const myStore = createStoreWithConcurrentUpdateHook( 165 | prefix, 166 | logger, 167 | writeCompetingVersions(competingStore, [competingStoreVersion]) 168 | ); 169 | await withCloseable(myStore, async myStore => { 170 | await initStore(myStore); 171 | await promisifySingle(myStore.upsert)(dataKind.features, makeFlagWithVersion(myDesiredVersion)); 172 | const result = await promisifySingle(myStore.get)(dataKind.features, flagKey); 173 | expect(result.version).toEqual(competingStoreVersion); 174 | }); 175 | }); 176 | }); 177 | } 178 | 179 | module.exports = { 180 | runPersistentFeatureStoreTests, 181 | runPersistentFeatureStoreUncachedTests, 182 | runPersistentFeatureStoreConcurrentUpdateTests, 183 | }; 184 | -------------------------------------------------------------------------------- /sharedtest/store_tests.js: -------------------------------------------------------------------------------- 1 | const { runBigSegmentStoreTests } = require('./big_segment_store_tests'); 2 | const { runPersistentFeatureStoreTests } = require('./persistent_feature_store_tests'); 3 | 4 | module.exports = { 5 | runBigSegmentStoreTests, 6 | 
runPersistentFeatureStoreTests, 7 | }; 8 | -------------------------------------------------------------------------------- /streaming.js: -------------------------------------------------------------------------------- 1 | const errors = require('./errors'); 2 | const httpUtils = require('./utils/httpUtils'); 3 | const messages = require('./messages'); 4 | const { EventSource } = require('launchdarkly-eventsource'); 5 | const dataKind = require('./versioned_data_kind'); 6 | 7 | // The read timeout for the stream is a fixed value that is set to be slightly longer than the expected 8 | // interval between heartbeats from the LaunchDarkly streaming server. If this amount of time elapses 9 | // with no new data, the connection will be cycled. 10 | const streamReadTimeoutMillis = 5 * 60 * 1000; // 5 minutes 11 | 12 | // Note that the requestor parameter is unused now that LD no longer uses "indirect" stream 13 | // events. The parameter is retained here for backward compatibility with any code that uses 14 | // this constructor directly, since it is documented in index.d.ts. 15 | function StreamProcessor(sdkKey, config, requestor, diagnosticsManager, specifiedEventSourceFactory) { 16 | const processor = {}, 17 | featureStore = config.featureStore; 18 | let es; 19 | let connectionAttemptStartTime; 20 | 21 | const headers = httpUtils.getDefaultHeaders(sdkKey, config); 22 | 23 | const eventSourceFactory = specifiedEventSourceFactory || EventSource; 24 | 25 | function getKeyFromPath(kind, path) { 26 | return path.startsWith(kind.streamApiPath) ? 
path.substring(kind.streamApiPath.length) : null; 27 | } 28 | 29 | function logConnectionStarted() { 30 | connectionAttemptStartTime = new Date().getTime(); 31 | } 32 | 33 | function logConnectionResult(success) { 34 | if (connectionAttemptStartTime && diagnosticsManager) { 35 | diagnosticsManager.recordStreamInit( 36 | connectionAttemptStartTime, 37 | !success, 38 | new Date().getTime() - connectionAttemptStartTime 39 | ); 40 | } 41 | connectionAttemptStartTime = null; 42 | } 43 | 44 | processor.start = fn => { 45 | const cb = fn || function () {}; 46 | 47 | logConnectionStarted(); 48 | 49 | function handleError(err) { 50 | // launchdarkly-eventsource expects this function to return true if it should retry, false to shut down. 51 | if (err.status && !errors.isHttpErrorRecoverable(err.status)) { 52 | const message = messages.httpErrorMessage(err, 'streaming request'); 53 | config.logger.error(message); 54 | logConnectionResult(false); 55 | cb(new errors.LDStreamingError(err.message, err.status)); 56 | return false; 57 | } 58 | const message = messages.httpErrorMessage(err, 'streaming request', 'will retry'); 59 | config.logger.warn(message); 60 | logConnectionResult(false); 61 | logConnectionStarted(); 62 | return true; 63 | } 64 | 65 | es = new eventSourceFactory(config.streamUri + '/all', { 66 | agent: config.proxyAgent, 67 | errorFilter: handleError, 68 | headers, 69 | initialRetryDelayMillis: 1000 * config.streamInitialReconnectDelay, 70 | readTimeoutMillis: streamReadTimeoutMillis, 71 | retryResetIntervalMillis: 60000, 72 | tlsParams: config.tlsParams, 73 | }); 74 | 75 | es.onclose = () => { 76 | config.logger.info('Closed LaunchDarkly stream connection'); 77 | }; 78 | 79 | // This stub handler only exists because error events must have a listener; handleError() does the work. 
80 | es.onerror = () => {}; 81 | 82 | es.onopen = () => { 83 | config.logger.info('Opened LaunchDarkly stream connection'); 84 | }; 85 | 86 | es.onretrying = e => { 87 | config.logger.info('Will retry stream connection in ' + e.delayMillis + ' milliseconds'); 88 | }; 89 | 90 | function reportJsonError(type, data) { 91 | config.logger.error('Stream received invalid data in "' + type + '" message'); 92 | config.logger.debug('Invalid JSON follows: ' + data); 93 | cb(new errors.LDStreamingError('Malformed JSON data in event stream')); 94 | } 95 | 96 | es.addEventListener('put', e => { 97 | config.logger.debug('Received put event'); 98 | if (e && e.data) { 99 | logConnectionResult(true); 100 | let all; 101 | try { 102 | all = JSON.parse(e.data); 103 | } catch (err) { 104 | reportJsonError('put', e.data); 105 | return; 106 | } 107 | const initData = {}; 108 | initData[dataKind.features.namespace] = all.data.flags; 109 | initData[dataKind.segments.namespace] = all.data.segments; 110 | featureStore.init(initData, () => { 111 | cb(); 112 | }); 113 | } else { 114 | cb(new errors.LDStreamingError('Unexpected payload from event stream')); 115 | } 116 | }); 117 | 118 | es.addEventListener('patch', e => { 119 | config.logger.debug('Received patch event'); 120 | if (e && e.data) { 121 | let patch; 122 | try { 123 | patch = JSON.parse(e.data); 124 | } catch (err) { 125 | reportJsonError('patch', e.data); 126 | return; 127 | } 128 | for (const kind of Object.values(dataKind)) { 129 | const key = getKeyFromPath(kind, patch.path); 130 | if (key !== null) { 131 | config.logger.debug('Updating ' + key + ' in ' + kind.namespace); 132 | featureStore.upsert(kind, patch.data); 133 | break; 134 | } 135 | } 136 | } else { 137 | cb(new errors.LDStreamingError('Unexpected payload from event stream')); 138 | } 139 | }); 140 | 141 | es.addEventListener('delete', e => { 142 | config.logger.debug('Received delete event'); 143 | if (e && e.data) { 144 | let data; 145 | try { 146 | data = 
JSON.parse(e.data); 147 | } catch (err) { 148 | reportJsonError('delete', e.data); 149 | return; 150 | } 151 | const version = data.version; 152 | for (const kind of Object.values(dataKind)) { 153 | const key = getKeyFromPath(kind, data.path); 154 | if (key !== null) { 155 | config.logger.debug('Deleting ' + key + ' in ' + kind.namespace); 156 | featureStore.delete(kind, key, version); 157 | break; 158 | } 159 | } 160 | } else { 161 | cb(new errors.LDStreamingError('Unexpected payload from event stream')); 162 | } 163 | }); 164 | }; 165 | 166 | processor.stop = () => { 167 | if (es) { 168 | es.close(); 169 | } 170 | }; 171 | 172 | processor.close = () => { 173 | processor.stop(); 174 | }; 175 | 176 | return processor; 177 | } 178 | 179 | module.exports = StreamProcessor; 180 | -------------------------------------------------------------------------------- /test/LDClient-big-segments-test.js: -------------------------------------------------------------------------------- 1 | const { hashForUserKey } = require('../big_segments'); 2 | const { makeBigSegmentRef } = require('../evaluator'); 3 | const { TestData } = require('../integrations'); 4 | const stubs = require('./stubs'); 5 | const { makeSegmentMatchClause } = require('./evaluator_helpers'); 6 | const { withCloseable } = require('launchdarkly-js-test-helpers'); 7 | 8 | describe('LDClient - big segments', () => { 9 | 10 | const user = { key: 'userkey' }; 11 | const bigSegment = { 12 | key: 'segmentkey', 13 | version: 1, 14 | unbounded: true, 15 | generation: 2, 16 | }; 17 | const flag = { 18 | key: 'flagkey', 19 | on: true, 20 | variations: [ false, true ], 21 | fallthrough: { variation: 0 }, 22 | rules: [ 23 | { variation: 1, clauses: [ makeSegmentMatchClause(bigSegment) ] }, 24 | ], 25 | } 26 | 27 | async function makeClient(bigSegmentsStore, config) { 28 | const td = TestData(); 29 | td.usePreconfiguredFlag(flag); 30 | td.usePreconfiguredSegment(bigSegment); 31 | 32 | const bigSegmentsConfig = { 33 | store: 
bigSegmentsStore && (() => bigSegmentsStore), 34 | ...(config && config.bigSegments), 35 | }; 36 | 37 | return stubs.createClient({ ...config, updateProcessor: td, bigSegments: bigSegmentsConfig }); 38 | } 39 | 40 | it('user not found in big segment store', async () => { 41 | const store = { 42 | getMetadata: async () => { return { lastUpToDate: new Date().getTime() } }, 43 | getUserMembership: async userHash => null, 44 | }; 45 | 46 | await withCloseable(await makeClient(store), async client => { 47 | await client.waitForInitialization(); 48 | const result = await client.variationDetail(flag.key, user, false); 49 | expect(result.value).toBe(false); 50 | expect(result.reason.bigSegmentsStatus).toEqual('HEALTHY'); 51 | }); 52 | }); 53 | 54 | it('user found, segment matched', async () => { 55 | const membership = { [makeBigSegmentRef(bigSegment)]: true }; 56 | const store = { 57 | getMetadata: async () => { return { lastUpToDate: new Date().getTime() } }, 58 | getUserMembership: async userHash => (userHash === hashForUserKey(user.key) ? 
membership : null), 59 | }; 60 | 61 | await withCloseable(await makeClient(store), async client => { 62 | await client.waitForInitialization(); 63 | const result = await client.variationDetail(flag.key, user, false); 64 | expect(result.value).toBe(true); 65 | expect(result.reason.bigSegmentsStatus).toEqual('HEALTHY'); 66 | }); 67 | }); 68 | 69 | it('store error', async () => { 70 | const store = { 71 | getMetadata: async () => { return { lastUpToDate: new Date().getTime() } }, 72 | getUserMembership: async userHash => { throw new Error("sorry") }, 73 | }; 74 | 75 | await withCloseable(await makeClient(store), async client => { 76 | await client.waitForInitialization(); 77 | const result = await client.variationDetail(flag.key, user, false); 78 | expect(result.value).toBe(false); 79 | expect(result.reason.bigSegmentsStatus).toEqual('STORE_ERROR'); 80 | }); 81 | }); 82 | 83 | it('not configured', async () => { 84 | await withCloseable(await makeClient(null), async client => { 85 | await client.waitForInitialization(); 86 | const result = await client.variationDetail(flag.key, user, false); 87 | expect(result.value).toBe(false); 88 | expect(result.reason.bigSegmentsStatus).toEqual('NOT_CONFIGURED'); 89 | }); 90 | }); 91 | 92 | }); 93 | -------------------------------------------------------------------------------- /test/LDClient-listeners-test.js: -------------------------------------------------------------------------------- 1 | const { TestData } = require('../integrations'); 2 | 3 | const { withClient } = require('./stubs'); 4 | import { AsyncQueue } from 'launchdarkly-js-test-helpers'; 5 | import { makeFlagWithSegmentMatch } from './evaluator_helpers'; 6 | 7 | describe('LDClient event listeners', () => { 8 | describe('flag change events', () => { 9 | it('sends event when flag is added', async () => { 10 | const td = TestData(); 11 | await withClient({ updateProcessor: td }, async client => { 12 | const changes = new AsyncQueue(); 13 | client.on('update', params 
=> changes.add(params)); 14 | 15 | td.update(td.flag('new-flag')); 16 | 17 | const change = await changes.take(); 18 | expect(change.key).toEqual('new-flag'); 19 | }); 20 | }); 21 | 22 | it('sends event when flag is updated', async () => { 23 | const td = TestData(); 24 | td.update(td.flag('flag1').on(true)); 25 | td.update(td.flag('flag2').on(true)); 26 | 27 | await withClient({ updateProcessor: td }, async client => { 28 | const changes = new AsyncQueue(); 29 | const flag2Changes = new AsyncQueue(); 30 | client.on('update', params => changes.add(params)); 31 | client.on('update:flag2', params => flag2Changes.add(params)); 32 | 33 | td.update(td.flag('flag1').on(false)); 34 | td.update(td.flag('flag2').on(false)); 35 | 36 | const change1 = await changes.take(); 37 | expect(change1.key).toEqual('flag1'); 38 | const change2 = await changes.take(); 39 | expect(change2.key).toEqual('flag2'); 40 | 41 | const flag2Change = await flag2Changes.take(); 42 | expect(flag2Change.key).toEqual('flag2'); 43 | }); 44 | }); 45 | 46 | it('sends an event when a segment used by a flag is updated', async () => { 47 | const td = TestData(); 48 | const segment = { 49 | key: 'segment1', 50 | includedContexts: [ {contextKind: 'org', values: ['org-key']} ], 51 | version: 1 52 | }; 53 | 54 | td.usePreconfiguredSegment(segment); 55 | td.usePreconfiguredFlag(makeFlagWithSegmentMatch(segment)); 56 | 57 | await withClient({ updateProcessor: td }, async client => { 58 | const flagChanges = new AsyncQueue(); 59 | client.on('update:feature', params => flagChanges.add(params)); 60 | 61 | td.usePreconfiguredSegment({ 62 | key: 'segment1', 63 | includedContexts: [ {contextKind: 'org', values: ['org-key', 'second-key']} ], 64 | version: 2 65 | }); 66 | 67 | const flagChange = await flagChanges.take(); 68 | expect(flagChange.key).toEqual('feature'); 69 | }); 70 | }); 71 | }); 72 | 73 | it('sends an event for a nested segment update', async () => { 74 | const td = TestData(); 75 | const segment1 = { 76 
| key: 'segment1', 77 | includedContexts: [ {contextKind: 'org', values: ['org-key']} ], 78 | version: 1 79 | }; 80 | const segment2 = { 81 | key: 'segment2', 82 | rules: [ 83 | { 84 | clauses: [ { attribute: '', op: 'segmentMatch', values: [segment1.key] } ], 85 | weight: 100000 86 | } 87 | ], 88 | version: 1 89 | }; 90 | td.usePreconfiguredSegment(segment1); 91 | td.usePreconfiguredSegment(segment2); 92 | td.usePreconfiguredFlag(makeFlagWithSegmentMatch(segment2)); 93 | 94 | await withClient({ updateProcessor: td }, async client => { 95 | const flagChanges = new AsyncQueue(); 96 | client.on('update:feature', params => flagChanges.add(params)); 97 | 98 | td.usePreconfiguredSegment({ 99 | key: 'segment1', 100 | includedContexts: [ {contextKind: 'org', values: ['org-key', 'second-key']} ], 101 | version: 2 102 | }); 103 | 104 | const flagChange = await flagChanges.take(); 105 | expect(flagChange.key).toEqual('feature'); 106 | }); 107 | }); 108 | 109 | it('does not hang on circular segment dependencies', async () => { 110 | const td = TestData(); 111 | const segment1 = { 112 | key: 'segment1', 113 | clauses: [ { attribute: '', op: 'segmentMatch', values: ['segment2'] } ], 114 | version: 1 115 | }; 116 | const segment2 = { 117 | key: 'segment2', 118 | rules: [ 119 | { 120 | clauses: [ { attribute: '', op: 'segmentMatch', values: [segment1.key] } ], 121 | weight: 100000 122 | } 123 | ], 124 | version: 1 125 | }; 126 | 127 | td.usePreconfiguredSegment(segment1); 128 | td.usePreconfiguredSegment(segment2); 129 | td.usePreconfiguredFlag(makeFlagWithSegmentMatch(segment2)); 130 | 131 | // The implementation happens to produce the event anyway, 132 | // but what we really care about is that it doesn't hang. 133 | // So, if in the future, it didn't produce an event for this malformed flag, 134 | // then that would likely be ok. The malformed nature of a circular 135 | // dependency should be transient. 
136 | await withClient({ updateProcessor: td }, async client => { 137 | const flagChanges = new AsyncQueue(); 138 | client.on('update:feature', params => flagChanges.add(params)); 139 | 140 | td.usePreconfiguredSegment({ 141 | key: 'segment1', 142 | includedContexts: [ {contextKind: 'org', values: ['org-key', 'second-key']} ], 143 | version: 2 144 | }); 145 | 146 | const flagChange = await flagChanges.take(); 147 | expect(flagChange.key).toEqual('feature'); 148 | }); 149 | }); 150 | 151 | describe('bigSegmentStoreStatusProvider', () => { 152 | it('returns unavailable status when not configured', async () => { 153 | await withClient({}, async client => { 154 | expect(client.bigSegmentStoreStatusProvider.getStatus()).toBeUndefined(); 155 | const status = await client.bigSegmentStoreStatusProvider.requireStatus(); 156 | expect(status.available).toBe(false); 157 | expect(status.stale).toBe(false); 158 | }); 159 | }); 160 | 161 | it('sends status updates', async () => { 162 | const store = { 163 | getMetadata: async () => { return { lastUpToDate: new Date().getTime() }; }, 164 | }; 165 | const config = { bigSegments: { store: () => store, statusPollInterval: 0.01 } }; 166 | await withClient(config, async client => { 167 | const status1 = await client.bigSegmentStoreStatusProvider.requireStatus(); 168 | expect(status1.available).toBe(true); 169 | 170 | const statuses = new AsyncQueue(); 171 | client.bigSegmentStoreStatusProvider.on('change', s => statuses.add(s)); 172 | 173 | store.getMetadata = async () => { throw new Exception('sorry'); }; 174 | 175 | const status2 = await statuses.take(); 176 | expect(status2.available).toBe(false); 177 | }); 178 | }); 179 | }); 180 | }); 181 | -------------------------------------------------------------------------------- /test/LDClient-test.js: -------------------------------------------------------------------------------- 1 | var LDClient = require('../index.js'); 2 | var messages = require('../messages'); 3 | var stubs = 
require('./stubs'); 4 | 5 | describe('LDClient', () => { 6 | 7 | describe('ready event', () => { 8 | it('is fired in offline mode', done => { 9 | var client = LDClient.init('sdk_key', { offline: true }); 10 | client.on('ready', () => { 11 | done(); 12 | }); 13 | }); 14 | }); 15 | 16 | describe('failed event', () => { 17 | it('is fired if initialization fails', done => { 18 | var updateProcessor = stubs.stubUpdateProcessor(); 19 | updateProcessor.error = { status: 403 }; 20 | var client = stubs.createClient({ updateProcessor: updateProcessor }, {}); 21 | 22 | client.on('failed', err => { 23 | expect(err).toEqual(updateProcessor.error); 24 | done(); 25 | }); 26 | }); 27 | }); 28 | 29 | describe('isOffline()', () => { 30 | it('returns true in offline mode', done => { 31 | var client = LDClient.init('sdk_key', {offline: true}); 32 | client.on('ready', () => { 33 | expect(client.isOffline()).toEqual(true); 34 | done(); 35 | }); 36 | }); 37 | }); 38 | 39 | describe('secureModeHash()', () => { 40 | it('correctly computes hash for a known message and secret', () => { 41 | var client = LDClient.init('secret', {offline: true}); 42 | var hash = client.secureModeHash({"key": "Message"}); 43 | expect(hash).toEqual("aa747c502a898200f9e4fa21bac68136f886a0e27aec70ba06daf2e2a5cb5597"); 44 | }); 45 | 46 | it.each([ 47 | [{key: 'Message'}, 'aa747c502a898200f9e4fa21bac68136f886a0e27aec70ba06daf2e2a5cb5597'], 48 | [{kind: 'user', key: 'Message'}, 'aa747c502a898200f9e4fa21bac68136f886a0e27aec70ba06daf2e2a5cb5597'], 49 | [{kind: 'org', key: 'orgtest'}, '40bc9b2e66a842e269ab98dad813e4e15203bbbfd91e8c96b92f3ae6f3f5e223'], 50 | [{kind: 'multi', user: {key: 'user:test'}, org: {key: 'org:test'}}, '607cc91526c615823e320dabca7967ce544fbe83bcb2b7287163f2d1c7aa210f'] 51 | ])('it uses the canonical key', (context, expectedHash) => { 52 | const client = LDClient.init('secret', {offline: true}); 53 | const hash = client.secureModeHash(context); 54 | 55 | expect(hash).toEqual(expectedHash); 56 | }); 
57 | }); 58 | 59 | describe('waitForInitialization()', () => { 60 | it('resolves when ready', async () => { 61 | var client = stubs.createClient({}, {}); 62 | await client.waitForInitialization(); 63 | }); 64 | 65 | it('resolves immediately if the client is already ready', async () => { 66 | var client = stubs.createClient({}, {}); 67 | await client.waitForInitialization(); 68 | await client.waitForInitialization(); 69 | }); 70 | 71 | it('is rejected if initialization fails', async () => { 72 | var err = { status: 403 }; 73 | var updateProcessor = stubs.stubUpdateProcessor(); 74 | updateProcessor.error = err; 75 | var client = stubs.createClient({ updateProcessor: updateProcessor }, {}); 76 | await expect(client.waitForInitialization()).rejects.toBe(err); 77 | }); 78 | 79 | it('creates only one Promise', async () => { 80 | const updateProcessor = stubs.stubUpdateProcessor(); 81 | updateProcessor.shouldInitialize = false; 82 | const client = stubs.createClient({ updateProcessor: updateProcessor }, {}); 83 | const p1 = client.waitForInitialization(); 84 | const p2 = client.waitForInitialization(); 85 | expect(p2).toBe(p1); 86 | }) 87 | }); 88 | 89 | describe('close()', () => { 90 | it('does not crash when closing an offline client', done => { 91 | var client = LDClient.init('sdk_key', {offline: true}); 92 | expect(() => client.close()).not.toThrow(); 93 | done(); 94 | }); 95 | }); 96 | }); 97 | -------------------------------------------------------------------------------- /test/LDClient-tls-test.js: -------------------------------------------------------------------------------- 1 | import * as LDClient from '../index'; 2 | 3 | import { 4 | AsyncQueue, 5 | TestHttpHandlers, 6 | TestHttpServer, 7 | failOnTimeout, 8 | withCloseable 9 | } from 'launchdarkly-js-test-helpers'; 10 | import * as stubs from './stubs'; 11 | 12 | describe('LDClient TLS configuration', () => { 13 | const sdkKey = 'secret'; 14 | let logger = stubs.stubLogger(); 15 | 16 | it('can connect via 
HTTPS to a server with a self-signed certificate, if CA is specified', async () => { 17 | await withCloseable(TestHttpServer.startSecure, async server => { 18 | server.forMethodAndPath('get', '/sdk/latest-all', TestHttpHandlers.respondJson({})); 19 | 20 | const config = { 21 | baseUri: server.url, 22 | sendEvents: false, 23 | stream: false, 24 | logger: stubs.stubLogger(), 25 | tlsParams: { ca: server.certificate }, 26 | diagnosticOptOut: true, 27 | }; 28 | 29 | await withCloseable(LDClient.init(sdkKey, config), async client => { 30 | await client.waitForInitialization(); 31 | }); 32 | }); 33 | }); 34 | 35 | it('cannot connect via HTTPS to a server with a self-signed certificate, using default config', async () => { 36 | await withCloseable(TestHttpServer.startSecure, async server => { 37 | server.forMethodAndPath('get', '/sdk/latest-all', TestHttpHandlers.respondJson({})); 38 | 39 | const logCapture = stubs.asyncLogCapture(); 40 | const config = { 41 | baseUri: server.url, 42 | sendEvents: false, 43 | stream: false, 44 | logger: logCapture.logger, 45 | diagnosticOptOut: true, 46 | }; 47 | 48 | await withCloseable(LDClient.init(sdkKey, config), async client => { 49 | const message1 = await failOnTimeout(logCapture.warn.take(), 1000, 'timed out waiting for log message'); 50 | expect(message1).toMatch(/only disable the streaming API/); // irrelevant message due to our use of polling mode 51 | const message2 = await failOnTimeout(logCapture.warn.take(), 1000, 'timed out waiting for log message'); 52 | expect(message2).toMatch(/self.signed/); 53 | }); 54 | }); 55 | }); 56 | 57 | it('can use custom TLS options for streaming as well as polling', async () => { 58 | await withCloseable(TestHttpServer.startSecure, async server => { 59 | const eventData = { data: { flags: { flag: { version: 1 } }, segments: {} } }; 60 | await withCloseable(new AsyncQueue(), async events => { 61 | events.add({ type: 'put', data: JSON.stringify(eventData) }); 62 | 
server.forMethodAndPath('get', '/stream/all', TestHttpHandlers.sseStream(events)); 63 | 64 | const config = { 65 | baseUri: server.url, 66 | streamUri: server.url + '/stream', 67 | sendEvents: false, 68 | logger: logger, 69 | tlsParams: { ca: server.certificate }, 70 | diagnosticOptOut: true, 71 | }; 72 | 73 | await withCloseable(LDClient.init(sdkKey, config), async client => { 74 | await client.waitForInitialization(); // this won't return until the stream receives the "put" event 75 | }); 76 | }); 77 | }); 78 | }); 79 | 80 | it('can use custom TLS options for posting events', async () => { 81 | await withCloseable(TestHttpServer.startSecure, async server => { 82 | server.forMethodAndPath('post', '/events/bulk', TestHttpHandlers.respond(200)); 83 | server.forMethodAndPath('get', '/sdk/latest-all', TestHttpHandlers.respondJson({})); 84 | 85 | const config = { 86 | baseUri: server.url, 87 | eventsUri: server.url + '/events', 88 | stream: false, 89 | logger: stubs.stubLogger(), 90 | tlsParams: { ca: server.certificate }, 91 | diagnosticOptOut: true, 92 | }; 93 | 94 | await withCloseable(LDClient.init(sdkKey, config), async client => { 95 | await client.waitForInitialization(); 96 | client.identify({ key: 'user' }); 97 | await client.flush(); 98 | 99 | const flagsRequest = await server.nextRequest(); 100 | expect(flagsRequest.path).toEqual('/sdk/latest-all'); 101 | 102 | const eventsRequest = await server.nextRequest(); 103 | expect(eventsRequest.path).toEqual('/events/bulk'); 104 | const eventData = JSON.parse(eventsRequest.body); 105 | expect(eventData.length).toEqual(1); 106 | expect(eventData[0].kind).toEqual('identify'); 107 | }); 108 | }); 109 | }); 110 | }); 111 | -------------------------------------------------------------------------------- /test/big_segments-test.js: -------------------------------------------------------------------------------- 1 | const { BigSegmentStoreManager, hashForUserKey } = require('../big_segments'); 2 | const { nullLogger } = 
require('../loggers'); 3 | const { AsyncQueue } = require('launchdarkly-js-test-helpers'); 4 | 5 | describe('BigSegmentStoreManager', () => { 6 | const userKey = 'userkey', userHash = hashForUserKey(userKey); 7 | const logger = nullLogger(); 8 | const alwaysUpToDate = async () => { 9 | return { lastUpToDate: new Date().getTime() }; 10 | }; 11 | const alwaysStale = async () => { 12 | return { lastUpToDate: new Date().getTime() - 1000000 }; 13 | }; 14 | function membershipForExpectedUser(expectedMembership) { 15 | return async (hash) => { 16 | expect(hash).toEqual(userHash); 17 | return expectedMembership; 18 | } 19 | } 20 | async function withManager(store, config, action) { 21 | const m = BigSegmentStoreManager(store, config, logger); 22 | try { 23 | await action(m); 24 | } finally { 25 | m.close(); 26 | } 27 | } 28 | 29 | describe('membership query', () => { 30 | it('with uncached result and healthy status', async () => { 31 | const expectedMembership = { key1: true, key2: true }; 32 | const store = { 33 | getMetadata: alwaysUpToDate, 34 | getUserMembership: membershipForExpectedUser(expectedMembership), 35 | }; 36 | await withManager(store, {}, async m => { 37 | const result = await m.getUserMembership(userKey); 38 | expect(result).toEqual([ expectedMembership, 'HEALTHY' ]); 39 | }); 40 | }); 41 | 42 | it('with cached result and healthy status', async () => { 43 | const expectedMembership = { key1: true, key2: true }; 44 | let queryCount = 0; 45 | const store = { 46 | getMetadata: alwaysUpToDate, 47 | getUserMembership: async hash => { 48 | queryCount++; 49 | return await membershipForExpectedUser(expectedMembership)(hash); 50 | }, 51 | }; 52 | await withManager(store, {}, async m => { 53 | const result1 = await m.getUserMembership(userKey); 54 | expect(result1).toEqual([ expectedMembership, 'HEALTHY' ]); 55 | const result2 = await m.getUserMembership(userKey); 56 | expect(result2).toEqual(result1); 57 | 58 | expect(queryCount).toEqual(1); 59 | }); 60 | }); 61 | 
62 | it('with stale status', async () => { 63 | const expectedMembership = { key1: true, key2: true }; 64 | const store = { 65 | getMetadata: alwaysStale, 66 | getUserMembership: membershipForExpectedUser(expectedMembership), 67 | }; 68 | await withManager(store, {}, async m => { 69 | const result = await m.getUserMembership(userKey); 70 | expect(result).toEqual([ expectedMembership, 'STALE' ]); 71 | }); 72 | }); 73 | 74 | it('with stale status due to no store metadata', async () => { 75 | const expectedMembership = { key1: true, key2: true }; 76 | const store = { 77 | getMetadata: async () => undefined, 78 | getUserMembership: membershipForExpectedUser(expectedMembership), 79 | }; 80 | await withManager(store, {}, async m => { 81 | const result = await m.getUserMembership(userKey); 82 | expect(result).toEqual([ expectedMembership, 'STALE' ]); 83 | }); 84 | }); 85 | 86 | it('least recent user is evicted from cache', async () => { 87 | const userKey1 = 'userkey1', userKey2 = 'userkey2', userKey3 = 'userkey3'; 88 | const userHash1 = hashForUserKey(userKey1), userHash2 = hashForUserKey(userKey2), userHash3 = hashForUserKey(userKey3); 89 | const memberships = {}; 90 | memberships[userHash1] = { seg1: true }; 91 | memberships[userHash2] = { seg2: true }; 92 | memberships[userHash3] = { seg3: true }; 93 | let queriedUsers = []; 94 | const store = { 95 | getMetadata: alwaysUpToDate, 96 | getUserMembership: async hash => { 97 | queriedUsers.push(hash); 98 | return memberships[hash]; 99 | }, 100 | }; 101 | const config = { userCacheSize: 2 }; 102 | await withManager(store, config, async m => { 103 | const result1 = await m.getUserMembership(userKey1); 104 | const result2 = await m.getUserMembership(userKey2); 105 | const result3 = await m.getUserMembership(userKey3); 106 | expect(result1).toEqual([ memberships[userHash1], 'HEALTHY' ]); 107 | expect(result2).toEqual([ memberships[userHash2], 'HEALTHY' ]); 108 | expect(result3).toEqual([ memberships[userHash3], 'HEALTHY' ]); 
109 | 110 | expect(queriedUsers).toEqual([ userHash1, userHash2, userHash3 ]); 111 | 112 | // Since the capacity is only 2 and userKey1 was the least recently used, that key should be 113 | // evicted by the userKey3 query. Now only userKey2 and userKey3 are in the cache, and 114 | // querying them again should not cause a new query to the store. 115 | 116 | const result2a = await m.getUserMembership(userKey2); 117 | const result3a = await m.getUserMembership(userKey3); 118 | expect(result2a).toEqual(result2); 119 | expect(result3a).toEqual(result3); 120 | 121 | expect(queriedUsers).toEqual([ userHash1, userHash2, userHash3 ]); 122 | 123 | const result1a = await m.getUserMembership(userKey1); 124 | expect(result1a).toEqual(result1); 125 | 126 | expect(queriedUsers).toEqual([ userHash1, userHash2, userHash3, userHash1 ]); 127 | }); 128 | }); 129 | }); 130 | 131 | describe('status polling', () => { 132 | it('detects store unavailability', async () => { 133 | const store = { 134 | getMetadata: alwaysUpToDate, 135 | }; 136 | await withManager(store, { statusPollInterval: 0.01 }, async m => { 137 | const status1 = await m.statusProvider.requireStatus(); 138 | expect(status1.available).toBe(true); 139 | 140 | const statuses = new AsyncQueue(); 141 | m.statusProvider.on('change', s => statuses.add(s)); 142 | 143 | store.getMetadata = async () => { throw new Error('sorry'); }; 144 | 145 | const status2 = await statuses.take(); 146 | expect(status2.available).toBe(false); 147 | expect(m.statusProvider.getStatus()).toEqual(status2); 148 | 149 | store.getMetadata = alwaysUpToDate; 150 | 151 | const status3 = await statuses.take(); 152 | expect(status3.available).toBe(true); 153 | expect(m.statusProvider.getStatus()).toEqual(status3); 154 | }); 155 | }); 156 | 157 | it('detects stale status', async () => { 158 | const store = { 159 | getMetadata: alwaysUpToDate, 160 | }; 161 | await withManager(store, { statusPollInterval: 0.01, staleAfter: 0.2 }, async m => { 162 | const 
status1 = await m.statusProvider.requireStatus(); 163 | expect(status1.stale).toBe(false); 164 | 165 | const statuses = new AsyncQueue(); 166 | m.statusProvider.on('change', s => statuses.add(s)); 167 | 168 | store.getMetadata = alwaysStale; 169 | 170 | const status2 = await statuses.take(); 171 | expect(status2.stale).toBe(true); 172 | expect(m.statusProvider.getStatus()).toEqual(status2); 173 | 174 | store.getMetadata = alwaysUpToDate; 175 | 176 | const status3 = await statuses.take(); 177 | expect(status3.stale).toBe(false); 178 | expect(m.statusProvider.getStatus()).toEqual(status3); 179 | }); 180 | }); 181 | }); 182 | }); 183 | -------------------------------------------------------------------------------- /test/context-test.js: -------------------------------------------------------------------------------- 1 | const { checkContext, getContextKinds, getCanonicalKey } = require('../context'); 2 | 3 | describe.each([ 4 | { key: 'test' }, 5 | { kind: 'user', key: 'test' }, 6 | { kind: 'multi', user: { key: 'test' } }]) 7 | ('given a context which contains a single kind', (context) => { 8 | it('should get the context kind', () => { 9 | expect(getContextKinds(context)).toEqual(['user']); 10 | }); 11 | 12 | it('should be valid', () => { 13 | expect(checkContext(context, false)).toBeTruthy(); 14 | }); 15 | }); 16 | 17 | describe('given a valid multi-kind context', () => { 18 | const context = { 19 | kind: 'multi', 20 | user: { 21 | key: 'user' 22 | }, 23 | org: { 24 | key: 'org' 25 | } 26 | }; 27 | 28 | it('should get a list of the kinds', () => { 29 | expect(getContextKinds(context).sort()).toEqual(['org', 'user']); 30 | }); 31 | 32 | it('should be valid', () => { 33 | expect(checkContext(context, false)).toBeTruthy(); 34 | }); 35 | }); 36 | 37 | // A sample of invalid characters. 
38 | const invalidSampleChars = [...`#$%&'()*+,/:;<=>?@[\\]^\`{|}~ ¡¢£¤¥¦§¨©ª«¬­®¯°±² 39 | ³´µ¶·¸¹º»¼½¾¿À汉字`]; 40 | const badKinds = invalidSampleChars.map(char => ({ kind: char, key: 'test' })); 41 | 42 | describe.each([ 43 | {}, // An empty object is not a valid context. 44 | { key: '' }, // If allowLegacyKey is not true, then this should be invalid. 45 | { kind: 'kind', key: 'kind' }, // The kind cannot be kind. 46 | { kind: 'user' }, // The context needs to have a key. 47 | { kind: 'org', key: '' }, // For a non-legacy context the key cannot be empty. 48 | { kind: ' ', key: 'test' }, // Kind cannot be whitespace only. 49 | { kind: 'cat dog', key: 'test' }, // Kind cannot contain whitespace 50 | { kind: '~!@#$%^&*()_+', key: 'test' }, // Special characters are not valid. 51 | ...badKinds, 52 | ])('given invalid contexts', (context) => { 53 | it('should not be valid', () => { 54 | expect(checkContext(context, false)).toBeFalsy(); 55 | }); 56 | }); 57 | 58 | const validChars = ['0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz-_.']; 59 | const goodKinds = validChars.map(char => ([{ kind: char, key: 'test' }, false])); 60 | 61 | describe.each([ 62 | [{ key: '' }, true], // Allow a legacy context with an empty key. 
63 | ...goodKinds 64 | ])('given valid contexts', (context, allowLegacyKey) => { 65 | it('should be valid and can get context kinds', () => { 66 | expect(checkContext(context, allowLegacyKey)).toBeTruthy(); 67 | expect(getContextKinds(context)).toEqual([context.kind || 'user']) 68 | }); 69 | }); 70 | 71 | describe('when determining canonical keys', () => { 72 | it.each([ 73 | [{ key: 'test' }, 'test'], 74 | [{ kind: 'user', key: 'test' }, 'test'], 75 | [{ kind: 'org', key: 'orgtest' }, 'org:orgtest'], 76 | [{ kind: 'multi', user: { key: 'usertest' } }, 'user:usertest'], 77 | [{ kind: 'multi', user: { key: 'usertest' }, org: { key: 'orgtest' } }, 'org:orgtest:user:usertest'], 78 | [{ kind: 'multi', user: { key: 'user:test' }, org: { key: 'org:test' } }, 'org:org%3Atest:user:user%3Atest'], 79 | [{ kind: 'multi', user: { key: 'user:test' }, org: { key: 'org:test' } }, 'org:org%3Atest:user:user%3Atest'], 80 | [{ kind: 'multi', user: { key: 'user%test' }, org: { key: 'org%test' } }, 'org:org%25test:user:user%25test'], 81 | [ 82 | { kind: 'multi', user: { key: 'user%:test' }, org: { key: 'org%:test' } }, 83 | 'org:org%25%3Atest:user:user%25%3Atest', 84 | ], 85 | ])('produces a canonical key for valid contexts', (context, canonicalKey) => { 86 | expect(getCanonicalKey(context)).toEqual(canonicalKey); 87 | }); 88 | 89 | it('does not break with an null/undefined context', () => { 90 | expect(getCanonicalKey(undefined)).toBeUndefined(); 91 | expect(getCanonicalKey(null)).toBeUndefined(); 92 | }); 93 | }); 94 | -------------------------------------------------------------------------------- /test/diagnostic_events-test.js: -------------------------------------------------------------------------------- 1 | const os = require('os'); 2 | const packageJson = require('../package.json'); 3 | const configuration = require('../configuration'); 4 | const { DiagnosticsManager, DiagnosticId } = require('../diagnostic_events'); 5 | 6 | describe('DiagnosticId', () => { 7 | it('uses last 
6 characters of SDK key', () => { 8 | const id = DiagnosticId('my-sdk-key'); 9 | expect(id.sdkKeySuffix).toEqual('dk-key'); 10 | }); 11 | 12 | it('creates random UUID', () => { 13 | const id0 = DiagnosticId('my-sdk-key'); 14 | const id1 = DiagnosticId('my-sdk-key'); 15 | expect(id0.diagnosticId).toBeTruthy(); 16 | expect(id1.diagnosticId).toBeTruthy(); 17 | expect(id0.diagnosticId).not.toEqual(id1.diagnosticId); 18 | }); 19 | }); 20 | 21 | describe('DiagnosticsManager', () => { 22 | const id = DiagnosticId('my-sdk-key'); 23 | const defaultConfig = configuration.validate({}); 24 | 25 | it('copies DiagnosticId', () => { 26 | const manager = DiagnosticsManager(defaultConfig, id, 100000); 27 | const event = manager.createInitEvent(); 28 | expect(event.id).toEqual(id); 29 | }); 30 | 31 | it('copies start time', () => { 32 | const manager = DiagnosticsManager(defaultConfig, id, 100000); 33 | const event = manager.createInitEvent(); 34 | expect(event.creationDate).toEqual(100000); 35 | }); 36 | 37 | it('provides SDK data', () => { 38 | const manager = DiagnosticsManager(defaultConfig, id, 100000); 39 | const event = manager.createInitEvent(); 40 | expect(event.sdk).toEqual({ 41 | name: 'node-server-sdk', 42 | version: packageJson.version 43 | }); 44 | }); 45 | 46 | it('provides platform data', () => { 47 | const manager = DiagnosticsManager(defaultConfig, id, 100000); 48 | const event = manager.createInitEvent(); 49 | expect(event.platform).toEqual({ 50 | name: 'Node', 51 | osArch: os.arch(), 52 | osName: event.platform.osName, // this may have been transformed by normalizePlatformName 53 | osVersion: os.release(), 54 | nodeVersion: process.versions.node, 55 | }); 56 | }); 57 | 58 | function verifyConfig(configIn, configOut) { 59 | const config = configuration.validate(configIn); 60 | const manager = DiagnosticsManager(config, id, 100000); 61 | const event = manager.createInitEvent(); 62 | expect(event.configuration).toMatchObject(configOut); 63 | } 64 | 65 | 
it('translates default configuration', () => { 66 | verifyConfig({}, { 67 | allAttributesPrivate: false, 68 | connectTimeoutMillis: 5000, 69 | customBaseURI: false, 70 | customEventsURI: false, 71 | customStreamURI: false, 72 | dataStoreType: 'memory', 73 | diagnosticRecordingIntervalMillis: 900000, 74 | eventsCapacity: 10000, 75 | eventsFlushIntervalMillis: 5000, 76 | offline: false, 77 | pollingIntervalMillis: 30000, 78 | reconnectTimeMillis: 1000, 79 | socketTimeoutMillis: 5000, 80 | streamingDisabled: false, 81 | contextKeysCapacity: 1000, 82 | contextKeysFlushIntervalMillis: 300000, 83 | usingProxy: false, 84 | usingProxyAuthenticator: false, 85 | usingRelayDaemon: false, 86 | }); 87 | }); 88 | 89 | it('translates custom configuration', () => { 90 | verifyConfig({ baseUri: 'http://other' }, { 91 | customBaseURI: true, 92 | customEventsURI: false, 93 | customStreamURI: false, 94 | }); 95 | verifyConfig({ eventsUri: 'http://other' }, { 96 | customBaseURI: false, 97 | customEventsURI: true, 98 | customStreamURI: false, 99 | }); 100 | verifyConfig({ streamUri: 'http://other' }, { 101 | customBaseURI: false, 102 | customEventsURI: false, 103 | customStreamURI: true, 104 | }); 105 | verifyConfig({ allAttributesPrivate: true }, { allAttributesPrivate: true }); 106 | verifyConfig({ timeout: 6 }, { connectTimeoutMillis: 6000, socketTimeoutMillis: 6000 }); 107 | verifyConfig({ diagnosticRecordingInterval: 999 }, { diagnosticRecordingIntervalMillis: 999000 }); 108 | verifyConfig({ capacity: 999 }, { eventsCapacity: 999 }); 109 | verifyConfig({ flushInterval: 33 }, { eventsFlushIntervalMillis: 33000 }); 110 | verifyConfig({ stream: false }, { streamingDisabled: true }); 111 | verifyConfig({ streamInitialReconnectDelay: 33 }, { reconnectTimeMillis: 33000 }); 112 | verifyConfig({ contextKeysCapacity: 111 }, { contextKeysCapacity: 111 }); 113 | verifyConfig({ contextKeysFlushInterval: 33 }, { contextKeysFlushIntervalMillis: 33000 }); 114 | verifyConfig({ useLdd: true }, { 
usingRelayDaemon: true }); 115 | 116 | const fakeProxy = {}; 117 | verifyConfig({ proxyAgent: fakeProxy }, { usingProxy: true, usingProxyAuthenticator: false }); 118 | verifyConfig({ proxyHost: 'my-proxy' }, { usingProxy: true, usingProxyAuthenticator: false }); 119 | verifyConfig({ proxyAgent: fakeProxy, proxyAuth: 'basic' }, { usingProxy: true, usingProxyAuthenticator: true }); 120 | 121 | const fakeStore = { description: 'WeirdStore' }; 122 | verifyConfig({ featureStore: fakeStore }, { dataStoreType: fakeStore.description }); 123 | }); 124 | 125 | it('creates periodic event from stats, then resets', () => { 126 | const manager = DiagnosticsManager(defaultConfig, id, 100000); 127 | const timeBeforeReset = new Date().getTime(); 128 | const event1 = manager.createStatsEventAndReset(4, 5, 6); 129 | 130 | expect(event1).toMatchObject({ 131 | kind: 'diagnostic', 132 | dataSinceDate: 100000, 133 | droppedEvents: 4, 134 | deduplicatedUsers: 5, 135 | eventsInLastBatch: 6, 136 | }); 137 | 138 | expect(event1.creationDate).toBeGreaterThanOrEqual(timeBeforeReset); 139 | 140 | const event2 = manager.createStatsEventAndReset(1, 2, 3); 141 | 142 | expect(event2).toMatchObject({ 143 | kind: 'diagnostic', 144 | dataSinceDate: event1.creationDate, 145 | droppedEvents: 1, 146 | deduplicatedUsers: 2, 147 | eventsInLastBatch: 3, 148 | }); 149 | 150 | expect(event2.creationDate).toBeGreaterThanOrEqual(event1.creationDate); 151 | }); 152 | }); 153 | -------------------------------------------------------------------------------- /test/evaluator-pre-conditions-test.js: -------------------------------------------------------------------------------- 1 | const { Evaluator } = require('../evaluator'); 2 | 3 | const { 4 | eventFactory, 5 | asyncEvaluate, 6 | } = require('./evaluator_helpers'); 7 | 8 | describe('when given a bad context', () => { 9 | it('handles a legacy user without a key', async () => { 10 | const [err, detail, events] = await asyncEvaluate(Evaluator(), {}, {}, 
eventFactory); 11 | expect(detail).toEqual({ 12 | value: null, 13 | variationIndex: null, 14 | reason: { 15 | kind: 'ERROR', 16 | errorKind: 'USER_NOT_SPECIFIED' 17 | } 18 | }); 19 | }); 20 | 21 | it('handles a single kind context without a key', async () => { 22 | const [err, detail, events] = await asyncEvaluate(Evaluator(), {}, { 23 | kind: 'user' 24 | }, eventFactory); 25 | expect(detail).toEqual({ 26 | value: null, 27 | variationIndex: null, 28 | reason: { 29 | kind: 'ERROR', 30 | errorKind: 'USER_NOT_SPECIFIED' 31 | } 32 | }); 33 | }); 34 | 35 | 36 | it.each(["", " ", "#^&%&^", "almost ", 8, true, {}]) 37 | ('handles a single kind context with an invalid kind', async (kind) => { 38 | const [err, detail, events] = await asyncEvaluate(Evaluator(), {}, { 39 | kind, 40 | key: 'goodKey' 41 | }, eventFactory); 42 | expect(detail).toEqual({ 43 | value: null, 44 | variationIndex: null, 45 | reason: { 46 | kind: 'ERROR', 47 | errorKind: 'USER_NOT_SPECIFIED' 48 | } 49 | }); 50 | }); 51 | 52 | // For a multi-kind context the act of making something a key will 53 | // produce a string. So testing non-string types is just testing 54 | // the characters they contain. 
55 | it.each(["", " ", "#^&%&^", "almost "]) 56 | ('handles a multi kind context with an invalid kind', async (kind) => { 57 | const context = { 58 | kind: 'multi', 59 | }; 60 | context[kind] = { 61 | key: 'goodKey' 62 | } 63 | const [err, detail, events] = await asyncEvaluate(Evaluator(), {}, context, eventFactory); 64 | expect(detail).toEqual({ 65 | value: null, 66 | variationIndex: null, 67 | reason: { 68 | kind: 'ERROR', 69 | errorKind: 'USER_NOT_SPECIFIED' 70 | } 71 | }); 72 | }); 73 | 74 | it.each([undefined, null]) 75 | ('handles a multi kind context with a context missing a key', async (key) => { 76 | const [err, detail, events] = await asyncEvaluate(Evaluator(), {}, { 77 | kind: 'multi', 78 | user: { 79 | key 80 | } 81 | }, eventFactory); 82 | expect(detail).toEqual({ 83 | value: null, 84 | variationIndex: null, 85 | reason: { 86 | kind: 'ERROR', 87 | errorKind: 'USER_NOT_SPECIFIED' 88 | } 89 | }); 90 | }); 91 | }); 92 | 93 | it('handles a missing flag', async () => { 94 | const [err, detail, events] = await asyncEvaluate(Evaluator(), undefined, { 95 | key: "userKey" 96 | }, eventFactory); 97 | expect(detail).toEqual({ 98 | value: null, 99 | variationIndex: null, 100 | reason: { 101 | kind: 'ERROR', 102 | errorKind: 'FLAG_NOT_FOUND' 103 | } 104 | }); 105 | }); 106 | -------------------------------------------------------------------------------- /test/evaluator-rule-test.js: -------------------------------------------------------------------------------- 1 | const { Evaluator } = require('../evaluator'); 2 | const { 3 | basicUser, 4 | basicSingleKindUser, 5 | basicMultiKindUser, 6 | eventFactory, 7 | makeFlagWithRules, 8 | asyncEvaluate, 9 | makeClauseThatMatchesUser, 10 | makeClauseThatDoesNotMatchUser, 11 | } = require('./evaluator_helpers'); 12 | 13 | // Tests of flag evaluation at the rule level. Clause-level behavior is covered in detail in 14 | // evaluator-clause-test and evaluator-segment-match-test. 
15 | 16 | // const basicUser = { key: 'userkey' }; 17 | // const singleKindUser = { kind: 'user', key: 'userkey' }; 18 | // const multiKindWithUser = { kind: 'multi', user: { key: 'userkey' } }; 19 | 20 | describe('Evaluator - rules with user kinds', () => { 21 | const matchClause = makeClauseThatMatchesUser(basicUser); 22 | const noMatchClause = makeClauseThatDoesNotMatchUser(basicUser); 23 | 24 | it.each([basicUser, basicSingleKindUser, basicMultiKindUser]) 25 | ('matches user from rules', async (userToTest) => { 26 | const rule0 = { id: 'id0', clauses: [noMatchClause], variation: 1 }; 27 | const rule1 = { id: 'id1', clauses: [matchClause], variation: 2 }; 28 | const flag = makeFlagWithRules([rule0, rule1]); 29 | const [err, detail, events] = await asyncEvaluate(Evaluator(), flag, userToTest, eventFactory); 30 | expect(detail).toMatchObject({ 31 | value: 'c', variationIndex: 2, 32 | reason: { kind: 'RULE_MATCH', ruleIndex: 1, ruleId: 'id1' } 33 | }); 34 | expect(events).toBeUndefined(); 35 | }); 36 | 37 | 38 | 39 | it.each([basicUser, basicSingleKindUser, basicMultiKindUser]) 40 | ('returns error if rule variation is too high', async (userToTest) => { 41 | const rule = { id: 'id', clauses: [matchClause], variation: 99 }; 42 | const flag = makeFlagWithRules([rule]); 43 | const [err, detail, events] = await asyncEvaluate(Evaluator(), flag, userToTest, eventFactory); 44 | expect(err).toEqual(Error('Invalid variation index in flag')); 45 | expect(detail).toMatchObject({ value: null, variationIndex: null, reason: { kind: 'ERROR', errorKind: 'MALFORMED_FLAG' } }); 46 | expect(events).toBeUndefined(); 47 | }); 48 | 49 | it.each([basicUser, basicSingleKindUser, basicMultiKindUser]) 50 | ('returns error if rule variation is negative', async (userToTest) => { 51 | const rule = { id: 'id', clauses: [matchClause], variation: -1 }; 52 | const flag = makeFlagWithRules([rule]); 53 | const [err, detail, events] = await asyncEvaluate(Evaluator(), flag, userToTest, eventFactory); 
54 | expect(err).toEqual(Error('Invalid variation index in flag')); 55 | expect(detail).toMatchObject({ value: null, variationIndex: null, reason: { kind: 'ERROR', errorKind: 'MALFORMED_FLAG' } }); 56 | expect(events).toBeUndefined(); 57 | }); 58 | 59 | it.each([basicUser, basicSingleKindUser, basicMultiKindUser]) 60 | ('returns error if rule has no variation or rollout', async (userToTest) => { 61 | const rule = { id: 'id', clauses: [matchClause] }; 62 | const flag = makeFlagWithRules([rule]); 63 | const [err, detail, events] = await asyncEvaluate(Evaluator(), flag, basicUser, eventFactory); 64 | expect(err).toEqual(Error('Variation/rollout object with no variation or rollout')); 65 | expect(detail).toMatchObject({ value: null, variationIndex: null, reason: { kind: 'ERROR', errorKind: 'MALFORMED_FLAG' } }); 66 | expect(events).toBeUndefined(); 67 | }); 68 | 69 | it.each([basicUser, basicSingleKindUser, basicMultiKindUser]) 70 | ('returns error if rule has rollout with no variations', async (userToTest) => { 71 | const rule = { id: 'id', clauses: [matchClause], rollout: { variations: [] } }; 72 | const flag = makeFlagWithRules([rule]); 73 | const [err, detail, events] = await asyncEvaluate(Evaluator(), flag, userToTest, eventFactory); 74 | expect(err).toEqual(Error('Variation/rollout object with no variation or rollout')); 75 | expect(detail).toMatchObject({ value: null, variationIndex: null, reason: { kind: 'ERROR', errorKind: 'MALFORMED_FLAG' } }); 76 | expect(events).toBeUndefined(); 77 | }); 78 | 79 | it.each([basicUser, basicSingleKindUser, basicMultiKindUser]) 80 | ('does not overflow the call stack when evaluating a huge number of rules', async (userToTest) => { 81 | const ruleCount = 5000; 82 | const flag = { 83 | key: 'flag', 84 | targets: [], 85 | on: true, 86 | variations: [false, true], 87 | fallthrough: { variation: 0 } 88 | }; 89 | // Note, for this test to be meaningful, the rules must *not* match the user, since we 90 | // stop evaluating rules on 
the first match. 91 | const rules = []; 92 | for (var i = 0; i < ruleCount; i++) { 93 | rules.push({ clauses: [noMatchClause], variation: 1 }); 94 | } 95 | flag.rules = rules; 96 | const [err, detail, events] = await asyncEvaluate(Evaluator(), flag, userToTest, eventFactory); 97 | expect(err).toEqual(null); 98 | expect(detail.value).toEqual(false); 99 | }); 100 | }); 101 | 102 | describe('Evaluator - rules with non-user kinds', () => { 103 | const targetKey = 'targetKey'; 104 | const targetContextKind = 'org'; 105 | const matchClause = { attribute: 'key', op: 'in', values: [targetKey], contextKind: targetContextKind } 106 | const noMatchClause = { attribute: 'key', op: 'in', values: ['not-' + targetKey], contextKind: targetContextKind } 107 | 108 | const singleKindContext = { 109 | kind: targetContextKind, 110 | key: targetKey 111 | }; 112 | const multiKindContext = { 113 | kind: 'multi', 114 | }; 115 | multiKindContext[targetContextKind] = { 116 | key: targetKey 117 | }; 118 | 119 | it.each([singleKindContext, multiKindContext]) 120 | ('matches user from rules', async (contextToTest) => { 121 | const rule0 = { id: 'id0', clauses: [noMatchClause], variation: 1 }; 122 | const rule1 = { id: 'id1', clauses: [matchClause], variation: 2 }; 123 | const flag = makeFlagWithRules([rule0, rule1]); 124 | const [err, detail, events] = await asyncEvaluate(Evaluator(), flag, contextToTest, eventFactory); 125 | expect(detail).toMatchObject({ 126 | value: 'c', variationIndex: 2, 127 | reason: { kind: 'RULE_MATCH', ruleIndex: 1, ruleId: 'id1' } 128 | }); 129 | expect(events).toBeUndefined(); 130 | }); 131 | }); 132 | -------------------------------------------------------------------------------- /test/evaluator_helpers.js: -------------------------------------------------------------------------------- 1 | const { EventFactory } = require('../event_factory'); 2 | 3 | // All three of these users should represent the same user. 
4 | const basicUser = { key: 'userkey' }; 5 | const basicSingleKindUser = { kind: 'user', key: 'userkey' }; 6 | const basicMultiKindUser = { kind: 'multi', user: { key: 'userkey' } }; 7 | 8 | const eventFactory = EventFactory(false); 9 | 10 | // Evaluator.evaluate uses a callback instead of a promise because it's a slightly more efficient 11 | // way to pass multiple return values. But for the purposes of our tests, it's much easier to use 12 | // a promise and async/await, so we'll transform it with this helper. Unlike usual Node promise 13 | // semantics, here we treat "err" as just another return parameter rather than throwing an error 14 | // (because the other parameters can still be non-null even if there's an error). 15 | function asyncEvaluate(evaluator, flag, user, eventFactory) { 16 | return new Promise(resolve => { 17 | evaluator.evaluate(flag, user, eventFactory, (err, detail, events) => resolve([err, detail, events])); 18 | }); 19 | } 20 | 21 | function makeFlagWithRules(rules, fallthrough) { 22 | if (!fallthrough) { 23 | fallthrough = { variation: 0 }; 24 | } 25 | return { 26 | key: 'feature', 27 | on: true, 28 | rules: rules, 29 | targets: [], 30 | fallthrough: fallthrough, 31 | offVariation: 1, 32 | variations: ['a', 'b', 'c'] 33 | }; 34 | } 35 | 36 | function makeBooleanFlagWithRules(rules) { 37 | return { 38 | key: 'feature', 39 | on: true, 40 | prerequisites: [], 41 | rules: rules, 42 | targets: [], 43 | salt: '', 44 | fallthrough: { variation: 0 }, 45 | offVariation: 0, 46 | variations: [false, true], 47 | version: 1 48 | }; 49 | } 50 | 51 | function makeBooleanFlagWithOneClause(clause) { 52 | return makeBooleanFlagWithRules([{ clauses: [clause], variation: 1 }]); 53 | } 54 | 55 | function makeFlagWithSegmentMatch(segment) { 56 | return makeBooleanFlagWithOneClause(makeSegmentMatchClause(segment)); 57 | } 58 | 59 | function makeClauseThatMatchesUser(user) { 60 | return { attribute: 'key', op: 'in', values: [user.key] }; 61 | } 62 | 63 | function 
makeClauseThatDoesNotMatchUser(user) { 64 | return { attribute: 'key', op: 'in', values: ['not-' + user.key] }; 65 | } 66 | 67 | function makeSegmentMatchClause(segment) { 68 | return { attribute: '', op: 'segmentMatch', values: [segment.key] }; 69 | } 70 | 71 | function prepareQueries(data) { 72 | let flagsMap = {}, segmentsMap = {}; 73 | for (const f of (data.flags || [])) { 74 | flagsMap[f.key] = f; 75 | } 76 | for (const s of (data.segments || [])) { 77 | segmentsMap[s.key] = s; 78 | } 79 | return { 80 | getFlag: (key, cb) => cb(flagsMap[key]), 81 | getSegment: (key, cb) => cb(segmentsMap[key]), 82 | getBigSegmentsMembership: (key, cb) => { 83 | if (data.bigSegments) { 84 | cb([data.bigSegments[key], 'HEALTHY']); 85 | } else { 86 | cb(null); 87 | } 88 | }, 89 | }; 90 | } 91 | 92 | module.exports = { 93 | basicUser, 94 | basicSingleKindUser, 95 | basicMultiKindUser, 96 | eventFactory, 97 | asyncEvaluate, 98 | makeFlagWithRules, 99 | makeBooleanFlagWithRules, 100 | makeBooleanFlagWithOneClause, 101 | makeFlagWithSegmentMatch, 102 | makeClauseThatMatchesUser, 103 | makeClauseThatDoesNotMatchUser, 104 | makeSegmentMatchClause, 105 | prepareQueries, 106 | }; 107 | -------------------------------------------------------------------------------- /test/event_summarizer-test.js: -------------------------------------------------------------------------------- 1 | var EventSummarizer = require('../event_summarizer'); 2 | 3 | describe('EventSummarizer', function() { 4 | 5 | var user = { key: 'key1' }; 6 | 7 | it('does nothing for identify event', function() { 8 | var es = EventSummarizer(); 9 | var snapshot = es.getSummary(); 10 | es.summarizeEvent({ kind: 'identify', creationDate: 1000, user: user }); 11 | expect(es.getSummary()).toEqual(snapshot); 12 | }); 13 | 14 | it('does nothing for custom event', function() { 15 | var es = EventSummarizer(); 16 | var snapshot = es.getSummary(); 17 | es.summarizeEvent({ kind: 'custom', creationDate: 1000, key: 'eventkey', context: 
user }); 18 | expect(es.getSummary()).toEqual(snapshot); 19 | }); 20 | 21 | it('sets start and end dates for feature events', function() { 22 | var es = EventSummarizer(); 23 | var event1 = { kind: 'feature', creationDate: 2000, key: 'key', context: user }; 24 | var event2 = { kind: 'feature', creationDate: 1000, key: 'key', context: user }; 25 | var event3 = { kind: 'feature', creationDate: 1500, key: 'key', context: user }; 26 | es.summarizeEvent(event1); 27 | es.summarizeEvent(event2); 28 | es.summarizeEvent(event3); 29 | var data = es.getSummary(); 30 | 31 | expect(data.startDate).toEqual(1000); 32 | expect(data.endDate).toEqual(2000); 33 | }); 34 | 35 | it('increments counters for feature events', function() { 36 | var es = EventSummarizer(); 37 | var event1 = { kind: 'feature', creationDate: 1000, key: 'key1', version: 11, context: user, 38 | variation: 1, value: 100, default: 111 }; 39 | var event2 = { kind: 'feature', creationDate: 1000, key: 'key1', version: 11, context: user, 40 | variation: 2, value: 200, default: 111 }; 41 | var event3 = { kind: 'feature', creationDate: 1000, key: 'key2', version: 22, context: user, 42 | variation: 1, value: 999, default: 222 }; 43 | var event4 = { kind: 'feature', creationDate: 1000, key: 'key1', version: 11, context: user, 44 | variation: 1, value: 100, default: 111 }; 45 | var event5 = { kind: 'feature', creationDate: 1000, key: 'badkey', context: user, 46 | value: 333, default: 333 }; 47 | var event6 = { kind: 'feature', creationDate: 1000, key: 'zero-version', version: 0, context: user, 48 | variation: 1, value: 100, default: 444 }; 49 | es.summarizeEvent(event1); 50 | es.summarizeEvent(event2); 51 | es.summarizeEvent(event3); 52 | es.summarizeEvent(event4); 53 | es.summarizeEvent(event5); 54 | es.summarizeEvent(event6); 55 | var data = es.getSummary(); 56 | 57 | data.features.key1.counters.sort(function(a, b) { return a.value - b.value; }); 58 | var expectedFeatures = { 59 | 'zero-version': { 60 | default: 444, 61 
| counters: [ 62 | { variation: 1, value: 100, version: 0, count: 1} 63 | ], 64 | contextKinds: ['user'] 65 | }, 66 | key1: { 67 | default: 111, 68 | counters: [ 69 | { variation: 1, value: 100, version: 11, count: 2 }, 70 | { variation: 2, value: 200, version: 11, count: 1 } 71 | ], 72 | contextKinds: ['user'] 73 | }, 74 | key2: { 75 | default: 222, 76 | counters: [ { variation: 1, value: 999, version: 22, count: 1 }], 77 | contextKinds: ['user'] 78 | }, 79 | badkey: { 80 | default: 333, 81 | counters: [ { value: 333, unknown: true, count: 1 }], 82 | contextKinds: ['user'] 83 | }, 84 | }; 85 | expect(data.features).toEqual(expectedFeatures); 86 | }); 87 | 88 | it('distinguishes between zero and null/undefined in feature variation', function() { 89 | var es = EventSummarizer(); 90 | var event1 = { kind: 'feature', creationDate: 1000, key: 'key1', version: 11, context: user, 91 | variation: 0, value: 100, default: 111 }; 92 | var event2 = { kind: 'feature', creationDate: 1000, key: 'key1', version: 11, context: user, 93 | variation: null, value: 111, default: 111 }; 94 | var event3 = { kind: 'feature', creationDate: 1000, key: 'key1', version: 11, context: user, 95 | /* variation undefined */ value: 111, default: 111 }; 96 | es.summarizeEvent(event1); 97 | es.summarizeEvent(event2); 98 | es.summarizeEvent(event3); 99 | var data = es.getSummary(); 100 | 101 | data.features.key1.counters.sort(function(a, b) { return a.value - b.value; }); 102 | var expectedFeatures = { 103 | key1: { 104 | default: 111, 105 | counters: [ 106 | { variation: 0, value: 100, version: 11, count: 1 }, 107 | { value: 111, version: 11, count: 2 } 108 | ], 109 | contextKinds: ['user'] 110 | } 111 | }; 112 | expect(data.features).toEqual(expectedFeatures); 113 | }); 114 | 115 | it('includes keys from all kinds', () => { 116 | const es = EventSummarizer(); 117 | const event1 = { 118 | kind: 'feature', creationDate: 1000, key: 'key1', version: 11, context: { key: "test" }, 119 | variation: 1, 
value: 100, default: 111 120 | }; 121 | const event2 = { 122 | kind: 'feature', creationDate: 1000, key: 'key1', version: 11, context: { kind: 'org', key: "test" }, 123 | variation: 1, value: 100, default: 111 124 | }; 125 | const event3 = { 126 | kind: 'feature', creationDate: 1000, key: 'key1', version: 11, 127 | context: { kind: 'multi', bacon: { key: "crispy" }, eggs: { key: "scrambled" } }, 128 | variation: 1, value: 100, default: 111 129 | }; 130 | es.summarizeEvent(event1); 131 | es.summarizeEvent(event2); 132 | es.summarizeEvent(event3); 133 | const data = es.getSummary(); 134 | 135 | const expectedFeatures = { 136 | key1: { 137 | default: 111, 138 | counters: [ 139 | { variation: 1, value: 100, version: 11, count: 3 }, 140 | ], 141 | contextKinds: ['user', 'org', 'bacon', 'eggs'] 142 | } 143 | }; 144 | expect(data.features).toEqual(expectedFeatures); 145 | }); 146 | }); 147 | -------------------------------------------------------------------------------- /test/feature_store-test.js: -------------------------------------------------------------------------------- 1 | const InMemoryFeatureStore = require('../feature_store'); 2 | const dataKind = require('../versioned_data_kind'); 3 | const { runFeatureStoreTests } = require('../sharedtest/feature_store_tests'); 4 | const stubs = require('./stubs'); 5 | const { promisifySingle } = require('launchdarkly-js-test-helpers'); 6 | 7 | describe('InMemoryFeatureStore', () => { 8 | runFeatureStoreTests( 9 | () => new InMemoryFeatureStore(), 10 | ); 11 | }); 12 | 13 | describe('custom feature store in configuration', () => { 14 | const defaultUser = { key: 'user' }; 15 | 16 | async function makeStoreWithFlag() { 17 | const store = new InMemoryFeatureStore(); 18 | const flag = { key: 'flagkey', on: false, offVariation: 0, variations: [ true ] }; 19 | const data = {}; 20 | data[dataKind.features.namespace] = { 'flagkey': flag }; 21 | await promisifySingle(store.init)(data); 22 | return store; 23 | } 24 | 25 | it('can be 
specified as an instance', async () => { 26 | const store = await makeStoreWithFlag(); 27 | const config = { featureStore: store }; 28 | const client = stubs.createClient(config); 29 | await client.waitForInitialization(); 30 | const result = await client.variation('flagkey', defaultUser, false); 31 | expect(result).toEqual(true); 32 | }); 33 | 34 | it('can be specified as a factory function', async () => { 35 | const store = await makeStoreWithFlag(); 36 | const config = { featureStore: () => store }; 37 | const client = stubs.createClient(config); 38 | await client.waitForInitialization(); 39 | const result = await client.variation('flagkey', defaultUser, false); 40 | expect(result).toEqual(true); 41 | }); 42 | }) -------------------------------------------------------------------------------- /test/feature_store_test_base.js: -------------------------------------------------------------------------------- 1 | var dataKind = require('../versioned_data_kind'); 2 | const { runFeatureStoreTests } = require('../sharedtest/feature_store_tests'); 3 | const { 4 | runPersistentFeatureStoreUncachedTests, 5 | runPersistentFeatureStoreConcurrentUpdateTests, 6 | } = require('../sharedtest/persistent_feature_store_tests'); 7 | 8 | // This file contains obsolete entry points with somewhat different semantics for the 9 | // standard test suites in sharedtest/store_tests. It is retained here because older versions 10 | // of the database integration packages reference this file directly, even though it was 11 | // never documented. It isn't referenced by the SDK's own tests, and can be removed in the 12 | // next major version. 

// Parameters:
// - makeStore(): creates an instance of the feature store
// - clearExistingData(callback): if specified, will be called before each test to clear any
//   storage that the store instances may be sharing; this also implies that the feature store
//   is persistent
// - isCached: true if the instances returned by makeStore() have caching enabled.
// - makeStoreWithPrefix(prefix): creates an uncached instance of the store with a key prefix
function baseFeatureStoreTests(makeStore, clearExistingData, isCached, makeStoreWithPrefix) {
  if (clearExistingData) {
    // We're testing a persistent feature store implementation.
    const asyncClearExistingData = () => new Promise(resolve => clearExistingData(resolve));
    runFeatureStoreTests(
      makeStore,
      asyncClearExistingData,
    );
    if (!isCached) {
      runPersistentFeatureStoreUncachedTests(
        // Fixed: this previously read the undefined name "makeStorePrefix" (a typo for
        // "makeStoreWithPrefix"), which threw a ReferenceError whenever the uncached
        // tests ran. Fall back to makeStore() when no prefix-aware factory is supplied.
        (prefix, cacheTTL, logger) => (makeStoreWithPrefix ? makeStoreWithPrefix(prefix) : makeStore()),
        asyncClearExistingData,
      );
    }
  } else {
    // We're testing an in-memory store or some other nonstandard implementation that doesn't
    // have shared-database semantics.
    // Fixed: clearExistingData is known to be falsy in this branch, so the previous code,
    // which passed a callback that invoked clearExistingData(resolve), would have thrown
    // if the test suite ever used it. Omit the optional cleanup parameter instead.
    runFeatureStoreTests(makeStore);
  }
}

// Parameters:
// - makeStore(): creates a normal feature store.
// - makeStoreWithHook(hook): creates a feature store that operates on the same underlying data as
//   the first store. This store will call the hook function (passing a callback) immediately before
//   it attempts to make any update.
49 | 50 | function concurrentModificationTests(makeStore, makeStoreWithHook) { 51 | runPersistentFeatureStoreConcurrentUpdateTests( 52 | prefix => makeStore(), 53 | (prefix, hook) => makeStoreWithHook(hook), 54 | ) 55 | } 56 | 57 | module.exports = { 58 | baseFeatureStoreTests: baseFeatureStoreTests, 59 | concurrentModificationTests: concurrentModificationTests 60 | }; 61 | -------------------------------------------------------------------------------- /test/loggers-test.js: -------------------------------------------------------------------------------- 1 | const loggers = require('../loggers'); 2 | 3 | describe('basicLogger', () => { 4 | it('uses console.error by default', () => { 5 | const realConsoleError = console.error; 6 | const mock = jest.fn(); 7 | try { 8 | console.error = mock; 9 | const logger = loggers.basicLogger(); 10 | logger.warn('hello'); 11 | expect(mock).toHaveBeenCalledWith('warn: [LaunchDarkly] hello'); 12 | } finally { 13 | console.error = realConsoleError; 14 | } 15 | }); 16 | 17 | it('can write to an arbitrary function', () => { 18 | const outputFn = jest.fn(); 19 | const logger = loggers.basicLogger({ destination: outputFn }); 20 | logger.warn('hello'); 21 | expect(outputFn).toHaveBeenCalledWith('warn: [LaunchDarkly] hello'); 22 | }); 23 | 24 | it('throws an exception immediately if destination is not a function', () => { 25 | expect(() => loggers.basicLogger({ destination: 'Mars' })).toThrow(); 26 | }); 27 | 28 | it('does not use util.format if there is only one argument', () => { 29 | const outputFn = jest.fn(); 30 | const logger = loggers.basicLogger({ destination: outputFn }); 31 | logger.warn('%d things'); 32 | expect(outputFn).toHaveBeenCalledWith('warn: [LaunchDarkly] %d things'); 33 | }); 34 | 35 | it('does not use util.format if there are multiple arguments', () => { 36 | const outputFn = jest.fn(); 37 | const logger = loggers.basicLogger({ destination: outputFn }); 38 | logger.warn('%d things', 3); 39 | 
expect(outputFn).toHaveBeenCalledWith('warn: [LaunchDarkly] 3 things'); 40 | }); 41 | 42 | describe('output filtering by level', () => { 43 | const testLevel = (minLevel, enabledLevels) => { 44 | it('level: ' + minLevel, () => { 45 | const outputFn = jest.fn(); 46 | const config = { destination: outputFn }; 47 | if (minLevel) { 48 | config.level = minLevel; 49 | } 50 | const logger = loggers.basicLogger({ level: minLevel, destination: outputFn }); 51 | logger.debug('some debug output'); 52 | logger.info('some info output'); 53 | logger.warn('some warn output'); 54 | logger.error('some error output'); 55 | for (const [level, shouldBeEnabled] of Object.entries(enabledLevels)) { 56 | const line = level + ': [LaunchDarkly] some ' + level + ' output'; 57 | if (shouldBeEnabled) { 58 | expect(outputFn).toHaveBeenCalledWith(line); 59 | } else { 60 | expect(outputFn).not.toHaveBeenCalledWith(line); 61 | } 62 | } 63 | }); 64 | }; 65 | 66 | testLevel('debug', { 'debug': true, 'info': true, 'warn': true, 'error': true }); 67 | testLevel('info', { 'debug': false, 'info': true, 'warn': true, 'error': true }); 68 | testLevel('warn', { 'debug': false, 'info': false, 'warn': true, 'error': true }); 69 | testLevel('error', { 'debug': false, 'info': false, 'warn': false, 'error': true }); 70 | testLevel('none', { 'debug': false, 'info': false, 'warn': false, 'error': false }); 71 | 72 | // default is info 73 | testLevel(undefined, { 'debug': false, 'info': true, 'warn': true, 'error': true }); 74 | }); 75 | }); 76 | 77 | describe('safeLogger', () => { 78 | function mockLogger() { 79 | return { 80 | error: jest.fn(), 81 | warn: jest.fn(), 82 | info: jest.fn(), 83 | debug: jest.fn(), 84 | }; 85 | } 86 | 87 | const levels = ['error', 'warn', 'info', 'debug']; 88 | 89 | it('throws an error if you pass in a logger that does not conform to the LDLogger schema', () => { 90 | const fallbackLogger = mockLogger(); 91 | 92 | // If the method does not exist 93 | levels.forEach(method => { 94 | 
const logger = mockLogger(); 95 | delete logger[method]; 96 | expect(() => loggers.safeLogger(logger, fallbackLogger)).toThrow(/Provided logger instance must support .* method/); 97 | }); 98 | 99 | // If the method is not a function 100 | levels.forEach(method => { 101 | const logger = mockLogger(); 102 | logger[method] = 'invalid'; 103 | expect(() => loggers.safeLogger(logger, fallbackLogger)).toThrow(/Provided logger instance must support .* method/); 104 | }); 105 | }); 106 | 107 | it('If a logger method throws an error, the error is caught and logged, then the fallback logger is called', () => { 108 | const err = Error('Something bad happened'); 109 | 110 | levels.forEach(level => { 111 | const logger = mockLogger(); 112 | logger[level] = jest.fn(() => { 113 | throw err 114 | }); 115 | const fallbackLogger = mockLogger(); 116 | const wrappedLogger = loggers.safeLogger(logger, fallbackLogger); 117 | 118 | expect(() => wrappedLogger[level]('this is a logline', 'with multiple', 'arguments')).not.toThrow(); 119 | 120 | expect(fallbackLogger.error).toHaveBeenNthCalledWith(1, 'Error calling provided logger instance method ' + level + ': ' + err); 121 | 122 | const nthCall = level === 'error' ? 
2 : 1; 123 | expect(fallbackLogger[level]).toHaveBeenNthCalledWith(nthCall, 'this is a logline', 'with multiple', 'arguments'); 124 | }); 125 | }); 126 | }); 127 | -------------------------------------------------------------------------------- /test/operators-test.js: -------------------------------------------------------------------------------- 1 | var assert = require('assert'); 2 | var operators = require('../operators'); 3 | 4 | describe('operators', function() { 5 | const paramsTable = [ 6 | // numeric comparisons 7 | [ 'in', 99, 99, true ], 8 | [ 'in', 99.0001, 99.0001, true ], 9 | [ 'in', 99, 99.0001, false ], 10 | [ 'in', 99.0001, 99, false ], 11 | [ 'lessThan', 99, 99.0001, true ], 12 | [ 'lessThan', 99.0001, 99, false ], 13 | [ 'lessThan', 99, 99, false ], 14 | [ 'lessThanOrEqual', 99, 99.0001, true ], 15 | [ 'lessThanOrEqual', 99.0001, 99, false ], 16 | [ 'lessThanOrEqual', 99, 99, true ], 17 | [ 'greaterThan', 99.0001, 99, true ], 18 | [ 'greaterThan', 99, 99.0001, false ], 19 | [ 'greaterThan', 99, 99, false ], 20 | [ 'greaterThanOrEqual', 99.0001, 99, true ], 21 | [ 'greaterThanOrEqual', 99, 99.0001, false ], 22 | [ 'greaterThanOrEqual', 99, 99, true ], 23 | 24 | // string comparisons 25 | [ 'in', 'x', 'x', true ], 26 | [ 'in', 'x', 'xyz', false ], 27 | [ 'startsWith', 'xyz', 'x', true ], 28 | [ 'startsWith', 'x', 'xyz', false ], 29 | [ 'endsWith', 'xyz', 'z', true ], 30 | [ 'endsWith', 'z', 'xyz', false ], 31 | [ 'contains', 'xyz', 'y', true ], 32 | [ 'contains', 'y', 'xyz', false ], 33 | 34 | // mixed strings and numbers 35 | [ 'in', '99', 99, false ], 36 | [ 'in', 99, '99', false ], 37 | [ 'contains', '99', 99, false ], 38 | [ 'startsWith', '99', 99, false ], 39 | [ 'endsWith', '99', 99, false ], 40 | [ 'lessThanOrEqual', '99', 99, false ], 41 | [ 'lessThanOrEqual', 99, '99', false ], 42 | [ 'greaterThanOrEqual', '99', 99, false ], 43 | [ 'greaterThanOrEqual', 99, '99', false ], 44 | 45 | // regex 46 | [ 'matches', 'hello world', 'hello.*rld', 
true ], 47 | [ 'matches', 'hello world', 'hello.*rl', true ], 48 | [ 'matches', 'hello world', 'l+', true ], 49 | [ 'matches', 'hello world', '(world|planet)', true ], 50 | [ 'matches', 'hello world', 'aloha', false ], 51 | [ 'matches', 'hello world', '***not a regex', false ], 52 | [ 'matches', 'hello world', 3, false ], 53 | [ 'matches', 3, 'hello', false ], 54 | 55 | // dates 56 | [ 'before', 0, 1, true ], 57 | [ 'before', -100, 0, true ], 58 | [ 'before', '1970-01-01T00:00:00Z', 1000, true ], 59 | [ 'before', '1970-01-01T00:00:00.500Z', 1000, true ], 60 | [ 'before', true, 1000, false ], // wrong type 61 | [ 'after', '1970-01-01T00:00:02.500Z', 1000, true ], 62 | [ 'after', '1970-01-01 00:00:02.500Z', 1000, false ], // malformed timestamp 63 | [ 'before', '1970-01-01T00:00:02+01:00', 1000, true ], 64 | [ 'before', -1000, 1000, true ], 65 | [ 'after', '1970-01-01T00:00:01.001Z', 1000, true ], 66 | [ 'after', '1970-01-01T00:00:00-01:00', 1000, true ], 67 | 68 | // semver 69 | [ 'semVerEqual', '2.0.1', '2.0.1', true ], 70 | [ 'semVerEqual', '2.0.1', '02.0.1', false ], // leading zeroes should be disallowed 71 | [ 'semVerEqual', '2.0', '2.0.0', true ], 72 | [ 'semVerEqual', '2', '2.0.0', true ], 73 | [ 'semVerEqual', '2-rc1', '2.0.0-rc1', true ], 74 | [ 'semVerEqual', '2+build2', '2.0.0+build2', true ], 75 | [ 'semVerEqual', '2.0.0', '2.0.0+build2', true ], // build metadata should be ignored in comparison 76 | [ 'semVerEqual', '2.0.0', '2.0.0-rc1', false ], // prerelease should not be ignored 77 | [ 'semVerEqual', '2.0.0', '2.0.0+build_2', false ], // enforce allowable character set in build metadata 78 | [ 'semVerEqual', '2.0.0', 'v2.0.0', false ], // disallow leading 'v' 79 | [ 'semVerLessThan', '2.0.0', '2.0.1', true ], 80 | [ 'semVerLessThan', '2.0', '2.0.1', true ], 81 | [ 'semVerLessThan', '2.0.1', '2.0.0', false ], 82 | [ 'semVerLessThan', '2.0.1', '2.0', false ], 83 | [ 'semVerLessThan', '2.0.0-rc', '2.0.0-rc.beta', true ], 84 | [ 'semVerLessThan', 
'2.0.0-rc', '2.0.0', true ], 85 | [ 'semVerLessThan', '2.0.0-rc.3', '2.0.0-rc.29', true ], 86 | [ 'semVerLessThan', '2.0.0-rc.x29', '2.0.0-rc.x3', true ], 87 | [ 'semVerGreaterThan', '2.0.1', '2.0.0', true ], 88 | [ 'semVerGreaterThan', '2.0.1', '2.0', true ], 89 | [ 'semVerGreaterThan', '2.0.0', '2.0.1', false ], 90 | [ 'semVerGreaterThan', '2.0', '2.0.1', false ], 91 | [ 'semVerGreaterThan', '2.0.0-rc.1', '2.0.0-rc.0', true ], 92 | [ 'semVerLessThan', '2.0.1', 'xbad%ver', false ], 93 | [ 'semVerGreaterThan', '2.0.1', 'xbad%ver', false ] 94 | ]; 95 | 96 | paramsTable.forEach(function(params) { 97 | it('result is ' + params[3] + ' for ' + JSON.stringify(params[1]) + ' ' + params[0] + ' ' + 98 | JSON.stringify(params[2]), function() { 99 | assert.equal(operators.fn(params[0])(params[1], params[2]), params[3]); 100 | }); 101 | }); 102 | }); 103 | -------------------------------------------------------------------------------- /test/polling-test.js: -------------------------------------------------------------------------------- 1 | const InMemoryFeatureStore = require('../feature_store'); 2 | const PollingProcessor = require('../polling'); 3 | const dataKind = require('../versioned_data_kind'); 4 | const { AsyncQueue, failOnResolve, failOnTimeout, promisify, promisifySingle } = require('launchdarkly-js-test-helpers'); 5 | const stubs = require('./stubs'); 6 | 7 | describe('PollingProcessor', () => { 8 | const longInterval = 100000; 9 | const allData = { flags: { flag: { version: 1 } }, segments: { segment: { version: 1 } } }; 10 | const jsonData = JSON.stringify(allData); 11 | 12 | let store; 13 | let config; 14 | let processor; 15 | 16 | beforeEach(() => { 17 | store = InMemoryFeatureStore(); 18 | config = { featureStore: store, pollInterval: longInterval, logger: stubs.stubLogger() }; 19 | }); 20 | 21 | afterEach(() => { 22 | processor && processor.stop(); 23 | }); 24 | 25 | it('makes no request before start', () => { 26 | const requestor = { 27 | requestAllData: 
jest.fn() 28 | }; 29 | processor = PollingProcessor(config, requestor); 30 | 31 | expect(requestor.requestAllData).not.toHaveBeenCalled(); 32 | }); 33 | 34 | it('polls immediately on start', () => { 35 | const requestor = { 36 | requestAllData: jest.fn() 37 | }; 38 | processor = PollingProcessor(config, requestor); 39 | 40 | processor.start(() => {}); 41 | 42 | expect(requestor.requestAllData).toHaveBeenCalledTimes(1); 43 | }); 44 | 45 | it('calls callback on success', async () => { 46 | const requestor = { 47 | requestAllData: cb => cb(null, jsonData) 48 | }; 49 | processor = PollingProcessor(config, requestor); 50 | 51 | const err = await new Promise(resolve => processor.start(resolve)); 52 | expect(err).not.toBe(expect.anything()); 53 | }); 54 | 55 | it('initializes feature store', async () => { 56 | const requestor = { 57 | requestAllData: cb => cb(null, jsonData) 58 | }; 59 | processor = PollingProcessor(config, requestor); 60 | 61 | await promisify(processor.start)(); 62 | 63 | const flags = await promisifySingle(store.all)(dataKind.features); 64 | expect(flags).toEqual(allData.flags); 65 | const segments = await promisifySingle(store.all)(dataKind.segments); 66 | expect(segments).toEqual(allData.segments); 67 | processor.stop(); 68 | }); 69 | 70 | it('polls repeatedly', async() => { 71 | const calls = new AsyncQueue(); 72 | const requestor = { 73 | requestAllData: cb => { 74 | calls.add(); 75 | cb(null, jsonData); 76 | } 77 | }; 78 | config.pollInterval = 0.05; // note, pollInterval is in seconds 79 | processor = PollingProcessor(config, requestor); 80 | 81 | processor.start(() => {}); 82 | const startTime = new Date().getTime(); 83 | for (let i = 0; i < 4; i++) { 84 | await failOnTimeout(calls.take(), 500, 'timed out waiting for poll request #' + (i + 1)); 85 | } 86 | expect(new Date().getTime() - startTime).toBeLessThanOrEqual(500); 87 | }); 88 | 89 | async function testRecoverableError(err) { 90 | const calls = new AsyncQueue(); 91 | let count = 0; 92 | 
// Shared scenario for statuses that should stop polling permanently: the requestor
// always fails with the given HTTP status. Verifies that the processor reports the
// fatal error to the start callback, logs exactly one error, and never makes a
// second poll request.
async function testUnrecoverableError(status) {
  const err = new Error('sorry');
  err.status = status;

  // Every poll attempt is recorded in `calls` so the test can count requests.
  const calls = new AsyncQueue();
  const requestor = {
    requestAllData: cb => {
      calls.add();
      cb(err);
    }
  };
  config.pollInterval = 0.05;
  processor = PollingProcessor(config, requestor);

  // start() should deliver the fatal error to its callback.
  const result = new AsyncQueue();
  processor.start(e => result.add(e));

  const errReceived = await failOnTimeout(result.take(), 1000, 'timed out waiting for initialization to complete');
  expect(errReceived.message).toMatch(new RegExp('error ' + status + '.*giving up permanently'));

  // Exactly one poll should have been made, and polling must not continue afterward.
  await failOnTimeout(calls.take(), 10, 'expected initial poll request but did not see one');
  await failOnResolve(calls.take(), 100, 'received unexpected second poll request');
  expect(config.logger.error).toHaveBeenCalledTimes(1);
}
// Happy path: requestAllData performs a GET to /sdk/latest-all and yields the
// raw response body as a JSON string.
it('gets data', async () =>
  await withCloseable(TestHttpServer.start, async server => {
    server.forMethodAndPath('get', '/sdk/latest-all', TestHttpHandlers.respondJson(allData));
    const r = Requestor(sdkKey, { baseUri: server.url });
    const result = await promisify(r.requestAllData)();
    // The requestor returns the unparsed body; the caller is responsible for parsing.
    expect(JSON.parse(result)).toEqual(allData);
  })
);
Unfortunately, we can't verify that the test suite would *fail* in cases where it ought to fail (if the store does not behave as expected), because in Jest there is no way to express a conditional expectation of failure. But
// In-memory stand-in for a real database, keyed by prefix and then by entry key.
const mockDatabase = {};

/**
 * Creates a mock BigSegmentStore backed by mockDatabase.
 * @param {string} prefix namespace within the mock database
 * @param {Object} logger unused by the mock implementation
 * @returns {Object} a store exposing getMetadata, getUserMembership, and close
 */
function createStore(prefix, logger) {
  mockDatabase[prefix] = mockDatabase[prefix] || {};
  return {
    // Metadata lives under the reserved '$metadata' key; an empty store reports
    // an undefined lastUpToDate rather than failing.
    getMetadata: async () => mockDatabase[prefix]['$metadata'] || { lastUpToDate: undefined },
    getUserMembership: async userHashKey => mockDatabase[prefix][userHashKey],
    close: () => {},
  };
}

// Resets all data stored under the given prefix.
async function clearExistingData(prefix) {
  mockDatabase[prefix] = {};
}

// Stores the metadata object under the reserved '$metadata' key for the prefix.
async function setMetadata(prefix, metadata) {
  mockDatabase[prefix] = mockDatabase[prefix] || {};
  mockDatabase[prefix]['$metadata'] = metadata;
}

// Stores a membership map for the user hash: excluded refs map to false,
// included refs to true. Inclusion wins when a ref appears in both lists,
// because the included entries are written second.
async function setSegments(prefix, userHashKey, included, excluded) {
  mockDatabase[prefix] = mockDatabase[prefix] || {};
  const membership = {};
  (excluded || []).forEach(ref => {
    membership[ref] = false;
  });
  (included || []).forEach(ref => {
    membership[ref] = true;
  });
  mockDatabase[prefix][userHashKey] = membership;
}
// Creates a logger whose output can be awaited in tests. The returned object has
// an AsyncQueue per log level, plus an `all` queue that receives every entry as
// a { level, message } pair, and a `logger` property suitable for passing in the
// SDK configuration. Messages are formatted with util.format, like the real logger.
function asyncLogCapture() {
  const capture = { all: new AsyncQueue() };
  const logger = {};
  for (const level of ['debug', 'info', 'warn', 'error']) {
    capture[level] = new AsyncQueue();
    logger[level] = (fmt, ...fmtArgs) => {
      const message = format(fmt, ...fmtArgs);
      capture[level].add(message);
      capture.all.add({ level, message });
    };
  }
  capture.logger = logger;
  return capture;
}
// When the queue is empty, an enqueued task must run right away (no deferral),
// and the completion callback must fire only after the task has finished.
it('executes task immediately if there are no pending tasks', function(done) {
  const q = new UpdateQueue();

  var updated = false;
  const updateFn = function(a, b, cb) {
    // The argument array passed to enqueue() should be forwarded to the task.
    expect(a).toEqual(1);
    expect(b).toEqual(2);
    updated = true;
    cb();
  };

  q.enqueue(updateFn, [1, 2], function() {
    // By the time the completion callback runs, the task has already executed.
    expect(updated).toEqual(true);
    done();
  });
});
/**
 * A simple FIFO queue that serializes asynchronous update operations.
 *
 * Each task is a function whose final argument is a completion callback. Tasks
 * run one at a time, in the order enqueued; a task submitted while another is
 * in progress waits until the earlier one has signalled completion.
 */
function UpdateQueue() {
  // Pending entries of the form [taskFn, taskArgs, doneCb]; index 0 is the task
  // currently in progress (if any).
  const pending = [];

  // Starts the task at the head of the queue, if there is one.
  const runNext = () => {
    if (pending.length === 0) {
      return;
    }
    const [taskFn, taskArgs, doneCb] = pending[0];
    const onTaskComplete = () => {
      pending.shift();
      if (pending.length > 0) {
        // Defer the next task so that a chain of synchronously-completing tasks
        // cannot recurse without bound.
        setImmediate(runNext);
      }
      doneCb && doneCb();
    };
    taskFn(...taskArgs, onTaskComplete);
  };

  /**
   * Adds a task to the queue, starting it immediately if nothing else is in progress.
   * @param {Function} updateFn task function; receives fnArgs followed by a completion callback
   * @param {Array} fnArgs arguments to pass to updateFn
   * @param {Function} [cb] called once the task has completed
   */
  this.enqueue = (updateFn, fnArgs, cb) => {
    pending.push([updateFn, fnArgs, cb]);
    if (pending.length === 1) {
      runNext();
    }
  };
}
describe('wrapPromiseCallback', () => {
  // With no callback, the wrapper returns a promise mirroring the original's outcome.
  it('should resolve to the value', () => {
    const promise = wrapPromiseCallback(Promise.resolve('woohoo'));
    return expect(promise).resolves.toBe('woohoo');
  });

  it('should reject with the error', () => {
    const error = new Error('something went wrong');
    const promise = wrapPromiseCallback(Promise.reject(error));
    return expect(promise).rejects.toBe(error);
  });

  // With a callback, the wrapper returns undefined and delivers the outcome
  // through the callback on a later tick (note that `promise` is already
  // undefined when the callback runs).
  it('should call the callback with a value if the promise resolves', done => {
    const promise = wrapPromiseCallback(Promise.resolve('woohoo'), (error, value) => {
      expect(promise).toBeUndefined();
      expect(error).toBeNull();
      expect(value).toBe('woohoo');
      done();
    });
  });

  it('should call the callback with an error if the promise rejects', done => {
    const actualError = new Error('something went wrong');
    const promise = wrapPromiseCallback(Promise.reject(actualError), (error, value) => {
      expect(promise).toBeUndefined();
      expect(error).toBe(actualError);
      expect(value).toBeNull();
      done();
    });
  });
});
// Collections at or below this size keep the iteratee's callbacks synchronous;
// larger ones defer each callback so nested calls cannot overflow the stack.
const maxNestedCalls = 50;

/**
 * Returns an iteratee that is safe to use with the async collection helpers on
 * the given collection. For small (or absent) collections the original iteratee
 * is returned unchanged, since bounded synchronous nesting is cheap; otherwise
 * the iteratee is wrapped so that every callback invocation is deferred with
 * setImmediate(), forwarding whatever arguments the iteratee produced.
 */
function safeIteratee(collection, iteratee) {
  const smallEnough = !collection || collection.length <= maxNestedCalls;
  return smallEnough
    ? iteratee
    : (value, callback) => iteratee(value, (...cbArgs) => setImmediate(callback, ...cbArgs));
}
// Convenience wrapper for making an HTTP/HTTPS request via Node's standard modules. Unlike http.request,
// the callback takes (error, response, body) parameters instead of just (response).
//
// - requestUrl: full URL string for the request; its protocol selects http vs. https
// - options: http.request options (method, headers, etc.); merged last, so these
//   override URL- and config-derived settings
// - body: request body to write, or null/undefined for none
// - config: SDK configuration; only tlsParams, timeout, and proxyAgent are used here
// - callback: invoked as (error) on a transport error, or (null, response, responseBody) on completion
function httpRequest(requestUrl, options, body, config, callback) {
  // Note: https.request allows a url parameter to be passed separately from options, but only in v10.9.0+, so
  // we still have to parse the URL until our minimum Node version is increased.
  const urlOpts = url.parse(requestUrl);
  const isSecure = urlOpts.protocol === 'https:';
  // Merge order matters: later sources win, so caller options take precedence.
  const allOptions = Object.assign(
    {},
    config && config.tlsParams,
    urlOpts,
    {
      timeout: config && config.timeout ? config.timeout * 1000 : undefined, // config.timeout is in seconds
      agent: config && config.proxyAgent,
    },
    options
  );
  const req = (isSecure ? https : http).request(allOptions, resp => {
    // NOTE: this `body` shadows the request-body parameter; it accumulates the response body.
    let body = '';
    resp.on('data', chunk => {
      body += chunk;
    });
    resp.on('end', () => {
      callback(null, resp, body);
    });
  });
  req.on('error', err => {
    callback(err);
  });
  if (body !== null && body !== undefined) {
    req.write(body);
  }
  req.end();
}
This is a 71 | // naive implementation that does not place a bound on the cache; the SDK will normally always be hitting 72 | // the same URL (the only time we don't is if we get an "indirect/put" stream event, but in that case we 73 | // deliberately do not use the cache). 74 | function httpWithETagCache() { 75 | const cache = {}; 76 | return (requestUrl, options, body, config, callback) => { 77 | const cacheEntry = cache[requestUrl]; 78 | const cachedEtag = cacheEntry && cacheEntry.etag; 79 | let newOptions = options; 80 | if (cachedEtag) { 81 | const newHeaders = Object.assign({}, options && options.headers, { 'if-none-match': cachedEtag }); 82 | newOptions = Object.assign({}, options, { headers: newHeaders }); 83 | } 84 | return httpRequest(requestUrl, newOptions, body, config, (err, resp, body) => { 85 | if (err) { 86 | callback(err); 87 | } else { 88 | if (resp.statusCode === 304 && cacheEntry) { 89 | callback(null, resp, cacheEntry.body); 90 | } else { 91 | if (resp.headers['etag']) { 92 | cache[requestUrl] = { etag: resp.headers['etag'], body }; 93 | } 94 | callback(null, resp, body); 95 | } 96 | } 97 | }); 98 | }; 99 | } 100 | 101 | module.exports = { 102 | getDefaultHeaders, 103 | httpRequest, 104 | httpWithETagCache, 105 | }; 106 | -------------------------------------------------------------------------------- /utils/stringifyAttrs.js: -------------------------------------------------------------------------------- 1 | module.exports = function stringifyAttrs(object, attrs) { 2 | if (!object) { 3 | return object; 4 | } 5 | let newObject; 6 | for (const attr of attrs) { 7 | const value = object[attr]; 8 | if (value !== undefined && typeof value !== 'string') { 9 | newObject = newObject || Object.assign({}, object); 10 | newObject[attr] = String(value); 11 | } 12 | } 13 | return newObject || object; 14 | }; 15 | -------------------------------------------------------------------------------- /utils/wrapPromiseCallback.js: 
-------------------------------------------------------------------------------- 1 | /** 2 | * Wrap a promise to invoke an optional callback upon resolution or rejection. 3 | * 4 | * This function assumes the callback follows the Node.js callback type: (err, value) => void 5 | * 6 | * If a callback is provided: 7 | * - if the promise is resolved, invoke the callback with (null, value) 8 | * - if the promise is rejected, invoke the callback with (error, null) 9 | * 10 | * @param {Promise} promise 11 | * @param {Function} callback 12 | * @returns Promise | undefined 13 | */ 14 | module.exports = function wrapPromiseCallback(promise, callback) { 15 | const ret = promise.then( 16 | value => { 17 | if (callback) { 18 | setImmediate(() => { 19 | callback(null, value); 20 | }); 21 | } 22 | return value; 23 | }, 24 | error => { 25 | if (callback) { 26 | setImmediate(() => { 27 | callback(error, null); 28 | }); 29 | } else { 30 | return Promise.reject(error); 31 | } 32 | } 33 | ); 34 | 35 | return !callback ? ret : undefined; 36 | }; 37 | -------------------------------------------------------------------------------- /versioned_data_kind.js: -------------------------------------------------------------------------------- 1 | /* 2 | These objects denote the types of data that can be stored in the feature store and 3 | referenced in the API. If we add another storable data type in the future, as long as it 4 | follows the same pattern (having "key", "version", and "deleted" properties), we only need 5 | to add a corresponding constant here and the existing store should be able to handle it. 6 | 7 | Note, for things to work correctly, the "namespace" property must match the key used in 8 | module.exports. 
// Descriptor for feature flag data. Flags are evaluated after segments
// (higher priority number), and a flag's dependencies are the keys of its
// prerequisite flags.
const features = {
  namespace: 'features',
  streamApiPath: '/flags/',
  requestPath: '/sdk/latest-flags/',
  priority: 1,
  getDependencyKeys: flag =>
    flag.prerequisites && flag.prerequisites.length ? flag.prerequisites.map(p => p.key) : [],
};

// Descriptor for segment data. Segments have no dependencies, so no
// getDependencyKeys is defined.
const segments = {
  namespace: 'segments',
  streamApiPath: '/segments/',
  requestPath: '/sdk/latest-segments/',
  priority: 0,
};