├── .eslintrc.json ├── .github └── workflows │ ├── pull-request.yaml │ └── push-master.yaml ├── .gitignore ├── .prettierrc.json ├── .yarn └── releases │ └── yarn-3.2.4.cjs ├── .yarnrc.yml ├── CHANGELOG.md ├── LICENSE ├── README.md ├── dangerfile.js ├── jest.config.js ├── package.json ├── scripts └── build.js ├── src ├── __data__ │ └── imdb.js ├── dataStrategies.js ├── date.test.js ├── exampleTypes.js ├── index.js ├── index.test.js ├── memory.test.js ├── provider-debug │ └── index.js ├── provider-memory │ ├── date.js │ ├── exampleTypes.js │ ├── index.js │ └── results.js ├── utils.js └── utils.test.js └── yarn.lock /.eslintrc.json: -------------------------------------------------------------------------------- 1 | { 2 | "extends": ["eslint:recommended", "plugin:import/recommended"], 3 | "parserOptions": { 4 | "ecmaVersion": "2022", 5 | "sourceType": "module" 6 | }, 7 | "env": { 8 | "node": true, 9 | "jest": true, 10 | "es2022": true 11 | }, 12 | "rules": { 13 | "import/extensions": [2, { "js": "always" }] 14 | } 15 | } 16 | -------------------------------------------------------------------------------- /.github/workflows/pull-request.yaml: -------------------------------------------------------------------------------- 1 | name: Lint, Test, and Danger 2 | 3 | on: pull_request 4 | 5 | jobs: 6 | lint-test-danger: 7 | runs-on: ubuntu-latest 8 | steps: 9 | - name: Checkout repo 10 | uses: actions/checkout@v3 11 | with: 12 | token: ${{ secrets.PAT_DECRAPIFIER }} 13 | fetch-depth: 0 14 | 15 | - name: Setup node 16 | uses: actions/setup-node@v3 17 | with: 18 | node-version: 16.x 19 | cache: 'yarn' 20 | 21 | - name: Install dependencies 22 | run: yarn install --immutable 23 | 24 | - name: Lint 25 | run: yarn run lint:ci || touch FAIL_JOB 26 | 27 | - name: Test 28 | run: yarn run test:ci || touch FAIL_JOB 29 | 30 | - name: Setup Decrapifier user 31 | run: | 32 | git config user.email "decrapifier@govspend.com" 33 | git config user.name "Decrapifier" 34 | git config 
push.default upstream 35 | git checkout --track origin/${{github.head_ref}} 36 | 37 | - name: Run danger 38 | run: yarn run danger ci || touch FAIL_JOB 39 | env: 40 | DANGER_GITHUB_API_TOKEN: ${{ secrets.PAT_DECRAPIFIER }} 41 | 42 | - name: check-if-lint-or-test-runs-failed 43 | run: 'if test -f "FAIL_JOB"; then exit 1; fi' 44 | -------------------------------------------------------------------------------- /.github/workflows/push-master.yaml: -------------------------------------------------------------------------------- 1 | name: Release 2 | 3 | on: 4 | push: 5 | branches: 6 | - master 7 | 8 | jobs: 9 | build: 10 | runs-on: ubuntu-latest 11 | steps: 12 | - name: Checkout repo 13 | uses: actions/checkout@v3 14 | 15 | - name: Setup node 16 | uses: actions/setup-node@v3 17 | with: 18 | node-version: 16.x 19 | cache: 'yarn' 20 | 21 | - name: Install dependencies 22 | run: yarn install --immutable 23 | 24 | - name: Publish to npm 25 | run: | 26 | echo -e "npmRegistryServer: \"https://registry.npmjs.org/\"\nnpmAuthToken: \"$NPM_TOKEN\"" >> ./.yarnrc.yml 27 | yarn npm publish --access public 28 | env: 29 | NPM_TOKEN: '${{ secrets.NPM_TOKEN }}' 30 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Logs 2 | logs 3 | *.log 4 | npm-debug.log* 5 | yarn-debug.log* 6 | yarn-error.log* 7 | 8 | # Runtime data 9 | pids 10 | *.pid 11 | *.seed 12 | *.pid.lock 13 | 14 | # Directory for instrumented libs generated by jscoverage/JSCover 15 | lib-cov 16 | 17 | # Coverage directory used by tools like istanbul 18 | coverage/* 19 | 20 | # Output files 21 | dist/* 22 | test-results.json 23 | lint-results.json 24 | 25 | # nyc test coverage 26 | .nyc_output 27 | 28 | # Grunt intermediate storage (http://gruntjs.com/creating-plugins#storing-task-files) 29 | .grunt 30 | 31 | # Bower dependency directory (https://bower.io/) 32 | bower_components 33 | 34 | # 
node-waf configuration 35 | .lock-wscript 36 | 37 | # Compiled binary addons (http://nodejs.org/api/addons.html) 38 | build/Release 39 | 40 | # Dependency directories 41 | node_modules/ 42 | jspm_packages/ 43 | 44 | # Typescript v1 declaration files 45 | typings/ 46 | 47 | # Optional npm cache directory 48 | .npm 49 | 50 | # Optional eslint cache 51 | .eslintcache 52 | 53 | # Optional REPL history 54 | .node_repl_history 55 | 56 | # Output of 'npm pack' 57 | *.tgz 58 | 59 | # Yarn Integrity file 60 | .yarn-integrity 61 | 62 | # dotenv environment variables file 63 | .env 64 | 65 | test-results.json 66 | lint-results.json 67 | 68 | # yarn stuff 69 | # see https://yarnpkg.com/getting-started/qa#which-files-should-be-gitignored 70 | .yarn/* 71 | !.yarn/patches 72 | !.yarn/plugins 73 | !.yarn/releases 74 | !.yarn/sdks 75 | !.yarn/versions 76 | -------------------------------------------------------------------------------- /.prettierrc.json: -------------------------------------------------------------------------------- 1 | { 2 | "semi": false, 3 | "singleQuote": true 4 | } 5 | -------------------------------------------------------------------------------- /.yarnrc.yml: -------------------------------------------------------------------------------- 1 | nodeLinker: node-modules 2 | yarnPath: .yarn/releases/yarn-3.2.4.cjs 3 | -------------------------------------------------------------------------------- /CHANGELOG.md: -------------------------------------------------------------------------------- 1 | ### 0.12.12 2 | 3 | - Add exports to package.json so node knows which version of the code to import 4 | - Do not use esbuild to transpile source when running tests. 
Instead use jest ESM support 5 | 6 | ### 0.12.11 7 | 8 | - Build and publish browser-safe version of this package 9 | 10 | ### 0.12.10 11 | 12 | - Fix package publishing 13 | 14 | ### 0.12.9 15 | 16 | - Thread search options in example types 17 | 18 | ### 0.12.8 19 | 20 | - Use yarn 21 | - Use esbuild for both packaging and test running 22 | - Change github actions to use yarn 23 | - Use import/export syntax instead of CJS 24 | - Co-locate tests with source code 25 | 26 | ### 0.12.7 27 | 28 | - Add coverage to danger 29 | 30 | ### 0.12.6 31 | 32 | - Update the pull-request workflow to better catch test failures 33 | 34 | ### 0.12.5 35 | 36 | - Use the provider specifed on the schema if available. 37 | 38 | ### 0.12.5 39 | 40 | - Ensure \_meta is stripped from filterOnly nodes / nodes without valid context if debug option is falsy 41 | 42 | ### 0.12.4 43 | 44 | - Revert parallelization until we can make it configurable and test more thoroughly 45 | 46 | ### 0.12.3 47 | 48 | - Converted tests to jest 49 | 50 | ### 0.12.2 51 | 52 | - Updated CI to use node16 and npm7 53 | - Updated package-lock.json to version 2 54 | 55 | ### 0.12.1 56 | 57 | - Performance: executing runSearch requests in parallel 58 | 59 | ### 0.12.0 60 | 61 | - Add last 1 Day and last 1 hour to date math calculations 62 | 63 | ### 0.11.3 64 | 65 | - Changed over CI from circleCI to Github Actions. 
Cleanup package-lock.json 70 | - Fix unit test: Date example type test cases - lastCalendarMonth 71 | - this is a permanent fix as it locks the date for the tests
type pagination 149 | 150 | ### 0.5.1 151 | 152 | - Documentation: Define DSL 153 | 154 | ### 0.5.0 155 | 156 | - Memory Provider: Add `totalRecords` to `results` example type 157 | 158 | ### 0.4.2 159 | 160 | - Throw Error object instead of just the error message 161 | 162 | ### 0.4.1 163 | 164 | - Pass schemas along in subquery type 165 | - Add missing hasValue functions for subquery and savedSearch 166 | 167 | ### 0.4.0 168 | 169 | - Memory Provider: Add savedSearch type 170 | - Memory Provider: Add subquery type 171 | - Memory Provider: Facet now coerces values to strings (since number options are strings anyway) 172 | - Added facet export data strategy 173 | 174 | ### 0.3.1 175 | 176 | - Refactoring 177 | 178 | ### 0.3.0 179 | 180 | - Add memory provider and tests 181 | 182 | ### 0.2.1 183 | 184 | - Pass `schema` to more step functions 185 | 186 | ### 0.2.0 187 | 188 | - Flatten legacy fields, so `data` and `config` (and their more modern `filterConfig` and `resultConfig` aliases) are merged into the nodes so types can safely ignore the distinction 189 | 190 | ### 0.1.0 191 | 192 | - Supporting `children`. 
193 | 194 | ### 0.0.5 195 | 196 | - Ecosystem And Resources readme update (version bump for npm pubishing) 197 | 198 | ### 0.0.4 199 | 200 | - Pass getProvider and getSchema to result functions 201 | 202 | ### 0.0.3 203 | 204 | - Fixed some core bugs, added better tests, and moved to async/await 205 | 206 | ### 0.0.2 207 | 208 | - Add CI configuration and other developer tooling 209 | 210 | ### 0.0.1 211 | 212 | - Initial release 213 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2017 SmartProcure 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 
22 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | > :warning: **Development has moved to the [contexture monorepo](https://github.com/smartprocure/contexture)**: This package lives in https://github.com/smartprocure/contexture/tree/master/packages/server 2 | 3 | 4 | # contexture 5 | 6 | The Contexture DSL (Domain Specific Language) Processor 7 | 8 | ## Overview 9 | 10 | Contexture is a tool for running the Contexture DSL, which is primarily about abstracting queries/filters and results/aggregrations. 11 | Each leaf node in a Contexture Tree can affect other leaf nodes (e.g., acting as a filter) and has results of it's own (e.g. a top N aggregation or search results) which are affected by the other nodes. 12 | Non leaf nodes describe how leaves relate to each other, e.g. as a boolean join of `and`/`or`, and Contexture is smart enough to make sure that filters are included based on their joins - e.g. two nodes `or`ed together won't affect each other's results, but they will if they're `and`ed together. 13 | 14 | The canonical example of a Contexture Node is faceted search, where you have a checkbox list that is both a filter (restricts results to things checked) and an aggregation (show the top n values which can be checked). Contexture allows them to be nested in advanced searches with boolean joins like `and`/`or`/`not`. 15 | 16 | Contexture takes as input the tree DSL and returns it hydrated with contextual results on it's `context`, and uses `provider`s for different backing data stores (like elasticsearch and mongo) to actually run the search results. This means that Contexture typically runs on the server, but it doesn't have to - you can build providers that call APIs instead of directly hitting a database. 
17 | While the Contexture DSL can be built anyway you'd like, it pairs well with the `contexture-client`, which leverages the generic structure and makes sure things update only when needed. 18 | 19 | ### Ecosystem And Resources 20 | 21 | | Github | npm | Description | 22 | | ------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------ | ----------------------------------------------------------------------------------------------------------------- | 23 | | [`contexture`](http://github.com/smartprocure/contexture) | [`contexture`](https://www.npmjs.com/package/contexture) | The core library that exectues the DSL to retrieve data | 24 | | [`contexture-elasticsearch`](http://github.com/smartprocure/contexture-elasticsearch) | [`contexture-elasticsearch`](https://www.npmjs.com/package/contexture-elasticsearch) | Elasticsearch provider for contexture | 25 | | [`contexture-mongo`](http://github.com/smartprocure/contexture-mongo) | [`contexture-mongo`](https://www.npmjs.com/package/contexture-mongo) | MongoDB provider for contexture | 26 | | [`contexture-client`](http://github.com/smartprocure/contexture-client) | [`contexture-client`](https://www.npmjs.com/package/contexture-client) | The client library that manages the DSL, allowing for hyper efficient updates running only what is exactly needed | 27 | | [`contexture-react`](http://github.com/smartprocure/contexture-react) | [`contexture-react`](https://www.npmjs.com/package/contexture-react) | React components for building contexture interfaces | 28 | | [`contexture-export`](http://github.com/smartprocure/contexture-export) | [`contexture-export`](https://www.npmjs.com/package/contexture-export) | Export searches into files or any other target | 29 | | [`contexture-ec18-talk`](http://github.com/smartprocure/contexture-ec18-talk) | n/a | Elasticon 2018 Talk About Contexture | 30 | 31 | ## Example Usage 32 | 
33 | ```js 34 | let Contexture = require('contexture') 35 | let provider = require('contexture-mongo') 36 | let types = require('contexture-mongo/types') 37 | let schemas = require('./path/to/schemas') 38 | 39 | let process = Contexture({ 40 | schemas, 41 | providers: { 42 | mongo: provider({ 43 | getMongooseClient: () => mongoose, 44 | types, 45 | }), 46 | }, 47 | }) 48 | ``` 49 | 50 | Then later: 51 | 52 | ```js 53 | await process(dsl) 54 | ``` 55 | 56 | or 57 | 58 | ```js 59 | await process(dsl, { 60 | debug: true, 61 | }) 62 | ``` 63 | 64 | ## Process Options 65 | 66 | Process can handle a few options: 67 | 68 | | Option | Description | 69 | | ---------- | ------------------------------------------------------------------------------------------------------------------------------------------------- | 70 | | `debug` | Sends `_meta` as part of the response, which includes per node request records, relevant filters, and other debug info | 71 | | `onResult` | A callback which is called whenever a node finishes producing it's results, which can be used to send partial results over websockets for example | 72 | 73 | ## Core Concepts 74 | 75 | ### Overview 76 | 77 | `Contexture` will process a serialized contexture tree dsl, where each leaf node has a `Schema` representing what it is querying and which data `Provider` it uses, along with a `Provider`-specific `Type` that defines how it applies `filter`s to other contexts and how it interacts with its `Provider` to get `results`. 78 | 79 | #### Glossary 80 | 81 | - Schema 82 | 83 | - A `Schema` represents a type of data - like a model in an ORM or a Schema in GraphQL. 
It has one or more[^manyproviders] `Providers` to tie it to a real data source, and well as configuration specific to each `Provider` - things like which index, type, or collection to use and type specific config like how to build a `summaryView` for results or what fields to highlight by default.- 84 | 85 | - Provider 86 | 87 | - A `Provider` contains all of the database[^db] specific logic, including how to actually run a search (`runSearch`), how to combine filters in a group (`groupCombinator`), any client specific configuration (which in practice is overridden/injected at runtime from sails config), and implementations for all of its `Types` 88 | 89 | - Type 90 | - A `Type` is an implementation of a specific node type for a given `Provider`. It can include functions to produce a filter (`filter`), how it interacts with the `Provider`'s search function to generate results (`result`), and checks[^checks] to determine if a filter should be included (`hasValue`) or if results should be run (`validContext`). All of the methods are optional - some node types only have filters (like `bool` or `number`), some only have results (like `dateHistogram` and `terms_stats`), and others have both (like `facet`). All of the functions can be asynchronous by returning a promise, allowing more complex filters like the `geo` filter which can geocode addresses on the fly. 91 | 92 | [^db]: Does not actually have to be a database - a provider could talk to an API, the file system, or even make stuff up on the fly 93 | [^checks]: These checks are above and beyond what the client specifies and are meant as last minute validation - the client is intelligent enough to not send up things without values or missing properties, but this provides an additional check in case something gets through (e.g., a `terms_stats` without a `sort` field). 
94 | [^manyproviders]: If there are multiple `Providers`, it will default to the first one unless a provider is also specified with the schema on the data context itself 95 | 96 | ## Implementation Details 97 | 98 | ### Process Algorithm 99 | 100 | For each of these steps, walk the tree in a parent-first DFS traversal, with each function optionally asynchronous by returning a promise. Along the way, intermediate data is added to contexts on an object called `_meta`. For each context, every type/processor combination is pulled on the fly, meaning it will use the correct local `Provider` and `Type` info even if some contexts have different schemas[^multischema] 101 | 102 | - Clean/Prep everything (adding `_meta`, etc) 103 | - Add `materializedPaths` (used later by `relevantFilters`) 104 | - Run `filter` for each item if it `hasValue` 105 | - Add `relevantFilters` for each item (all filters combined in their groups by the `groupCombinator` except for their own filters and any filters related to them in the tree via an `OR` 106 | - Get `result` for each item if it `hasValidContext` and is not `filterOnly` (as determined by the client event architecture), passing a pre curried search function that includes `relevantFilters` so types don't need to worry about it - logging each request on `_meta.requests` 107 | - Combine `took` values for all requests to get an accurate number and pass results to `onResult` as they come in if they are defined 108 | - Unless in `debug` mode, scrub off `_meta` from the response 109 | 110 | ### Providers 111 | 112 | All `Provider` must specify the following properties: 113 | 114 | - `groupCombinator` 115 | - A function that takes the group and an array of its filters and returns them combined. 116 | - `runSearch` 117 | - A function that actually runs the search. 
It takes the current context, schema, filters (as processed by the `relevantFilters` function and combined with `groupCombinators`), and the criteria for the current context's results (eg `aggs` for an es aggregation or `highlight` for es results). This function can conditionally do different things based on what it is passed - like knowing if it should run an aggregation or a scan/scroll. 118 | - `types` 119 | - A object hash of the `Provider`'s type implementations. It can optionally include a type called `default` whose properties will be used if one of its types are missing something (e.g specifying the default behavior of `validContext` to always allow or prevent results from running) 120 | 121 | Additionally, a provider may expose config for it's client (e.g. `hosts` or request `timeout` for elasticsearch). 122 | 123 | ### Types 124 | 125 | All `Types` can implement any if the following properties. All are optional: 126 | 127 | - `filter` 128 | - Takes the current context and produces the filter that will apply to other data contexts in the group (except those related via `OR`). Typically JSON but can be a string as in the SQL case. 129 | - `hasValue` 130 | - Takes the current context and returns a truthy value for whether or not it has a value 131 | - `result` 132 | - Takes the current context, a curried version of the provider's `runSearch` with filters and everything pre-applied (so it is really easy to run searches), the current schema, and the current provider for advanced use cases. This can run one or more async calls - as long as it returns a promise for the final result. If you need to do additional filtering logic, you can use `runSearch` on the provider directly instead of the convenient curried version and inspect the `_meta.relevantFilters` property to see which filters would have been auto-applied, allowing you to do literally any kind of search you want - but there hasn't been a need for this yet. 
133 | - `validContext` 134 | - Takes the current context and returns a truthy value for whether or not it should get results. 135 | 136 | [^multischema]: This completely solves and obviates the need for the `MultiIndexGroupProcessor` on the client and handles it in much more elegant way (and in a single service call, instead of `n` services calls). A caveat is that it does not currently handle schemas from different providers (because filters are generated based on their context's local schema), so you can't currently mix a elasticsearch schema with a mongo schema (because it could try to call mongo with elastic search filters for example). 137 | 138 | ### Schemas 139 | 140 | Schemas are named by convention based on their filename and should be in `camelCase`. A schema must have one or more provider specific set of configuration properties. 141 | -------------------------------------------------------------------------------- /dangerfile.js: -------------------------------------------------------------------------------- 1 | /* global schedule */ 2 | 3 | let { danger, markdown, fail, message, warn } = require('danger') 4 | 5 | let fs = require('fs') 6 | let duti = require('duti') 7 | let coverage = require('danger-plugin-coverage').default 8 | 9 | let readJson = (path) => { 10 | try { 11 | return JSON.parse(fs.readFileSync(path, { encoding: 'utf8' })) 12 | } catch (e) { 13 | // 14 | } 15 | } 16 | 17 | let args = { 18 | danger, 19 | fail, 20 | message, 21 | warn, 22 | markdown, 23 | lintResults: readJson('./lint-results.json'), 24 | testResults: readJson('./test-results.json'), 25 | config: { 26 | prNetChangeThreshold: 500, 27 | personalityNetChangeThreshold: 500, 28 | recommendedPrReviewers: 1, 29 | rootFolder: 'src', 30 | }, 31 | } 32 | 33 | // Danger can do a run on a local repo via `danger local`, in which case 34 | // `danger.github` will not be defined: 35 | // - `danger local -h` 36 | // - https://danger.systems/js/tutorials/fast-feedback.html 37 | if 
(danger.github) { 38 | duti.prAssignee(args) 39 | duti.netNegativePR(args) 40 | duti.bigPr(args) 41 | duti.noPrDescription(args) 42 | duti.requestedReviewers(args) 43 | duti.emptyChangelog(args) 44 | duti.versionBump(args) 45 | duti.autoFix(args) 46 | } 47 | 48 | duti.hasLintWarnings(args) 49 | duti.hasLintErrors(args) 50 | duti.hasTestErrors(args) 51 | 52 | schedule( 53 | coverage({ 54 | showAllFiles: true, 55 | threshold: { statements: 0, branches: 0, functions: 0, lines: 0 }, 56 | }) 57 | ) 58 | -------------------------------------------------------------------------------- /jest.config.js: -------------------------------------------------------------------------------- 1 | // https://jestjs.io/docs/configuration 2 | export default { 3 | testMatch: ['/src/**/*.test.js'], 4 | coverageReporters: ['clover'], 5 | collectCoverageFrom: ['src/**/*.js'], 6 | } 7 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "contexture", 3 | "version": "0.12.12", 4 | "description": "The Contexture (aka ContextTree) Core", 5 | "type": "module", 6 | "exports": { 7 | ".": { 8 | "import": "./dist/esm/index.js", 9 | "require": "./dist/cjs/index.js" 10 | }, 11 | "./*": { 12 | "import": "./dist/esm/*", 13 | "require": "./dist/cjs/*" 14 | } 15 | }, 16 | "files": [ 17 | "./dist" 18 | ], 19 | "scripts": { 20 | "prepack": "node scripts/build.js", 21 | "test": "NODE_OPTIONS=--experimental-vm-modules jest", 22 | "test:ci": "yarn test --coverage --json --outputFile test-results.json", 23 | "fmt": "prettier --ignore-path .gitignore --write .", 24 | "lint": "eslint --ignore-path .gitignore .", 25 | "lint:ci": "yarn lint -o lint-results.json -f json", 26 | "duti:fix": "yarn lint --fix && yarn fmt" 27 | }, 28 | "repository": { 29 | "type": "git", 30 | "url": "git+https://github.com/smartprocure/contexture.git" 31 | }, 32 | "keywords": [ 33 | "search", 34 
| "data-context" 35 | ], 36 | "author": "Samuel Greene", 37 | "license": "MIT", 38 | "bugs": { 39 | "url": "https://github.com/smartprocure/contexture/issues" 40 | }, 41 | "homepage": "https://github.com/smartprocure/contexture#readme", 42 | "dependencies": { 43 | "@elastic/datemath": "^5.0.3", 44 | "date-fns": "^2.11.1", 45 | "futil": "^1.66.1", 46 | "lodash": "^4.17.21", 47 | "moment": "^2.24.0", 48 | "moment-timezone": "^0.5.28" 49 | }, 50 | "devDependencies": { 51 | "@flex-development/toggle-pkg-type": "^1.0.1", 52 | "danger": "^11.1.2", 53 | "danger-plugin-coverage": "^1.6.2", 54 | "duti": "^0.15.2", 55 | "esbuild": "^0.15.12", 56 | "eslint": "^8.25.0", 57 | "eslint-plugin-import": "^2.26.0", 58 | "glob": "^8.0.3", 59 | "jest": "^29.0.2", 60 | "mockdate": "^3.0.5", 61 | "prettier": "^2.7.1" 62 | }, 63 | "packageManager": "yarn@3.2.4" 64 | } 65 | -------------------------------------------------------------------------------- /scripts/build.js: -------------------------------------------------------------------------------- 1 | import fs from 'fs/promises' 2 | import glob from 'glob' 3 | import esbuild from 'esbuild' 4 | // https://github.com/flex-development/toggle-pkg-type#when-should-i-use-this 5 | import toggleTypeModule from '@flex-development/toggle-pkg-type' 6 | 7 | // Clear build directory since esbuild won't do it for us 8 | await fs.rm('dist', { force: true, recursive: true }) 9 | 10 | let entryPoints = glob.sync('src/**/*.js', { 11 | ignore: [ 12 | // Tests 13 | 'src/**/*.test.js', 14 | // Data for tests 15 | 'src/**/__data__/*', 16 | ], 17 | }) 18 | 19 | // Build project 20 | 21 | toggleTypeModule('off') 22 | 23 | await esbuild.build({ 24 | platform: 'node', 25 | format: 'cjs', 26 | target: 'es2022', 27 | outdir: 'dist/cjs', 28 | entryPoints, 29 | }) 30 | 31 | await fs.writeFile('./dist/cjs/package.json', '{ "type": "commonjs" }') 32 | 33 | toggleTypeModule('on') 34 | 35 | await esbuild.build({ 36 | platform: 'node', 37 | format: 'esm', 38 | target: 
'es2022', 39 | outdir: 'dist/esm', 40 | entryPoints, 41 | }) 42 | 43 | await fs.writeFile('./dist/esm/package.json', '{ "type": "module" }') 44 | -------------------------------------------------------------------------------- /src/dataStrategies.js: -------------------------------------------------------------------------------- 1 | // TODO: All of this should move to contexture-export 2 | 3 | import _ from 'lodash/fp.js' 4 | import F from 'futil' 5 | 6 | let Tree = F.tree(_.get('children'), (key) => ({ key })) 7 | let setFilterOnly = Tree.transform((node) => { 8 | node.filterOnly = true 9 | }) 10 | let lastChild = (x) => _.last(Tree.traverse(x)) 11 | 12 | let wrapTree = _.curry((analysisNodes, tree) => ({ 13 | key: 'analysisRoot', 14 | type: 'group', 15 | join: 'and', 16 | schema: tree.schema, 17 | children: [setFilterOnly(tree), ..._.castArray(analysisNodes)], 18 | })) 19 | 20 | export let analyzeTree = _.curry( 21 | async (service, tree, analysisNodes, options) => 22 | lastChild(await service(wrapTree(analysisNodes, tree), options)) 23 | ) 24 | 25 | export let facet = ({ service, tree, field, size = 100, sortDir, options }) => { 26 | let analyze = analyzeTree(service, tree) 27 | let getTotalRecords = _.memoize(async () => { 28 | let result = await analyze( 29 | { 30 | key: 'analysisOutput', 31 | type: 'cardinality', 32 | field, 33 | }, 34 | options 35 | ) 36 | return _.get('context.value', result) 37 | }) 38 | 39 | let done = false 40 | let getNext = async () => { 41 | let result = await analyze( 42 | { 43 | key: 'analysisOutput', 44 | type: 'facet', 45 | field, 46 | size, 47 | sortDir, 48 | }, 49 | options 50 | ) 51 | done = true 52 | return _.map('name', result.context.options) 53 | } 54 | 55 | return { 56 | getTotalRecords, 57 | hasNext: () => !done, 58 | getNext, 59 | } 60 | } 61 | -------------------------------------------------------------------------------- /src/date.test.js: 
-------------------------------------------------------------------------------- 1 | import _ from 'lodash/fp.js' 2 | import MockDate from 'mockdate' 3 | import moment from 'moment-timezone' 4 | import Contexture from './index.js' 5 | import provider from './provider-memory/index.js' 6 | import memoryExampleTypes from './provider-memory/exampleTypes.js' 7 | 8 | let dates = () => [ 9 | { 10 | key: 'last15Months', 11 | date: moment().subtract(15, 'months').format(), 12 | }, 13 | { 14 | key: 'lastMonth', 15 | date: moment().subtract(1, 'months').format(), 16 | }, 17 | { 18 | key: 'last3Days', 19 | date: moment().subtract(3, 'days').format(), 20 | }, 21 | { 22 | key: 'last6Days', 23 | date: moment().subtract(6, 'days').format(), 24 | }, 25 | { 26 | key: 'last20Days', 27 | date: moment().subtract(20, 'days').format(), 28 | }, 29 | { 30 | key: 'last6Months', 31 | date: moment().subtract(6, 'months').format(), 32 | }, 33 | { 34 | key: 'last10Weeks', 35 | date: moment().subtract(10, 'weeks').toDate().getTime(), 36 | }, 37 | { 38 | key: 'last20Months', 39 | date: moment().subtract(20, 'months').format('LLLL'), 40 | }, 41 | { 42 | key: 'last5Years', 43 | date: moment().subtract(5, 'years').format('MM/DD/YYYY'), 44 | }, 45 | { 46 | key: 'tomorrow', 47 | date: moment().add(1, 'days').format(), 48 | }, 49 | { 50 | key: 'nextMonth', 51 | date: moment().add(1, 'months').format(), 52 | }, 53 | { 54 | key: 'next6Months', 55 | date: moment().add(6, 'months').format(), 56 | }, 57 | { 58 | key: 'next5Years', 59 | date: moment().add(5, 'years').format(), 60 | }, 61 | ] 62 | 63 | let dsl = { 64 | key: 'root', 65 | type: 'group', 66 | schema: 'date', 67 | join: 'and', 68 | children: [ 69 | { 70 | type: 'date', 71 | field: 'date', 72 | }, 73 | { 74 | key: 'results', 75 | type: 'results', 76 | config: { 77 | page: 1, 78 | }, 79 | }, 80 | ], 81 | } 82 | 83 | let process = () => 84 | Contexture({ 85 | schemas: { 86 | date: { 87 | memory: { 88 | records: dates(), 89 | }, 90 | }, 91 | }, 92 | 
providers: { 93 | memory: { 94 | ...provider, 95 | types: { 96 | ...memoryExampleTypes(), 97 | }, 98 | }, 99 | }, 100 | }) 101 | 102 | let testRange = async ({ range = 'exact', from, to, expected }) => { 103 | let tree = _.cloneDeep(dsl) 104 | tree.children[0] = { ...tree.children[0], range, from, to } 105 | let response = await process()(tree) 106 | let results = _.map((key) => _.find({ key }, dates()), expected) 107 | expect(response.children[1].context).toEqual({ 108 | results, 109 | totalRecords: results.length, 110 | }) 111 | } 112 | 113 | describe('Date example type test cases', () => { 114 | beforeAll(() => { 115 | MockDate.set(moment('2021-12-01T21:39:10.172Z', moment.ISO_8601)) 116 | }) 117 | afterAll(() => { 118 | MockDate.reset() 119 | }) 120 | it('allFutureDates', async () => 121 | testRange({ 122 | range: 'allFutureDates', 123 | expected: ['tomorrow', 'nextMonth', 'next6Months', 'next5Years'], 124 | })) 125 | it('allPastDates', async () => 126 | testRange({ 127 | range: 'allPastDates', 128 | expected: [ 129 | 'last15Months', 130 | 'lastMonth', 131 | 'last3Days', 132 | 'last6Days', 133 | 'last20Days', 134 | 'last6Months', 135 | 'last10Weeks', 136 | 'last20Months', 137 | 'last5Years', 138 | ], 139 | })) 140 | it('last3Days', async () => testRange({ range: 'last3Days', expected: [] })) 141 | it('last7Days', async () => 142 | testRange({ range: 'last7Days', expected: ['last3Days', 'last6Days'] })) 143 | it('last90Days', async () => 144 | testRange({ 145 | range: 'last90Days', 146 | expected: [ 147 | 'lastMonth', 148 | 'last3Days', 149 | 'last6Days', 150 | 'last20Days', 151 | 'last10Weeks', 152 | ], 153 | })) 154 | it('lastCalendarMonth', async () => 155 | testRange({ 156 | range: 'lastCalendarMonth', 157 | expected: ['lastMonth', 'last3Days', 'last6Days', 'last20Days'], 158 | })) 159 | it('next6Months', async () => 160 | testRange({ range: 'next6Months', expected: ['tomorrow', 'nextMonth'] })) 161 | it('next36Months', async () => 162 | testRange({ 163 | 
range: 'next36Months', 164 | expected: ['tomorrow', 'nextMonth', 'next6Months'], 165 | })) 166 | it('exact FROM with open TO', async () => 167 | testRange({ 168 | from: moment().subtract(65, 'days').format(), 169 | expected: [ 170 | 'lastMonth', 171 | 'last3Days', 172 | 'last6Days', 173 | 'last20Days', 174 | 'tomorrow', 175 | 'nextMonth', 176 | 'next6Months', 177 | 'next5Years', 178 | ], 179 | })) 180 | it('exact TO with open FROM', async () => 181 | testRange({ 182 | to: new Date(), 183 | expected: [ 184 | 'last15Months', 185 | 'lastMonth', 186 | 'last3Days', 187 | 'last6Days', 188 | 'last20Days', 189 | 'last6Months', 190 | 'last10Weeks', 191 | 'last20Months', 192 | 'last5Years', 193 | ], 194 | })) 195 | it('exact FROM & TO', async () => 196 | testRange({ 197 | from: moment().subtract(1, 'weeks').format(), 198 | to: new Date(), 199 | expected: ['last3Days', 'last6Days'], 200 | })) 201 | }) 202 | -------------------------------------------------------------------------------- /src/exampleTypes.js: -------------------------------------------------------------------------------- 1 | import _ from 'lodash/fp.js' 2 | import * as strategies from './dataStrategies.js' 3 | 4 | export default ({ getSavedSearch } = {}) => ({ 5 | savedSearch: { 6 | hasValue: (node) => node.search || node.searchId, 7 | async filter(node, schema, { processGroup, options }) { 8 | let debugSearch = (x) => processGroup(x, { ...options, debug: true }) 9 | let search = node.search || (await getSavedSearch(node.searchId)) 10 | let result = await strategies.analyzeTree( 11 | debugSearch, 12 | search, 13 | { key: 'targetNode' }, 14 | options 15 | ) 16 | return result._meta.relevantFilters 17 | }, 18 | }, 19 | subquery: { 20 | hasValue: (node) => 21 | node.localField && node.foreignField && (node.search || node.searchId), 22 | async filter(node, schema, { processGroup, getProvider, options }) { 23 | let tree = node.search || (await getSavedSearch(node.searchId)) 24 | return 
getProvider(node).types.facet.filter( 25 | { 26 | field: node.localField, 27 | values: await strategies 28 | .facet({ 29 | service: processGroup, 30 | tree, 31 | field: node.foreignField, 32 | size: 0, // get all results 33 | options, 34 | }) 35 | .getNext(), 36 | }, 37 | schema 38 | ) 39 | }, 40 | }, 41 | raw: { 42 | hasValue: _.get('filter'), 43 | filter: ({ filter }) => filter, 44 | validContext: _.get('result'), 45 | result: async ({ result }, search, schema, { options }) => ({ 46 | result: await search(result, options), 47 | }), 48 | }, 49 | }) 50 | -------------------------------------------------------------------------------- /src/index.js: -------------------------------------------------------------------------------- 1 | import F from 'futil' 2 | import _ from 'lodash/fp.js' 3 | import { 4 | Tree, 5 | getRelevantFilters, 6 | attachFilters, 7 | getProvider as getProviderUtil, 8 | runTypeFunction as runTypeFunctionUtil, 9 | } from './utils.js' 10 | 11 | let process = _.curry(async ({ providers, schemas }, group, options = {}) => { 12 | let getProvider = getProviderUtil(providers, schemas) 13 | let getSchema = (schema) => schemas[schema] 14 | let runTypeFunction = runTypeFunctionUtil({ 15 | options, 16 | getSchema, 17 | getProvider, 18 | processGroup: (g, options) => process({ providers, schemas }, g, options), 19 | }) 20 | try { 21 | await attachFilters(runTypeFunction)(group) 22 | Tree.walk((node) => { 23 | // Skip groups 24 | if (!Tree.traverse(node)) 25 | node._meta.relevantFilters = getRelevantFilters( 26 | getProvider(node).groupCombinator, 27 | node._meta.path, 28 | group 29 | ) 30 | })(group) 31 | await Tree.walkAsync(async (node) => { 32 | let validContext = await runTypeFunction('validContext', node) 33 | 34 | // Reject filterOnly 35 | if (node.filterOnly || !validContext) { 36 | if (!options.debug) delete node._meta 37 | return 38 | } 39 | let curriedSearch = _.partial(getProvider(node).runSearch, [ 40 | options, 41 | node, 42 | 
getSchema(node.schema), 43 | node._meta.relevantFilters, 44 | ]) 45 | 46 | node.context = await runTypeFunction('result', node, curriedSearch).catch( 47 | (error) => { 48 | throw F.extendOn(error, { node }) 49 | } 50 | ) 51 | let path = node._meta.path 52 | if (!options.debug) delete node._meta 53 | if (options.onResult) options.onResult({ path, node }) 54 | })(group) 55 | 56 | return group 57 | } catch (error) { 58 | throw error.node ? error : new Error(`Uncaught search exception: ${error}`) 59 | } 60 | }) 61 | 62 | export default process 63 | 64 | // Psuedo code process 65 | // ----- 66 | // add _meta 67 | // add materializedPaths 68 | // add filter (reject !hasValue, reject contextOnly) 69 | // iterate DFS 70 | // get filters for context (by materialized path, filter lookup) 71 | // reject filterOnly 72 | // add resultProcessor (aka `query`) 73 | // SEARCH 74 | // process result, loop ^ 75 | // onResult 76 | // return results 77 | -------------------------------------------------------------------------------- /src/index.test.js: -------------------------------------------------------------------------------- 1 | import _ from 'lodash/fp.js' 2 | import Contexture from './index.js' 3 | import provider from './provider-debug/index.js' 4 | 5 | describe('Contexture Core', () => { 6 | let process = Contexture({ 7 | schemas: { 8 | test: { 9 | debug: true, 10 | }, 11 | }, 12 | providers: { 13 | debug: provider, 14 | }, 15 | }) 16 | let dsl = { 17 | key: 'root', 18 | type: 'group', 19 | schema: 'test', 20 | // join: 'and', 21 | children: [ 22 | { 23 | key: 'filter', 24 | type: 'test', 25 | data: { 26 | value: 1, 27 | }, 28 | config: { 29 | c: 1, 30 | }, 31 | }, 32 | { 33 | key: 'results', 34 | type: 'results', 35 | config: { 36 | page: 1, 37 | }, 38 | }, 39 | ], 40 | } 41 | it('should work', async () => { 42 | let { 43 | children: [filter, results], 44 | } = await process(dsl) 45 | expect(filter.context).toEqual({ 46 | abc: 123, 47 | }) 48 | 
expect(filter._meta).toBeFalsy() 49 | expect(results.context).toEqual({ 50 | results: [], 51 | }) 52 | expect(results._meta).toBeFalsy() 53 | }) 54 | it('should add _meta with debug option', async () => { 55 | let result = await process(dsl, { debug: true }) 56 | let { 57 | children: [filter, results], 58 | } = result 59 | 60 | expect(filter._meta).toEqual({ 61 | requests: [ 62 | { 63 | where: undefined, 64 | retrieve: { test: { c: 1 } }, 65 | }, 66 | ], 67 | path: ['root', 'filter'], 68 | hasValue: true, 69 | relevantFilters: undefined, 70 | filter: { 71 | 'filter (test)': { 72 | value: 1, 73 | }, 74 | }, 75 | }) 76 | expect(results._meta).toEqual({ 77 | requests: [ 78 | { 79 | where: { 80 | 'filter (test)': { 81 | value: 1, 82 | }, 83 | }, 84 | retrieve: { 85 | results: { 86 | page: 1, 87 | }, 88 | }, 89 | }, 90 | ], 91 | path: ['root', 'results'], 92 | hasValue: true, 93 | relevantFilters: { 94 | 'filter (test)': { 95 | value: 1, 96 | }, 97 | }, 98 | filter: undefined, 99 | }) 100 | }) 101 | it('should remove _meta from all valid nodes if debug option is falsy', async () => { 102 | let result = await process(dsl, { debug: false }) 103 | let { 104 | children: [filter, results], 105 | } = result 106 | 107 | expect(_.has('_meta', filter)).toEqual(false) 108 | expect(_.has('_meta', results)).toEqual(false) 109 | }) 110 | it('should also remove _meta from nodes without a valid context / filterOnly nodes if debug option is falsy', async () => { 111 | let newDSL = { 112 | ...dsl, 113 | children: dsl.children.concat({ 114 | key: 'filterOnlyFilter', 115 | type: 'test', 116 | data: { 117 | value: 1, 118 | }, 119 | config: { 120 | c: 1, 121 | }, 122 | filterOnly: true, 123 | }), 124 | } 125 | 126 | let result = await process(newDSL, { debug: false }) 127 | let { 128 | children: [, , filterOnlyNode], 129 | } = result 130 | 131 | expect(_.has('_meta', filterOnlyNode)).toEqual(false) 132 | }) 133 | }) 134 | 
-------------------------------------------------------------------------------- /src/memory.test.js: -------------------------------------------------------------------------------- 1 | import _ from 'lodash/fp.js' 2 | import Contexture from './index.js' 3 | import provider from './provider-memory/index.js' 4 | import memoryExampleTypes from './provider-memory/exampleTypes.js' 5 | import exampleTypes from './exampleTypes.js' 6 | import movies from './__data__/imdb.js' 7 | 8 | let getResultsNode = () => ({ 9 | key: 'results', 10 | type: 'results', 11 | config: { 12 | page: 1, 13 | }, 14 | }) 15 | 16 | let getSavedSearch = async (id) => 17 | ({ 18 | AdamFavorites: { 19 | key: 'criteria', 20 | type: 'group', 21 | schema: 'favorites', 22 | join: 'and', 23 | children: [ 24 | { 25 | key: 'filter', 26 | type: 'facet', 27 | field: 'user', 28 | values: ['Adam'], 29 | }, 30 | ], 31 | }, 32 | HopeFavorites: { 33 | key: 'criteria', 34 | type: 'group', 35 | schema: 'favorites', 36 | join: 'and', 37 | children: [ 38 | { 39 | key: 'filter', 40 | type: 'facet', 41 | field: 'user', 42 | values: ['Hope'], 43 | }, 44 | ], 45 | }, 46 | }[id]) 47 | 48 | describe('Memory Provider', () => { 49 | let now = new Date() 50 | let process = Contexture({ 51 | schemas: { 52 | test: { 53 | memory: { 54 | records: [{ a: 1, b: 1 }, { a: 1, b: 3 }, { a: 2, b: 2 }, { a: 3 }], 55 | }, 56 | }, 57 | test2: { 58 | memory: { 59 | records: [ 60 | { b: 1, c: 1 }, 61 | { b: 2, c: 2 }, 62 | { b: 3, c: 1 }, 63 | ], 64 | }, 65 | }, 66 | bool: { 67 | memory: { 68 | records: [ 69 | { a: true, b: true }, 70 | { a: true, b: false }, 71 | { a: false, b: true }, 72 | { a: false, b: false }, 73 | { a: true }, 74 | { a: false }, 75 | { a: 1 }, 76 | { a: 0 }, 77 | { a: '1' }, 78 | ], 79 | }, 80 | }, 81 | arrayFacets: { 82 | memory: { 83 | records: [ 84 | { b: 1, c: [1, 2] }, 85 | { b: 2, c: [1, 2] }, 86 | { b: 3, c: [1, 2] }, 87 | ], 88 | }, 89 | }, 90 | arrayOfObjectsFacets: { 91 | memory: { 92 | records: [ 93 | { b: 
1, c: [{ a: 1 }, { b: 1 }] }, 94 | { b: 2, c: [{ a: 1 }, { b: 1 }] }, 95 | { b: 3, c: [{ a: 1 }, { b: 1 }] }, 96 | ], 97 | }, 98 | }, 99 | movies: { 100 | memory: { 101 | records: _.map((x) => { 102 | x.released = new Date(x.released) 103 | return x 104 | }, movies), 105 | }, 106 | }, 107 | favorites: { 108 | memory: { 109 | records: [ 110 | { movie: 'Game of Thrones', user: 'Adam' }, 111 | { movie: 'The Matrix', user: 'Adam' }, 112 | { movie: 'Star Trek: The Next Generation', user: 'Adam' }, 113 | { movie: 'Game of Thrones', user: 'Hope' }, 114 | { movie: 'The Lucky One', user: 'Hope' }, 115 | ], 116 | }, 117 | }, 118 | currentYearMovies: { 119 | memory: { 120 | records: _.flow( 121 | _.take(5), 122 | _.map((x) => ({ ...x, released: now })) 123 | )(movies), 124 | }, 125 | }, 126 | }, 127 | providers: { 128 | memory: { 129 | ...provider, 130 | types: { 131 | ...memoryExampleTypes(), 132 | ...exampleTypes({ 133 | getSavedSearch, 134 | }), 135 | }, 136 | }, 137 | }, 138 | }) 139 | describe('basic test cases', () => { 140 | it('should handle basic AND test case', async () => { 141 | let dsl = { 142 | key: 'root', 143 | type: 'group', 144 | schema: 'test', 145 | join: 'and', 146 | children: [ 147 | { 148 | key: 'filter', 149 | type: 'facet', 150 | field: 'a', 151 | values: [1, 2], 152 | }, 153 | { 154 | key: 'filter2', 155 | type: 'facet', 156 | field: 'a', 157 | values: [1], 158 | }, 159 | getResultsNode(), 160 | ], 161 | } 162 | let result = await process(dsl) 163 | expect(result.children[0].context).toEqual({ 164 | cardinality: 1, 165 | options: [{ name: 1, count: 2 }], 166 | }) 167 | expect(result.children[1].context).toEqual({ 168 | cardinality: 2, 169 | options: [ 170 | { name: 1, count: 2 }, 171 | { name: 2, count: 1 }, 172 | ], 173 | }) 174 | expect(result.children[2].context).toEqual({ 175 | results: [ 176 | { a: 1, b: 1 }, 177 | { a: 1, b: 3 }, 178 | ], 179 | totalRecords: 2, 180 | }) 181 | }) 182 | it('should handle basic OR test case', async () => { 183 | 
let dsl = { 184 | key: 'root', 185 | type: 'group', 186 | schema: 'test', 187 | join: 'or', 188 | children: [ 189 | { 190 | key: 'filter', 191 | type: 'facet', 192 | field: 'a', 193 | values: [1, 2], 194 | }, 195 | { 196 | key: 'filter2', 197 | type: 'facet', 198 | field: 'a', 199 | values: [1], 200 | }, 201 | getResultsNode(), 202 | ], 203 | } 204 | let result = await process(dsl) 205 | expect(result.children[0].context).toEqual({ 206 | cardinality: 3, 207 | options: [ 208 | { name: 1, count: 2 }, 209 | { name: 2, count: 1 }, 210 | { name: 3, count: 1 }, 211 | ], 212 | }) 213 | expect(result.children[1].context).toEqual({ 214 | cardinality: 3, 215 | options: [ 216 | { name: 1, count: 2 }, 217 | { name: 2, count: 1 }, 218 | { name: 3, count: 1 }, 219 | ], 220 | }) 221 | expect(result.children[2].context).toEqual({ 222 | results: [{ a: 1, b: 1 }, { a: 1, b: 3 }, { a: 2, b: 2 }, { a: 3 }], 223 | totalRecords: 4, 224 | }) 225 | }) 226 | it('should handle EXCLUDE mode', async () => { 227 | let dsl = { 228 | key: 'root', 229 | type: 'group', 230 | schema: 'test', 231 | join: 'and', 232 | children: [ 233 | { 234 | key: 'filter', 235 | type: 'facet', 236 | mode: 'exclude', 237 | field: 'a', 238 | values: [1, 2], 239 | }, 240 | getResultsNode(), 241 | ], 242 | } 243 | let result = await process(dsl) 244 | expect(result.children[1].context).toEqual({ 245 | results: [{ a: 3 }], 246 | totalRecords: 1, 247 | }) 248 | }) 249 | it('should handle savedSearch', async () => { 250 | let dsl = { 251 | key: 'root', 252 | type: 'group', 253 | schema: 'test', 254 | join: 'and', 255 | children: [ 256 | { 257 | key: 'savedSearch', 258 | type: 'savedSearch', 259 | search: { 260 | key: 'root', 261 | type: 'group', 262 | schema: 'test', 263 | join: 'and', 264 | children: [ 265 | { 266 | key: 'filter2', 267 | type: 'facet', 268 | field: 'a', 269 | values: [1], 270 | }, 271 | getResultsNode(), 272 | ], 273 | }, 274 | }, 275 | getResultsNode(), 276 | ], 277 | } 278 | let result = await 
process(dsl) 279 | expect(result.children[1].context).toEqual({ 280 | results: [ 281 | { a: 1, b: 1 }, 282 | { a: 1, b: 3 }, 283 | ], 284 | totalRecords: 2, 285 | }) 286 | }) 287 | it('should handle subquery', async () => { 288 | let dsl = { 289 | key: 'root', 290 | type: 'group', 291 | schema: 'test2', 292 | join: 'and', 293 | children: [ 294 | { 295 | key: 'subquery', 296 | type: 'subquery', 297 | localField: 'b', 298 | foreignField: 'b', 299 | search: { 300 | key: 'root', 301 | type: 'group', 302 | schema: 'test', 303 | join: 'and', 304 | children: [ 305 | { 306 | key: 'filter', 307 | type: 'facet', 308 | field: 'a', 309 | values: [1], 310 | }, 311 | getResultsNode(), 312 | ], 313 | }, 314 | }, 315 | getResultsNode(), 316 | ], 317 | } 318 | let result = await process(dsl) 319 | expect(result.children[1].context).toEqual({ 320 | results: [ 321 | { b: 1, c: 1 }, 322 | { b: 3, c: 1 }, 323 | ], 324 | totalRecords: 2, 325 | }) 326 | }) 327 | it('should unwind array facets', async () => { 328 | let dsl = { 329 | key: 'root', 330 | type: 'group', 331 | schema: 'arrayFacets', 332 | join: 'and', 333 | children: [ 334 | { 335 | key: 'filter', 336 | type: 'facet', 337 | field: 'c', 338 | }, 339 | ], 340 | } 341 | let result = await process(dsl) 342 | expect(result.children[0].context.options).toEqual([ 343 | { name: 1, count: 3 }, 344 | { name: 2, count: 3 }, 345 | ]) 346 | }) 347 | it('should unwind array of objects facets', async () => { 348 | let dsl = { 349 | key: 'root', 350 | type: 'group', 351 | schema: 'arrayOfObjectsFacets', 352 | join: 'and', 353 | children: [ 354 | { 355 | key: 'filter', 356 | type: 'facet', 357 | field: 'c', 358 | }, 359 | ], 360 | } 361 | let result = await process(dsl) 362 | expect(result.children[0].context.options).toEqual([ 363 | { name: { a: 1 }, count: 3 }, 364 | { name: { b: 1 }, count: 3 }, 365 | ]) 366 | }) 367 | }) 368 | 369 | describe('exists test cases', () => { 370 | let dsl = { 371 | key: 'root', 372 | type: 'group', 373 | 
schema: 'test', 374 | join: 'and', 375 | children: [ 376 | { 377 | key: 'filter', 378 | type: 'exists', 379 | field: 'a', 380 | }, 381 | getResultsNode(), 382 | ], 383 | } 384 | it('exists (null) should work', async () => { 385 | dsl.children[0].values = null 386 | let result = await process(dsl) 387 | expect(result.children[1].context).toEqual({ 388 | results: [{ a: 1, b: 1 }, { a: 1, b: 3 }, { a: 2, b: 2 }, { a: 3 }], 389 | totalRecords: 4, 390 | }) 391 | }) 392 | it('exists (true) should work', async () => { 393 | dsl.children[0].value = true 394 | let result = await process(dsl) 395 | expect(result.children[1].context).toEqual({ 396 | results: [{ a: 1, b: 1 }, { a: 1, b: 3 }, { a: 2, b: 2 }, { a: 3 }], 397 | totalRecords: 4, 398 | }) 399 | }) 400 | it('exists (false) should work', async () => { 401 | dsl.children[0].field = 'b' 402 | dsl.children[0].value = false 403 | let result = await process(dsl) 404 | expect(result.children[1].context).toEqual({ 405 | results: [{ a: 3 }], 406 | totalRecords: 1, 407 | }) 408 | }) 409 | }) 410 | 411 | describe('bool test cases', () => { 412 | let dsl = { 413 | key: 'root', 414 | type: 'group', 415 | schema: 'bool', 416 | join: 'and', 417 | children: [ 418 | { 419 | key: 'filter', 420 | type: 'bool', 421 | field: 'a', 422 | }, 423 | getResultsNode(), 424 | ], 425 | } 426 | it('bool (null) should work', async () => { 427 | dsl.children[0].values = null 428 | let result = await process(dsl) 429 | expect(result.children[1].context).toEqual({ 430 | results: [ 431 | { a: true, b: true }, 432 | { a: true, b: false }, 433 | { a: false, b: true }, 434 | { a: false, b: false }, 435 | { a: true }, 436 | { a: false }, 437 | { a: 1 }, 438 | { a: 0 }, 439 | { a: '1' }, 440 | ], 441 | totalRecords: 9, 442 | }) 443 | }) 444 | it('bool (true) should work', async () => { 445 | dsl.children[0].value = true 446 | let result = await process(dsl) 447 | expect(result.children[1].context).toEqual({ 448 | results: [{ a: true, b: true }, { a: true, 
b: false }, { a: true }], 449 | totalRecords: 3, 450 | }) 451 | }) 452 | it('bool (false) should work', async () => { 453 | dsl.children[0].field = 'b' 454 | dsl.children[0].value = false 455 | let result = await process(dsl) 456 | expect(result.children[1].context).toEqual({ 457 | results: [ 458 | { a: true, b: false }, 459 | { a: false, b: false }, 460 | ], 461 | totalRecords: 2, 462 | }) 463 | }) 464 | }) 465 | 466 | describe('imdb test cases', () => { 467 | it('should handle facets', async () => { 468 | let dsl = { 469 | key: 'root', 470 | type: 'group', 471 | schema: 'movies', 472 | join: 'and', 473 | children: [ 474 | { 475 | key: 'ratings', 476 | type: 'facet', 477 | field: 'rated', 478 | values: ['R', 'PG-13'], 479 | }, 480 | getResultsNode(), 481 | ], 482 | } 483 | let result = await process(dsl) 484 | let ratings = _.find({ key: 'ratings' }, result.children).context 485 | expect(ratings.cardinality).toBe(25) 486 | expect(ratings.options).toEqual([ 487 | { name: 'R', count: 1104 }, 488 | { name: 'PG-13', count: 525 }, 489 | { name: 'TV-14', count: 361 }, 490 | { name: 'PG', count: 333 }, 491 | { name: 'Not Rated', count: 217 }, 492 | { name: 'TV-PG', count: 169 }, 493 | { name: 'TV-MA', count: 152 }, 494 | { name: 'Approved', count: 149 }, 495 | { name: 'Unrated', count: 125 }, 496 | { name: 'G', count: 87 }, 497 | ]) 498 | let results = _.find({ key: 'results' }, result.children).context.results 499 | let inspectedResults = results.map(_.pick(['title', 'year', 'rated'])) 500 | expect(inspectedResults).toEqual([ 501 | { title: 'The Dark Knight Rises', year: 2012, rated: 'PG-13' }, 502 | { title: 'The Usual Suspects', year: 1995, rated: 'R' }, 503 | { title: 'American Beauty', year: 1999, rated: 'R' }, 504 | { title: 'The Prestige', year: 2006, rated: 'PG-13' }, 505 | { title: 'Braveheart', year: 1995, rated: 'R' }, 506 | { 507 | title: 'Eternal Sunshine of the Spotless Mind', 508 | year: 2004, 509 | rated: 'R', 510 | }, 511 | { title: 'The Sixth Sense', 
year: 1999, rated: 'PG-13' }, 512 | { title: 'Life Is Beautiful', year: 1997, rated: 'PG-13' }, 513 | { title: "Pan's Labyrinth", year: 2006, rated: 'R' }, 514 | { title: 'Heat', year: 1995, rated: 'R' }, 515 | ]) 516 | }) 517 | it('should handle text', async () => { 518 | let dsl = { 519 | key: 'root', 520 | type: 'group', 521 | schema: 'movies', 522 | join: 'and', 523 | children: [ 524 | { 525 | key: 'filter', 526 | type: 'text', 527 | field: 'title', 528 | value: 'game', 529 | operator: 'startsWith', 530 | }, 531 | getResultsNode(), 532 | ], 533 | } 534 | let result = await process(dsl) 535 | let results = _.find({ key: 'results' }, result.children).context.results 536 | let inspectedResults = _.map('title', results) 537 | expect(inspectedResults).toEqual([ 538 | 'Game of Thrones', 539 | 'Gamer', 540 | 'Game Night', 541 | ]) 542 | }) 543 | it('should handle date (exact)', async () => { 544 | let dsl = { 545 | key: 'root', 546 | type: 'group', 547 | schema: 'movies', 548 | join: 'and', 549 | children: [ 550 | { 551 | key: 'datefilter', 552 | type: 'date', 553 | field: 'released', 554 | from: '2013-01-01', 555 | }, 556 | getResultsNode(), 557 | ], 558 | } 559 | let result = await process(dsl) 560 | let results = _.find({ key: 'results' }, result.children).context.results 561 | let inspectedResults = _.map('year', results) 562 | 563 | expect(inspectedResults).toEqual([ 564 | 2011, 1977, 2012, 1995, 1999, 1981, 2008, 2006, 1995, 2004, 565 | ]) 566 | }) 567 | it('should handle date (range)', async () => { 568 | let dsl = { 569 | key: 'root', 570 | type: 'group', 571 | schema: 'currentYearMovies', 572 | join: 'and', 573 | children: [ 574 | { 575 | key: 'datefilter', 576 | type: 'date', 577 | field: 'released', 578 | range: 'thisCalendarYear', 579 | }, 580 | getResultsNode(), 581 | ], 582 | } 583 | let result = await process(dsl) 584 | let results = _.find({ key: 'results' }, result.children).context.results 585 | let inspectedResults = _.flow( 586 | _.map((x) => 
x.released.getFullYear()), 587 | _.uniq 588 | )(results) 589 | expect(inspectedResults).toEqual([now.getFullYear()]) 590 | }) 591 | it('should handle results sorting', async () => { 592 | let dsl = { 593 | key: 'root', 594 | type: 'group', 595 | schema: 'movies', 596 | join: 'and', 597 | children: [ 598 | { 599 | key: 'results', 600 | type: 'results', 601 | page: 1, 602 | pageSize: 1, 603 | sortField: 'year', 604 | }, 605 | ], 606 | } 607 | let result = await process(dsl) 608 | let results = _.find({ key: 'results' }, result.children).context.results 609 | let inspectedResults = _.map('year', results) 610 | expect(inspectedResults).toEqual([2013]) 611 | 612 | dsl.children[0].sortDir = 'asc' 613 | let ascResult = await process(dsl) 614 | let ascResults = _.find({ key: 'results' }, ascResult.children).context 615 | .results 616 | let ascInspectedResults = _.map('year', ascResults) 617 | expect(ascInspectedResults).toEqual([1915]) 618 | }) 619 | it('should handle subquery', async () => { 620 | let dsl = { 621 | key: 'root', 622 | type: 'group', 623 | schema: 'movies', 624 | join: 'and', 625 | children: [ 626 | { 627 | key: 'subquery', 628 | type: 'subquery', 629 | localField: 'title', 630 | foreignField: 'movie', 631 | search: { 632 | key: 'root', 633 | type: 'group', 634 | schema: 'favorites', 635 | join: 'and', 636 | children: [ 637 | { 638 | key: 'filter', 639 | type: 'facet', 640 | field: 'user', 641 | values: ['Adam'], 642 | }, 643 | ], 644 | }, 645 | }, 646 | getResultsNode(), 647 | ], 648 | } 649 | let result = await process(dsl) 650 | let results = result.children[1].context.results 651 | expect(_.map('title', results)).toEqual([ 652 | 'Game of Thrones', 653 | 'Star Trek: The Next Generation', 654 | 'The Matrix', 655 | ]) 656 | }) 657 | it('should handle subquery by saved search id', async () => { 658 | let dsl = { 659 | key: 'root', 660 | type: 'group', 661 | schema: 'movies', 662 | join: 'and', 663 | children: [ 664 | { 665 | key: 'subquery', 666 | type: 
'subquery', 667 | localField: 'title', 668 | foreignField: 'movie', 669 | searchId: 'AdamFavorites', 670 | }, 671 | getResultsNode(), 672 | ], 673 | } 674 | let result = await process(dsl) 675 | let results = result.children[1].context.results 676 | expect(_.map('title', results)).toEqual([ 677 | 'Game of Thrones', 678 | 'Star Trek: The Next Generation', 679 | 'The Matrix', 680 | ]) 681 | }) 682 | it('should handle pagination', async () => { 683 | let dsl = { 684 | key: 'results', 685 | type: 'results', 686 | pageSize: 2, 687 | schema: 'favorites', 688 | } 689 | let result = await process(dsl) 690 | let firstPage = result.context.results 691 | expect(_.map('movie', firstPage)).toEqual([ 692 | 'Game of Thrones', 693 | 'The Matrix', 694 | ]) 695 | result = await process({ ...dsl, page: 2 }) 696 | let secondPage = result.context.results 697 | expect(_.map('movie', secondPage)).toEqual([ 698 | 'Star Trek: The Next Generation', 699 | 'Game of Thrones', 700 | ]) 701 | }) 702 | it('should handle raw', async () => { 703 | let dsl = { 704 | key: 'raw', 705 | schema: 'movies', 706 | type: 'raw', 707 | filter: (x) => x.year > 2010, 708 | result: _.flow(_.map('year'), _.uniq), 709 | } 710 | let result = await process(dsl) 711 | expect(result.context.result).toEqual([2011, 2012, 2013]) 712 | }) 713 | it('should handle onResult', async () => { 714 | let dsl = { 715 | key: 'root', 716 | type: 'group', 717 | schema: 'movies', 718 | join: 'and', 719 | children: [ 720 | { 721 | key: 'ratings', 722 | type: 'facet', 723 | field: 'rated', 724 | values: ['R', 'PG-13'], 725 | }, 726 | getResultsNode(), 727 | ], 728 | } 729 | let results = [] 730 | let onResult = (x) => { 731 | results.push(x) 732 | } 733 | await process(dsl, { onResult }) 734 | expect(_.map('path', results)).toEqual([ 735 | ['root'], 736 | ['root', 'ratings'], 737 | ['root', 'results'], 738 | ]) 739 | }) 740 | }) 741 | }) 742 | -------------------------------------------------------------------------------- 
/src/provider-debug/index.js: -------------------------------------------------------------------------------- 1 | let DebugProvider = { 2 | groupCombinator: (group, filters) => ({ 3 | [group.join]: filters, 4 | }), 5 | runSearch(options, context, schema, filters, aggs) { 6 | let request = { where: filters, retrieve: aggs } 7 | context._meta.requests.push(request) 8 | return Promise.resolve(request) 9 | }, 10 | types: { 11 | default: { 12 | validContext: () => true, 13 | hasValue: () => true, 14 | }, 15 | test: { 16 | filter: (x) => ({ [`${x.field || x.key} (${x.type})`]: x.data }), 17 | result: (context, search) => 18 | search({ test: context.config }).then(() => ({ 19 | abc: 123, 20 | })), 21 | }, 22 | results: { 23 | result: (context, search) => 24 | search({ results: context.config }).then(() => ({ 25 | results: [], 26 | })), 27 | }, 28 | }, 29 | } 30 | 31 | export default DebugProvider 32 | -------------------------------------------------------------------------------- /src/provider-memory/date.js: -------------------------------------------------------------------------------- 1 | import _ from 'lodash/fp.js' 2 | import moment from 'moment-timezone' 3 | import datemath from '@elastic/datemath' 4 | 5 | let dateMin = -8640000000000000 6 | let dateMax = 8640000000000000 7 | 8 | let getStartOfQuarter = (quarterOffset, timezone) => { 9 | let quarter = moment().tz(timezone).quarter() + quarterOffset 10 | return moment().tz(timezone).quarter(quarter).startOf('quarter') 11 | } 12 | 13 | let getEndOfQuarter = (date) => moment(date).add(1, 'Q').subtract(1, 'ms') 14 | 15 | let quarterToOffset = { 16 | thisCalendarQuarter: 0, 17 | lastCalendarQuarter: -1, 18 | nextCalendarQuarter: 1, 19 | } 20 | 21 | // https://www.elastic.co/guide/en/elasticsearch/reference/7.x/common-options.html#date-math 22 | let rangeToDatemath = { 23 | last1Hour: { from: 'now-1h', to: 'now' }, 24 | last1Day: { from: 'now-1d', to: 'now' }, 25 | last3Days: { from: 'now-3d', to: 'now' }, 26 | 
// NOTE(review): this object literal (rangeToDatemath) opens above this chunk.
// Keys are rolling-range names; values are Elastic-style datemath from/to
// expressions. `/d`, `/M`, `/y` round down to the start of the period, and
// `-1ms` pulls an end expression back to the last millisecond of the prior
// period so the range end is effectively inclusive.
  last7Days: { from: 'now-7d', to: 'now' },
  last30Days: { from: 'now-30d', to: 'now' },
  last90Days: { from: 'now-90d', to: 'now' },
  last180Days: { from: 'now-180d', to: 'now' },
  last12Months: { from: 'now/d-12M', to: 'now' },
  last15Months: { from: 'now/d-15M', to: 'now' },
  last18Months: { from: 'now/d-18M', to: 'now' },
  last24Months: { from: 'now/d-24M', to: 'now' },
  last36Months: { from: 'now/d-36M', to: 'now' },
  last48Months: { from: 'now/d-48M', to: 'now' },
  last60Months: { from: 'now/d-60M', to: 'now' },
  lastCalendarMonth: { from: 'now-1M/M', to: 'now/M-1ms' },
  lastCalendarYear: { from: 'now-1y/y', to: 'now/y-1ms' },
  thisCalendarMonth: { from: 'now/M', to: 'now+1M/M-1ms' },
  thisCalendarYear: { from: 'now/y', to: 'now+1y/y-1ms' },
  nextCalendarMonth: { from: 'now+1M/M', to: 'now+2M/M-1ms' },
  nextCalendarYear: { from: 'now+1y/y', to: 'now+2y/y-1ms' },
  next30Days: { from: 'now/d', to: 'now/d+30d-1ms' },
  next60Days: { from: 'now/d', to: 'now/d+60d-1ms' },
  next90Days: { from: 'now/d', to: 'now/d+90d-1ms' },
  next6Months: { from: 'now/d', to: 'now/d+6M-1ms' },
  next12Months: { from: 'now/d', to: 'now/d+12M-1ms' },
  next18Months: { from: 'now/d', to: 'now/d+18M-1ms' },
  next24Months: { from: 'now/d', to: 'now/d+24M-1ms' },
  next36Months: { from: 'now/d', to: 'now/d+36M-1ms' },
  // Open-ended ranges: the empty expression falls back to dateMin/dateMax
  // inside filter() below.
  allPastDates: { from: '', to: 'now/d-1ms' },
  allFutureDates: { from: 'now/d', to: '' },
}

// Parse a datemath expression; when it rounds to a period boundary (contains
// `/`), re-anchor the computed moment in the user's timezone while keeping
// the local wall-clock time (moment's tz(..., true)).
let parseAndShift = (exp, timezone) => {
  let computed = datemath.parse(exp)
  // Replace the server timezone with the user's timezone if the expression
  // is relative to the start of a day, month, year, etc.
  return /\//.test(exp) ? moment(computed).tz(timezone, true) : computed
}

// Resolve a rolling range name to concrete { from, to } values. Quarter
// ranges go through the quarter helpers (defined above this chunk);
// everything else is looked up in rangeToDatemath.
let rollingRangeToDates = (range, timezone) => {
  // lodash/fp has(path, obj): true when `range` is a key of quarterToOffset
  if (_.has(range, quarterToOffset)) {
    let from = getStartOfQuarter(quarterToOffset[range], timezone)
    let to = getEndOfQuarter(from)
    return { from, to }
  } else {
    let expressions = rangeToDatemath[range]
    let from = parseAndShift(expressions.from, timezone)
    let to = parseAndShift(expressions.to, timezone)
    return { from, to }
  }
}

// Converters from a raw date value to the comparable form used by the filter.
// Falsy inputs pass through unchanged (`x &&`) instead of being coerced.
let dateTypeToFormatFn = {
  date: (x) => x && moment.utc(x).toDate(),
  unix: (x) => x && moment.utc(x).unix(),
  timestamp: (x) => x && new Date(x).getTime(),
}

// A date node only constrains the search when it has a range other than
// 'allDates'; an 'exact' range additionally needs at least one endpoint.
let hasValue = ({ from, to, range }) =>
  range &&
  range !== 'allDates' &&
  ((range === 'exact' && (from || to)) || range !== 'exact')

export default {
  hasValue,
  // NOTE: timezone is only used for rolling dates
  filter({
    field,
    range,
    dateType = 'timestamp',
    timezone = 'UTC',
    ...context
  }) {
    // 'exact' takes from/to straight off the node; rolling ranges are computed
    let { from, to } =
      range === 'exact' ? context : rollingRangeToDates(range, timezone)

    let format = dateTypeToFormatFn[dateType]

    // Open endpoints fall back to the sentinel min/max dates (defined above)
    if (!from) {
      from = dateMin
    }
    if (!to) {
      to = dateMax
    }

    // NOTE(review): _.inRange is half-open — the upper bound is exclusive.
    // The `-1ms` datemath endings appear to account for that; confirm 'exact'
    // ranges are meant to exclude the exact `to` instant.
    return _.flow(_.get(field), format, _.inRange(format(from), format(to)))
  },
}

--------------------------------------------------------------------------------
/src/provider-memory/exampleTypes.js:
--------------------------------------------------------------------------------
import _ from 'lodash/fp.js'
import F from 'futil'
import date from './date.js'
import results from './results.js'

// Example types for the in-memory provider. Each type exposes some of:
//   hasValue(node)        -> does this node constrain the search?
//   filter(node)          -> predicate applied to each record
//   result(node, search)  -> aggregation over the filtered records
export default () => ({
  default: {
    validContext: () => true,
    hasValue: () => true,
  },
  date,
  results,
  number: {
    hasValue: (node) => F.isNotNil(node.min) || F.isNotNil(node.max),
    // NOTE(review): _.inRange's upper bound is exclusive, so a record exactly
    // equal to `max` is filtered out — confirm that's intended.
    filter: ({ field, min = -Infinity, max = Infinity }) =>
      _.conforms({
        [field]: _.inRange(min, max),
      }),
  },
  exists: {
    hasValue: ({ value }) => _.isBoolean(value),
    filter: ({ field, value }) =>
      // No _.conforms here since it does not get invoked on props which do not exist
      _.flow(_.get(field), value ? F.isNotNil : _.isNil),
  },
  bool: {
    hasValue: ({ value }) => _.isBoolean(value),
    filter: ({ field, value }) =>
      _.conforms({
        [field]: _.isEqual(value),
      }),
  },
  facet: {
    hasValue: (node) => _.size(node.values),
    // include: record's (array-cast) field shares at least one value with
    // `values`; exclude: field has at least one value NOT in `values`.
    // The `_` placeholder flips differenceWith's data argument into place.
    filter: ({ field, values, mode = 'include' }) =>
      _.flow(
        _.get(field),
        _.castArray,
        mode === 'include'
          ? _.intersectionWith(_.isEqual, values)
          : _.differenceWith(_.isEqual, _, values),
        _.negate(_.isEmpty)
      ),
    // Counts distinct field values (JSON round-trip so non-string values keep
    // identity), optionally word-filtered, ordered by count descending.
    result({ field, size = 10, optionsFilter }, search) {
      let options = search(
        _.flow(
          _.flatMap(field),
          _.reject(_.isUndefined),
          _.map(JSON.stringify),
          optionsFilter ? _.filter(F.matchAnyWord(optionsFilter)) : _.identity,
          _.countBy(_.identity),
          _.toPairs,
          _.map(([name, count]) => ({ name: JSON.parse(name), count })),
          _.orderBy('count', 'desc')
        )
      )
      return {
        cardinality: _.size(options),
        // size = 0 (or falsy) returns every option
        options: size ? _.take(size, options) : options,
      }
    },
  },
  text: {
    hasValue: (node) => node.value || _.size(node.values),
    filter({ join = 'all', values, value, operator = 'containsWord', field }) {
      // Build the regex source for the given operator around one search term.
      // NOTE(review): terms are not regex-escaped, so special characters in
      // user input (e.g. `(`, `+`) change the match semantics — verify input
      // is sanitized upstream.
      let regexMap = (operator, val) =>
        ({
          containsWord: val,
          startsWith: `^${val}`,
          wordStartsWith: `\\b${val}`,
          endsWith: `${val}$`,
          wordEndsWith: `${val}\\b`,
          is: `^${val}$`,
          containsExact: `\\b${val}\\b`,
        }[operator])

      // One case-insensitive predicate per search term
      let conditions = _.map(
        (nodeValue) => (recordValue) =>
          RegExp(regexMap(operator, nodeValue), 'i').test(recordValue),
        values || [value]
      )

      // How the per-term predicates combine
      let combinator = {
        all: _.overEvery,
        any: _.overSome,
        none: F.overNone,
      }[join]

      return _.conforms({
        [field]: combinator(conditions),
      })
    },
  },
  statistical: {
    // Summary statistics over the numeric `field` of the filtered records
    result: ({ field }, search) => ({
      count: search(_.size),
      avg: search(_.meanBy(field)),
      max: search(_.maxBy(field)),
      min: search(_.minBy(field)),
      sum: search(_.sumBy(field)),
    }),
  },
})

--------------------------------------------------------------------------------
/src/provider-memory/index.js:
--------------------------------------------------------------------------------
import _ from 'lodash/fp.js'
import F from 'futil'

let MemoryProvider = {
  // Combine child filter predicates per the group's join
  // ('and' | 'or' | 'not'), defaulting to 'and'
  groupCombinator: (group, filters) =>
    ({
      and: _.overEvery,
      or: _.overSome,
      not: F.overNone,
    }[group.join || 'and'](filters)),
  // Run a search over the in-memory records: apply the combined filter
  // predicate, then the aggregation function
  runSearch: (options, node, schema, filters, aggs) =>
    _.flow(_.filter(filters), aggs)(schema.memory.records),
}

export default MemoryProvider

--------------------------------------------------------------------------------
/src/provider-memory/results.js:
--------------------------------------------------------------------------------
import _ from 'lodash/fp.js'

// Results example type: a paginated, sorted slice of the matching records.
export default {
  result: (
    { pageSize = 10, page = 1, sortField, sortDir = 'desc' },
    search
  ) => ({
    // Total match count before pagination
    totalRecords: search(_.size),
    results: search(
      _.flow(
        _.orderBy(sortField, sortDir),
        // pageSize <= 0 means "no paging": return every sorted record
        pageSize > 0
          ? _.slice((page - 1) * pageSize, page * pageSize)
          : _.identity
      )
    ),
  }),
}

--------------------------------------------------------------------------------
/src/utils.js:
--------------------------------------------------------------------------------
import _ from 'lodash/fp.js'
import F from 'futil'

// Children of a group node may live under any of these (legacy) properties
let getChildren = (x) => F.cascade(['children', 'items', 'data.items'], x)

export let Tree = F.tree(getChildren)

// Recursively combine the filters relevant to the node at `Path` — i.e.
// everything except the node itself and any OR siblings along the path.
// Returns undefined when nothing applies.
export let getRelevantFilters = _.curry((groupCombinator, Path, group) => {
  if (!_.includes(group.key, Path))
    // If we're not in the path, it doesn't matter what the rest of it is
    Path = []

  let path = Path.slice(1) // pop off this level
  let currentKey = path[0]

  let relevantChildren = getChildren(group)
  // Pull .filter if it's a leaf node
  if (!relevantChildren) return group._meta.filter
  // Exclude sibling criteria in OR groups where the group is in the paths (meaning only exclude ORs that are in relation via path)
  if (group.join === 'or' && currentKey)
    relevantChildren = _.filter({ key: currentKey }, relevantChildren)
  // Exclude self
  relevantChildren = _.reject(
    (node) => node.key === currentKey && !getChildren(node),
    relevantChildren
  )

  let relevantFilters = _.compact(
    _.map(getRelevantFilters(groupCombinator, path), relevantChildren)
  )
  if (!relevantFilters.length) return
  // A single filter collapses to itself — except under 'not', which must
  // keep its negating combinator
  if (relevantFilters.length === 1 && group.join !== 'not')
    return relevantFilters[0]

  return groupCombinator(group, _.compact(relevantFilters))
})

// Resolve the provider for a node: explicit node.provider first, then the
// schema's `provider` property, then the first provider key present on the
// schema. Throws when nothing matches.
export let getProvider = _.curry(
  (providers, schemas, node) =>
    providers[
      node.provider ||
        _.get('provider', schemas[node.schema]) ||
        F.firstCommonKey(providers, schemas[node.schema])
    ] ||
    F.throws(
      new Error(
        `No Provider found ${node.schema} and was not overridden for ${node.key}`
      )
    )
)

// Run a type function (hasValue / filter / result / ...) for a node, falling
// back to the provider's `default` type, then to a no-op.
export let runTypeFunction = (config) => async (name, node, search) => {
  let schema = config.getSchema(node.schema)
  let fn = F.cascade(
    [`${node.type}.${name}`, `default.${name}`],
    config.getProvider(node).types,
    _.noop
  )
  try {
    // `search` is only passed for result-style functions
    return await (search
      ? fn(node, search, schema, config)
      : fn(node, schema, config))
  } catch (error) {
    // NOTE(review): throws a plain object rather than an Error, so the stack
    // trace is not propagated — callers appear to rely on the
    // { message, error, node } shape; confirm before changing.
    throw {
      message: `Failed running search for ${node.type} (${
        node.key
      }) at ${name}: ${_.getOr(error, 'message', error)}`,
      error,
      node,
    }
  }
}

// _.extendAll that mutates its first argument in place
let extendAllOn = _.extendAll.convert({ immutable: false })

// Initialize a node during the walk. The destructured third argument is
// presumably the ancestor stack from Tree.walkAsync (parent first) — TODO
// confirm against futil's walk signature.
let initNode = (node, i, [{ schema, _meta: { path = [] } = {} } = {}]) => {
  // Add schema, _meta path and requests
  F.defaultsOn(
    { schema, _meta: { requests: [], path: path.concat([node.key]) } },
    node
  )
  // Flatten legacy fields
  extendAllOn([node, node.config, node.data])
}

// Walk the whole group tree, computing hasValue for every node and (unless
// the node is contextOnly) attaching its filter.
export let attachFilters = (runTypeFunction) => async (group) =>
  Tree.walkAsync(async (node, ...args) => {
    initNode(node, ...args)
    node._meta.hasValue = await runTypeFunction('hasValue', node)
    if (node._meta.hasValue && !node.contextOnly) {
      node._meta.filter = await runTypeFunction('filter', node)
    }
  })(group)

--------------------------------------------------------------------------------
/src/utils.test.js:
-------------------------------------------------------------------------------- 1 | import { getProvider, getRelevantFilters } from './utils.js' 2 | import DebugProvider from './provider-debug/index.js' 3 | 4 | describe('Utils', () => { 5 | // Not handled - missing schema, schema with no matching provider 6 | describe('getProvider', () => { 7 | let Providers = { 8 | provider1: { 9 | a: 1, 10 | }, 11 | provider2: { 12 | a: 2, 13 | }, 14 | } 15 | let Schemas = { 16 | schema1: { 17 | randomProperty: 6, 18 | provider1: { 19 | random: 'stuff', 20 | }, 21 | provider2: { 22 | random: 'other stuff', 23 | }, 24 | }, 25 | schema2: { 26 | randomProperty: 6, 27 | provider2: { 28 | random: 'stuff', 29 | }, 30 | }, 31 | schema3: { 32 | randomProperty: 6, 33 | provider1: { 34 | random: 'stuff', 35 | }, 36 | provider2: { 37 | random: 'other stuff', 38 | }, 39 | provider: 'provider2', 40 | }, 41 | } 42 | let f = getProvider(Providers, Schemas) 43 | it('should support explicit providers', () => { 44 | let provider = f({ 45 | random: 'stuff', 46 | schema: 'schema2', 47 | provider: 'provider1', 48 | }) 49 | expect(provider).toBe(Providers.provider1) 50 | }) 51 | it('should get first provider on schema', () => { 52 | let provider = f({ 53 | schema: 'schema1', 54 | }) 55 | expect(provider).toBe(Providers.provider1) 56 | }) 57 | it('should get the provider specified on schema', () => { 58 | let provider = f({ 59 | schema: 'schema3', 60 | }) 61 | expect(provider).toBe(Providers.provider2) 62 | }) 63 | }) 64 | describe('getRelevantFilters', () => { 65 | it('should handle basic sibling', () => { 66 | let result = getRelevantFilters( 67 | DebugProvider.groupCombinator, 68 | ['a', 'b', 'c'], 69 | { 70 | key: 'a', 71 | join: 'and', 72 | children: [ 73 | { 74 | key: 'b', 75 | join: 'and', 76 | children: [ 77 | { 78 | key: 'c', 79 | }, 80 | { 81 | key: 'd', 82 | _meta: { 83 | filter: 'test', 84 | }, 85 | }, 86 | ], 87 | }, 88 | ], 89 | } 90 | ) 91 | expect(result).toEqual('test') 92 | }) 93 | 
it('should handle basic sibling, but with items instead of children', () => { 94 | let result = getRelevantFilters( 95 | DebugProvider.groupCombinator, 96 | ['a', 'b', 'c'], 97 | { 98 | key: 'a', 99 | join: 'and', 100 | items: [ 101 | { 102 | key: 'b', 103 | join: 'and', 104 | items: [ 105 | { 106 | key: 'c', 107 | }, 108 | { 109 | key: 'd', 110 | _meta: { 111 | filter: 'test', 112 | }, 113 | }, 114 | ], 115 | }, 116 | ], 117 | } 118 | ) 119 | expect(result).toEqual('test') 120 | }) 121 | it('should handle two siblings', () => { 122 | let result = getRelevantFilters( 123 | DebugProvider.groupCombinator, 124 | ['a', 'b', 'c'], 125 | { 126 | key: 'a', 127 | join: 'and', 128 | children: [ 129 | { 130 | key: 'b', 131 | join: 'and', 132 | children: [ 133 | { 134 | key: 'c', 135 | }, 136 | { 137 | key: 'd', 138 | _meta: { 139 | filter: 'test', 140 | }, 141 | }, 142 | { 143 | key: 'e', 144 | _meta: { 145 | filter: 'test2', 146 | }, 147 | }, 148 | ], 149 | }, 150 | ], 151 | } 152 | ) 153 | expect(result).toEqual({ 154 | and: ['test', 'test2'], 155 | }) 156 | }) 157 | it('should handle sibling a level above and collapse', () => { 158 | let result = getRelevantFilters( 159 | DebugProvider.groupCombinator, 160 | ['a', 'b', 'c'], 161 | { 162 | key: 'a', 163 | join: 'and', 164 | children: [ 165 | { 166 | key: 'b', 167 | join: 'and', 168 | children: [ 169 | { 170 | key: 'c', 171 | }, 172 | { 173 | key: 'd', 174 | _meta: { 175 | filter: 'test', 176 | }, 177 | }, 178 | ], 179 | }, 180 | { 181 | key: 'blah', 182 | _meta: { 183 | filter: 'blah', 184 | }, 185 | }, 186 | ], 187 | } 188 | ) 189 | expect(result).toEqual({ 190 | and: ['test', 'blah'], 191 | }) 192 | }) 193 | it('should handle ORs', () => { 194 | let result = getRelevantFilters( 195 | DebugProvider.groupCombinator, 196 | ['a', 'b', 'c'], 197 | { 198 | key: 'a', 199 | join: 'and', 200 | children: [ 201 | { 202 | key: 'b', 203 | join: 'or', 204 | children: [ 205 | { 206 | key: 'c', 207 | }, 208 | { 209 | key: 'd', 210 | 
_meta: { 211 | filter: 'test', 212 | }, 213 | }, 214 | ], 215 | }, 216 | { 217 | key: 'blah', 218 | _meta: { 219 | filter: 'blah', 220 | }, 221 | }, 222 | ], 223 | } 224 | ) 225 | expect(result).toEqual('blah') 226 | }) 227 | it('should not collapse NOT', () => { 228 | let result = getRelevantFilters( 229 | DebugProvider.groupCombinator, 230 | ['a', 'b', 'c'], 231 | { 232 | key: 'a', 233 | join: 'and', 234 | children: [ 235 | { 236 | key: 'b', 237 | join: 'and', 238 | children: [ 239 | { 240 | key: 'c', 241 | }, 242 | { 243 | key: 'd', 244 | _meta: { 245 | filter: 'test', 246 | }, 247 | }, 248 | ], 249 | }, 250 | { 251 | key: 'blah', 252 | join: 'not', 253 | children: [ 254 | { 255 | key: 'asdf', 256 | _meta: { 257 | filter: 'blah', 258 | }, 259 | }, 260 | ], 261 | }, 262 | ], 263 | } 264 | ) 265 | expect(result).toEqual({ 266 | and: [ 267 | 'test', 268 | { 269 | not: ['blah'], 270 | }, 271 | ], 272 | }) 273 | }) 274 | 275 | it('should handle nested OR', () => { 276 | let result = getRelevantFilters( 277 | DebugProvider.groupCombinator, 278 | ['root', 'analysis', 'results'], 279 | { 280 | key: 'root', 281 | join: 'and', 282 | children: [ 283 | { 284 | key: 'criteria', 285 | join: 'and', 286 | children: [ 287 | { 288 | key: 'cgnya6ja8ys10iwl8fr', 289 | _meta: { 290 | filter: 'cable', 291 | }, 292 | }, 293 | { 294 | key: 'criteria', 295 | join: 'or', 296 | children: [ 297 | { 298 | key: '8ilrqpm1je3m8ed5z5mi', 299 | _meta: { 300 | filter: 'agency:DOD', 301 | }, 302 | }, 303 | { 304 | key: 'e0sj1aby2bh3f168ncdi', 305 | _meta: { 306 | filter: 'agency:FL', 307 | }, 308 | }, 309 | ], 310 | _meta: { 311 | filter: 'test2-3', 312 | }, 313 | }, 314 | ], 315 | }, 316 | { 317 | type: 'group', 318 | key: 'analysis', 319 | join: 'and', 320 | children: [ 321 | { 322 | key: 'results', 323 | }, 324 | ], 325 | }, 326 | ], 327 | } 328 | ) 329 | expect(result).toEqual({ 330 | and: [ 331 | 'cable', 332 | { 333 | or: ['agency:DOD', 'agency:FL'], 334 | }, 335 | ], 336 | }) 337 | }) 338 | 
it('should handle deep nested OR', () => { 339 | let result = getRelevantFilters( 340 | DebugProvider.groupCombinator, 341 | ['root', 'analysisOR', 'analysis', 'results'], 342 | { 343 | key: 'root', 344 | join: 'and', 345 | children: [ 346 | { 347 | key: 'criteria', 348 | join: 'and', 349 | children: [ 350 | { 351 | key: 'cgnya6ja8ys10iwl8fr', 352 | _meta: { 353 | filter: 'cable', 354 | }, 355 | }, 356 | { 357 | key: 'criteria', 358 | join: 'or', 359 | children: [ 360 | { 361 | key: '8ilrqpm1je3m8ed5z5mi', 362 | _meta: { 363 | filter: 'agency:DOD', 364 | }, 365 | }, 366 | { 367 | key: 'e0sj1aby2bh3f168ncdi', 368 | _meta: { 369 | filter: 'agency:FL', 370 | }, 371 | }, 372 | ], 373 | _meta: { 374 | filter: 'test2-3', 375 | }, 376 | }, 377 | ], 378 | }, 379 | { 380 | key: 'analysisOR', 381 | join: 'or', 382 | children: [ 383 | { 384 | type: 'group', 385 | key: 'analysis', 386 | join: 'and', 387 | children: [ 388 | { 389 | key: 'results', 390 | }, 391 | ], 392 | }, 393 | { 394 | key: 'asdf', 395 | _meta: { 396 | filter: 'res:FL', 397 | }, 398 | }, 399 | ], 400 | }, 401 | ], 402 | } 403 | ) 404 | expect(result).toEqual({ 405 | and: [ 406 | 'cable', 407 | { 408 | or: ['agency:DOD', 'agency:FL'], 409 | }, 410 | ], 411 | }) 412 | }) 413 | it('should handle a top level OR', () => { 414 | let result = getRelevantFilters( 415 | DebugProvider.groupCombinator, 416 | ['root', 'analysisOR', 'analysis', 'results'], 417 | { 418 | key: 'root', 419 | join: 'or', 420 | children: [ 421 | { 422 | key: 'criteria', 423 | join: 'and', 424 | children: [ 425 | { 426 | key: 'cgnya6ja8ys10iwl8fr', 427 | _meta: { 428 | filter: 'cable', 429 | }, 430 | }, 431 | { 432 | key: 'criteria', 433 | join: 'or', 434 | children: [ 435 | { 436 | key: '8ilrqpm1je3m8ed5z5mi', 437 | _meta: { 438 | filter: 'agency:DOD', 439 | }, 440 | }, 441 | { 442 | key: 'e0sj1aby2bh3f168ncdi', 443 | _meta: { 444 | filter: 'agency:FL', 445 | }, 446 | }, 447 | ], 448 | _meta: { 449 | filter: 'test2-3', 450 | }, 451 | }, 452 | 
], 453 | }, 454 | { 455 | key: 'analysisOR', 456 | join: 'and', 457 | children: [ 458 | { 459 | type: 'group', 460 | key: 'analysis', 461 | join: 'and', 462 | children: [ 463 | { 464 | key: 'results', 465 | }, 466 | ], 467 | }, 468 | { 469 | key: 'asdf', 470 | _meta: { 471 | filter: 'res:FL', 472 | }, 473 | }, 474 | ], 475 | }, 476 | ], 477 | } 478 | ) 479 | expect(result).toEqual('res:FL') 480 | }) 481 | it('should handle nested AND', () => { 482 | let result = getRelevantFilters( 483 | DebugProvider.groupCombinator, 484 | ['root', 'criteria', 'criteria2', 'dod'], 485 | { 486 | key: 'root', 487 | join: 'and', 488 | children: [ 489 | { 490 | key: 'criteria', 491 | join: 'and', 492 | children: [ 493 | { 494 | key: 'cgnya6ja8ys10iwl8fr', 495 | _meta: { 496 | filter: 'cable', 497 | }, 498 | }, 499 | { 500 | key: 'criteria2', 501 | join: 'or', 502 | children: [ 503 | { 504 | key: 'dod', 505 | _meta: { 506 | filter: 'agency:DOD', 507 | }, 508 | }, 509 | { 510 | key: 'fl', 511 | _meta: { 512 | filter: 'agency:FL', 513 | }, 514 | }, 515 | ], 516 | _meta: { 517 | filter: 'test2-3', 518 | }, 519 | }, 520 | ], 521 | }, 522 | { 523 | type: 'group', 524 | key: 'analysis', 525 | join: 'and', 526 | children: [ 527 | { 528 | key: 'results', 529 | _meta: { 530 | filter: 'result', 531 | }, 532 | }, 533 | ], 534 | }, 535 | ], 536 | } 537 | ) 538 | expect(result).toEqual({ 539 | and: ['cable', 'result'], 540 | }) 541 | }) 542 | }) 543 | }) 544 | --------------------------------------------------------------------------------