├── .editorconfig
├── .gitignore
├── .istanbul.yml
├── .npmignore
├── .travis.yml
├── CHANGELOG.md
├── LICENSE
├── README.md
├── docs
│   ├── common-patterns.md
│   ├── guide.md
│   └── index.md
├── lib
│   └── index.js
├── package-lock.json
├── package.json
├── tests
│   ├── counts-js-to-batchloader.test.js
│   ├── counts-no-batch-to-batch.test.js
│   ├── get-results-by-key.text.js
│   ├── get-unique-keys.test.js
│   ├── helpers
│   │   └── make-services.js
│   ├── loader-factory.test.js
│   ├── loader-large-populate.test.js
│   ├── loader-no-loader.test.js
│   ├── loader-small-populate.test.js
│   ├── make-services-await.test.js
│   └── make-services.test.js
└── types
    ├── index.d.ts
    ├── tests.ts
    └── tsconfig.json
/.editorconfig:
--------------------------------------------------------------------------------
1 | # http://editorconfig.org
2 | root = true
3 |
4 | [*]
5 | indent_style = space
6 | indent_size = 2
7 | end_of_line = lf
8 | charset = utf-8
9 | trim_trailing_whitespace = true
10 | insert_final_newline = true
11 |
12 | [*.md]
13 | trim_trailing_whitespace = false
14 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | .DS_Store
2 |
3 | # Logs
4 | logs
5 | *.log
6 |
7 | # Runtime data
8 | pids
9 | *.pid
10 | *.seed
11 |
12 | # Directory for instrumented libs generated by jscoverage/JSCover
13 | lib-cov
14 |
15 | # Coverage directory used by tools like istanbul
16 | coverage
17 |
18 | # Grunt intermediate storage (http://gruntjs.com/creating-plugins#storing-task-files)
19 | .grunt
20 |
21 | # Compiled binary addons (http://nodejs.org/api/addons.html)
22 | build/Release
23 |
24 | # Dependency directory
25 | # Commenting this out is preferred by some people, see
26 | # https://www.npmjs.org/doc/misc/npm-faq.html#should-i-check-my-node_modules-folder-into-git-
27 | node_modules
28 |
29 | # Users Environment Variables
30 | .lock-wscript
31 |
32 | dist/
33 | .idea/
34 |
--------------------------------------------------------------------------------
/.istanbul.yml:
--------------------------------------------------------------------------------
1 | verbose: false
2 | instrumentation:
3 | root: ./lib/
4 | include-all-sources: true
5 | reporting:
6 | print: summary
7 | reports:
8 | - html
9 | - text
10 | - lcov
11 | watermarks:
12 | statements: [50, 80]
13 | lines: [50, 80]
14 | functions: [50, 80]
15 | branches: [50, 80]
16 |
--------------------------------------------------------------------------------
/.npmignore:
--------------------------------------------------------------------------------
1 | .editorconfig
2 | .jshintrc
3 | .travis.yml
4 | .istanbul.yml
5 | .babelrc
6 | .idea/
7 | .vscode/
8 | test/
9 | coverage/
10 | .github/
11 |
--------------------------------------------------------------------------------
/.travis.yml:
--------------------------------------------------------------------------------
1 | language: node_js
2 | node_js:
3 | - 'node'
4 | - '6'
5 | addons:
6 | code_climate:
7 | repo_token: 'your repo token'
8 | notifications:
9 | email: false
10 | before_script:
11 | - npm install -g codeclimate-test-reporter
12 | after_script:
13 | - codeclimate-test-reporter < coverage/lcov.info
14 |
--------------------------------------------------------------------------------
/CHANGELOG.md:
--------------------------------------------------------------------------------
1 | # Change Log
2 |
3 | ## [v0.3.5](https://github.com/feathers-plus/batch-loader/tree/v0.3.5) (2019-01-02)
4 | [Full Changelog](https://github.com/feathers-plus/batch-loader/compare/v0.3.4...v0.3.5)
5 |
6 | **Merged pull requests:**
7 |
8 | - Fixed bug in custom version of makeCallingParams. [\#8](https://github.com/feathers-plus/batch-loader/pull/8) ([eddyystop](https://github.com/eddyystop))
9 |
10 | ## [v0.3.4](https://github.com/feathers-plus/batch-loader/tree/v0.3.4) (2018-11-24)
11 | [Full Changelog](https://github.com/feathers-plus/batch-loader/compare/v0.3.3...v0.3.4)
12 |
13 | **Closed issues:**
14 |
15 | - Creating a batchloader for belongsToMany relationships [\#6](https://github.com/feathers-plus/batch-loader/issues/6)
16 |
17 | **Merged pull requests:**
18 |
19 | - Add typings, tests and necessary infrastructure [\#5](https://github.com/feathers-plus/batch-loader/pull/5) ([j2L4e](https://github.com/j2L4e))
20 |
21 | ## [v0.3.3](https://github.com/feathers-plus/batch-loader/tree/v0.3.3) (2018-04-18)
22 | [Full Changelog](https://github.com/feathers-plus/batch-loader/compare/v0.3.2...v0.3.3)
23 |
24 | **Implemented enhancements:**
25 |
26 | - Revert old changes and apply new strategy to add pagination support. [\#4](https://github.com/feathers-plus/batch-loader/pull/4) ([otang](https://github.com/otang))
27 |
28 | ## [v0.3.2](https://github.com/feathers-plus/batch-loader/tree/v0.3.2) (2018-04-13)
29 | [Full Changelog](https://github.com/feathers-plus/batch-loader/compare/v0.3.1...v0.3.2)
30 |
31 | ## [v0.3.1](https://github.com/feathers-plus/batch-loader/tree/v0.3.1) (2018-04-13)
32 | [Full Changelog](https://github.com/feathers-plus/batch-loader/compare/v0.3.0...v0.3.1)
33 |
34 | **Fixed bugs:**
35 |
36 | - Fix README.md link to documentation page [\#2](https://github.com/feathers-plus/batch-loader/pull/2) ([leedongwei](https://github.com/leedongwei))
37 |
38 | **Merged pull requests:**
39 |
40 | - Add support for handling page object responses for services using pagination. [\#3](https://github.com/feathers-plus/batch-loader/pull/3) ([otang](https://github.com/otang))
41 |
42 | ## [v0.3.0](https://github.com/feathers-plus/batch-loader/tree/v0.3.0) (2017-11-14)
43 | [Full Changelog](https://github.com/feathers-plus/batch-loader/compare/v0.2.1...v0.3.0)
44 |
45 | ## [v0.2.1](https://github.com/feathers-plus/batch-loader/tree/v0.2.1) (2017-11-14)
46 | [Full Changelog](https://github.com/feathers-plus/batch-loader/compare/v0.2.0...v0.2.1)
47 |
48 | ## [v0.2.0](https://github.com/feathers-plus/batch-loader/tree/v0.2.0) (2017-11-13)
49 | [Full Changelog](https://github.com/feathers-plus/batch-loader/compare/v0.1.2...v0.2.0)
50 |
51 | **Closed issues:**
52 |
53 | - batch-loader on the browser [\#1](https://github.com/feathers-plus/batch-loader/issues/1)
54 |
55 | ## [v0.1.2](https://github.com/feathers-plus/batch-loader/tree/v0.1.2) (2017-11-06)
56 | [Full Changelog](https://github.com/feathers-plus/batch-loader/compare/v0.1.1...v0.1.2)
57 |
58 | ## [v0.1.1](https://github.com/feathers-plus/batch-loader/tree/v0.1.1) (2017-11-06)
59 | [Full Changelog](https://github.com/feathers-plus/batch-loader/compare/v0.1.0...v0.1.1)
60 |
61 | ## [v0.1.0](https://github.com/feathers-plus/batch-loader/tree/v0.1.0) (2017-11-06)
62 |
63 |
64 | \* *This Change Log was automatically generated by [github_changelog_generator](https://github.com/skywinder/Github-Changelog-Generator)*
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | The MIT License (MIT)
2 |
3 | Copyright (c) 2017 Feathers
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
23 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # feathers-ecosystem/batch-loader
2 |
3 |
5 |
6 | > Reduce requests to backend services by batching calls and caching records.
7 |
8 | ## Installation
9 |
10 | ```
11 | npm install @feathers-plus/batch-loader --save
12 | ```
13 |
14 | ## Documentation
15 |
16 | Please refer to the [batch-loader documentation](./docs/index.md) for more details.
17 |
18 | ## Basic Example
19 |
20 | Use the `loaderFactory` static method to create a basic batch-loader. This is simply syntactic sugar for manually creating a batch-loader. The "Basic Example" and "Complete Example" sections create the same batch-loader.
21 |
22 | ```js
23 | const BatchLoader = require("@feathers-plus/batch-loader");
24 |
25 | const usersBatchLoader = BatchLoader.loaderFactory(
26 | app.service("users"),
27 | "id",
28 | false
29 | );
30 |
31 | app
32 | .service("comments")
33 | .find()
34 | .then((comments) =>
35 | Promise.all(
36 | comments.map((comment) => {
37 | // Attach user record
38 | return usersBatchLoader
39 | .load(comment.userId)
40 | .then((user) => (comment.userRecord = user));
41 | })
42 | )
43 | );
44 | ```
45 |
46 | ## Complete Example
47 |
48 | Use the `BatchLoader` class to create more complex loaders. These loaders can call other services, query databases directly, or even call third-party services. This example manually implements the same loader created with the `loaderFactory` above.
49 |
50 | ```js
51 | const BatchLoader = require("@feathers-plus/batch-loader");
52 | const { getResultsByKey, getUniqueKeys } = BatchLoader;
53 |
54 | const usersBatchLoader = new BatchLoader((keys) =>
55 | app
56 | .service("users")
57 | .find({ query: { id: { $in: getUniqueKeys(keys) } } })
58 | .then((result) => getResultsByKey(keys, result, (user) => user.id, "!"))
59 | );
60 |
61 | app
62 | .service("comments")
63 | .find()
64 | .then((comments) =>
65 | Promise.all(
66 | comments.map((comment) => {
67 | // Attach user record
68 | return usersBatchLoader
69 | .load(comment.userId)
70 | .then((user) => (comment.userRecord = user));
71 | })
72 | )
73 | );
74 | ```
75 |
76 | ## License
77 |
78 | Copyright (c) 2017 John J. Szwaronek
79 |
80 | Licensed under the [MIT license](LICENSE).
81 |
--------------------------------------------------------------------------------
/docs/common-patterns.md:
--------------------------------------------------------------------------------
1 | ## Creating a new batch-loader per Request.
2 |
3 | In many applications, a server using batch-loader serves requests to many different users with different access permissions. It may be dangerous to use one cache across many users, and it is encouraged to create a new batch-loader per request:
4 |
5 | ```js
6 | function createLoaders(authToken) {
7 | return {
8 | users: new BatchLoader((ids) => genUsers(authToken, ids)),
9 | cdnUrls: new BatchLoader((rawUrls) => genCdnUrls(authToken, rawUrls)),
10 | stories: new BatchLoader((keys) => genStories(authToken, keys)),
11 | };
12 | }
13 |
14 | // When handling an incoming request:
15 | var loaders = createLoaders(request.query.authToken);
16 |
17 | // Then, within application logic:
18 | var user = await loaders.users.load(4);
19 | var pic = await loaders.cdnUrls.load(user.rawPicUrl);
20 | ```
21 |
22 | ## Loading by alternative keys.
23 |
24 | Occasionally, some kind of value can be accessed in multiple ways. For example, perhaps a "User" type can be loaded not only by an "id" but also by a "username" value. If the same user is loaded by both keys, then it may be useful to fill both caches when a user is loaded from either source:
25 |
26 | ```js
27 | let userByIDLoader = new BatchLoader((ids) =>
28 | genUsersByID(ids).then((users) => {
29 | for (let user of users) {
30 | usernameLoader.prime(user.username, user);
31 | }
32 | return users;
33 | })
34 | );
35 |
36 | let usernameLoader = new BatchLoader((names) =>
37 | genUsernames(names).then((users) => {
38 | for (let user of users) {
39 | userByIDLoader.prime(user.id, user);
40 | }
41 | return users;
42 | })
43 | );
44 | ```
45 |
46 | ## Persistent caches
47 |
48 | By default, batch-loader uses the standard Map which simply grows until the batch-loader is released. A custom cache is provided as a convenience if you want to persist caches for longer periods of time. It implements a **least-recently-used** algorithm and allows you to limit the number of records cached.
49 |
50 | ```js
51 | const BatchLoader = require('@feathers-plus/batch-loader');
52 | const cache = require('@feathers-plus/cache');
53 |
54 | const usersLoader = new BatchLoader(
55 | keys => { ... },
56 | { cacheMap: cache({ max: 100 }) }
57 | );
58 | ```
59 |
60 |
> The default cache is appropriate when requests to your application are short-lived.
61 |
62 | ## Using non-Feathers services
63 |
64 | batch-loader provides a simplified and consistent API over various data sources when it is used as part of your application's data-fetching layer. Custom Feathers services can use batch-loaders to natively access local and remote resources.
65 |
66 | ### Redis
67 |
68 | Redis is a very simple key-value store which provides the batch-load command MGET, making it very well suited for use with batch-loader.
69 |
70 | ```js
71 | const BatchLoader = require("@feathers-plus/batch-loader");
72 | const redis = require("redis");
73 |
74 | const client = redis.createClient();
75 |
76 | const redisLoader = new BatchLoader(
77 | (keys) =>
78 | new Promise((resolve, reject) => {
79 | client.mget(keys, (error, results) => {
80 | if (error) return reject(error);
81 |
82 | resolve(
83 | results.map((result, index) =>
84 | result !== null ? result : new Error(`No key: ${keys[index]}`)
85 | )
86 | );
87 | });
88 | })
89 | );
90 | ```
91 |
92 | ### SQLite
93 |
94 | While not a key-value store, SQL offers a natural batch mechanism with `SELECT * ... WHERE ... IN` statements. Although batch-loader is best suited for key-value stores, it still works well with SQL when queries remain simple. This example requests the entire row at a given id; your usage may differ.
95 |
96 | This example uses the sqlite3 client, whose `parallelize` method batches queries together further. A second, non-caching batch-loader uses this method to provide a similar API: batch-loaders can access other batch-loaders.
97 |
98 | ```js
99 | const BatchLoader = require("@feathers-plus/batch-loader");
100 | const sqlite3 = require("sqlite3");
101 |
102 | const db = new sqlite3.Database("./to/your/db.sql");
103 |
104 | // Dispatch a WHERE-IN query, ensuring response has rows in correct order.
105 | const userLoader = new BatchLoader((ids) => {
106 | const params = ids.map((id) => "?").join();
107 | const query = `SELECT * FROM users WHERE id IN (${params})`;
108 | return queryLoader
109 | .load([query, ids])
110 | .then((rows) =>
111 | ids.map(
112 | (id) =>
113 | rows.find((row) => row.id === id) || new Error(`Row not found: ${id}`)
114 | )
115 | );
116 | });
117 |
118 | // Parallelize all queries, but do not cache.
119 | const queryLoader = new BatchLoader(
120 | (queries) =>
121 | new Promise((resolve) => {
122 | let waitingOn = queries.length;
123 | const results = [];
124 | db.parallelize(() => {
125 | queries.forEach((query, index) => {
126 | db.all.apply(
127 | db,
128 | query.concat((error, result) => {
129 | results[index] = error || result;
130 | if (--waitingOn === 0) {
131 | resolve(results);
132 | }
133 | })
134 | );
135 | });
136 | });
137 | }),
138 | { cache: false }
139 | );
140 |
141 | // Usage
142 |
143 | const promise1 = userLoader.load("1234");
144 | const promise2 = userLoader.load("5678");
145 |
146 | Promise.all([promise1, promise2]).then(([user1, user2]) => {
147 | console.log(user1, user2);
148 | });
149 | ```
150 |
151 | ### Knex.js
152 |
153 | This example demonstrates how to use batch-loader with SQL databases via Knex.js, a SQL query builder and client for popular databases such as PostgreSQL, MySQL, and MariaDB.
154 |
155 | ```js
156 | const BatchLoader = require("@feathers-plus/batch-loader");
157 | const db = require("./db"); // an instance of Knex client
158 |
159 | // The list of batch loaders
160 |
161 | const batchLoader = {
162 | user: new BatchLoader((ids) =>
163 | db
164 | .table("users")
165 | .whereIn("id", ids)
166 | .select()
167 | .then((rows) => ids.map((id) => rows.find((x) => x.id === id)))
168 | ),
169 |
170 | story: new BatchLoader((ids) =>
171 | db
172 | .table("stories")
173 | .whereIn("id", ids)
174 | .select()
175 | .then((rows) => ids.map((id) => rows.find((x) => x.id === id)))
176 | ),
177 |
178 | storiesByUserId: new BatchLoader((ids) =>
179 | db
180 | .table("stories")
181 | .whereIn("author_id", ids)
182 | .select()
183 | .then((rows) => ids.map((id) => rows.filter((x) => x.author_id === id)))
184 | ),
185 | };
186 |
187 | // Usage
188 |
189 | Promise.all([
190 | batchLoader.user.load("1234"),
191 | batchLoader.storiesByUserId.load("1234"),
192 | ]).then(([user, stories]) => {
193 | /* ... */
194 | });
195 | ```
196 |
197 | ### RethinkDB
198 |
199 | Full implementation:
200 |
201 | ```js
202 | const BatchLoader = require("@feathers-plus/batch-loader");
203 | const r = require("rethinkdb");
204 | const db = await r.connect();
205 |
206 | const batchLoadFunc = (keys) =>
207 | db
208 | .table("example_table")
209 | .getAll(...keys)
210 | .then((res) => res.toArray())
211 | .then(normalizeRethinkDbResults(keys, "id"));
212 |
213 | const exampleLoader = new BatchLoader(batchLoadFunc);
214 |
215 | await exampleLoader.loadMany([1, 2, 3]); // [{"id": 1, "name": "Document 1"}, {"id": 2, "name": "Document 2"}, Error];
216 |
217 | await exampleLoader.load(1); // {"id": 1, "name": "Document 1"}
218 |
219 | function indexResults(results, indexField, cacheKeyFn = (key) => key) {
220 | const indexedResults = new Map();
221 | results.forEach((res) => {
222 | indexedResults.set(cacheKeyFn(res[indexField]), res);
223 | });
224 | return indexedResults;
225 | }
226 |
227 | function normalizeRethinkDbResults(
228 | keys,
229 | indexField,
230 | cacheKeyFn = (key) => key
231 | ) {
232 | return (results) => {
233 | const indexedResults = indexResults(results, indexField, cacheKeyFn);
234 | return keys.map(
235 | (val) =>
236 | indexedResults.get(cacheKeyFn(val)) ||
237 | new Error(`Key not found : ${val}`)
238 | );
239 | };
240 | }
241 | ```
242 |
--------------------------------------------------------------------------------
/docs/guide.md:
--------------------------------------------------------------------------------
1 | Loading data from a database is one of the major tasks of most web applications. The goal of batch-loader is to improve the performance of database queries with two techniques: batching and caching.
2 |
3 | ## Batching
4 |
5 | Batching is batch-loader's primary feature. The purpose of batching is to merge multiple similar database queries into a single query when possible. For example:
6 |
7 | ```js
8 | Promise.all([
9 | posts.find({ query: { id: 1 } }),
10 | posts.find({ query: { id: 2 } }),
11 | posts.find({ query: { id: { $in: [3, 4] } } }),
12 | posts.find({ query: { id: 5 } }),
13 | ]);
14 | ```
15 |
16 | is slower than
17 |
18 | ```js
19 | posts.find({ query: { id: { $in: [1, 2, 3, 4, 5] } } });
20 | ```
21 |
22 | The latter sends only one query to the database and retrieves the same 5 records as the former, and is therefore much more efficient.
23 |
24 | Batch-loader is a tool to help you batch database calls in such a way. First, create a batch-loader by providing a batch loading function which accepts an array of keys and an optional context. It returns a Promise which resolves to an array of values.
25 |
26 | ```js
27 | const BatchLoader = require('@feathers-plus/batch-loader');
28 | const usersLoader = new BatchLoader((keys, context) => {
29 | return app.service('users').find({ query: { id: { $in: keys } } })
30 | .then(records => {
31 | const recordsByKey = /* recordsByKey[i] is the value for key[i] */;
32 | return recordsByKey;
33 | });
34 | },
35 | { context: {} }
36 | );
37 | ```
38 |
39 | You can then call the batch-loader with individual keys. It will coalesce all requests made within the current event loop into a single call to the batch-loader function, and return the results to each call.
40 |
41 | ```js
42 | usersLoader.load(1).then((user) => console.log("key 1", user));
43 | usersLoader.load(2).then((user) => console.log("key 2", user));
44 | usersLoader
45 | .loadMany([1, 2, 3, 4])
46 | .then((users) => console.log(users.length, users));
47 | ```
48 |
49 | The above will result in one database service call, i.e. `users.find({ query: { id: { $in: [1, 2, 3, 4] } } })`, instead of 6.
50 |
51 |
*"[W]ill coalesce all requests made within the current event loop into a single call"* sounds ominous. Just don't worry about it. Make `usersLoader.load` and `usersLoader.loadMany` calls the same way you would `users.get` and `users.find`. Everything will work as expected while, behind the scenes, batch-loader is making the fewest database calls logically possible.
52 |
53 | ### Batch Function
54 |
55 | The batch loading function accepts an array of keys and an optional context. It returns a Promise which resolves to an array of values. Each index in the returned array of values must correspond to the same index in the array of keys.
56 |
57 | For example, if the `usersLoader` from above is called with `[1, 2, 3, 4, 99]`, we would execute `users.find({ query: { id: { $in: [1, 2, 3, 4, 99] } } })`. The Feathers service could return the results:
58 |
59 | ```js
60 | [ { id: 4, name: 'Aubree' },
61 | { id: 2, name: 'Marshall' },
62 | { id: 1, name: 'John' },
63 | { id: 3, name: 'Barbara' } ]
64 | ```
65 |
66 | Please note that the order of the results will usually differ from the order of the keys, and that, in addition, there is no `users` record with an `id` of `99`.
67 |
68 | The batch function has to reorganize the above results and return:
69 |
70 | ```js
71 | [
72 | { id: 1, name: "John" },
73 | { id: 2, name: "Marshall" },
74 | { id: 3, name: "Barbara" },
75 | { id: 4, name: "Aubree" },
76 | null,
77 | ];
78 | ```
79 |
80 | The `null` indicates there is no record for `user.id === 99`.
81 |
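As a minimal sketch (not taken from the library itself), a hand-rolled batch function for the `usersLoader` could perform this reorganization like so:

```js
const BatchLoader = require('@feathers-plus/batch-loader');

const usersLoader = new BatchLoader(keys =>
  app.service('users').find({ query: { id: { $in: keys } } })
    .then(records => {
      // Index the returned records by id, then map each key to its record,
      // using null where no record exists for that key.
      const recordsById = new Map(records.map(user => [user.id, user]));
      return keys.map(key => recordsById.get(key) || null);
    })
);
```
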
82 | ### Convenience Methods
83 |
84 | Batch-loader provides two convenience functions that will perform this reorganization for you.
85 |
86 | ```js
87 | const BatchLoader = require('@feathers-plus/batch-loader');
88 | const { getResultsByKey, getUniqueKeys } = BatchLoader;
89 |
90 | const usersLoader = new BatchLoader(keys =>
91 | app.service('users').find({ query: { id: { $in: getUniqueKeys(keys) } } })
92 | .then(records => getResultsByKey(keys, records, user => user.id, ''))
93 | );
94 | ```
95 |
96 | **getUniqueKeys** eliminates any duplicate elements in the keys.
97 |
98 | > The array of keys may contain duplicates when the batch-loader's memoization cache is disabled.
99 |
100 | **getResultsByKey** reorganizes the records from the service call into the result expected from the batch function. The `''` parameter indicates each key expects a single record or `null`. Other options are `'!'` when each key requires a single record, and `'[]'` when each key requires an array of 0, 1 or more records.
101 |
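As a rough illustration of both helpers (the keys and records below are hypothetical, not taken from the library's own tests):

```js
const BatchLoader = require('@feathers-plus/batch-loader');
const { getResultsByKey, getUniqueKeys } = BatchLoader;

// Hypothetical load keys (post ids) and the comment records a service returned for them.
const keys = [1, 2, 5];
const records = [
  { id: 11, postId: 1 },
  { id: 12, postId: 2 },
  { id: 13, postId: 2 },
];

getUniqueKeys([1, 2, 2, 5]);
// => [1, 2, 5]

// '[]' - each key maps to an array of 0, 1 or more records.
getResultsByKey(keys, records, record => record.postId, '[]');
// => roughly [ [ { id: 11, ... } ], [ { id: 12, ... }, { id: 13, ... } ], [] ]

// '' - each key maps to a single record, or null when none exists.
getResultsByKey([101, 102], [{ id: 101, name: 'John' }], user => user.id, '');
// => roughly [ { id: 101, name: 'John' }, null ]
```
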
102 | ## Caching
103 |
104 | Each batch-loader instance contains a unique memoized cache. Once `load` or `loadMany` is called, the resulting value is cached. This eliminates redundant database requests, relieving pressure on your database. It also creates fewer objects which may relieve memory pressure on your application.
105 |
106 | ```js
107 | Promise.all([userLoader.load(1), userLoader.load(1)]).then((users) =>
108 | assert(users[0] === users[1])
109 | );
110 | ```
111 |
112 |
> Multiple cache hits for the same key return the same object. You should not mutate that object directly, as the mutation would be reflected in every reference to it. Rather, deep-copy the object and mutate the copy.
113 |
114 | ### Caching Per Request
115 |
116 | It may be dangerous to use one cache across many users, and it is encouraged to create a new batch-loader per request. Typically batch-loader instances are created when a request begins and are released once the request ends.
117 |
118 | Since the cache exists for a limited time only, the cache contents should not normally grow large enough to cause memory pressure on the application.
119 |
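One way to do this in a Feathers app is to build the loaders in a before hook and stash them on the hook context. The sketch below is only an illustration; the `loaders` property name and the services involved are assumptions, not part of batch-loader's API.

```js
const BatchLoader = require('@feathers-plus/batch-loader');
const { getResultsByKey, getUniqueKeys } = BatchLoader;

// A hypothetical before hook that creates fresh batch-loaders for each request.
const createLoaders = context => {
  context.params.loaders = {
    users: new BatchLoader(keys =>
      context.app.service('users')
        .find({ query: { id: { $in: getUniqueKeys(keys) } }, paginate: false })
        .then(records => getResultsByKey(keys, records, user => user.id, ''))
    )
  };
  return context;
};

app.service('comments').hooks({ before: { all: [createLoaders] } });
```
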
120 | ### Persistent Caches
121 |
122 | A batch-loader can be shared between requests and between users if care is taken. Be especially cautious in long-lived applications, or in those which serve many users with different access permissions.
123 |
124 | The main advantage is having the cache already primed at the start of each request, which could result in fewer initial database requests.
125 |
126 | #### Memory pressure
127 |
128 | There are two concerns, though. First, the cache could keep filling up with records, causing memory pressure. This can be handled with a custom cache.
129 |
130 | **@feathers-plus/cache** is a least-recently-used (LRU) cache which you can inject when initializing the batch-loader. You can specify the maximum number of records to be kept in the cache, and it will evict the least recently used records once that limit is reached.
131 |
132 | ```js
133 | const BatchLoader = require('@feathers-plus/batch-loader');
134 | const cache = require('@feathers-plus/cache');
135 |
136 | const usersLoader = new BatchLoader(
137 | keys => { ... },
138 | { cacheMap: cache({ max: 100 }) }
139 | );
140 | ```
141 |
142 | #### Mutation
143 |
144 | The other concern is record mutation. You can create a hook which clears a mutated record from its BatchLoaders' caches.
145 |
146 | ```js
147 | usersLoader.clear(1);
148 | ```
149 |
150 | > `@feathers-plus/cache/lib/hooks` contains hooks which clear the keys of mutated records.
151 |
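As a minimal sketch of such a hook (the `batchLoaders` registry and the way the loader is shared are assumptions for illustration, not part of batch-loader's API):

```js
// A hypothetical after hook that clears a mutated users record from a shared loader's cache.
const clearUsersLoader = context => {
  const { usersLoader } = context.app.get('batchLoaders') || {};

  if (usersLoader && context.result) {
    usersLoader.clear(context.result.id);
  }
  return context;
};

app.service('users').hooks({
  after: { patch: [clearUsersLoader], update: [clearUsersLoader], remove: [clearUsersLoader] }
});
```
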
152 | ## Explore Performance Gains
153 |
154 | ### Our Sample Data
155 |
156 | We will be using Feathers database services containing the following data:
157 |
158 | ```js
159 | // app.service('posts')
160 | const postsStore = [
161 | { id: 1, body: "John post", userId: 101, starIds: [102, 103, 104] },
162 | { id: 2, body: "Marshall post", userId: 102, starIds: [101, 103, 104] },
163 | { id: 3, body: "Barbara post", userId: 103 },
164 | { id: 4, body: "Aubree post", userId: 104 },
165 | ];
166 |
167 | // app.service('comments')
168 | const commentsStore = [
169 | { id: 11, text: "John post Marshall comment 11", postId: 1, userId: 102 },
170 | { id: 12, text: "John post Marshall comment 12", postId: 1, userId: 102 },
171 | { id: 13, text: "John post Marshall comment 13", postId: 1, userId: 102 },
172 | { id: 14, text: "Marshall post John comment 14", postId: 2, userId: 101 },
173 | { id: 15, text: "Marshall post John comment 15", postId: 2, userId: 101 },
174 | { id: 16, text: "Barbara post John comment 16", postId: 3, userId: 101 },
175 | { id: 17, text: "Aubree post Marshall comment 17", postId: 4, userId: 102 },
176 | ];
177 |
178 | // app.service('users')
179 | const usersStore = [
180 | { id: 101, name: "John" },
181 | { id: 102, name: "Marshall" },
182 | { id: 103, name: "Barbara" },
183 | { id: 104, name: "Aubree" },
184 | ];
185 | ```
186 |
187 | We want to see how using batch-loader affects the number of database calls, and we will do that by populating the `posts` records with related information.
188 |
189 | ### Using Plain JavaScript
190 |
191 | First, let's add the related `comments` records to each `posts` record using regular JavaScript, and let's do this using both Promises and async/await.
192 |
193 | ```js
194 | // Populate using Promises.
195 | Promise.resolve(posts.find()
196 | .then(posts => Promise.all(posts.map(post => comments.find({ query: { postId: post.id } })
197 | .then(comments => {
198 | post.commentRecords = comments;
199 | return post;
200 | })
201 | )))
202 | )
203 | .then(data => ... );
204 |
205 | // Populate using async/await.
206 | const postRecords = await posts.find();
207 | const data = await Promise.all(postRecords.map(async post => {
208 | post.commentRecords = await comments.find({ query: { postId: post.id } });
209 | return post;
210 | }));
211 | ```
212 |
213 | Both of these make the following database service calls, and both get the following result.
214 |
215 | ```js
216 | ... posts find
217 | ... comments find { postId: 1 }
218 | ... comments find { postId: 2 }
219 | ... comments find { postId: 3 }
220 | ... comments find { postId: 4 }
221 |
222 | [ { id: 1,
223 | body: 'John post',
224 | userId: 101,
225 | starIds: [ 102, 103, 104 ],
226 | commentRecords: [
227 | { id: 11, text: 'John post Marshall comment 11', postId: 1, userId: 102 },
228 | { id: 12, text: 'John post Marshall comment 12', postId: 1, userId: 102 },
229 | { id: 13, text: 'John post Marshall comment 13', postId: 1, userId: 102 } ] },
230 | { ... }
231 | ]
232 | ```
233 |
234 | ### Using Neither Batching nor Caching
235 |
236 | The batch-loader function will be called for every `load` and `loadMany` when batching and caching are disabled in the batch-loader. This means it acts just like individual `get` and `find` method calls. Let's rewrite the above example using such a rudimentary batch-loader:
237 |
238 | ```js
239 | const BatchLoader = require('@feathers-plus/batch-loader');
240 | const { getResultsByKey, getUniqueKeys } = BatchLoader;
241 |
242 | // Populate using Promises.
243 | const commentsLoaderPromises = new BatchLoader(
244 | keys => comments.find({ query: { postId: { $in: getUniqueKeys(keys) } } })
245 | .then(result => getResultsByKey(keys, result, comment => comment.postId, '[]')),
246 | { batch: false, cache: false }
247 | );
248 |
249 | Promise.resolve(posts.find()
250 | .then(postRecords => Promise.all(postRecords.map(post => commentsLoaderPromises.load(post.id)
251 | .then(comments => {
252 | post.commentRecords = comments;
253 | return post;
254 | })
255 | )))
256 | )
257 | .then(data => { ... });
258 |
259 | // Populate using async/await.
260 | const commentsLoaderAwait = new BatchLoader(async keys => {
261 | const postRecords = await comments.find({ query: { postId: { $in: getUniqueKeys(keys) } } });
262 | return getResultsByKey(keys, postRecords, comment => comment.postId, '[]');
263 | },
264 | { batch: false, cache: false }
265 | );
266 |
267 | const postRecords = await posts.find();
268 | const data = await Promise.all(postRecords.map(async post => {
269 | post.commentRecords = await commentsLoaderAwait.load(post.id);
270 | return post;
271 | }));
272 | ```
273 |
274 | Both of these make the same database service calls as the [plain JavaScript example](#using-plain-javascript) did, because batching and caching were both disabled.
275 |
276 | ```text
277 | ... posts find
278 | ... comments find { postId: { '$in': [ 1 ] } }
279 | ... comments find { postId: { '$in': [ 2 ] } }
280 | ... comments find { postId: { '$in': [ 3 ] } }
281 | ... comments find { postId: { '$in': [ 4 ] } }
282 | ```
283 |
284 | > A batch-loader with neither batching nor caching makes the same database calls as does a plain JavaScript implementation. This is a convenient way to debug issues you might have with batch-loader. The _"magic"_ disappears when you disable batching and caching, which makes it simpler to understand what is happening.
285 |
286 | ### Using Batching and Caching
287 |
288 | Batching and caching are enabled when we remove the two `{ batch: false, cache: false }` options in the above example. A very different performance profile is now produced:
289 |
290 | ```text
291 | ... posts find
292 | ... comments find { postId: { '$in': [ 1, 2, 3, 4 ] } }
293 | ```
294 |
295 | Only 1 service call was made for the `comments` records, instead of the previous 4.
296 |
297 | ### A Realistic Example
298 |
299 | The more service calls made, the better batch-loader performs. The above example populated the `posts` records with just the `comments` records. Let's see the effect batch-loader has when we fully populate the `posts` records.
300 |
301 | ```js
302 | const { map, parallel } = require('asyncro');
303 | const BatchLoader = require('@feathers-plus/batch-loader');
304 |
305 | const { getResultsByKey, getUniqueKeys } = BatchLoader;
306 |
307 | tester({ batch: false, cache: false })
308 | .then(data => { ... });
309 |
310 | async function tester (options) {
311 | const commentsLoader = new BatchLoader(async keys => {
312 | const result = await comments.find({ query: { postId: { $in: getUniqueKeys(keys) } } });
313 | return getResultsByKey(keys, result, comment => comment.postId, '[]');
314 | },
315 | options
316 | );
317 |
318 | const usersLoader = new BatchLoader(async keys => {
319 | const result = await users.find({ query: { id: { $in: getUniqueKeys(keys) } } });
320 | return getResultsByKey(keys, result, user => user.id, '');
321 | },
322 | options
323 | );
324 |
325 | const postRecords = await posts.find();
326 |
327 | await map(postRecords, async post => {
328 | await parallel([
329 | // Join one users record to posts, for post.userId === users.id
330 | async () => {
331 | post.userRecord = await usersLoader.load(post.userId);
332 | },
333 | // Join 0, 1 or many comments records to posts, where comments.postId === posts.id
334 | async () => {
335 | const commentRecords = await commentsLoader.load(post.id);
336 | post.commentRecords = commentRecords;
337 |
338 | // Join one users record to comments, for comments.userId === users.id
339 | await map(commentRecords, async comment => {
340 | comment.userRecord = await usersLoader.load(comment.userId);
341 | });
342 | },
343 | // Join 0, 1 or many users record to posts, where posts.starIds === users.id
344 | async () => {
345 | if (!post.starIds) return null;
346 |
347 | post.starUserRecords = await usersLoader.loadMany(post.starIds);
348 | }
349 | ]);
350 | });
351 |
352 | return postRecords;
353 | }
354 | ```
355 |
356 | > Notice `usersLoader` is being called within 3 quite different joins. These joins will share their batching and cache, noticeably improving overall performance.
357 |
358 | This example has batching and caching disabled. These 22 service calls are made when it is run. They are the same calls which a plain JavaScript implementation would have made:
359 |
360 | ```text
361 | ... posts find
362 | ... users find { id: { '$in': [ 101 ] } }
363 | ... comments find { postId: { '$in': [ 1 ] } }
364 | ... users find { id: { '$in': [ 102 ] } }
365 | ... users find { id: { '$in': [ 103 ] } }
366 | ... users find { id: { '$in': [ 104 ] } }
367 | ... users find { id: { '$in': [ 102 ] } }
368 | ... comments find { postId: { '$in': [ 2 ] } }
369 | ... users find { id: { '$in': [ 101 ] } }
370 | ... users find { id: { '$in': [ 103 ] } }
371 | ... users find { id: { '$in': [ 104 ] } }
372 | ... users find { id: { '$in': [ 103 ] } }
373 | ... comments find { postId: { '$in': [ 3 ] } }
374 | ... users find { id: { '$in': [ 104 ] } }
375 | ... comments find { postId: { '$in': [ 4 ] } }
376 | ... users find { id: { '$in': [ 102 ] } }
377 | ... users find { id: { '$in': [ 102 ] } }
378 | ... users find { id: { '$in': [ 102 ] } }
379 | ... users find { id: { '$in': [ 101 ] } }
380 | ... users find { id: { '$in': [ 101 ] } }
381 | ... users find { id: { '$in': [ 101 ] } }
382 | ... users find { id: { '$in': [ 102 ] } }
383 | ```
384 |
385 | Now let's enable batching and caching by changing `tester({ batch: false, cache: false })` to `tester()`. Only these **three** service calls are now made to obtain the same results:
386 |
387 | ```text
388 | ... posts find
389 | ... users find { id: { '$in': [ 101, 102, 103, 104 ] } }
390 | ... comments find { postId: { '$in': [ 1, 2, 3, 4 ] } }
391 | ```
392 |
393 | > The two BatchLoaders reduced the number of service calls from 22 for a plain implementation to just 3!
394 |
395 | The final populated result is:
396 |
397 | ```js
398 | [
399 | {
400 | id: 1,
401 | body: "John post",
402 | userId: 101,
403 | starIds: [102, 103, 104],
404 | userRecord: { id: 101, name: "John" },
405 | starUserRecords: [
406 | { id: 102, name: "Marshall" },
407 | { id: 103, name: "Barbara" },
408 | { id: 104, name: "Aubree" },
409 | ],
410 | commentRecords: [
411 | {
412 | id: 11,
413 | text: "John post Marshall comment 11",
414 | postId: 1,
415 | userId: 102,
416 | userRecord: { id: 102, name: "Marshall" },
417 | },
418 | {
419 | id: 12,
420 | text: "John post Marshall comment 12",
421 | postId: 1,
422 | userId: 102,
423 | userRecord: { id: 102, name: "Marshall" },
424 | },
425 | {
426 | id: 13,
427 | text: "John post Marshall comment 13",
428 | postId: 1,
429 | userId: 102,
430 | userRecord: { id: 102, name: "Marshall" },
431 | },
432 | ],
433 | },
434 | {
435 | id: 2,
436 | body: "Marshall post",
437 | userId: 102,
438 | starIds: [101, 103, 104],
439 | userRecord: { id: 102, name: "Marshall" },
440 | starUserRecords: [
441 | { id: 101, name: "John" },
442 | { id: 103, name: "Barbara" },
443 | { id: 104, name: "Aubree" },
444 | ],
445 | commentRecords: [
446 | {
447 | id: 14,
448 | text: "Marshall post John comment 14",
449 | postId: 2,
450 | userId: 101,
451 | userRecord: { id: 101, name: "John" },
452 | },
453 | {
454 | id: 15,
455 | text: "Marshall post John comment 15",
456 | postId: 2,
457 | userId: 101,
458 | userRecord: { id: 101, name: "John" },
459 | },
460 | ],
461 | },
462 | {
463 | id: 3,
464 | body: "Barbara post",
465 | userId: 103,
466 | userRecord: { id: 103, name: "Barbara" },
467 | commentRecords: [
468 | {
469 | id: 16,
470 | text: "Barbara post John comment 16",
471 | postId: 3,
472 | userId: 101,
473 | userRecord: { id: 101, name: "John" },
474 | },
475 | ],
476 | },
477 | {
478 | id: 4,
479 | body: "Aubree post",
480 | userId: 104,
481 | userRecord: { id: 104, name: "Aubree" },
482 | commentRecords: [
483 | {
484 | id: 17,
485 | text: "Aubree post Marshall comment 17",
486 | postId: 4,
487 | userId: 102,
488 | userRecord: { id: 102, name: "Marshall" },
489 | },
490 | ],
491 | },
492 | ];
493 | ```
494 |
495 | ## See also
496 |
497 | - [facebook/dataloader](https://github.com/facebook/dataloader) from which batch-loader is derived.
498 |
--------------------------------------------------------------------------------
/docs/index.md:
--------------------------------------------------------------------------------
1 |
2 |
25 | ### new BatchLoader( batchLoadFunc [, options] )
26 | Create a new batch-loader given a batch loading function and options.
27 |
28 | - **Arguments:**
29 | - `{Function} batchLoadFunc`
30 | - `{Object} [ options ]`
31 | - `{Boolean} batch`
32 | - `{Boolean} cache`
33 | - `{Function} cacheKeyFn`
34 | - `{Object} cacheMap`
35 | - `{Object} context`
36 | - `{Number} maxBatchSize`
37 |
38 | | Argument | Type | Default | Description |
39 | | --------------- | :--------: | ------- | ------------------------------------------------ |
40 | | `batchLoadFunc` | `Function` | | See [Batch Function](./guide.md#batch-function). |
41 | | `options` | `Object` | | Options. |
42 |
43 | | `options` | Type | Default | Description |
44 | | -------------- | :--------: | ------------ | ----------- |
45 | | `batch` | `Boolean` | `true` | Set to false to disable batching, invoking `batchLoadFunc` with a single load key. |
46 | | `cache` | `Boolean` | `true` | Set to false to disable memoization caching, creating a new Promise and new key in the `batchLoadFunc` for every load of the same key. |
47 | | `cacheKeyFn` | `Function` | `key => key` | Produces cache key for a given load key. Useful when keys are objects and two objects should be considered equivalent. |
48 | | `cacheMap` | `Object` | `new Map()` | Instance of Map (or an object with a similar API) to be used as cache. See below. |
49 | | `context` | `Object` | `null` | A context object to pass into `batchLoadFunc` as its second argument. |
50 | | `maxBatchSize` | `Number` | `Infinity` | Limits the number of keys when calling `batchLoadFunc`. |
51 |
52 | - **Example**
53 |
54 | ```js
55 | const BatchLoader = require("@feathers-plus/batch-loader");
56 | const { getResultsByKey, getUniqueKeys } = BatchLoader;
57 |
58 | const usersLoader = new BatchLoader(
59 | async (keys, context) => {
60 | const data = await users.find({
61 | query: { id: { $in: getUniqueKeys(keys) } },
62 | paginate: false,
63 | });
64 | return getResultsByKey(keys, data, (user) => user.id, "");
65 | },
66 | { context: {}, batch: true, cache: true }
67 | );
68 | ```
69 |
70 | - **Pagination**
71 |
72 | The number of results returned by a query using `$in` is controlled by the pagination `max` set for that Feathers service. You need to specify a `paginate: false` option to ensure that records for all the keys are returned.
73 |
74 | The maximum number of keys the `batchLoadFunc` is called with can be controlled by the BatchLoader itself with the `maxBatchSize` option.
75 |
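A sketch combining the two settings (the `comments` service and the batch size of 50 are illustrative assumptions):

```js
const BatchLoader = require('@feathers-plus/batch-loader');
const { getResultsByKey, getUniqueKeys } = BatchLoader;

const commentsLoader = new BatchLoader(
  async keys => {
    // paginate: false ensures records for all keys are returned, not just one page.
    const result = await app.service('comments').find({
      query: { postId: { $in: getUniqueKeys(keys) } },
      paginate: false
    });
    return getResultsByKey(keys, result, comment => comment.postId, '[]');
  },
  // Never call the batch function with more than 50 keys at a time.
  { maxBatchSize: 50 }
);
```
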
76 | - **option.cacheMap**
77 |
78 | The default cache will grow without limit, which is reasonable for short lived batch-loaders which are rebuilt on every request. The number of records cached can be limited with a _least-recently-used_ cache:
79 |
80 | ```js
81 | const BatchLoader = require('@feathers-plus/batch-loader');
82 | const cache = require('@feathers-plus/cache');
83 |
84 | const usersLoader = new BatchLoader(
85 | keys => { ... },
86 | { cacheMap: cache({ max: 100 }) }
87 | );
88 | ```
89 |
90 | > In the browser, you can consider wrapping npm's `lru` package for use as the `cacheMap`.
91 |
92 | - **See also:** [Guide](./guide.md)
93 |
94 |
95 |
### static BatchLoader.getUniqueKeys( keys )
96 |
97 | Returns the unique elements in an array.
98 |
99 | - **Arguments:**
100 | - `{Array} keys`
101 |
102 | | Argument | Type | Default | Description |
103 | | -------- | ------------------------------ | ------- | --------------------------------- |
104 | | `keys` | `Array< String / Number >` | | The keys. May contain duplicates. |
105 |
106 | - **Example:**
107 |
108 | ```js
109 | const usersLoader = new BatchLoader(async keys => {
110 |   const data = await users.find({ query: { id: { $in: getUniqueKeys(keys) } } });
111 |   ...
112 | });
113 | ```
114 |
115 | - **Details**
116 |
117 | The array of keys may contain duplicates when the batch-loader's memoization cache is disabled.
118 |
119 |
> This function does not handle keys of type Object or Array.
120 |
121 |
122 |
### static BatchLoader.getResultsByKey( keys, records, getRecordKeyFunc, type [, options] )
123 |
124 | Reorganizes the records from the service call into the result expected from the batch function.
125 |
126 | - **Arguments:**
127 | - `{Array} keys`
128 | - `{Array