├── .eslintrc.json ├── .github └── workflows │ └── run-tests.yml ├── .gitignore ├── CHANGELOG.md ├── LICENSE ├── README.md ├── examples └── promise-pool.js ├── package.json ├── performance └── testing.js ├── src ├── contracts.ts ├── index.ts ├── promise-pool-error.ts ├── promise-pool-executor.ts ├── promise-pool.ts ├── return-value.ts ├── stop-the-promise-pool-error.ts └── validation-error.ts ├── test ├── pool-from-iterable.js ├── promise-pool.js └── stop-the-pool.js └── tsconfig.json /.eslintrc.json: -------------------------------------------------------------------------------- 1 | { 2 | "env": { 3 | "es6": true, 4 | "node": true 5 | }, 6 | "extends": [ 7 | "@supercharge/typescript" 8 | ], 9 | "rules": { 10 | "@typescript-eslint/return-await": 0, 11 | "@typescript-eslint/require-await": 0, 12 | "@typescript-eslint/method-signature-style": 0, 13 | "@typescript-eslint/strict-boolean-expressions": 0 14 | } 15 | } 16 | -------------------------------------------------------------------------------- /.github/workflows/run-tests.yml: -------------------------------------------------------------------------------- 1 | name: Run tests 2 | 3 | on: [push, pull_request] 4 | 5 | concurrency: 6 | group: ${{ github.ref }} 7 | cancel-in-progress: true 8 | 9 | jobs: 10 | test: 11 | name: Node.js ${{ matrix.node-version }} on ${{ matrix.os }} 12 | runs-on: ${{ matrix.os }} 13 | strategy: 14 | matrix: 15 | node-version: [16.x, 18.x, 20.x] 16 | os: 17 | - ubuntu-latest 18 | 19 | steps: 20 | - name: Git checkout 21 | uses: actions/checkout@v4 22 | 23 | - name: Use Node.js ${{ matrix.node-version }} 24 | uses: actions/setup-node@v4 25 | with: 26 | node-version: ${{ matrix.node-version }} 27 | 28 | - name: Install dependencies 29 | run: npm install 30 | 31 | - name: Run tests 32 | run: npm run test:full 33 | env: 34 | CI: true 35 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | haters 2 | 3 | lib-cov 4 | *.seed 5 | *.log 6 | *.csv 7 | *.dat 8 | *.out 9 | *.pid 10 | *.gz 11 | .github-todos 12 | 13 | pids 14 | results 15 | 16 | dist 17 | node_modules 18 | npm-debug.log 19 | package-lock.json 20 | yarn.lock 21 | 22 | # code coverage folder 23 | coverage 24 | .nyc_output 25 | 26 | # Secrets 27 | .env 28 | .env.** 29 | 30 | # IDEs and editors 31 | .idea 32 | .vscode 33 | 34 | .vagrant 35 | -------------------------------------------------------------------------------- /CHANGELOG.md: -------------------------------------------------------------------------------- 1 | # Changelog 2 | 3 | ## [3.2.0](https://github.com/supercharge/promise-pool/compare/v3.1.1...v3.2.0) - 2024-03-25 4 | 5 | ### Added 6 | - add `"sideEffects": false` to `package.json` [#83](https://github.com/supercharge/promise-pool/issues/83) 7 | 8 | ### Updated 9 | - updated types for the handler function in the promise pool processor 10 | 11 | ### Updated 12 | - bump dependencies 13 | 14 | 15 | ## [3.1.1](https://github.com/supercharge/promise-pool/compare/v3.1.0...v3.1.1) - 2024-01-31 16 | 17 | ### Updated 18 | - bump dependencies 19 | 20 | ### Fixed 21 | - removed duplicated build files from `dist` (fixing https://github.com/supercharge/promise-pool/issues/82) 22 | 23 | 24 | ## [3.1.0](https://github.com/supercharge/promise-pool/compare/v3.0.0...v3.1.0) - 2023-09-25 25 | 26 | ### Added 27 | - clear timers from task timeouts to remove them from the event loop: this is helpful to clear up resources for long-running 
timeouts and quickly resolving tasks. If not cleared, the timeouts stay in the event loop until they’re due 28 | - keep the error handler when calling `pool.handleError` after `pool.for`: previously, the error handler was lost 29 | 30 | ### Updated 31 | - bump dependencies 32 | 33 | 34 | ## [3.0.0](https://github.com/supercharge/promise-pool/compare/v2.4.0...v3.0.0) - 2023-07-09 35 | 36 | ### Added 37 | - [Accept iterables and async iterables](https://github.com/supercharge/promise-pool/pull/72) and not just arrays 38 | - add performance testing script in `performance` directory: this is helpful to detect whether the promise pool is significantly slower than `Promise.all` 39 | 40 | ### Updated 41 | - bump dependencies 42 | - [call `Error.captureStackTrace` only when available](https://github.com/supercharge/promise-pool/issues/74) 43 | 44 | ### Breaking Changes 45 | - require Node.js v16 46 | - require `ecmaScript` 2021 47 | - the `pool.items()` method changed from the narrower type `T[]` to the wider type `T[] | Iterable<T> | AsyncIterable<T>`, which might be a breaking change for your code base 48 | 49 | 50 | ## [2.4.0](https://github.com/supercharge/promise-pool/compare/v2.3.2...v2.4.0) - 2023-02-10 51 | 52 | ### Added 53 | - [support task timeouts](https://github.com/supercharge/promise-pool#task-timeouts) 54 | - [support corresponding results](https://github.com/supercharge/promise-pool#correspond-source-items-and-their-results) 55 | 56 | ### Updated 57 | - bump dependencies 58 | - refined types for when a promise or non-promise result is handled 59 | 60 | 61 | ## [2.3.2](https://github.com/supercharge/promise-pool/compare/v2.3.1...v2.3.2) - 2022-08-05 62 | 63 | ### Fixed 64 | - remove active task after handling the result or error [#51](https://github.com/supercharge/promise-pool/issues/51) 65 | - we previously removed the active tasks before handling the result or error, which caused the pool to pick up the next task too early 66 | 67 | 68 | ## [2.3.1](https://github.com/supercharge/promise-pool/compare/v2.3.0...v2.3.1) - 2022-08-05 69 | 70 | ### Updated 71 | - bump dependencies 72 | 73 | ### Fixed 74 | - stop processing items after throwing an error from inside the `pool.handleError()` method [#51](https://github.com/supercharge/promise-pool/issues/51) 75 | 76 | 77 | ## [2.3.0](https://github.com/supercharge/promise-pool/compare/v2.2.0...v2.3.0) - 2022-06-08 78 | 79 | ### Added 80 | - `pool.useConcurrency()`: adjust the concurrency of a running pool 81 | 82 | ### Updated 83 | - bump dependencies 84 | 85 | 86 | ## [2.2.0](https://github.com/supercharge/promise-pool/compare/v2.1.0...v2.2.0) - 2022-05-20 87 | 88 | ### Added 89 | - `pool.onTaskStarted((item, pool) => { … })`: configure a callback that runs when an item is about to be processed 90 | - `pool.onTaskFinished((item, pool) => { … })`: configure a callback that runs when an item has finished processing 91 | 92 | ### Updated 93 | - bump dependencies 94 | 95 | 96 | ## [2.1.0](https://github.com/supercharge/promise-pool/compare/v2.0.0...v2.1.0) - 2021-12-14 97 | 98 | ### Added 99 | - keep the original error in `error.raw` 100 | - this is useful if your errors store some kind of context 101 | - the `PromisePoolError` instance would otherwise lose the original error context 102 | 103 | ```js 104 | class CustomError extends Error { … } 105 | 106 | const { errors } = await PromisePool 107 | .withConcurrency(2) 108 | .for([1, 2, 3]) 109 | .process(() => { 110 | throw new CustomError('Oh no') 111 | }) 112 | 113 | errors[0].raw instanceof
CustomError 114 | // true 115 | ``` 116 | 117 | ### Updated 118 | - bump dependencies 119 | - run tests for Node.js v17 120 | 121 | 122 | ## [2.0.0](https://github.com/supercharge/promise-pool/compare/v1.9.0...v2.0.0) - 2021-11-09 123 | 124 | ### Breaking Changes 125 | The `2.x` release line changes the exports of this package: 126 | 127 | ```js 128 | // Now: 2.x 129 | import { PromisePool } from '@supercharge/promise-pool' 130 | // or 131 | const { PromisePool } = require('@supercharge/promise-pool') 132 | 133 | // Before: 1.x 134 | import PromisePool from '@supercharge/promise-pool' // required the `esModuleInterop` flag in tsconfig.json 135 | // or 136 | const PromisePool = require('@supercharge/promise-pool') 137 | ``` 138 | 139 | The `1.x` releases used CommonJS- and ESM-compatible default exports. That required TypeScript packages using ESM imports to enable the `esModuleInterop` flag in their `tsconfig.json` file. The named exports in `2.x` don’t require that flag anymore. 140 | 141 | 142 | ## [1.9.0](https://github.com/supercharge/promise-pool/compare/v1.8.0...v1.9.0) - 2021-11-03 143 | 144 | ### Added 145 | - `pool.stop()` method 146 | - add `downlevelIteration: true` option to `tsconfig.json` 147 | 148 | ### Updated 149 | - bump dependencies 150 | - use UVU and c8 for testing (instead of Jest) 151 | - refined example output (in `examples/promise-pool.js`) 152 | - extend `README` with examples on how to stop an active promise pool 153 | 154 | 155 | ## [1.8.0](https://github.com/supercharge/promise-pool/compare/v1.7.0...v1.8.0) - 2021-09-24 156 | 157 | ### Added 158 | - test code on Node.js v16 159 | - provide `index` as the second argument in the `process` function 160 | ```js 161 | await PromisePool 162 | .withConcurrency(2) 163 | .for([1,2,3,4]) 164 | .process(async (num, index) => { 165 | // processing … 166 | }) 167 | ``` 168 | 169 | ### Updated 170 | - bump dependencies 171 | 172 | ### Removed 173 | - testing on Node.js v15 174 | 175 | 176 | ## [1.7.0](https://github.com/supercharge/promise-pool/compare/v1.6.2...v1.7.0) - 2021-04-11 177 | 178 | ### Updated 179 | - bump dependencies 180 | - refactored code to not use the `@supercharge/goodies` package 181 | 182 | ### Removed 183 | - dependency to `@supercharge/goodies` 184 | 185 | 186 | ## [1.6.2](https://github.com/supercharge/promise-pool/compare/v1.6.1...v1.6.2) - 2021-04-09 187 | 188 | ### Updated 189 | - bump dependencies 190 | 191 | ### Fixed 192 | - missing concurrency in certain cases when not calling `.withConcurrency()` 193 | 194 | 195 | ## [1.6.1](https://github.com/supercharge/promise-pool/compare/v1.6.0...v1.6.1) - 2021-03-28 196 | 197 | ### Fixed 198 | - typing error when processing a promise pool that was created from non-static methods 199 | 200 | 201 | ## [1.6.0](https://github.com/supercharge/promise-pool/compare/v1.5.0...v1.6.0) - 2020-11-03 202 | 203 | ### Added 204 | - `.handleError(handler)` method: aka “bring your own error handling”. This allows you to take over error handling from the pool. If you impelement the `.handleError` method, the pool won’t collect errors anymore. It puts error handling in your hands. 205 | 206 | ### Updated 207 | - bump dependencies 208 | 209 | ### Fixed 210 | - failed tasks are handled properly now and the pool ensures the concurrency limit. 
Before, the pool started to process all items as soon as one failed 211 | 212 | 213 | ## [1.5.0](https://github.com/supercharge/promise-pool/compare/v1.4.0...v1.5.0) - 2020-09-20 214 | 215 | ### Updated 216 | - bump dependencies 217 | - return types for `results` and `errors` now resolve properly for sync and async action handlers 218 | 219 | 220 | ## [1.4.0](https://github.com/supercharge/promise-pool/compare/v1.3.0...v1.4.0) - 2020-09-17 221 | 222 | ### Added 223 | - improved types supporting typed return values 224 | - improved error handling when rejecting a promise without an error instance (thank you [wzh](https://github.com/supercharge/promise-pool/pull/19)) 225 | 226 | ### Updated 227 | - bump dependencies 228 | - change `main` entrypoint in `package.json` to `dist` folder 229 | - move test runner from `@hapi/lab` to `jest` 230 | - move assertions from `@hapi/code` to `jest` 231 | 232 | 233 | ## [1.3.0](https://github.com/superchargejs/promise-pool/compare/v1.2.0...v1.3.0) - 2020-07-16 234 | 235 | ### Added 236 | - TypeScript typings 237 | 238 | ### Updated 239 | - bump dependencies 240 | - moved code base to TypeScript to automatically generate type definitions 241 | 242 | 243 | 244 | ## [1.2.0](https://github.com/superchargejs/promise-pool/compare/v1.1.1...v1.2.0) - 2019-10-15 245 | 246 | ### Added 247 | - static methods for `.withConcurrency` and `.for` 248 | - moves boilerplate from your code to the promise pool package 249 | - `new Pool().for(items)` is now `Pool.for(items)`) 250 | - `new Pool().withConcurrency(2)` is now `Pool.withConcurrency(2)`) 251 | - it’s always the details :) 252 | 253 | ### Updated 254 | - bump dependencies 255 | 256 | 257 | ## [1.1.1](https://github.com/superchargejs/promise-pool/compare/v1.1.0...v1.1.1) - 2019-09-24 258 | 259 | ### Updated 260 | - bump dependencies 261 | - move package docs to Supercharge docs 262 | 263 | 264 | ## [1.1.0](https://github.com/superchargejs/promise-pool/compare/v1.0.0...v1.1.0) - 2019-08-14 265 | 266 | ### Added 267 | - `module.exports.default` 268 | 269 | ### Updated 270 | - bump dependencies 271 | - update NPM scripts 272 | 273 | 274 | ## 1.0.0 - 2019-07-15 275 | 276 | ### Added 277 | - `1.0.0` release 🚀 🎉 278 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2019 The Supercharge Node.js Framework 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 |
2 | # Promise Pool
3 |
4 | Map-like, concurrent promise processing for Node.js.
5 |
6 | Installation · Docs · Usage
7 |
8 | Latest Version · Monthly downloads
9 |
10 | Follow @marcuspoehls and @superchargejs for updates!
29 | 30 | --- 31 | 32 | ## Installation 33 | 34 | ``` 35 | npm i @supercharge/promise-pool 36 | ``` 37 | 38 | 39 | ## Docs 40 | 41 | - 📖 [Documentation](https://superchargejs.com/docs/promise-pool) 42 | 43 | 44 | ## Usage 45 | Using the promise pool is pretty straightforward. The package exposes a class and you can create a promise pool instance using the fluent interface. 46 | 47 | Here’s an example using a concurrency of 2: 48 | 49 | ```js 50 | import { PromisePool } from '@supercharge/promise-pool' 51 | 52 | const users = [ 53 | { name: 'Marcus' }, 54 | { name: 'Norman' }, 55 | { name: 'Christian' } 56 | ] 57 | 58 | const { results, errors } = await PromisePool 59 | .withConcurrency(2) 60 | .for(users) 61 | .process(async (userData, index, pool) => { 62 | const user = await User.createIfNotExisting(userData) 63 | 64 | return user 65 | }) 66 | ``` 67 | 68 | The promise pool uses a default concurrency of 10: 69 | 70 | ```js 71 | await PromisePool 72 | .for(users) 73 | .process(async data => { 74 | // processes 10 items in parallel by default 75 | }) 76 | ``` 77 | 78 | 79 | ## Manually Stop the Pool 80 | You can stop the processing of a promise pool using the `pool` instance provided to the `.process()` and `.handleError()` methods. Here’s an example how you can stop an active promise pool from within the `.process()` method: 81 | 82 | ```js 83 | await PromisePool 84 | .for(users) 85 | .process(async (user, index, pool) => { 86 | if (condition) { 87 | return pool.stop() 88 | } 89 | 90 | // processes the `user` data 91 | }) 92 | ``` 93 | 94 | You may also stop the pool from within the `.handleError()` method in case you need to: 95 | 96 | ```js 97 | import { PromisePool } from '@supercharge/promise-pool' 98 | 99 | await PromisePool 100 | .for(users) 101 | .handleError(async (error, user, pool) => { 102 | if (error instanceof SomethingBadHappenedError) { 103 | return pool.stop() 104 | } 105 | 106 | // handle the given `error` 107 | }) 108 | .process(async (user, index, pool) => { 109 | // processes the `user` data 110 | }) 111 | ``` 112 | 113 | 114 | ## Bring Your Own Error Handling 115 | The promise pool allows for custom error handling. You can take over the error handling by implementing an error handler using the `.handleError(handler)`. 116 | 117 | > If you provide an error handler, the promise pool doesn’t collect any errors. You must then collect errors yourself. 118 | 119 | Providing a custom error handler allows you to exit the promise pool early by throwing inside the error handler function. Throwing errors is in line with Node.js error handling using async/await. 
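
In its simplest shape, rethrowing from the error handler aborts the pool and the error surfaces from `.process()`, where a regular `try/catch` picks it up. Here’s a minimal sketch of that pattern (it assumes the same `users` array as the examples above):

```js
import { PromisePool } from '@supercharge/promise-pool'

try {
  await PromisePool
    .for(users)
    .handleError(error => {
      throw error // rethrowing stops the pool immediately
    })
    .process(async user => {
      // process the `user` data
    })
} catch (error) {
  // the rethrown error ends up here
}
```

The more complete example below collects validation errors, retries throttled users, and rethrows everything else: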
120 | 121 | ```js 122 | import { PromisePool } from '@supercharge/promise-pool' 123 | 124 | try { 125 | const errors = [] 126 | 127 | const { results } = await PromisePool 128 | .for(users) 129 | .withConcurrency(4) 130 | .handleError(async (error, user) => { 131 | if (error instanceof ValidationError) { 132 | errors.push(error) // you must collect errors yourself 133 | return 134 | } 135 | 136 | if (error instanceof ThrottleError) { // Execute error handling on specific errors 137 | await retryUser(user) 138 | return 139 | } 140 | 141 | throw error // Uncaught errors will immediately stop PromisePool 142 | }) 143 | .process(async data => { 144 | // the harder you work for something, 145 | // the greater you’ll feel when you achieve it 146 | }) 147 | 148 | await handleCollected(errors) // this may throw 149 | 150 | return { results } 151 | } catch (error) { 152 | await handleThrown(error) 153 | } 154 | ``` 155 | 156 | 157 | ## Callback for Started and Finished Tasks 158 | You can use the `onTaskStarted` and `onTaskFinished` methods to hook into the processing of tasks. The provided callback for each method is called when a task starts or finishes processing: 159 | 160 | 161 | ```js 162 | import { PromisePool } from '@supercharge/promise-pool' 163 | 164 | await PromisePool 165 | .for(users) 166 | .onTaskStarted((item, pool) => { 167 | console.log(`Progress: ${pool.processedPercentage()}%`) 169 | console.log(`Active tasks: ${pool.activeTasksCount()}`) 170 | console.log(`Finished tasks: ${pool.processedItems().length}`) 171 | console.log(`Finished tasks: ${pool.processedCount()}`) 172 | }) 173 | .onTaskFinished((item, pool) => { 174 | // update a progress bar or something else :) 175 | }) 176 | .process(async (user, index, pool) => { 177 | // processes the `user` data 178 | }) 179 | ``` 180 | 181 | You can also chain multiple `onTaskStarted` and `onTaskFinished` handlers (in case you want to separate some functionality): 182 | 183 | ```js 184 | import { PromisePool } from '@supercharge/promise-pool' 185 | 186 | await PromisePool 187 | .for(users) 188 | .onTaskStarted(() => {}) 189 | .onTaskStarted(() => {}) 190 | .onTaskFinished(() => {}) 191 | .onTaskFinished(() => {}) 192 | .process(async (user, index, pool) => { 193 | // processes the `user` data 194 | }) 195 | ``` 196 | 197 | 198 | ## Task Timeouts 199 | Sometimes it’s useful to configure a timeout in which a task must finish processing. A task that times out is marked as failed. You may use the `withTaskTimeout()` method to configure a task’s timeout: 200 | 201 | 202 | ```js 203 | import { PromisePool } from '@supercharge/promise-pool' 204 | 205 | await PromisePool 206 | .for(users) 207 | .withTaskTimeout(2000) // milliseconds 208 | .process(async (user, index, pool) => { 209 | // processes the `user` data 210 | }) 211 | ``` 212 | 213 | **Notice:** a timeout applies to each individual task, not to the whole pool. The example configures a 2-second timeout for each task in the pool. 214 | 215 | 216 | ## Correspond Source Items and Their Results 217 | Sometimes you want the processed results to align with your source items. The resulting items should have the same position in the `results` array as their related source items.
Use the `useCorrespondingResults` method to apply this behavior: 218 | 219 | ```js 220 | import { setTimeout } from 'node:timers/promises' 221 | import { PromisePool } from '@supercharge/promise-pool' 222 | 223 | const { results } = await PromisePool 224 | .for([1, 2, 3]) 225 | .withConcurrency(5) 226 | .useCorrespondingResults() 227 | .process(async (number, index) => { 228 | const value = number * 2 229 | 230 | return await setTimeout(10 - index, value) 231 | }) 232 | 233 | /** 234 | * source array: [1, 2, 3] 235 | * result array: [2, 4, 6] 236 | * --> result values match the position of their source items 237 | */ 238 | ``` 239 | 240 | For example, you may have three items you want to process. Using corresponding results ensures that the processed result for the first item from the source array is located at the first position in the result array (=index `0`). The result for the second item from the source array is placed at the second position in the result array, and so on … 241 | 242 | 243 | ### Return Values When Using Corresponding Results 244 | The `results` array returned by the promise pool after processing has a mixed return type. Each returned item is one of these types: 245 | 246 | - the actual value type: for results that successfully finished processing 247 | - `Symbol('notRun')`: for tasks that didn’t run 248 | - `Symbol('failed')`: for tasks that failed processing 249 | 250 | The `PromisePool` exposes both symbols and you may access them using: 251 | 252 | - `Symbol('notRun')`: exposed as `PromisePool.notRun` 253 | - `Symbol('failed')`: exposed as `PromisePool.failed` 254 | 255 | You may repeat processing for all tasks that didn’t run or failed: 256 | 257 | ```js 258 | import { PromisePool } from '@supercharge/promise-pool' 259 | 260 | const { results, errors } = await PromisePool 261 | .for([1, 2, 3]) 262 | .withConcurrency(5) 263 | .useCorrespondingResults() 264 | .process(async (number) => { 265 | // … 266 | }) 267 | 268 | const itemsNotRun = results.filter(result => { 269 | return result === PromisePool.notRun 270 | }) 271 | 272 | const failedItems = results.filter(result => { 273 | return result === PromisePool.failed 274 | }) 275 | ``` 276 | 277 | When using corresponding results, you still need to go through the `errors` array yourself. The default error handling (collect errors) stays the same and you can follow the error handling section described above. 278 | 279 | 280 | ## Contributing 281 | 282 | 1. Create a fork 283 | 2. Create your feature branch: `git checkout -b my-feature` 284 | 3. Commit your changes: `git commit -am 'Add some feature'` 285 | 4. Push to the branch: `git push origin my-feature` 286 | 5. Submit a pull request 🚀 287 | 288 | 289 | ## License 290 | MIT © [Supercharge](https://superchargejs.com) 291 | 292 | --- 293 | 294 | > [superchargejs.com](https://superchargejs.com) · 295 | > GitHub [@supercharge](https://github.com/supercharge) · 296 | > Twitter [@superchargejs](https://twitter.com/superchargejs) 297 | -------------------------------------------------------------------------------- /examples/promise-pool.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | const { PromisePool } = require('../dist') 4 | 5 | /** 6 | * Very basic, non-optimal shuffle function to randomly order the items.
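 * (Sorting with a random comparator like this is biased; it is not a proper Fisher-Yates shuffle, but it is fine for a small demo.)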
7 | * 8 | * @param {any[]} array 9 | * 10 | * @returns {any[]} 11 | */ 12 | function shuffle (array) { 13 | return array.sort(() => Math.random() - 0.5) 14 | } 15 | 16 | async function run () { 17 | const timeouts = shuffle( 18 | [1, 2, 3, 4, 5, 6, 7, 8, 9, 10].map(item => item * 100) 19 | ) 20 | 21 | const { results, errors } = await PromisePool 22 | .for(timeouts) 23 | .withConcurrency(2) 24 | .process(async (timeout, index, pool) => { 25 | if (index > 100) { 26 | return pool.stop() 27 | } 28 | 29 | await new Promise(resolve => setTimeout(resolve, timeout)) 30 | console.log(`#${index}: waited ${timeout}ms`) 31 | 32 | return { item: index, timeout } 33 | }) 34 | 35 | console.log() 36 | console.log('Results ->') 37 | console.log(results) 38 | 39 | console.log() 40 | console.log(`Errors -> ${errors.length ? errors : 'none'}`) 41 | } 42 | 43 | run().catch(error => console.error('Failed to process the promise pool', error)) 44 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "@supercharge/promise-pool", 3 | "description": "Map-like, concurrent promise processing for Node.js", 4 | "version": "3.2.0", 5 | "author": "Marcus Pöhls ", 6 | "bugs": { 7 | "url": "https://github.com/superchargejs/promise-pool/issues" 8 | }, 9 | "devDependencies": { 10 | "@supercharge/eslint-config-typescript": "~3.0.1", 11 | "@supercharge/tsconfig": "~4.0.0", 12 | "c8": "~8.0.1", 13 | "eslint": "~8.57.0", 14 | "expect": "~29.7.0", 15 | "typescript": "~5.4.3", 16 | "uvu": "~0.5.6" 17 | }, 18 | "engines": { 19 | "node": ">=8" 20 | }, 21 | "homepage": "https://github.com/superchargejs/promise-pool", 22 | "keywords": [ 23 | "supercharge", 24 | "superchargejs", 25 | "promise-pool", 26 | "nodejs", 27 | "async", 28 | "map", 29 | "async-map", 30 | "promises" 31 | ], 32 | "license": "MIT", 33 | "files": [ 34 | "dist" 35 | ], 36 | "main": "dist", 37 | "publishConfig": { 38 | "access": "public" 39 | }, 40 | "repository": { 41 | "type": "git", 42 | "url": "git+https://github.com/superchargejs/promise-pool.git" 43 | }, 44 | "scripts": { 45 | "build": "tsc", 46 | "dev": "tsc --watch", 47 | "lint": "eslint src --ext .js,.ts", 48 | "lint:fix": "npm run lint -- --fix", 49 | "test": "npm run build && npm run test:coverage", 50 | "test:full": "npm run build && npm run lint && npm run test:coverage", 51 | "test:run": "uvu", 52 | "test:coverage": "c8 --include=dist npm run test:run && npm run test:report", 53 | "test:report": "c8 report --reporter=html" 54 | }, 55 | "sideEffects": false, 56 | "types": "dist" 57 | } 58 | -------------------------------------------------------------------------------- /performance/testing.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | const { PromisePool } = require('../dist') 4 | const { setTimeout } = require('timers/promises') 5 | 6 | // const batchSize = 10 * 1000 7 | const batchSize = 50 * 1000 8 | 9 | function createPromises (size) { 10 | return Array.apply(null, { length: size }).map(x => exec()) 11 | } 12 | 13 | async function exec () { 14 | await setTimeout(1000) 15 | } 16 | 17 | async function run () { 18 | console.log('Creating promises') 19 | const promisePool = createPromises(batchSize) 20 | console.time('PromisePool') 21 | 22 | await PromisePool 23 | .withConcurrency(batchSize) 24 | .for(promisePool) 25 | .process(async (d) => { 26 | await d 27 | }) 28 | console.timeEnd('PromisePool') 
29 | 30 | const promisesAll = createPromises(batchSize) 31 | console.time('Promise.all') 32 | await Promise.all(promisesAll) 33 | console.timeEnd('Promise.all') 34 | } 35 | 36 | run() 37 | .then(() => console.log('done')) 38 | .catch(error => console.error('Failed to process promise pool performance', error)) 39 | -------------------------------------------------------------------------------- /src/contracts.ts: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | export interface UsesConcurrency { 4 | /** 5 | * Assign the given `concurrency` as the number of tasks being processed concurrently the promise pool. 6 | */ 7 | useConcurrency (concurrency: number): this 8 | 9 | /** 10 | * Returns the number of concurrently processed tasks. 11 | */ 12 | concurrency (): number 13 | } 14 | 15 | export interface Stoppable { 16 | /** 17 | * Stop the promise pool and returns any results that already have been calculated. 18 | * Stopping the pool waits for active task to finish processing before returning. 19 | */ 20 | stop (): void 21 | 22 | /** 23 | * Determine whether the pool is marked as stopped. 24 | */ 25 | isStopped(): boolean 26 | } 27 | 28 | export interface Statistics { 29 | /** 30 | * Returns the number of currently active tasks. 31 | * 32 | * @deprecated use the `activeTasksCount()` method (plural naming) instead 33 | */ 34 | activeTaskCount (): number 35 | 36 | /** 37 | * Returns the number of currently active tasks. 38 | */ 39 | activeTasksCount (): number 40 | 41 | /** 42 | * Returns the list of processed items. 43 | */ 44 | processedItems (): T[] 45 | 46 | /** 47 | * Returns the number of processed items. 48 | */ 49 | processedCount (): number 50 | 51 | /** 52 | * Returns the percentage progress of items that have been processed. 53 | */ 54 | processedPercentage (): number 55 | } 56 | 57 | export type ErrorHandler = (error: Error, item: T, pool: Stoppable & UsesConcurrency) => Promise | void 58 | 59 | export type ProcessHandler = (item: T, index: number, pool: Stoppable & UsesConcurrency) => Promise | R 60 | 61 | export type OnProgressCallback = (item: T, pool: Stoppable & Statistics & UsesConcurrency) => void 62 | 63 | export type SomeIterable = T[] | Iterable | AsyncIterable 64 | -------------------------------------------------------------------------------- /src/index.ts: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | import { PromisePool } from './promise-pool' 4 | 5 | export default PromisePool 6 | 7 | export * from './contracts' 8 | export * from './promise-pool' 9 | export * from './promise-pool-error' 10 | export * from './return-value' 11 | export * from './stop-the-promise-pool-error' 12 | export * from './validation-error' 13 | -------------------------------------------------------------------------------- /src/promise-pool-error.ts: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | export class PromisePoolError extends Error { 4 | /** 5 | * Returns the item that caused this error. 6 | */ 7 | public item: T 8 | 9 | /** 10 | * Returns the original, raw error instance. 11 | */ 12 | public raw: E 13 | 14 | /** 15 | * Create a new instance for the given `message` and `item`. 
16 | * 17 | * @param error The original error 18 | * @param item The item causing the error 19 | */ 20 | constructor (error: E, item: T) { 21 | super() 22 | 23 | this.raw = error 24 | this.item = item 25 | this.name = this.constructor.name 26 | this.message = this.messageFrom(error) 27 | 28 | if (Error.captureStackTrace && typeof Error.captureStackTrace === 'function') { 29 | Error.captureStackTrace(this, this.constructor) 30 | } 31 | } 32 | 33 | /** 34 | * Returns a new promise pool error instance wrapping the `error` and `item`. 35 | */ 36 | static createFrom(error: E, item: T): PromisePoolError { 37 | return new this(error, item) 38 | } 39 | 40 | /** 41 | * Returns the error message from the given `error`. 42 | */ 43 | private messageFrom (error: any): string { 44 | if (error instanceof Error) { 45 | return error.message 46 | } 47 | 48 | if (typeof error === 'object') { 49 | return error.message 50 | } 51 | 52 | if (typeof error === 'string' || typeof error === 'number') { 53 | return error.toString() 54 | } 55 | 56 | return '' 57 | } 58 | } 59 | -------------------------------------------------------------------------------- /src/promise-pool-executor.ts: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | import { PromisePool } from './promise-pool' 4 | import { ReturnValue } from './return-value' 5 | import { ValidationError } from './validation-error' 6 | import { PromisePoolError } from './promise-pool-error' 7 | import { StopThePromisePoolError } from './stop-the-promise-pool-error' 8 | import { ErrorHandler, ProcessHandler, OnProgressCallback, Statistics, Stoppable, UsesConcurrency, SomeIterable } from './contracts' 9 | 10 | export class PromisePoolExecutor implements UsesConcurrency, Stoppable, Statistics { 11 | /** 12 | * Stores the internal properties. 13 | */ 14 | private readonly meta: { 15 | /** 16 | * The list of items to process. 17 | */ 18 | items: SomeIterable 19 | 20 | /** 21 | * The list of processed items. 22 | */ 23 | processedItems: T[] 24 | 25 | /** 26 | * The number of concurrently running tasks. 27 | */ 28 | concurrency: number 29 | 30 | /** 31 | * Determine whether to put a task’s result at the same position in the result 32 | * array as its related source item has in the source array. 33 | */ 34 | shouldResultsCorrespond: boolean 35 | 36 | /** 37 | * The maximum timeout in milliseconds for the item handler, or `undefined` to disable. 38 | */ 39 | taskTimeout: number | undefined 40 | 41 | /** 42 | * Determine whether the pool is stopped. 43 | */ 44 | stopped: boolean 45 | 46 | /** 47 | * The intermediate list of currently running tasks. 48 | */ 49 | readonly tasks: any[] 50 | 51 | /** 52 | * The list of results. 53 | */ 54 | results: Array 55 | 56 | /** 57 | * The list of errors. 58 | */ 59 | readonly errors: Array> 60 | } 61 | 62 | /** 63 | * The async processing function receiving each item from the `items` array. 64 | */ 65 | private handler: ProcessHandler 66 | 67 | /** 68 | * The async error handling function. 69 | */ 70 | private errorHandler?: ErrorHandler 71 | 72 | /** 73 | * The `taskStarted` handler callback functions 74 | */ 75 | private onTaskStartedHandlers: Array> 76 | 77 | /** 78 | * The `taskFinished` handler callback functions 79 | */ 80 | private onTaskFinishedHandlers: Array> 81 | 82 | /** 83 | * Creates a new promise pool executer instance with a default concurrency of 10. 
84 | */ 85 | constructor () { 86 | this.meta = { 87 | tasks: [], 88 | items: [], 89 | errors: [], 90 | results: [], 91 | stopped: false, 92 | concurrency: 10, 93 | shouldResultsCorrespond: false, 94 | processedItems: [], 95 | taskTimeout: 0 96 | } 97 | 98 | this.handler = (item) => item as any 99 | this.errorHandler = undefined 100 | this.onTaskStartedHandlers = [] 101 | this.onTaskFinishedHandlers = [] 102 | } 103 | 104 | /** 105 | * Set the number of tasks to process concurrently the promise pool. 106 | */ 107 | useConcurrency (concurrency: number): this { 108 | if (!this.isValidConcurrency(concurrency)) { 109 | throw ValidationError.createFrom(`"concurrency" must be a number, 1 or up. Received "${concurrency}" (${typeof concurrency})`) 110 | } 111 | 112 | this.meta.concurrency = concurrency 113 | 114 | return this 115 | } 116 | 117 | /** 118 | * Determine whether the given `concurrency` value is valid. 119 | */ 120 | private isValidConcurrency (concurrency: number): boolean { 121 | return typeof concurrency === 'number' && concurrency >= 1 122 | } 123 | 124 | /** 125 | * Set the timeout in ms for the pool handler 126 | */ 127 | withTaskTimeout (timeout: number | undefined): this { 128 | this.meta.taskTimeout = timeout 129 | 130 | return this 131 | } 132 | 133 | /** 134 | * Returns the number of concurrently processed tasks. 135 | */ 136 | concurrency (): number { 137 | return this.meta.concurrency 138 | } 139 | 140 | /** 141 | * Assign whether to keep corresponding results between source items and resulting tasks. 142 | */ 143 | useCorrespondingResults (shouldResultsCorrespond: boolean): this { 144 | this.meta.shouldResultsCorrespond = shouldResultsCorrespond 145 | 146 | return this 147 | } 148 | 149 | /** 150 | * Determine whether to keep corresponding results between source items and resulting tasks. 151 | */ 152 | shouldUseCorrespondingResults (): boolean { 153 | return this.meta.shouldResultsCorrespond 154 | } 155 | 156 | /** 157 | * Returns the task timeout in milliseconds. 158 | */ 159 | taskTimeout (): number | undefined { 160 | return this.meta.taskTimeout 161 | } 162 | 163 | /** 164 | * Set the items to be processed in the promise pool. 165 | */ 166 | for (items: SomeIterable): this { 167 | this.meta.items = items 168 | 169 | return this 170 | } 171 | 172 | /** 173 | * Returns the list of items to process. 174 | */ 175 | items (): SomeIterable { 176 | return this.meta.items 177 | } 178 | 179 | /** 180 | * Returns the number of items to process, or `NaN` if items are not an array. 181 | */ 182 | itemsCount (): number { 183 | const items = this.items() 184 | return Array.isArray(items) ? items.length : NaN 185 | } 186 | 187 | /** 188 | * Returns the list of active tasks. 189 | */ 190 | tasks (): any[] { 191 | return this.meta.tasks 192 | } 193 | 194 | /** 195 | * Returns the number of currently active tasks. 196 | * 197 | * @deprecated use the `activeTasksCount()` method (plural naming) instead 198 | */ 199 | activeTaskCount (): number { 200 | return this.activeTasksCount() 201 | } 202 | 203 | /** 204 | * Returns the number of currently active tasks. 205 | */ 206 | activeTasksCount (): number { 207 | return this.tasks().length 208 | } 209 | 210 | /** 211 | * Returns the list of processed items. 212 | */ 213 | processedItems (): T[] { 214 | return this.meta.processedItems 215 | } 216 | 217 | /** 218 | * Returns the number of processed items. 
219 | */ 220 | processedCount (): number { 221 | return this.processedItems().length 222 | } 223 | 224 | /** 225 | * Returns the percentage progress of items that have been processed, or `NaN` if items is not an array. 226 | */ 227 | processedPercentage (): number { 228 | return (this.processedCount() / this.itemsCount()) * 100 229 | } 230 | 231 | /** 232 | * Returns the list of results. 233 | */ 234 | results (): Array { 235 | return this.meta.results 236 | } 237 | 238 | /** 239 | * Returns the list of errors. 240 | */ 241 | errors (): Array> { 242 | return this.meta.errors 243 | } 244 | 245 | /** 246 | * Set the handler that is applied to each item. 247 | */ 248 | withHandler (action: ProcessHandler): this { 249 | this.handler = action 250 | 251 | return this 252 | } 253 | 254 | /** 255 | * Determine whether a custom error handle is available. 256 | */ 257 | hasErrorHandler (): boolean { 258 | return !!this.errorHandler 259 | } 260 | 261 | /** 262 | * Set the error handler function to execute when an error occurs. 263 | */ 264 | handleError (handler?: (error: Error, item: T, pool: Stoppable & UsesConcurrency) => Promise | void): this { 265 | this.errorHandler = handler 266 | 267 | return this 268 | } 269 | 270 | /** 271 | * Set the handler function to execute when started a task. 272 | */ 273 | onTaskStarted (handlers: Array>): this { 274 | this.onTaskStartedHandlers = handlers 275 | 276 | return this 277 | } 278 | 279 | /** 280 | * Assign the given callback `handler` function to run when a task finished. 281 | */ 282 | 283 | onTaskFinished (handlers: Array>): this { 284 | this.onTaskFinishedHandlers = handlers 285 | 286 | return this 287 | } 288 | 289 | /** 290 | * Determines whether the number of active tasks is greater or equal to the concurrency limit. 291 | */ 292 | hasReachedConcurrencyLimit (): boolean { 293 | return this.activeTasksCount() >= this.concurrency() 294 | } 295 | 296 | /** 297 | * Stop a promise pool processing. 298 | */ 299 | stop (): void { 300 | this.markAsStopped() 301 | 302 | throw new StopThePromisePoolError() 303 | } 304 | 305 | /** 306 | * Mark the promise pool as stopped. 307 | */ 308 | markAsStopped (): this { 309 | this.meta.stopped = true 310 | 311 | return this 312 | } 313 | 314 | /** 315 | * Determine whether the pool is stopped. 316 | */ 317 | isStopped (): boolean { 318 | return this.meta.stopped 319 | } 320 | 321 | /** 322 | * Start processing the promise pool. 323 | */ 324 | async start (): Promise { 325 | return await this 326 | .validateInputs() 327 | .prepareResultsArray() 328 | .process() 329 | } 330 | 331 | /** 332 | * Ensure that the given input values are valid or throw an error otherwise. 333 | */ 334 | validateInputs (): this { 335 | if (typeof this.handler !== 'function') { 336 | throw ValidationError.createFrom('The first parameter for the .process(fn) method must be a function') 337 | } 338 | 339 | const timeout = this.taskTimeout() 340 | 341 | if (!(timeout == null || (typeof timeout === 'number' && timeout >= 0))) { 342 | throw ValidationError.createFrom(`"timeout" must be undefined or a number. A number must be 0 or up. Received "${String(timeout)}" (${typeof timeout})`) 343 | } 344 | 345 | if (!this.areItemsValid()) { 346 | throw ValidationError.createFrom(`"items" must be an array, an iterable or an async iterable. Received "${typeof this.items()}"`) 347 | } 348 | 349 | if (this.errorHandler && typeof this.errorHandler !== 'function') { 350 | throw ValidationError.createFrom(`The error handler must be a function. 
Received "${typeof this.errorHandler}"`) 351 | } 352 | 353 | this.onTaskStartedHandlers.forEach(handler => { 354 | if (handler && typeof handler !== 'function') { 355 | throw ValidationError.createFrom(`The onTaskStarted handler must be a function. Received "${typeof handler}"`) 356 | } 357 | }) 358 | 359 | this.onTaskFinishedHandlers.forEach(handler => { 360 | if (handler && typeof handler !== 'function') { 361 | throw ValidationError.createFrom(`The error handler must be a function. Received "${typeof handler}"`) 362 | } 363 | }) 364 | 365 | return this 366 | } 367 | 368 | /** 369 | * Determine whether the provided items are processable by the pool. We’re 370 | * handling arrays and (async) iterables. Everything else is not valid. 371 | */ 372 | private areItemsValid (): boolean { 373 | const items = this.items() as any 374 | 375 | return Array.isArray(items) || 376 | typeof items[Symbol.iterator] === 'function' || 377 | typeof items[Symbol.asyncIterator] === 'function' 378 | } 379 | 380 | /** 381 | * Prefill the results array with `notRun` symbol values if results should correspond. 382 | */ 383 | private prepareResultsArray (): this { 384 | const items = this.items() 385 | 386 | if (Array.isArray(items) && this.shouldUseCorrespondingResults()) { 387 | this.meta.results = Array(items.length).fill(PromisePool.notRun) 388 | } 389 | 390 | return this 391 | } 392 | 393 | /** 394 | * Starts processing the promise pool by iterating over the items 395 | * and running each item through the async `callback` function. 396 | */ 397 | async process (): Promise> { 398 | let index = 0 399 | 400 | for await (const item of this.items()) { 401 | if (this.isStopped()) { 402 | break 403 | } 404 | 405 | if (this.shouldUseCorrespondingResults()) { 406 | this.results()[index] = PromisePool.notRun 407 | } 408 | 409 | this.startProcessing(item, index) 410 | index += 1 411 | 412 | // don't consume the next item from iterable 413 | // until there's a free slot for a new task 414 | await this.waitForProcessingSlot() 415 | } 416 | 417 | return await this.drained() 418 | } 419 | 420 | /** 421 | * Wait for one of the active tasks to finish processing. 422 | */ 423 | async waitForProcessingSlot (): Promise { 424 | /** 425 | * We’re using a while loop here because it’s possible to decrease the pool’s 426 | * concurrency at runtime. We need to wait for as many tasks as needed to 427 | * finish processing before moving on to process the remaining tasks. 428 | */ 429 | while (this.hasReachedConcurrencyLimit()) { 430 | await this.waitForActiveTaskToFinish() 431 | } 432 | } 433 | 434 | /** 435 | * Wait for the next, currently active task to finish processing. 436 | */ 437 | async waitForActiveTaskToFinish (): Promise { 438 | await Promise.race( 439 | this.tasks() 440 | ) 441 | } 442 | 443 | /** 444 | * Create a processing function for the given `item`. 445 | */ 446 | startProcessing (item: T, index: number): void { 447 | const task: Promise = this.createTaskFor(item, index) 448 | .then(result => { 449 | this.save(result, index).removeActive(task) 450 | }) 451 | .catch(async error => { 452 | await this.handleErrorFor(error, item, index) 453 | this.removeActive(task) 454 | }) 455 | .finally(() => { 456 | this.processedItems().push(item) 457 | this.runOnTaskFinishedHandlers(item) 458 | }) 459 | 460 | this.tasks().push(task) 461 | this.runOnTaskStartedHandlers(item) 462 | } 463 | 464 | /** 465 | * Ensures a returned promise for the processing of the given `item`. 
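 * If a task timeout is configured, the handler is raced against a timer that rejects with a `PromisePoolError` once the timeout fires; the timer is cleared as soon as either side settles.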
466 | */ 467 | async createTaskFor (item: T, index: number): Promise { 468 | if (this.taskTimeout() === undefined) { 469 | return this.handler(item, index, this) 470 | } 471 | 472 | const [timer, canceller] = this.createTaskTimeout(item) 473 | 474 | return Promise.race([ 475 | this.handler(item, index, this), 476 | timer(), 477 | ]).finally(canceller) 478 | } 479 | 480 | /** 481 | * Returns a tuple of a timer function and a canceller function that 482 | * times-out after the configured task timeout. 483 | */ 484 | private createTaskTimeout (item: T): [() => Promise, () => void] { 485 | let timerId: ReturnType | undefined 486 | 487 | const timer: () => Promise = async () => 488 | new Promise((_resolve, reject) => { 489 | timerId = setTimeout(() => { 490 | reject(new PromisePoolError(`Task in promise pool timed out after ${this.taskTimeout() as number}ms`, item)) 491 | }, this.taskTimeout()) 492 | }) 493 | 494 | const canceller: () => void = () => clearTimeout(timerId) 495 | 496 | return [timer, canceller] 497 | } 498 | 499 | /** 500 | * Save the given calculation `result`, possibly at the provided `position`. 501 | */ 502 | save (result: any, position: number): this { 503 | this.shouldUseCorrespondingResults() 504 | ? this.results()[position] = result 505 | : this.results().push(result) 506 | 507 | return this 508 | } 509 | 510 | /** 511 | * Remove the given `task` from the list of active tasks. 512 | */ 513 | removeActive (task: Promise): this { 514 | this.tasks().splice( 515 | this.tasks().indexOf(task), 1 516 | ) 517 | 518 | return this 519 | } 520 | 521 | /** 522 | * Create and save an error for the the given `item`. 523 | */ 524 | async handleErrorFor (error: Error, item: T, index: number): Promise { 525 | if (this.shouldUseCorrespondingResults()) { 526 | this.results()[index] = PromisePool.failed 527 | } 528 | 529 | if (this.isStoppingThePoolError(error)) { 530 | return 531 | } 532 | 533 | if (this.isValidationError(error)) { 534 | this.markAsStopped() 535 | throw error 536 | } 537 | 538 | this.hasErrorHandler() 539 | ? await this.runErrorHandlerFor(error, item) 540 | : this.saveErrorFor(error, item) 541 | } 542 | 543 | /** 544 | * Determine whether the given `error` is a `StopThePromisePoolError` instance. 545 | */ 546 | isStoppingThePoolError (error: Error): boolean { 547 | return error instanceof StopThePromisePoolError 548 | } 549 | 550 | /** 551 | * Determine whether the given `error` is a `ValidationError` instance. 552 | */ 553 | isValidationError (error: Error): boolean { 554 | return error instanceof ValidationError 555 | } 556 | 557 | /** 558 | * Run the user’s error handler, if available. 559 | */ 560 | async runErrorHandlerFor (processingError: Error, item: T): Promise { 561 | try { 562 | await this.errorHandler?.(processingError, item, this) 563 | } catch (error: any) { 564 | this.rethrowIfNotStoppingThePool(error) 565 | } 566 | } 567 | 568 | /** 569 | * Run the onTaskStarted handlers. 570 | */ 571 | runOnTaskStartedHandlers (item: T): void { 572 | this.onTaskStartedHandlers.forEach(handler => { 573 | handler(item, this) 574 | }) 575 | } 576 | 577 | /** 578 | * Run the onTaskFinished handlers. 579 | */ 580 | runOnTaskFinishedHandlers (item: T): void { 581 | this.onTaskFinishedHandlers.forEach(handler => { 582 | handler(item, this) 583 | }) 584 | } 585 | 586 | /** 587 | * Rethrow the given `error` if it’s not an instance of `StopThePromisePoolError`. 
588 | */ 589 | rethrowIfNotStoppingThePool (error: Error): void { 590 | if (this.isStoppingThePoolError(error)) { 591 | return 592 | } 593 | 594 | throw error 595 | } 596 | 597 | /** 598 | * Create and save an error for the the given `item`. 599 | */ 600 | saveErrorFor (error: Error, item: T): void { 601 | this.errors().push( 602 | PromisePoolError.createFrom(error, item) 603 | ) 604 | } 605 | 606 | /** 607 | * Wait for all active tasks to finish. Once all the tasks finished 608 | * processing, returns an object containing the results and errors. 609 | */ 610 | async drained (): Promise> { 611 | await this.drainActiveTasks() 612 | 613 | return { 614 | errors: this.errors(), 615 | results: this.results() 616 | } 617 | } 618 | 619 | /** 620 | * Wait for all of the active tasks to finish processing. 621 | */ 622 | async drainActiveTasks (): Promise { 623 | await Promise.all( 624 | this.tasks() 625 | ) 626 | } 627 | } 628 | -------------------------------------------------------------------------------- /src/promise-pool.ts: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | import { ReturnValue } from './return-value' 4 | import { PromisePoolExecutor } from './promise-pool-executor' 5 | import { ErrorHandler, ProcessHandler, OnProgressCallback, SomeIterable } from './contracts' 6 | 7 | export class PromisePool { 8 | /** 9 | * The processable items. 10 | */ 11 | private readonly items: SomeIterable 12 | 13 | /** 14 | * The number of promises running concurrently. 15 | */ 16 | private concurrency: number 17 | 18 | /** 19 | * Determine whether to put a task’s result at the same position in the result 20 | * array as its related source item has in the source array. Failing tasks 21 | * and those items that didn’t run carry a related symbol as a value. 22 | */ 23 | private shouldResultsCorrespond: boolean 24 | 25 | /** 26 | * The maximum timeout in milliseconds for the item handler, or `undefined` to disable. 27 | */ 28 | private timeout: number | undefined 29 | 30 | /** 31 | * The error handler callback function 32 | */ 33 | private errorHandler?: ErrorHandler 34 | 35 | /** 36 | * The `taskStarted` handler callback functions 37 | */ 38 | private readonly onTaskStartedHandlers: Array> 39 | 40 | /** 41 | * The `taskFinished` handler callback functions 42 | */ 43 | private readonly onTaskFinishedHandlers: Array> 44 | 45 | public static readonly notRun: symbol = Symbol('notRun') 46 | public static readonly failed: symbol = Symbol('failed') 47 | 48 | /** 49 | * Instantiates a new promise pool with a default `concurrency: 10` and `items: []`. 50 | */ 51 | constructor (items?: SomeIterable) { 52 | this.timeout = undefined 53 | this.concurrency = 10 54 | this.items = items ?? [] 55 | this.errorHandler = undefined 56 | this.onTaskStartedHandlers = [] 57 | this.onTaskFinishedHandlers = [] 58 | this.shouldResultsCorrespond = false 59 | } 60 | 61 | /** 62 | * Set the number of tasks to process concurrently in the promise pool. 63 | */ 64 | withConcurrency (concurrency: number): PromisePool { 65 | this.concurrency = concurrency 66 | 67 | return this 68 | } 69 | 70 | /** 71 | * Set the number of tasks to process concurrently in the promise pool. 72 | */ 73 | static withConcurrency (concurrency: number): PromisePool { 74 | return new this().withConcurrency(concurrency) 75 | } 76 | 77 | /** 78 | * Set the timeout in milliseconds for the pool handler. 
79 | */ 80 | withTaskTimeout (timeout: number): PromisePool { 81 | this.timeout = timeout 82 | 83 | return this 84 | } 85 | 86 | /** 87 | * Set the timeout in milliseconds for the pool handler. 88 | */ 89 | static withTaskTimeout (timeout: number): PromisePool { 90 | return new this().withTaskTimeout(timeout) 91 | } 92 | 93 | /** 94 | * Set the items to be processed in the promise pool. 95 | */ 96 | for (items: SomeIterable): PromisePool { 97 | const pool = new PromisePool(items).withConcurrency(this.concurrency) 98 | 99 | if (typeof this.errorHandler === 'function') { 100 | pool.handleError(this.errorHandler as unknown as ErrorHandler) 101 | } 102 | 103 | return typeof this.timeout === 'number' 104 | ? pool.withTaskTimeout(this.timeout) 105 | : pool 106 | } 107 | 108 | /** 109 | * Set the items to be processed in the promise pool. 110 | */ 111 | static for (items: SomeIterable): PromisePool { 112 | return new this().for(items) 113 | } 114 | 115 | /** 116 | * Set the error handler function to execute when an error occurs. 117 | */ 118 | handleError (handler: ErrorHandler): PromisePool { 119 | this.errorHandler = handler 120 | 121 | return this 122 | } 123 | 124 | /** 125 | * Assign the given callback `handler` function to run when a task starts. 126 | */ 127 | onTaskStarted (handler: OnProgressCallback): PromisePool { 128 | this.onTaskStartedHandlers.push(handler) 129 | 130 | return this 131 | } 132 | 133 | /** 134 | * Assign the given callback `handler` function to run when a task finished. 135 | */ 136 | onTaskFinished (handler: OnProgressCallback): PromisePool { 137 | this.onTaskFinishedHandlers.push(handler) 138 | 139 | return this 140 | } 141 | 142 | /** 143 | * Assign whether to keep corresponding results between source items and resulting tasks. 144 | */ 145 | useCorrespondingResults (): PromisePool { 146 | this.shouldResultsCorrespond = true 147 | 148 | return this 149 | } 150 | 151 | /** 152 | * Starts processing the promise pool by iterating over the items 153 | * and running each item through the async `callback` function. 154 | */ 155 | async process ( 156 | callback: ProcessHandler 157 | ): Promise> { 158 | return new PromisePoolExecutor() 159 | .useConcurrency(this.concurrency) 160 | .useCorrespondingResults(this.shouldResultsCorrespond) 161 | .withTaskTimeout(this.timeout) 162 | .withHandler(callback) 163 | .handleError(this.errorHandler) 164 | .onTaskStarted(this.onTaskStartedHandlers) 165 | .onTaskFinished(this.onTaskFinishedHandlers) 166 | .for(this.items) 167 | .start() 168 | } 169 | } 170 | -------------------------------------------------------------------------------- /src/return-value.ts: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | import { PromisePoolError } from './promise-pool-error' 4 | 5 | export interface ReturnValue { 6 | /** 7 | * The list of results returned by the processing function. 8 | */ 9 | results: R[] 10 | 11 | /** 12 | * The list of errors that occurred while processing all items in the pool. 13 | * Each error contains the error-causing item at `error.item` as a 14 | * reference for re-processing. 
15 | */ 16 | errors: Array> 17 | } 18 | -------------------------------------------------------------------------------- /src/stop-the-promise-pool-error.ts: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | export class StopThePromisePoolError extends Error {} 4 | -------------------------------------------------------------------------------- /src/validation-error.ts: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | export class ValidationError extends Error { 4 | /** 5 | * Create a new instance for the given `message`. 6 | */ 7 | constructor (message?: string) { 8 | super(message) 9 | 10 | if (Error.captureStackTrace && typeof Error.captureStackTrace === 'function') { 11 | Error.captureStackTrace(this, this.constructor) 12 | } 13 | } 14 | 15 | /** 16 | * Returns a validation error with the given `message`. 17 | */ 18 | static createFrom (message: string): ValidationError { 19 | return new this(message) 20 | } 21 | } 22 | -------------------------------------------------------------------------------- /test/pool-from-iterable.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | const { test } = require('uvu') 4 | const { expect } = require('expect') 5 | const { PromisePool } = require('../dist') 6 | 7 | const pause = timeout => new Promise(resolve => setTimeout(resolve, timeout)) 8 | 9 | const fakeClock = { 10 | time: 0, 11 | schedule: [], 12 | pause: (t) => new Promise(resolve => { 13 | fakeClock.schedule.push([fakeClock.time + t, resolve]) 14 | }), 15 | run: async () => { 16 | await pause(0) 17 | const s = fakeClock.schedule 18 | if (s.length === 0) return 19 | 20 | fakeClock.time += 1 21 | for (let i = 0; i < s.length;) { 22 | const [t, res] = s[i] 23 | if (t <= fakeClock.time) { 24 | res() 25 | s.splice(i, 1) 26 | } else { 27 | i += 1 28 | } 29 | } 30 | 31 | return fakeClock.run() 32 | } 33 | } 34 | 35 | test('supports iterable in the static .for method', async () => { 36 | const { results } = await PromisePool 37 | .for('hello') 38 | .withConcurrency(2) 39 | .process(async (letter) => { 40 | await pause(10) 41 | return letter.toUpperCase() 42 | }) 43 | 44 | expect(results.sort()).toEqual([...'EHLLO']) 45 | }) 46 | 47 | test('iterates lazily', async () => { 48 | const items = [10, 20, 30, 40] 49 | const logs = [] 50 | 51 | const iterable = { 52 | * [Symbol.iterator] () { 53 | for (const item of items) { 54 | logs.push(`yielded ${item}`) 55 | yield item 56 | } 57 | } 58 | } 59 | 60 | const { results } = await PromisePool 61 | .for(iterable) 62 | .withConcurrency(2) 63 | .process(async (item, index) => { 64 | await pause(10 * index) 65 | logs.push(`processed ${item}`) 66 | return item 67 | }) 68 | 69 | expect(logs).toEqual([ 70 | 'yielded 10', 71 | 'yielded 20', 72 | 'processed 10', 73 | 'yielded 30', 74 | 'processed 20', 75 | 'yielded 40', 76 | 'processed 30', 77 | 'processed 40' 78 | ]) 79 | 80 | expect(results).toEqual([10, 20, 30, 40]) 81 | }) 82 | 83 | test('supports async iterable in the static .for method', async () => { 84 | const items = [[10, 500], [20, 20], [30, 10], [40, 0]] 85 | const logs = [] 86 | 87 | const iterable = { 88 | async * [Symbol.asyncIterator] () { 89 | for (const item of items) { 90 | logs.push(`loaded ${item.join(',')}`) 91 | await pause(item[0]) 92 | 93 | logs.push(`yielded ${item.join(',')}`) 94 | yield item 95 | } 96 | } 97 | } 98 | 99 | const { results } = await PromisePool 100 | 
101 |     .withConcurrency(2)
102 |     .process(async (item) => {
103 |       await pause(item[1])
104 |       logs.push(`processed ${item.join(',')}`)
105 |       return item[0]
106 |     })
107 | 
108 |   expect(logs).toEqual([
109 |     'loaded 10,500',
110 |     'yielded 10,500',
111 |     'loaded 20,20',
112 |     'yielded 20,20',
113 |     'processed 20,20',
114 |     'loaded 30,10',
115 |     'yielded 30,10',
116 |     'processed 30,10',
117 |     'loaded 40,0',
118 |     'yielded 40,0',
119 |     'processed 40,0',
120 |     'processed 10,500'
121 |   ])
122 | 
123 |   expect(results).toEqual([20, 30, 40, 10])
124 | })
125 | 
126 | // The following test transitions from a badly parallelizable case, where the bottleneck
127 | // is the async iterator that generates the data sequentially, to a highly parallelizable
128 | // case where the async iterable only takes a small fraction of the overall "computation".
129 | //
130 | // The processes should be scheduled according to the following diagram,
131 | // where each "S" is a unit of time spent on the sequential computation (in the async iterable),
132 | // and each "P" is a unit of time spent on a parallelized task (in the process method):
133 | //
134 | // time: v0 v10 v20 v30 v40
135 | // SSSSSSSSSSPPPPPPPPPP SSSSPPPPPPPSPPPP
136 | // SSSSSSSSPPPPPPPPP SSPPPPPPSPPP
137 | // SSSSSSPPPPPPPPSPPPPPSPP
138 | //
139 | // loaded: 1 2 3 4 5 6 78 9
140 | // yielded: 1 2 3 4 5 6 78 9
141 | // processed: 1 2 3 45 6 (78 at once, then 9)
142 | // ^0 ^10 ^20 ^30 ^40
143 | //
144 | // Because there are so many steps in the test, the entropy from setTimeout would quickly
145 | // add up, resulting in unreliable test results. Therefore we will be using a fake clock
146 | // which ensures that things will happen in the right order – that is, things which are
147 | // scheduled to happen after, say, 70ms will happen before things which are scheduled to
148 | // happen after 71ms. However, the actual time that elapses between these operations is
149 | // completely arbitrary.
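To make the fake clock's guarantee concrete, here is a minimal sketch of how the `fakeClock` helper defined above behaves (illustration only, not executed by the test suite): pauses resolve strictly in fake-time order once `fakeClock.run()` drains the schedule, no matter how much real time passes in between.

```js
// Illustration of the fakeClock helper defined at the top of this file.
const order = []

// Schedule two pauses at fake times 5 and 3 – real elapsed time is irrelevant.
fakeClock.pause(5).then(() => order.push('fake time 5'))
fakeClock.pause(3).then(() => order.push('fake time 3'))

// Drain the schedule: entries resolve in ascending fake-time order.
fakeClock.run().then(() => {
  console.log(order) // ['fake time 3', 'fake time 5']
})
```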
150 | 151 | test('schedules async iterables efficiently', async () => { 152 | const logs = [] 153 | 154 | async function * generateIterable () { 155 | let index = 0 156 | let sequentialWait = 10 157 | let parallelWait = 10 158 | 159 | while (parallelWait > 1) { 160 | logs.push(`loaded ${index + 1} at ${fakeClock.time}`) 161 | await fakeClock.pause(sequentialWait) 162 | 163 | logs.push(`yielded ${index + 1} at ${fakeClock.time}`) 164 | yield parallelWait 165 | 166 | sequentialWait -= 2 167 | sequentialWait = Math.max(sequentialWait, 1) 168 | parallelWait -= 1 169 | index += 1 170 | } 171 | } 172 | 173 | PromisePool 174 | .for(generateIterable()) 175 | .withConcurrency(3) 176 | .process(async (timeout, index) => { 177 | await fakeClock.pause(timeout) 178 | logs.push(`processed ${index + 1} at ${fakeClock.time}`) 179 | }) 180 | 181 | await fakeClock.run() 182 | 183 | expect(logs).toEqual([ 184 | 'loaded 1 at 0', 185 | 'yielded 1 at 10', 186 | 'loaded 2 at 10', 187 | 'yielded 2 at 18', 188 | 'loaded 3 at 18', 189 | 'processed 1 at 20', 190 | 'yielded 3 at 24', 191 | 'loaded 4 at 24', 192 | 'processed 2 at 27', 193 | 'yielded 4 at 28', 194 | 'loaded 5 at 28', 195 | 'yielded 5 at 30', 196 | 'processed 3 at 32', 197 | 'loaded 6 at 32', 198 | 'yielded 6 at 33', 199 | 'processed 4 at 35', 200 | 'loaded 7 at 35', 201 | 'processed 5 at 36', 202 | 'yielded 7 at 36', 203 | 'loaded 8 at 36', 204 | 'yielded 8 at 37', 205 | 'processed 6 at 38', 206 | 'loaded 9 at 38', 207 | 'yielded 9 at 39', 208 | 'processed 7 at 40', 209 | 'processed 8 at 40', 210 | 'processed 9 at 41' 211 | ]) 212 | }) 213 | 214 | test.run() 215 | -------------------------------------------------------------------------------- /test/promise-pool.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | const { test } = require('uvu') 4 | const { expect } = require('expect') 5 | const { PromisePool, ValidationError, PromisePoolError } = require('../dist') 6 | 7 | async function pause (timeout) { 8 | return new Promise(resolve => { 9 | setTimeout(resolve, timeout) 10 | }) 11 | } 12 | 13 | test('creates a new PromisePool', async () => { 14 | const pool = new PromisePool() 15 | expect(pool.concurrency).toEqual(10) 16 | }) 17 | 18 | test('supports a static .for method', async () => { 19 | const users = [1, 2, 3] 20 | const userPool = PromisePool.for(users) 21 | expect(userPool.items).toEqual(users) 22 | expect(userPool instanceof PromisePool).toBe(true) 23 | }) 24 | 25 | test('supports a static .withConcurrency method', async () => { 26 | const pool = PromisePool.withConcurrency(4) 27 | expect(pool.concurrency).toEqual(4) 28 | expect(pool instanceof PromisePool).toBe(true) 29 | }) 30 | 31 | test('supports a static .withTaskTimeout method', async () => { 32 | const pool = PromisePool.withTaskTimeout(4000) 33 | expect(pool.timeout).toEqual(4000) 34 | expect(pool instanceof PromisePool).toBe(true) 35 | }) 36 | 37 | test('allows method chaining for the promise pool setup', async () => { 38 | const users = [1, 2, 3] 39 | const userPool = new PromisePool().withConcurrency(2).for(users) 40 | expect(userPool.items).toEqual(users) 41 | expect(userPool.concurrency).toEqual(2) 42 | expect(userPool).toBeInstanceOf(PromisePool) 43 | 44 | const timeouts = [1, 2, 3] 45 | const timeoutPool = new PromisePool().for(timeouts).withConcurrency(5) 46 | expect(timeoutPool.items).toEqual(timeouts) 47 | expect(timeoutPool.concurrency).toEqual(5) 48 | expect(timeoutPool).toBeInstanceOf(PromisePool) 49 | }) 50 | 51 
| test('handles empty items', async () => { 52 | const pool = new PromisePool() 53 | const { results } = await pool.process(() => {}) 54 | expect(results).toEqual([]) 55 | }) 56 | 57 | test('ensures concurrency is a number', async () => { 58 | const pool = new PromisePool() 59 | const fn = () => {} 60 | 61 | expect(await pool.withConcurrency(1).process(fn)).toEqual({ errors: [], results: [] }) 62 | expect(await pool.withConcurrency(Infinity).process(fn)).toEqual({ errors: [], results: [] }) 63 | 64 | await expect(pool.withConcurrency(0).process(fn)).rejects.toThrow(ValidationError) 65 | await expect(pool.withConcurrency(-1).process(fn)).rejects.toThrow(ValidationError) 66 | await expect(pool.withConcurrency(null).process(fn)).rejects.toThrow(ValidationError) 67 | }) 68 | 69 | test('ensures timeout is a valid number', async () => { 70 | const pool = new PromisePool() 71 | const fn = () => {} 72 | 73 | await expect(pool.withTaskTimeout(-1).process(fn)).rejects.toThrow(ValidationError) 74 | await expect(pool.withTaskTimeout('-1').process(fn)).rejects.toThrow(ValidationError) 75 | }) 76 | 77 | test('ensures the items are an array', async () => { 78 | const pool = new PromisePool() 79 | const fn = () => {} 80 | 81 | await expect(pool.for(42).process(fn)).rejects.toThrow(ValidationError) 82 | await expect(await pool.for([]).process(fn)).toEqual({ errors: [], results: [] }) 83 | }) 84 | 85 | test('throws when missing the callback in .process', async () => { 86 | const pool = new PromisePool() 87 | expect(pool.process()).rejects.toThrow() 88 | }) 89 | 90 | test('concurrency: 1', async () => { 91 | const start = Date.now() 92 | const timeouts = [40, 10, 20, 30, 10] 93 | 94 | const { results, errors } = await PromisePool 95 | .withConcurrency(1) 96 | .for(timeouts) 97 | .process(async timeout => { 98 | await pause(timeout) 99 | return timeout 100 | }) 101 | 102 | expect(errors).toEqual([]) 103 | expect(results).toEqual(timeouts) 104 | 105 | const elapsed = Date.now() - start 106 | const expectedDuration = timeouts.reduce((sum, timeout) => sum + timeout, 0) 107 | 108 | /** 109 | * All tasks run sequentially and each task must finish first before starting 110 | * a new task. That means the total duration of processing all items from 111 | * the input array is the sum of all timeouts we have to wait for. 
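   *
   * With the timeouts above, that sum is 40 + 10 + 20 + 30 + 10 = 110ms, which is
   * why the assertions below allow the elapsed time to fall between 110ms and 160ms.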
112 | */ 113 | expect(elapsed).toBeGreaterThanOrEqual(expectedDuration) 114 | expect(elapsed).toBeLessThanOrEqual(expectedDuration + 50) // allow 50ms offset 115 | }) 116 | 117 | test('concurrency: 2', async () => { 118 | const start = Date.now() 119 | const timeouts = [400, 100, 200, 300, 100] 120 | 121 | const { results, errors } = await PromisePool 122 | .withConcurrency(2) 123 | .for(timeouts) 124 | .process(async timeout => { 125 | await pause(timeout) 126 | return timeout 127 | }) 128 | 129 | expect(errors).toEqual([]) 130 | expect(results).toEqual([100, 200, 400, 100, 300]) 131 | 132 | const elapsed = Date.now() - start 133 | 134 | // expect 400ms because 2 tasks run in parallel, 135 | // and task 1 and 2 start, waiting 400ms and 100ms 136 | // and task 2 finishes (after 100ms) 137 | // and the pool starts task 3 waiting 200ms 138 | // and task 3 finishes (after 200ms) 139 | // and the pool starts task 4 waiting 300ms 140 | // and task 1 finishes (after 100ms (400ms in total)) 141 | // and the pool starts task 5 waiting 100ms 142 | // and task 5 finishes (after 100ms) 143 | // and task 4 finishes (after 300ms) 144 | expect(elapsed).toBeGreaterThanOrEqual(600) 145 | expect(elapsed).toBeLessThanOrEqual(650) 146 | }) 147 | 148 | test('ensures concurrency', async () => { 149 | const start = Date.now() 150 | const timeouts = [100, 20, 30, 10, 10, 10, 50] 151 | 152 | const { results } = await PromisePool 153 | .withConcurrency(2) 154 | .for(timeouts) 155 | .process(async timeout => { 156 | await pause(timeout) 157 | return timeout 158 | }) 159 | 160 | expect(results).toEqual([20, 30, 10, 10, 10, 100, 50]) 161 | 162 | const elapsed = Date.now() - start 163 | 164 | // expect the first task to take the longest processing time 165 | // and expect all other tasks to finish while task 1 is running 166 | expect(elapsed).toBeGreaterThanOrEqual(130) 167 | expect(elapsed).toBeLessThanOrEqual(160) 168 | }) 169 | 170 | test('handles concurrency greater than items in the list', async () => { 171 | const ids = [1, 2, 3, 4, 5] 172 | 173 | const { results } = await PromisePool 174 | .withConcurrency(3000) 175 | .for(ids) 176 | .process(async timeout => { 177 | await pause(timeout) 178 | return timeout 179 | }) 180 | 181 | expect(results).toEqual([1, 2, 3, 4, 5]) 182 | }) 183 | 184 | test('returns errors', async () => { 185 | const ids = [1, 2, 3, 4] 186 | 187 | const { results, errors } = await PromisePool 188 | .withConcurrency(2) 189 | .for(ids) 190 | .process(id => { 191 | if (id === 3) throw new Error('Oh no, not a 3.') 192 | 193 | return id 194 | }) 195 | 196 | expect(results).toEqual([1, 2, 4]) 197 | 198 | expect(errors.length).toEqual(1) 199 | expect(errors[0].item).toEqual(3) 200 | expect(errors[0]).toBeInstanceOf(Error) 201 | expect(errors[0].message).toEqual('Oh no, not a 3.') 202 | }) 203 | 204 | test('stores the original error', async () => { 205 | class CustomError extends Error { 206 | constructor (message, code) { 207 | super(message) 208 | 209 | this.code = code 210 | } 211 | } 212 | 213 | const ids = [1, 2, 3] 214 | 215 | const { errors } = await PromisePool 216 | .withConcurrency(2) 217 | .for(ids) 218 | .process(() => { 219 | throw new CustomError('Oh no, not a 3.', 123) 220 | }) 221 | 222 | expect(errors.length).toEqual(3) 223 | errors.forEach(error => { 224 | expect(error.raw).toBeInstanceOf(CustomError) 225 | expect(error.raw).toBeInstanceOf(CustomError) 226 | }) 227 | }) 228 | 229 | test('keeps processing with when errors occur', async () => { 230 | const ids = Array.from({ length: 
10 }, (_, i) => i + 1) 231 | 232 | const start = Date.now() 233 | 234 | const { results, errors } = await PromisePool 235 | .withConcurrency(2) 236 | .for(ids) 237 | .process(async id => { 238 | await pause(20) 239 | 240 | if (id === 1) { 241 | throw new Error('I can’t keep the first item') 242 | } 243 | 244 | return id 245 | }) 246 | 247 | expect(results).toEqual([2, 3, 4, 5, 6, 7, 8, 9, 10]) 248 | 249 | expect(errors.length).toEqual(1) 250 | expect( 251 | errors.every(error => error.message === 'I can’t keep the first item') 252 | ).toBe(true) 253 | 254 | const elapsed = Date.now() - start 255 | 256 | // 10 tasks are in the pool 257 | // expect 20ms for 2 parally running tasks 258 | // results in 5 batches each batch taking about 20ms 259 | // takes around 100ms for all items to process 260 | expect(elapsed).toBeGreaterThanOrEqual(100) 261 | expect(elapsed).toBeLessThanOrEqual(200) 262 | }) 263 | 264 | test('fails when not passing a function for the error handler', async () => { 265 | const pool = await PromisePool 266 | .for([1, 2, 3]) 267 | .handleError('non-function') 268 | 269 | await expect(pool.process(() => {})).rejects.toThrow() 270 | }) 271 | 272 | test('keeps error handler when calling pool.handleError before pool.for', async () => { 273 | const ids = [1, 2, 3, 4] 274 | const collectedItemsOnError = [] 275 | 276 | const { results, errors } = await PromisePool 277 | .withConcurrency(2) 278 | .handleError((_, item) => { 279 | collectedItemsOnError.push(item) 280 | }) 281 | .for(ids) 282 | .process(id => { 283 | if (id === 3) throw new Error('Oh no, not a 3.') 284 | 285 | return id 286 | }) 287 | 288 | expect(errors).toEqual([]) 289 | expect(results).toEqual([1, 2, 4]) 290 | expect(collectedItemsOnError).toEqual([3]) 291 | }) 292 | 293 | test('should handle error and continue processing', async () => { 294 | const ids = [1, 2, 3, 4] 295 | const collectedItemsOnError = [] 296 | 297 | const { results, errors } = await PromisePool 298 | .withConcurrency(2) 299 | .for(ids) 300 | .handleError((_, item) => { 301 | collectedItemsOnError.push(item) 302 | }) 303 | .process(id => { 304 | if (id === 3) throw new Error('Oh no, not a 3.') 305 | 306 | return id 307 | }) 308 | 309 | expect(errors).toEqual([]) 310 | expect(results).toEqual([1, 2, 4]) 311 | expect(collectedItemsOnError).toEqual([3]) 312 | }) 313 | 314 | test('should allow custom processing on a specific error', async () => { 315 | const ids = [1, 2, 3, 4] 316 | let calledError 317 | 318 | const { results, errors } = await PromisePool 319 | .withConcurrency(2) 320 | .for(ids) 321 | .handleError(async error => { 322 | if (error instanceof RangeError) { 323 | calledError = error 324 | } 325 | }) 326 | .process(id => { 327 | if (id === 4) throw new RangeError('Oh no, too large') 328 | 329 | return id 330 | }) 331 | 332 | expect(errors).toEqual([]) 333 | expect(results).toEqual([1, 2, 3]) 334 | expect(calledError).toBeInstanceOf(RangeError) 335 | }) 336 | 337 | test('rethrowing an error from the error handler should stop promise pool immediately when using an async processing function', async () => { 338 | await expect( 339 | PromisePool 340 | .for(new Array(10)) 341 | .withConcurrency(2) 342 | .handleError(async error => { 343 | throw error 344 | }) 345 | .process(async (_item, index) => { 346 | if (index === 4) { 347 | throw new RangeError('Oh no, too large') 348 | } 349 | }) 350 | ).rejects.toThrowError(RangeError) 351 | }) 352 | 353 | test('rethrowing an error from the error handler should stop promise pool immediately when using a 
sync processing function', async () => { 354 | await expect( 355 | PromisePool 356 | .for(new Array(100)) 357 | .withConcurrency(2) 358 | .handleError(error => { 359 | throw error 360 | }) 361 | .process((_item, index) => { 362 | if (index === 4) { 363 | throw new RangeError('Oh no, too large') 364 | } 365 | }) 366 | ).rejects.toThrowError(RangeError) 367 | }) 368 | 369 | test('fails without error', async () => { 370 | const ids = [1, 2, 3, 4, 5] 371 | 372 | const { errors } = await PromisePool 373 | .withConcurrency(2) 374 | .for(ids) 375 | .process(async id => { 376 | await new Promise((resolve, reject) => setTimeout(reject, 10)) 377 | 378 | return id 379 | }) 380 | 381 | expect(errors.length).toEqual(ids.length) 382 | expect( 383 | errors.every(error => error.message === '') 384 | ).toBe(true) 385 | }) 386 | 387 | test('fails with string', async () => { 388 | const ids = [1, 2, 3] 389 | 390 | const { errors } = await PromisePool 391 | .withConcurrency(2) 392 | .for(ids) 393 | .process(async () => { 394 | // eslint-disable-next-line prefer-promise-reject-errors 395 | return Promise.reject('failed') 396 | }) 397 | 398 | expect( 399 | errors.every(error => error.message === 'failed') 400 | ).toBe(true) 401 | }) 402 | 403 | test('fails with Error (and stacktrace)', async () => { 404 | const ids = [1, 2, 3] 405 | 406 | const { errors } = await PromisePool 407 | .withConcurrency(2) 408 | .for(ids) 409 | .process(async () => { 410 | throw new Error('failing') 411 | }) 412 | 413 | expect( 414 | errors.every(error => error.message === 'failing') 415 | ).toBe(true) 416 | 417 | expect( 418 | errors.every(error => typeof error.stack === 'string') 419 | ).toBe(true) 420 | }) 421 | 422 | test('fails with object', async () => { 423 | const ids = [1, 2, 3] 424 | 425 | const { errors } = await PromisePool 426 | .withConcurrency(2) 427 | .for(ids) 428 | .process(async () => { 429 | // eslint-disable-next-line prefer-promise-reject-errors 430 | return Promise.reject({ message: 'failed' }) 431 | }) 432 | 433 | expect( 434 | errors.every(error => error.message === 'failed') 435 | ).toBe(true) 436 | }) 437 | 438 | test('.process provides an index as the second argument', async () => { 439 | const ids = [1, 2, 3, 4, 5] 440 | 441 | const { results } = await PromisePool 442 | .withConcurrency(10) 443 | .for(ids) 444 | .process(async (timeout, index) => { 445 | await pause(timeout) 446 | return { index, timeout } 447 | }) 448 | 449 | expect(results).toEqual([ 450 | { index: 0, timeout: 1 }, 451 | { index: 1, timeout: 2 }, 452 | { index: 2, timeout: 3 }, 453 | { index: 3, timeout: 4 }, 454 | { index: 4, timeout: 5 } 455 | ]) 456 | }) 457 | 458 | test('fails when not passing a function as an onTaskStarted callback', async () => { 459 | const pool = await PromisePool 460 | .for([1, 2, 3]) 461 | .onTaskStarted('non-function') 462 | 463 | await expect(pool.process(() => {})).rejects.toThrow() 464 | }) 465 | 466 | test('onTaskStarted is called when a task is about to be processed', async () => { 467 | const ids = [1, 2, 3, 4, 5] 468 | const startedIds = [] 469 | const concurrency = 1 470 | const percentageArr = [0, 20, 40, 60, 80] 471 | 472 | await PromisePool 473 | .withConcurrency(concurrency) 474 | .for(ids) 475 | .onTaskStarted((item, pool) => { 476 | startedIds.push(item) 477 | expect(pool.activeTaskCount()).toBeLessThanOrEqual(concurrency) 478 | expect(pool.processedPercentage()).toEqual(percentageArr.shift()) 479 | }) 480 | .process(async () => { 481 | return await Promise.resolve() 482 | }) 483 | 484 | 
expect(ids).toEqual(startedIds) 485 | }) 486 | 487 | test('fails when not passing a function as an onTaskFinished callback', async () => { 488 | const pool = await PromisePool 489 | .for([1, 2, 3]) 490 | .onTaskFinished('non-function') 491 | 492 | await expect(pool.process(() => {})).rejects.toThrow() 493 | }) 494 | 495 | test('onTaskFinished is called when a task was processed', async () => { 496 | const ids = [1, 2, 3, 4, 5] 497 | const concurrency = 2 498 | const finishedIds = [] 499 | const percentageArr = [20, 40, 60, 80, 100] 500 | 501 | await PromisePool 502 | .withConcurrency(concurrency) 503 | .for(ids) 504 | .onTaskFinished((item, pool) => { 505 | finishedIds.push(item) 506 | expect(finishedIds).toEqual(pool.processedItems()) 507 | expect(pool.activeTaskCount()).toBeLessThanOrEqual(concurrency) 508 | expect(pool.processedPercentage()).toEqual(percentageArr.shift()) 509 | }) 510 | .process(async () => { 511 | return await Promise.resolve() 512 | }) 513 | 514 | expect(ids).toEqual(finishedIds) 515 | }) 516 | 517 | test('onTaskStarted and onTaskFinished are called in the same amount', async () => { 518 | const ids = [1, 2, 3, 4, 5] 519 | const concurrency = 3 520 | const finishedIds = [] 521 | const startedIds = [] 522 | 523 | await PromisePool 524 | .withConcurrency(concurrency) 525 | .for(ids) 526 | .onTaskStarted((item) => { 527 | startedIds.push(item) 528 | }) 529 | .onTaskFinished((item) => { 530 | finishedIds.push(item) 531 | }) 532 | .process(async () => { 533 | return await Promise.resolve() 534 | }) 535 | 536 | expect(startedIds).toEqual(ids) 537 | expect(finishedIds).toEqual(ids) 538 | }) 539 | 540 | test('can decrease the concurrency while the pool is running', async () => { 541 | const concurrency = 3 542 | const timeouts = [10, 20, 30, 40, 50] 543 | 544 | const start = Date.now() 545 | 546 | await PromisePool 547 | .withConcurrency(concurrency) 548 | .for(timeouts) 549 | .process(async (timeout, _, pool) => { 550 | if (timeout >= 30) { 551 | pool.useConcurrency(1) 552 | } 553 | 554 | await pause(timeout) 555 | }) 556 | 557 | const elapsed = Date.now() - start 558 | 559 | expect(elapsed).toBeGreaterThanOrEqual(30 + 40 + 50) 560 | expect(elapsed).toBeLessThanOrEqual(30 + 40 + 50 + 8) // +8 is a leeway for the pool overhead 561 | }) 562 | 563 | test('can increase the concurrency while the pool is running', async () => { 564 | const concurrency = 1 565 | const timeouts = [10, 20, 30, 40, 50] 566 | 567 | const start = Date.now() 568 | 569 | await PromisePool 570 | .withConcurrency(concurrency) 571 | .for(timeouts) 572 | .process(async (timeout, _, pool) => { 573 | if (timeout >= 30) { 574 | pool.useConcurrency(3) 575 | } 576 | 577 | await pause(timeout) 578 | }) 579 | 580 | const elapsed = Date.now() - start 581 | 582 | /** 583 | * 1. the first two items run in sequence: 10ms + 20ms 584 | * 2. we’re changing the concurrency when hitting the third item 585 | * 3. the changed concurrency results in processing the remainin items in parallel 586 | * 4. 
processing the items 30,40,50 in parallel has the longest timeout is the limit 587 | */ 588 | expect(elapsed).toBeGreaterThanOrEqual(10 + 20 + 50 - 1) // -1 is a leeway if the pool is faster 589 | expect(elapsed).toBeLessThanOrEqual(10 + 20 + 50 + 8) // +8 is a leeway for the pool overhead 590 | }) 591 | 592 | test('fails to change the concurrency for a running pool to an invalid value', async () => { 593 | const timeouts = [100, 200, 300, 400, 500] 594 | 595 | await expect( 596 | PromisePool 597 | .withConcurrency(3) 598 | .for(timeouts) 599 | .process(async (timeout, _, pool) => { 600 | if (timeout >= 300) { 601 | pool.useConcurrency(-1) 602 | } 603 | 604 | await pause(timeout) 605 | }) 606 | ).rejects.toThrow(ValidationError) 607 | }) 608 | 609 | test('useCorrespondingResults keeps results in order', async () => { 610 | const timeouts = [20, undefined, 10] 611 | 612 | const { results } = await PromisePool 613 | .for(timeouts) 614 | .useCorrespondingResults() 615 | .process(async (timeout) => { 616 | if (timeout) { 617 | await pause(timeout) 618 | return timeout 619 | } 620 | throw new Error('did not work') 621 | }) 622 | 623 | expect(results).toEqual([20, PromisePool.failed, 10]) 624 | }) 625 | 626 | test('useCorrespondingResults defaults results to notRun symbol', async () => { 627 | const timeouts = [20, undefined, 10, 100] 628 | 629 | const { results } = await PromisePool 630 | .withConcurrency(1) 631 | .for(timeouts) 632 | .handleError((_error, _index, pool) => { 633 | pool.stop() 634 | }) 635 | .useCorrespondingResults() 636 | .process(async (timeout) => { 637 | if (timeout) { 638 | await pause(timeout) 639 | return timeout 640 | } 641 | throw new Error('did not work') 642 | }) 643 | 644 | expect(results).toEqual([ 645 | 20, 646 | PromisePool.failed, 647 | PromisePool.notRun, 648 | PromisePool.notRun 649 | ]) 650 | }) 651 | 652 | test('can timeout long-running handlers', async () => { 653 | const timers = [1, 2, 3, 4] 654 | const leeway = 5 655 | 656 | const { results, errors } = await PromisePool 657 | .withTaskTimeout(10) 658 | .for(timers) 659 | .process(async (timer) => { 660 | const computed = 10 * timer - leeway 661 | await pause(computed) 662 | 663 | return computed 664 | }) 665 | 666 | // only the first item resolves 667 | expect(results).toEqual([5]) 668 | 669 | // items 2, 3, and 4 time out 670 | expect(errors.length).toEqual(3) 671 | expect(errors[0]).toBeInstanceOf(PromisePoolError) 672 | expect(errors[1]).toBeInstanceOf(PromisePoolError) 673 | expect(errors[2]).toBeInstanceOf(PromisePoolError) 674 | 675 | expect(errors.map(error => error.item)).toEqual([2, 3, 4]) 676 | }) 677 | 678 | test.run() 679 | -------------------------------------------------------------------------------- /test/stop-the-pool.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | const { test } = require('uvu') 4 | const { expect } = require('expect') 5 | const { PromisePool } = require('../dist') 6 | 7 | const pause = timeout => new Promise(resolve => setTimeout(resolve, timeout)) 8 | 9 | test('stops the pool from .process', async () => { 10 | const timeouts = [10, 20, 30, 40, 50] 11 | 12 | const { results } = await PromisePool 13 | .for(timeouts) 14 | .process(async (timeout, _, pool) => { 15 | if (timeout > 30) { 16 | return pool.stop() 17 | } 18 | 19 | await pause(timeout) 20 | return timeout 21 | }) 22 | 23 | expect(results).toEqual([10, 20, 30]) 24 | }) 25 | 26 | test('stops the pool from .process without returning pool.stop', async () 
=> { 27 | const timeouts = [10, 20, 30, 40, 50] 28 | 29 | const { results } = await PromisePool 30 | .for(timeouts) 31 | .process(async (timeout, _, pool) => { 32 | if (timeout === 30) { 33 | pool.stop() 34 | } 35 | 36 | await pause(timeout) 37 | return timeout 38 | }) 39 | 40 | expect(results).toEqual([10, 20]) 41 | }) 42 | 43 | test('stops the pool from sync .handleError', async () => { 44 | const timeouts = [10, 20, 30, 40, 50] 45 | 46 | const { results } = await PromisePool 47 | .withConcurrency(2) 48 | .for(timeouts) 49 | .withConcurrency(2) 50 | .handleError((_, __, pool) => { 51 | return pool.stop() 52 | }) 53 | .process(async timeout => { 54 | if (timeout > 30) { 55 | throw new Error('stop the pool') 56 | } 57 | 58 | await pause(timeout) 59 | return timeout 60 | }) 61 | 62 | expect(results).toEqual([10, 20, 30]) 63 | }) 64 | 65 | test('stops the pool from async error handler', async () => { 66 | const timeouts = [10, 20, 30, 40, 50] 67 | 68 | const { results } = await PromisePool 69 | .for(timeouts) 70 | .withConcurrency(2) 71 | .handleError(async (_, __, pool) => { 72 | pool.stop() 73 | }) 74 | .process(async (timeout) => { 75 | if (timeout < 30) { 76 | throw new Error('stop the pool') 77 | } 78 | 79 | await pause(timeout) 80 | return timeout 81 | }) 82 | 83 | expect(results).toEqual([]) 84 | }) 85 | 86 | test('stops on time with async error handler', async () => { 87 | const timeouts = [50, 40, 30, 20, 10] 88 | 89 | const { results } = await PromisePool 90 | .for(timeouts) 91 | .withConcurrency(2) 92 | .useCorrespondingResults() 93 | .handleError(async (_, __, pool) => { 94 | pool.stop() 95 | }) 96 | .process(async (timeout) => { 97 | if (timeout === 30) { 98 | throw new Error('stop the pool') 99 | } 100 | 101 | await pause(timeout) 102 | return timeout 103 | }) 104 | 105 | expect(results).toEqual([ 106 | 50, 107 | 40, 108 | PromisePool.failed, 109 | PromisePool.notRun, 110 | PromisePool.notRun 111 | ]) 112 | }) 113 | 114 | test('stops on time with timed stop call', async () => { 115 | const timeouts = [100, 200, 300] 116 | let processedSecond = false 117 | 118 | const { results } = await PromisePool 119 | .withConcurrency(1) 120 | .for(timeouts) 121 | .process(async (timeout, _, pool) => { 122 | // simulate user stopping pool after 50ms 123 | pause(50).then(() => pool.stop()).catch(() => {}) 124 | 125 | if (timeout === 200) { 126 | processedSecond = true 127 | } 128 | // simulate load 129 | await pause(timeout) 130 | return timeout 131 | }) 132 | 133 | // should only have finished the current task 134 | expect(results).toEqual([100]) 135 | 136 | // should not have started the second task 137 | expect(processedSecond).toEqual(false) 138 | }) 139 | 140 | test.run() 141 | -------------------------------------------------------------------------------- /tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "extends": "@supercharge/tsconfig", 3 | "compilerOptions": { 4 | "outDir": "./dist", 5 | "downlevelIteration": true 6 | }, 7 | "include": [ 8 | "./**/*" 9 | ], 10 | "exclude": [ 11 | "./node_modules", 12 | "./test", 13 | "./dist" 14 | ] 15 | } 16 | --------------------------------------------------------------------------------