├── .gitignore ├── LICENSE ├── bench ├── bench-alt.js └── bench.js ├── build.ts ├── deno.json ├── dist ├── index.async-storage.cjs ├── index.async-storage.js ├── index.cjs ├── index.d.ts ├── index.js └── index.min.js ├── index.async-storage.ts ├── index.ts ├── package.json ├── readme.md └── test ├── bun.test.ts └── deno-test.ts /.gitignore: -------------------------------------------------------------------------------- 1 | node_modules 2 | .idea.md 3 | *.gz 4 | *.bak 5 | jsr.json 6 | deno.lock 7 | .vscode 8 | bun.lockb 9 | playground.ts 10 | index.experiments.ts -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2024 henrygd 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 
22 | -------------------------------------------------------------------------------- /bench/bench-alt.js: -------------------------------------------------------------------------------- 1 | // import { newQueue } from '../index.ts' 2 | import { newQueue } from '../dist/index.js' 3 | import pLimit from 'p-limit' 4 | import pq from 'promise-queue' 5 | import Queue from 'queue' 6 | import { queue as asyncQueue } from 'async' 7 | import fastq from 'fastq' 8 | 9 | const concurrency = 5 10 | let promises = 1_000 11 | let runs = 1_000 12 | let warmupRuns = 500 13 | 14 | const limit = pLimit(concurrency) 15 | const promiseQueue = new pq(concurrency) 16 | const queue = newQueue(concurrency) 17 | const fqQueue = fastq.promise((task) => task(), concurrency) 18 | const aQueue = asyncQueue(async (task) => await task(), concurrency) 19 | const q = new Queue({ results: [], concurrency, autostart: true }) 20 | 21 | let start = 0 22 | const times = {} 23 | 24 | function checkEqual(a, b) { 25 | if (a !== b) { 26 | throw new Error(`${a} !== ${b}`) 27 | } 28 | } 29 | 30 | async function bench(name, queue, addMethod, isWarmup) { 31 | start = performance.now() 32 | // benchmark test 33 | let j = 0 34 | const { promise, resolve } = Promise.withResolvers() 35 | for (let i = 0; i < promises; i++) { 36 | // need call for promise-queue and queue to work 37 | addMethod.call(queue, async () => ++j === promises && resolve()) 38 | } 39 | await promise 40 | checkEqual(j, promises) 41 | // collect times 42 | if (!isWarmup) { 43 | const time = performance.now() - start 44 | times[name].Average += time 45 | times[name].Fastest = Math.min(times[name].Fastest, time) 46 | times[name].Slowest = Math.max(times[name].Slowest, time) 47 | } 48 | } 49 | 50 | const libs = [ 51 | ['fastq', fqQueue, fqQueue.push], 52 | ['promise-queue', promiseQueue, promiseQueue.add], 53 | ['@henrygd/queue', queue, queue.add], 54 | ['async.queue', aQueue, aQueue.push], 55 | ['queue', q, q.push], 56 | ['p-limit', limit, limit], 57 | ] 58 | 59 | // warmup runs 60 | for (let i = 0; i < warmupRuns; i++) { 61 | logStatus(`Warming up (${i}/${warmupRuns})`) 62 | for (const lib of libs) { 63 | if (lib[0] === 'p-limit') { 64 | // p-limit warming messes up deno results 65 | continue 66 | } 67 | await bench(lib[0], lib[1], lib[2], true) 68 | } 69 | } 70 | 71 | for (const lib of libs) { 72 | logStatus(`Benching ${lib[0]}`) 73 | for (let j = 0; j <= runs; j++) { 74 | if (j === 0) { 75 | times[lib[0]] = { 76 | Average: 0, 77 | Fastest: Infinity, 78 | Slowest: 0, 79 | } 80 | } 81 | await bench(lib[0], lib[1], lib[2], false) 82 | } 83 | } 84 | 85 | logTable(times) 86 | 87 | function logTable(obj, keyColumnName = 'Name') { 88 | try { 89 | process.stdout.clearLine(0) 90 | process.stdout.cursorTo(0) 91 | process.stdout.write('') 92 | } catch { 93 | Deno.stdout.writeSync(new TextEncoder().encode(`\r`)) 94 | } 95 | 96 | console.log(`Benchmark results (${getRuntime()})`) 97 | 98 | // Convert object to array of objects 99 | let arr = Object.entries(obj).map(([key, data]) => ({ 100 | [keyColumnName]: key, 101 | ...data, 102 | })) 103 | 104 | // Sort the array based on the Average property 105 | arr.sort((a, b) => a.Average - b.Average) 106 | 107 | // Calculate the fastest (lowest) average 108 | const fastestAverage = arr[0].Average 109 | 110 | // add speed + format data 111 | arr = arr.map((item) => ({ 112 | Name: item.Name, 113 | Speed: 114 | item.Average === fastestAverage ? 
'1.00x' : (item.Average / fastestAverage).toFixed(2) + 'x', 115 | Average: parseFloat((item.Average / runs).toFixed(4)), 116 | Fastest: parseFloat(item.Fastest.toFixed(4)), 117 | Slowest: parseFloat(item.Slowest.toFixed(4)), 118 | })) 119 | 120 | // Determine column order 121 | const columns = [keyColumnName, 'Speed', 'Average', 'Fastest', 'Slowest'] 122 | 123 | // Display the sorted array as a table with specified column order 124 | console.table(arr, columns) 125 | } 126 | 127 | function getRuntime() { 128 | if (typeof Bun !== 'undefined') { 129 | return `Bun ${Bun.version}` 130 | } 131 | if (typeof process !== 'undefined' && process.versions && process.versions.node) { 132 | return `Node ${process.version}` 133 | } 134 | if (typeof Deno !== 'undefined') { 135 | return `Deno ${Deno.version.deno}` 136 | } 137 | return 'Unknown' 138 | } 139 | 140 | function logStatus(msg) { 141 | try { 142 | process.stdout.clearLine(0) 143 | process.stdout.cursorTo(0) 144 | process.stdout.write(msg) 145 | } catch { 146 | Deno.stdout.writeSync(new TextEncoder().encode(msg + '\r')) 147 | } 148 | } 149 | -------------------------------------------------------------------------------- /bench/bench.js: -------------------------------------------------------------------------------- 1 | import { run, bench, baseline } from 'mitata' 2 | import { newQueue } from '../dist/index.js' 3 | import pLimit from 'p-limit' 4 | import pq from 'promise-queue' 5 | import Queue from 'queue' 6 | import { queue as asyncQueue } from 'async' 7 | import fastq from 'fastq' 8 | 9 | const concurrency = 5 10 | let promises = 1_000 11 | 12 | const queue = newQueue(concurrency) 13 | const limit = pLimit(concurrency) 14 | const promiseQueue = new pq(concurrency) 15 | const q = new Queue({ results: [], concurrency, autostart: true }) 16 | const aQueue = asyncQueue(async (task) => await task(), concurrency) 17 | const fqQueue = fastq.promise((task) => task(), concurrency) 18 | 19 | function checkEqual(a, b) { 20 | if (a !== b) { 21 | throw new Error(`${a} !== ${b}`) 22 | } 23 | } 24 | 25 | function addBench(name, queue, addMethod, fn) { 26 | fn(name, async () => { 27 | let j = 0 28 | const { promise, resolve } = Promise.withResolvers() 29 | for (let i = 0; i < promises; i++) { 30 | // need to set this for promise-queue and queue to work 31 | addMethod.call(queue, async () => ++j === promises && resolve()) 32 | } 33 | await promise 34 | checkEqual(j, promises) 35 | }) 36 | } 37 | 38 | const libs = [ 39 | ['@henrygd/queue', queue, queue.add], 40 | ['promise-queue', promiseQueue, promiseQueue.add], 41 | ['fastq', fqQueue, fqQueue.push], 42 | ['async.queue', aQueue, aQueue.push], 43 | ['queue', q, q.push], 44 | ] 45 | 46 | for (const [name, queue, addMethod] of libs) { 47 | const benchFn = name === '@henrygd/queue' ? 
baseline : bench 48 | addBench(name, queue, addMethod, benchFn) 49 | } 50 | 51 | await run() 52 | // add p-limit after the warm up test bc it's slow and 53 | // fudges the results in deno 54 | addBench('p-limit', limit, limit, bench) 55 | console.log('') 56 | await run() 57 | -------------------------------------------------------------------------------- /build.ts: -------------------------------------------------------------------------------- 1 | import { build } from 'esbuild' 2 | 3 | await build({ 4 | entryPoints: ['index.ts'], 5 | mangleProps: /^res$|^rej$|^next$/, 6 | format: 'esm', 7 | outfile: './dist/index.js', 8 | }) 9 | 10 | await build({ 11 | entryPoints: ['index.ts'], 12 | minify: true, 13 | mangleProps: /^res$|^rej$|^next$/, 14 | format: 'esm', 15 | outfile: './dist/index.min.js', 16 | }) 17 | 18 | await build({ 19 | entryPoints: ['index.ts'], 20 | mangleProps: /^res$|^rej$|^next$/, 21 | format: 'cjs', 22 | outfile: './dist/index.cjs', 23 | }) 24 | 25 | await build({ 26 | entryPoints: ['index.async-storage.ts'], 27 | format: 'esm', 28 | outfile: './dist/index.async-storage.js', 29 | banner: { 30 | js: `/** 31 | * This version of \`@henrygd/queue\` supports AsyncLocalStorage / AsyncResource. 32 | * 33 | * It should not be used in a web browser. 34 | * @module 35 | */`, 36 | }, 37 | }) 38 | 39 | await build({ 40 | entryPoints: ['index.async-storage.ts'], 41 | format: 'cjs', 42 | outfile: './dist/index.async-storage.cjs', 43 | banner: { 44 | js: `/** 45 | * This version of \`@henrygd/queue\` supports AsyncLocalStorage / AsyncResource. 46 | * 47 | * It should not be used in a web browser. 48 | * @module 49 | */`, 50 | }, 51 | }) 52 | -------------------------------------------------------------------------------- /deno.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "@henrygd/queue", 3 | "version": "1.0.7", 4 | "exports": "./index.ts", 5 | "imports": { 6 | "async_hooks": "node:async_hooks", 7 | "mitata": "npm:mitata", 8 | "os": "node:os" 9 | }, 10 | "lint": { 11 | "include": ["test/deno-test.ts"] 12 | } 13 | } 14 | -------------------------------------------------------------------------------- /dist/index.async-storage.cjs: -------------------------------------------------------------------------------- 1 | /** 2 | * This version of `@henrygd/queue` supports AsyncLocalStorage / AsyncResource. 3 | * 4 | * It should not be used in a web browser. 
5 | * @module 6 | */ 7 | var __defProp = Object.defineProperty; 8 | var __getOwnPropDesc = Object.getOwnPropertyDescriptor; 9 | var __getOwnPropNames = Object.getOwnPropertyNames; 10 | var __hasOwnProp = Object.prototype.hasOwnProperty; 11 | var __export = (target, all) => { 12 | for (var name in all) 13 | __defProp(target, name, { get: all[name], enumerable: true }); 14 | }; 15 | var __copyProps = (to, from, except, desc) => { 16 | if (from && typeof from === "object" || typeof from === "function") { 17 | for (let key of __getOwnPropNames(from)) 18 | if (!__hasOwnProp.call(to, key) && key !== except) 19 | __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); 20 | } 21 | return to; 22 | }; 23 | var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); 24 | var index_async_storage_exports = {}; 25 | __export(index_async_storage_exports, { 26 | newQueue: () => newQueue 27 | }); 28 | module.exports = __toCommonJS(index_async_storage_exports); 29 | var import_async_hooks = require("async_hooks"); 30 | let Promize = Promise; 31 | let newQueue = (concurrency) => { 32 | let active = 0; 33 | let size = 0; 34 | let head; 35 | let tail; 36 | let resolveDonePromise; 37 | let donePromise; 38 | let afterRun = () => { 39 | active--; 40 | if (--size) { 41 | run(); 42 | } else { 43 | donePromise = resolveDonePromise?.(); 44 | } 45 | }; 46 | let run = () => { 47 | if (head && active < concurrency) { 48 | active++; 49 | let curHead = head; 50 | head = head.next; 51 | curHead.p().then( 52 | (v) => (curHead.res(v), afterRun()), 53 | (e) => (curHead.rej(e), afterRun()) 54 | ); 55 | } 56 | }; 57 | return { 58 | add(p) { 59 | let node = { p: import_async_hooks.AsyncResource.bind(p) }; 60 | let promise = new Promize((res, rej) => { 61 | node.res = res; 62 | node.rej = rej; 63 | }); 64 | if (head) { 65 | tail = tail.next = node; 66 | } else { 67 | tail = head = node; 68 | } 69 | size++; 70 | run(); 71 | return promise; 72 | }, 73 | done: () => { 74 | if (!size) { 75 | return Promize.resolve(); 76 | } 77 | if (donePromise) { 78 | return donePromise; 79 | } 80 | return donePromise = new Promize((resolve) => resolveDonePromise = resolve); 81 | }, 82 | clear() { 83 | head = tail = null; 84 | size = active; 85 | }, 86 | active: () => active, 87 | size: () => size 88 | }; 89 | }; 90 | -------------------------------------------------------------------------------- /dist/index.async-storage.js: -------------------------------------------------------------------------------- 1 | /** 2 | * This version of `@henrygd/queue` supports AsyncLocalStorage / AsyncResource. 3 | * 4 | * It should not be used in a web browser. 
5 | * @module 6 | */ 7 | import { AsyncResource } from "async_hooks"; 8 | let Promize = Promise; 9 | let newQueue = (concurrency) => { 10 | let active = 0; 11 | let size = 0; 12 | let head; 13 | let tail; 14 | let resolveDonePromise; 15 | let donePromise; 16 | let afterRun = () => { 17 | active--; 18 | if (--size) { 19 | run(); 20 | } else { 21 | donePromise = resolveDonePromise?.(); 22 | } 23 | }; 24 | let run = () => { 25 | if (head && active < concurrency) { 26 | active++; 27 | let curHead = head; 28 | head = head.next; 29 | curHead.p().then( 30 | (v) => (curHead.res(v), afterRun()), 31 | (e) => (curHead.rej(e), afterRun()) 32 | ); 33 | } 34 | }; 35 | return { 36 | add(p) { 37 | let node = { p: AsyncResource.bind(p) }; 38 | let promise = new Promize((res, rej) => { 39 | node.res = res; 40 | node.rej = rej; 41 | }); 42 | if (head) { 43 | tail = tail.next = node; 44 | } else { 45 | tail = head = node; 46 | } 47 | size++; 48 | run(); 49 | return promise; 50 | }, 51 | done: () => { 52 | if (!size) { 53 | return Promize.resolve(); 54 | } 55 | if (donePromise) { 56 | return donePromise; 57 | } 58 | return donePromise = new Promize((resolve) => resolveDonePromise = resolve); 59 | }, 60 | clear() { 61 | head = tail = null; 62 | size = active; 63 | }, 64 | active: () => active, 65 | size: () => size 66 | }; 67 | }; 68 | export { 69 | newQueue 70 | }; 71 | -------------------------------------------------------------------------------- /dist/index.cjs: -------------------------------------------------------------------------------- 1 | var __defProp = Object.defineProperty; 2 | var __getOwnPropDesc = Object.getOwnPropertyDescriptor; 3 | var __getOwnPropNames = Object.getOwnPropertyNames; 4 | var __hasOwnProp = Object.prototype.hasOwnProperty; 5 | var __export = (target, all) => { 6 | for (var name in all) 7 | __defProp(target, name, { get: all[name], enumerable: true }); 8 | }; 9 | var __copyProps = (to, from, except, desc) => { 10 | if (from && typeof from === "object" || typeof from === "function") { 11 | for (let key of __getOwnPropNames(from)) 12 | if (!__hasOwnProp.call(to, key) && key !== except) 13 | __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); 14 | } 15 | return to; 16 | }; 17 | var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); 18 | var async_queue_exports = {}; 19 | __export(async_queue_exports, { 20 | newQueue: () => newQueue 21 | }); 22 | module.exports = __toCommonJS(async_queue_exports); 23 | let Promize = Promise; 24 | let newQueue = (concurrency) => { 25 | let active = 0; 26 | let size = 0; 27 | let head; 28 | let tail; 29 | let resolveDonePromise; 30 | let donePromise; 31 | let afterRun = () => { 32 | active--; 33 | if (--size) { 34 | run(); 35 | } else { 36 | donePromise = resolveDonePromise?.(); 37 | } 38 | }; 39 | let run = () => { 40 | if (head && active < concurrency) { 41 | active++; 42 | let curHead = head; 43 | head = head.a; 44 | curHead.p().then( 45 | (v) => (curHead.b(v), afterRun()), 46 | (e) => (curHead.c(e), afterRun()) 47 | ); 48 | } 49 | }; 50 | return { 51 | add(p) { 52 | let node = { p }; 53 | let promise = new Promize((res, rej) => { 54 | node.b = res; 55 | node.c = rej; 56 | }); 57 | if (head) { 58 | tail = tail.a = node; 59 | } else { 60 | tail = head = node; 61 | } 62 | size++; 63 | run(); 64 | return promise; 65 | }, 66 | done: () => { 67 | if (!size) { 68 | return Promize.resolve(); 69 | } 70 | if (donePromise) { 71 | return donePromise; 72 | } 
73 | return donePromise = new Promize((resolve) => resolveDonePromise = resolve); 74 | }, 75 | clear() { 76 | head = tail = null; 77 | size = active; 78 | }, 79 | active: () => active, 80 | size: () => size 81 | }; 82 | }; 83 | -------------------------------------------------------------------------------- /dist/index.d.ts: -------------------------------------------------------------------------------- 1 | /** Queue interface */ 2 | export interface Queue { 3 | /** Add an async function / promise wrapper to the queue */ 4 | add(promiseFunction: () => PromiseLike): Promise; 5 | /** Returns a promise that resolves when the queue is empty */ 6 | done(): Promise; 7 | /** Empties the queue (active promises are not cancelled) */ 8 | clear(): void; 9 | /** Returns the number of promises currently running */ 10 | active(): number; 11 | /** Returns the total number of promises in the queue */ 12 | size(): number; 13 | } 14 | /** 15 | * Creates a new queue with the specified concurrency level. 16 | * 17 | * @param {number} concurrency - The maximum number of concurrent operations. 18 | * @return {Queue} - The newly created queue. 19 | */ 20 | export declare let newQueue: (concurrency: number) => Queue; 21 | -------------------------------------------------------------------------------- /dist/index.js: -------------------------------------------------------------------------------- 1 | let Promize = Promise; 2 | let newQueue = (concurrency) => { 3 | let active = 0; 4 | let size = 0; 5 | let head; 6 | let tail; 7 | let resolveDonePromise; 8 | let donePromise; 9 | let afterRun = () => { 10 | active--; 11 | if (--size) { 12 | run(); 13 | } else { 14 | donePromise = resolveDonePromise?.(); 15 | } 16 | }; 17 | let run = () => { 18 | if (head && active < concurrency) { 19 | active++; 20 | let curHead = head; 21 | head = head.a; 22 | curHead.p().then( 23 | (v) => (curHead.b(v), afterRun()), 24 | (e) => (curHead.c(e), afterRun()) 25 | ); 26 | } 27 | }; 28 | return { 29 | add(p) { 30 | let node = { p }; 31 | let promise = new Promize((res, rej) => { 32 | node.b = res; 33 | node.c = rej; 34 | }); 35 | if (head) { 36 | tail = tail.a = node; 37 | } else { 38 | tail = head = node; 39 | } 40 | size++; 41 | run(); 42 | return promise; 43 | }, 44 | done: () => { 45 | if (!size) { 46 | return Promize.resolve(); 47 | } 48 | if (donePromise) { 49 | return donePromise; 50 | } 51 | return donePromise = new Promize((resolve) => resolveDonePromise = resolve); 52 | }, 53 | clear() { 54 | head = tail = null; 55 | size = active; 56 | }, 57 | active: () => active, 58 | size: () => size 59 | }; 60 | }; 61 | export { 62 | newQueue 63 | }; 64 | -------------------------------------------------------------------------------- /dist/index.min.js: -------------------------------------------------------------------------------- 1 | let l=Promise,f=m=>{let o=0,n=0,i,t,s,d,u=()=>{o--,--n?a():d=s?.()},a=()=>{if(i&&o(r.i(e),u()),e=>(r.r(e),u()))}};return{add(r){let e={p:r},v=new l((P,T)=>{e.i=P,e.r=T});return i?t=t.e=e:t=i=e,n++,a(),v},done:()=>n?d||(d=new l(r=>s=r)):l.resolve(),clear(){i=t=null,n=o},active:()=>o,size:()=>n}};export{f as newQueue}; 2 | -------------------------------------------------------------------------------- /index.async-storage.ts: -------------------------------------------------------------------------------- 1 | /** 2 | * This version of `@henrygd/queue` supports AsyncLocalStorage / AsyncResource. 3 | * 4 | * It should not be used in a web browser. 
5 | * 6 | * @module 7 | */ 8 | import { AsyncResource } from 'async_hooks' 9 | 10 | /** List node */ 11 | type Node = { 12 | /** input promise wrapper */ 13 | p: () => T 14 | /** resolve returned promise */ 15 | res: (value: T) => void 16 | /** reject returned promise */ 17 | rej: (reason: any) => void 18 | /** next node pointer */ 19 | next?: Node 20 | } 21 | 22 | /** Queue interface */ 23 | interface Queue { 24 | /** Add an async function / promise wrapper to the queue */ 25 | add(promiseFunction: () => PromiseLike): Promise 26 | /** Returns a promise that resolves when the queue is empty */ 27 | done(): Promise 28 | /** Empties the queue (active promises are not cancelled) */ 29 | clear(): void 30 | /** Returns the number of promises currently running */ 31 | active(): number 32 | /** Returns the total number of promises in the queue */ 33 | size(): number 34 | } 35 | 36 | // this just saves a few bytes 37 | let Promize = Promise 38 | 39 | /** 40 | * Creates a new queue with the specified concurrency level. 41 | * 42 | * @param {number} concurrency - The maximum number of concurrent operations. 43 | * @return {Queue} - The newly created queue. 44 | */ 45 | export let newQueue = (concurrency: number): Queue => { 46 | let active = 0 47 | let size = 0 48 | let head: Node> | undefined | null 49 | let tail: Node> | undefined | null 50 | let resolveDonePromise: (value: void | PromiseLike) => void 51 | let donePromise: Promise | void 52 | 53 | let afterRun = () => { 54 | active-- 55 | if (--size) { 56 | run() 57 | } else { 58 | donePromise = resolveDonePromise?.() 59 | } 60 | } 61 | 62 | let run = () => { 63 | if (head && active < concurrency) { 64 | active++ 65 | let curHead = head 66 | head = head.next 67 | curHead.p().then( 68 | (v) => (curHead.res(v), afterRun()), 69 | (e) => (curHead.rej(e), afterRun()) 70 | ) 71 | } 72 | } 73 | 74 | return { 75 | add(p: () => PromiseLike) { 76 | let node = { p: AsyncResource.bind(p) } as unknown as Node> 77 | let promise = new Promize((res, rej) => { 78 | node.res = res 79 | node.rej = rej 80 | }) 81 | if (head) { 82 | tail = tail!.next = node 83 | } else { 84 | tail = head = node 85 | } 86 | size++ 87 | run() 88 | return promise as Promise 89 | }, 90 | done: () => { 91 | if (!size) { 92 | return Promize.resolve() 93 | } 94 | if (donePromise) { 95 | return donePromise 96 | } 97 | return (donePromise = new Promize((resolve) => (resolveDonePromise = resolve))) 98 | }, 99 | clear() { 100 | head = tail = null 101 | size = active 102 | }, 103 | active: () => active, 104 | size: () => size, 105 | } 106 | } 107 | -------------------------------------------------------------------------------- /index.ts: -------------------------------------------------------------------------------- 1 | /** List node */ 2 | type Node = { 3 | /** input promise wrapper */ 4 | p: () => T 5 | /** resolve returned promise */ 6 | res: (value: T) => void 7 | /** reject returned promise */ 8 | rej: (reason: any) => void 9 | /** next node pointer */ 10 | next?: Node 11 | } 12 | 13 | /** Queue interface */ 14 | export interface Queue { 15 | /** Add an async function / promise wrapper to the queue */ 16 | add(promiseFunction: () => PromiseLike): Promise 17 | /** Returns a promise that resolves when the queue is empty */ 18 | done(): Promise 19 | /** Empties the queue (active promises are not cancelled) */ 20 | clear(): void 21 | /** Returns the number of promises currently running */ 22 | active(): number 23 | /** Returns the total number of promises in the queue */ 24 | size(): number 25 
| } 26 | 27 | // this just saves a few bytes 28 | let Promize = Promise 29 | 30 | /** 31 | * Creates a new queue with the specified concurrency level. 32 | * 33 | * @param {number} concurrency - The maximum number of concurrent operations. 34 | * @return {Queue} - The newly created queue. 35 | */ 36 | export let newQueue = (concurrency: number): Queue => { 37 | let active = 0 38 | let size = 0 39 | let head: Node> | undefined | null 40 | let tail: Node> | undefined | null 41 | let resolveDonePromise: (value: void | PromiseLike) => void 42 | let donePromise: Promise | void 43 | 44 | let afterRun = () => { 45 | active-- 46 | if (--size) { 47 | run() 48 | } else { 49 | donePromise = resolveDonePromise?.() 50 | } 51 | } 52 | 53 | let run = () => { 54 | if (head && active < concurrency) { 55 | active++ 56 | let curHead = head 57 | head = head.next 58 | curHead.p().then( 59 | (v) => (curHead.res(v), afterRun()), 60 | (e) => (curHead.rej(e), afterRun()) 61 | ) 62 | } 63 | } 64 | 65 | return { 66 | add(p: () => PromiseLike) { 67 | let node = { p } as Node> 68 | let promise = new Promize((res, rej) => { 69 | node.res = res 70 | node.rej = rej 71 | }) 72 | if (head) { 73 | tail = tail!.next = node 74 | } else { 75 | tail = head = node 76 | } 77 | size++ 78 | run() 79 | return promise as Promise 80 | }, 81 | done: () => { 82 | if (!size) { 83 | return Promize.resolve() 84 | } 85 | if (donePromise) { 86 | return donePromise 87 | } 88 | return (donePromise = new Promize((resolve) => (resolveDonePromise = resolve))) 89 | }, 90 | clear() { 91 | head = tail = null 92 | size = active 93 | }, 94 | active: () => active, 95 | size: () => size, 96 | } 97 | } 98 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "@henrygd/queue", 3 | "version": "1.0.7", 4 | "license": "MIT", 5 | "type": "module", 6 | "description": "Tiny async queue with concurrency control. 
Like p-limit or fastq, but smaller and faster.",
7 | "author": "Hank Dollman (https://henrygd.me)",
8 | "repository": {
9 | "type": "git",
10 | "url": "git+https://github.com/henrygd/queue.git"
11 | },
12 | "bugs": {
13 | "url": "https://github.com/henrygd/queue/issues"
14 | },
15 | "exports": {
16 | ".": {
17 | "import": "./dist/index.js",
18 | "require": "./dist/index.cjs",
19 | "types": "./dist/index.d.ts"
20 | },
21 | "./async-storage": {
22 | "import": "./dist/index.async-storage.js",
23 | "require": "./dist/index.async-storage.cjs",
24 | "types": "./dist/index.d.ts"
25 | }
26 | },
27 | "types": "./dist/index.d.ts",
28 | "scripts": {
29 | "build": "bun run build.ts && ls -l dist/index.min.js && bun run generate-types",
30 | "build:skip-types": "bun run build.ts && ls -l dist/index.min.js",
31 | "generate-types": "tsc -d index.ts --outDir dist --emitDeclarationOnly > /dev/null",
32 | "test": "bun run test:dev && bun test:dist",
33 | "test:dev": "bun test test/bun.test.ts",
34 | "test:dist": "DIST=true bun test test/bun.test.ts",
35 | "test:deno": "bun run test:deno:dev && bun run test:deno:dist",
36 | "test:deno:dist": "DIST=true deno test test/deno-test.ts --allow-env",
37 | "test:deno:dev": "deno test test/deno-test.ts --allow-env",
38 | "bench": "node bench/bench.js",
39 | "bench:bun": "bun run bench/bench.js",
40 | "bench:deno": "deno run --allow-env --allow-hrtime --allow-sys bench/bench.js",
41 | "bench:alt": "node bench/bench-alt.js",
42 | "bench:alt:bun": "bun run bench/bench-alt.js",
43 | "bench:alt:deno": "deno run --allow-env --allow-hrtime bench/bench-alt.js"
44 | },
45 | "keywords": [
46 | "promise",
47 | "promises",
48 | "limit",
49 | "limited",
50 | "concurrency",
51 | "throttle",
52 | "rate",
53 | "batch",
54 | "task",
55 | "queue",
56 | "async",
57 | "await",
58 | "async-queue",
59 | "promise-queue",
60 | "fast",
61 | "worker"
62 | ],
63 | "devDependencies": {
64 | "@types/bun": "^1.1.3",
65 | "async": "^3.2.5",
66 | "esbuild": "^0.21.4",
67 | "fastq": "^1.17.1",
68 | "mitata": "^0.1.11",
69 | "p-limit": "^5.0.0",
70 | "promise-queue": "^2.2.5",
71 | "queue": "^7.0.0",
72 | "typescript": "^5.4.5"
73 | }
74 | }
75 | 
--------------------------------------------------------------------------------
/readme.md:
--------------------------------------------------------------------------------
1 | [size-image]: https://img.shields.io/github/size/henrygd/queue/dist/index.min.js?style=flat
2 | [license-image]: https://img.shields.io/github/license/henrygd/queue?style=flat&color=%2349ac0c
3 | [license-url]: /LICENSE
4 | 
5 | # @henrygd/queue
6 | 
7 | [![File Size][size-image]](https://github.com/henrygd/queue/blob/main/dist/index.min.js) [![MIT license][license-image]][license-url] [![JSR Score 100%](https://jsr.io/badges/@henrygd/queue/score)](https://jsr.io/@henrygd/queue)
8 | 
9 | Tiny async queue with concurrency control. Like `p-limit` or `fastq`, but smaller and faster. See [comparisons and benchmarks](#comparisons-and-benchmarks) below.
10 | 
11 | Works with: browsers, Deno, Node.js, Cloudflare Workers, and Bun.
12 | 
13 | ## Usage
14 | 
15 | Create a queue with the `newQueue` function. Then add async functions - or promise-returning functions - to your queue with the `add` method.
16 | 
17 | You can use `queue.done()` to wait for the queue to be empty.
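Note that `queue.done()` resolves once the queue has drained - it does not reject if individual tasks fail, so handle errors on the promise returned by `add`. Here is a minimal sketch of that (assuming a `queue` created with `newQueue` and a task that may throw):

```ts
const task = queue.add(async () => {
	const res = await fetch('https://pokeapi.co/api/v2/pokemon/ditto')
	if (!res.ok) throw new Error(`HTTP ${res.status}`)
	return res.json()
})

// handle rejections where you await the task itself
task.catch((err) => console.error('task failed:', err))

// done() resolves once the queue is empty, even if some tasks rejected
await queue.done()
```

The example below puts `add` and `done` together with real requests and a concurrency of 2.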
18 | 
19 | 
20 | ```ts
21 | import { newQueue } from '@henrygd/queue'
22 | 
23 | // create a new queue with a concurrency of 2
24 | const queue = newQueue(2)
25 | 
26 | const pokemon = ['ditto', 'hitmonlee', 'pidgeot', 'poliwhirl', 'golem', 'charizard']
27 | 
28 | for (const name of pokemon) {
29 | 	queue.add(async () => {
30 | 		const res = await fetch(`https://pokeapi.co/api/v2/pokemon/${name}`)
31 | 		const json = await res.json()
32 | 		console.log(`${json.name}: ${json.height * 10}cm | ${json.weight / 10}kg`)
33 | 	})
34 | }
35 | 
36 | console.log('running')
37 | await queue.done()
38 | console.log('done')
39 | ```
40 | 
41 | The return value of `queue.add` is the same as the return value of the supplied function.
42 | 
43 | ```ts
44 | const response = await queue.add(() => fetch('https://pokeapi.co/api/v2/pokemon'))
45 | console.log(response.ok, response.status, response.headers)
46 | ```
47 | 
48 | > [!TIP]
49 | > If you need support for Node's [AsyncLocalStorage](https://nodejs.org/api/async_context.html#introduction), import `@henrygd/queue/async-storage` instead.
50 | 
51 | ## Queue interface
52 | 
53 | ```ts
54 | /** Add an async function / promise wrapper to the queue */
55 | queue.add<T>(promiseFunction: () => PromiseLike<T>): Promise<T>
56 | /** Returns a promise that resolves when the queue is empty */
57 | queue.done(): Promise<void>
58 | /** Empties the queue (active promises are not cancelled) */
59 | queue.clear(): void
60 | /** Returns the number of promises currently running */
61 | queue.active(): number
62 | /** Returns the total number of promises in the queue */
63 | queue.size(): number
64 | ```
65 | 
66 | ## Comparisons and benchmarks
67 | 
68 | | Library | Version | Bundle size (B) | Weekly downloads |
69 | | :-------------------------------------------------------------- | :------ | :-------------- | :--------------- |
70 | | @henrygd/queue | 1.0.6 | 355 | dozens :) |
71 | | [p-limit](https://github.com/sindresorhus/p-limit) | 5.0.0 | 1,763 | 118,953,973 |
72 | | [async.queue](https://github.com/caolan/async) | 3.2.5 | 6,873 | 53,645,627 |
73 | | [fastq](https://github.com/mcollina/fastq) | 1.17.1 | 3,050 | 39,257,355 |
74 | | [queue](https://github.com/jessetane/queue) | 7.0.0 | 2,840 | 4,259,101 |
75 | | [promise-queue](https://github.com/promise-queue/promise-queue) | 2.2.5 | 2,200 | 1,092,431 |
76 | 
77 | ### Note on benchmarks
78 | 
79 | All libraries run the exact same test. Each operation measures how quickly the queue can resolve 1,000 async functions. The function just increments a counter and checks if it has reached 1,000.[^benchmark]
80 | 
81 | We check for completion inside the function so that `promise-queue` and `p-limit` are not penalized by having to use `Promise.all` (they don't provide a promise that resolves when the queue is empty). A simplified sketch of the benchmark operation is included at the end of this readme.
82 | 
83 | ## Browser benchmark
84 | 
85 | This test was run in Chromium; Chrome and Edge give the same results. Firefox and Safari are slower and closer, with `@henrygd/queue` just edging out `promise-queue`. I think both are hitting the upper limit of what those browsers will allow.
86 | 
87 | You can run or tweak it yourself here: https://jsbm.dev/TKyOdie0sbpOh
88 | 
89 | ![@henrygd/queue - 13,665 Ops/s. fastq - 7,661 Ops/s. promise-queue - 7,650 Ops/s. async.queue - 4,060 Ops/s. p-limit - 1,067 Ops/s. queue - 721 Ops/s](https://henrygd-assets.b-cdn.net/queue/106/browser-benchmark.png)
90 | 
91 | ## Node.js benchmarks
92 | 
93 | > Note: `p-limit` 6.1.0 now places between `async.queue` and `queue` in Node and Deno.
94 | 
95 | Ryzen 5 4500U | 8GB RAM | Node 22.3.0
96 | 
97 | ![@henrygd/queue - 1.9x faster than fastq. 2.03x promise-queue. 3.86x async.queue. 20x queue. 86x p-limit.](https://henrygd-assets.b-cdn.net/queue/106/node-4500.png)
98 | 
99 | Ryzen 7 6800H | 32GB RAM | Node 22.3.0
100 | 
101 | ![@henrygd/queue - 1.9x faster than fastq. 2.01x promise-queue. 3.98x async.queue. 6.86x queue. 88x p-limit.](https://henrygd-assets.b-cdn.net/queue/106/node-6800h.png)
102 | 
103 | ## Deno benchmarks
104 | 
105 | > Note: `p-limit` 6.1.0 now places between `async.queue` and `queue` in Node and Deno.
106 | 
107 | Ryzen 5 4500U | 8GB RAM | Deno 1.44.4
108 | 
109 | ![@henrygd/queue - 1.9x faster than fastq. 2.01x promise-queue. 4.7x async.queue. 7x queue. 28x p-limit.](https://henrygd-assets.b-cdn.net/queue/106/deno-4500.png)
110 | 
111 | Ryzen 7 6800H | 32GB RAM | Deno 1.44.4
112 | 
113 | ![@henrygd/queue - 1.82x faster than fastq. 1.91x promise-queue. 3.47x async.queue. 7x queue. 26x p-limit.](https://henrygd-assets.b-cdn.net/queue/106/deno-6800h.png)
114 | 
115 | ## Bun benchmarks
116 | 
117 | Ryzen 5 4500U | 8GB RAM | Bun 1.1.17
118 | 
119 | ![@henrygd/queue - 1.25x faster than promise-queue. 1.66x fastq. 2.73x async.queue. 5.44x p-limit. 12x queue.](https://henrygd-assets.b-cdn.net/queue/106/bun-4500.png)
120 | 
121 | Ryzen 7 6800H | 32GB RAM | Bun 1.1.17
122 | 
123 | ![@henrygd/queue - 1.17x faster than promise-queue. 1.51x fastq. 2.53x async.queue. 5.25x p-limit. 5.39x queue.](https://henrygd-assets.b-cdn.net/queue/106/bun-6800h.png)
124 | 
125 | ## Cloudflare Workers benchmark
126 | 
127 | Uses [oha](https://github.com/hatoo/oha) to make 1,000 requests to each worker. Each request creates a queue and resolves 5,000 functions.
128 | 
129 | This was run locally using [Wrangler](https://developers.cloudflare.com/workers/get-started/guide/) on a Ryzen 7 6800H laptop. Wrangler uses the same [workerd](https://github.com/cloudflare/workerd) runtime as workers deployed to Cloudflare, so the relative difference should be accurate. Here's the [repository for this benchmark](https://github.com/henrygd/async-queue-wrangler-benchmark).
130 | 
131 | | Library | Requests/sec | Total (sec) | Average (sec) | Slowest (sec) |
132 | | :------------- | :----------- | :---------- | :------------ | :------------ |
133 | | @henrygd/queue | 816.1074 | 1.2253 | 0.0602 | 0.0864 |
134 | | promise-queue | 647.2809 | 1.5449 | 0.0759 | 0.1149 |
135 | | fastq | 336.7031 | 3.0877 | 0.1459 | 0.2080 |
136 | | async.queue | 198.9986 | 5.0252 | 0.2468 | 0.3544 |
137 | | queue | 85.6483 | 11.6757 | 0.5732 | 0.7629 |
138 | | p-limit | 77.7434 | 12.8628 | 0.6316 | 0.9585 |
139 | 
140 | ## Related
141 | 
142 | [`@henrygd/semaphore`](https://github.com/henrygd/semaphore) - The fastest JavaScript inline semaphores and mutexes using async / await.
143 | 
144 | ## License
145 | 
146 | [MIT license](/LICENSE)
147 | 
148 | [^benchmark]: In reality, you may not be running so many jobs at once, and your jobs will take much longer to resolve, so performance will depend more on the jobs themselves.
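As a reference for the numbers above, each benchmark operation boils down to roughly the following. This is a simplified sketch, not the exact harness in `bench/bench.js` (which also handles warmup, repeated runs, and each library's own add/push method):

```ts
import { newQueue } from '@henrygd/queue'

const queue = newQueue(5)
const jobs = 1_000

// one benchmark operation: resolve 1,000 near-empty async functions
const { promise, resolve } = Promise.withResolvers<void>()
let count = 0
for (let i = 0; i < jobs; i++) {
	queue.add(async () => {
		if (++count === jobs) resolve()
	})
}
await promise
```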
149 | -------------------------------------------------------------------------------- /test/bun.test.ts: -------------------------------------------------------------------------------- 1 | import { env } from 'bun' 2 | import { test, expect, describe } from 'bun:test' 3 | import { newQueue as devQueue } from '../index.ts' 4 | import { newQueue as distQueue } from '../dist/index.js' 5 | import { newQueue as devContextQueue } from '../index.async-storage.ts' 6 | import { newQueue as distContextQueue } from '../dist/index.async-storage.js' 7 | import { AsyncLocalStorage } from 'async_hooks' 8 | 9 | let newQueue: typeof devQueue 10 | let contextQueue: typeof devContextQueue 11 | 12 | if (env.DIST) { 13 | console.log('using dist files') 14 | newQueue = distQueue 15 | contextQueue = distContextQueue 16 | } else { 17 | console.log('using dev files') 18 | newQueue = devQueue 19 | contextQueue = devContextQueue 20 | } 21 | 22 | const wait = (ms: number) => new Promise((resolve) => setTimeout(resolve, ms)) 23 | 24 | // type CreateQueue = typeof newQueue | typeof slimQueue 25 | type CreateQueue = typeof newQueue 26 | 27 | describe('main', () => { 28 | test('should process 2 promises concurrently', () => testConcurrency(newQueue)) 29 | 30 | test('should handle promise rejections correctly', () => testRejections(newQueue)) 31 | 32 | test('greater concurrency should be faster', async () => { 33 | const loops = 10 34 | const waitTime = 50 35 | const runTimes = [] as number[] 36 | 37 | for (let i = 3; i < 5; i++) { 38 | const queue = newQueue(i) 39 | const start = performance.now() 40 | for (let i = 0; i < loops; i++) { 41 | queue.add(() => wait(waitTime)) 42 | } 43 | await queue.done() 44 | runTimes.push(performance.now() - start) 45 | } 46 | 47 | const [timeOne, timeTwo] = runTimes 48 | expect(timeTwo).toBeLessThan(timeOne) 49 | }) 50 | 51 | test('add method should return a promise', () => { 52 | const queue = newQueue(2) 53 | const promise = queue.add(() => new Promise((resolve) => resolve(1))) 54 | expect(promise).toBeInstanceOf(Promise) 55 | expect(promise).resolves.toBe(1) 56 | 57 | const promiseErr = queue.add(() => new Promise((_, reject) => reject(new Error()))) 58 | expect(promiseErr).toBeInstanceOf(Promise) 59 | expect(promiseErr).rejects.toThrow() 60 | 61 | const asyncFn = queue.add(async () => { 62 | return 'hello' 63 | }) 64 | expect(asyncFn).toBeInstanceOf(Promise) 65 | expect(asyncFn).resolves.toBe('hello') 66 | 67 | const asyncFnErr = queue.add(async () => { 68 | throw new Error('hullo') 69 | }) 70 | expect(asyncFnErr).toBeInstanceOf(Promise) 71 | expect(asyncFnErr).rejects.toThrow('hullo') 72 | }) 73 | 74 | test('size should return the number of promises in the queue', async () => { 75 | const queue = newQueue(2) 76 | expect(queue.size()).toBe(0) 77 | for (let i = 0; i < 10; i++) { 78 | queue.add(() => wait(10)) 79 | } 80 | expect(queue.size()).toBe(10) 81 | await wait(15) 82 | expect(queue.size()).toBe(8) 83 | await wait(90) 84 | expect(queue.size()).toBe(0) 85 | }) 86 | 87 | test('jobs should not repeat', async () => { 88 | const queue = newQueue(2) 89 | const results = [] as number[] 90 | for (let i = 0; i < 10; i++) { 91 | queue.add(async () => { 92 | await wait(i) 93 | results.push(i) 94 | }) 95 | } 96 | await queue.done() 97 | expect(results).toEqual([0, 1, 2, 3, 4, 5, 6, 7, 8, 9]) 98 | }) 99 | 100 | test('active should return the number of active jobs', async () => { 101 | const queue = newQueue(4) 102 | expect(queue.active()).toBe(0) 103 | for (let i = 0; i < 10; i++) { 104 | 
queue.add(() => wait(10)) 105 | } 106 | expect(queue.active()).toBe(4) 107 | await wait(25) 108 | expect(queue.active()).toBe(2) 109 | await wait(10) 110 | expect(queue.active()).toBe(0) 111 | }) 112 | 113 | test('queue.done() should work properly and be reusable', async () => { 114 | const queue = newQueue(2) 115 | // works on empty queue 116 | await queue.done() 117 | expect(queue.size()).toBe(0) 118 | // works with simple operation 119 | queue.add(() => wait(10)) 120 | expect(queue.size()).toBe(1) 121 | await queue.done() 122 | expect(queue.size()).toBe(0) 123 | // works repeatedly with clear on a bunch of random timings 124 | for (let i = 0; i < 10; i++) { 125 | const jobs = 50 126 | const jobTime = Math.ceil(Math.random() * 5 + 1) 127 | const clearTime = Math.ceil(Math.random() * 25 + 5) 128 | for (let i = 0; i < jobs; i++) { 129 | queue.add(() => wait(jobTime)) 130 | } 131 | setTimeout(() => { 132 | expect(queue.size()).toBeGreaterThanOrEqual( 133 | jobs - 1 - Math.trunc((clearTime / jobTime) * 2) 134 | ) 135 | queue.clear() 136 | expect(queue.size()).toBe(2) 137 | }, clearTime) 138 | await queue.done() 139 | expect(queue.size()).toBe(0) 140 | } 141 | 142 | // works with multiple awaits at same time 143 | async function waitDone() { 144 | queue.add(() => wait(10)) 145 | await queue.done() 146 | } 147 | const jobs = [] as Promise[] 148 | for (let i = 0; i < 2; i++) { 149 | jobs.push(waitDone()) 150 | } 151 | await Promise.all(jobs) 152 | }) 153 | 154 | test('clear should clear the queue', async () => { 155 | const queue = newQueue(2) 156 | const runTimes = [] as number[] 157 | 158 | for (let i = 0; i < 2; i++) { 159 | let start = performance.now() 160 | for (let i = 0; i < 10; i++) { 161 | queue.add(() => wait(50)) 162 | } 163 | if (i === 1) { 164 | setTimeout(queue.clear, 110) 165 | } 166 | await queue.done() 167 | runTimes.push(performance.now() - start) 168 | } 169 | 170 | const [runOne, runTwo] = runTimes 171 | expect(runOne).toBeGreaterThan(240) 172 | expect(runTwo).toBeGreaterThan(140) 173 | expect(runTwo).toBeLessThan(160) 174 | }) 175 | }) 176 | 177 | describe('async-storage', () => { 178 | test('should propagate async execution context properly', async () => { 179 | const queue = contextQueue(2) 180 | const store = new AsyncLocalStorage() 181 | 182 | const checkId = async (id: number) => { 183 | await Promise.resolve() 184 | const storeValue = store.getStore() as { id: number } 185 | expect(id).toBe(storeValue.id) 186 | } 187 | const startContext = async (id: number) => store.run({ id }, () => queue.add(() => checkId(id))) 188 | await Promise.all(Array.from({ length: 50 }, (_, id) => startContext(id))) 189 | }) 190 | }) 191 | 192 | async function testConcurrency(createQueue: CreateQueue) { 193 | const queue = createQueue(2) 194 | const running: number[] = [] 195 | const results: number[] = [] 196 | 197 | const createPromise = (value: number, delay: number) => () => 198 | new Promise((resolve) => { 199 | running.push(value) 200 | setTimeout(() => { 201 | resolve(value) 202 | running.splice(running.indexOf(value), 1) 203 | }, delay) 204 | }) 205 | 206 | const p1 = queue.add(createPromise(1, 300)) 207 | const p2 = queue.add(createPromise(2, 175)) 208 | const p3 = queue.add(createPromise(3, 200)) 209 | const p4 = queue.add(createPromise(4, 200)) 210 | 211 | for (const promise of [p1, p2, p3, p4]) { 212 | promise.then((result) => results.push(result)) 213 | } 214 | 215 | // Concurrent checks to ensure only 2 promises are running at the same time 216 | setTimeout(() => { 217 | 
expect(running).toContain(1) 218 | expect(running).toContain(2) 219 | expect(running.length).toBe(2) 220 | }, 50) 221 | setTimeout(() => { 222 | expect(running).toContain(1) 223 | expect(running).toContain(3) 224 | expect(running.length).toBe(2) 225 | }, 250) 226 | setTimeout(() => { 227 | expect(running).toContain(3) 228 | expect(running).toContain(4) 229 | expect(running.length).toBe(2) 230 | }, 350) 231 | 232 | await queue.done() 233 | 234 | expect(results).toEqual([2, 1, 3, 4]) 235 | } 236 | 237 | async function testRejections(createQueue: CreateQueue) { 238 | const queue = createQueue(2) 239 | 240 | const createPromise = (value: number, delay: number, shouldReject: boolean) => () => 241 | new Promise((resolve, reject) => { 242 | setTimeout(() => { 243 | if (shouldReject) { 244 | reject(new Error(`Promise ${value} failed`)) 245 | } else { 246 | resolve(value) 247 | } 248 | }, delay) 249 | }) 250 | 251 | const p1 = queue.add(createPromise(1, 100, false)) 252 | const p2 = queue.add(createPromise(2, 200, true)) 253 | const p3 = queue.add(createPromise(3, 300, false)) 254 | const p4 = queue.add(createPromise(4, 50, true)) 255 | 256 | const results: number[] = [] 257 | const errors: string[] = [] 258 | 259 | for (const promise of [p1, p2, p3, p4]) { 260 | promise.then((result) => results.push(result)).catch((error) => errors.push(error.message)) 261 | } 262 | 263 | await Promise.allSettled([p1, p2, p3, p4]) 264 | 265 | expect(results).toEqual([1, 3]) 266 | expect(errors).toEqual(['Promise 2 failed', 'Promise 4 failed']) 267 | expect(queue.size()).toBe(0) 268 | } 269 | -------------------------------------------------------------------------------- /test/deno-test.ts: -------------------------------------------------------------------------------- 1 | import { expect } from 'jsr:@std/expect' 2 | import { newQueue as devQueue } from '../index.ts' 3 | import { newQueue as distQueue } from '../dist/index.js' 4 | import { newQueue as devContextQueue } from '../index.async-storage.ts' 5 | import { newQueue as distContextQueue } from '../dist/index.async-storage.js' 6 | import { AsyncLocalStorage } from 'async_hooks' 7 | 8 | const test = Deno.test 9 | 10 | let newQueue: typeof devQueue 11 | let contextQueue: typeof devContextQueue 12 | 13 | if (Deno.env.get('DIST')) { 14 | console.log('using dist files') 15 | newQueue = distQueue 16 | contextQueue = distContextQueue 17 | } else { 18 | console.log('using dev files') 19 | newQueue = devQueue 20 | contextQueue = devContextQueue 21 | } 22 | 23 | const wait = (ms: number) => new Promise((resolve) => setTimeout(resolve, ms)) 24 | 25 | // type CreateQueue = typeof newQueue | typeof slimQueue 26 | type CreateQueue = typeof newQueue 27 | 28 | test('should process 2 promises concurrently', () => testConcurrency(newQueue)) 29 | 30 | test('should handle promise rejections correctly', () => testRejections(newQueue)) 31 | 32 | test('greater concurrency should be faster', async () => { 33 | const loops = 10 34 | const waitTime = 50 35 | const runTimes = [] as number[] 36 | 37 | for (let i = 3; i < 5; i++) { 38 | const queue = newQueue(i) 39 | const start = performance.now() 40 | for (let i = 0; i < loops; i++) { 41 | queue.add(() => wait(waitTime)) 42 | } 43 | await queue.done() 44 | runTimes.push(performance.now() - start) 45 | } 46 | 47 | const [timeOne, timeTwo] = runTimes 48 | expect(timeTwo).toBeLessThan(timeOne) 49 | }) 50 | 51 | test('add method should return a promise', () => { 52 | const queue = newQueue(2) 53 | const promise = queue.add(() => new 
Promise((resolve) => resolve(1))) 54 | expect(promise).toBeInstanceOf(Promise) 55 | expect(promise).resolves.toBe(1) 56 | 57 | const promiseErr = queue.add(() => new Promise((_, reject) => reject(new Error()))) 58 | expect(promiseErr).toBeInstanceOf(Promise) 59 | expect(promiseErr).rejects.toThrow() 60 | 61 | const asyncFn = queue.add(async () => { 62 | return 'hello' 63 | }) 64 | expect(asyncFn).toBeInstanceOf(Promise) 65 | expect(asyncFn).resolves.toBe('hello') 66 | 67 | const asyncFnErr = queue.add(async () => { 68 | throw new Error('hullo') 69 | }) 70 | expect(asyncFnErr).toBeInstanceOf(Promise) 71 | expect(asyncFnErr).rejects.toThrow('hullo') 72 | }) 73 | 74 | test('size should return the number of promises in the queue', async () => { 75 | const queue = newQueue(2) 76 | expect(queue.size()).toBe(0) 77 | for (let i = 0; i < 10; i++) { 78 | queue.add(() => wait(10)) 79 | } 80 | expect(queue.size()).toBe(10) 81 | await wait(15) 82 | expect(queue.size()).toBe(8) 83 | await wait(90) 84 | expect(queue.size()).toBe(0) 85 | }) 86 | 87 | test('jobs should not repeat', async () => { 88 | const queue = newQueue(2) 89 | const results = [] as number[] 90 | for (let i = 0; i < 10; i++) { 91 | queue.add(async () => { 92 | await wait(i) 93 | results.push(i) 94 | }) 95 | } 96 | await queue.done() 97 | expect(results).toEqual([0, 1, 2, 3, 4, 5, 6, 7, 8, 9]) 98 | }) 99 | 100 | test('active should return the number of active jobs', async () => { 101 | const queue = newQueue(4) 102 | expect(queue.active()).toBe(0) 103 | for (let i = 0; i < 10; i++) { 104 | queue.add(() => wait(10)) 105 | } 106 | expect(queue.active()).toBe(4) 107 | await wait(25) 108 | expect(queue.active()).toBe(2) 109 | await wait(10) 110 | expect(queue.active()).toBe(0) 111 | }) 112 | 113 | test('queue.done() should work properly and be reusable', async () => { 114 | const queue = newQueue(2) 115 | // works on empty queue 116 | await queue.done() 117 | expect(queue.size()).toBe(0) 118 | // works with simple operation 119 | queue.add(() => wait(10)) 120 | expect(queue.size()).toBe(1) 121 | await queue.done() 122 | expect(queue.size()).toBe(0) 123 | // works repeatedly with clear on a bunch of random timings 124 | for (let i = 0; i < 10; i++) { 125 | const jobs = 50 126 | const jobTime = Math.ceil(Math.random() * 5 + 1) 127 | const clearTime = Math.ceil(Math.random() * 25 + 5) 128 | for (let i = 0; i < jobs; i++) { 129 | queue.add(() => wait(jobTime)) 130 | } 131 | setTimeout(() => { 132 | expect(queue.size()).toBeGreaterThanOrEqual(jobs - 1 - Math.trunc((clearTime / jobTime) * 2)) 133 | queue.clear() 134 | expect(queue.size()).toBe(2) 135 | }, clearTime) 136 | await queue.done() 137 | expect(queue.size()).toBe(0) 138 | } 139 | 140 | // works with multiple awaits at same time 141 | async function waitDone() { 142 | queue.add(() => wait(10)) 143 | await queue.done() 144 | } 145 | const jobs = [] as Promise[] 146 | for (let i = 0; i < 2; i++) { 147 | jobs.push(waitDone()) 148 | } 149 | await Promise.all(jobs) 150 | }) 151 | 152 | test('clear should clear the queue', async () => { 153 | const queue = newQueue(2) 154 | const runTimes = [] as number[] 155 | 156 | for (let i = 0; i < 2; i++) { 157 | let start = performance.now() 158 | for (let i = 0; i < 10; i++) { 159 | queue.add(() => wait(50)) 160 | } 161 | if (i === 1) { 162 | setTimeout(queue.clear, 110) 163 | } 164 | await queue.done() 165 | runTimes.push(performance.now() - start) 166 | } 167 | 168 | const [runOne, runTwo] = runTimes 169 | expect(runOne).toBeGreaterThan(240) 170 | 
expect(runTwo).toBeGreaterThan(140) 171 | expect(runTwo).toBeLessThan(160) 172 | }) 173 | 174 | test('should propagate async execution context properly', async () => { 175 | const queue = contextQueue(2) 176 | const store = new AsyncLocalStorage() 177 | 178 | const checkId = async (id: number) => { 179 | await Promise.resolve() 180 | const storeValue = store.getStore() as { id: number } 181 | expect(id).toBe(storeValue.id) 182 | } 183 | const startContext = async (id: number) => store.run({ id }, () => queue.add(() => checkId(id))) 184 | await Promise.all(Array.from({ length: 50 }, (_, id) => startContext(id))) 185 | }) 186 | 187 | async function testConcurrency(createQueue: CreateQueue) { 188 | const queue = createQueue(2) 189 | const running: number[] = [] 190 | const results: number[] = [] 191 | 192 | const createPromise = (value: number, delay: number) => () => 193 | new Promise((resolve) => { 194 | running.push(value) 195 | setTimeout(() => { 196 | resolve(value) 197 | running.splice(running.indexOf(value), 1) 198 | }, delay) 199 | }) 200 | 201 | const p1 = queue.add(createPromise(1, 300)) 202 | const p2 = queue.add(createPromise(2, 175)) 203 | const p3 = queue.add(createPromise(3, 200)) 204 | const p4 = queue.add(createPromise(4, 200)) 205 | 206 | for (const promise of [p1, p2, p3, p4]) { 207 | promise.then((result) => results.push(result)) 208 | } 209 | 210 | // Concurrent checks to ensure only 2 promises are running at the same time 211 | setTimeout(() => { 212 | expect(running).toContain(1) 213 | expect(running).toContain(2) 214 | expect(running.length).toBe(2) 215 | }, 50) 216 | setTimeout(() => { 217 | expect(running).toContain(1) 218 | expect(running).toContain(3) 219 | expect(running.length).toBe(2) 220 | }, 250) 221 | setTimeout(() => { 222 | expect(running).toContain(3) 223 | expect(running).toContain(4) 224 | expect(running.length).toBe(2) 225 | }, 350) 226 | 227 | await queue.done() 228 | 229 | expect(results).toEqual([2, 1, 3, 4]) 230 | } 231 | 232 | async function testRejections(createQueue: CreateQueue) { 233 | const queue = createQueue(2) 234 | 235 | const createPromise = (value: number, delay: number, shouldReject: boolean) => () => 236 | new Promise((resolve, reject) => { 237 | setTimeout(() => { 238 | if (shouldReject) { 239 | reject(new Error(`Promise ${value} failed`)) 240 | } else { 241 | resolve(value) 242 | } 243 | }, delay) 244 | }) 245 | 246 | const p1 = queue.add(createPromise(1, 100, false)) 247 | const p2 = queue.add(createPromise(2, 200, true)) 248 | const p3 = queue.add(createPromise(3, 300, false)) 249 | const p4 = queue.add(createPromise(4, 50, true)) 250 | 251 | const results: number[] = [] 252 | const errors: string[] = [] 253 | 254 | for (const promise of [p1, p2, p3, p4]) { 255 | promise.then((result) => results.push(result)).catch((error) => errors.push(error.message)) 256 | } 257 | 258 | await Promise.allSettled([p1, p2, p3, p4]) 259 | 260 | expect(results).toEqual([1, 3]) 261 | expect(errors).toEqual(['Promise 2 failed', 'Promise 4 failed']) 262 | } 263 | --------------------------------------------------------------------------------