├── .github ├── funding.yml └── workflows │ ├── release.yml │ └── test-ts.yml ├── .gitignore ├── .prettierrc.json ├── LICENSE ├── README.md ├── index.js ├── index.ts ├── jsr.json ├── package-lock.json ├── package.json ├── test ├── basic.test.ts ├── build │ ├── input.js │ ├── package-lock.json │ └── package.json └── index.ts └── tsconfig.json /.github/funding.yml: -------------------------------------------------------------------------------- 1 | github: paulmillr 2 | -------------------------------------------------------------------------------- /.github/workflows/release.yml: -------------------------------------------------------------------------------- 1 | name: Publish release 2 | on: 3 | release: 4 | types: [created] 5 | jobs: 6 | release-js: 7 | name: 'jsbt v0.4.1' 8 | uses: paulmillr/jsbt/.github/workflows/release.yml@2318b9efe24831b4bd4cadf720c96f071c69c64d 9 | with: 10 | build-path: test/build 11 | # slow-types: false 12 | secrets: 13 | NPM_PUBLISH_TOKEN: ${{ secrets.NPM_PUBLISH_TOKEN }} 14 | permissions: 15 | contents: write 16 | id-token: write 17 | attestations: write 18 | -------------------------------------------------------------------------------- /.github/workflows/test-ts.yml: -------------------------------------------------------------------------------- 1 | name: Run TS tests 2 | on: 3 | - push 4 | - pull_request 5 | jobs: 6 | test-js: 7 | name: 'jsbt v0.4.1' 8 | uses: paulmillr/jsbt/.github/workflows/test-ts.yml@2318b9efe24831b4bd4cadf720c96f071c69c64d 9 | # with: 10 | # submodules: false 11 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | node_modules 2 | /lib 3 | *.d.ts 4 | *.d.ts.map 5 | *.js.map 6 | /test/build 7 | -------------------------------------------------------------------------------- /.prettierrc.json: -------------------------------------------------------------------------------- 1 | { 2 | 
"printWidth": 100, 3 | "singleQuote": true, 4 | "trailingComma": "es5" 5 | } 6 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | The MIT License (MIT) 2 | 3 | Copyright (c) 2020 Paul Miller (https://paulmillr.com) 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the “Software”), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in 13 | all copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN 21 | THE SOFTWARE. -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # micro-ftch 2 | 3 | Wrappers for [built-in fetch()](https://developer.mozilla.org/en-US/docs/Web/API/fetch) enabling killswitch, logging, concurrency limit and other features. 4 | 5 | fetch is great, however, its usage in secure environments is complicated. The library makes it simple. 

## Usage

A standalone file
[micro-ftch.js](https://github.com/paulmillr/micro-ftch/releases) is also available.

> `npm install micro-ftch`

> `jsr add jsr:@paulmillr/micro-ftch`

```ts
import { ftch, jsonrpc, replayable } from 'micro-ftch';

let enabled = false;
const net = ftch(fetch, {
  isValidRequest: () => enabled,
  log: (url, options) => console.log(url, options),
  timeout: 5000,
  concurrencyLimit: 10,
});
const res = await net('https://example.com');

// Composable
const rpc = jsonrpc(net, 'http://rpc_node/', {
  headers: {},
  batchSize: 20,
});
const res1 = await rpc.call('method', 'arg0', 'arg1');
const res2 = await rpc.callNamed('method', { arg0: '0', arg1: '1' }); // named arguments
const testRpc = replayable(rpc);
// Basic auth auto-parsing
await net('https://user:pwd@httpbin.org/basic-auth/user/pwd');
```

- [ftch](#ftch)
  - [isValidRequest](#isValidRequest)
  - [log](#log)
  - [timeout](#timeout)
  - [concurrencyLimit](#concurrencyLimit)
  - [Basic auth](#basic-auth)
- [jsonrpc](#jsonrpc)
- [replayable](#replayable)
- [Privacy](#privacy)
- [License](#license)

There are three wrappers over `fetch()`:

1. `ftch(fetch)` - isValidRequest, logging, timeouts, concurrency limits, basic auth
2. `jsonrpc(fetch)` - batched JSON-RPC functionality
3. `replayable(fetch)` - log & replay network requests without actually calling network code.

## ftch

Basic wrapper over `fetch()`.

### isValidRequest

When the isValidRequest killswitch returns false, all requests throw an error.
You can dynamically enable and disable it at any time.
65 | 66 | ```ts 67 | let ENABLED = true; 68 | const f = ftch(fetch, { isValidRequest: () => ENABLED }); 69 | f('http://localhost'); // ok 70 | ENABLED = false; 71 | f('http://localhost'); // throws 72 | ENABLED = true; 73 | f('http://localhost'); // ok 74 | ``` 75 | 76 | ### log 77 | 78 | ```ts 79 | const f = ftch(fetch, { log: (url, opts) => console.log('fetching', url, opts) }); 80 | f('http://url/'); // will print request information 81 | ``` 82 | 83 | ### timeout 84 | 85 | ```ts 86 | // browser and OS may have additional timeouts, we cannot override them 87 | // a: per-request timeout 88 | const f = ftch(fetch); 89 | const res = await f('http://url/', { timeout: 1000 }); // throws if request takes more than one second 90 | 91 | // b: timeout for all 92 | const f = ftch(fetch, { timeout: 1000 }); 93 | const res = await f('http://url/'); // throws if request takes more than one second 94 | ``` 95 | 96 | ### concurrencyLimit 97 | 98 | Allows to not accidentally hit rate limits or do DoS. 99 | 100 | ```ts 101 | // browser and OS may have additional limits, we cannot override them 102 | const f = ftch(fetch, { concurrencyLimit: 1 }); 103 | const res = await Promise.all([f('http://url1/'), f('http://url2/')]); // these would be processed sequentially 104 | ``` 105 | 106 | ### Basic auth 107 | 108 | ```ts 109 | const f = ftch(fetch); 110 | const res = await f('https://user:pwd@httpbin.org/basic-auth/user/pwd'); // supports basic auth! 111 | ``` 112 | 113 | ### jsonrpc 114 | 115 | Supports batching multiple HTTP requests into one "Batched" JSON RPC HTTP request. 
Can massively speed things up when the server is single-threaded, or has small per-user limits.

```ts
const rpc = jsonrpc(fetch, 'http://rpc_node/', {
  headers: {},
  batchSize: 20,
});
const res = await rpc.call('method', 'arg0', 'arg1');
const res2 = await rpc.callNamed('method', { arg0: '0', arg1: '1' }); // named arguments
```

### replayable

Small utility to log & replay network requests in tests, without actually calling network code.

```ts
const f = ftch(fetch);
const replayCapture = replayable(f); // wraps fetch
await replayCapture('http://url/1'); // real network
await replayCapture('http://url/2');
const logs = replayCapture.export(); // Exports logs

// When logs provided - use cached version (faster)
const replayTest = replayable(f, JSON.parse(logs));
await replayTest('http://url/1'); // cached
await replayTest('http://url/2'); // cached
await replayTest('http://url/3'); // real network

// When done and everything is captured, turn on 'offline' mode to throw on network requests:
const replayTestOffline = replayable(f, JSON.parse(logs), {
  offline: true,
});
await replayTestOffline('http://url/1'); // cached
await replayTestOffline('http://url/2'); // cached
await replayTestOffline('http://url/3'); // throws!
```

## Privacy

ftch() disables referrer by default by setting `referrerPolicy: 'no-referrer'`.

## License

MIT (c) Paul Miller [(https://paulmillr.com)](https://paulmillr.com), see LICENSE file.
159 | -------------------------------------------------------------------------------- /index.js: -------------------------------------------------------------------------------- 1 | /** 2 | * Wrappers for [built-in fetch()](https://developer.mozilla.org/en-US/docs/Web/API/fetch) enabling 3 | * killswitch, logging, concurrency limit and other features. fetch is great, however, its usage in secure environments is complicated. The library makes it simple. 4 | * @module 5 | * @example 6 | ```js 7 | import { ftch, jsonrpc, replayable } from 'micro-ftch'; 8 | 9 | let enabled = false; 10 | const net = ftch(fetch, { 11 | isValidRequest: () => enabled, 12 | log: (url, options) => console.log(url, options), 13 | timeout: 5000, 14 | concurrencyLimit: 10, 15 | }); 16 | const res = await net('https://example.com'); 17 | 18 | // Composable 19 | const rpc = jsonrpc(net, 'http://rpc_node/', { 20 | headers: {}, 21 | batchSize: 20, 22 | }); 23 | const res1 = await rpc.call('method', 'arg0', 'arg1'); 24 | const res2 = await rpc.callNamed('method', { arg0: '0', arg1: '1' }); // named arguments 25 | const testRpc = replayable(rpc); 26 | // Basic auth auto-parsing 27 | await net('https://user:pwd@httpbin.org/basic-auth/user/pwd'); 28 | ``` 29 | */ 30 | // Utils 31 | // Awaiting for promise is equal to node nextTick 32 | const nextTick = async () => { }; 33 | // Small internal primitive to limit concurrency 34 | function limit(concurrencyLimit) { 35 | let currentlyProcessing = 0; 36 | const queue = []; 37 | const next = () => { 38 | if (!queue.length) 39 | return; 40 | if (currentlyProcessing >= concurrencyLimit) 41 | return; 42 | currentlyProcessing++; 43 | const first = queue.shift(); 44 | if (!first) 45 | throw new Error('empty queue'); // should not happen 46 | first(); 47 | }; 48 | return (fn) => new Promise((resolve, reject) => { 49 | queue.push(() => Promise.resolve() 50 | .then(fn) 51 | .then(resolve) 52 | .catch(reject) 53 | .finally(() => { 54 | currentlyProcessing--; 55 | 
next(); 56 | })); 57 | next(); 58 | }); 59 | } 60 | // NOTE: we don't expose actual request to make sure there is no way to trigger actual network code 61 | // from wrapped function 62 | const getRequestInfo = (req) => ({ 63 | headers: req.headers, 64 | ok: req.ok, 65 | redirected: req.redirected, 66 | status: req.status, 67 | statusText: req.statusText, 68 | type: req.type, 69 | url: req.url, 70 | }); 71 | /** 72 | * Small wrapper over fetch function 73 | * 74 | * @param fn - The fetch function to be wrapped. 75 | * @param opts - Options to control the behavior of the fetch wrapper. 76 | * @param [opts.isValidRequest] - Function to determine if the fetch request should be cancelled. 77 | * @param [opts.concurrencyLimit] - Limit on the number of concurrent fetch requests. 78 | * @param [opts.timeout] - Default timeout for all requests, can be overriden in request opts 79 | * @param [opts.log] - Callback to log all requests 80 | * @returns Wrapped fetch function 81 | * @example 82 | * ```js 83 | * let ENABLED = true; 84 | * const f = ftch(fetch, { isValidRequest: () => ENABLED }); 85 | * f('http://localhost'); // ok 86 | * ENABLED = false; 87 | * f('http://localhost'); // throws 88 | * ``` 89 | * @example 90 | * ```js 91 | * const f = ftch(fetch, { concurrencyLimit: 1 }); 92 | * const res = await Promise.all([f('http://url1/'), f('http://url2/')]); // these would be processed sequentially 93 | * ``` 94 | * @example 95 | * ```js 96 | * const f = ftch(fetch); 97 | * const res = await f('http://url/', { timeout: 1000 }); // throws if request takes more than one second 98 | * ``` 99 | * @example 100 | * ```js 101 | * const f = ftch(fetch, { timeout: 1000 }); // default timeout for all requests 102 | * const res = await f('http://url/'); // throws if request takes more than one second 103 | * ``` 104 | * @example 105 | * ```js 106 | * const f = ftch(fetch); 107 | * const res = await f('https://user:pwd@httpbin.org/basic-auth/user/pwd'); // basic auth 108 | * ``` 109 | * 
@example 110 | * ```js 111 | * const f = ftch(fetch, { log: (url, opts)=>console.log('NET', url, opts) }) 112 | * f('http://url/'); // will print request information 113 | * ``` 114 | */ 115 | export function ftch(fetchFunction, opts = {}) { 116 | const ks = opts.isValidRequest || opts.killswitch; 117 | if (ks && typeof ks !== 'function') 118 | throw new Error('opts.isValidRequest must be a function'); 119 | const noNetwork = (url) => ks && !ks(url); 120 | const wrappedFetch = async (url, reqOpts = {}) => { 121 | const abort = new AbortController(); 122 | let timeout = undefined; 123 | if (opts.timeout !== undefined || reqOpts.timeout !== undefined) { 124 | const ms = reqOpts.timeout !== undefined ? reqOpts.timeout : opts.timeout; 125 | timeout = setTimeout(() => abort.abort(), ms); 126 | } 127 | const headers = new Headers(); // We cannot re-use object from user since we may modify it 128 | const parsed = new URL(url); 129 | if (parsed.username) { 130 | const auth = btoa(`${parsed.username}:${parsed.password}`); 131 | headers.set('Authorization', `Basic ${auth}`); 132 | parsed.username = ''; 133 | parsed.password = ''; 134 | url = '' + parsed; 135 | } 136 | if (reqOpts.headers) { 137 | const h = reqOpts.headers instanceof Headers ? 
reqOpts.headers : new Headers(reqOpts.headers); 138 | h.forEach((v, k) => headers.set(k, v)); 139 | } 140 | if (noNetwork(url)) 141 | throw new Error('network disabled'); 142 | if (opts.log) 143 | opts.log(url, reqOpts); 144 | const res = await fetchFunction(url, { 145 | referrerPolicy: 'no-referrer', // avoid sending referrer by default 146 | ...reqOpts, 147 | headers, 148 | signal: abort.signal, 149 | }); 150 | if (noNetwork(url)) { 151 | abort.abort('network disabled'); 152 | throw new Error('network disabled'); 153 | } 154 | const body = new Uint8Array(await res.arrayBuffer()); 155 | if (timeout !== undefined) 156 | clearTimeout(timeout); 157 | return { 158 | ...getRequestInfo(res), 159 | // NOTE: this disables streaming parser and fetches whole body on request (instead of headers only as done in fetch) 160 | // But this allows to intercept and disable request if killswitch enabled. Also required for concurrency limit, 161 | // since actual request is not finished 162 | json: async () => JSON.parse(new TextDecoder().decode(body)), 163 | text: async () => new TextDecoder().decode(body), 164 | arrayBuffer: async () => body.buffer, 165 | }; 166 | }; 167 | if (opts.concurrencyLimit !== undefined) { 168 | const curLimit = limit(opts.concurrencyLimit); 169 | return (url, reqOpts) => curLimit(() => wrappedFetch(url, reqOpts)); 170 | } 171 | return wrappedFetch; 172 | } 173 | export class RpcError extends Error { 174 | code; 175 | constructor(error) { 176 | super(`FetchProvider(${error.code}): ${error.message || error}`); 177 | this.code = error.code; 178 | this.name = 'RpcError'; 179 | } 180 | } 181 | /** 182 | * Small utility class for Jsonrpc 183 | * @param fetchFunction - The fetch function 184 | * @param url - The RPC server url 185 | * @param opts - Options to control the behavior of RPC client 186 | * @param [opts.headers] - additional headers to send with requests 187 | * @param [opts.batchSize] - batch parallel requests up to this value into single request 188 
| * @example 189 | * ```js 190 | * const rpc = new JsonrpcProvider(fetch, 'http://rpc_node/', { headers: {}, batchSize: 20 }); 191 | * const res = await rpc.call('method', 'arg0', 'arg1'); 192 | * const res2 = await rpc.callNamed('method', {arg0: '0', arg1: '1'}); // named arguments 193 | * ``` 194 | */ 195 | export class JsonrpcProvider { 196 | batchSize; 197 | headers; 198 | queue = []; 199 | fetchFunction; 200 | rpcUrl; 201 | constructor(fetchFunction, rpcUrl, options = {}) { 202 | if (typeof fetchFunction !== 'function') 203 | throw new Error('fetchFunction is required'); 204 | if (typeof rpcUrl !== 'string') 205 | throw new Error('rpcUrl is required'); 206 | this.fetchFunction = fetchFunction; 207 | this.rpcUrl = rpcUrl; 208 | this.batchSize = options.batchSize === undefined ? 1 : options.batchSize; 209 | this.headers = options.headers || {}; 210 | if (typeof this.headers !== 'object') 211 | throw new Error('invalid headers: expected object'); 212 | } 213 | async fetchJson(body) { 214 | const res = await this.fetchFunction(this.rpcUrl, { 215 | method: 'POST', 216 | headers: { 'Content-Type': 'application/json', ...this.headers }, 217 | body: JSON.stringify(body), 218 | }); 219 | return await res.json(); 220 | } 221 | jsonError(error) { 222 | return new RpcError(error); 223 | } 224 | async batchProcess() { 225 | await nextTick(); // this allows to collect as much requests as we can in single tick 226 | const curr = this.queue.splice(0, this.batchSize); 227 | if (!curr.length) 228 | return; 229 | const json = await this.fetchJson(curr.map((i, j) => ({ 230 | jsonrpc: '2.0', 231 | id: j, 232 | method: i.method, 233 | params: i.params, 234 | }))); 235 | if (!Array.isArray(json)) { 236 | const hasMsg = json.code && json.message; 237 | curr.forEach((req, index) => { 238 | const err = hasMsg 239 | ? 
this.jsonError(json) 240 | : new Error('invalid response in batch request ' + index); 241 | req.reject(err); 242 | }); 243 | return; 244 | } 245 | const processed = new Set(); 246 | for (const res of json) { 247 | // Server sent broken ids. We cannot throw error here, since we will have unresolved promises 248 | // Also, this will break app state. 249 | if (!Number.isSafeInteger(res.id) || res.id < 0 || res.id >= curr.length) 250 | continue; 251 | if (processed.has(res.id)) 252 | continue; // multiple responses for same id 253 | const { reject, resolve } = curr[res.id]; 254 | processed.add(res.id); 255 | if (res && res.error) 256 | reject(this.jsonError(res.error)); 257 | else 258 | resolve(res.result); 259 | } 260 | for (let i = 0; i < curr.length; i++) { 261 | if (!processed.has(i)) 262 | curr[i].reject(new Error(`response missing in batch request ` + i)); 263 | } 264 | } 265 | rpcBatch(method, params) { 266 | return new Promise((resolve, reject) => { 267 | this.queue.push({ method, params, resolve, reject }); 268 | this.batchProcess(); // this processed in parallel 269 | }); 270 | } 271 | async rpc(method, params) { 272 | if (typeof method !== 'string') 273 | throw new Error('rpc method name must be a string'); 274 | if (this.batchSize > 1) 275 | return this.rpcBatch(method, params); 276 | const json = await this.fetchJson({ 277 | jsonrpc: '2.0', 278 | id: 0, 279 | method, 280 | params, 281 | }); 282 | if (json && json.error) 283 | throw this.jsonError(json.error); 284 | return json.result; 285 | } 286 | call(method, ...args) { 287 | return this.rpc(method, args); 288 | } 289 | callNamed(method, params) { 290 | return this.rpc(method, params); 291 | } 292 | } 293 | /** 294 | * Batched JSON-RPC functionality. 
295 | * @example 296 | ```js 297 | const rpc = jsonrpc(fetch, 'http://rpc_node/', { 298 | headers: {}, 299 | batchSize: 20, 300 | }); 301 | const res = await rpc.call('method', 'arg0', 'arg1'); 302 | const res2 = await rpc.callNamed('method', { arg0: '0', arg1: '1' }); // named arguments 303 | ``` 304 | */ 305 | export function jsonrpc(fetchFunction, rpcUrl, options = {}) { 306 | return new JsonrpcProvider(fetchFunction, rpcUrl, options); 307 | } 308 | const defaultGetKey = (url, opt) => JSON.stringify({ url, opt }); 309 | function normalizeHeader(header) { 310 | return header 311 | .split('-') 312 | .map((i) => i.charAt(0).toUpperCase() + i.slice(1).toLowerCase()) 313 | .join('-'); 314 | } 315 | const getKey = (url, opts, fn = defaultGetKey) => { 316 | let headers = opts.headers || {}; 317 | if (headers instanceof Headers) { 318 | const tmp = {}; 319 | // Headers is lowercase 320 | headers.forEach((v, k) => { 321 | tmp[normalizeHeader(k)] = v; 322 | }); 323 | headers = tmp; 324 | } 325 | return fn(url, { method: opts.method, headers, body: opts.body }); 326 | }; 327 | /** 328 | * Log & replay network requests without actually calling network code. 
329 | * @param fetchFunction 330 | * @param logs - captured logs (JSON.parse(fetchReplay(...).export())) 331 | * @param opts 332 | * @param [opts.offline] - Offline mode, throws on non-captured requests 333 | * @param [opts.getKey] - Optional function to modify key information for capture/replay of requests 334 | * @example 335 | * ```js 336 | * // Capture logs 337 | * const ftch = ftch(fetch); 338 | * const replayCapture = replayable(ftch); // wraps fetch 339 | * await replayCapture('http://url/1'); 340 | * const logs = replayCapture.export(); // Exports logs 341 | * ``` 342 | * @example 343 | * ```js 344 | * // Replay logs 345 | * const replayTest = replayable(ftch, JSON.parse(logs)); 346 | * await replayTest('http://url/1'); // cached 347 | * await replayTest('http://url/2'); // real network 348 | * ``` 349 | * @example 350 | * ```js 351 | * // Offline mode 352 | * const replayTestOffline = replayable(ftch, JSON.parse(logs), { offline: true }); 353 | * await replayTest('http://url/1'); // cached 354 | * await replayTest('http://url/2'); // throws! 355 | * ``` 356 | * @example 357 | * ```js 358 | * // Custom log key function 359 | * const getKey = (url, opt) => JSON.stringify({ url: 'https://NODE_URL/', opt }); // use same url for any request 360 | * const replayCapture = replayable(ftch, {}, { getKey }); 361 | * ``` 362 | */ 363 | export function replayable(fetchFunction, logs = {}, opts = {}) { 364 | const accessed = new Set(); 365 | const wrapped = async (url, reqOpts) => { 366 | const key = getKey(url, reqOpts, opts.getKey); 367 | accessed.add(key); 368 | if (!logs[key]) { 369 | if (opts.offline) 370 | throw new Error(`fetchReplay: unknown request=${key}`); 371 | const req = await fetchFunction(url, reqOpts); 372 | // TODO: save this too? 
373 | const info = getRequestInfo(req); 374 | return { 375 | ...info, 376 | json: async () => { 377 | const json = await req.json(); 378 | logs[key] = JSON.stringify(json); 379 | return json; 380 | }, 381 | text: async () => (logs[key] = await req.text()), 382 | arrayBuffer: async () => { 383 | const buffer = await req.arrayBuffer(); 384 | logs[key] = new TextDecoder().decode(new Uint8Array(buffer)); 385 | return buffer; 386 | }, 387 | }; 388 | } 389 | return { 390 | // Some default values (we don't store this info for now) 391 | headers: new Headers(), 392 | ok: true, 393 | redirected: false, 394 | status: 200, 395 | statusText: 'OK', 396 | type: 'basic', 397 | url: url, 398 | text: async () => logs[key], 399 | json: async () => JSON.parse(logs[key]), 400 | arrayBuffer: async () => new TextEncoder().encode(logs[key]).buffer, 401 | }; 402 | }; 403 | wrapped.logs = logs; 404 | wrapped.accessed = accessed; 405 | wrapped.export = () => JSON.stringify(Object.fromEntries(Object.entries(logs).filter(([k, _]) => accessed.has(k)))); 406 | return wrapped; 407 | } 408 | /** Internal methods for test purposes only. */ 409 | export const _TEST = { 410 | limit, 411 | }; 412 | //# sourceMappingURL=index.js.map -------------------------------------------------------------------------------- /index.ts: -------------------------------------------------------------------------------- 1 | /** 2 | * Wrappers for [built-in fetch()](https://developer.mozilla.org/en-US/docs/Web/API/fetch) enabling 3 | * killswitch, logging, concurrency limit and other features. fetch is great, however, its usage in secure environments is complicated. The library makes it simple. 
4 | * @module 5 | * @example 6 | ```js 7 | import { ftch, jsonrpc, replayable } from 'micro-ftch'; 8 | 9 | let enabled = false; 10 | const net = ftch(fetch, { 11 | isValidRequest: () => enabled, 12 | log: (url, options) => console.log(url, options), 13 | timeout: 5000, 14 | concurrencyLimit: 10, 15 | }); 16 | const res = await net('https://example.com'); 17 | 18 | // Composable 19 | const rpc = jsonrpc(net, 'http://rpc_node/', { 20 | headers: {}, 21 | batchSize: 20, 22 | }); 23 | const res1 = await rpc.call('method', 'arg0', 'arg1'); 24 | const res2 = await rpc.callNamed('method', { arg0: '0', arg1: '1' }); // named arguments 25 | const testRpc = replayable(rpc); 26 | // Basic auth auto-parsing 27 | await net('https://user:pwd@httpbin.org/basic-auth/user/pwd'); 28 | ``` 29 | */ 30 | 31 | // Utils 32 | // Awaiting for promise is equal to node nextTick 33 | const nextTick = async () => {}; 34 | // Small internal primitive to limit concurrency 35 | function limit(concurrencyLimit: number): (fn: () => Promise) => Promise { 36 | let currentlyProcessing = 0; 37 | const queue: ((value?: unknown) => void)[] = []; 38 | const next = () => { 39 | if (!queue.length) return; 40 | if (currentlyProcessing >= concurrencyLimit) return; 41 | currentlyProcessing++; 42 | const first = queue.shift(); 43 | if (!first) throw new Error('empty queue'); // should not happen 44 | first(); 45 | }; 46 | return (fn: () => Promise): Promise => 47 | new Promise((resolve, reject) => { 48 | queue.push(() => 49 | Promise.resolve() 50 | .then(fn) 51 | .then(resolve) 52 | .catch(reject) 53 | .finally(() => { 54 | currentlyProcessing--; 55 | next(); 56 | }) 57 | ); 58 | next(); 59 | }); 60 | } 61 | 62 | /** Arguments for built-in fetch, with added timeout method. */ 63 | export type FetchOpts = RequestInit & { timeout?: number }; 64 | 65 | /** Built-in fetch, or function conforming to its interface. 
*/ 66 | export type FetchFn = ( 67 | url: string, 68 | opts?: FetchOpts 69 | ) => Promise<{ 70 | headers: Headers; 71 | ok: boolean; 72 | redirected: boolean; 73 | status: number; 74 | statusText: string; 75 | type: ResponseType; 76 | url: string; 77 | json: () => Promise; 78 | text: () => Promise; 79 | arrayBuffer: () => Promise; 80 | }>; 81 | 82 | /** Options for `ftch`. isValidRequest can disable fetching, while log will receive all requests. */ 83 | export type FtchOpts = { 84 | isValidRequest?: (url?: string) => boolean; 85 | killswitch?: (url?: string) => boolean; 86 | concurrencyLimit?: number; 87 | timeout?: number; 88 | log?: (url: string, opts: FetchOpts) => void; 89 | }; 90 | 91 | type UnPromise = T extends Promise ? U : T; 92 | // NOTE: we don't expose actual request to make sure there is no way to trigger actual network code 93 | // from wrapped function 94 | const getRequestInfo = (req: UnPromise>) => ({ 95 | headers: req.headers, 96 | ok: req.ok, 97 | redirected: req.redirected, 98 | status: req.status, 99 | statusText: req.statusText, 100 | type: req.type, 101 | url: req.url, 102 | }); 103 | 104 | /** 105 | * Small wrapper over fetch function 106 | * 107 | * @param fn - The fetch function to be wrapped. 108 | * @param opts - Options to control the behavior of the fetch wrapper. 109 | * @param [opts.isValidRequest] - Function to determine if the fetch request should be cancelled. 110 | * @param [opts.concurrencyLimit] - Limit on the number of concurrent fetch requests. 
111 | * @param [opts.timeout] - Default timeout for all requests, can be overriden in request opts 112 | * @param [opts.log] - Callback to log all requests 113 | * @returns Wrapped fetch function 114 | * @example 115 | * ```js 116 | * let ENABLED = true; 117 | * const f = ftch(fetch, { isValidRequest: () => ENABLED }); 118 | * f('http://localhost'); // ok 119 | * ENABLED = false; 120 | * f('http://localhost'); // throws 121 | * ``` 122 | * @example 123 | * ```js 124 | * const f = ftch(fetch, { concurrencyLimit: 1 }); 125 | * const res = await Promise.all([f('http://url1/'), f('http://url2/')]); // these would be processed sequentially 126 | * ``` 127 | * @example 128 | * ```js 129 | * const f = ftch(fetch); 130 | * const res = await f('http://url/', { timeout: 1000 }); // throws if request takes more than one second 131 | * ``` 132 | * @example 133 | * ```js 134 | * const f = ftch(fetch, { timeout: 1000 }); // default timeout for all requests 135 | * const res = await f('http://url/'); // throws if request takes more than one second 136 | * ``` 137 | * @example 138 | * ```js 139 | * const f = ftch(fetch); 140 | * const res = await f('https://user:pwd@httpbin.org/basic-auth/user/pwd'); // basic auth 141 | * ``` 142 | * @example 143 | * ```js 144 | * const f = ftch(fetch, { log: (url, opts)=>console.log('NET', url, opts) }) 145 | * f('http://url/'); // will print request information 146 | * ``` 147 | */ 148 | export function ftch(fetchFunction: FetchFn, opts: FtchOpts = {}): FetchFn { 149 | const ks = opts.isValidRequest || opts.killswitch; 150 | if (ks && typeof ks !== 'function') throw new Error('opts.isValidRequest must be a function'); 151 | const noNetwork = (url: string) => ks && !ks(url); 152 | const wrappedFetch: FetchFn = async (url, reqOpts = {}) => { 153 | const abort = new AbortController(); 154 | let timeout = undefined; 155 | if (opts.timeout !== undefined || reqOpts.timeout !== undefined) { 156 | const ms = reqOpts.timeout !== undefined ? 
reqOpts.timeout : opts.timeout; 157 | timeout = setTimeout(() => abort.abort(), ms); 158 | } 159 | const headers = new Headers(); // We cannot re-use object from user since we may modify it 160 | const parsed = new URL(url); 161 | if (parsed.username) { 162 | const auth = btoa(`${parsed.username}:${parsed.password}`); 163 | headers.set('Authorization', `Basic ${auth}`); 164 | parsed.username = ''; 165 | parsed.password = ''; 166 | url = '' + parsed; 167 | } 168 | if (reqOpts.headers) { 169 | const h = reqOpts.headers instanceof Headers ? reqOpts.headers : new Headers(reqOpts.headers); 170 | h.forEach((v, k) => headers.set(k, v)); 171 | } 172 | if (noNetwork(url)) throw new Error('network disabled'); 173 | if (opts.log) opts.log(url, reqOpts); 174 | const res = await fetchFunction(url, { 175 | referrerPolicy: 'no-referrer', // avoid sending referrer by default 176 | ...reqOpts, 177 | headers, 178 | signal: abort.signal, 179 | }); 180 | if (noNetwork(url)) { 181 | abort.abort('network disabled'); 182 | throw new Error('network disabled'); 183 | } 184 | const body = new Uint8Array(await res.arrayBuffer()); 185 | if (timeout !== undefined) clearTimeout(timeout); 186 | return { 187 | ...getRequestInfo(res), 188 | // NOTE: this disables streaming parser and fetches whole body on request (instead of headers only as done in fetch) 189 | // But this allows to intercept and disable request if killswitch enabled. 
Also required for concurrency limit, 190 | // since actual request is not finished 191 | json: async () => JSON.parse(new TextDecoder().decode(body)), 192 | text: async () => new TextDecoder().decode(body), 193 | arrayBuffer: async () => body.buffer, 194 | }; 195 | }; 196 | if (opts.concurrencyLimit !== undefined) { 197 | const curLimit = limit(opts.concurrencyLimit!); 198 | return (url, reqOpts) => curLimit(() => wrappedFetch(url, reqOpts)); 199 | } 200 | return wrappedFetch; 201 | } 202 | 203 | // Jsonrpc 204 | type PromiseCb = { 205 | resolve: (value: T | PromiseLike) => void; 206 | reject: (reason?: any) => void; 207 | }; 208 | 209 | export type JsonrpcInterface = { 210 | call: (method: string, ...args: any[]) => Promise; 211 | callNamed: (method: string, args: Record) => Promise; 212 | }; 213 | 214 | type NetworkOpts = { 215 | batchSize?: number; 216 | headers?: Record; 217 | }; 218 | 219 | type RpcParams = any[] | Record; 220 | type RpcErrorResponse = { code: number; message: string }; 221 | 222 | export class RpcError extends Error { 223 | readonly code: number; 224 | constructor(error: RpcErrorResponse) { 225 | super(`FetchProvider(${error.code}): ${error.message || error}`); 226 | this.code = error.code; 227 | this.name = 'RpcError'; 228 | } 229 | } 230 | 231 | /** 232 | * Small utility class for Jsonrpc 233 | * @param fetchFunction - The fetch function 234 | * @param url - The RPC server url 235 | * @param opts - Options to control the behavior of RPC client 236 | * @param [opts.headers] - additional headers to send with requests 237 | * @param [opts.batchSize] - batch parallel requests up to this value into single request 238 | * @example 239 | * ```js 240 | * const rpc = new JsonrpcProvider(fetch, 'http://rpc_node/', { headers: {}, batchSize: 20 }); 241 | * const res = await rpc.call('method', 'arg0', 'arg1'); 242 | * const res2 = await rpc.callNamed('method', {arg0: '0', arg1: '1'}); // named arguments 243 | * ``` 244 | */ 245 | export class 
JsonrpcProvider implements JsonrpcInterface { 246 | private batchSize: number; 247 | private headers: Record; 248 | private queue: ({ method: string; params: RpcParams } & PromiseCb)[] = []; 249 | private fetchFunction: FetchFn; 250 | readonly rpcUrl: string; 251 | constructor(fetchFunction: FetchFn, rpcUrl: string, options: NetworkOpts = {}) { 252 | if (typeof fetchFunction !== 'function') throw new Error('fetchFunction is required'); 253 | if (typeof rpcUrl !== 'string') throw new Error('rpcUrl is required'); 254 | this.fetchFunction = fetchFunction; 255 | this.rpcUrl = rpcUrl; 256 | this.batchSize = options.batchSize === undefined ? 1 : options.batchSize; 257 | this.headers = options.headers || {}; 258 | if (typeof this.headers !== 'object') throw new Error('invalid headers: expected object'); 259 | } 260 | private async fetchJson(body: unknown) { 261 | const res = await this.fetchFunction(this.rpcUrl, { 262 | method: 'POST', 263 | headers: { 'Content-Type': 'application/json', ...this.headers }, 264 | body: JSON.stringify(body), 265 | }); 266 | return await res.json(); 267 | } 268 | private jsonError(error: RpcErrorResponse) { 269 | return new RpcError(error); 270 | } 271 | private async batchProcess() { 272 | await nextTick(); // this allows to collect as much requests as we can in single tick 273 | const curr = this.queue.splice(0, this.batchSize); 274 | if (!curr.length) return; 275 | const json = await this.fetchJson( 276 | curr.map((i, j) => ({ 277 | jsonrpc: '2.0', 278 | id: j, 279 | method: i.method, 280 | params: i.params, 281 | })) 282 | ); 283 | if (!Array.isArray(json)) { 284 | const hasMsg = json.code && json.message; 285 | curr.forEach((req, index) => { 286 | const err = hasMsg 287 | ? this.jsonError(json) 288 | : new Error('invalid response in batch request ' + index); 289 | req.reject(err); 290 | }); 291 | return; 292 | } 293 | const processed = new Set(); 294 | for (const res of json) { 295 | // Server sent broken ids. 
We cannot throw error here, since we will have unresolved promises 296 | // Also, this will break app state. 297 | if (!Number.isSafeInteger(res.id) || res.id < 0 || res.id >= curr.length) continue; 298 | if (processed.has(res.id)) continue; // multiple responses for same id 299 | const { reject, resolve } = curr[res.id]; 300 | processed.add(res.id); 301 | if (res && res.error) reject(this.jsonError(res.error)); 302 | else resolve(res.result); 303 | } 304 | for (let i = 0; i < curr.length; i++) { 305 | if (!processed.has(i)) curr[i].reject(new Error(`response missing in batch request ` + i)); 306 | } 307 | } 308 | private rpcBatch(method: string, params: RpcParams) { 309 | return new Promise((resolve, reject) => { 310 | this.queue.push({ method, params, resolve, reject }); 311 | this.batchProcess(); // this processed in parallel 312 | }); 313 | } 314 | private async rpc(method: string, params: RpcParams): Promise { 315 | if (typeof method !== 'string') throw new Error('rpc method name must be a string'); 316 | if (this.batchSize > 1) return this.rpcBatch(method, params); 317 | const json = await this.fetchJson({ 318 | jsonrpc: '2.0', 319 | id: 0, 320 | method, 321 | params, 322 | }); 323 | if (json && json.error) throw this.jsonError(json.error); 324 | return json.result; 325 | } 326 | call(method: string, ...args: any[]): Promise { 327 | return this.rpc(method, args); 328 | } 329 | callNamed(method: string, params: Record): Promise { 330 | return this.rpc(method, params); 331 | } 332 | } 333 | 334 | /** 335 | * Batched JSON-RPC functionality. 
 * @example
 ```js
 const rpc = jsonrpc(fetch, 'http://rpc_node/', {
   headers: {},
   batchSize: 20,
 });
 const res = await rpc.call('method', 'arg0', 'arg1');
 const res2 = await rpc.callNamed('method', { arg0: '0', arg1: '1' }); // named arguments
 ```
 */
export function jsonrpc(
  fetchFunction: FetchFn,
  rpcUrl: string,
  options: NetworkOpts = {}
): JsonrpcProvider {
  // Thin functional wrapper around the class-based provider above.
  return new JsonrpcProvider(fetchFunction, rpcUrl, options);
}

// Maps a request (url + options) to the string key it is logged/replayed under.
type GetKeyFn = (url: string, opt: FetchOpts) => string;
// Default key: the full request serialized as JSON.
const defaultGetKey: GetKeyFn = (url, opt) => JSON.stringify({ url, opt });

/** Options for replayable() */
export type ReplayOpts = {
  offline?: boolean; // throw on non-logged requests
  getKey?: GetKeyFn;
};

/** replayable() return function, additional methods */
// NOTE(review): generic parameters (e.g. `Record<string, string>`, `Set<string>`)
// appear stripped from this dump by text extraction — restore from upstream.
export type ReplayFn = FetchFn & {
  logs: Record;
  accessed: Set;
  export: () => string;
};

// Canonicalizes a header name: each dash-separated part becomes Capitalized
// (e.g. 'content-type' -> 'Content-Type'), so Headers-based and object-based
// requests produce identical replay keys.
function normalizeHeader(header: string): string {
  return header
    .split('-')
    .map((i) => i.charAt(0).toUpperCase() + i.slice(1).toLowerCase())
    .join('-');
}

// Builds the replay key for a request, normalizing a Headers instance into a
// plain object first so the key does not depend on the headers' container type.
const getKey = (url: string, opts: FetchOpts, fn = defaultGetKey) => {
  let headers = opts.headers || {};
  if (headers instanceof Headers) {
    const tmp: Record = {};
    // Headers is lowercase
    headers.forEach((v, k) => {
      tmp[normalizeHeader(k)] = v;
    });
    headers = tmp;
  }
  // Only method/headers/body participate in the key; other fetch options are ignored.
  return fn(url, { method: opts.method, headers, body: opts.body });
};

/**
 * Log & replay network requests without actually calling network code.
392 | * @param fetchFunction 393 | * @param logs - captured logs (JSON.parse(fetchReplay(...).export())) 394 | * @param opts 395 | * @param [opts.offline] - Offline mode, throws on non-captured requests 396 | * @param [opts.getKey] - Optional function to modify key information for capture/replay of requests 397 | * @example 398 | * ```js 399 | * // Capture logs 400 | * const ftch = ftch(fetch); 401 | * const replayCapture = replayable(ftch); // wraps fetch 402 | * await replayCapture('http://url/1'); 403 | * const logs = replayCapture.export(); // Exports logs 404 | * ``` 405 | * @example 406 | * ```js 407 | * // Replay logs 408 | * const replayTest = replayable(ftch, JSON.parse(logs)); 409 | * await replayTest('http://url/1'); // cached 410 | * await replayTest('http://url/2'); // real network 411 | * ``` 412 | * @example 413 | * ```js 414 | * // Offline mode 415 | * const replayTestOffline = replayable(ftch, JSON.parse(logs), { offline: true }); 416 | * await replayTest('http://url/1'); // cached 417 | * await replayTest('http://url/2'); // throws! 418 | * ``` 419 | * @example 420 | * ```js 421 | * // Custom log key function 422 | * const getKey = (url, opt) => JSON.stringify({ url: 'https://NODE_URL/', opt }); // use same url for any request 423 | * const replayCapture = replayable(ftch, {}, { getKey }); 424 | * ``` 425 | */ 426 | export function replayable( 427 | fetchFunction: FetchFn, 428 | logs: Record = {}, 429 | opts: ReplayOpts = {} 430 | ): ReplayFn { 431 | const accessed: Set = new Set(); 432 | const wrapped = async (url: string, reqOpts: any) => { 433 | const key = getKey(url, reqOpts, opts.getKey); 434 | accessed.add(key); 435 | if (!logs[key]) { 436 | if (opts.offline) throw new Error(`fetchReplay: unknown request=${key}`); 437 | const req = await fetchFunction(url, reqOpts); 438 | // TODO: save this too? 
439 | const info = getRequestInfo(req); 440 | return { 441 | ...info, 442 | json: async () => { 443 | const json = await req.json(); 444 | logs[key] = JSON.stringify(json); 445 | return json; 446 | }, 447 | text: async () => (logs[key] = await req.text()), 448 | arrayBuffer: async () => { 449 | const buffer = await req.arrayBuffer(); 450 | logs[key] = new TextDecoder().decode(new Uint8Array(buffer)); 451 | return buffer; 452 | }, 453 | }; 454 | } 455 | return { 456 | // Some default values (we don't store this info for now) 457 | headers: new Headers(), 458 | ok: true, 459 | redirected: false, 460 | status: 200, 461 | statusText: 'OK', 462 | type: 'basic' as ResponseType, 463 | url: url, 464 | text: async () => logs[key], 465 | json: async () => JSON.parse(logs[key]), 466 | arrayBuffer: async () => new TextEncoder().encode(logs[key]).buffer, 467 | }; 468 | }; 469 | wrapped.logs = logs; 470 | wrapped.accessed = accessed; 471 | wrapped.export = () => 472 | JSON.stringify(Object.fromEntries(Object.entries(logs).filter(([k, _]) => accessed.has(k)))); 473 | return wrapped; 474 | } 475 | 476 | /** Internal methods for test purposes only. 
*/ 477 | export const _TEST: { 478 | limit: typeof limit; 479 | } = { 480 | limit, 481 | }; 482 | -------------------------------------------------------------------------------- /jsr.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "@paulmillr/micro-ftch", 3 | "version": "0.5.0", 4 | "exports": "./index.ts", 5 | "publish": { 6 | "include": [ 7 | "index.ts", 8 | "jsr.json", 9 | "LICENSE", 10 | "README.md" 11 | ] 12 | }, 13 | "license": "MIT" 14 | } 15 | -------------------------------------------------------------------------------- /package-lock.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "micro-ftch", 3 | "version": "0.5.0", 4 | "lockfileVersion": 3, 5 | "requires": true, 6 | "packages": { 7 | "": { 8 | "name": "micro-ftch", 9 | "version": "0.5.0", 10 | "license": "MIT", 11 | "devDependencies": { 12 | "@paulmillr/jsbt": "0.4.1", 13 | "@types/node": "22.15.23", 14 | "micro-bmark": "0.4.1", 15 | "micro-should": "0.5.3", 16 | "prettier": "3.5.2", 17 | "typescript": "5.8.3" 18 | }, 19 | "funding": { 20 | "url": "https://paulmillr.com/funding/" 21 | } 22 | }, 23 | "node_modules/@paulmillr/jsbt": { 24 | "version": "0.4.1", 25 | "resolved": "https://registry.npmjs.org/@paulmillr/jsbt/-/jsbt-0.4.1.tgz", 26 | "integrity": "sha512-WomUnhmgUEeIDgeIL8mADU2SsYjeQf7M68OJyj8J2QbfnhJU6BEkwblwV9lq3mQJsPaXOl1gPVwAVZxzEXFCtw==", 27 | "dev": true, 28 | "license": "MIT", 29 | "bin": { 30 | "jsbt": "jsbt.js" 31 | } 32 | }, 33 | "node_modules/@types/node": { 34 | "version": "22.15.23", 35 | "resolved": "https://registry.npmjs.org/@types/node/-/node-22.15.23.tgz", 36 | "integrity": "sha512-7Ec1zaFPF4RJ0eXu1YT/xgiebqwqoJz8rYPDi/O2BcZ++Wpt0Kq9cl0eg6NN6bYbPnR67ZLo7St5Q3UK0SnARw==", 37 | "dev": true, 38 | "license": "MIT", 39 | "dependencies": { 40 | "undici-types": "~6.21.0" 41 | } 42 | }, 43 | "node_modules/micro-bmark": { 44 | "version": "0.4.1", 45 | "resolved": 
"https://registry.npmjs.org/micro-bmark/-/micro-bmark-0.4.1.tgz", 46 | "integrity": "sha512-DOZFW+kL6Y6SsVlPv1qz/vMqqszw+RxbLTPuGDjEhKkHOGdoSwfZC1r6St94kHSn2O9Db9UdYK8H0xw2vK2maw==", 47 | "dev": true, 48 | "license": "MIT" 49 | }, 50 | "node_modules/micro-should": { 51 | "version": "0.5.3", 52 | "resolved": "https://registry.npmjs.org/micro-should/-/micro-should-0.5.3.tgz", 53 | "integrity": "sha512-3gEuTzROE856pZSijMD5NonIrQTEGLdkMKj42S2JCqpXiaqQdoSqEd/mTonelAT0ZNwheY7pA/w6eAotQTXeWQ==", 54 | "dev": true, 55 | "license": "MIT" 56 | }, 57 | "node_modules/prettier": { 58 | "version": "3.5.2", 59 | "resolved": "https://registry.npmjs.org/prettier/-/prettier-3.5.2.tgz", 60 | "integrity": "sha512-lc6npv5PH7hVqozBR7lkBNOGXV9vMwROAPlumdBkX0wTbbzPu/U1hk5yL8p2pt4Xoc+2mkT8t/sow2YrV/M5qg==", 61 | "dev": true, 62 | "license": "MIT", 63 | "bin": { 64 | "prettier": "bin/prettier.cjs" 65 | }, 66 | "engines": { 67 | "node": ">=14" 68 | }, 69 | "funding": { 70 | "url": "https://github.com/prettier/prettier?sponsor=1" 71 | } 72 | }, 73 | "node_modules/typescript": { 74 | "version": "5.8.3", 75 | "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.8.3.tgz", 76 | "integrity": "sha512-p1diW6TqL9L07nNxvRMM7hMMw4c5XOo/1ibL4aAIGmSAt9slTE1Xgw5KWuof2uTOvCg9BY7ZRi+GaF+7sfgPeQ==", 77 | "dev": true, 78 | "license": "Apache-2.0", 79 | "bin": { 80 | "tsc": "bin/tsc", 81 | "tsserver": "bin/tsserver" 82 | }, 83 | "engines": { 84 | "node": ">=14.17" 85 | } 86 | }, 87 | "node_modules/undici-types": { 88 | "version": "6.21.0", 89 | "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-6.21.0.tgz", 90 | "integrity": "sha512-iwDZqg0QAGrg9Rav5H4n0M64c3mkR59cJ6wQp+7C4nI0gsmExaedaYLNO44eT4AtBBwjbTiGPMlt2Md0T9H9JQ==", 91 | "dev": true, 92 | "license": "MIT" 93 | } 94 | } 95 | } 96 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": 
"micro-ftch", 3 | "version": "0.5.0", 4 | "description": "Wrappers for built-in fetch() enabling killswitch, logging, concurrency limit and other features", 5 | "files": [ 6 | "index.js", 7 | "index.js.map", 8 | "index.d.ts", 9 | "index.d.ts.map", 10 | "index.ts" 11 | ], 12 | "devDependencies": { 13 | "@paulmillr/jsbt": "0.4.1", 14 | "@types/node": "22.15.23", 15 | "micro-bmark": "0.4.1", 16 | "micro-should": "0.5.3", 17 | "prettier": "3.5.2", 18 | "typescript": "5.8.3" 19 | }, 20 | "sideEffects": false, 21 | "author": "Paul Miller (https://paulmillr.com)", 22 | "license": "MIT", 23 | "homepage": "https://github.com/paulmillr/micro-ftch", 24 | "repository": { 25 | "type": "git", 26 | "url": "git+https://github.com/paulmillr/micro-ftch.git" 27 | }, 28 | "type": "module", 29 | "main": "./index.js", 30 | "module": "./index.js", 31 | "types": "./index.d.ts", 32 | "scripts": { 33 | "build": "tsc", 34 | "build:release": "npx jsbt esbuild test/build", 35 | "lint": "prettier --check index.ts", 36 | "format": "prettier --write index.ts", 37 | "test": "node --experimental-strip-types --no-warnings test/index.ts", 38 | "test:bun": "bun test/index.ts", 39 | "test:deno": "deno --allow-env --allow-net test/index.ts" 40 | }, 41 | "keywords": [ 42 | "fetch", 43 | "network", 44 | "killswitch", 45 | "concurrent", 46 | "parallel", 47 | "jsonrpc", 48 | "micro", 49 | "replay", 50 | "ftch" 51 | ], 52 | "funding": "https://paulmillr.com/funding/" 53 | } 54 | -------------------------------------------------------------------------------- /test/basic.test.ts: -------------------------------------------------------------------------------- 1 | import { describe, should } from 'micro-should'; 2 | import { deepStrictEqual, rejects } from 'node:assert'; 3 | import { Buffer } from 'node:buffer'; 4 | import { createServer } from 'node:http'; 5 | import * as mftch from '../index.ts'; 6 | 7 | // NOTE: this will send real network requests to httpbin (to verify compat) 8 | const REAL_NETWORK = 
false; 9 | 10 | function httpServer(port, cb) { 11 | const server = createServer(async (req, res) => { 12 | if (req.method !== 'POST' || req.headers['content-type'] !== 'application/json') { 13 | res.writeHead(405); 14 | res.end('Method not allowed'); 15 | return; 16 | } 17 | res.writeHead(200, { 'Content-Type': 'application/json' }); 18 | const buf = []; 19 | for await (const chunk of req) buf.push(chunk); 20 | const body = Buffer.concat(buf).toString('utf8'); 21 | const response = await cb(JSON.parse(body), req.headers); 22 | res.end(JSON.stringify(response)); 23 | }); 24 | server.on('error', (err) => console.log('HTTP ERR', err)); 25 | const stop = () => 26 | new Promise((resolve, reject) => { 27 | server.close(async (err) => { 28 | await sleep(100); // this somehow broken, without it new server will throw ECONNRESET because old server not fully closed. 29 | // also, bun will silently use old server even after stopping, so we use different ports for different tests 30 | if (err) reject(err); 31 | else resolve(); 32 | }); 33 | server.closeAllConnections(); 34 | }); 35 | const url = `http://127.0.0.1:${port}/`; 36 | return new Promise((resolve) => server.listen(port, (t) => resolve({ stop, url }))); 37 | } 38 | 39 | const sleep = (ms) => new Promise((resolve) => setTimeout(resolve, ms)); 40 | 41 | const cleanHeaders = (headers) => { 42 | // these changes between node, bun and deno 43 | const { 44 | 'accept-encoding': _0, 45 | 'sec-fetch-mode': _1, 46 | 'user-agent': _2, 47 | connection: _3, 48 | host: _4, 49 | 'accept-language': _5, 50 | ...rest 51 | } = headers; 52 | return rest; 53 | }; 54 | 55 | describe('Network', () => { 56 | describe('Limit', () => { 57 | const { limit } = mftch._TEST; 58 | const delayed = (value, delay, log) => 59 | new Promise((resolve) => 60 | setTimeout(() => { 61 | log.push(value); 62 | resolve(value); 63 | }, delay) 64 | ); 65 | should('limit(2)', async () => { 66 | const ts = Date.now(); 67 | const limit2 = limit(2); 68 | const log = 
[]; 69 | await Promise.all([ 70 | limit2(() => delayed(1, 100, log)), 71 | limit2(() => delayed(2, 100, log)), 72 | limit2(() => delayed(3, 100, log)), 73 | ]); 74 | deepStrictEqual(Date.now() - ts >= 200, true); 75 | deepStrictEqual(log, [1, 2, 3]); 76 | }); 77 | should('limit(1), order', async () => { 78 | const limit1 = limit(1); 79 | const log = []; 80 | await Promise.all([ 81 | limit1(() => delayed(1, 50, log)), 82 | limit1(() => delayed(2, 100, log)), 83 | limit1(() => delayed(3, 10, log)), 84 | ]); 85 | deepStrictEqual(log, [1, 2, 3]); 86 | }); 87 | should('limit(2), order', async () => { 88 | const limit2 = limit(2); 89 | const log = []; 90 | await Promise.all([ 91 | limit2(() => delayed(1, 50, log)), 92 | limit2(() => delayed(2, 100, log)), 93 | limit2(() => delayed(3, 10, log)), 94 | ]); 95 | deepStrictEqual(log, [1, 3, 2]); 96 | }); 97 | should('limit(3), order', async () => { 98 | const limit3 = limit(3); 99 | const log = []; 100 | await Promise.all([ 101 | limit3(() => delayed(1, 50, log)), 102 | limit3(() => delayed(2, 100, log)), 103 | limit3(() => delayed(3, 10, log)), 104 | ]); 105 | deepStrictEqual(log, [3, 1, 2]); 106 | }); 107 | should('error', async () => { 108 | const limit1 = limit(1); 109 | const log = []; 110 | limit1(() => delayed(1, 10, log)); 111 | const p2 = limit1(() => { 112 | throw new Error('Failure'); // sync error 113 | }); 114 | await rejects(p2); 115 | const p3 = await limit1(() => delayed(2, 20, log)); 116 | deepStrictEqual(p3, 2); 117 | deepStrictEqual(log, [1, 2]); 118 | const p4 = limit1(async () => { 119 | throw new Error('Failure'); // async error 120 | }); 121 | await rejects(p4); 122 | // still processing after error 123 | const p5 = await limit1(() => delayed(3, 10, log)); 124 | deepStrictEqual(p5, 3); 125 | deepStrictEqual(log, [1, 2, 3]); 126 | }); 127 | }); 128 | if (REAL_NETWORK) { 129 | describe('Real network', () => { 130 | should('Basic req', async () => { 131 | const ftch = mftch.ftch(fetch); 132 | const res = 
await ftch('https://httpbin.org/json'); 133 | deepStrictEqual(res.ok, true); 134 | deepStrictEqual(res.redirected, false); 135 | deepStrictEqual(res.status, 200); 136 | deepStrictEqual(res.statusText, 'OK'); 137 | deepStrictEqual(res.type, 'basic'); 138 | deepStrictEqual(res.url, 'https://httpbin.org/json'); 139 | const h = {}; 140 | res.headers.forEach((v, k) => (h[k] = v)); 141 | delete h.date; 142 | deepStrictEqual(h, { 143 | 'access-control-allow-credentials': 'true', 144 | 'access-control-allow-origin': '*', 145 | connection: 'keep-alive', 146 | 'content-length': '429', 147 | 'content-type': 'application/json', 148 | server: 'gunicorn/19.9.0', 149 | }); 150 | deepStrictEqual(await res.json(), { 151 | slideshow: { 152 | author: 'Yours Truly', 153 | date: 'date of publication', 154 | slides: [ 155 | { title: 'Wake up to WonderWidgets!', type: 'all' }, 156 | { 157 | items: ['Why WonderWidgets are great', 'Who buys WonderWidgets'], 158 | title: 'Overview', 159 | type: 'all', 160 | }, 161 | ], 162 | title: 'Sample Slide Show', 163 | }, 164 | }); 165 | }); 166 | should('Headers (class)', async () => { 167 | const ftch = mftch.ftch(fetch); 168 | const reqs = await Promise.all([ 169 | // Class 170 | ftch('https://httpbin.org/headers', { headers: new Headers({ A: 'b' }) }), 171 | fetch('https://httpbin.org/headers', { headers: new Headers({ A: 'b' }) }), 172 | // Array 173 | ftch('https://httpbin.org/headers', { headers: [['A', 'b']] }), 174 | fetch('https://httpbin.org/headers', { headers: [['A', 'b']] }), 175 | // Object 176 | ftch('https://httpbin.org/headers', { headers: { A: 'b' } }), 177 | fetch('https://httpbin.org/headers', { headers: { A: 'b' } }), 178 | ]); 179 | for (const req of reqs) { 180 | deepStrictEqual( 181 | { 182 | ...(await req.json()).headers, 183 | 'X-Amzn-Trace-Id': undefined, 184 | }, 185 | { 186 | A: 'b', 187 | Accept: '*/*', 188 | 'Accept-Encoding': 'br, gzip, deflate', 189 | 'Accept-Language': '*', 190 | Host: 'httpbin.org', 191 | 
'Sec-Fetch-Mode': 'cors', 192 | 'User-Agent': 'node', 193 | 'X-Amzn-Trace-Id': undefined, 194 | } 195 | ); 196 | } 197 | }); 198 | 199 | should('Basic auth', async () => { 200 | const ftch = mftch.ftch(fetch); 201 | const res = await ftch('https://user:pwd@httpbin.org/basic-auth/user/pwd'); 202 | deepStrictEqual(await res.json(), { authenticated: true, user: 'user' }); 203 | const res2 = await ftch('https://httpbin.org/basic-auth/user/pwd'); 204 | deepStrictEqual(res2.status, 401); 205 | deepStrictEqual(res2.statusText, 'UNAUTHORIZED'); 206 | deepStrictEqual(await res2.text(), ''); 207 | }); 208 | }); 209 | } 210 | 211 | should('ftch', async () => { 212 | const serverLog = []; 213 | const { stop, url } = await httpServer(8001, async (r) => { 214 | if (r.sleep) await sleep(r.sleep); 215 | serverLog.push(r.res); 216 | return { res: r.res }; 217 | }); 218 | let ENABLED = true; 219 | const f1 = mftch.ftch(fetch, { 220 | concurrencyLimit: 1, 221 | killswitch: () => ENABLED, 222 | }); 223 | const f2 = mftch.ftch(fetch, { 224 | concurrencyLimit: 2, 225 | killswitch: () => ENABLED, 226 | }); 227 | const f3 = mftch.ftch(fetch, { 228 | concurrencyLimit: 3, 229 | killswitch: () => ENABLED, 230 | }); 231 | const t = async (fn, body, opts = {}) => { 232 | const res = await fn(url, { 233 | method: 'POST', 234 | headers: { 'Content-Type': 'application/json' }, 235 | body: JSON.stringify(body), 236 | ...opts, 237 | }); 238 | return await res.json(); 239 | }; 240 | // Basic 241 | deepStrictEqual(await t(f1, { res: 1 }), { res: 1 }); 242 | // Killswitch 243 | ENABLED = false; 244 | await rejects(() => t(f1, { res: 2 })); 245 | ENABLED = true; 246 | deepStrictEqual(await t(f1, { res: 3 }), { res: 3 }); 247 | deepStrictEqual(serverLog, [1, 3]); 248 | serverLog.splice(0, serverLog.length); 249 | // Concurrency 250 | // limit(1) 251 | const t0 = await Promise.all([ 252 | // All processed sequentially 253 | t(f1, { res: 1, sleep: 50 }), 254 | t(f1, { res: 2, sleep: 100 }), 255 | t(f1, { 
res: 3, sleep: 10 }), 256 | ]); 257 | deepStrictEqual(t0, [{ res: 1 }, { res: 2 }, { res: 3 }]); 258 | deepStrictEqual(serverLog, [1, 2, 3]); 259 | serverLog.splice(0, serverLog.length); 260 | // limit(2) 261 | const t1 = await Promise.all([ 262 | // 1+2 starts [processed: 1, 2] 263 | // 1 done, 3 starts [processed: 2,3] -> push(1) 264 | // 3 done [processed: 2] -> push(3) 265 | // 2 done [processed: none] -> push(2) 266 | t(f2, { res: 1, sleep: 50 }), 267 | t(f2, { res: 2, sleep: 100 }), 268 | t(f2, { res: 3, sleep: 10 }), 269 | ]); 270 | deepStrictEqual(t1, [{ res: 1 }, { res: 2 }, { res: 3 }]); 271 | deepStrictEqual(serverLog, [1, 3, 2]); 272 | serverLog.splice(0, serverLog.length); 273 | // limit(3) 274 | const t2 = await Promise.all([ 275 | // 1+2+3 starts [processed: 1, 2, 3] 276 | // 3 done [processed 1,2] -> push(3) 277 | // 1 done [processed 2] -> push(1) 278 | // 2 done [processed: none] -> push(2) 279 | t(f3, { res: 1, sleep: 50 }), 280 | t(f3, { res: 2, sleep: 100 }), 281 | t(f3, { res: 3, sleep: 10 }), 282 | ]); 283 | deepStrictEqual(t2, [{ res: 1 }, { res: 2 }, { res: 3 }]); 284 | deepStrictEqual(serverLog, [3, 1, 2]); 285 | serverLog.splice(0, serverLog.length); 286 | // Timeout: less timeout 287 | deepStrictEqual(await t(f1, { res: 1, sleep: 10 }, { timeout: 50 }), { res: 1 }); 288 | deepStrictEqual(serverLog, [1]); 289 | // Timeout: bigger than timeout 290 | await rejects(() => t(f1, { res: 2, sleep: 50 }, { timeout: 50 })); 291 | await sleep(10); // make sure request finished on server side 292 | deepStrictEqual(serverLog, [1, 2]); 293 | // Timeout: after long request with concurrency 294 | const t3 = await Promise.all([ 295 | t(f1, { res: 3, sleep: 50 }), 296 | // if timeout timer starts before enters queue this would crash since previous request takes 50ms 297 | t(f1, { res: 4, sleep: 1 }, { timeout: 10 }), 298 | ]); 299 | deepStrictEqual(t3, [{ res: 3 }, { res: 4 }]); 300 | deepStrictEqual(serverLog, [1, 2, 3, 4]); 301 | // Timeout: default 302 
| const f1_t = mftch.ftch(fetch, { 303 | concurrencyLimit: 1, 304 | killswitch: () => ENABLED, 305 | timeout: 10, 306 | }); 307 | deepStrictEqual(await t(f1_t, { res: 5, sleep: 5 }), { res: 5 }); 308 | await rejects(() => t(f1_t, { res: 6, sleep: 11 })); 309 | // override timeout 310 | deepStrictEqual(await t(f1_t, { res: 7, sleep: 11 }, { timeout: 100 }), { res: 7 }); 311 | deepStrictEqual(serverLog, [1, 2, 3, 4, 5, 6, 7]); 312 | serverLog.splice(0, serverLog.length); 313 | // Logs 314 | const log = []; 315 | const f1_l = mftch.ftch(fetch, { 316 | concurrencyLimit: 1, 317 | log: (url, opts) => log.push({ url, opts }), 318 | }); 319 | deepStrictEqual(await t(f1_l, { res: 1, sleep: 10 }), { res: 1 }); 320 | deepStrictEqual(serverLog, [1]); 321 | deepStrictEqual(log, [ 322 | { 323 | url: 'http://127.0.0.1:8001/', 324 | opts: { 325 | method: 'POST', 326 | headers: { 'Content-Type': 'application/json' }, 327 | body: '{"res":1,"sleep":10}', 328 | }, 329 | }, 330 | ]); 331 | 332 | serverLog.splice(0, serverLog.length); 333 | await stop(); 334 | }); 335 | should('jsonrpc', async () => { 336 | const serverLog = []; 337 | const { stop, url } = await httpServer(8002, async (r, headers) => { 338 | serverLog.push({ r, headers: cleanHeaders(headers) }); 339 | if (Array.isArray(r)) 340 | return r.map((i) => (Array.isArray(i.params) ? i.params[0] : i.params.res)); 341 | return Array.isArray(r.params) ? 
r.params[0] : r.params.res; 342 | }); 343 | const f = mftch.ftch(fetch); 344 | const rpc = mftch.jsonrpc(f, url, { 345 | headers: { Test: '1' }, 346 | }); 347 | // Basic 348 | deepStrictEqual( 349 | await rpc.call('tmp', { jsonrpc: '2.0', id: 0, result: 1 }, 1, true, [1, 2, 3]), 350 | 1 351 | ); 352 | deepStrictEqual( 353 | await rpc.callNamed('tmp', { res: { jsonrpc: '2.0', id: 0, result: 1 }, A: 1 }), 354 | 1 355 | ); 356 | await rejects(() => 357 | rpc.call('tmp', { jsonrpc: '2.0', id: 0, error: { code: 0, message: 'test' } }) 358 | ); 359 | deepStrictEqual(serverLog, [ 360 | { 361 | r: { 362 | jsonrpc: '2.0', 363 | id: 0, 364 | method: 'tmp', 365 | params: [{ jsonrpc: '2.0', id: 0, result: 1 }, 1, true, [1, 2, 3]], 366 | }, 367 | headers: { 368 | 'content-type': 'application/json', 369 | test: '1', 370 | accept: '*/*', 371 | 'content-length': '101', 372 | }, 373 | }, 374 | { 375 | r: { 376 | jsonrpc: '2.0', 377 | id: 0, 378 | method: 'tmp', 379 | params: { res: { jsonrpc: '2.0', id: 0, result: 1 }, A: 1 }, 380 | }, 381 | headers: { 382 | 'content-type': 'application/json', 383 | test: '1', 384 | accept: '*/*', 385 | 'content-length': '98', 386 | }, 387 | }, 388 | { 389 | r: { 390 | jsonrpc: '2.0', 391 | id: 0, 392 | method: 'tmp', 393 | params: [{ jsonrpc: '2.0', id: 0, error: { code: 0, message: 'test' } }], 394 | }, 395 | headers: { 396 | 'content-type': 'application/json', 397 | test: '1', 398 | accept: '*/*', 399 | 'content-length': '111', 400 | }, 401 | }, 402 | ]); 403 | serverLog.splice(0, serverLog.length); 404 | // Batch 405 | const rpcBatch = mftch.jsonrpc(f, url, { 406 | headers: { Test: '1' }, 407 | batchSize: 2, 408 | }); 409 | // This tests: 410 | // - batch processes up to 2 elements in parallel 411 | // - named + unnamed mix works 412 | // - if there are less than 2 elements in queue we still create batch 413 | // - errors work in batch 414 | const t0 = await Promise.allSettled([ 415 | rpcBatch.call('tmp', { jsonrpc: '2.0', id: 0, result: 1 }, 
1, true, [1, 2, 3]), 416 | rpcBatch.callNamed('tmp', { res: { jsonrpc: '2.0', id: 1, result: 2 }, A: 1 }), 417 | rpcBatch.call('tmp', { jsonrpc: '2.0', id: 0, error: { code: 0, message: 'test' } }), 418 | rpcBatch.call('tmp', { jsonrpc: '2.0', id: 1, result: 3 }, 1, true, [1, 2, 3]), 419 | rpcBatch.call('tmp', { jsonrpc: '2.0', id: 0, result: 4 }, 1, true, [1, 2, 3]), 420 | ]); 421 | deepStrictEqual(t0[0], { status: 'fulfilled', value: 1 }); 422 | deepStrictEqual(t0[1], { status: 'fulfilled', value: 2 }); 423 | deepStrictEqual(t0[2].status, 'rejected'); 424 | deepStrictEqual(t0[3], { status: 'fulfilled', value: 3 }); 425 | deepStrictEqual(t0[4], { status: 'fulfilled', value: 4 }); 426 | deepStrictEqual( 427 | serverLog.map((i) => i.r), 428 | [ 429 | [ 430 | { 431 | jsonrpc: '2.0', 432 | id: 0, 433 | method: 'tmp', 434 | params: [{ jsonrpc: '2.0', id: 0, result: 1 }, 1, true, [1, 2, 3]], 435 | }, 436 | { 437 | jsonrpc: '2.0', 438 | id: 1, 439 | method: 'tmp', 440 | params: { res: { jsonrpc: '2.0', id: 1, result: 2 }, A: 1 }, 441 | }, 442 | ], 443 | [ 444 | { 445 | jsonrpc: '2.0', 446 | id: 0, 447 | method: 'tmp', 448 | params: [{ jsonrpc: '2.0', id: 0, error: { code: 0, message: 'test' } }], 449 | }, 450 | { 451 | jsonrpc: '2.0', 452 | id: 1, 453 | method: 'tmp', 454 | params: [{ jsonrpc: '2.0', id: 1, result: 3 }, 1, true, [1, 2, 3]], 455 | }, 456 | ], 457 | [ 458 | { 459 | jsonrpc: '2.0', 460 | id: 0, 461 | method: 'tmp', 462 | params: [{ jsonrpc: '2.0', id: 0, result: 4 }, 1, true, [1, 2, 3]], 463 | }, 464 | ], 465 | ] 466 | ); 467 | serverLog.splice(0, serverLog.length); 468 | // Now, lets breaks ids! 
(malicious server) 469 | const t1 = await Promise.allSettled([ 470 | rpcBatch.call('tmp', { jsonrpc: '2.0', id: 90, result: 1 }, 1, true, [1, 2, 3]), 471 | rpcBatch.callNamed('tmp', { res: { jsonrpc: '2.0', id: 1, result: 2 }, A: 1 }), 472 | rpcBatch.call('tmp', { jsonrpc: '2.0', id: 5, error: { code: 0, message: 'test' } }), 473 | rpcBatch.call('tmp', { jsonrpc: '2.0', id: 1, result: 3 }, 1, true, [1, 2, 3]), 474 | rpcBatch.call('tmp', { jsonrpc: '2.0', id: 4, result: 4 }, 1, true, [1, 2, 3]), 475 | ]); 476 | deepStrictEqual( 477 | t1.map((i) => i.status), 478 | ['rejected', 'fulfilled', 'rejected', 'fulfilled', 'rejected'] 479 | ); 480 | await stop(); 481 | }); 482 | should('replayable', async () => { 483 | const serverLog = []; 484 | const { stop, url } = await httpServer(8003, async (r) => { 485 | if (r.sleep) await sleep(r.sleep); 486 | serverLog.push(r.res); 487 | return { res: r.res }; 488 | }); 489 | const t = async (fn, body, opts = {}) => { 490 | const res = await fn(url, { 491 | method: 'POST', 492 | headers: { 'Content-Type': 'application/json' }, 493 | body: JSON.stringify(body), 494 | ...opts, 495 | }); 496 | return await res.json(); 497 | }; 498 | const ftch = mftch.ftch(fetch); 499 | const replayCapture = mftch.replayable(ftch); 500 | deepStrictEqual(await t(replayCapture, { res: 1 }), { res: 1 }); 501 | deepStrictEqual(await t(replayCapture, { res: 2 }), { res: 2 }); 502 | deepStrictEqual(serverLog, [1, 2]); 503 | const logs = replayCapture.export(); 504 | deepStrictEqual( 505 | logs, 506 | '{"{\\"url\\":\\"http://127.0.0.1:8003/\\",\\"opt\\":{\\"method\\":\\"POST\\",\\"headers\\":{\\"Content-Type\\":\\"application/json\\"},\\"body\\":\\"{\\\\\\"res\\\\\\":1}\\"}}":"{\\"res\\":1}","{\\"url\\":\\"http://127.0.0.1:8003/\\",\\"opt\\":{\\"method\\":\\"POST\\",\\"headers\\":{\\"Content-Type\\":\\"application/json\\"},\\"body\\":\\"{\\\\\\"res\\\\\\":2}\\"}}":"{\\"res\\":2}"}' 507 | ); 508 | const replayTest = mftch.replayable(ftch, JSON.parse(logs)); 
509 | deepStrictEqual(await t(replayTest, { res: 1 }), { res: 1 }); 510 | deepStrictEqual(await t(replayTest, { res: 2 }), { res: 2 }); 511 | deepStrictEqual(await t(replayTest, { res: 3 }), { res: 3 }); 512 | // Third request is real 513 | deepStrictEqual(serverLog, [1, 2, 3]); 514 | // Throws in offline mode 515 | const replayTestOffline = mftch.replayable(ftch, JSON.parse(logs), { offline: true }); 516 | deepStrictEqual(await t(replayTestOffline, { res: 1 }), { res: 1 }); 517 | deepStrictEqual(await t(replayTestOffline, { res: 2 }), { res: 2 }); 518 | await rejects(() => t(replayTestOffline, { res: 3 })); 519 | deepStrictEqual(serverLog, [1, 2, 3]); 520 | await stop(); 521 | }); 522 | }); 523 | 524 | should.runWhen(import.meta.url); 525 | -------------------------------------------------------------------------------- /test/build/input.js: -------------------------------------------------------------------------------- 1 | export { ftch, jsonrpc, replayable } from 'micro-ftch'; 2 | -------------------------------------------------------------------------------- /test/build/package-lock.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "build", 3 | "version": "1.0.0", 4 | "lockfileVersion": 3, 5 | "requires": true, 6 | "packages": { 7 | "": { 8 | "name": "build", 9 | "version": "1.0.0", 10 | "devDependencies": { 11 | "esbuild": "0.25.0", 12 | "micro-ftch": "file:../.." 
13 | } 14 | }, 15 | "..": { 16 | "extraneous": true 17 | }, 18 | "../..": { 19 | "name": "micro-ftch", 20 | "version": "0.5.0", 21 | "dev": true, 22 | "license": "MIT", 23 | "devDependencies": { 24 | "@paulmillr/jsbt": "0.4.1", 25 | "@types/node": "22.15.23", 26 | "micro-bmark": "0.4.1", 27 | "micro-should": "0.5.3", 28 | "prettier": "3.5.2", 29 | "typescript": "5.8.3" 30 | }, 31 | "funding": { 32 | "url": "https://paulmillr.com/funding/" 33 | } 34 | }, 35 | "node_modules/@esbuild/aix-ppc64": { 36 | "version": "0.25.0", 37 | "resolved": "https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.25.0.tgz", 38 | "integrity": "sha512-O7vun9Sf8DFjH2UtqK8Ku3LkquL9SZL8OLY1T5NZkA34+wG3OQF7cl4Ql8vdNzM6fzBbYfLaiRLIOZ+2FOCgBQ==", 39 | "cpu": [ 40 | "ppc64" 41 | ], 42 | "dev": true, 43 | "license": "MIT", 44 | "optional": true, 45 | "os": [ 46 | "aix" 47 | ], 48 | "engines": { 49 | "node": ">=18" 50 | } 51 | }, 52 | "node_modules/@esbuild/android-arm": { 53 | "version": "0.25.0", 54 | "resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.25.0.tgz", 55 | "integrity": "sha512-PTyWCYYiU0+1eJKmw21lWtC+d08JDZPQ5g+kFyxP0V+es6VPPSUhM6zk8iImp2jbV6GwjX4pap0JFbUQN65X1g==", 56 | "cpu": [ 57 | "arm" 58 | ], 59 | "dev": true, 60 | "license": "MIT", 61 | "optional": true, 62 | "os": [ 63 | "android" 64 | ], 65 | "engines": { 66 | "node": ">=18" 67 | } 68 | }, 69 | "node_modules/@esbuild/android-arm64": { 70 | "version": "0.25.0", 71 | "resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.25.0.tgz", 72 | "integrity": "sha512-grvv8WncGjDSyUBjN9yHXNt+cq0snxXbDxy5pJtzMKGmmpPxeAmAhWxXI+01lU5rwZomDgD3kJwulEnhTRUd6g==", 73 | "cpu": [ 74 | "arm64" 75 | ], 76 | "dev": true, 77 | "license": "MIT", 78 | "optional": true, 79 | "os": [ 80 | "android" 81 | ], 82 | "engines": { 83 | "node": ">=18" 84 | } 85 | }, 86 | "node_modules/@esbuild/android-x64": { 87 | "version": "0.25.0", 88 | "resolved": 
"https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.25.0.tgz", 89 | "integrity": "sha512-m/ix7SfKG5buCnxasr52+LI78SQ+wgdENi9CqyCXwjVR2X4Jkz+BpC3le3AoBPYTC9NHklwngVXvbJ9/Akhrfg==", 90 | "cpu": [ 91 | "x64" 92 | ], 93 | "dev": true, 94 | "license": "MIT", 95 | "optional": true, 96 | "os": [ 97 | "android" 98 | ], 99 | "engines": { 100 | "node": ">=18" 101 | } 102 | }, 103 | "node_modules/@esbuild/darwin-arm64": { 104 | "version": "0.25.0", 105 | "resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.25.0.tgz", 106 | "integrity": "sha512-mVwdUb5SRkPayVadIOI78K7aAnPamoeFR2bT5nszFUZ9P8UpK4ratOdYbZZXYSqPKMHfS1wdHCJk1P1EZpRdvw==", 107 | "cpu": [ 108 | "arm64" 109 | ], 110 | "dev": true, 111 | "license": "MIT", 112 | "optional": true, 113 | "os": [ 114 | "darwin" 115 | ], 116 | "engines": { 117 | "node": ">=18" 118 | } 119 | }, 120 | "node_modules/@esbuild/darwin-x64": { 121 | "version": "0.25.0", 122 | "resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.25.0.tgz", 123 | "integrity": "sha512-DgDaYsPWFTS4S3nWpFcMn/33ZZwAAeAFKNHNa1QN0rI4pUjgqf0f7ONmXf6d22tqTY+H9FNdgeaAa+YIFUn2Rg==", 124 | "cpu": [ 125 | "x64" 126 | ], 127 | "dev": true, 128 | "license": "MIT", 129 | "optional": true, 130 | "os": [ 131 | "darwin" 132 | ], 133 | "engines": { 134 | "node": ">=18" 135 | } 136 | }, 137 | "node_modules/@esbuild/freebsd-arm64": { 138 | "version": "0.25.0", 139 | "resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.25.0.tgz", 140 | "integrity": "sha512-VN4ocxy6dxefN1MepBx/iD1dH5K8qNtNe227I0mnTRjry8tj5MRk4zprLEdG8WPyAPb93/e4pSgi1SoHdgOa4w==", 141 | "cpu": [ 142 | "arm64" 143 | ], 144 | "dev": true, 145 | "license": "MIT", 146 | "optional": true, 147 | "os": [ 148 | "freebsd" 149 | ], 150 | "engines": { 151 | "node": ">=18" 152 | } 153 | }, 154 | "node_modules/@esbuild/freebsd-x64": { 155 | "version": "0.25.0", 156 | "resolved": 
"https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.25.0.tgz", 157 | "integrity": "sha512-mrSgt7lCh07FY+hDD1TxiTyIHyttn6vnjesnPoVDNmDfOmggTLXRv8Id5fNZey1gl/V2dyVK1VXXqVsQIiAk+A==", 158 | "cpu": [ 159 | "x64" 160 | ], 161 | "dev": true, 162 | "license": "MIT", 163 | "optional": true, 164 | "os": [ 165 | "freebsd" 166 | ], 167 | "engines": { 168 | "node": ">=18" 169 | } 170 | }, 171 | "node_modules/@esbuild/linux-arm": { 172 | "version": "0.25.0", 173 | "resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.25.0.tgz", 174 | "integrity": "sha512-vkB3IYj2IDo3g9xX7HqhPYxVkNQe8qTK55fraQyTzTX/fxaDtXiEnavv9geOsonh2Fd2RMB+i5cbhu2zMNWJwg==", 175 | "cpu": [ 176 | "arm" 177 | ], 178 | "dev": true, 179 | "license": "MIT", 180 | "optional": true, 181 | "os": [ 182 | "linux" 183 | ], 184 | "engines": { 185 | "node": ">=18" 186 | } 187 | }, 188 | "node_modules/@esbuild/linux-arm64": { 189 | "version": "0.25.0", 190 | "resolved": "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.25.0.tgz", 191 | "integrity": "sha512-9QAQjTWNDM/Vk2bgBl17yWuZxZNQIF0OUUuPZRKoDtqF2k4EtYbpyiG5/Dk7nqeK6kIJWPYldkOcBqjXjrUlmg==", 192 | "cpu": [ 193 | "arm64" 194 | ], 195 | "dev": true, 196 | "license": "MIT", 197 | "optional": true, 198 | "os": [ 199 | "linux" 200 | ], 201 | "engines": { 202 | "node": ">=18" 203 | } 204 | }, 205 | "node_modules/@esbuild/linux-ia32": { 206 | "version": "0.25.0", 207 | "resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.25.0.tgz", 208 | "integrity": "sha512-43ET5bHbphBegyeqLb7I1eYn2P/JYGNmzzdidq/w0T8E2SsYL1U6un2NFROFRg1JZLTzdCoRomg8Rvf9M6W6Gg==", 209 | "cpu": [ 210 | "ia32" 211 | ], 212 | "dev": true, 213 | "license": "MIT", 214 | "optional": true, 215 | "os": [ 216 | "linux" 217 | ], 218 | "engines": { 219 | "node": ">=18" 220 | } 221 | }, 222 | "node_modules/@esbuild/linux-loong64": { 223 | "version": "0.25.0", 224 | "resolved": 
"https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.25.0.tgz", 225 | "integrity": "sha512-fC95c/xyNFueMhClxJmeRIj2yrSMdDfmqJnyOY4ZqsALkDrrKJfIg5NTMSzVBr5YW1jf+l7/cndBfP3MSDpoHw==", 226 | "cpu": [ 227 | "loong64" 228 | ], 229 | "dev": true, 230 | "license": "MIT", 231 | "optional": true, 232 | "os": [ 233 | "linux" 234 | ], 235 | "engines": { 236 | "node": ">=18" 237 | } 238 | }, 239 | "node_modules/@esbuild/linux-mips64el": { 240 | "version": "0.25.0", 241 | "resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.25.0.tgz", 242 | "integrity": "sha512-nkAMFju7KDW73T1DdH7glcyIptm95a7Le8irTQNO/qtkoyypZAnjchQgooFUDQhNAy4iu08N79W4T4pMBwhPwQ==", 243 | "cpu": [ 244 | "mips64el" 245 | ], 246 | "dev": true, 247 | "license": "MIT", 248 | "optional": true, 249 | "os": [ 250 | "linux" 251 | ], 252 | "engines": { 253 | "node": ">=18" 254 | } 255 | }, 256 | "node_modules/@esbuild/linux-ppc64": { 257 | "version": "0.25.0", 258 | "resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.25.0.tgz", 259 | "integrity": "sha512-NhyOejdhRGS8Iwv+KKR2zTq2PpysF9XqY+Zk77vQHqNbo/PwZCzB5/h7VGuREZm1fixhs4Q/qWRSi5zmAiO4Fw==", 260 | "cpu": [ 261 | "ppc64" 262 | ], 263 | "dev": true, 264 | "license": "MIT", 265 | "optional": true, 266 | "os": [ 267 | "linux" 268 | ], 269 | "engines": { 270 | "node": ">=18" 271 | } 272 | }, 273 | "node_modules/@esbuild/linux-riscv64": { 274 | "version": "0.25.0", 275 | "resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.25.0.tgz", 276 | "integrity": "sha512-5S/rbP5OY+GHLC5qXp1y/Mx//e92L1YDqkiBbO9TQOvuFXM+iDqUNG5XopAnXoRH3FjIUDkeGcY1cgNvnXp/kA==", 277 | "cpu": [ 278 | "riscv64" 279 | ], 280 | "dev": true, 281 | "license": "MIT", 282 | "optional": true, 283 | "os": [ 284 | "linux" 285 | ], 286 | "engines": { 287 | "node": ">=18" 288 | } 289 | }, 290 | "node_modules/@esbuild/linux-s390x": { 291 | "version": "0.25.0", 292 | "resolved": 
"https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.25.0.tgz", 293 | "integrity": "sha512-XM2BFsEBz0Fw37V0zU4CXfcfuACMrppsMFKdYY2WuTS3yi8O1nFOhil/xhKTmE1nPmVyvQJjJivgDT+xh8pXJA==", 294 | "cpu": [ 295 | "s390x" 296 | ], 297 | "dev": true, 298 | "license": "MIT", 299 | "optional": true, 300 | "os": [ 301 | "linux" 302 | ], 303 | "engines": { 304 | "node": ">=18" 305 | } 306 | }, 307 | "node_modules/@esbuild/linux-x64": { 308 | "version": "0.25.0", 309 | "resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.25.0.tgz", 310 | "integrity": "sha512-9yl91rHw/cpwMCNytUDxwj2XjFpxML0y9HAOH9pNVQDpQrBxHy01Dx+vaMu0N1CKa/RzBD2hB4u//nfc+Sd3Cw==", 311 | "cpu": [ 312 | "x64" 313 | ], 314 | "dev": true, 315 | "license": "MIT", 316 | "optional": true, 317 | "os": [ 318 | "linux" 319 | ], 320 | "engines": { 321 | "node": ">=18" 322 | } 323 | }, 324 | "node_modules/@esbuild/netbsd-arm64": { 325 | "version": "0.25.0", 326 | "resolved": "https://registry.npmjs.org/@esbuild/netbsd-arm64/-/netbsd-arm64-0.25.0.tgz", 327 | "integrity": "sha512-RuG4PSMPFfrkH6UwCAqBzauBWTygTvb1nxWasEJooGSJ/NwRw7b2HOwyRTQIU97Hq37l3npXoZGYMy3b3xYvPw==", 328 | "cpu": [ 329 | "arm64" 330 | ], 331 | "dev": true, 332 | "license": "MIT", 333 | "optional": true, 334 | "os": [ 335 | "netbsd" 336 | ], 337 | "engines": { 338 | "node": ">=18" 339 | } 340 | }, 341 | "node_modules/@esbuild/netbsd-x64": { 342 | "version": "0.25.0", 343 | "resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.25.0.tgz", 344 | "integrity": "sha512-jl+qisSB5jk01N5f7sPCsBENCOlPiS/xptD5yxOx2oqQfyourJwIKLRA2yqWdifj3owQZCL2sn6o08dBzZGQzA==", 345 | "cpu": [ 346 | "x64" 347 | ], 348 | "dev": true, 349 | "license": "MIT", 350 | "optional": true, 351 | "os": [ 352 | "netbsd" 353 | ], 354 | "engines": { 355 | "node": ">=18" 356 | } 357 | }, 358 | "node_modules/@esbuild/openbsd-arm64": { 359 | "version": "0.25.0", 360 | "resolved": 
"https://registry.npmjs.org/@esbuild/openbsd-arm64/-/openbsd-arm64-0.25.0.tgz", 361 | "integrity": "sha512-21sUNbq2r84YE+SJDfaQRvdgznTD8Xc0oc3p3iW/a1EVWeNj/SdUCbm5U0itZPQYRuRTW20fPMWMpcrciH2EJw==", 362 | "cpu": [ 363 | "arm64" 364 | ], 365 | "dev": true, 366 | "license": "MIT", 367 | "optional": true, 368 | "os": [ 369 | "openbsd" 370 | ], 371 | "engines": { 372 | "node": ">=18" 373 | } 374 | }, 375 | "node_modules/@esbuild/openbsd-x64": { 376 | "version": "0.25.0", 377 | "resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.25.0.tgz", 378 | "integrity": "sha512-2gwwriSMPcCFRlPlKx3zLQhfN/2WjJ2NSlg5TKLQOJdV0mSxIcYNTMhk3H3ulL/cak+Xj0lY1Ym9ysDV1igceg==", 379 | "cpu": [ 380 | "x64" 381 | ], 382 | "dev": true, 383 | "license": "MIT", 384 | "optional": true, 385 | "os": [ 386 | "openbsd" 387 | ], 388 | "engines": { 389 | "node": ">=18" 390 | } 391 | }, 392 | "node_modules/@esbuild/sunos-x64": { 393 | "version": "0.25.0", 394 | "resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.25.0.tgz", 395 | "integrity": "sha512-bxI7ThgLzPrPz484/S9jLlvUAHYMzy6I0XiU1ZMeAEOBcS0VePBFxh1JjTQt3Xiat5b6Oh4x7UC7IwKQKIJRIg==", 396 | "cpu": [ 397 | "x64" 398 | ], 399 | "dev": true, 400 | "license": "MIT", 401 | "optional": true, 402 | "os": [ 403 | "sunos" 404 | ], 405 | "engines": { 406 | "node": ">=18" 407 | } 408 | }, 409 | "node_modules/@esbuild/win32-arm64": { 410 | "version": "0.25.0", 411 | "resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.25.0.tgz", 412 | "integrity": "sha512-ZUAc2YK6JW89xTbXvftxdnYy3m4iHIkDtK3CLce8wg8M2L+YZhIvO1DKpxrd0Yr59AeNNkTiic9YLf6FTtXWMw==", 413 | "cpu": [ 414 | "arm64" 415 | ], 416 | "dev": true, 417 | "license": "MIT", 418 | "optional": true, 419 | "os": [ 420 | "win32" 421 | ], 422 | "engines": { 423 | "node": ">=18" 424 | } 425 | }, 426 | "node_modules/@esbuild/win32-ia32": { 427 | "version": "0.25.0", 428 | "resolved": 
"https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.25.0.tgz", 429 | "integrity": "sha512-eSNxISBu8XweVEWG31/JzjkIGbGIJN/TrRoiSVZwZ6pkC6VX4Im/WV2cz559/TXLcYbcrDN8JtKgd9DJVIo8GA==", 430 | "cpu": [ 431 | "ia32" 432 | ], 433 | "dev": true, 434 | "license": "MIT", 435 | "optional": true, 436 | "os": [ 437 | "win32" 438 | ], 439 | "engines": { 440 | "node": ">=18" 441 | } 442 | }, 443 | "node_modules/@esbuild/win32-x64": { 444 | "version": "0.25.0", 445 | "resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.25.0.tgz", 446 | "integrity": "sha512-ZENoHJBxA20C2zFzh6AI4fT6RraMzjYw4xKWemRTRmRVtN9c5DcH9r/f2ihEkMjOW5eGgrwCslG/+Y/3bL+DHQ==", 447 | "cpu": [ 448 | "x64" 449 | ], 450 | "dev": true, 451 | "license": "MIT", 452 | "optional": true, 453 | "os": [ 454 | "win32" 455 | ], 456 | "engines": { 457 | "node": ">=18" 458 | } 459 | }, 460 | "node_modules/esbuild": { 461 | "version": "0.25.0", 462 | "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.25.0.tgz", 463 | "integrity": "sha512-BXq5mqc8ltbaN34cDqWuYKyNhX8D/Z0J1xdtdQ8UcIIIyJyz+ZMKUt58tF3SrZ85jcfN/PZYhjR5uDQAYNVbuw==", 464 | "dev": true, 465 | "hasInstallScript": true, 466 | "license": "MIT", 467 | "bin": { 468 | "esbuild": "bin/esbuild" 469 | }, 470 | "engines": { 471 | "node": ">=18" 472 | }, 473 | "optionalDependencies": { 474 | "@esbuild/aix-ppc64": "0.25.0", 475 | "@esbuild/android-arm": "0.25.0", 476 | "@esbuild/android-arm64": "0.25.0", 477 | "@esbuild/android-x64": "0.25.0", 478 | "@esbuild/darwin-arm64": "0.25.0", 479 | "@esbuild/darwin-x64": "0.25.0", 480 | "@esbuild/freebsd-arm64": "0.25.0", 481 | "@esbuild/freebsd-x64": "0.25.0", 482 | "@esbuild/linux-arm": "0.25.0", 483 | "@esbuild/linux-arm64": "0.25.0", 484 | "@esbuild/linux-ia32": "0.25.0", 485 | "@esbuild/linux-loong64": "0.25.0", 486 | "@esbuild/linux-mips64el": "0.25.0", 487 | "@esbuild/linux-ppc64": "0.25.0", 488 | "@esbuild/linux-riscv64": "0.25.0", 489 | "@esbuild/linux-s390x": "0.25.0", 490 | 
"@esbuild/linux-x64": "0.25.0", 491 | "@esbuild/netbsd-arm64": "0.25.0", 492 | "@esbuild/netbsd-x64": "0.25.0", 493 | "@esbuild/openbsd-arm64": "0.25.0", 494 | "@esbuild/openbsd-x64": "0.25.0", 495 | "@esbuild/sunos-x64": "0.25.0", 496 | "@esbuild/win32-arm64": "0.25.0", 497 | "@esbuild/win32-ia32": "0.25.0", 498 | "@esbuild/win32-x64": "0.25.0" 499 | } 500 | }, 501 | "node_modules/micro-ftch": { 502 | "resolved": "../..", 503 | "link": true 504 | } 505 | } 506 | } 507 | -------------------------------------------------------------------------------- /test/build/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "build", 3 | "private": true, 4 | "version": "1.0.0", 5 | "main": "input.js", 6 | "type": "module", 7 | "devDependencies": { 8 | "esbuild": "0.25.0", 9 | "micro-ftch": "file:../.." 10 | } 11 | } 12 | -------------------------------------------------------------------------------- /test/index.ts: -------------------------------------------------------------------------------- 1 | import { should } from 'micro-should'; 2 | 3 | import './basic.test.ts'; 4 | 5 | should.run(true); 6 | -------------------------------------------------------------------------------- /tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "extends": "@paulmillr/jsbt/tsconfig.json", 3 | "compilerOptions": { 4 | "baseUrl": ".", 5 | "outDir": "." 6 | }, 7 | "include": [ 8 | "index.ts" 9 | ], 10 | "exclude": [ 11 | "node_modules" 12 | ] 13 | } 14 | --------------------------------------------------------------------------------