├── .gitattributes ├── .prettierrc ├── .gitignore ├── SECURITY.md ├── test ├── destroy.js ├── transform.js ├── passthrough.js ├── all.js ├── get-stream-error.js ├── byte-length.js ├── duplex.js ├── pipeline.js ├── backpressure.js ├── pipe.js ├── async-iterator.js ├── compat.js ├── writable.js └── readable.js ├── .github └── workflows │ └── test.yml ├── package.json ├── LICENSE ├── examples └── fs.js ├── README.md └── index.js /.gitattributes: -------------------------------------------------------------------------------- 1 | * text=auto eol=lf 2 | -------------------------------------------------------------------------------- /.prettierrc: -------------------------------------------------------------------------------- 1 | "prettier-config-holepunch" 2 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | node_modules 2 | package-lock.json 3 | coverage 4 | sandbox.js 5 | sandbox/ 6 | *.cpy 7 | -------------------------------------------------------------------------------- /SECURITY.md: -------------------------------------------------------------------------------- 1 | ## Security contact information 2 | 3 | To report a security vulnerability, please use the 4 | [Tidelift security contact](https://tidelift.com/security). 5 | Tidelift will coordinate the fix and disclosure. 
6 | -------------------------------------------------------------------------------- /test/destroy.js: -------------------------------------------------------------------------------- 1 | const test = require('brittle') 2 | const { Duplex } = require('../') 3 | 4 | test('destroy is never sync', function (t) { 5 | t.plan(1) 6 | 7 | let openCb = null 8 | 9 | const s = new Duplex({ 10 | open(cb) { 11 | openCb = cb 12 | }, 13 | predestroy() { 14 | openCb(new Error('stop')) 15 | } 16 | }) 17 | 18 | s.resume() 19 | setImmediate(() => { 20 | s.destroy() 21 | s.on('close', () => t.pass('destroy was not sync')) 22 | }) 23 | }) 24 | -------------------------------------------------------------------------------- /test/transform.js: -------------------------------------------------------------------------------- 1 | const test = require('brittle') 2 | const { Transform } = require('../') 3 | 4 | test('default transform teardown when saturated', async function (t) { 5 | const stream = new Transform({ 6 | transform(data, cb) { 7 | cb(null, data) 8 | } 9 | }) 10 | 11 | for (let i = 0; i < 20; i++) { 12 | stream.write('hello') 13 | } 14 | 15 | await new Promise((resolve) => setImmediate(resolve)) 16 | 17 | stream.destroy() 18 | 19 | await new Promise((resolve) => stream.on('close', resolve)) 20 | 21 | t.pass('close fired') 22 | }) 23 | -------------------------------------------------------------------------------- /.github/workflows/test.yml: -------------------------------------------------------------------------------- 1 | name: Build Status 2 | on: 3 | push: 4 | branches: 5 | - master 6 | pull_request: 7 | branches: 8 | - master 9 | jobs: 10 | build: 11 | strategy: 12 | matrix: 13 | node-version: [lts/*] 14 | os: [ubuntu-latest, macos-latest, windows-latest] 15 | runs-on: ${{ matrix.os }} 16 | steps: 17 | - uses: actions/checkout@v2 18 | - name: Use Node.js ${{ matrix.node-version }} 19 | uses: actions/setup-node@v2 20 | with: 21 | node-version: ${{ matrix.node-version }} 22 
| - run: npm install 23 | - run: npm test 24 | -------------------------------------------------------------------------------- /test/passthrough.js: -------------------------------------------------------------------------------- 1 | const test = require('brittle') 2 | const { PassThrough, Writable, Readable } = require('../') 3 | 4 | test('passthrough', (t) => { 5 | t.plan(3) 6 | 7 | let i = 0 8 | const p = new PassThrough() 9 | const w = new Writable({ 10 | write(data, cb) { 11 | i++ 12 | if (i === 1) t.is(data, 'foo') 13 | else if (i === 2) t.is(data, 'bar') 14 | else t.fail('too many messages') 15 | cb() 16 | } 17 | }) 18 | w.on('finish', () => t.pass('finished')) 19 | const r = new Readable() 20 | r.pipe(p).pipe(w) 21 | r.push('foo') 22 | r.push('bar') 23 | r.push(null) 24 | }) 25 | -------------------------------------------------------------------------------- /test/all.js: -------------------------------------------------------------------------------- 1 | // This runner is auto-generated by Brittle 2 | 3 | runTests() 4 | 5 | async function runTests() { 6 | const test = (await import('brittle')).default 7 | 8 | test.pause() 9 | 10 | await import('./async-iterator.js') 11 | await import('./backpressure.js') 12 | await import('./byte-length.js') 13 | await import('./compat.js') 14 | await import('./destroy.js') 15 | await import('./duplex.js') 16 | await import('./get-stream-error.js') 17 | await import('./passthrough.js') 18 | await import('./pipe.js') 19 | await import('./pipeline.js') 20 | await import('./readable.js') 21 | await import('./transform.js') 22 | await import('./writable.js') 23 | 24 | test.resume() 25 | } 26 | -------------------------------------------------------------------------------- /test/get-stream-error.js: -------------------------------------------------------------------------------- 1 | const test = require('brittle') 2 | const { Readable, getStreamError } = require('../') 3 | 4 | test('getStreamError, no errors', function (t) { 
5 | const stream = new Readable() 6 | 7 | t.is(getStreamError(stream), null) 8 | }) 9 | 10 | test('getStreamError, basic', function (t) { 11 | const stream = new Readable() 12 | stream.on('error', () => {}) 13 | 14 | const err = new Error('stop') 15 | stream.destroy(err) 16 | 17 | t.is(getStreamError(stream), err) 18 | }) 19 | 20 | test('getStreamError, only explicit errors by default', function (t) { 21 | const stream = new Readable() 22 | 23 | stream.destroy() 24 | 25 | t.absent(getStreamError(stream)) 26 | }) 27 | 28 | test('getStreamError, get premature destroy', function (t) { 29 | const stream = new Readable() 30 | 31 | stream.destroy() 32 | 33 | const err = getStreamError(stream, { all: true }) 34 | t.alike(err.message, 'Stream was destroyed') 35 | }) 36 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "streamx", 3 | "version": "2.23.0", 4 | "description": "An iteration of the Node.js core streams with a series of improvements", 5 | "main": "index.js", 6 | "dependencies": { 7 | "events-universal": "^1.0.0", 8 | "fast-fifo": "^1.3.2", 9 | "text-decoder": "^1.1.0" 10 | }, 11 | "devDependencies": { 12 | "b4a": "^1.6.6", 13 | "brittle": "^3.1.1", 14 | "end-of-stream": "^1.4.4", 15 | "prettier": "^3.6.2", 16 | "prettier-config-holepunch": "^2.0.0" 17 | }, 18 | "files": [ 19 | "index.js" 20 | ], 21 | "scripts": { 22 | "format": "prettier --write .", 23 | "test": "prettier --check . 
&& node test/all.js", 24 | "test:bare": "bare test/all.js" 25 | }, 26 | "repository": { 27 | "type": "git", 28 | "url": "https://github.com/mafintosh/streamx.git" 29 | }, 30 | "author": "Mathias Buus (@mafintosh)", 31 | "license": "MIT", 32 | "bugs": { 33 | "url": "https://github.com/mafintosh/streamx/issues" 34 | }, 35 | "homepage": "https://github.com/mafintosh/streamx" 36 | } 37 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | The MIT License (MIT) 2 | 3 | Copyright (c) 2019 Mathias Buus 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in 13 | all copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN 21 | THE SOFTWARE. 
22 | -------------------------------------------------------------------------------- /test/byte-length.js: -------------------------------------------------------------------------------- 1 | const test = require('brittle') 2 | const { Readable, Writable } = require('../') 3 | 4 | const defaultSizes = [ 5 | { name: 'buf512', item: Buffer.alloc(512), size: 512 }, 6 | { name: 'number', item: 1, size: 1024 }, 7 | { name: 'number-byteLength', item: 1, size: 512, byteLength: () => 512 }, 8 | { 9 | name: 'number-byteLengthReadable', 10 | item: 1, 11 | size: 256, 12 | byteLength: () => 512, 13 | byteLengthExtended: () => 256 14 | }, 15 | { name: 'uint8-512', item: new Uint8Array(512), size: 512 }, 16 | { name: 'uint32-64', item: new Uint32Array(64), size: 256 } 17 | ] 18 | 19 | for (const { name, item, size, byteLength, byteLengthExtended } of defaultSizes) { 20 | test(`readable ${name}`, function (t) { 21 | const r = new Readable({ 22 | byteLength, 23 | byteLengthReadable: byteLengthExtended 24 | }) 25 | r.push(item) 26 | t.is(r._readableState.buffered, size) 27 | }) 28 | 29 | test(`writable ${name}`, function (t) { 30 | const w = new Writable({ 31 | byteLength, 32 | byteLengthWritable: byteLengthExtended 33 | }) 34 | w.write(item) 35 | t.is(w._writableState.buffered, size) 36 | }) 37 | } 38 | 39 | test('byteLength receives readable item', function (t) { 40 | t.plan(1) 41 | 42 | const obj = {} 43 | const r = new Readable({ 44 | byteLength: (data) => { 45 | t.alike(obj, data) 46 | } 47 | }) 48 | r.push(obj) 49 | }) 50 | 51 | test('byteLength receives writable item', function (t) { 52 | t.plan(2) 53 | 54 | const obj = {} 55 | const r = new Writable({ 56 | byteLength: (data) => { 57 | t.alike(obj, data) 58 | return 1 59 | } 60 | }) 61 | r.write(obj) 62 | }) 63 | -------------------------------------------------------------------------------- /test/duplex.js: -------------------------------------------------------------------------------- 1 | const test = require('brittle') 
2 | const { Duplex } = require('../') 3 | 4 | test('if open does not end, it should stall', function (t) { 5 | t.plan(1) 6 | 7 | const d = new Duplex({ 8 | open() { 9 | t.pass('open called') 10 | }, 11 | read() { 12 | t.fail('should not call read') 13 | }, 14 | write() { 15 | t.fail('should not call write') 16 | } 17 | }) 18 | 19 | d.resume() 20 | d.write('hi') 21 | }) 22 | 23 | test('Using both mapReadable and mapWritable to map data', function (t) { 24 | t.plan(2) 25 | 26 | const d = new Duplex({ 27 | write(data, cb) { 28 | d.push(data) 29 | cb() 30 | }, 31 | final(cb) { 32 | d.push(null) 33 | cb() 34 | }, 35 | mapReadable: (num) => JSON.stringify({ num }), 36 | mapWritable: (input) => parseInt(input, 10) 37 | }) 38 | d.on('data', (data) => { 39 | t.is(data, '{"num":32}') 40 | }) 41 | d.on('close', () => { 42 | t.pass('closed') 43 | }) 44 | d.write('32') 45 | d.end() 46 | }) 47 | 48 | test('wait for readable', function (t) { 49 | t.plan(1) 50 | 51 | const d = new Duplex({ 52 | read(cb) { 53 | d.push('ok') 54 | d.push(null) 55 | cb() 56 | } 57 | }) 58 | 59 | d.on('readable', function () { 60 | t.is(d.read(), 'ok') 61 | }) 62 | }) 63 | 64 | test('write during end', function (t) { 65 | t.plan(3) 66 | 67 | const expected = ['a', 'b'] 68 | 69 | const w = new Duplex({ 70 | write(data, cb) { 71 | t.is(data, expected.shift()) 72 | cb(null) 73 | }, 74 | final(cb) { 75 | w.write('bad') 76 | cb(null) 77 | } 78 | }) 79 | 80 | w.write('a') 81 | w.write('b') 82 | w.end() 83 | w.on('finish', () => w.push(null)) 84 | w.on('close', () => t.pass('closed')) 85 | }) 86 | -------------------------------------------------------------------------------- /examples/fs.js: -------------------------------------------------------------------------------- 1 | const fs = require('fs') 2 | const { Writable, Readable } = require('../') 3 | 4 | class FileWriteStream extends Writable { 5 | constructor(filename, mode) { 6 | super() 7 | this.filename = filename 8 | this.mode = mode 9 | this.fd = 0 
10 | } 11 | 12 | _open(cb) { 13 | fs.open(this.filename, this.mode, (err, fd) => { 14 | if (err) return cb(err) 15 | this.fd = fd 16 | cb(null) 17 | }) 18 | } 19 | 20 | _write(data, cb) { 21 | fs.write(this.fd, data, 0, data.length, null, (err, written) => { 22 | if (err) return cb(err) 23 | if (written !== data.length) return this._write(data.slice(written), cb) 24 | cb(null) 25 | }) 26 | } 27 | 28 | _destroy(cb) { 29 | if (!this.fd) return cb() 30 | fs.close(this.fd, cb) 31 | } 32 | } 33 | 34 | class FileReadStream extends Readable { 35 | constructor(filename) { 36 | super() 37 | this.filename = filename 38 | this.fd = 0 39 | } 40 | 41 | _open(cb) { 42 | fs.open(this.filename, 'r', (err, fd) => { 43 | if (err) return cb(err) 44 | this.fd = fd 45 | cb(null) 46 | }) 47 | } 48 | 49 | _read(cb) { 50 | let data = Buffer.alloc(16 * 1024) 51 | 52 | fs.read(this.fd, data, 0, data.length, null, (err, read) => { 53 | if (err) return cb(err) 54 | if (read !== data.length) data = data.slice(0, read) 55 | this.push(data.length ? 
data : null) 56 | cb(null) 57 | }) 58 | } 59 | 60 | _destroy(cb) { 61 | if (!this.fd) return cb() 62 | fs.close(this.fd, cb) 63 | } 64 | } 65 | 66 | // copy this file as an example 67 | 68 | const rs = new FileReadStream(__filename) 69 | const ws = new FileWriteStream(`${__filename}.cpy`, 'w') 70 | 71 | rs.pipe(ws, function (err) { 72 | console.log('file copied', err) 73 | }) 74 | -------------------------------------------------------------------------------- /test/pipeline.js: -------------------------------------------------------------------------------- 1 | const test = require('brittle') 2 | const { pipeline, pipelinePromise, Transform, Readable, Writable } = require('../') 3 | 4 | test('piping to a writable', function (t) { 5 | t.plan(2) 6 | 7 | const w = pipeline( 8 | Readable.from('hello'), 9 | new Writable({ 10 | write(data, cb) { 11 | t.is(data, 'hello') 12 | cb() 13 | } 14 | }) 15 | ) 16 | w.on('close', () => t.pass('closed')) 17 | }) 18 | 19 | test('piping with error', function (t) { 20 | t.plan(1) 21 | 22 | const r = new Readable() 23 | const w = new Writable() 24 | const err = new Error() 25 | pipeline(r, w, (error) => { 26 | t.alike(error, err) 27 | }) 28 | r.destroy(err) 29 | }) 30 | 31 | test('piping with final callback', function (t) { 32 | t.plan(2) 33 | 34 | pipeline( 35 | Readable.from('hello'), 36 | new Writable({ 37 | write(data, cb) { 38 | t.is(data, 'hello') 39 | cb() 40 | } 41 | }), 42 | () => t.pass('ended') 43 | ) 44 | }) 45 | 46 | test('piping with transform stream inbetween', function (t) { 47 | t.plan(2) 48 | 49 | pipeline( 50 | [ 51 | Readable.from('hello'), 52 | new Transform({ 53 | transform(input, cb) { 54 | this.push(input.length) 55 | cb() 56 | } 57 | }), 58 | new Writable({ 59 | write(data, cb) { 60 | t.is(data, 5) 61 | cb() 62 | } 63 | }) 64 | ], 65 | () => t.pass('ended') 66 | ) 67 | }) 68 | 69 | test('piping to a writable', function (t) { 70 | t.plan(2) 71 | 72 | const w = pipeline( 73 | Readable.from('hello'), 74 | new 
Writable({ 75 | write(data, cb) { 76 | t.is(data, 'hello') 77 | cb() 78 | } 79 | }) 80 | ) 81 | w.on('close', () => t.pass('closed')) 82 | }) 83 | 84 | test('piping to a writable + promise', async function (t) { 85 | t.plan(2) 86 | 87 | const r = Readable.from('hello') 88 | let closed = false 89 | r.on('close', () => { 90 | closed = true 91 | }) 92 | await pipelinePromise( 93 | r, 94 | new Writable({ 95 | write(data, cb) { 96 | t.is(data, 'hello') 97 | cb() 98 | } 99 | }) 100 | ) 101 | t.ok(closed) 102 | }) 103 | -------------------------------------------------------------------------------- /test/backpressure.js: -------------------------------------------------------------------------------- 1 | const test = require('brittle') 2 | const { Writable, Readable } = require('../') 3 | 4 | test('write backpressure', function (t) { 5 | const ws = new Writable() 6 | 7 | for (let i = 0; i < 15; i++) { 8 | t.ok(ws.write('a'), 'not backpressured') 9 | t.absent(Writable.isBackpressured(ws), 'static check') 10 | } 11 | 12 | t.absent(ws.write('a'), 'backpressured') 13 | t.ok(Writable.isBackpressured(ws), 'static check') 14 | }) 15 | 16 | test('write backpressure with drain', function (t) { 17 | t.plan(15 * 2 + 2 + 1) 18 | 19 | const ws = new Writable() 20 | 21 | for (let i = 0; i < 15; i++) { 22 | t.ok(ws.write('a'), 'not backpressured') 23 | t.absent(Writable.isBackpressured(ws), 'static check') 24 | } 25 | 26 | t.absent(ws.write('a'), 'backpressured') 27 | t.ok(Writable.isBackpressured(ws), 'static check') 28 | 29 | ws.on('drain', function () { 30 | t.absent(Writable.isBackpressured(ws)) 31 | }) 32 | }) 33 | 34 | test('write backpressure with destroy', function (t) { 35 | const ws = new Writable() 36 | 37 | ws.write('a') 38 | ws.destroy() 39 | 40 | t.ok(Writable.isBackpressured(ws)) 41 | }) 42 | 43 | test('write backpressure with end', function (t) { 44 | const ws = new Writable() 45 | 46 | ws.write('a') 47 | ws.end() 48 | 49 | t.ok(Writable.isBackpressured(ws)) 50 | }) 51 
| 52 | test('read backpressure', function (t) { 53 | const rs = new Readable() 54 | 55 | for (let i = 0; i < 15; i++) { 56 | t.ok(rs.push('a'), 'not backpressured') 57 | t.absent(Readable.isBackpressured(rs), 'static check') 58 | } 59 | 60 | t.absent(rs.push('a'), 'backpressured') 61 | t.ok(Readable.isBackpressured(rs), 'static check') 62 | }) 63 | 64 | test('read backpressure with later read', function (t) { 65 | t.plan(15 * 2 + 2 + 1) 66 | 67 | const rs = new Readable() 68 | 69 | for (let i = 0; i < 15; i++) { 70 | t.ok(rs.push('a'), 'not backpressured') 71 | t.absent(Readable.isBackpressured(rs), 'static check') 72 | } 73 | 74 | t.absent(rs.push('a'), 'backpressured') 75 | t.ok(Readable.isBackpressured(rs), 'static check') 76 | 77 | rs.once('readable', function () { 78 | rs.read() 79 | t.absent(Readable.isBackpressured(rs)) 80 | }) 81 | }) 82 | 83 | test('read backpressure with destroy', function (t) { 84 | const rs = new Readable() 85 | 86 | rs.push('a') 87 | rs.destroy() 88 | 89 | t.ok(Readable.isBackpressured(rs)) 90 | }) 91 | 92 | test('read backpressure with push(null)', function (t) { 93 | const rs = new Readable() 94 | 95 | rs.push('a') 96 | rs.push(null) 97 | 98 | t.ok(Readable.isBackpressured(rs)) 99 | }) 100 | -------------------------------------------------------------------------------- /test/pipe.js: -------------------------------------------------------------------------------- 1 | const test = require('brittle') 2 | const compat = global.Bare ? 
null : require('stream') 3 | const { Readable, Writable } = require('../') 4 | 5 | test('pipe to node stream', { skip: !compat }, function (t) { 6 | t.plan(3) 7 | 8 | const expected = ['hi', 'ho'] 9 | 10 | const r = new Readable() 11 | const w = new compat.Writable({ 12 | objectMode: true, 13 | write(data, enc, cb) { 14 | t.is(data, expected.shift()) 15 | cb(null) 16 | } 17 | }) 18 | 19 | r.push('hi') 20 | r.push('ho') 21 | r.push(null) 22 | 23 | r.pipe(w) 24 | 25 | w.on('finish', function () { 26 | t.is(expected.length, 0) 27 | }) 28 | }) 29 | 30 | test('pipe with callback - error case', function (t) { 31 | t.plan(2) 32 | 33 | const r = new Readable() 34 | const w = new Writable({ 35 | write(data, cb) { 36 | cb(new Error('blerg')) 37 | } 38 | }) 39 | 40 | r.pipe(w, function (err) { 41 | t.pass('callback called') 42 | t.alike(err, new Error('blerg')) 43 | }) 44 | 45 | r.push('hello') 46 | r.push('world') 47 | r.push(null) 48 | }) 49 | 50 | test('pipe with callback - error case with destroy', function (t) { 51 | t.plan(2) 52 | 53 | const r = new Readable() 54 | const w = new Writable({ 55 | write(data, cb) { 56 | w.destroy(new Error('blerg')) 57 | cb(null) 58 | } 59 | }) 60 | 61 | r.pipe(w, function (err) { 62 | t.pass('callback called') 63 | t.alike(err, new Error('blerg')) 64 | }) 65 | 66 | r.push('hello') 67 | r.push('world') 68 | }) 69 | 70 | test('pipe with callback - error case node stream', { skip: !compat }, function (t) { 71 | t.plan(2) 72 | 73 | const r = new Readable() 74 | const w = new compat.Writable({ 75 | write(data, enc, cb) { 76 | cb(new Error('blerg')) 77 | } 78 | }) 79 | 80 | r.pipe(w, function (err) { 81 | t.pass('callback called') 82 | t.alike(err, new Error('blerg')) 83 | }) 84 | 85 | r.push('hello') 86 | r.push('world') 87 | r.push(null) 88 | }) 89 | 90 | test('simple pipe', function (t) { 91 | t.plan(2) 92 | 93 | const buffered = [] 94 | 95 | const r = new Readable() 96 | const w = new Writable({ 97 | write(data, cb) { 98 | 
buffered.push(data) 99 | cb(null) 100 | }, 101 | 102 | final() { 103 | t.pass('final called') 104 | t.alike(buffered, ['hello', 'world']) 105 | } 106 | }) 107 | 108 | r.pipe(w) 109 | 110 | r.push('hello') 111 | r.push('world') 112 | r.push(null) 113 | }) 114 | 115 | test('pipe with callback', function (t) { 116 | t.plan(3) 117 | 118 | const buffered = [] 119 | 120 | const r = new Readable() 121 | const w = new Writable({ 122 | write(data, cb) { 123 | buffered.push(data) 124 | cb(null) 125 | } 126 | }) 127 | 128 | r.pipe(w, function (err) { 129 | t.pass('callback called') 130 | t.is(err, null) 131 | t.alike(buffered, ['hello', 'world']) 132 | }) 133 | 134 | r.push('hello') 135 | r.push('world') 136 | r.push(null) 137 | }) 138 | 139 | test('pipe continues if read is "blocked"', function (t) { 140 | t.plan(1) 141 | 142 | let written = 0 143 | let read = 0 144 | 145 | const r = new Readable({ 146 | read(cb) { 147 | this.push('test') 148 | 149 | if (++read === 20) { 150 | setTimeout(done, 10) 151 | return 152 | } 153 | 154 | cb(null) 155 | } 156 | }) 157 | 158 | const w = new Writable({ 159 | write(data, cb) { 160 | written++ 161 | cb(null) 162 | } 163 | }) 164 | 165 | r.pipe(w) 166 | 167 | function done() { 168 | t.is(written, read) 169 | } 170 | }) 171 | -------------------------------------------------------------------------------- /test/async-iterator.js: -------------------------------------------------------------------------------- 1 | const test = require('brittle') 2 | const { Readable } = require('../') 3 | 4 | test('streams are async iterators', async function (t) { 5 | const data = ['a', 'b', 'c', null] 6 | const expected = data.slice(0) 7 | 8 | const r = new Readable({ 9 | read(cb) { 10 | this.push(data.shift()) 11 | cb(null) 12 | } 13 | }) 14 | 15 | for await (const chunk of r) { 16 | t.is(chunk, expected.shift()) 17 | } 18 | 19 | t.is(expected.shift(), null) 20 | }) 21 | 22 | test('break out of iterator', async function (t) { 23 | const r = new 
Readable({ 24 | read(cb) { 25 | this.push('tick') 26 | cb(null) 27 | }, 28 | destroy(cb) { 29 | t.pass('destroying') 30 | cb(null) 31 | } 32 | }) 33 | 34 | let runs = 10 35 | 36 | for await (const chunk of r) { 37 | t.is(chunk, 'tick') 38 | if (--runs === 0) break 39 | } 40 | }) 41 | 42 | test('throw out of iterator', async function (t) { 43 | const r = new Readable({ 44 | read(cb) { 45 | this.push('tick') 46 | cb(null) 47 | }, 48 | destroy(cb) { 49 | t.pass('destroying') 50 | cb(null) 51 | } 52 | }) 53 | 54 | let runs = 10 55 | 56 | await t.exception(async function () { 57 | for await (const chunk of r) { 58 | t.is(chunk, 'tick') 59 | if (--runs === 0) throw new Error('stop') 60 | } 61 | }) 62 | }) 63 | 64 | test('intertesting timing', async function (t) { 65 | const r = new Readable({ 66 | read(cb) { 67 | setImmediate(() => { 68 | this.push('b') 69 | this.push('c') 70 | this.push(null) 71 | cb(null) 72 | }) 73 | }, 74 | destroy(cb) { 75 | t.pass('destroying') 76 | cb(null) 77 | } 78 | }) 79 | 80 | r.push('a') 81 | 82 | const iterated = [] 83 | 84 | for await (const chunk of r) { 85 | iterated.push(chunk) 86 | await new Promise((resolve) => setTimeout(resolve, 10)) 87 | } 88 | 89 | t.alike(iterated, ['a', 'b', 'c']) 90 | }) 91 | 92 | test('intertesting timing with close', async function (t) { 93 | t.plan(3) 94 | 95 | const r = new Readable({ 96 | read(cb) { 97 | setImmediate(() => { 98 | this.destroy(new Error('stop')) 99 | cb(null) 100 | }) 101 | }, 102 | destroy(cb) { 103 | t.pass('destroying') 104 | cb(null) 105 | } 106 | }) 107 | 108 | r.push('a') 109 | 110 | const iterated = [] 111 | 112 | await t.exception(async function () { 113 | for await (const chunk of r) { 114 | iterated.push(chunk) 115 | await new Promise((resolve) => setTimeout(resolve, 10)) 116 | } 117 | }) 118 | 119 | t.alike(iterated, ['a']) 120 | }) 121 | 122 | test('cleaning up a closed iterator', async function (t) { 123 | const r = new Readable() 124 | r.push('a') 125 | t.plan(1) 126 | 127 | 
const fn = async () => { 128 | for await (const chunk of r) { 129 | // eslint-disable-line 130 | r.destroy() 131 | await new Promise((resolve) => r.once('close', resolve)) 132 | t.is(chunk, 'a') 133 | return 134 | } 135 | } 136 | await fn() 137 | }) 138 | 139 | test('using abort controller', { skip: !!global.Bare }, async function (t) { 140 | function createInfinite(signal) { 141 | let count = 0 142 | const r = new Readable({ signal }) 143 | r.push(count) 144 | const int = setInterval(() => r.push(count++), 5000) 145 | r.once('close', () => clearInterval(int)) 146 | return r 147 | } 148 | const controller = new AbortController() 149 | const inc = [] 150 | setImmediate(() => controller.abort()) 151 | 152 | await t.exception(async function () { 153 | for await (const chunk of createInfinite(controller.signal)) { 154 | inc.push(chunk) 155 | } 156 | }) 157 | 158 | t.alike(inc, [0]) 159 | }) 160 | 161 | test('from async iterator and to async iterator', async function (t) { 162 | const expected = [] 163 | 164 | const stream = Readable.from( 165 | (async function* () { 166 | yield 'a' 167 | yield 'b' 168 | })() 169 | ) 170 | 171 | for await (const data of stream) { 172 | expected.push(data) 173 | } 174 | 175 | t.alike(expected, ['a', 'b']) 176 | }) 177 | -------------------------------------------------------------------------------- /test/compat.js: -------------------------------------------------------------------------------- 1 | const eos = global.Bare ? null : require('end-of-stream') 2 | const test = require('brittle') 3 | const stream = require('../') 4 | const finished = global.Bare ? null : require('stream').finished 5 | 6 | run(eos) 7 | run(finished) 8 | 9 | function run(eos) { 10 | if (!eos) return 11 | const name = eos === finished ? 
'nodeStream.finished' : 'eos' 12 | 13 | test(name + ' readable', function (t) { 14 | t.plan(2) 15 | 16 | const r = new stream.Readable() 17 | let ended = false 18 | 19 | r.on('end', function () { 20 | ended = true 21 | }) 22 | 23 | eos(r, function (err) { 24 | t.absent(err, 'no error') 25 | t.ok(ended) 26 | }) 27 | 28 | r.push('hello') 29 | r.push(null) 30 | r.resume() 31 | }) 32 | 33 | test(name + ' readable destroy', function (t) { 34 | t.plan(2) 35 | 36 | const r = new stream.Readable() 37 | let ended = false 38 | 39 | r.on('end', function () { 40 | ended = true 41 | }) 42 | 43 | eos(r, function (err) { 44 | t.ok(err, 'had error') 45 | t.absent(ended) 46 | }) 47 | 48 | r.push('hello') 49 | r.push(null) 50 | r.resume() 51 | r.destroy() 52 | }) 53 | 54 | test(name + ' writable', function (t) { 55 | t.plan(2) 56 | 57 | const w = new stream.Writable() 58 | let finished = false 59 | 60 | w.on('finish', function () { 61 | finished = true 62 | }) 63 | 64 | eos(w, function (err) { 65 | t.absent(err, 'no error') 66 | t.ok(finished) 67 | }) 68 | 69 | w.write('hello') 70 | w.end() 71 | }) 72 | 73 | test(name + ' writable destroy', function (t) { 74 | t.plan(3) 75 | 76 | const w = new stream.Writable() 77 | let finished = false 78 | 79 | w.on('finish', function () { 80 | finished = true 81 | }) 82 | 83 | eos(w, function (err) { 84 | t.ok(err, 'had error') 85 | t.absent(finished) 86 | }) 87 | 88 | w.write('hello') 89 | t.is(w.end(), w) 90 | w.destroy() 91 | }) 92 | 93 | test(name + ' duplex', function (t) { 94 | t.plan(4) 95 | 96 | const s = new stream.Duplex() 97 | let ended = false 98 | let finished = false 99 | 100 | s.on('end', () => { 101 | ended = true 102 | }) 103 | s.on('finish', () => { 104 | finished = true 105 | }) 106 | 107 | eos(s, function (err) { 108 | t.absent(err, 'no error') 109 | t.ok(ended) 110 | t.ok(finished) 111 | }) 112 | 113 | s.push('hello') 114 | s.push(null) 115 | s.resume() 116 | t.is(s.end(), s) 117 | }) 118 | 119 | test(name + ' duplex + 
deferred s.end()', function (t) { 120 | t.plan(3) 121 | 122 | const s = new stream.Duplex() 123 | let ended = false 124 | let finished = false 125 | 126 | s.on('end', function () { 127 | ended = true 128 | setImmediate(() => s.end()) 129 | }) 130 | 131 | s.on('finish', () => { 132 | finished = true 133 | }) 134 | 135 | eos(s, function (err) { 136 | t.absent(err, 'no error') 137 | t.ok(ended) 138 | t.ok(finished) 139 | }) 140 | 141 | s.push('hello') 142 | s.push(null) 143 | s.resume() 144 | }) 145 | 146 | test(name + ' duplex + deferred s.push(null)', function (t) { 147 | t.plan(3) 148 | 149 | const s = new stream.Duplex() 150 | let ended = false 151 | let finished = false 152 | 153 | s.on('finish', function () { 154 | finished = true 155 | setImmediate(() => s.push(null)) 156 | }) 157 | 158 | s.on('end', () => { 159 | ended = true 160 | }) 161 | 162 | eos(s, function (err) { 163 | t.absent(err, 'no error') 164 | t.ok(ended) 165 | t.ok(finished) 166 | }) 167 | 168 | s.push('hello') 169 | s.end() 170 | s.resume() 171 | }) 172 | 173 | test(name + ' duplex destroy', function (t) { 174 | t.plan(3) 175 | 176 | const s = new stream.Duplex() 177 | let ended = false 178 | let finished = false 179 | 180 | s.on('end', () => { 181 | ended = true 182 | }) 183 | s.on('finish', () => { 184 | finished = true 185 | }) 186 | 187 | eos(s, function (err) { 188 | t.ok(err, 'had error') 189 | t.absent(ended) 190 | t.absent(finished) 191 | }) 192 | 193 | s.push('hello') 194 | s.push(null) 195 | s.resume() 196 | s.end() 197 | s.destroy() 198 | }) 199 | } 200 | -------------------------------------------------------------------------------- /test/writable.js: -------------------------------------------------------------------------------- 1 | const test = require('brittle') 2 | const { Writable, Duplex } = require('../') 3 | 4 | test('opens before writes', function (t) { 5 | t.plan(2) 6 | 7 | const trace = [] 8 | const stream = new Writable({ 9 | open(cb) { 10 | trace.push('open') 11 | 
return cb(null) 12 | }, 13 | write(data, cb) { 14 | trace.push('write') 15 | return cb(null) 16 | } 17 | }) 18 | stream.on('close', () => { 19 | t.is(trace.length, 2) 20 | t.is(trace[0], 'open') 21 | }) 22 | stream.write('data') 23 | stream.end() 24 | }) 25 | 26 | test('drain', function (t) { 27 | t.plan(2) 28 | 29 | const stream = new Writable({ 30 | highWaterMark: 1, 31 | write(data, cb) { 32 | cb(null) 33 | } 34 | }) 35 | 36 | t.absent(stream.write('a')) 37 | stream.on('drain', function () { 38 | t.pass('drained') 39 | }) 40 | }) 41 | 42 | test('drain multi write', function (t) { 43 | t.plan(4) 44 | 45 | const stream = new Writable({ 46 | highWaterMark: 1, 47 | write(data, cb) { 48 | cb(null) 49 | } 50 | }) 51 | 52 | t.absent(stream.write('a')) 53 | t.absent(stream.write('a')) 54 | t.absent(stream.write('a')) 55 | stream.on('drain', function () { 56 | t.pass('drained') 57 | }) 58 | }) 59 | 60 | test('drain async write', function (t) { 61 | t.plan(3) 62 | 63 | let flushed = false 64 | 65 | const stream = new Writable({ 66 | highWaterMark: 1, 67 | write(data, cb) { 68 | setImmediate(function () { 69 | flushed = true 70 | cb(null) 71 | }) 72 | } 73 | }) 74 | 75 | t.absent(stream.write('a')) 76 | t.absent(flushed) 77 | stream.on('drain', function () { 78 | t.ok(flushed) 79 | }) 80 | }) 81 | 82 | test('writev', function (t) { 83 | t.plan(3) 84 | 85 | const expected = [[], ['ho']] 86 | 87 | const s = new Writable({ 88 | writev(batch, cb) { 89 | t.alike(batch, expected.shift()) 90 | cb(null) 91 | } 92 | }) 93 | 94 | for (let i = 0; i < 100; i++) { 95 | expected[0].push('hi-' + i) 96 | s.write('hi-' + i) 97 | } 98 | 99 | s.on('drain', function () { 100 | s.write('ho') 101 | s.end() 102 | }) 103 | 104 | s.on('finish', function () { 105 | t.pass('finished') 106 | }) 107 | }) 108 | 109 | test('map written data', function (t) { 110 | t.plan(2) 111 | 112 | const r = new Writable({ 113 | write(data, cb) { 114 | t.is(data, '{"foo":1}') 115 | cb() 116 | }, 117 | map: (input) => 
JSON.stringify(input) 118 | }) 119 | r.on('finish', () => { 120 | t.pass('finished') 121 | }) 122 | r.write({ foo: 1 }) 123 | r.end() 124 | }) 125 | 126 | test('use mapWritable to map data', function (t) { 127 | t.plan(2) 128 | 129 | const r = new Writable({ 130 | write(data, cb) { 131 | t.is(data, '{"foo":1}') 132 | cb() 133 | }, 134 | map: () => t.fail('.mapWritable has priority'), 135 | mapWritable: (input) => JSON.stringify(input) 136 | }) 137 | r.on('finish', () => { 138 | t.pass('finished') 139 | }) 140 | r.write({ foo: 1 }) 141 | r.end() 142 | }) 143 | 144 | test('many ends', function (t) { 145 | t.plan(2) 146 | 147 | let finals = 0 148 | let finish = 0 149 | 150 | const s = new Duplex({ 151 | final(cb) { 152 | finals++ 153 | cb(null) 154 | } 155 | }) 156 | 157 | s.end() 158 | queueMicrotask(() => { 159 | s.end() 160 | queueMicrotask(() => { 161 | s.end() 162 | }) 163 | }) 164 | 165 | s.on('finish', function () { 166 | finish++ 167 | t.is(finals, 1) 168 | t.is(finish, 1) 169 | }) 170 | }) 171 | 172 | test('drained helper', async function (t) { 173 | const w = new Writable({ 174 | write(data, cb) { 175 | setImmediate(cb) 176 | } 177 | }) 178 | 179 | for (let i = 0; i < 20; i++) w.write('hi') 180 | 181 | await Writable.drained(w) 182 | 183 | t.is(w._writableState.queue.length, 0) 184 | 185 | for (let i = 0; i < 20; i++) w.write('hi') 186 | 187 | const d1 = Writable.drained(w) 188 | 189 | for (let i = 0; i < 20; i++) w.write('hi') 190 | 191 | const d2 = Writable.drained(w) 192 | 193 | d1.then(() => { 194 | t.not(w._writableState.queue.length, 0, 'future writes are queued') 195 | }) 196 | 197 | d2.then(() => { 198 | t.is(w._writableState.queue.length, 0, 'all drained now') 199 | }) 200 | 201 | await d1 202 | await d2 203 | 204 | await Writable.drained(w) 205 | 206 | t.pass('works if no writes are pending') 207 | 208 | for (let i = 0; i < 20; i++) w.write('hi') 209 | 210 | const d3 = Writable.drained(w) 211 | w.destroy() 212 | 213 | t.absent(await d3) 214 | 
t.absent(await Writable.drained(w), 'already destroyed') 215 | }) 216 | 217 | test('drained helper, duplex', async function (t) { 218 | const w = new Duplex({ 219 | write(data, cb) { 220 | setImmediate(cb) 221 | } 222 | }) 223 | 224 | for (let i = 0; i < 20; i++) w.write('hi') 225 | 226 | await Writable.drained(w) 227 | 228 | t.is(w._writableState.queue.length, 0) 229 | 230 | for (let i = 0; i < 20; i++) w.write('hi') 231 | 232 | const d1 = Writable.drained(w) 233 | 234 | for (let i = 0; i < 20; i++) w.write('hi') 235 | 236 | const d2 = Writable.drained(w) 237 | 238 | d1.then(() => { 239 | t.not(w._writableState.queue.length, 0, 'future writes are queued') 240 | }) 241 | 242 | d2.then(() => { 243 | t.is(w._writableState.queue.length, 0, 'all drained now') 244 | }) 245 | 246 | await d1 247 | await d2 248 | 249 | await Writable.drained(w) 250 | 251 | t.pass('works if no writes are pending') 252 | 253 | for (let i = 0; i < 20; i++) w.write('hi') 254 | 255 | const d3 = Writable.drained(w) 256 | w.destroy() 257 | 258 | t.absent(await d3) 259 | t.absent(await Writable.drained(w), 'already destroyed') 260 | }) 261 | 262 | test('drained helper, inflight write', async function (t) { 263 | let writing = false 264 | const w = new Writable({ 265 | write(data, cb) { 266 | writing = true 267 | setImmediate(() => { 268 | setImmediate(() => { 269 | writing = false 270 | cb() 271 | }) 272 | }) 273 | } 274 | }) 275 | 276 | w.write('hello') 277 | w.end() 278 | 279 | await new Promise((resolve) => setImmediate(resolve)) 280 | t.ok(writing, 'is writing') 281 | await Writable.drained(w) 282 | t.absent(writing, 'not writing') 283 | }) 284 | 285 | test('drained helper, writev', async function (t) { 286 | let writing = 0 287 | let continueWrite 288 | 289 | const wrote = new Promise((resolve) => { 290 | continueWrite = resolve 291 | }) 292 | 293 | const w = new Writable({ 294 | writev(datas, cb) { 295 | continueWrite() 296 | setImmediate(() => { 297 | writing -= datas.length 298 | cb() 299 
| }) 300 | } 301 | }) 302 | 303 | for (let i = 0; i < 10; i++) { 304 | writing++ 305 | w.write('hello') 306 | } 307 | 308 | w.end() 309 | 310 | await wrote 311 | t.ok(writing > 0, 'is writing') 312 | await Writable.drained(w) 313 | t.ok(writing === 0, 'not writing') 314 | }) 315 | 316 | test('drained helper, writev, already flushed', async function (t) { 317 | const w = new Writable({ 318 | writev(datas, cb) { 319 | cb() 320 | } 321 | }) 322 | 323 | await Writable.drained(w) 324 | t.pass('drained resovled') 325 | }) 326 | 327 | test('can cork and uncork the stream', async function (t) { 328 | const w = new Writable({ 329 | writev(batch, cb) { 330 | t.alike(batch, [1, 2, 3]) 331 | cb(null) 332 | } 333 | }) 334 | 335 | w.cork() 336 | w.write(1) 337 | await eventFlush() 338 | w.write(2) 339 | await eventFlush() 340 | w.write(3) 341 | w.uncork() 342 | 343 | await Writable.drained(w) 344 | }) 345 | 346 | function eventFlush() { 347 | return new Promise((resolve) => setImmediate(resolve)) 348 | } 349 | -------------------------------------------------------------------------------- /test/readable.js: -------------------------------------------------------------------------------- 1 | const test = require('brittle') 2 | const b4a = require('b4a') 3 | const { Readable, isDisturbed } = require('../') 4 | 5 | test('ondata', function (t) { 6 | t.plan(4) 7 | 8 | const r = new Readable() 9 | const buffered = [] 10 | let ended = 0 11 | 12 | r.push('hello') 13 | r.push('world') 14 | r.push(null) 15 | 16 | r.on('data', (data) => buffered.push(data)) 17 | r.on('end', () => ended++) 18 | r.on('close', function () { 19 | t.pass('closed') 20 | t.alike(buffered, ['hello', 'world']) 21 | t.is(ended, 1) 22 | t.ok(r.destroyed) 23 | }) 24 | }) 25 | 26 | test('pause', async function (t) { 27 | const r = new Readable() 28 | const buffered = [] 29 | t.is(Readable.isPaused(r), true, 'starting off paused') 30 | r.on('data', (data) => buffered.push(data)) 31 | r.on('close', () => t.end()) 32 | 
r.push('hello') 33 | await nextImmediate() 34 | t.is(r.pause(), r, '.pause() returns self') 35 | t.is(Readable.isPaused(r), true, '.pause() marks stream as paused') 36 | r.push('world') 37 | await nextImmediate() 38 | t.alike(buffered, ['hello'], '.pause() prevents data to be read') 39 | t.is(r.resume(), r, '.resume() returns self') 40 | t.is(Readable.isPaused(r), false, '.resume() marks stream as resumed') 41 | await nextImmediate() 42 | t.alike(buffered, ['hello', 'world']) 43 | r.push(null) 44 | }) 45 | 46 | test('resume', function (t) { 47 | t.plan(3) 48 | 49 | const r = new Readable() 50 | let ended = 0 51 | 52 | r.push('hello') 53 | r.push('world') 54 | r.push(null) 55 | 56 | r.resume() 57 | r.on('end', () => ended++) 58 | r.on('close', function () { 59 | t.pass('closed') 60 | t.is(ended, 1) 61 | t.ok(r.destroyed) 62 | }) 63 | }) 64 | 65 | test('lazy open', async function (t) { 66 | let opened = false 67 | const r = new Readable({ 68 | open(cb) { 69 | opened = true 70 | cb(null) 71 | } 72 | }) 73 | await nextImmediate() 74 | t.absent(opened) 75 | r.read() 76 | await nextImmediate() 77 | t.ok(opened) 78 | }) 79 | 80 | test('eager open', async function (t) { 81 | let opened = false 82 | const r = new Readable({ 83 | open(cb) { 84 | opened = true 85 | cb(null) 86 | }, 87 | eagerOpen: true 88 | }) 89 | await nextImmediate() 90 | t.ok(opened) 91 | r.push(null) 92 | }) 93 | 94 | test('shorthands', function (t) { 95 | t.plan(3) 96 | 97 | const r = new Readable({ 98 | read(cb) { 99 | this.push('hello') 100 | cb(null) 101 | }, 102 | destroy(cb) { 103 | t.pass('destroyed') 104 | cb(null) 105 | } 106 | }) 107 | 108 | r.once('readable', function () { 109 | t.is(r.read(), 'hello') 110 | r.destroy() 111 | t.is(r.read(), null) 112 | }) 113 | }) 114 | 115 | test('both push and the cb needs to be called for re-reads', function (t) { 116 | t.plan(2) 117 | 118 | let once = true 119 | 120 | const r = new Readable({ 121 | read(cb) { 122 | t.ok(once, 'read called once') 123 | once 
= false 124 | cb(null) 125 | } 126 | }) 127 | 128 | r.resume() 129 | 130 | setTimeout(function () { 131 | once = true 132 | r.push('hi') 133 | }, 100) 134 | }) 135 | 136 | test('from array', function (t) { 137 | t.plan(1) 138 | 139 | const inc = [] 140 | const r = Readable.from([1, 2, 3]) 141 | r.on('data', (data) => inc.push(data)) 142 | r.on('end', function () { 143 | t.alike(inc, [1, 2, 3]) 144 | }) 145 | }) 146 | 147 | test('from buffer', function (t) { 148 | t.plan(1) 149 | 150 | const inc = [] 151 | const r = Readable.from(Buffer.from('hello')) 152 | r.on('data', (data) => inc.push(data)) 153 | r.on('end', function () { 154 | t.alike(inc, [Buffer.from('hello')]) 155 | }) 156 | }) 157 | 158 | test('from async iterator', function (t) { 159 | t.plan(1) 160 | 161 | async function* test() { 162 | yield 1 163 | yield 2 164 | yield 3 165 | } 166 | 167 | const inc = [] 168 | const r = Readable.from(test()) 169 | r.on('data', (data) => inc.push(data)) 170 | r.on('end', function () { 171 | t.alike(inc, [1, 2, 3]) 172 | }) 173 | }) 174 | 175 | test('from array with highWaterMark', function (t) { 176 | const r = Readable.from([1, 2, 3], { highWaterMark: 1 }) 177 | t.is(r._readableState.highWaterMark, 1) 178 | }) 179 | 180 | test('from async iterator with highWaterMark', function (t) { 181 | async function* test() { 182 | yield 1 183 | } 184 | 185 | const r = Readable.from(test(), { highWaterMark: 1 }) 186 | t.is(r._readableState.highWaterMark, 1) 187 | }) 188 | 189 | test('unshift', async function (t) { 190 | const r = new Readable() 191 | r.pause() 192 | r.push(1) 193 | r.push(2) 194 | r.unshift(0) 195 | r.push(null) 196 | const inc = [] 197 | for await (const entry of r) { 198 | inc.push(entry) 199 | } 200 | t.alike(inc, [0, 1, 2]) 201 | }) 202 | 203 | test('from readable should return the original readable', function (t) { 204 | const r = new Readable() 205 | t.is(Readable.from(r), r) 206 | }) 207 | 208 | test('map readable data', async function (t) { 209 | const r = 
new Readable({ 210 | map: (input) => JSON.parse(input) 211 | }) 212 | r.push('{ "foo": 1 }') 213 | for await (const obj of r) { 214 | // eslint-disable-line 215 | t.alike(obj, { foo: 1 }) 216 | break 217 | } 218 | }) 219 | 220 | test('use mapReadable to map data', async function (t) { 221 | const r = new Readable({ 222 | map: () => t.fail('.mapReadable has priority'), 223 | mapReadable: (input) => JSON.parse(input) 224 | }) 225 | r.push('{ "foo": 1 }') 226 | for await (const obj of r) { 227 | // eslint-disable-line 228 | t.alike(obj, { foo: 1 }) 229 | break 230 | } 231 | }) 232 | 233 | test('live stream', function (t) { 234 | t.plan(3) 235 | 236 | const r = new Readable({ 237 | read(cb) { 238 | this.push('data') 239 | this.push('data') 240 | this.push('data') 241 | // assume cb is called way later 242 | } 243 | }) 244 | 245 | r.on('data', function (data) { 246 | t.is(data, 'data') 247 | }) 248 | }) 249 | 250 | test('live stream with readable', function (t) { 251 | t.plan(3) 252 | 253 | const r = new Readable({ 254 | read(cb) { 255 | this.push('data') 256 | this.push('data') 257 | this.push('data') 258 | // assume cb is called way later 259 | } 260 | }) 261 | 262 | r.on('readable', function () { 263 | let data 264 | while ((data = r.read()) !== null) t.is(data, 'data') 265 | }) 266 | }) 267 | 268 | test('resume a stalled stream', function (t) { 269 | t.plan(1) 270 | 271 | const expected = [] 272 | let once = true 273 | 274 | const r = new Readable({ 275 | read(cb) { 276 | if (once) { 277 | once = false 278 | this.push('data') 279 | expected.push('data') 280 | return cb() 281 | } 282 | 283 | for (let i = 0; i < 20; i++) { 284 | this.push('data') 285 | expected.push('data') 286 | } 287 | 288 | // pretend its stalled 289 | } 290 | }) 291 | 292 | const collected = [] 293 | 294 | r.once('data', function (data) { 295 | r.pause() 296 | collected.push(data) 297 | setImmediate(() => { 298 | r.on('data', function (data) { 299 | collected.push(data) 300 | if (collected.length 
=== 21) { 301 | t.alike(collected, expected) 302 | } 303 | }) 304 | r.resume() 305 | }) 306 | }) 307 | }) 308 | 309 | test('no read-ahead with pause/resume', function (t) { 310 | t.plan(4) 311 | 312 | let tick = 0 313 | 314 | const r = new Readable({ 315 | highWaterMark: 0, 316 | read(cb) { 317 | this.push('tick: ' + ++tick) 318 | cb() 319 | } 320 | }) 321 | 322 | r.once('data', function () { 323 | t.is(tick, 1) 324 | r.pause() 325 | setImmediate(() => { 326 | t.is(tick, 1) 327 | r.resume() 328 | r.once('data', function () { 329 | t.is(tick, 2) 330 | r.pause() 331 | setImmediate(() => { 332 | t.is(tick, 2) 333 | }) 334 | }) 335 | }) 336 | }) 337 | }) 338 | 339 | test('no read-ahead with async iterator', async function (t) { 340 | let tick = 0 341 | 342 | const r = new Readable({ 343 | highWaterMark: 0, 344 | read(cb) { 345 | this.push('tick: ' + ++tick) 346 | if (tick === 10) this.push(null) 347 | cb() 348 | } 349 | }) 350 | 351 | let expectedTick = 0 352 | for await (const data of r) { 353 | t.is(tick, ++expectedTick) 354 | t.is(data, 'tick: ' + tick) 355 | await nextImmediate() 356 | } 357 | 358 | t.is(expectedTick, 10) 359 | }) 360 | 361 | test('setEncoding', async function (t) { 362 | const r = new Readable() 363 | 364 | r.setEncoding('utf-8') 365 | const buffer = b4a.from('hællå wørld!') 366 | for (let i = 0; i < buffer.byteLength; i++) { 367 | r.push(buffer.subarray(i, i + 1)) 368 | } 369 | r.push(null) 370 | const expected = b4a.toString(buffer).split('') 371 | for await (const data of r) { 372 | t.is(data, expected.shift()) 373 | } 374 | t.is(expected.length, 0) 375 | }) 376 | 377 | test('setEncoding respects existing map', async function (t) { 378 | t.plan(1) 379 | 380 | const r = new Readable({ 381 | encoding: 'utf-8', 382 | map(data) { 383 | return JSON.parse(data) 384 | } 385 | }) 386 | 387 | r.push('{"hello":"world"}') 388 | r.once('data', function (data) { 389 | t.alike(data, { hello: 'world' }) 390 | }) 391 | }) 392 | 393 | test('setEncoding empty 
string', async function (t) { 394 | t.plan(1) 395 | 396 | const r = new Readable() 397 | 398 | r.setEncoding('utf-8') 399 | const buffer = b4a.from('') 400 | r.push(buffer) 401 | r.push(null) 402 | 403 | for await (const data of r) { 404 | t.is(data, '') 405 | } 406 | }) 407 | 408 | test('is disturbed', function (t) { 409 | const r = new Readable() 410 | t.is(isDisturbed(r), false) 411 | 412 | r.push('hello') 413 | t.is(isDisturbed(r), false) 414 | 415 | r.resume() 416 | t.is(isDisturbed(r), true) 417 | 418 | r.pause() 419 | t.is(isDisturbed(r), true) 420 | }) 421 | 422 | function nextImmediate() { 423 | return new Promise((resolve) => setImmediate(resolve)) 424 | } 425 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # streamx 2 | 3 | An iteration of the Node.js core streams with a series of improvements. 4 | 5 | ``` 6 | npm install streamx 7 | ``` 8 | 9 | [![Build Status](https://github.com/streamxorg/streamx/workflows/Build%20Status/badge.svg)](https://github.com/streamxorg/streamx/actions?query=workflow%3A%22Build+Status%22) 10 | 11 | ## Main improvements from Node.js core stream 12 | 13 | #### Proper lifecycle support. 14 | 15 | Streams have an `_open` function that is called before any read/write operation and a `_destroy` 16 | function that is always run as the last part of the stream. 17 | 18 | This makes it easy to maintain state. 19 | 20 | #### Easy error handling 21 | 22 | Fully integrates a `.destroy()` function. When called the stream will wait for any 23 | pending operation to finish and call the stream destroy logic. 24 | 25 | Close is _always_ the last event emitted and `destroy` is always run. 26 | 27 | #### `pipe()` error handles 28 | 29 | `pipe` accepts a callback that is called when the pipeline is fully drained. 30 | It also error handles the streams provided and destroys both streams if either 31 | of them fail. 
32 | 33 | #### All streams are both binary and object mode streams 34 | 35 | A `map` function can be provided to map your input data into buffers 36 | or other formats. To indicate how much buffer space each data item takes 37 | a `byteLength` function can be provided as well. 38 | 39 | This removes the need for two modes of streams. 40 | 41 | #### Simplicity 42 | 43 | This is a full rewrite, all contained in one file. 44 | 45 | Lots of stream methods are simplified based on how I and devs I work with actually use streams in the wild. 46 | 47 | #### Backwards compat 48 | 49 | streamx aims to be compatible with Node.js streams whenever it is reasonable to do so. 50 | 51 | This means that streamx streams behave a lot like Node.js streams from the outside but still provides the 52 | improvements above. 53 | 54 | #### Smaller browser footprint 55 | 56 | streamx has a much smaller footprint when compiled for the browser: 57 | 58 | ``` 59 | $ for x in stream{,x}; do echo $x: $(browserify -r $x | wc -c) bytes; done 60 | stream: 173844 bytes 61 | streamx: 46943 bytes 62 | ``` 63 | 64 | With optimizations turned on, the difference is even more stark: 65 | 66 | ``` 67 | $ for x in stream{,x}; do echo $x: $(browserify -r $x -p tinyify | wc -c) bytes; done 68 | stream: 62649 bytes 69 | streamx: 8460 bytes 70 | $ for x in stream{,x}; do echo $x: $(browserify -r $x -p tinyify | gzip | wc -c) "bytes (gzipped)"; done 71 | stream: 18053 bytes (gzipped) 72 | streamx: 2806 bytes (gzipped) 73 | ``` 74 | 75 | #### AbortSignal support 76 | 77 | To make it easier to integrate streams in an `async/await` flow, all streams support a `signal` option 78 | that accepts an [`AbortSignal`](https://developer.mozilla.org/en-US/docs/Web/API/AbortSignal) as an 79 | alternative means to `.destroy` streams. 
80 | 81 | ## Usage 82 | 83 | ```js 84 | const { Readable } = require('streamx') 85 | 86 | const rs = new Readable({ 87 | read(cb) { 88 | this.push('Cool data') 89 | cb(null) 90 | } 91 | }) 92 | 93 | rs.on('data', (data) => console.log('data:', data)) 94 | ``` 95 | 96 | ## API 97 | 98 | This streamx package contains 4 streams similar to Node.js core. 99 | 100 | ## Readable Stream 101 | 102 | #### `rs = new stream.Readable([options])` 103 | 104 | Create a new readable stream. 105 | 106 | Options include: 107 | 108 | ``` 109 | { 110 | highWaterMark: 16384, // max buffer size in bytes 111 | map: (data) => data, // optional function to map input data 112 | byteLength: (data) => size, // optional function that calculates the byte size of input data 113 | signal: abortController.signal, // optional AbortSignal that triggers `.destroy` when on `abort` 114 | eagerOpen: false // eagerly open the stream 115 | } 116 | ``` 117 | 118 | In addition you can pass the `open`, `read`, and `destroy` functions as shorthands in 119 | the constructor instead of overwrite the methods below. 120 | 121 | The default byteLength function returns the byte length of buffers and `1024` 122 | for any other object. This means the buffer will contain around 16 non buffers 123 | or buffers worth 16kb when full if the defaults are used. 124 | 125 | If you set highWaterMark to `0` then all read ahead buffering on the stream 126 | is disabled and it will only call `_read` when a user reads rather than ahead of time. 127 | 128 | #### `rs._read(cb)` 129 | 130 | This function is called when the stream wants you to push new data. 131 | Overwrite this and add your own read logic. 132 | You should call the callback when you are fully done with the read. 133 | 134 | Can also be set using `options.read` in the constructor. 135 | 136 | Note that this function differs from Node.js streams in that it takes 137 | the "read finished" callback. 
138 | 139 | #### `drained = rs.push(data)` 140 | 141 | Push new data to the stream. Returns true if the buffer is not full 142 | and you should push more data if you can. 143 | 144 | If you call `rs.push(null)` you signal to the stream that no more 145 | data will be pushed and that you want to end the stream. 146 | 147 | #### `data = rs.read()` 148 | 149 | Read a piece of data from the stream buffer. If the buffer is currently empty 150 | `null` will be returned and you should wait for `readable` to be emitted before 151 | trying again. If the stream has been ended it will also return `null`. 152 | 153 | Note that this method differs from Node.js streams in that it does not accept 154 | an optional amount of bytes to consume. 155 | 156 | #### `rs.unshift(data)` 157 | 158 | Add a piece of data to the front of the buffer. Use this if you read too much 159 | data using the `rs.read()` function. 160 | 161 | #### `rs._open(cb)` 162 | 163 | This function is called once before the first read is issued. Use this function 164 | to implement your own open logic. 165 | 166 | Can also be set using `options.open` in the constructor. 167 | 168 | #### `rs._destroy(cb)` 169 | 170 | This function is called just before the stream is fully destroyed. You should 171 | use this to implement whatever teardown logic you need. The final part of the 172 | stream life cycle is always to call destroy itself so this function will always 173 | be called whether or not the stream ends gracefully or forcefully. 174 | 175 | Can also be set using `options.destroy` in the constructor. 176 | 177 | Note that the `_destroy` might be called without the open function being called 178 | in case no read was ever performed on the stream. 179 | 180 | #### `rs._predestroy()` 181 | 182 | A simple hook that is called as soon as the first `stream.destroy()` call is invoked. 183 | 184 | Use this in case you need to cancel pending reads (if possible) instead of waiting for them to finish. 
185 | 186 | Can also be set using `options.predestroy` in the constructor. 187 | 188 | #### `rs.destroy([error])` 189 | 190 | Forcefully destroy the stream. Will call `_destroy` as soon as all pending reads have finished. 191 | Once the stream is fully destroyed `close` will be emitted. 192 | 193 | If you pass an error this error will be emitted just before `close` is, signifying a reason 194 | as to why this stream was destroyed. 195 | 196 | #### `rs.pause()` 197 | 198 | Pauses the stream. You will only need to call this if you want to pause a resumed stream. 199 | 200 | Returns this stream instance. 201 | 202 | #### `rs.resume()` 203 | 204 | Will start reading data from the stream as fast as possible. 205 | 206 | If you do not call this, you need to use the `read()` method to read data or the `pipe()` method to 207 | pipe the stream somewhere else or the `data` handler. 208 | 209 | If none of these options are used the stream will stay paused. 210 | 211 | Returns this stream instance. 212 | 213 | #### `bool = Readable.isPaused(rs)` 214 | 215 | Returns `true` if the stream is paused, else `false`. 216 | 217 | #### `writableStream = rs.pipe(writableStream, [callback])` 218 | 219 | Efficiently pipe the readable stream to a writable stream (can be Node.js core stream or a stream from this package). 220 | If you provide a callback the callback is called when the pipeline has fully finished with an optional error in case 221 | it failed. 222 | 223 | To cancel the pipeline destroy either of the streams. 224 | 225 | #### `rs.on('readable')` 226 | 227 | Emitted when data is pushed to the stream if the buffer was previously empty. 228 | 229 | #### `rs.on('data', data)` 230 | 231 | Emitted when data is being read from the stream. If you attach a data handler you are implicitly resuming the stream. 232 | 233 | #### `rs.on('end')` 234 | 235 | Emitted when the readable stream has ended and no data is left in its buffer. 
236 | 237 | #### `rs.on('close')` 238 | 239 | Emitted when the readable stream has fully closed (i.e. its destroy function has completed) 240 | 241 | #### `rs.on('error', err)` 242 | 243 | Emitted if any of the stream operations fail with an error. `close` is always emitted right after this. 244 | 245 | #### `rs.on('piping', dest)` 246 | 247 | Emitted when the readable stream is piping to a destination. 248 | 249 | #### `rs.destroyed` 250 | 251 | Boolean property indicating whether or not this stream has been destroyed. 252 | 253 | #### `bool = Readable.isBackpressured(rs)` 254 | 255 | Static method to check if a readable stream is currently under backpressure. 256 | 257 | #### `stream = Readable.from(arrayOrBufferOrStringOrAsyncIterator)` 258 | 259 | Static method to turn an array or buffer or string or AsyncIterator into a readable stream. 260 | 261 | ## Writable Stream 262 | 263 | #### `ws = new stream.Writable([options])` 264 | 265 | Create a new writable stream. 266 | 267 | Options include: 268 | 269 | ``` 270 | { 271 | highWaterMark: 16384, // max buffer size in bytes 272 | map: (data) => data, // optional function to map input data 273 | byteLength: (data) => size, // optional function that calculates the byte size of input data 274 | signal: abortController.signal // optional AbortSignal that triggers `.destroy` when on `abort` 275 | } 276 | ``` 277 | 278 | In addition you can pass the `open`, `write`, `final`, and `destroy` functions as shorthands in 279 | the constructor instead of overwriting the methods below. 280 | 281 | The default byteLength function returns the byte length of buffers and `1024` 282 | for any other object. This means the buffer will contain around 16 non buffers 283 | or buffers worth 16kb when full if the defaults are used. 284 | 285 | #### `ws._open(cb)` 286 | 287 | This function is called once before the first write is issued. Use this function 288 | to implement your own open logic. 
289 | 290 | Can also be set using `options.open` in the constructor. 291 | 292 | #### `ws._destroy(cb)` 293 | 294 | This function is called just before the stream is fully destroyed. You should 295 | use this to implement whatever teardown logic you need. The final part of the 296 | stream life cycle is always to call destroy itself so this function will always 297 | be called whether or not the stream ends gracefully or forcefully. 298 | 299 | Can also be set using `options.destroy` in the constructor. 300 | 301 | Note that the `_destroy` might be called without the open function being called 302 | in case no write was ever performed on the stream. 303 | 304 | #### `ws._predestroy()` 305 | 306 | A simple hook that is called as soon as the first `stream.destroy()` call is invoked. 307 | 308 | Use this in case you need to cancel pending writes (if possible) instead of waiting for them to finish. 309 | 310 | Can also be set using `options.predestroy` in the constructor. 311 | 312 | #### `ws.destroy([error])` 313 | 314 | Forcefully destroy the stream. Will call `_destroy` as soon as all pending writes have finished. 315 | Once the stream is fully destroyed `close` will be emitted. 316 | 317 | If you pass an error this error will be emitted just before `close` is, signifying a reason 318 | as to why this stream was destroyed. 319 | 320 | #### `drained = ws.write(data)` 321 | 322 | Write a piece of data to the stream. Returns `true` if the stream buffer is not full and you 323 | should keep writing to it if you can. If `false` is returned the stream will emit `drain` 324 | once its buffer is fully drained. 325 | 326 | #### `ws._write(data, callback)` 327 | 328 | This function is called when the stream wants to write some data. Use this to implement your own 329 | write logic. When done call the callback and the stream will call it again if more data exists in the buffer. 330 | 331 | Can also be set using `options.write` in the constructor. 
332 | 333 | #### `ws._writev(batch, callback)` 334 | 335 | Similar to `_write` but passes an array of all data in the current write buffer instead of the oldest one. 336 | Useful if the destination you are writing the data to supports batching. 337 | 338 | Can also be set using `options.writev` in the constructor. 339 | 340 | #### `ws.end()` 341 | 342 | Gracefully end the writable stream. Call this when you no longer want to write to the stream. 343 | 344 | Once all writes have been fully drained `finish` will be emitted. 345 | 346 | Returns this stream instance. 347 | 348 | #### `ws._final(callback)` 349 | 350 | This function is called just before `finish` is emitted, i.e. when all writes have flushed and `ws.end()` 351 | has been called. Use this to implement any logic that should happen after all writes but before finish. 352 | 353 | Can also be set using `options.final` in the constructor. 354 | 355 | #### `ws.on('finish')` 356 | 357 | Emitted when the stream has been ended and all writes have been drained. 358 | 359 | #### `ws.on('close')` 360 | 361 | Emitted when the writable stream has fully closed (i.e. its destroy function has completed) 362 | 363 | #### `ws.on('error', err)` 364 | 365 | Emitted if any of the stream operations fail with an error. `close` is always emitted right after this. 366 | 367 | #### `ws.on('pipe', src)` 368 | 369 | Emitted when a readable stream is being piped to the writable one. 370 | 371 | #### `ws.destroyed` 372 | 373 | Boolean property indicating whether or not this stream has been destroyed. 374 | 375 | #### `bool = Writable.isBackpressured(ws)` 376 | 377 | Static method to check if a writable stream is currently under backpressure. 378 | 379 | #### `bool = await Writable.drained(ws)` 380 | 381 | Static helper to wait for a stream to drain the currently queued writes. 382 | Returns true if they were drained and false otherwise if the stream was destroyed. 
383 | 384 | ## Duplex Stream 385 | 386 | #### `s = new stream.Duplex([options])` 387 | 388 | A duplex stream is a stream that is both readable and writable. 389 | 390 | Since JS does not support multiple inheritance it inherits directly from Readable 391 | but implements the Writable API as well. 392 | 393 | If you want to provide only a map function for the readable side use `mapReadable` instead. 394 | If you want to provide only a byteLength function for the readable side use `byteLengthReadable` instead. 395 | 396 | Same goes for the writable side but using `mapWritable` and `byteLengthWritable` instead. 397 | 398 | ## Transform Stream 399 | 400 | A Transform stream is a duplex stream with an `._transform` template method that allows you to 401 | asynchronously map the input to a different output. 402 | 403 | The transform stream overrides the `_write` and `_read` operations of `Readable` and `Writable` but 404 | still allows the setting of these options in the constructor. Usually it is unnecessary to pass 405 | in `read` or `write`/`writev` or to override the corresponding `._read`, `._write` or `._writev` operation. 406 | 407 | #### `ts = new stream.Transform([options])` 408 | 409 | A transform stream is a duplex stream that maps the data written to it and emits that as readable data. 410 | 411 | Has the same options as a duplex stream except you can provide a `transform` function also. 412 | 413 | #### `ts._transform(data, callback)` 414 | 415 | Transform the incoming data. Call `callback(null, mappedData)` or use `ts.push(mappedData)` to 416 | return data to the readable side of the stream. 417 | 418 | By default the transform function simply re-emits the incoming data, making it act as a pass-through stream. 419 | 420 | ## Pipeline 421 | 422 | `pipeline` allows streaming from a readable through a set of duplex streams to a writable entry. 
423 | 424 | ```js 425 | const { pipeline, Readable, Transform, Writable } = require('streamx') 426 | const lastStream = pipeline( 427 | Readable.from([1, 2, 3]), 428 | new Transform({ 429 | transform (from, cb) { 430 | this.push(from.toString()) 431 | cb() 432 | } 433 | }), 434 | new Writable({ 435 | write (data, cb) { 436 | console.log(data) 437 | cb() 438 | } 439 | }), 440 | error => { 441 | // Callback once write has finished 442 | } 443 | ) 444 | ``` 445 | 446 | #### `lastStream = stream.pipeline(...streams, [done])` 447 | 448 | Pipe all streams together and return the last stream piped to. 449 | When the last stream finishes the pipeline ended successfully. 450 | 451 | If any of the streams error, whether they are Node.js core streams 452 | or streamx streams, all streams in the pipeline are shutdown. 453 | 454 | Optionally you can pass a done callback to know when the pipeline is done. 455 | 456 | #### `promise = stream.pipelinePromise(...streams)` 457 | 458 | Same as normal pipeline except instead of returning the last stream it returns 459 | a promise representing the done callback. Note you should error handle this 460 | promise if you use this version. 461 | 462 | ## Helpers 463 | 464 | #### `bool = isStream(stream)` 465 | 466 | #### `bool = isStreamx(stream)` 467 | 468 | #### `err = getStreamError(stream, [options])` 469 | 470 | Returns `null` if the stream has no errors. 471 | 472 | ## Utilities 473 | 474 | Streamx aims to be minimal and stable. It therefore only contains a minimal set of utilities. 475 | To help you discover other modules that help you build streamx apps, we link some useful utilities here 476 | 477 | - [stream-composer](https://github.com/mafintosh/stream-composer) - Compose streams like Node's `stream.compose` and the `duplexify` and `pumpify` modules. 478 | - [teex](https://github.com/mafintosh/teex) - Clone a readable stream into multiple new readable instances. 
479 | 480 | ## Contributing 481 | 482 | If you want to help contribute to streamx a good way to start is to help writing more test 483 | cases, compatibility tests, documentation, or performance benchmarks. 484 | 485 | If in doubt open an issue :) 486 | 487 | ## License 488 | 489 | MIT 490 | -------------------------------------------------------------------------------- /index.js: -------------------------------------------------------------------------------- 1 | const { EventEmitter } = require('events-universal') 2 | const STREAM_DESTROYED = new Error('Stream was destroyed') 3 | const PREMATURE_CLOSE = new Error('Premature close') 4 | 5 | const FIFO = require('fast-fifo') 6 | const TextDecoder = require('text-decoder') 7 | 8 | // if we do a future major, expect queue microtask to be there always, for now a bit defensive 9 | const qmt = 10 | typeof queueMicrotask === 'undefined' ? (fn) => global.process.nextTick(fn) : queueMicrotask 11 | 12 | // 29 bits used total (4 from shared, 14 from read, and 11 from write) 13 | const MAX = (1 << 29) - 1 14 | 15 | // Shared state 16 | const OPENING = 0b0001 17 | const PREDESTROYING = 0b0010 18 | const DESTROYING = 0b0100 19 | const DESTROYED = 0b1000 20 | 21 | const NOT_OPENING = MAX ^ OPENING 22 | const NOT_PREDESTROYING = MAX ^ PREDESTROYING 23 | 24 | // Read state (4 bit offset from shared state) 25 | const READ_ACTIVE = 0b00000000000001 << 4 26 | const READ_UPDATING = 0b00000000000010 << 4 27 | const READ_PRIMARY = 0b00000000000100 << 4 28 | const READ_QUEUED = 0b00000000001000 << 4 29 | const READ_RESUMED = 0b00000000010000 << 4 30 | const READ_PIPE_DRAINED = 0b00000000100000 << 4 31 | const READ_ENDING = 0b00000001000000 << 4 32 | const READ_EMIT_DATA = 0b00000010000000 << 4 33 | const READ_EMIT_READABLE = 0b00000100000000 << 4 34 | const READ_EMITTED_READABLE = 0b00001000000000 << 4 35 | const READ_DONE = 0b00010000000000 << 4 36 | const READ_NEXT_TICK = 0b00100000000000 << 4 37 | const READ_NEEDS_PUSH = 
0b01000000000000 << 4 38 | const READ_READ_AHEAD = 0b10000000000000 << 4 39 | 40 | // Combined read state 41 | const READ_FLOWING = READ_RESUMED | READ_PIPE_DRAINED 42 | const READ_ACTIVE_AND_NEEDS_PUSH = READ_ACTIVE | READ_NEEDS_PUSH 43 | const READ_PRIMARY_AND_ACTIVE = READ_PRIMARY | READ_ACTIVE 44 | const READ_EMIT_READABLE_AND_QUEUED = READ_EMIT_READABLE | READ_QUEUED 45 | const READ_RESUMED_READ_AHEAD = READ_RESUMED | READ_READ_AHEAD 46 | 47 | const READ_NOT_ACTIVE = MAX ^ READ_ACTIVE 48 | const READ_NON_PRIMARY = MAX ^ READ_PRIMARY 49 | const READ_NON_PRIMARY_AND_PUSHED = MAX ^ (READ_PRIMARY | READ_NEEDS_PUSH) 50 | const READ_PUSHED = MAX ^ READ_NEEDS_PUSH 51 | const READ_PAUSED = MAX ^ READ_RESUMED 52 | const READ_NOT_QUEUED = MAX ^ (READ_QUEUED | READ_EMITTED_READABLE) 53 | const READ_NOT_ENDING = MAX ^ READ_ENDING 54 | const READ_PIPE_NOT_DRAINED = MAX ^ READ_FLOWING 55 | const READ_NOT_NEXT_TICK = MAX ^ READ_NEXT_TICK 56 | const READ_NOT_UPDATING = MAX ^ READ_UPDATING 57 | const READ_NO_READ_AHEAD = MAX ^ READ_READ_AHEAD 58 | const READ_PAUSED_NO_READ_AHEAD = MAX ^ READ_RESUMED_READ_AHEAD 59 | 60 | // Write state (18 bit offset, 4 bit offset from shared state and 14 from read state) 61 | const WRITE_ACTIVE = 0b00000000001 << 18 62 | const WRITE_UPDATING = 0b00000000010 << 18 63 | const WRITE_PRIMARY = 0b00000000100 << 18 64 | const WRITE_QUEUED = 0b00000001000 << 18 65 | const WRITE_UNDRAINED = 0b00000010000 << 18 66 | const WRITE_DONE = 0b00000100000 << 18 67 | const WRITE_EMIT_DRAIN = 0b00001000000 << 18 68 | const WRITE_NEXT_TICK = 0b00010000000 << 18 69 | const WRITE_WRITING = 0b00100000000 << 18 70 | const WRITE_FINISHING = 0b01000000000 << 18 71 | const WRITE_CORKED = 0b10000000000 << 18 72 | 73 | const WRITE_NOT_ACTIVE = MAX ^ (WRITE_ACTIVE | WRITE_WRITING) 74 | const WRITE_NON_PRIMARY = MAX ^ WRITE_PRIMARY 75 | const WRITE_NOT_FINISHING = MAX ^ (WRITE_ACTIVE | WRITE_FINISHING) 76 | const WRITE_DRAINED = MAX ^ WRITE_UNDRAINED 77 | const 
WRITE_NOT_QUEUED = MAX ^ WRITE_QUEUED 78 | const WRITE_NOT_NEXT_TICK = MAX ^ WRITE_NEXT_TICK 79 | const WRITE_NOT_UPDATING = MAX ^ WRITE_UPDATING 80 | const WRITE_NOT_CORKED = MAX ^ WRITE_CORKED 81 | 82 | // Combined shared state 83 | const ACTIVE = READ_ACTIVE | WRITE_ACTIVE 84 | const NOT_ACTIVE = MAX ^ ACTIVE 85 | const DONE = READ_DONE | WRITE_DONE 86 | const DESTROY_STATUS = DESTROYING | DESTROYED | PREDESTROYING 87 | const OPEN_STATUS = DESTROY_STATUS | OPENING 88 | const AUTO_DESTROY = DESTROY_STATUS | DONE 89 | const NON_PRIMARY = WRITE_NON_PRIMARY & READ_NON_PRIMARY 90 | const ACTIVE_OR_TICKING = WRITE_NEXT_TICK | READ_NEXT_TICK 91 | const TICKING = ACTIVE_OR_TICKING & NOT_ACTIVE 92 | const IS_OPENING = OPEN_STATUS | TICKING 93 | 94 | // Combined shared state and read state 95 | const READ_PRIMARY_STATUS = OPEN_STATUS | READ_ENDING | READ_DONE 96 | const READ_STATUS = OPEN_STATUS | READ_DONE | READ_QUEUED 97 | const READ_ENDING_STATUS = OPEN_STATUS | READ_ENDING | READ_QUEUED 98 | const READ_READABLE_STATUS = OPEN_STATUS | READ_EMIT_READABLE | READ_QUEUED | READ_EMITTED_READABLE 99 | const SHOULD_NOT_READ = 100 | OPEN_STATUS | READ_ACTIVE | READ_ENDING | READ_DONE | READ_NEEDS_PUSH | READ_READ_AHEAD 101 | const READ_BACKPRESSURE_STATUS = DESTROY_STATUS | READ_ENDING | READ_DONE 102 | const READ_UPDATE_SYNC_STATUS = READ_UPDATING | OPEN_STATUS | READ_NEXT_TICK | READ_PRIMARY 103 | const READ_NEXT_TICK_OR_OPENING = READ_NEXT_TICK | OPENING 104 | 105 | // Combined write state 106 | const WRITE_PRIMARY_STATUS = OPEN_STATUS | WRITE_FINISHING | WRITE_DONE 107 | const WRITE_QUEUED_AND_UNDRAINED = WRITE_QUEUED | WRITE_UNDRAINED 108 | const WRITE_QUEUED_AND_ACTIVE = WRITE_QUEUED | WRITE_ACTIVE 109 | const WRITE_DRAIN_STATUS = WRITE_QUEUED | WRITE_UNDRAINED | OPEN_STATUS | WRITE_ACTIVE 110 | const WRITE_STATUS = OPEN_STATUS | WRITE_ACTIVE | WRITE_QUEUED | WRITE_CORKED 111 | const WRITE_PRIMARY_AND_ACTIVE = WRITE_PRIMARY | WRITE_ACTIVE 112 | const 
WRITE_ACTIVE_AND_WRITING = WRITE_ACTIVE | WRITE_WRITING 113 | const WRITE_FINISHING_STATUS = OPEN_STATUS | WRITE_FINISHING | WRITE_QUEUED_AND_ACTIVE | WRITE_DONE 114 | const WRITE_BACKPRESSURE_STATUS = WRITE_UNDRAINED | DESTROY_STATUS | WRITE_FINISHING | WRITE_DONE 115 | const WRITE_UPDATE_SYNC_STATUS = WRITE_UPDATING | OPEN_STATUS | WRITE_NEXT_TICK | WRITE_PRIMARY 116 | const WRITE_DROP_DATA = WRITE_FINISHING | WRITE_DONE | DESTROY_STATUS 117 | 118 | const asyncIterator = Symbol.asyncIterator || Symbol('asyncIterator') 119 | 120 | class WritableState { 121 | constructor( 122 | stream, 123 | { highWaterMark = 16384, map = null, mapWritable, byteLength, byteLengthWritable } = {} 124 | ) { 125 | this.stream = stream 126 | this.queue = new FIFO() 127 | this.highWaterMark = highWaterMark 128 | this.buffered = 0 129 | this.error = null 130 | this.pipeline = null 131 | this.drains = null // if we add more seldomly used helpers we might them into a subobject so its a single ptr 132 | this.byteLength = byteLengthWritable || byteLength || defaultByteLength 133 | this.map = mapWritable || map 134 | this.afterWrite = afterWrite.bind(this) 135 | this.afterUpdateNextTick = updateWriteNT.bind(this) 136 | } 137 | 138 | get ended() { 139 | return (this.stream._duplexState & WRITE_DONE) !== 0 140 | } 141 | 142 | push(data) { 143 | if ((this.stream._duplexState & WRITE_DROP_DATA) !== 0) return false 144 | if (this.map !== null) data = this.map(data) 145 | 146 | this.buffered += this.byteLength(data) 147 | this.queue.push(data) 148 | 149 | if (this.buffered < this.highWaterMark) { 150 | this.stream._duplexState |= WRITE_QUEUED 151 | return true 152 | } 153 | 154 | this.stream._duplexState |= WRITE_QUEUED_AND_UNDRAINED 155 | return false 156 | } 157 | 158 | shift() { 159 | const data = this.queue.shift() 160 | 161 | this.buffered -= this.byteLength(data) 162 | if (this.buffered === 0) this.stream._duplexState &= WRITE_NOT_QUEUED 163 | 164 | return data 165 | } 166 | 167 | end(data) { 
168 | if (typeof data === 'function') this.stream.once('finish', data) 169 | else if (data !== undefined && data !== null) this.push(data) 170 | this.stream._duplexState = (this.stream._duplexState | WRITE_FINISHING) & WRITE_NON_PRIMARY 171 | } 172 | 173 | autoBatch(data, cb) { 174 | const buffer = [] 175 | const stream = this.stream 176 | 177 | buffer.push(data) 178 | while ((stream._duplexState & WRITE_STATUS) === WRITE_QUEUED_AND_ACTIVE) { 179 | buffer.push(stream._writableState.shift()) 180 | } 181 | 182 | if ((stream._duplexState & OPEN_STATUS) !== 0) return cb(null) 183 | stream._writev(buffer, cb) 184 | } 185 | 186 | update() { 187 | const stream = this.stream 188 | 189 | stream._duplexState |= WRITE_UPDATING 190 | 191 | do { 192 | while ((stream._duplexState & WRITE_STATUS) === WRITE_QUEUED) { 193 | const data = this.shift() 194 | stream._duplexState |= WRITE_ACTIVE_AND_WRITING 195 | stream._write(data, this.afterWrite) 196 | } 197 | 198 | if ((stream._duplexState & WRITE_PRIMARY_AND_ACTIVE) === 0) this.updateNonPrimary() 199 | } while (this.continueUpdate() === true) 200 | 201 | stream._duplexState &= WRITE_NOT_UPDATING 202 | } 203 | 204 | updateNonPrimary() { 205 | const stream = this.stream 206 | 207 | if ((stream._duplexState & WRITE_FINISHING_STATUS) === WRITE_FINISHING) { 208 | stream._duplexState = stream._duplexState | WRITE_ACTIVE 209 | stream._final(afterFinal.bind(this)) 210 | return 211 | } 212 | 213 | if ((stream._duplexState & DESTROY_STATUS) === DESTROYING) { 214 | if ((stream._duplexState & ACTIVE_OR_TICKING) === 0) { 215 | stream._duplexState |= ACTIVE 216 | stream._destroy(afterDestroy.bind(this)) 217 | } 218 | return 219 | } 220 | 221 | if ((stream._duplexState & IS_OPENING) === OPENING) { 222 | stream._duplexState = (stream._duplexState | ACTIVE) & NOT_OPENING 223 | stream._open(afterOpen.bind(this)) 224 | } 225 | } 226 | 227 | continueUpdate() { 228 | if ((this.stream._duplexState & WRITE_NEXT_TICK) === 0) return false 229 | 
this.stream._duplexState &= WRITE_NOT_NEXT_TICK 230 | return true 231 | } 232 | 233 | updateCallback() { 234 | if ((this.stream._duplexState & WRITE_UPDATE_SYNC_STATUS) === WRITE_PRIMARY) this.update() 235 | else this.updateNextTick() 236 | } 237 | 238 | updateNextTick() { 239 | if ((this.stream._duplexState & WRITE_NEXT_TICK) !== 0) return 240 | this.stream._duplexState |= WRITE_NEXT_TICK 241 | if ((this.stream._duplexState & WRITE_UPDATING) === 0) qmt(this.afterUpdateNextTick) 242 | } 243 | } 244 | 245 | class ReadableState { 246 | constructor( 247 | stream, 248 | { highWaterMark = 16384, map = null, mapReadable, byteLength, byteLengthReadable } = {} 249 | ) { 250 | this.stream = stream 251 | this.queue = new FIFO() 252 | this.highWaterMark = highWaterMark === 0 ? 1 : highWaterMark 253 | this.buffered = 0 254 | this.readAhead = highWaterMark > 0 255 | this.error = null 256 | this.pipeline = null 257 | this.byteLength = byteLengthReadable || byteLength || defaultByteLength 258 | this.map = mapReadable || map 259 | this.pipeTo = null 260 | this.afterRead = afterRead.bind(this) 261 | this.afterUpdateNextTick = updateReadNT.bind(this) 262 | } 263 | 264 | get ended() { 265 | return (this.stream._duplexState & READ_DONE) !== 0 266 | } 267 | 268 | pipe(pipeTo, cb) { 269 | if (this.pipeTo !== null) throw new Error('Can only pipe to one destination') 270 | if (typeof cb !== 'function') cb = null 271 | 272 | this.stream._duplexState |= READ_PIPE_DRAINED 273 | this.pipeTo = pipeTo 274 | this.pipeline = new Pipeline(this.stream, pipeTo, cb) 275 | 276 | if (cb) this.stream.on('error', noop) // We already error handle this so supress crashes 277 | 278 | if (isStreamx(pipeTo)) { 279 | pipeTo._writableState.pipeline = this.pipeline 280 | if (cb) pipeTo.on('error', noop) // We already error handle this so supress crashes 281 | pipeTo.on('finish', this.pipeline.finished.bind(this.pipeline)) // TODO: just call finished from pipeTo itself 282 | } else { 283 | const onerror = 
this.pipeline.done.bind(this.pipeline, pipeTo)
      const onclose = this.pipeline.done.bind(this.pipeline, pipeTo, null) // onclose has a weird bool arg
      pipeTo.on('error', onerror)
      pipeTo.on('close', onclose)
      pipeTo.on('finish', this.pipeline.finished.bind(this.pipeline))
    }

    pipeTo.on('drain', afterDrain.bind(this))
    this.stream.emit('piping', pipeTo)
    pipeTo.emit('pipe', this.stream)
  }

  // Queue data on the readable side. `null` marks end-of-stream.
  // Returns true while the buffer is below the high water mark.
  push(data) {
    const stream = this.stream

    if (data === null) {
      // end-of-stream: stop buffering and flip into the ending sequence
      this.highWaterMark = 0
      stream._duplexState = (stream._duplexState | READ_ENDING) & READ_NON_PRIMARY_AND_PUSHED
      return false
    }

    if (this.map !== null) {
      data = this.map(data)
      if (data === null) {
        // map dropped the chunk - nothing gets queued
        stream._duplexState &= READ_PUSHED
        return this.buffered < this.highWaterMark
      }
    }

    this.buffered += this.byteLength(data)
    this.queue.push(data)

    stream._duplexState = (stream._duplexState | READ_QUEUED) & READ_PUSHED

    return this.buffered < this.highWaterMark
  }

  // Dequeue one chunk and clear READ_QUEUED when the buffer empties.
  shift() {
    const data = this.queue.shift()

    this.buffered -= this.byteLength(data)
    if (this.buffered === 0) this.stream._duplexState &= READ_NOT_QUEUED
    return data
  }

  // Put data back at the front of the queue by draining the queue into
  // `pending` and re-pushing everything, with `data` (mapped) first.
  unshift(data) {
    const pending = [this.map !== null ? this.map(data) : data]
    while (this.buffered > 0) pending.push(this.shift())

    for (let i = 0; i < pending.length - 1; i++) {
      const data = pending[i]
      this.buffered += this.byteLength(data)
      this.queue.push(data)
    }

    // last one goes through push() so state bits are updated
    this.push(pending[pending.length - 1])
  }

  // Pull one chunk if available, forwarding it to any pipe destination and
  // 'data' listeners. Returns null when nothing is buffered.
  read() {
    const stream = this.stream

    if ((stream._duplexState & READ_STATUS) === READ_QUEUED) {
      const data = this.shift()
      if (this.pipeTo !== null && this.pipeTo.write(data) === false)
        stream._duplexState &= READ_PIPE_NOT_DRAINED
      if ((stream._duplexState & READ_EMIT_DATA) !== 0) stream.emit('data', data)
      return data
    }

    if (this.readAhead === false) {
      // explicit read() on a non-readAhead stream enables one read-ahead pass
      stream._duplexState |= READ_READ_AHEAD
      this.updateNextTick()
    }

    return null
  }

  // Flush the queue to the pipe destination / 'data' listeners while the
  // stream is flowing (resumed and pipe not backpressured).
  drain() {
    const stream = this.stream

    while (
      (stream._duplexState & READ_STATUS) === READ_QUEUED &&
      (stream._duplexState & READ_FLOWING) !== 0
    ) {
      const data = this.shift()
      if (this.pipeTo !== null && this.pipeTo.write(data) === false)
        stream._duplexState &= READ_PIPE_NOT_DRAINED
      if ((stream._duplexState & READ_EMIT_DATA) !== 0) stream.emit('data', data)
    }
  }

  // Main read-side loop: drain, call _read while below the high water mark,
  // emit 'readable', and fall through to open/end/destroy handling.
  update() {
    const stream = this.stream

    stream._duplexState |= READ_UPDATING

    do {
      this.drain()

      while (
        this.buffered < this.highWaterMark &&
        (stream._duplexState & SHOULD_NOT_READ) === READ_READ_AHEAD
      ) {
        stream._duplexState |= READ_ACTIVE_AND_NEEDS_PUSH
        stream._read(this.afterRead)
        this.drain()
      }

      if ((stream._duplexState & READ_READABLE_STATUS) === READ_EMIT_READABLE_AND_QUEUED) {
        stream._duplexState |= READ_EMITTED_READABLE
        stream.emit('readable')
      }

      if ((stream._duplexState & READ_PRIMARY_AND_ACTIVE) === 0) this.updateNonPrimary()
    } while (this.continueUpdate() === true)

    stream._duplexState &= READ_NOT_UPDATING
  }

  // Non-primary transitions: ending ('end' + auto destroy + ending the pipe
  // destination), destroying (_destroy once idle), and opening (_open).
  updateNonPrimary() {
    const stream = this.stream

    if ((stream._duplexState & READ_ENDING_STATUS) === READ_ENDING) {
      stream._duplexState = (stream._duplexState | READ_DONE) & READ_NOT_ENDING
      stream.emit('end')
      if ((stream._duplexState & AUTO_DESTROY) === DONE) stream._duplexState |= DESTROYING
      if (this.pipeTo !== null) this.pipeTo.end()
    }

    if ((stream._duplexState & DESTROY_STATUS) === DESTROYING) {
      if ((stream._duplexState & ACTIVE_OR_TICKING) === 0) {
        stream._duplexState |= ACTIVE
        stream._destroy(afterDestroy.bind(this))
      }
      return
    }

    if ((stream._duplexState & IS_OPENING) === OPENING) {
      stream._duplexState = (stream._duplexState | ACTIVE) & NOT_OPENING
      stream._open(afterOpen.bind(this))
    }
  }

  // True when a tick was scheduled during update(), so the loop runs again
  // instead of waiting for the microtask.
  continueUpdate() {
    if ((this.stream._duplexState & READ_NEXT_TICK) === 0) return false
    this.stream._duplexState &= READ_NOT_NEXT_TICK
    return true
  }

  // Run update() synchronously only in the safe steady state (primary, not
  // already updating/opening/ticking); otherwise defer a microtask.
  updateCallback() {
    if ((this.stream._duplexState & READ_UPDATE_SYNC_STATUS) === READ_PRIMARY) this.update()
    else this.updateNextTick()
  }

  // Like updateNextTick but a no-op while the stream is still opening.
  updateNextTickIfOpen() {
    if ((this.stream._duplexState & READ_NEXT_TICK_OR_OPENING) !== 0) return
    this.stream._duplexState |= READ_NEXT_TICK
    if ((this.stream._duplexState & READ_UPDATING) === 0) qmt(this.afterUpdateNextTick)
  }

  // Schedule (at most one) deferred update() on the microtask queue.
  updateNextTick() {
    if ((this.stream._duplexState & READ_NEXT_TICK) !== 0) return
    this.stream._duplexState |= READ_NEXT_TICK
    if ((this.stream._duplexState & READ_UPDATING) === 0) qmt(this.afterUpdateNextTick)
  }
}

// Per-Transform bookkeeping: a slot for one chunk parked while the readable
// side is saturated, plus the bound _transform / _final completion callbacks.
class TransformState {
  constructor(stream) {
    this.data = null
    this.afterTransform = afterTransform.bind(stream)
    this.afterFinal = null
  }
}
// Tracks a single src -> dst pipe. done(stream, err) is invoked as each side
// closes; when both sides are accounted for, the user callback (afterPipe)
// fires exactly once with the first error seen (or null).
class Pipeline {
  constructor(src, dst, cb) {
    this.from = src
    this.to = dst
    this.afterPipe = cb
    this.error = null // first error reported by either side
    this.pipeToFinished = false // set when dst emits 'finish'
  }

  // Called when the destination emits 'finish' (wired up in ReadableState.pipe).
  finished() {
    this.pipeToFinished = true
  }

  // Called with the stream (src or dst) that just closed. Destroys the other
  // side if the pipe did not complete cleanly.
  done(stream, err) {
    if (err) this.error = err

    if (stream === this.to) {
      this.to = null

      if (this.from !== null) {
        // dst closed first: tear down src unless it fully ended AND dst finished
        if ((this.from._duplexState & READ_DONE) === 0 || !this.pipeToFinished) {
          this.from.destroy(this.error || new Error('Writable stream closed prematurely'))
        }
        return
      }
    }

    if (stream === this.from) {
      this.from = null

      if (this.to !== null) {
        // src closed before ending: propagate teardown to dst
        if ((stream._duplexState & READ_DONE) === 0) {
          this.to.destroy(this.error || new Error('Readable stream closed before ending'))
        }
        return
      }
    }

    // Both sides accounted for - report once and drop all references
    if (this.afterPipe !== null) this.afterPipe(this.error)
    this.to = this.from = this.afterPipe = null
  }
}

// 'drain' listener of the pipe destination (bound to a ReadableState): the
// destination can accept data again, so resume draining our queue.
function afterDrain() {
  this.stream._duplexState |= READ_PIPE_DRAINED
  this.updateCallback()
}

// Completion callback for _final (bound to a WritableState). Emits 'finish'
// unless the stream is being destroyed, then clears the finishing/active bits.
function afterFinal(err) {
  const stream = this.stream
  if (err) stream.destroy(err)
  if ((stream._duplexState & DESTROY_STATUS) === 0) {
    stream._duplexState |= WRITE_DONE
    stream.emit('finish')
  }
  // both halves done -> schedule auto destroy
  if ((stream._duplexState & AUTO_DESTROY) === DONE) {
    stream._duplexState |= DESTROYING
  }

  stream._duplexState &= WRITE_NOT_FINISHING

  // no need to wait the extra tick here, so we short circuit that
  if ((stream._duplexState & WRITE_UPDATING) === 0) this.update()
  else this.updateNextTick()
}

// Completion callback for _destroy (bound to a Readable/WritableState). Emits
// 'error'/'close', rejects pending drained() promises with false and notifies
// any pipelines the stream participates in.
function afterDestroy(err) {
  const stream = this.stream

  if (!err && this.error !== STREAM_DESTROYED) err = this.error
  if (err) stream.emit('error', err)
  stream._duplexState |= DESTROYED
  stream.emit('close')

  const rs = stream._readableState
  const ws = stream._writableState

  if (rs !== null && rs.pipeline !== null) rs.pipeline.done(stream, err)

  if (ws !== null) {
    while (ws.drains !== null && ws.drains.length > 0) ws.drains.shift().resolve(false)
    if (ws.pipeline !== null) ws.pipeline.done(stream, err)
  }
}

// Completion callback for _write (bound to a WritableState). Clears the
// active/writing bits, settles Writable.drained() promises and emits 'drain'
// when the queue dips back under the high water mark.
function afterWrite(err) {
  const stream = this.stream

  if (err) stream.destroy(err)
  stream._duplexState &= WRITE_NOT_ACTIVE

  if (this.drains !== null) tickDrains(this.drains)

  if ((stream._duplexState & WRITE_DRAIN_STATUS) === WRITE_UNDRAINED) {
    stream._duplexState &= WRITE_DRAINED
    if ((stream._duplexState & WRITE_EMIT_DRAIN) === WRITE_EMIT_DRAIN) {
      stream.emit('drain')
    }
  }

  this.updateCallback()
}

// Completion callback for _read (bound to a ReadableState). Clears the active
// bit and, when readAhead is off and the stream is paused, stops reading
// ahead again.
function afterRead(err) {
  if (err) this.stream.destroy(err)
  this.stream._duplexState &= READ_NOT_ACTIVE
  if (this.readAhead === false && (this.stream._duplexState & READ_RESUMED) === 0)
    this.stream._duplexState &= READ_NO_READ_AHEAD
  this.updateCallback()
}

// Microtask scheduled by ReadableState.updateNextTick - runs update() unless
// an update is already in progress (that one loops via continueUpdate).
function updateReadNT() {
  if ((this.stream._duplexState & READ_UPDATING) === 0) {
    this.stream._duplexState &= READ_NOT_NEXT_TICK
    this.update()
  }
}

// Write-side counterpart of updateReadNT.
function updateWriteNT() {
  if ((this.stream._duplexState & WRITE_UPDATING) === 0) {
    this.stream._duplexState &= WRITE_NOT_NEXT_TICK
    this.update()
  }
}

// Decrement the pending-write counters of queued drained() promises and
// resolve the ones that hit zero.
function tickDrains(drains) {
  for (let i = 0; i < drains.length; i++) {
    // drains.writes are monotonic, so if one is 0 its always the first one
    if (--drains[i].writes === 0) {
      drains.shift().resolve(true)
      i--
    }
  }
}

// Completion callback for _open (bound to a Readable/WritableState).
function afterOpen(err) {
  const stream = this.stream

  if (err) stream.destroy(err)

  if ((stream._duplexState & DESTROYING) ===
0) { 599 | if ((stream._duplexState & READ_PRIMARY_STATUS) === 0) stream._duplexState |= READ_PRIMARY 600 | if ((stream._duplexState & WRITE_PRIMARY_STATUS) === 0) stream._duplexState |= WRITE_PRIMARY 601 | stream.emit('open') 602 | } 603 | 604 | stream._duplexState &= NOT_ACTIVE 605 | 606 | if (stream._writableState !== null) { 607 | stream._writableState.updateCallback() 608 | } 609 | 610 | if (stream._readableState !== null) { 611 | stream._readableState.updateCallback() 612 | } 613 | } 614 | 615 | function afterTransform(err, data) { 616 | if (data !== undefined && data !== null) this.push(data) 617 | this._writableState.afterWrite(err) 618 | } 619 | 620 | function newListener(name) { 621 | if (this._readableState !== null) { 622 | if (name === 'data') { 623 | this._duplexState |= READ_EMIT_DATA | READ_RESUMED_READ_AHEAD 624 | this._readableState.updateNextTick() 625 | } 626 | if (name === 'readable') { 627 | this._duplexState |= READ_EMIT_READABLE 628 | this._readableState.updateNextTick() 629 | } 630 | } 631 | 632 | if (this._writableState !== null) { 633 | if (name === 'drain') { 634 | this._duplexState |= WRITE_EMIT_DRAIN 635 | this._writableState.updateNextTick() 636 | } 637 | } 638 | } 639 | 640 | class Stream extends EventEmitter { 641 | constructor(opts) { 642 | super() 643 | 644 | this._duplexState = 0 645 | this._readableState = null 646 | this._writableState = null 647 | 648 | if (opts) { 649 | if (opts.open) this._open = opts.open 650 | if (opts.destroy) this._destroy = opts.destroy 651 | if (opts.predestroy) this._predestroy = opts.predestroy 652 | if (opts.signal) { 653 | opts.signal.addEventListener('abort', abort.bind(this)) 654 | } 655 | } 656 | 657 | this.on('newListener', newListener) 658 | } 659 | 660 | _open(cb) { 661 | cb(null) 662 | } 663 | 664 | _destroy(cb) { 665 | cb(null) 666 | } 667 | 668 | _predestroy() { 669 | // does nothing 670 | } 671 | 672 | get readable() { 673 | return this._readableState !== null ? 
true : undefined 674 | } 675 | 676 | get writable() { 677 | return this._writableState !== null ? true : undefined 678 | } 679 | 680 | get destroyed() { 681 | return (this._duplexState & DESTROYED) !== 0 682 | } 683 | 684 | get destroying() { 685 | return (this._duplexState & DESTROY_STATUS) !== 0 686 | } 687 | 688 | destroy(err) { 689 | if ((this._duplexState & DESTROY_STATUS) === 0) { 690 | if (!err) err = STREAM_DESTROYED 691 | this._duplexState = (this._duplexState | DESTROYING) & NON_PRIMARY 692 | 693 | if (this._readableState !== null) { 694 | this._readableState.highWaterMark = 0 695 | this._readableState.error = err 696 | } 697 | if (this._writableState !== null) { 698 | this._writableState.highWaterMark = 0 699 | this._writableState.error = err 700 | } 701 | 702 | this._duplexState |= PREDESTROYING 703 | this._predestroy() 704 | this._duplexState &= NOT_PREDESTROYING 705 | 706 | if (this._readableState !== null) this._readableState.updateNextTick() 707 | if (this._writableState !== null) this._writableState.updateNextTick() 708 | } 709 | } 710 | } 711 | 712 | class Readable extends Stream { 713 | constructor(opts) { 714 | super(opts) 715 | 716 | this._duplexState |= OPENING | WRITE_DONE | READ_READ_AHEAD 717 | this._readableState = new ReadableState(this, opts) 718 | 719 | if (opts) { 720 | if (this._readableState.readAhead === false) this._duplexState &= READ_NO_READ_AHEAD 721 | if (opts.read) this._read = opts.read 722 | if (opts.eagerOpen) this._readableState.updateNextTick() 723 | if (opts.encoding) this.setEncoding(opts.encoding) 724 | } 725 | } 726 | 727 | setEncoding(encoding) { 728 | const dec = new TextDecoder(encoding) 729 | const map = this._readableState.map || echo 730 | this._readableState.map = mapOrSkip 731 | return this 732 | 733 | function mapOrSkip(data) { 734 | const next = dec.push(data) 735 | return next === '' && (data.byteLength !== 0 || dec.remaining > 0) ? 
null : map(next) 736 | } 737 | } 738 | 739 | _read(cb) { 740 | cb(null) 741 | } 742 | 743 | pipe(dest, cb) { 744 | this._readableState.updateNextTick() 745 | this._readableState.pipe(dest, cb) 746 | return dest 747 | } 748 | 749 | read() { 750 | this._readableState.updateNextTick() 751 | return this._readableState.read() 752 | } 753 | 754 | push(data) { 755 | this._readableState.updateNextTickIfOpen() 756 | return this._readableState.push(data) 757 | } 758 | 759 | unshift(data) { 760 | this._readableState.updateNextTickIfOpen() 761 | return this._readableState.unshift(data) 762 | } 763 | 764 | resume() { 765 | this._duplexState |= READ_RESUMED_READ_AHEAD 766 | this._readableState.updateNextTick() 767 | return this 768 | } 769 | 770 | pause() { 771 | this._duplexState &= 772 | this._readableState.readAhead === false ? READ_PAUSED_NO_READ_AHEAD : READ_PAUSED 773 | return this 774 | } 775 | 776 | static _fromAsyncIterator(ite, opts) { 777 | let destroy 778 | 779 | const rs = new Readable({ 780 | ...opts, 781 | read(cb) { 782 | ite.next().then(push).then(cb.bind(null, null)).catch(cb) 783 | }, 784 | predestroy() { 785 | destroy = ite.return() 786 | }, 787 | destroy(cb) { 788 | if (!destroy) return cb(null) 789 | destroy.then(cb.bind(null, null)).catch(cb) 790 | } 791 | }) 792 | 793 | return rs 794 | 795 | function push(data) { 796 | if (data.done) rs.push(null) 797 | else rs.push(data.value) 798 | } 799 | } 800 | 801 | static from(data, opts) { 802 | if (isReadStreamx(data)) return data 803 | if (data[asyncIterator]) return this._fromAsyncIterator(data[asyncIterator](), opts) 804 | if (!Array.isArray(data)) data = data === undefined ? [] : [data] 805 | 806 | let i = 0 807 | return new Readable({ 808 | ...opts, 809 | read(cb) { 810 | this.push(i === data.length ? 
null : data[i++]) 811 | cb(null) 812 | } 813 | }) 814 | } 815 | 816 | static isBackpressured(rs) { 817 | return ( 818 | (rs._duplexState & READ_BACKPRESSURE_STATUS) !== 0 || 819 | rs._readableState.buffered >= rs._readableState.highWaterMark 820 | ) 821 | } 822 | 823 | static isPaused(rs) { 824 | return (rs._duplexState & READ_RESUMED) === 0 825 | } 826 | 827 | [asyncIterator]() { 828 | const stream = this 829 | 830 | let error = null 831 | let promiseResolve = null 832 | let promiseReject = null 833 | 834 | this.on('error', (err) => { 835 | error = err 836 | }) 837 | this.on('readable', onreadable) 838 | this.on('close', onclose) 839 | 840 | return { 841 | [asyncIterator]() { 842 | return this 843 | }, 844 | next() { 845 | return new Promise(function (resolve, reject) { 846 | promiseResolve = resolve 847 | promiseReject = reject 848 | const data = stream.read() 849 | if (data !== null) ondata(data) 850 | else if ((stream._duplexState & DESTROYED) !== 0) ondata(null) 851 | }) 852 | }, 853 | return() { 854 | return destroy(null) 855 | }, 856 | throw(err) { 857 | return destroy(err) 858 | } 859 | } 860 | 861 | function onreadable() { 862 | if (promiseResolve !== null) ondata(stream.read()) 863 | } 864 | 865 | function onclose() { 866 | if (promiseResolve !== null) ondata(null) 867 | } 868 | 869 | function ondata(data) { 870 | if (promiseReject === null) return 871 | if (error) promiseReject(error) 872 | else if (data === null && (stream._duplexState & READ_DONE) === 0) 873 | promiseReject(STREAM_DESTROYED) 874 | else promiseResolve({ value: data, done: data === null }) 875 | promiseReject = promiseResolve = null 876 | } 877 | 878 | function destroy(err) { 879 | stream.destroy(err) 880 | return new Promise((resolve, reject) => { 881 | if (stream._duplexState & DESTROYED) return resolve({ value: undefined, done: true }) 882 | stream.once('close', function () { 883 | if (err) reject(err) 884 | else resolve({ value: undefined, done: true }) 885 | }) 886 | }) 887 | } 888 
| } 889 | } 890 | 891 | class Writable extends Stream { 892 | constructor(opts) { 893 | super(opts) 894 | 895 | this._duplexState |= OPENING | READ_DONE 896 | this._writableState = new WritableState(this, opts) 897 | 898 | if (opts) { 899 | if (opts.writev) this._writev = opts.writev 900 | if (opts.write) this._write = opts.write 901 | if (opts.final) this._final = opts.final 902 | if (opts.eagerOpen) this._writableState.updateNextTick() 903 | } 904 | } 905 | 906 | cork() { 907 | this._duplexState |= WRITE_CORKED 908 | } 909 | 910 | uncork() { 911 | this._duplexState &= WRITE_NOT_CORKED 912 | this._writableState.updateNextTick() 913 | } 914 | 915 | _writev(batch, cb) { 916 | cb(null) 917 | } 918 | 919 | _write(data, cb) { 920 | this._writableState.autoBatch(data, cb) 921 | } 922 | 923 | _final(cb) { 924 | cb(null) 925 | } 926 | 927 | static isBackpressured(ws) { 928 | return (ws._duplexState & WRITE_BACKPRESSURE_STATUS) !== 0 929 | } 930 | 931 | static drained(ws) { 932 | if (ws.destroyed) return Promise.resolve(false) 933 | const state = ws._writableState 934 | const pending = isWritev(ws) ? Math.min(1, state.queue.length) : state.queue.length 935 | const writes = pending + (ws._duplexState & WRITE_WRITING ? 
1 : 0) 936 | if (writes === 0) return Promise.resolve(true) 937 | if (state.drains === null) state.drains = [] 938 | return new Promise((resolve) => { 939 | state.drains.push({ writes, resolve }) 940 | }) 941 | } 942 | 943 | write(data) { 944 | this._writableState.updateNextTick() 945 | return this._writableState.push(data) 946 | } 947 | 948 | end(data) { 949 | this._writableState.updateNextTick() 950 | this._writableState.end(data) 951 | return this 952 | } 953 | } 954 | 955 | class Duplex extends Readable { 956 | // and Writable 957 | constructor(opts) { 958 | super(opts) 959 | 960 | this._duplexState = OPENING | (this._duplexState & READ_READ_AHEAD) 961 | this._writableState = new WritableState(this, opts) 962 | 963 | if (opts) { 964 | if (opts.writev) this._writev = opts.writev 965 | if (opts.write) this._write = opts.write 966 | if (opts.final) this._final = opts.final 967 | } 968 | } 969 | 970 | cork() { 971 | this._duplexState |= WRITE_CORKED 972 | } 973 | 974 | uncork() { 975 | this._duplexState &= WRITE_NOT_CORKED 976 | this._writableState.updateNextTick() 977 | } 978 | 979 | _writev(batch, cb) { 980 | cb(null) 981 | } 982 | 983 | _write(data, cb) { 984 | this._writableState.autoBatch(data, cb) 985 | } 986 | 987 | _final(cb) { 988 | cb(null) 989 | } 990 | 991 | write(data) { 992 | this._writableState.updateNextTick() 993 | return this._writableState.push(data) 994 | } 995 | 996 | end(data) { 997 | this._writableState.updateNextTick() 998 | this._writableState.end(data) 999 | return this 1000 | } 1001 | } 1002 | 1003 | class Transform extends Duplex { 1004 | constructor(opts) { 1005 | super(opts) 1006 | this._transformState = new TransformState(this) 1007 | 1008 | if (opts) { 1009 | if (opts.transform) this._transform = opts.transform 1010 | if (opts.flush) this._flush = opts.flush 1011 | } 1012 | } 1013 | 1014 | _write(data, cb) { 1015 | if (this._readableState.buffered >= this._readableState.highWaterMark) { 1016 | this._transformState.data = data 1017 | 
    } else {
      this._transform(data, this._transformState.afterTransform)
    }
  }

  _read(cb) {
    // If a chunk was parked by _write due to backpressure, ack this read
    // first, then transform the parked chunk (which re-acks the stalled write
    // via afterTransform). Otherwise there is nothing to do.
    if (this._transformState.data !== null) {
      const data = this._transformState.data
      this._transformState.data = null
      cb(null)
      this._transform(data, this._transformState.afterTransform)
    } else {
      cb(null)
    }
  }

  destroy(err) {
    super.destroy(err)
    // Release any parked chunk so a saturated transform can still tear down:
    // afterTransform unsticks the pending write callback (see
    // test/transform.js "default transform teardown when saturated").
    if (this._transformState.data !== null) {
      this._transformState.data = null
      this._transformState.afterTransform()
    }
  }

  // Default transform: identity (pass the chunk straight through).
  _transform(data, cb) {
    cb(null, data)
  }

  // Default flush: nothing extra to emit at end-of-stream.
  _flush(cb) {
    cb(null)
  }

  // Finalize by running _flush; transformAfterFlush then pushes any trailing
  // data, pushes EOF (null), and invokes the stored final callback.
  _final(cb) {
    this._transformState.afterFinal = cb
    this._flush(transformAfterFlush.bind(this))
  }
}

class PassThrough extends Transform {}

// Bound to the Transform instance by _final. Forwards flush output to the
// readable side, terminates it with null, then completes finalization.
function transformAfterFlush(err, data) {
  const cb = this._transformState.afterFinal
  if (err) return cb(err)
  if (data !== null && data !== undefined) this.push(data)
  this.push(null)
  cb(null)
}

// Promise-returning wrapper around pipeline(): resolves on success, rejects
// with the first pipeline error.
function pipelinePromise(...streams) {
  return new Promise((resolve, reject) => {
    return pipeline(...streams, (err) => {
      if (err) return reject(err)
      resolve()
    })
  })
}

// pipeline(a, b, ..., [done]) — also accepts an array as the first argument.
// Pipes each stream into the next, propagating errors across the chain, and
// invokes the optional trailing callback when the destination completes.
function pipeline(stream, ...streams) {
  // Normalize to a flat list; a trailing function is the completion callback.
  const all = Array.isArray(stream) ? [...stream, ...streams] : [stream, ...streams]
  const done = all.length && typeof all[all.length - 1] === 'function' ?
all.pop() : null

  if (all.length < 2) throw new Error('Pipeline requires at least 2 streams')

  let src = all[0]
  let dest = null
  let error = null

  // Wire each adjacent pair. streamx streams get the error callback passed
  // straight to pipe(); foreign (e.g. Node core) streams get explicit
  // error/close listeners attached so premature closes are detected.
  for (let i = 1; i < all.length; i++) {
    dest = all[i]

    if (isStreamx(src)) {
      src.pipe(dest, onerror)
    } else {
      // Only check the writable side for intermediate streams (i > 1);
      // the head of the chain is read-only from pipeline's perspective.
      errorHandle(src, true, i > 1, onerror)
      src.pipe(dest)
    }

    src = dest
  }

  if (done) {
    let fin = false

    // Streams that auto-destroy signal completion via 'close'; others only
    // via 'finish'. Pick the completion event accordingly.
    const autoDestroy =
      isStreamx(dest) || !!(dest._writableState && dest._writableState.autoDestroy)

    dest.on('error', (err) => {
      if (error === null) error = err
    })

    dest.on('finish', () => {
      fin = true
      if (!autoDestroy) done(error)
    })

    if (autoDestroy) {
      // Closed without finishing and without a recorded error means the
      // destination was torn down early.
      dest.on('close', () => done(error || (fin ? null : PREMATURE_CLOSE)))
    }
  }

  return dest

  // Attach error/close detection to a non-streamx stream. rd/wr select which
  // side must have ended for a 'close' to be considered clean.
  function errorHandle(s, rd, wr, onerror) {
    s.on('error', onerror)
    s.on('close', onclose)

    function onclose() {
      if (rd && s._readableState && !s._readableState.ended) return onerror(PREMATURE_CLOSE)
      if (wr && s._writableState && !s._writableState.ended) return onerror(PREMATURE_CLOSE)
    }
  }

  // First error wins; destroy every stream in the chain with it.
  function onerror(err) {
    if (!err || error) return
    error = err

    for (const s of all) {
      s.destroy(err)
    }
  }
}

// Identity helper (used elsewhere in this file — e.g. as a default mapper).
function echo(s) {
  return s
}

// Duck-type check: anything carrying a readable or writable state object.
function isStream(stream) {
  return !!stream._readableState || !!stream._writableState
}

// A streamx stream additionally carries the numeric _duplexState bitfield.
function isStreamx(stream) {
  return typeof stream._duplexState === 'number' && isStream(stream)
}

// Readable side has ended (seen EOF).
function isEnded(stream) {
  return !!stream._readableState && stream._readableState.ended
}

// Writable side has ended (end() called and accepted).
function isFinished(stream) {
  return !!stream._writableState && stream._writableState.ended
}

// First recorded error from either side. By default the sentinel
// STREAM_DESTROYED (plain destroy(), no explicit error) is reported as null;
// pass { all: true } to surface it.
function getStreamError(stream, opts = {}) {
  const err =
    (stream._readableState && stream._readableState.error) ||
    (stream._writableState && stream._writableState.error)

  // avoid implicit errors by default
  return !opts.all && err === STREAM_DESTROYED ? null : err
}

function isReadStreamx(stream) {
  return isStreamx(stream) && stream.readable
}

// True once the stream has left the pristine OPENING state or has work
// scheduled (active or ticking).
function isDisturbed(stream) {
  return (
    (stream._duplexState & OPENING) !== OPENING || (stream._duplexState & ACTIVE_OR_TICKING) !== 0
  )
}

// Loose duck-type check for binary-ish data: any object exposing a numeric
// byteLength (TypedArray, Buffer, ArrayBuffer, DataView all match).
function isTypedArray(data) {
  return typeof data === 'object' && data !== null && typeof data.byteLength === 'number'
}

// Default byteLength map: real byte size for binary data, a flat 1024
// estimate for anything else (e.g. objects in object mode).
function defaultByteLength(data) {
  return isTypedArray(data) ? data.byteLength : 1024
}

function noop() {}

// Listener helper: destroys the stream it is bound to (used for abort
// signals / teardown wiring elsewhere in this file).
function abort() {
  this.destroy(new Error('Stream aborted.'))
}

// True if the stream overrides _writev (i.e. supports real batch writes).
function isWritev(s) {
  return s._writev !== Writable.prototype._writev && s._writev !== Duplex.prototype._writev
}

module.exports = {
  pipeline,
  pipelinePromise,
  isStream,
  isStreamx,
  isEnded,
  isFinished,
  isDisturbed,
  getStreamError,
  Stream,
  Writable,
  Readable,
  Duplex,
  Transform,
  // Export PassThrough for compatibility with Node.js core's stream module
  PassThrough
}