├── .github
│   └── workflows
│       └── js-test-and-release.yml
├── .gitignore
├── LICENSE
├── README.md
├── package-lock.json
├── package.json
├── src
│   ├── duplex.js
│   ├── index.js
│   └── transform.js
└── test
    ├── duplex.test.js
    ├── helpers
    │   ├── random.js
    │   └── streams.js
    ├── readable.test.js
    ├── transform.test.js
    └── writable.test.js

/.github/workflows/js-test-and-release.yml:
--------------------------------------------------------------------------------
1 | name: test & maybe release
2 | 
3 | on:
4 |   push:
5 |     branches:
6 |       - master
7 |   pull_request:
8 |   workflow_dispatch:
9 | 
10 | permissions:
11 |   contents: write
12 |   packages: write
13 | 
14 | concurrency:
15 |   group: ${{ github.workflow }}-${{ github.event_name }}-${{ github.event_name == 'push' && github.sha || github.ref }}
16 |   cancel-in-progress: true
17 | 
18 | jobs:
19 |   js-test-and-release:
20 |     uses: pl-strflt/uci/.github/workflows/js-test-and-release.yml@v0.0
21 |     secrets:
22 |       DOCKER_TOKEN: ${{ secrets.DOCKER_TOKEN }}
23 |       DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
24 |       NPM_TOKEN: ${{ secrets.NPM_TOKEN }}
25 |       UCI_GITHUB_TOKEN: ${{ secrets.UCI_GITHUB_TOKEN }}
26 | 
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | node_modules
2 | .nyc_output
3 | coverage
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 | 
3 | Copyright (c) 2019 Alan Shaw
4 | 
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 | 
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 | 
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 | 
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # it-to-stream
2 | 
3 | [![Build Status](https://travis-ci.org/alanshaw/it-to-stream.svg?branch=master)](https://travis-ci.org/alanshaw/it-to-stream)
4 | [![dependencies Status](https://status.david-dm.org/gh/alanshaw/it-to-stream.svg)](https://david-dm.org/alanshaw/it-to-stream)
5 | [![JavaScript Style Guide](https://img.shields.io/badge/code_style-standard-brightgreen.svg)](https://standardjs.com)
6 | 
7 | > Convert streaming iterables to Node.js streams
8 | 
9 | ## Install
10 | 
11 | ```sh
12 | npm i it-to-stream
13 | ```
14 | 
15 | ## Usage
16 | 
17 | ```js
18 | const toStream = require('it-to-stream')
19 | ```
20 | 
21 | ### Convert source iterable to readable stream
22 | 
23 | ```js
24 | // A streaming iterable "source" is just an (async) iterable
25 | const source = (async function * () {
26 |   for (const value of [1, 2, 3, 4]) yield Buffer.from(value.toString())
27 | })()
28 | 
29 | const readable = toStream.readable(source)
30 | 
31 | // Now that we have a readable stream, we can consume it:
32 | readable.on('data', console.log)
33 | // or
34 | readable.pipe(writable)
35 | // or
36 | pipeline(readable, writable, err => console.log(err || 'done'))
37 | ```
38 | 
39 | ### Convert sink iterable to writable stream
40 | 
41 | ```js
42 | // A streaming iterable "sink" is an (async) function that takes a "source"
43 | // and consumes it.
44 | const sink = async source => {
45 |   for await (const chunk of source) {
46 |     console.log(chunk.toString())
47 |   }
48 | }
49 | 
50 | const writable = toStream.writable(sink)
51 | 
52 | // Now that we have a writable stream, we can pipe to it:
53 | fs.createReadStream('/path/to/file').pipe(writable)
54 | ```
55 | 
56 | ### Convert transform iterable to transform stream
57 | 
58 | ```js
59 | // A streaming iterable "transform" is a function that takes a "source" and
60 | // returns a "source".
61 | const transform = source => (async function * () {
62 |   for await (const chunk of source) {
63 |     // Replace all space characters with dashes
64 |     yield Buffer.from(chunk.toString().replace(/ /g, '-'))
65 |   }
66 | })()
67 | 
68 | const transformStream = toStream.transform(transform)
69 | 
70 | // Now that we have a transform stream, we can pipe to and from it:
71 | fs.createReadStream('/path/to/file')
72 |   .pipe(transformStream)
73 |   .pipe(fs.createWriteStream('/path/to/file2'))
74 | ```
75 | 
76 | ## API
77 | 
78 | ```js
79 | const toStream = require('it-to-stream')
80 | ```
81 | 
82 | ### `toStream.readable(source, [options]): Readable`
83 | 
84 | Convert a [source](https://gist.github.com/alanshaw/591dc7dd54e4f99338a347ef568d6ee9#source-it) iterable to a [`Readable`](https://nodejs.org/dist/latest/docs/api/stream.html#stream_readable_streams) stream.
85 | 
86 | `options` are passed directly to the `Readable` constructor.
87 | 
88 | ### `toStream.writable(sink, [options]): Writable`
89 | 
90 | Convert a [sink](https://gist.github.com/alanshaw/591dc7dd54e4f99338a347ef568d6ee9#sink-it) iterable to a [`Writable`](https://nodejs.org/dist/latest/docs/api/stream.html#stream_writable_streams) stream.
91 | 
92 | `options` are passed directly to the `Writable` constructor.
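For example, passing `objectMode: true` should let the sink receive values exactly as they were written (plain strings here) instead of `Buffer`s — a minimal sketch:

```js
const writable = toStream.writable(async source => {
  // chunks arrive exactly as written because objectMode is forwarded
  // to the underlying Writable constructor
  for await (const chunk of source) {
    console.log(chunk)
  }
}, { objectMode: true })

writable.write('hello')
writable.write('world')
writable.end()
```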
93 | 94 | ### `toStream.transform(transform, [options]): Transform` 95 | 96 | Convert a [transform](https://gist.github.com/alanshaw/591dc7dd54e4f99338a347ef568d6ee9#transform-it) iterable to a [`Transform`](https://nodejs.org/dist/latest/docs/api/stream.html#stream_duplex_and_transform_streams) stream. 97 | 98 | `options` are passed directly to the `Transform` constructor. 99 | 100 | ### `toStream.duplex(duplex, [options]): Duplex` 101 | 102 | Convert a [duplex](https://gist.github.com/alanshaw/591dc7dd54e4f99338a347ef568d6ee9#duplex-it) iterable to a [`Duplex`](https://nodejs.org/dist/latest/docs/api/stream.html#stream_duplex_and_transform_streams) stream. 103 | 104 | `options` are passed directly to the `Duplex` constructor. 105 | 106 | ## Related 107 | 108 | * [`stream-to-it`](https://www.npmjs.com/package/stream-to-it) Convert Node.js streams to streaming iterables 109 | * [`it-pipe`](https://www.npmjs.com/package/it-pipe) Utility to "pipe" async iterables together 110 | 111 | ## Contribute 112 | 113 | Feel free to dive in! [Open an issue](https://github.com/alanshaw/it-to-stream/issues/new) or submit PRs. 114 | 115 | ## License 116 | 117 | [MIT](LICENSE) © Alan Shaw 118 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "it-to-stream", 3 | "version": "1.0.0", 4 | "description": "Convert streaming iterables to Node.js streams", 5 | "main": "src/index.js", 6 | "browser": { 7 | "stream": "readable-stream" 8 | }, 9 | "scripts": { 10 | "test": "ava test/*.test.js --verbose", 11 | "lint": "standard", 12 | "coverage": "nyc --reporter=text --reporter=lcov npm test" 13 | }, 14 | "keywords": [ 15 | "stream", 16 | "readablestream" 17 | ], 18 | "author": "Alan Shaw", 19 | "license": "MIT", 20 | "dependencies": { 21 | "buffer": "^6.0.3", 22 | "fast-fifo": "^1.0.0", 23 | "get-iterator": "^1.0.2", 24 | "p-defer": "^3.0.0", 25 | "p-fifo": "^1.0.0", 26 | "readable-stream": "^3.6.0" 27 | }, 28 | "devDependencies": { 29 | "ava": "^3.9.0", 30 | "it-pair": "^1.0.0", 31 | "nyc": "^15.1.0", 32 | "standard": "^16.0.3" 33 | }, 34 | "directories": { 35 | "test": "test" 36 | }, 37 | "repository": { 38 | "type": "git", 39 | "url": "git+https://github.com/alanshaw/it-to-stream.git" 40 | }, 41 | "bugs": { 42 | "url": "https://github.com/alanshaw/it-to-stream/issues" 43 | }, 44 | "homepage": "https://github.com/alanshaw/it-to-stream#readme" 45 | } 46 | -------------------------------------------------------------------------------- /src/duplex.js: -------------------------------------------------------------------------------- 1 | const { Readable, Writable, Duplex } = require('stream') 2 | const getIterator = require('get-iterator') 3 | const Fifo = require('p-fifo') 4 | const { Buffer } = require('buffer') 5 | const END_CHUNK = Buffer.alloc(0) 6 | 7 | module.exports = function toDuplex (duplex, options) { 8 | options = options || {} 9 | 10 | let reading = false 11 | const fifo = new Fifo() 12 | 13 | duplex = { 14 | sink: duplex.sink, 15 | source: duplex.source ? 
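      // get-iterator normalises the source (an async or sync iterable, or an
      // iterator) into a plain iterator, so read() below can call
      // duplex.source.next(size)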
getIterator(duplex.source) : null 16 | } 17 | 18 | let Stream = Duplex 19 | if (!duplex.source) { 20 | Stream = Writable 21 | } else if (!duplex.sink) { 22 | Stream = Readable 23 | } 24 | 25 | let readable 26 | if (duplex.source) { 27 | readable = { 28 | async read (size) { 29 | if (reading) return 30 | reading = true 31 | 32 | try { 33 | while (true) { 34 | const { value, done } = await duplex.source.next(size) 35 | if (done) return this.push(null) 36 | if (!this.push(value)) break 37 | } 38 | } catch (err) { 39 | this.emit('error', err) 40 | } finally { 41 | reading = false 42 | } 43 | } 44 | } 45 | } 46 | 47 | let writable 48 | if (duplex.sink) { 49 | writable = { 50 | write (chunk, enc, cb) { 51 | fifo.push(chunk).then(() => cb(), cb) 52 | }, 53 | final (cb) { 54 | fifo.push(END_CHUNK).then(() => cb(), cb) 55 | } 56 | } 57 | } 58 | 59 | Object.assign(options, readable, writable) 60 | 61 | const stream = new Stream(options) 62 | 63 | if (duplex.sink) { 64 | duplex.sink({ 65 | [Symbol.asyncIterator] () { 66 | return this 67 | }, 68 | async next () { 69 | const chunk = await fifo.shift() 70 | return chunk === END_CHUNK ? { done: true } : { value: chunk } 71 | }, 72 | async throw (err) { 73 | stream.destroy(err) 74 | return { done: true } 75 | }, 76 | async return () { 77 | stream.destroy() 78 | return { done: true } 79 | } 80 | }) 81 | } 82 | 83 | return stream 84 | } 85 | -------------------------------------------------------------------------------- /src/index.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | const toTransform = require('./transform') 4 | const toDuplex = require('./duplex') 5 | 6 | function toReadable (source, options) { 7 | return toDuplex({ source }, options) 8 | } 9 | 10 | function toWritable (sink, options) { 11 | return toDuplex({ sink }, options) 12 | } 13 | 14 | module.exports = toReadable 15 | module.exports.readable = toReadable 16 | module.exports.writable = toWritable 17 | module.exports.transform = toTransform 18 | module.exports.duplex = toDuplex 19 | -------------------------------------------------------------------------------- /src/transform.js: -------------------------------------------------------------------------------- 1 | const toDuplex = require('./duplex') 2 | const defer = require('p-defer') 3 | 4 | module.exports = function toTransform (transform, options) { 5 | const { promise, resolve } = defer() 6 | 7 | const source = (async function * () { 8 | const it = await promise 9 | for await (const chunk of it) yield chunk 10 | })() 11 | 12 | return toDuplex({ sink: s => resolve(transform(s)), source }, options) 13 | } 14 | -------------------------------------------------------------------------------- /test/duplex.test.js: -------------------------------------------------------------------------------- 1 | const test = require('ava') 2 | const { Readable } = require('stream') 3 | const pair = require('it-pair') 4 | const toStream = require('../') 5 | const { collect } = require('./helpers/streams') 6 | const { randomInt, randomBytes } = require('./helpers/random') 7 | 8 | test('should convert to duplex stream', async t => { 9 | const input = Array.from(Array(randomInt(5, 10)), () => randomBytes(1, 512)) 10 | let i = 0 11 | 12 | const output = await collect( 13 | new Readable({ 14 | read (size) { 15 | let chunk = input[i] 16 | while (true) { 17 | if (!chunk) return this.push(null) 18 | i++ 19 | if (!this.push(chunk)) return 20 | chunk = input[i] 21 | } 22 | } 23 | }), 24 | 
toStream.duplex(pair()) 25 | ) 26 | 27 | t.deepEqual(output, input) 28 | }) 29 | -------------------------------------------------------------------------------- /test/helpers/random.js: -------------------------------------------------------------------------------- 1 | const Crypto = require('crypto') 2 | 3 | // Maximum is exclusive and the minimum is inclusive 4 | const randomInt = (min, max) => { 5 | min = Math.ceil(min) 6 | max = Math.floor(max) 7 | return Math.floor(Math.random() * (max - min)) + min 8 | } 9 | 10 | exports.randomInt = randomInt 11 | 12 | const randomBytes = (min, max) => Crypto.randomBytes(randomInt(min, max)) 13 | 14 | exports.randomBytes = randomBytes 15 | -------------------------------------------------------------------------------- /test/helpers/streams.js: -------------------------------------------------------------------------------- 1 | const { Writable, pipeline } = require('stream') 2 | 3 | // Promisified pipeline 4 | function pipe (...streams) { 5 | return new Promise((resolve, reject) => { 6 | pipeline(...streams, err => { 7 | // work around bug in node to make 'should end mid stream' test pass - https://github.com/nodejs/node/issues/23890 8 | if (err && err.code !== 'ERR_STREAM_PREMATURE_CLOSE') return reject(err) 9 | resolve() 10 | }) 11 | }) 12 | } 13 | 14 | exports.pipe = pipe 15 | 16 | // Pipe a bunch of streams together and collect the results 17 | exports.collect = async (...streams) => { 18 | const chunks = [] 19 | const collector = new Writable({ 20 | write (chunk, enc, cb) { 21 | chunks.push(chunk) 22 | cb() 23 | } 24 | }) 25 | 26 | await pipe(...[...streams, collector]) 27 | return chunks 28 | } 29 | -------------------------------------------------------------------------------- /test/readable.test.js: -------------------------------------------------------------------------------- 1 | const test = require('ava') 2 | const { Writable } = require('stream') 3 | const toStream = require('../') 4 | const { collect, pipe } = require('./helpers/streams') 5 | const { randomInt, randomBytes } = require('./helpers/random') 6 | 7 | test('should convert to readable stream', async t => { 8 | const input = Array.from(Array(randomInt(5, 10)), () => randomBytes(1, 512)) 9 | const output = await collect(toStream.readable(input, { objectMode: true })) 10 | t.deepEqual(input, output) 11 | }) 12 | 13 | test('should error the stream when source iterator errors', async t => { 14 | const input = (async function * () { 15 | yield randomBytes(1, 1024) 16 | yield randomBytes(1, 1024) 17 | throw new Error('boom') 18 | })() 19 | 20 | const err = await t.throwsAsync(collect(toStream.readable(input))) 21 | t.is(err.message, 'boom') 22 | }) 23 | 24 | test('should respect backpressure', async t => { 25 | const input = Array.from(Array(randomInt(10, 100)), () => randomBytes(1, 512)) 26 | const chunks = [] 27 | 28 | await pipe( 29 | toStream.readable(input, { 30 | highWaterMark: 1, 31 | objectMode: true 32 | }), 33 | new Writable({ 34 | highWaterMark: 1, 35 | objectMode: true, 36 | write (chunk, enc, cb) { 37 | chunks.push(chunk) 38 | setTimeout(() => cb(null, chunk), 10) 39 | } 40 | }) 41 | ) 42 | t.deepEqual(chunks, input) 43 | }) 44 | -------------------------------------------------------------------------------- /test/transform.test.js: -------------------------------------------------------------------------------- 1 | const test = require('ava') 2 | const { Readable } = require('stream') 3 | const { Buffer } = require('buffer') 4 | const toStream = require('../') 5 | 
const { collect } = require('./helpers/streams') 6 | const { randomInt, randomBytes } = require('./helpers/random') 7 | 8 | test('should convert to transform stream', async t => { 9 | const input = Array.from(Array(randomInt(5, 10)), () => randomBytes(1, 512)) 10 | let i = 0 11 | 12 | const suffix = Buffer.from(`${Date.now()}`) 13 | 14 | const output = await collect( 15 | new Readable({ 16 | read (size) { 17 | let chunk = input[i] 18 | while (true) { 19 | if (!chunk) return this.push(null) 20 | i++ 21 | if (!this.push(chunk)) return 22 | chunk = input[i] 23 | } 24 | } 25 | }), 26 | // Transform every chunk to have a "suffix" 27 | toStream.transform(source => (async function * () { 28 | for await (const chunk of source) { 29 | yield Buffer.concat([chunk, suffix]) 30 | } 31 | })()) 32 | ) 33 | 34 | t.is(output.length, input.length) 35 | 36 | input.forEach((inputBuffer, i) => { 37 | t.deepEqual(Buffer.concat([inputBuffer, suffix]), output[i]) 38 | }) 39 | }) 40 | 41 | test('should transform single chunk into multiple chunks', async t => { 42 | const input = Array.from(Array(randomInt(5, 10)), () => randomBytes(1, 512)) 43 | let i = 0 44 | 45 | const separator = Buffer.from(`${Date.now()}`) 46 | 47 | const output = await collect( 48 | new Readable({ 49 | read (size) { 50 | let chunk = input[i] 51 | while (true) { 52 | if (!chunk) return this.push(null) 53 | i++ 54 | if (!this.push(chunk)) return 55 | chunk = input[i] 56 | } 57 | } 58 | }), 59 | // Add a separator after every chunk 60 | toStream.transform(source => (async function * () { 61 | for await (const chunk of source) { 62 | yield chunk 63 | yield separator 64 | } 65 | })()) 66 | ) 67 | 68 | t.is(output.length, input.length * 2) 69 | }) 70 | 71 | test('should transform single chunk into no chunks', async t => { 72 | const input = Array.from(Array(randomInt(5, 10)), () => randomBytes(1, 512)) 73 | let i = 0 74 | 75 | const output = await collect( 76 | new Readable({ 77 | read (size) { 78 | let chunk = input[i] 79 | while (true) { 80 | if (!chunk) return this.push(null) 81 | i++ 82 | if (!this.push(chunk)) return 83 | chunk = input[i] 84 | } 85 | } 86 | }), 87 | toStream.transform(source => (async function * () { 88 | // eslint-disable-next-line no-unused-vars, no-empty 89 | for await (const chunk of source) {} 90 | })()) 91 | ) 92 | 93 | t.is(output.length, 0) 94 | }) 95 | 96 | test('should error the stream when transform iterator errors', async t => { 97 | const input = Array.from(Array(randomInt(5, 10)), () => randomBytes(1, 512)) 98 | let i = 0 99 | 100 | const err = await t.throwsAsync(collect( 101 | new Readable({ 102 | read (size) { 103 | let chunk = input[i] 104 | while (true) { 105 | if (!chunk) return this.push(null) 106 | i++ 107 | if (!this.push(chunk)) return 108 | chunk = input[i] 109 | } 110 | } 111 | }), 112 | toStream.transform(source => (async function * () { 113 | // eslint-disable-next-line no-unused-vars 114 | for await (const chunk of source) { 115 | if (i > 2) throw new Error('boom') 116 | } 117 | })()) 118 | )) 119 | 120 | t.is(err.message, 'boom') 121 | }) 122 | -------------------------------------------------------------------------------- /test/writable.test.js: -------------------------------------------------------------------------------- 1 | const test = require('ava') 2 | const { Readable } = require('stream') 3 | const toStream = require('../') 4 | const { pipe } = require('./helpers/streams') 5 | const { randomInt, randomBytes } = require('./helpers/random') 6 | 7 | test('should convert to writable 
stream', async t => { 8 | const input = Array.from(Array(randomInt(5, 10)), () => randomBytes(1, 512)) 9 | const output = [] 10 | let i = 0 11 | 12 | await pipe( 13 | new Readable({ 14 | read (size) { 15 | let chunk = input[i] 16 | while (true) { 17 | if (!chunk) return this.push(null) 18 | i++ 19 | if (!this.push(chunk)) return 20 | chunk = input[i] 21 | } 22 | } 23 | }), 24 | toStream.writable(async source => { 25 | for await (const chunk of source) { 26 | output.push(chunk) 27 | } 28 | }) 29 | ) 30 | 31 | t.deepEqual(output, input) 32 | }) 33 | 34 | test('should end mid stream', async t => { 35 | const input = Array.from(Array(randomInt(5, 10)), () => randomBytes(1, 512)) 36 | const output = [] 37 | let i = 0 38 | 39 | await pipe( 40 | new Readable({ 41 | read (size) { 42 | let chunk = input[i] 43 | while (true) { 44 | if (!chunk) return this.push(null) 45 | i++ 46 | if (!this.push(chunk)) return 47 | chunk = input[i] 48 | } 49 | } 50 | }), 51 | toStream.writable(async source => { 52 | const { value, done } = await source.next() 53 | if (!done) { 54 | output.push(value) 55 | } 56 | if (source.return) { 57 | source.return() 58 | } 59 | }) 60 | ) 61 | 62 | t.deepEqual(output, input.slice(0, 1)) 63 | }) 64 | 65 | test('should throw mid stream', async t => { 66 | const input = Array.from(Array(randomInt(5, 10)), () => randomBytes(1, 512)) 67 | const output = [] 68 | let i = 0 69 | 70 | const err = await t.throwsAsync(pipe( 71 | new Readable({ 72 | read (size) { 73 | let chunk = input[i] 74 | while (true) { 75 | if (!chunk) return this.push(null) 76 | i++ 77 | if (!this.push(chunk)) return 78 | chunk = input[i] 79 | } 80 | } 81 | }), 82 | toStream.writable(async source => { 83 | const { value } = await source.next() 84 | output.push(value) 85 | await source.throw(new Error('boom!')) 86 | }) 87 | )) 88 | 89 | t.is(err.message, 'boom!') 90 | t.deepEqual(output, input.slice(0, 1)) 91 | }) 92 | --------------------------------------------------------------------------------