├── .github
├── dependabot.yml
└── workflows
│ └── ci.yml
├── .gitignore
├── LICENSE
├── README.md
├── benchmarks
└── basic.js
├── example.js
├── package.json
├── syncthrough.js
└── test.js
/.github/dependabot.yml:
--------------------------------------------------------------------------------
1 | version: 2
2 | updates:
3 | - package-ecosystem: npm
4 | directory: "/"
5 | schedule:
6 | interval: daily
7 | open-pull-requests-limit: 10
8 | ignore:
9 | - dependency-name: standard
10 | versions:
11 | - 16.0.3
12 |
--------------------------------------------------------------------------------
/.github/workflows/ci.yml:
--------------------------------------------------------------------------------
1 | name: ci
2 |
3 | on:
4 | push:
5 | paths-ignore:
6 | - 'docs/**'
7 | - '*.md'
8 | pull_request:
9 | paths-ignore:
10 | - 'docs/**'
11 | - '*.md'
12 |
13 | jobs:
14 | test:
15 | runs-on: ${{matrix.os}}
16 |
17 | strategy:
18 | matrix:
19 | node-version: [18.x, 20.x, 21.x]
20 | os: [ubuntu-latest]
21 | steps:
- uses: actions/checkout@v4
23 |
24 | - name: Use Node.js
uses: actions/setup-node@v4
26 | with:
27 | node-version: ${{ matrix.node-version }}
28 |
29 | - name: Install
30 | run: |
31 | npm install
32 |
33 | - name: Run tests
34 | run: |
35 | npm run test
36 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Logs
2 | logs
3 | *.log
4 | npm-debug.log*
5 |
6 | # Runtime data
7 | pids
8 | *.pid
9 | *.seed
10 |
11 | # Directory for instrumented libs generated by jscoverage/JSCover
12 | lib-cov
13 |
14 | # Coverage directory used by tools like istanbul
15 | coverage
16 |
17 | # nyc test coverage
18 | .nyc_output
19 |
20 | # Grunt intermediate storage (http://gruntjs.com/creating-plugins#storing-task-files)
21 | .grunt
22 |
23 | # node-waf configuration
24 | .lock-wscript
25 |
26 | # Compiled binary addons (http://nodejs.org/api/addons.html)
27 | build/Release
28 |
29 | # Dependency directories
30 | node_modules
31 | jspm_packages
32 |
33 | # Optional npm cache directory
34 | .npm
35 |
36 | # Optional REPL history
37 | .node_repl_history
38 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2017 Matteo Collina
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # syncthrough [](https://github.com/mcollina/syncthrough/actions/workflows/ci.yml)
2 |
Transform your data as it passes by, synchronously.
4 |
5 | **syncthrough** is a synchronous transform stream, similar to [Transform
6 | stream][transform] and [through2](https://github.com/rvagg/through2), but with a synchronous processing function.
**syncthrough** enforces backpressure, but it maintains no internal
buffering, allowing much greater throughput.
9 | In fact, it delivers 10x performance over a standard
10 | [`Transform`][transform].
11 |
Because of the [caveats](#caveats), it is best used in combination with
13 | [`pipe()`][pipe], [`pump()`][pump], or [`pipeline()`][pipeline].
14 |
15 | ## Install
16 |
17 | ```
18 | npm i syncthrough --save
19 | ```
20 |
21 | ## Example
22 |
23 | ```js
24 | import { createReadStream } from 'node:fs'
25 | import { pipeline } from 'node:stream/promises'
26 | import { syncthrough } from 'syncthrough'
27 |
28 | await pipeline(
29 | createReadStream(import.meta.filename),
30 | syncthrough(function (chunk) {
31 | // there is no callback here
32 | // you can return null to end the stream
33 | // returning undefined will let you skip this chunk
34 | return chunk.toString().toUpperCase()
35 | }),
36 | process.stdout)
37 | ```
38 |
39 | ## API
40 |
41 | ### syncthrough([transform(chunk)], [flush()])
42 |
43 | Returns a new instance of `syncthrough`, where `transform(chunk)` is the
44 | transformation that will be applied to all incoming chunks.
45 |
46 | The default `transform` function is:
47 |
48 | ```js
49 | function (chunk) {
50 | return chunk
51 | }
52 | ```
53 |
54 | If it returns `null`, the stream will be closed. If it returns
55 | `undefined`, the chunk will be skipped.
56 |
57 | There is currently no way to split an incoming chunk into multiple
58 | chunks.
59 |
60 | The `flush()` function will be called before the transform sends `end()`
61 | on the destination.
62 |
63 | ### syncthrough([transform(object)], [flush()])
64 |
65 | Returns a new instance of `syncthrough`, where `transform(object)` is the
66 | transformation that will be applied to all incoming objects.
67 |
68 | Syncthrough is compatible with Streams in [Object Mode](https://nodejs.org/api/stream.html#stream_object_mode),
69 | the API is exactly the same, simply expect objects instead of buffer chunks.
70 |
71 | ### instance.push(chunk)
72 |
73 | Push a chunk to the destination.
74 |
75 | ## Caveats
76 |
77 | The API is the same of a streams 3 [`Transform`][transform], with some major differences:
78 |
1. *backpressure is enforced*, and the instance performs no buffering:
   `write()` cannot be called again after it has returned `false`, or the
   stream will `throw` (you need to wait for a `'drain'` event first).
2. It does not inherit from any of the Streams classes, and it does not
have `_readableState` nor `_writableState`.
3. It does not have a `read(n)` method, nor does it emit the
`'readable'` event; the data is pushed whenever ready.
86 |
87 |
88 | ## Acknowledgements
89 |
90 | This project was kindly sponsored by [nearForm](http://nearform.com).
91 |
92 | ## License
93 |
94 | MIT
95 |
96 | [transform]: https://nodejs.org/api/stream.html#stream_class_stream_transform
97 | [pipe]: https://nodejs.org/api/stream.html#stream_readable_pipe_destination_options
98 | [pump]: https://github.com/mafintosh/pump
99 | [pipeline]: https://nodejs.org/api/stream.html#streampipelinesource-transforms-destination-options
100 |
--------------------------------------------------------------------------------
/benchmarks/basic.js:
--------------------------------------------------------------------------------
1 | 'use strict'
2 |
3 | const bench = require('fastbench')
4 | const syncthrough = require('../')
5 | const through2 = require('through2')
6 | const through = require('through')
7 | const { PassThrough } = require('node:stream')
8 | const data = Buffer.from('hello')
9 |
// Push `data` into stream `s` until 1000 writes have been issued,
// honouring backpressure: stop on a false write() and resume the
// remaining count when 'drain' fires; call end() once all are written.
function write (i, s) {
  while (i < 1000) {
    const ok = s.write(data)
    i++
    if (!ok) break
  }

  if (i < 1000) {
    s.once('drain', write.bind(undefined, i, s))
  } else {
    s.end()
  }
}
24 |
// through2 baseline: drain via 'data', signal completion on 'end'.
function benchThrough2 (done) {
  const stream = through2()
  stream.on('data', noop)
  stream.on('end', done)

  write(0, stream)
}

// Node core PassThrough baseline.
function benchPassThrough (done) {
  const stream = PassThrough()
  stream.on('data', noop)
  stream.on('end', done)

  write(0, stream)
}

// through emits 'end' synchronously, so `done` must be deferred via
// next() below to avoid blowing the stack across iterations.
let lastDone = null
function benchThrough (done) {
  const stream = through()
  lastDone = done
  stream.on('data', noop)
  stream.on('end', next)

  write(0, stream)
}

function next () {
  // needed to avoid a max stack call exception
  process.nextTick(lastDone)
}

// The stream under benchmark.
function benchSyncThrough (done) {
  const stream = syncthrough()

  stream.on('data', noop)
  stream.on('end', done)

  write(0, stream)
}

function noop () {}

// 10000 iterations per benchmark; run(run) executes the whole suite a
// second time so the second pass measures warmed-up code.
const run = bench([
  benchThrough2,
  benchThrough,
  benchPassThrough,
  benchSyncThrough
], 10000)

run(run)
--------------------------------------------------------------------------------
/example.js:
--------------------------------------------------------------------------------
'use strict'

const fs = require('fs')
const syncthrough = require('.')

// Stream this very file through an uppercasing syncthrough to stdout.
// { end: false } keeps process.stdout open once the file is consumed.
fs.createReadStream(__filename)
  .pipe(syncthrough(function (chunk) {
    return chunk.toString().toUpperCase()
  }))
  .pipe(process.stdout, { end: false })
--------------------------------------------------------------------------------
/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "syncthrough",
3 | "version": "1.0.0",
4 | "description": "A Transform stream that is sync, and very fast",
5 | "main": "syncthrough.js",
6 | "scripts": {
7 | "test": "standard | snazzy && borp --coverage test.js"
8 | },
9 | "repository": {
10 | "type": "git",
11 | "url": "git+https://github.com/mcollina/syncthrough.git"
12 | },
13 | "keywords": [
14 | "streams",
15 | "stream",
16 | "transform",
17 | "through",
18 | "through2"
19 | ],
20 | "author": "Matteo Collina ",
21 | "license": "MIT",
22 | "bugs": {
23 | "url": "https://github.com/mcollina/syncthrough/issues"
24 | },
25 | "homepage": "https://github.com/mcollina/syncthrough#readme",
26 | "devDependencies": {
27 | "@fastify/pre-commit": "^2.1.0",
28 | "@matteo.collina/tspl": "^0.2.0",
29 | "borp": "^0.20.0",
30 | "fastbench": "^1.0.1",
31 | "pump": "^3.0.0",
32 | "readable-stream": "^4.0.0",
33 | "snazzy": "^9.0.0",
34 | "standard": "^17.1.0",
35 | "through": "^2.3.8",
36 | "through2": "^4.0.2"
37 | }
38 | }
39 |
--------------------------------------------------------------------------------
/syncthrough.js:
--------------------------------------------------------------------------------
1 | 'use strict'
2 |
3 | const { EventEmitter } = require('events')
4 | const nextTick = process.nextTick
5 |
/**
 * A synchronous Transform-like stream: enforces backpressure but keeps
 * no internal buffer. Callable with or without `new`.
 *
 * @param {Function} [transform] maps each chunk; `null` ends, `undefined` skips
 * @param {Function} [flush] produces an optional final chunk before end
 */
function SyncThrough (transform, flush) {
  // Support plain-function invocation.
  if (!(this instanceof SyncThrough)) {
    return new SyncThrough(transform, flush)
  }

  EventEmitter.call(this)

  // Both hooks default to the identity function.
  this._transform = transform || passthrough
  this._flush = flush || passthrough

  this._destination = null          // set by pipe()
  this._inFlight = undefined        // single chunk buffered while unpiped
  this.writable = true
  this._endEmitted = false
  this._destinationNeedsEnd = true  // cleared by pipe(dest, { end: false })
  this._lastPush = true             // last backpressure signal from dest

  // 'data' listeners switch the stream into flowing mode (see onNewListener).
  this.on('newListener', onNewListener)
  this.on('removeListener', onRemoveListener)
  this.on('end', onEnd)
}
26 |
// 'newListener' hook: attaching on('data') switches the stream into
// flowing mode by piping into an internal OnData sink (deferred a tick).
function onNewListener (ev, func) {
  if (ev !== 'data') {
    return
  }

  const dest = this._destination
  if (dest && !(dest instanceof OnData)) {
    // A real pipe() destination is already attached; mixing styles is an error.
    throw new Error('you can use only pipe() or on(\'data\')')
  }

  nextTick(deferPiping, this)
}
35 |
// Deferred from onNewListener: attach the internal OnData destination
// (unless a previous deferral already did) and replay a pending 'end'
// for streams that finished before the 'data' listener was added.
function deferPiping (that) {
  if (that._destination && that._destination instanceof OnData) {
    // nothing to do, piping was deferred twice for on('data')
    return
  }

  that.pipe(new OnData(that))
  // Logical && — the previous bitwise & only worked because both
  // operands happened to be booleans.
  if (!that.writable && !that._endEmitted) {
    that.emit('end')
  }
}
47 |
// 'removeListener' hook: once the LAST 'data' listener is gone, detach
// the internal OnData sink so data stops flowing.
// (The previous check called `listenerCount` on the event-name string,
// which is always undefined, so it unpiped on every removal even while
// other 'data' listeners remained.)
// Note: 'removeListener' fires after removal, so the count reflects the
// listeners still attached.
function onRemoveListener (ev, func) {
  if (ev === 'data' && this.listenerCount('data') === 0) {
    this.unpipe(this._destination)
  }
}

// Record that 'end' has been emitted so deferPiping will not replay it.
function onEnd () {
  this._endEmitted = true
}
57 |
58 | Object.setPrototypeOf(SyncThrough.prototype, EventEmitter.prototype)
59 | Object.setPrototypeOf(SyncThrough, EventEmitter)
60 |
/**
 * Attach `dest` as the single destination of this stream.
 * Emits 'pipe' on dest, forwards its 'drain', ends this stream when the
 * destination ends, and honours pipe(dest, { end: false }).
 * Throws on a second pipe() — multiple destinations are not supported.
 */
SyncThrough.prototype.pipe = function (dest, opts) {
  const that = this
  const inFlight = this._inFlight

  if (this._destination) {
    throw new Error('multiple pipe not allowed')
  }
  this._destination = dest

  dest.emit('pipe', this)

  // Propagate destination backpressure release to our writer.
  this._destination.on('drain', function () {
    that.emit('drain')
  })

  // If the destination ends, end this stream as well.
  this._destination.on('end', function () {
    that.end()
  })

  // { end: false } keeps the destination open when this stream ends.
  this._destinationNeedsEnd = !opts || opts.end !== false

  // Replay the single chunk buffered while unpiped; a buffered `null`
  // means the transform already requested end-of-stream.
  if (inFlight && this._destination.write(inFlight)) {
    this._inFlight = undefined
    this.emit('drain')
  } else if (inFlight === null) {
    doEnd(this)
  }

  return dest
}
91 |
/**
 * Detach `dest` if (and only if) it is the current destination.
 * Emits 'unpipe' on the destination; always returns `this` for chaining.
 */
SyncThrough.prototype.unpipe = function (dest) {
  const current = this._destination
  if (current && current === dest) {
    this._destination = null
    dest.emit('unpipe', this)
  }
  return this
}
103 |
/**
 * Write `chunk` through the transform and forward the result.
 *
 * Transform contract (see README): returning `null` ends the stream,
 * returning `undefined` skips the chunk, and any other value — including
 * falsy object-mode values such as 0, '' or false — is forwarded.
 *
 * Returns false when the caller must wait for 'drain'; emits 'error'
 * (never throws) on write-after-EOF.
 */
SyncThrough.prototype.write = function (chunk) {
  if (!this.writable) {
    this.emit('error', new Error('write after EOF'))
    return false
  }

  const res = this._transform(chunk)

  if (!this._destination) {
    // Not piped yet: buffer a single chunk and signal backpressure.
    // Compare against undefined so a buffered falsy value still counts.
    if (this._inFlight !== undefined) {
      this.emit('error', new Error('upstream must respect backpressure'))
      return false
    }
    this._inFlight = res
    return false
  }

  if (res === null) {
    // The transform asked to end the stream.
    doEnd(this)
    return false
  }

  if (res !== undefined) {
    // Forward every produced value; the previous truthiness check
    // (`if (res)`) silently dropped falsy object-mode chunks.
    this._lastPush = this._destination.write(res)
  }

  return this._lastPush
}
130 |
/**
 * Push `chunk` straight to the destination, bypassing the transform.
 * Intended to be called from inside the transform via `this`.
 * Records the destination's backpressure signal but returns `this`.
 */
SyncThrough.prototype.push = function (chunk) {
  const dest = this._destination
  this._lastPush = dest.write(chunk)
  return this
}
136 |
/**
 * Signal EOF, optionally writing a final `chunk` first.
 * Ending twice makes the inner write() emit 'error' ('write after EOF').
 */
SyncThrough.prototype.end = function (chunk) {
  if (chunk) {
    // Emits 'error' if we are already past EOF.
    this.write(chunk)
  }
  doEnd(this)
  return this
}
146 |
// Shared end path for end() and a transform returning null.
// Runs at most once (guarded by `writable`): flushes, forwards end() to
// the destination (unless piped with { end: false }) and emits 'end'.
// With no destination yet, only marks the stream unwritable; the 'end'
// is replayed later by deferPiping.
function doEnd (that) {
  if (that.writable) {
    that.writable = false
    if (that._destination) {
      that._endEmitted = true
      // _flush() may return a final chunk; normalize undefined to null.
      const toFlush = that._flush() || null
      if (that._destinationNeedsEnd) {
        that._destination.end(toFlush)
      } else if (toFlush !== null) {
        // { end: false }: deliver the flushed chunk but keep dest open.
        that._destination.write(toFlush)
      }
      that.emit('end')
    }
  }
}
162 |
/**
 * Destroy the stream. Idempotent: only the first call schedules the
 * deferred 'error'/'close' emission (next tick). Returns `this`.
 */
SyncThrough.prototype.destroy = function (err) {
  if (this._destroyed) {
    return this
  }
  this._destroyed = true
  nextTick(doDestroy, this, err)
  return this
}
172 |
// Deferred destroy work: emit 'error' (when one was given), then 'close'.
function doDestroy (that, err) {
  if (err) that.emit('error', err)
  that.emit('close')
}
179 |
// Identity function: default for both the transform and flush hooks.
function passthrough (chunk) {
  return chunk
}
183 |
// Internal sink used when consumers attach on('data') instead of piping:
// every write() is re-emitted as a 'data' event on the parent stream.
function OnData (parent) {
  EventEmitter.call(this)
  this.parent = parent
}

Object.setPrototypeOf(OnData.prototype, EventEmitter.prototype)
Object.setPrototypeOf(OnData, EventEmitter)

// Never exerts backpressure: every write is reported as accepted.
OnData.prototype.write = function (chunk) {
  this.parent.emit('data', chunk)
  return true
}

// No-op: end-of-stream handling happens on the parent itself.
OnData.prototype.end = function () {
}
199 |
// Export the constructor (usable with or without `new`).
module.exports = SyncThrough
201 |
--------------------------------------------------------------------------------
/test.js:
--------------------------------------------------------------------------------
1 | 'use strict'
2 |
3 | const { test } = require('node:test')
4 | const tspl = require('@matteo.collina/tspl')
5 | const syncthrough = require('./')
6 | const Readable = require('readable-stream').Readable
7 | const Writable = require('readable-stream').Writable
8 | const fs = require('fs')
9 | const eos = require('end-of-stream')
10 | const pump = require('pump')
11 | const { pipeline } = require('node:stream')
12 | const through = require('through')
13 |
// Readable that emits the given chunks in order, then EOF.
function stringFrom (chunks) {
  return new Readable({
    read: function (n) {
      const next = chunks.shift()
      this.push(next || null)
    }
  })
}
21 |
// Writable asserting each incoming chunk equals the next expected one.
function stringSink (t, expected) {
  return new Writable({
    write: function (chunk, enc, cb) {
      const want = expected.shift()
      t.equal(chunk.toString(), want.toString(), 'chunk matches')
      cb()
    }
  })
}
30 |
// Like stringSink, but with a tiny highWaterMark and a deferred cb so
// the sink exerts backpressure on its writer.
function delayedStringSink (t, expected) {
  return new Writable({
    highWaterMark: 2,
    write: function (chunk, enc, cb) {
      const want = expected.shift()
      t.equal(chunk.toString(), want.toString(), 'chunk matches')
      setImmediate(cb)
    }
  })
}
40 |
// Object-mode Readable emitting the given objects in order, then EOF.
function objectFrom (chunks) {
  return new Readable({
    objectMode: true,
    read: function (n) {
      const next = chunks.shift()
      this.push(next || null)
    }
  })
}
49 |
// Object-mode Writable deep-comparing each chunk against the expected
// list; any chunk beyond the list fails the test.
function objectSink (t, expected) {
  return new Writable({
    objectMode: true,
    write: function (chunk, enc, cb) {
      if (expected.length === 0) {
        t.ok(false, `unexpected chunk "${chunk}"`)
      } else {
        t.deepEqual(chunk, expected.shift(), 'chunk matches')
      }
      cb()
    }
  })
}
63 |
// Basic pipe: chunks are transformed and delivered to the sink.
test('pipe', async function (_t) {
  const t = tspl(_t, { plan: 3 })

  const stream = syncthrough(function (chunk) {
    return Buffer.from(chunk.toString().toUpperCase())
  })
  const from = stringFrom([Buffer.from('foo'), Buffer.from('bar')])
  const sink = stringSink(t, [Buffer.from('FOO'), Buffer.from('BAR')])

  sink.on('finish', function () {
    t.ok('finish emitted')
  })

  from.pipe(stream).pipe(sink)

  await t.completed
})

// Two syncthrough instances chained in one pipeline compose correctly.
test('multiple pipe', async function (_t) {
  const t = tspl(_t, { plan: 3 })

  const stream = syncthrough(function (chunk) {
    return Buffer.from(chunk.toString().toUpperCase())
  })

  const stream2 = syncthrough(function (chunk) {
    return Buffer.from(chunk.toString().toLowerCase())
  })

  const from = stringFrom([Buffer.from('foo'), Buffer.from('bar')])
  const sink = stringSink(t, [Buffer.from('foo'), Buffer.from('bar')])

  sink.on('finish', function () {
    t.ok('finish emitted')
  })

  from.pipe(stream).pipe(stream2).pipe(sink)
  await t.completed
})

// A slow sink (delayedStringSink) must still receive every chunk.
test('backpressure', async function (_t) {
  const t = tspl(_t, { plan: 3 })

  const stream = syncthrough(function (chunk) {
    return Buffer.from(chunk.toString().toUpperCase())
  })

  const from = stringFrom([Buffer.from('foo'), Buffer.from('bar')])
  const sink = delayedStringSink(t, [Buffer.from('FOO'), Buffer.from('BAR')])

  sink.on('finish', function () {
    t.ok('finish emitted')
  })

  from.pipe(stream).pipe(sink)
  await t.completed
})

// Backpressure must propagate through a chain of syncthroughs.
test('multiple pipe with backpressure', async function (_t) {
  const t = tspl(_t, { plan: 4 })

  const stream = syncthrough(function (chunk) {
    return Buffer.from(chunk.toString().toUpperCase())
  })

  const stream2 = syncthrough(function (chunk) {
    return Buffer.from(chunk.toString().toLowerCase())
  })

  const from = stringFrom([Buffer.from('foo'), Buffer.from('bar'), Buffer.from('baz')])
  const sink = delayedStringSink(t, [Buffer.from('foo'), Buffer.from('bar'), Buffer.from('baz')])

  sink.on('finish', function () {
    t.ok('finish emitted')
  })

  from.pipe(stream).pipe(stream2).pipe(sink)
  await t.completed
})

// Object-mode support: objects pass through the transform unchanged.
test('objects', async function (_t) {
  const t = tspl(_t, { plan: 3 })

  const stream = syncthrough(function (chunk) {
    return { chunk }
  })
  const from = objectFrom([{ name: 'matteo' }, { answer: 42 }])
  const sink = objectSink(t, [{ chunk: { name: 'matteo' } }, { chunk: { answer: 42 } }])

  sink.on('finish', function () {
    t.ok('finish emitted')
  })

  from.pipe(stream).pipe(sink)
  await t.completed
})
160 |
// 'pipe' must be emitted both on the syncthrough and on its sink.
test('pipe event', async function (_t) {
  const t = tspl(_t, { plan: 4 })

  const stream = syncthrough(function (chunk) {
    return Buffer.from(chunk.toString().toUpperCase())
  })
  const from = stringFrom([Buffer.from('foo'), Buffer.from('bar')])
  const sink = stringSink(t, [Buffer.from('FOO'), Buffer.from('BAR')])

  stream.on('pipe', function (s) {
    t.equal(s, from, 'pipe emitted on stream')
  })

  sink.on('pipe', function (s) {
    t.equal(s, stream, 'pipe emitted on sink')
  })

  from.pipe(stream).pipe(sink)
  await t.completed
})

// unpipe() emits 'unpipe' on the sink and stops further delivery.
test('unpipe event', async function (_t) {
  const t = tspl(_t, { plan: 2 })

  const stream = syncthrough(function (chunk) {
    return Buffer.from(chunk.toString().toUpperCase())
  })
  const from = new Readable({ read: function () { } })
  const sink = stringSink(t, [Buffer.from('FOO')])

  sink.on('unpipe', function (s) {
    t.equal(s, stream, 'stream is unpiped')
  })

  from.pipe(stream).pipe(sink)
  from.push(Buffer.from('foo'))
  process.nextTick(function () {
    // writing is deferred, we need to let a write go syncthrough
    stream.unpipe(sink)
    from.push(Buffer.from('bar'))
  })
  await t.completed
})

// on('data') (flowing mode) delivers transformed chunks, then 'end'.
test('data event', async function (_t) {
  const t = tspl(_t, { plan: 3 })

  const stream = syncthrough(function (chunk) {
    return Buffer.from(chunk.toString().toUpperCase())
  })
  const from = stringFrom([Buffer.from('foo'), Buffer.from('bar')])
  const expected = [Buffer.from('FOO'), Buffer.from('BAR')]

  stream.on('data', function (chunk) {
    t.equal(chunk.toString(), expected.shift().toString(), 'chunk matches')
  })

  stream.on('end', function () {
    t.ok('end emitted')
  })

  from.pipe(stream)
  await t.completed
})

// 'end' must also fire when the syncthrough is in the middle of a pipe.
test('end event during pipe', async function (_t) {
  const t = tspl(_t, { plan: 3 })

  const stream = syncthrough(function (chunk) {
    return Buffer.from(chunk.toString().toUpperCase())
  })
  const from = stringFrom([Buffer.from('foo'), Buffer.from('bar')])
  const sink = stringSink(t, [Buffer.from('FOO'), Buffer.from('BAR')])

  stream.on('end', function () {
    t.ok('end emitted')
  })

  from.pipe(stream).pipe(sink)
  await t.completed
})

// end(chunk) writes the final chunk and then emits 'end'.
test('end()', async function (_t) {
  const t = tspl(_t, { plan: 2 })

  const stream = syncthrough(function (chunk) {
    return Buffer.from(chunk.toString().toUpperCase())
  })
  const expected = [Buffer.from('FOO')]

  stream.on('data', function (chunk) {
    t.equal(chunk.toString(), expected.shift().toString(), 'chunk matches')
  })

  stream.on('end', function () {
    t.ok('end emitted')
  })

  stream.end(Buffer.from('foo'))
  await t.completed
})

// A 'data' listener attached after end() still replays the buffered
// chunk and the 'end' event (deferred piping).
test('on(\'data\') after end()', async function (_t) {
  const t = tspl(_t, { plan: 2 })

  const stream = syncthrough(function (chunk) {
    return Buffer.from(chunk.toString().toUpperCase())
  })
  const expected = [Buffer.from('FOO')]

  stream.end(Buffer.from('foo'))

  stream.on('data', function (chunk) {
    t.equal(chunk.toString(), expected.shift().toString(), 'chunk matches')
  })

  stream.on('end', function () {
    t.ok('end emitted')
  })
  await t.completed
})
282 |
// Calling end() twice must emit 'error' ('write after EOF'), not throw.
test('double end()', async function (_t) {
  const t = tspl(_t, { plan: 1 })

  const stream = syncthrough()
  stream.end('hello')
  stream.on('error', function (err) {
    t.equal(err.message, 'write after EOF')
  })
  stream.end('world')

  await t.completed
})

// Full-file roundtrip using on('data'): output must match an
// independently uppercased copy of this file.
test('uppercase a file with on(\'data\')', async function (_t) {
  const t = tspl(_t, { plan: 1 })

  let str = ''
  let expected = ''

  const stream = syncthrough(function (chunk) {
    return chunk.toString().toUpperCase()
  })

  stream.on('data', function (chunk) {
    str = str + chunk
  })

  const from = fs.createReadStream(__filename)
  from.pipe(new Writable({
    write: function (chunk, enc, cb) {
      expected += chunk.toString().toUpperCase()
      cb()
    }
  })).on('finish', function () {
    t.equal(str, expected)
  })
  from.pipe(stream)

  await t.completed
})
323 |
// Full-file roundtrip using pipe(): output must match an independently
// uppercased copy of this file.
test('uppercase a file with pipe()', async function (_t) {
  const t = tspl(_t, { plan: 1 })

  let str = ''
  let expected = ''

  const stream = syncthrough(function (chunk) {
    return chunk.toString().toUpperCase()
  })

  // objectMode because the transform yields strings, not Buffers.
  // (Was misspelled `objecMode`, so the option was silently ignored.)
  stream.pipe(new Writable({
    objectMode: true,
    write: function (chunk, enc, cb) {
      str += chunk
      cb()
    }
  }))

  const from = fs.createReadStream(__filename)
  from.pipe(new Writable({
    write: function (chunk, enc, cb) {
      expected += chunk.toString().toUpperCase()
      cb()
    }
  })).on('finish', function () {
    t.equal(str, expected)
  })

  from.pipe(stream)
  await t.completed
})
355 |
// Interop: end-of-stream must detect a clean finish.
test('works with end-of-stream', async function (_t) {
  const t = tspl(_t, { plan: 1 })
  const stream = syncthrough()
  stream.on('data', function () {})
  stream.end()

  eos(stream, function (err) {
    t.equal(err, null, 'ends with no error')
  })
  await t.completed
})

// destroy() emits 'close' on the next tick.
test('destroy()', async function (_t) {
  const t = tspl(_t, { plan: 1 })
  const stream = syncthrough()
  stream.destroy()

  // this is deferred to the next tick
  stream.on('close', function () {
    t.ok('close emitted')
  })
  await t.completed
})

// destroy(err) emits 'error' before 'close'.
test('destroy(err)', async function (_t) {
  const t = tspl(_t, { plan: 1 })
  const stream = syncthrough()
  stream.destroy(new Error('kaboom'))
  stream.on('error', function (err) {
    t.ok(err, 'error emitted')
  })
  await t.completed
})

// Interop: pump can drive a chain of syncthroughs to completion.
test('works with pump', async function (_t) {
  const t = tspl(_t, { plan: 3 })

  const stream = syncthrough(function (chunk) {
    return Buffer.from(chunk.toString().toUpperCase())
  })

  const stream2 = syncthrough(function (chunk) {
    return Buffer.from(chunk.toString().toLowerCase())
  })

  const from = stringFrom([Buffer.from('foo'), Buffer.from('bar')])
  const sink = stringSink(t, [Buffer.from('foo'), Buffer.from('bar')])

  pump(from, stream, stream2, sink, function (err) {
    t.equal(err, null, 'pump finished without error')
  })
  await t.completed
})

// Interop: a failing sink makes pump tear down (close) every stream.
test('works with pump and handles errors', async function (_t) {
  const t = tspl(_t, { plan: 3 })

  const stream = syncthrough(function (chunk) {
    return Buffer.from(chunk.toString().toUpperCase())
  })

  stream.on('close', function () {
    t.ok('stream closed prematurely')
  })

  const stream2 = syncthrough(function (chunk) {
    return Buffer.from(chunk.toString().toLowerCase())
  })

  stream2.on('close', function () {
    t.ok('stream2 closed prematurely')
  })

  const from = stringFrom([Buffer.from('foo'), Buffer.from('bar')])
  const sink = new Writable({
    write: function (chunk, enc, cb) {
      cb(new Error('kaboom'))
    }
  })

  pump(from, stream, stream2, sink, function (err) {
    t.ok(err, 'pump finished with error')
  })
  await t.completed
})

// pipe(sink, { end: false }) must never call end() on the sink.
test('avoid ending the pipe destination if { end: false }', async function (_t) {
  const t = tspl(_t, { plan: 2 })

  const stream = syncthrough(function (chunk) {
    return Buffer.from(chunk.toString().toUpperCase())
  })
  const from = stringFrom([Buffer.from('foo'), Buffer.from('bar')])
  const sink = stringSink(t, [Buffer.from('FOO'), Buffer.from('BAR')])

  sink.on('finish', function () {
    t.fail('finish emitted')
  })

  from.pipe(stream).pipe(sink, { end: false })
  await t.completed
})
458 |
// this.push() inside the transform can emit multiple chunks per input.
test('this.push', async function (_t) {
  const t = tspl(_t, { plan: 5 })

  const stream = syncthrough(function (chunk) {
    this.push(Buffer.from(chunk.toString().toUpperCase()))
    this.push(Buffer.from(chunk.toString()))
  })
  const from = stringFrom([Buffer.from('foo'), Buffer.from('bar')])
  const sink = stringSink(t, [Buffer.from('FOO'), Buffer.from('foo'), Buffer.from('BAR'), Buffer.from('bar')])

  sink.on('finish', function () {
    t.ok('finish emitted')
  })

  from.pipe(stream).pipe(sink)
  await t.completed
})

// Objects survive a syncthrough -> through -> sink pipeline intact.
test('this.push objects', async function (_t) {
  const t = tspl(_t, { plan: 7 })

  const stream = syncthrough(function (chunks) {
    return chunks
  })
  const from = objectFrom([{ num: 1 }, { num: 2 }, { num: 3 }, { num: 4 }, { num: 5 }, { num: 6 }])
  const mid = through(function (chunk) {
    this.queue(chunk)
  })
  const sink = objectSink(t, [{ num: 1 }, { num: 2 }, { num: 3 }, { num: 4 }, { num: 5 }, { num: 6 }])
  sink.on('finish', function () {
    t.ok('finish emitted')
  })

  from.pipe(stream).pipe(mid).pipe(sink)
  await t.completed
})

// The source must not deliver the next chunk before the slow sink has
// drained the previous ones.
// NOTE(review): duplicate test name 'backpressure' (also used earlier) —
// consider renaming to 'backpressure with this.push'.
test('backpressure', async function (_t) {
  const t = tspl(_t, { plan: 7 })
  let wait = false

  const stream = syncthrough(function (chunk) {
    t.strictEqual(wait, false, 'we should not be waiting')
    wait = true
    this.push(Buffer.from(chunk.toString().toUpperCase()))
    this.push(Buffer.from(chunk.toString()))
    setImmediate(function () {
      wait = false
    })
  })

  const from = stringFrom([Buffer.from('foo'), Buffer.from('bar')])
  const sink = delayedStringSink(t, [Buffer.from('FOO'), Buffer.from('foo'), Buffer.from('BAR'), Buffer.from('bar')])

  sink.on('finish', function () {
    t.ok('finish emitted')
  })

  from.pipe(stream).pipe(sink)
  await t.completed
})
520 |
521 | test('returning null ends the stream', async function (_t) {
522 | const t = tspl(_t, { plan: 1 })
523 |
524 | const stream = syncthrough(function (chunk) {
525 | return null
526 | })
527 |
528 | stream.on('data', function () {
529 | t.fail('data should not be emitted')
530 | })
531 |
532 | stream.on('end', function () {
533 | t.ok('end emitted')
534 | })
535 |
536 | stream.write(Buffer.from('foo'))
537 | await t.completed
538 | })
539 |
540 | test('returning null ends the stream deferred', async function (_t) {
541 | const t = tspl(_t, { plan: 1 })
542 |
543 | const stream = syncthrough(function (chunk) {
544 | return null
545 | })
546 |
547 | stream.on('data', function () {
548 | t.fail('data should not be emitted')
549 | })
550 |
551 | stream.on('end', function () {
552 | t.ok('end emitted')
553 | })
554 |
555 | setImmediate(function () {
556 | stream.write(Buffer.from('foo'))
557 | })
558 | await t.completed
559 | })
560 |
test('returning null ends the stream when piped', async function (_t) {
  const t = tspl(_t, { plan: 1 })

  // The sink expects no chunks at all: every input is dropped by the
  // null-returning transform, which also ends the stream.
  const stream = syncthrough((chunk) => null)
  const source = stringFrom([Buffer.from('foo'), Buffer.from('bar')])
  const sink = stringSink(t, [])

  sink.on('finish', () => {
    t.ok('finish emitted')
  })

  source.pipe(stream).pipe(sink)
  await t.completed
})
577 |
test('support flush', async function (_t) {
  const t = tspl(_t, { plan: 4 })

  // Second argument to syncthrough is the flush callback; its return
  // value is pushed as a final chunk before the stream ends.
  const uppercase = (chunk) => Buffer.from(chunk.toString().toUpperCase())
  const flush = () => Buffer.from('done!')

  const stream = syncthrough(uppercase, flush)
  const source = stringFrom([Buffer.from('foo'), Buffer.from('bar')])
  const sink = stringSink(t, [Buffer.from('FOO'), Buffer.from('BAR'), Buffer.from('done!')])

  sink.on('finish', () => {
    t.ok('finish emitted')
  })

  source.pipe(stream).pipe(sink)
  await t.completed
})
596 |
test('adding on(\'data\') after pipe throws', async function (_t) {
  const t = tspl(_t, { plan: 1 })

  const stream = syncthrough((chunk) => Buffer.from(chunk.toString().toUpperCase()))

  // Once piped, attaching a 'data' listener is an error.
  stream.pipe(new Writable())

  t.throws(() => {
    stream.on('data', () => {})
  })
  await t.completed
})
613 |
test('multiple data event', async function (_t) {
  const t = tspl(_t, { plan: 4 })

  const stream = syncthrough((chunk) => Buffer.from(chunk.toString().toUpperCase()))
  const source = stringFrom([Buffer.from('foo'), Buffer.from('bar')])

  // Both listeners must observe every chunk independently.
  const expected1 = [Buffer.from('FOO'), Buffer.from('BAR')]
  const expected2 = [Buffer.from('FOO'), Buffer.from('BAR')]

  stream.on('data', (chunk) => {
    t.equal(chunk.toString(), expected1.shift().toString(), 'chunk from 1 matches')
  })

  stream.on('data', (chunk) => {
    t.equal(chunk.toString(), expected2.shift().toString(), 'chunk from 2 matches')
  })

  source.pipe(stream)
  await t.completed
})
635 |
test('piping twice errors', async function (_t) {
  const t = tspl(_t, { plan: 1 })

  // A syncthrough instance supports exactly one destination.
  const stream = syncthrough()
  stream.pipe(new Writable())

  t.throws(() => {
    stream.pipe(new Writable())
  })
  await t.completed
})
647 |
test('removing on(\'data\') handlers', async function (_t) {
  const t = tspl(_t, { plan: 2 })

  const stream = syncthrough((chunk) => Buffer.from(chunk.toString().toUpperCase()))
  const expected = [Buffer.from('FOO'), Buffer.from('BAR')]

  // Listener that should see both chunks (declarations are hoisted).
  function keep (chunk) {
    t.equal(chunk.toString(), expected.shift().toString(), 'chunk matches')
  }

  // Listener removed before any write; must never fire.
  function drop () {
    t.fail('should never be called')
  }

  stream.on('data', keep)
  stream.on('data', drop)
  stream.removeListener('data', drop)

  stream.write('foo')

  stream.once('drain', () => {
    // Detach and re-attach the surviving listener, then write again.
    stream.removeListener('data', keep)
    stream.on('data', keep)
    stream.write('bar')
  })

  await t.completed
})
678 |
test('double unpipe does nothing', function (_t) {
  // Unpiping the same destination twice must be a harmless no-op;
  // a subsequent write must not throw.
  const stream = syncthrough()
  const dest = new Writable()

  stream.pipe(dest)
  stream.unpipe(dest)
  stream.unpipe(dest)

  stream.write('hello')
})
689 |
test('must respect backpressure', async function (_t) {
  const t = tspl(_t, { plan: 3 })

  // With no destination attached, writes cannot be consumed:
  // both must report backpressure and the second must error.
  const stream = syncthrough()

  t.strictEqual(stream.write('hello'), false)

  stream.once('error', () => {
    t.ok('stream errors')
  })

  t.strictEqual(stream.write('world'), false)
  await t.completed
})
704 |
test('pipe with through', async function (_t) {
  const t = tspl(_t, { plan: 3 })

  const stream = syncthrough((chunk) => Buffer.from(chunk.toString().toUpperCase()))
  const source = stringFrom([Buffer.from('foo'), Buffer.from('bar')])

  // Classic `through` pass-through in the middle of the pipeline
  // (needs a plain function: `this.queue` is the through instance).
  const th = through(function (data) {
    this.queue(data)
  })
  const sink = stringSink(t, [Buffer.from('FOO'), Buffer.from('BAR')])

  sink.on('finish', () => {
    t.ok('finish emitted')
  })

  source.pipe(th).pipe(stream).pipe(sink)
  th.resume()
  await t.completed
})
725 |
test('works with pipeline', async function (_t) {
  const t = tspl(_t, { plan: 3 })

  // Two chained transforms: uppercase then lowercase — net effect is
  // a pass-through, which the sink's expectations verify.
  const upper = syncthrough((chunk) => Buffer.from(chunk.toString().toUpperCase()))
  const lower = syncthrough((chunk) => Buffer.from(chunk.toString().toLowerCase()))

  const source = stringFrom([Buffer.from('foo'), Buffer.from('bar')])
  const sink = stringSink(t, [Buffer.from('foo'), Buffer.from('bar')])

  pipeline(source, upper, lower, sink, (err) => {
    t.equal(err, null, 'pipeline finished without error')
  })
  await t.completed
})
745 |
test('works with pipeline and handles errors', async function (_t) {
  const t = tspl(_t, { plan: 3 })

  // When the sink errors, pipeline must tear down both transforms.
  const stream = syncthrough((chunk) => Buffer.from(chunk.toString().toUpperCase()))
  const stream2 = syncthrough((chunk) => Buffer.from(chunk.toString().toLowerCase()))

  stream.on('close', () => {
    t.ok('stream closed prematurely')
  })

  stream2.on('close', () => {
    t.ok('stream2 closed prematurely')
  })

  const source = stringFrom([Buffer.from('foo'), Buffer.from('bar')])

  // Sink that rejects every chunk.
  const sink = new Writable({
    write (chunk, enc, cb) {
      cb(new Error('kaboom'))
    }
  })

  pipeline(source, stream, stream2, sink, (err) => {
    t.ok(err, 'pipeline finished with error')
  })
  await t.completed
})
777 |
test('works with pipeline and calls flush', async function (_t) {
  const t = tspl(_t, { plan: 3 })
  const expected = 'hello world!'
  let actual = ''

  // Flush pushes a trailing '!' via this.push (needs a plain function).
  const appendBang = syncthrough(undefined, function flush () {
    t.ok('flush called')
    this.push('!')
  })

  const collector = new Writable({
    write (chunk, enc, cb) {
      actual += chunk.toString()
      cb()
    }
  })

  pipeline(Readable.from('hello world'), appendBang, collector, (err) => {
    t.equal(err, null, 'pipeline finished without error')
    t.equal(actual, expected, 'actual matches expected')
  })

  await t.completed
})
805 |
test('works with pipeline and calls flush / 2', async function (_t) {
  const t = tspl(_t, { plan: 3 })
  const expected = 'hello world!'
  let actual = ''

  // Variant: flush appends '!' via its return value instead of this.push.
  const appendBang = syncthrough(undefined, function flush () {
    t.ok('flush called')
    return '!'
  })

  const collector = new Writable({
    write (chunk, enc, cb) {
      actual += chunk.toString()
      cb()
    }
  })

  pipeline(Readable.from('hello world'), appendBang, collector, (err) => {
    t.equal(err, null, 'pipeline finished without error')
    t.equal(actual, expected, 'actual matches expected')
  })

  await t.completed
})
833 |
--------------------------------------------------------------------------------