├── .gitignore ├── .npmignore ├── .travis.yml ├── LICENSE ├── README.md ├── examples ├── package.json ├── stdout.js ├── streaming-compressed-ndjson.js └── streaming-ndjson.js ├── index.js ├── package.json └── test.js /.gitignore: -------------------------------------------------------------------------------- 1 | .nyc_output 2 | node_modules 3 | -------------------------------------------------------------------------------- /.npmignore: -------------------------------------------------------------------------------- 1 | # Files to ignore 2 | .travis.yml 3 | test.js 4 | 5 | # Folders to ignore 6 | examples 7 | 8 | # Files and folders generated by tests and other commands 9 | .nyc_output 10 | 11 | # In case you add a node_modules folder to a subdirectory. These will not show 12 | # up on `git status` because of the .gitignore file, but they will be inluded 13 | # in the npm package! 14 | **/node_modules 15 | -------------------------------------------------------------------------------- /.travis.yml: -------------------------------------------------------------------------------- 1 | language: node_js 2 | node_js: 3 | - '11' 4 | - '10' 5 | - '8' 6 | - '6' 7 | after_success: npm run coverage 8 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | The MIT License (MIT) 2 | 3 | Copyright (c) 2018 Thomas Watson Steen 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be 
included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # stream-chopper 2 | 3 | Chop a single stream of data into a series of readable streams. 4 | 5 | [![npm](https://img.shields.io/npm/v/stream-chopper.svg)](https://www.npmjs.com/package/stream-chopper) 6 | [![build status](https://travis-ci.org/watson/stream-chopper.svg?branch=master)](https://travis-ci.org/watson/stream-chopper) 7 | [![codecov](https://img.shields.io/codecov/c/github/watson/stream-chopper.svg)](https://codecov.io/gh/watson/stream-chopper) 8 | [![js-standard-style](https://img.shields.io/badge/code%20style-standard-brightgreen.svg?style=flat)](https://github.com/feross/standard) 9 | 10 | Stream Chopper is useful in situations where you have a stream of data 11 | you want to chop up into smaller pieces, either based on time or size. 12 | Each piece will be emitted as a readable stream (called output streams). 13 | 14 | Possible use-cases include log rotation, splitting up large data sets, 15 | or chopping up a live stream of data into finite chunks that can then be 16 | stored. 17 | 18 | ## Control how data is split 19 | 20 | Sometimes it's important to ensure that a chunk written to the input 21 | stream isn't split up and devided over two output streams. 
Stream 22 | Chopper allows you to specify the chopping algorithm used (via the 23 | `type` option) when a chunk is too large to fit into the current output 24 | stream. 25 | 26 | By default a chunk too large to fit in the current output stream is 27 | split between it and the next. Alternatively you can decide to either 28 | allow the chunk to "overflow" the size limit, in which case it will be 29 | written to the current output stream, or to "underflow" the size limit, 30 | in which case the current output stream will be ended and the chunk 31 | written to the next output stream. 32 | 33 | ## Installation 34 | 35 | ``` 36 | npm install stream-chopper --save 37 | ``` 38 | 39 | ## Usage 40 | 41 | Example app: 42 | 43 | ```js 44 | const StreamChopper = require('stream-chopper') 45 | 46 | const chopper = new StreamChopper({ 47 | size: 30, // chop stream when it reaches 30 bytes, 48 | time: 10000, // or when it's been open for 10s, 49 | type: StreamChopper.overflow // but allow stream to exceed size slightly 50 | }) 51 | 52 | chopper.on('stream', function (stream, next) { 53 | console.log('>> Got a new stream! <<') 54 | stream.pipe(process.stdout) 55 | stream.on('end', next) // call next when you're ready to receive a new stream 56 | }) 57 | 58 | chopper.write('This write contains more than 30 bytes\n') 59 | chopper.write('This write contains less\n') 60 | chopper.write('This is the last write\n') 61 | ``` 62 | 63 | Output: 64 | 65 | ``` 66 | >> Got a new stream! << 67 | This write contains more than 30 bytes 68 | >> Got a new stream! << 69 | This write contains less 70 | This is the last write 71 | ``` 72 | 73 | ## API 74 | 75 | ### `chopper = new StreamChopper([options])` 76 | 77 | Instantiate a `StreamChopper` instance. `StreamChopper` is a [writable] 78 | stream. 
79 | 80 | Takes an optional `options` object which, besides the normal options 81 | accepted by the [`Writable`][writable] class, accepts the following 82 | config options: 83 | 84 | - `size` - The maximum number of bytes that can be written to the 85 | `chopper` stream before a new output stream is emitted (default: 86 | `Infinity`) 87 | - `time` - The maximum number of milliseconds that an output stream can 88 | be in use before a new output stream is emitted (default: `-1` which 89 | means no limit) 90 | - `type` - Change the algoritm used to determine how a written chunk 91 | that cannot fit into the current output stream should be handled. The 92 | following values are possible: 93 | - `StreamChopper.split` - Fit as much data from the chunk as possible 94 | into the current stream and write the remainder to the next stream 95 | (default) 96 | - `StreamChopper.overflow` - Allow the entire chunk to be written to 97 | the current stream. After writing, the stream is ended 98 | - `StreamChopper.underflow` - End the current output stream and write 99 | the entire chunk to the next stream 100 | - `transform` - An optional function that returns a transform stream 101 | used for transforming the data in some way (e.g. a zlib Gzip stream). 102 | If used, the `size` option will count towards the size of the output 103 | chunks. This config option cannot be used together with the 104 | `StreamChopper.split` type 105 | 106 | If `type` is `StreamChopper.underflow` and the size of the chunk to be 107 | written is larger than `size` an error is emitted. 108 | 109 | ### Event: `stream` 110 | 111 | Emitted every time a new output stream is ready. You must listen for 112 | this event. 113 | 114 | The listener function is called with two arguments: 115 | 116 | - `stream` - A [readable] output stream 117 | - `next` - A function you must call when you're ready to receive a new 118 | output stream. 
If called with an error, the `chopper` stream is 119 | destroyed 120 | 121 | ### `chopper.size` 122 | 123 | The maximum number of bytes that can be written to the `chopper` stream 124 | before a new output stream is emitted. 125 | 126 | Use this property to override it with a new value. The new value will 127 | take effect immediately on the current stream. 128 | 129 | ### `chopper.time` 130 | 131 | The maximum number of milliseconds that an output stream can be in use 132 | before a new output stream is emitted. 133 | 134 | Use this property to override it with a new value. The new value will 135 | take effect when the next stream is initialized. To change the current 136 | timer, see [`chopper.resetTimer()`](#chopperresettimertime). 137 | 138 | Set to `-1` for no time limit. 139 | 140 | ### `chopper.type` 141 | 142 | The algoritm used to determine how a written chunk that cannot fit into 143 | the current output stream should be handled. The following values are 144 | possible: 145 | 146 | - `StreamChopper.split` - Fit as much data from the chunk as possible 147 | into the current stream and write the remainder to the next stream 148 | - `StreamChopper.overflow` - Allow the entire chunk to be written to 149 | the current stream. After writing, the stream is ended 150 | - `StreamChopper.underflow` - End the current output stream and write 151 | the entire chunk to the next stream 152 | 153 | Use this property to override it with a new value. The new value will 154 | take effect immediately on the current stream. 155 | 156 | ### `chopper.chop([callback])` 157 | 158 | Manually chop the stream. Forces the current output stream to end even 159 | if its `size` limit or `time` timeout hasn't been reached yet. 
160 | 161 | Arguments: 162 | 163 | - `callback` - An optional callback which will be called once the output 164 | stream have ended 165 | 166 | ### `chopper.resetTimer([time])` 167 | 168 | Use this function to reset the current timer (configured via the `time` 169 | config option). Calling this function will force the current timer to 170 | start over. 171 | 172 | If the optional `time` argument is provided, this value is used as the 173 | new time. This is equivilent to calling: 174 | 175 | ```js 176 | chopper.time = time 177 | chopper.resetTimer() 178 | ``` 179 | 180 | If the function is called with `time` set to `-1`, the current timer is 181 | cancelled and the time limit is disabled for all future streams. 182 | 183 | ## License 184 | 185 | [MIT](https://github.com/watson/stream-chopper/blob/master/LICENSE) 186 | 187 | [writable]: https://nodejs.org/api/stream.html#stream_class_stream_writable 188 | [readable]: https://nodejs.org/api/stream.html#stream_class_stream_readable 189 | -------------------------------------------------------------------------------- /examples/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "examples", 3 | "version": "1.0.0", 4 | "description": "", 5 | "dependencies": { 6 | "ndjson": "^1.5.0", 7 | "pump": "^3.0.0" 8 | }, 9 | "devDependencies": {}, 10 | "scripts": { 11 | "test": "echo \"Error: no test specified\" && exit 1" 12 | }, 13 | "keywords": [], 14 | "author": "Thomas Watson (https://twitter.com/wa7son)", 15 | "license": "MIT" 16 | } 17 | -------------------------------------------------------------------------------- /examples/stdout.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | const StreamChopper = require('stream-chopper') 4 | 5 | const chopper = new StreamChopper({ 6 | size: 30, // chop stream when it reaches 30 bytes, 7 | time: 10000, // or when it's been open for 10s, 8 | type: 
StreamChopper.overflow // but allow stream to exceed size slightly 9 | }) 10 | 11 | chopper.on('stream', function (stream, next) { 12 | console.log('>> Got a new stream! <<') 13 | stream.pipe(process.stdout) 14 | stream.on('end', next) // call next when you're ready to receive a new stream 15 | }) 16 | 17 | chopper.write('This write contains more than 30 bytes\n') 18 | chopper.write('This write contains less\n') 19 | chopper.write('This is the last write\n') 20 | -------------------------------------------------------------------------------- /examples/streaming-compressed-ndjson.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | /** 4 | * This example app will send ndjson objects to an HTTP server. 5 | * 6 | * The HTTP server expects that the request body is compressed using gzip. Each 7 | * ndjson object just contains the current time. 8 | */ 9 | 10 | const http = require('http') 11 | const zlib = require('zlib') 12 | const crypto = require('crypto') 13 | const pump = require('pump') 14 | const ndjson = require('ndjson') 15 | const StreamChopper = require('stream-chopper') 16 | 17 | let requestCount = 0 18 | let streamCount = 0 19 | 20 | // Start a dummy server that will receive streaming ndjson HTTP requests 21 | const server = http.createServer(function (req, res) { 22 | const count = ++requestCount 23 | console.log('[server req#%d] new request', count) 24 | 25 | // decompress the request body and parse it as ndjson 26 | pump(req, zlib.createGunzip(), ndjson.parse(), function (err) { 27 | if (err) { 28 | console.error(err.stack) 29 | res.statusCode = 500 30 | } 31 | console.log('[server req#%d] request body ended - responding with status code %d', count, res.statusCode) 32 | res.end() 33 | }).on('data', function (obj) { 34 | console.log('[server req#%d] got an ndjson object: %j', count, obj) 35 | }) 36 | }) 37 | 38 | server.listen(function () { 39 | const port = server.address().port 40 | 41 | const 
chopper = new StreamChopper({ 42 | size: 512, // close request when 512 bytes data have been written, 43 | time: 10000, // or when it't been open for 10 seconds, 44 | type: StreamChopper.overflow, // but don't chop ndjson lines up 45 | transform: function () { // compress the transmitted ndjson 46 | return zlib.createGzip({ 47 | chunkSize: 512, // use small zlib buffer for demo purposes 48 | level: zlib.constants.Z_NO_COMPRESSION // make zlib buffer fill up fast for demo purposes 49 | }) 50 | } 51 | }) 52 | 53 | chopper.on('stream', function (stream, next) { 54 | // prepare a new output stream 55 | const count = ++streamCount 56 | console.log('[chopper stream#%d] new stream', count) 57 | 58 | const opts = { method: 'POST', port } 59 | 60 | // open a request to the HTTP server 61 | const req = http.request(opts, function (res) { 62 | console.log('[chopper stream#%d] got server response: %d', count, res.statusCode) 63 | res.resume() 64 | }) 65 | 66 | // compress all data written to the stream with gzip before sending it to 67 | // the HTTP server 68 | pump(stream, req, function (err) { 69 | if (err) throw err 70 | console.log('[chopper stream#%d] stream ended', count) 71 | next() 72 | }) 73 | }) 74 | 75 | const serialize = ndjson.serialize() 76 | 77 | // pipe the serialize stream into the chopper stream 78 | pump(serialize, chopper, function (err) { 79 | if (err) throw err 80 | console.error('unexpected end of chopper') 81 | }) 82 | 83 | // start writing ndjson to the serialize stream 84 | write() 85 | 86 | function write () { 87 | // prepare dummy json object 88 | const obj = { time: new Date(), data: crypto.randomBytes(64).toString('hex') } 89 | 90 | // write it to the serialize stream 91 | if (serialize.write(obj) === false) { 92 | // backpressure detected, pause writing until the stream is ready 93 | serialize.once('drain', next) 94 | return 95 | } 96 | 97 | // queue next write 98 | next() 99 | } 100 | 101 | function next () { 102 | // queue next write to happen 
somewhere between 0 and 50 ms 103 | setTimeout(write, Math.floor(Math.random() * 50)) 104 | } 105 | }) 106 | -------------------------------------------------------------------------------- /examples/streaming-ndjson.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | /** 4 | * This example app will send ndjson objects to an HTTP server. 5 | * 6 | * Each ndjson object just contains the current time. 7 | */ 8 | 9 | const http = require('http') 10 | const pump = require('pump') 11 | const ndjson = require('ndjson') 12 | const StreamChopper = require('stream-chopper') 13 | 14 | let requestCount = 0 15 | let streamCount = 0 16 | 17 | // Start a dummy server that will receive streaming ndjson HTTP requests 18 | const server = http.createServer(function (req, res) { 19 | const count = ++requestCount 20 | console.log('[server req#%d] new request', count) 21 | 22 | // parse the request body as ndjson 23 | pump(req, ndjson.parse(), function (err) { 24 | if (err) { 25 | console.error(err.stack) 26 | res.statusCode = 500 27 | } 28 | console.log('[server req#%d] request body ended - responding with status code %d', count, res.statusCode) 29 | res.end() 30 | }).on('data', function (obj) { 31 | console.log('[server req#%d] got an ndjson object: %j', count, obj) 32 | }) 33 | }) 34 | 35 | server.listen(function () { 36 | const port = server.address().port 37 | 38 | const chopper = new StreamChopper({ 39 | size: 512, // close request when 512 bytes data have been written, 40 | time: 10000, // or when it't been open for 10 seconds, 41 | type: StreamChopper.overflow // but don't chop ndjson lines up 42 | }) 43 | 44 | chopper.on('stream', function (stream, next) { 45 | // prepare a new output stream 46 | const count = ++streamCount 47 | console.log('[chopper stream#%d] new stream', count) 48 | 49 | const opts = { method: 'POST', port } 50 | 51 | // open a request to the HTTP server 52 | const req = http.request(opts, function 
(res) { 53 | console.log('[chopper stream#%d] got server response: %d', count, res.statusCode) 54 | res.resume() 55 | }) 56 | 57 | // send all data to the HTTP server 58 | pump(stream, req, function (err) { 59 | if (err) throw err 60 | console.log('[chopper stream#%d] stream ended', count) 61 | next() 62 | }) 63 | }) 64 | 65 | const serialize = ndjson.serialize() 66 | 67 | // pipe the serialize stream into the chopper stream 68 | pump(serialize, chopper, function (err) { 69 | if (err) throw err 70 | console.error('unexpected end of chopper') 71 | }) 72 | 73 | // start writing ndjson to the serialize stream 74 | write() 75 | 76 | function write () { 77 | // prepare dummy json object 78 | const obj = { time: new Date() } 79 | 80 | // write it to the serialize stream 81 | if (serialize.write(obj) === false) { 82 | // backpressure detected, pause writing until the stream is ready 83 | serialize.once('drain', next) 84 | return 85 | } 86 | 87 | // queue next write 88 | next() 89 | } 90 | 91 | function next () { 92 | // queue next write to happen somewhere between 250 and 500 ms 93 | setTimeout(write, Math.floor(Math.random() * 250) + 250) 94 | } 95 | }) 96 | -------------------------------------------------------------------------------- /index.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | const util = require('util') 4 | const zlib = require('zlib') 5 | const { Writable, PassThrough } = require('readable-stream') 6 | 7 | module.exports = StreamChopper 8 | 9 | util.inherits(StreamChopper, Writable) 10 | 11 | StreamChopper.split = Symbol('split') 12 | StreamChopper.overflow = Symbol('overflow') 13 | StreamChopper.underflow = Symbol('underflow') 14 | 15 | const types = [ 16 | StreamChopper.split, 17 | StreamChopper.overflow, 18 | StreamChopper.underflow 19 | ] 20 | 21 | function StreamChopper (opts) { 22 | if (!(this instanceof StreamChopper)) return new StreamChopper(opts) 23 | if (!opts) opts = {} 24 | 25 | 
Writable.call(this, opts) 26 | 27 | this.size = opts.size || Infinity 28 | this.time = opts.time || -1 29 | this.type = types.indexOf(opts.type) === -1 30 | ? StreamChopper.split 31 | : opts.type 32 | this._transform = opts.transform 33 | 34 | if (this._transform && this.type === StreamChopper.split) { 35 | throw new Error('stream-chopper cannot split a transform stream') 36 | } 37 | 38 | this._bytes = 0 39 | this._stream = null 40 | 41 | this._locked = false 42 | this._draining = false 43 | 44 | this._onunlock = null 45 | this._next = noop 46 | this._oneos = oneos 47 | this._ondrain = ondrain 48 | 49 | const self = this 50 | 51 | function oneos () { 52 | self._removeStream() 53 | } 54 | 55 | function ondrain () { 56 | self._draining = false 57 | const next = self._next 58 | self._next = noop 59 | next() 60 | } 61 | } 62 | 63 | StreamChopper.prototype.chop = function (cb) { 64 | if (this.destroyed) { 65 | if (cb) process.nextTick(cb) 66 | } else if (this._onunlock === null) { 67 | this._endStream(cb) 68 | } else { 69 | const write = this._onunlock 70 | this._onunlock = () => { 71 | write() 72 | this._endStream(cb) 73 | } 74 | } 75 | } 76 | 77 | StreamChopper.prototype._startStream = function (cb) { 78 | if (this.destroyed) return 79 | if (this._locked) { 80 | this._onunlock = cb 81 | return 82 | } 83 | 84 | this._bytes = 0 85 | 86 | if (this._transform) { 87 | this._stream = this._transform().once('resume', () => { 88 | // in case `_removeStream` have just been called 89 | if (this._stream === null) return 90 | 91 | // `resume` will be emitted before the first `data` event 92 | this._stream.on('data', chunk => { 93 | this._bytes += chunk.length 94 | this._maybeEndTransformSteam() 95 | }) 96 | }) 97 | } else { 98 | this._stream = new PassThrough() 99 | } 100 | 101 | this._stream 102 | .on('close', this._oneos) 103 | .on('error', this._oneos) 104 | .on('finish', this._oneos) 105 | .on('end', this._oneos) 106 | .on('drain', this._ondrain) 107 | 108 | this._locked = 
true 109 | this.emit('stream', this._stream, err => { 110 | this._locked = false 111 | if (err) return this.destroy(err) 112 | 113 | const cb = this._onunlock 114 | if (cb) { 115 | this._onunlock = null 116 | cb() 117 | } 118 | }) 119 | 120 | this.resetTimer() 121 | 122 | // To ensure that the write that caused this stream to be started 123 | // is perfromed in the same tick, call the callback synchronously. 124 | // Note that we can't do this in case the chopper is locked. 125 | cb() 126 | } 127 | 128 | StreamChopper.prototype._maybeEndTransformSteam = function () { 129 | if (this._stream === null) return 130 | 131 | // in case of backpresure on the transform stream, count how many bytes are 132 | // buffered 133 | const bufferedSize = getBufferedSize(this._stream) 134 | 135 | const overflow = (this._bytes + bufferedSize) - this.size 136 | 137 | if (overflow >= 0) this._endStream() 138 | } 139 | 140 | StreamChopper.prototype.resetTimer = function (time) { 141 | if (arguments.length > 0) this.time = time 142 | if (this._timer) { 143 | clearTimeout(this._timer) 144 | this._timer = null 145 | } 146 | if (this.time !== -1 && !this.destroyed && this._stream) { 147 | this._timer = setTimeout(() => { 148 | this._timer = null 149 | this._endStream() 150 | }, this.time) 151 | this._timer.unref() 152 | } 153 | } 154 | 155 | StreamChopper.prototype._endStream = function (cb) { 156 | if (this.destroyed) return 157 | if (this._stream === null) { 158 | if (cb) process.nextTick(cb) 159 | return 160 | } 161 | 162 | const stream = this._stream 163 | 164 | // ensure all timers and event listeners related to the current stream is removed 165 | this._removeStream() 166 | 167 | // if stream hasn't yet ended, make sure to end it properly 168 | if (!stream._writableState.ending && !stream._writableState.finished) { 169 | stream.end(cb) 170 | } else if (cb) { 171 | process.nextTick(cb) 172 | } 173 | } 174 | 175 | StreamChopper.prototype._removeStream = function () { 176 | if 
(this._stream === null) return 177 | 178 | const stream = this._stream 179 | this._stream = null 180 | 181 | if (this._timer !== null) clearTimeout(this._timer) 182 | if (stream._writableState.needDrain) this._ondrain() 183 | stream.removeListener('error', this._oneos) 184 | stream.removeListener('close', this._oneos) 185 | stream.removeListener('finish', this._oneos) 186 | stream.removeListener('end', this._oneos) 187 | stream.removeListener('drain', this._ondrain) 188 | } 189 | 190 | StreamChopper.prototype._write = function (chunk, enc, cb) { 191 | if (this._stream === null) { 192 | this._startStream(() => { 193 | this._write(chunk, enc, cb) 194 | }) 195 | return 196 | } 197 | 198 | // This guard is to protect against writes that happen in the same tick after 199 | // a user destroys the stream. If it wasn't here, we'd accidentally write to 200 | // the stream and it would emit an error 201 | if (isDestroyed(this._stream)) { 202 | this._startStream(() => { 203 | this._write(chunk, enc, cb) 204 | }) 205 | return 206 | } 207 | 208 | if (this._transform) { 209 | // The size of a transform stream is counted post-transform and so the size 210 | // guard is located elsewhere. We can therefore just write to the stream 211 | // without any checks. 
212 | this._unprotectedWrite(chunk, enc, cb) 213 | } else { 214 | this._protectedWrite(chunk, enc, cb) 215 | } 216 | } 217 | 218 | StreamChopper.prototype._protectedWrite = function (chunk, enc, cb) { 219 | this._bytes += chunk.length 220 | 221 | const overflow = this._bytes - this.size 222 | 223 | if (overflow > 0 && this.type !== StreamChopper.overflow) { 224 | if (this.type === StreamChopper.split) { 225 | const remaining = chunk.length - overflow 226 | this._stream.write(chunk.slice(0, remaining)) 227 | chunk = chunk.slice(remaining) 228 | } 229 | 230 | if (this.type === StreamChopper.underflow && this._bytes - chunk.length === 0) { 231 | cb(new Error(`Cannot write ${chunk.length} byte chunk - only ${this.size} available`)) 232 | return 233 | } 234 | 235 | this._endStream(() => { 236 | this._write(chunk, enc, cb) 237 | }) 238 | return 239 | } 240 | 241 | if (overflow < 0) { 242 | this._unprotectedWrite(chunk, enc, cb) 243 | } else { 244 | // if we reached the size limit, just end the stream already 245 | this._stream.end(chunk) 246 | this._endStream(cb) 247 | } 248 | } 249 | 250 | StreamChopper.prototype._unprotectedWrite = function (chunk, enc, cb) { 251 | if (this._stream.write(chunk) === false) this._draining = true 252 | if (this._draining === false) cb() 253 | else this._next = cb 254 | } 255 | 256 | StreamChopper.prototype._destroy = function (err, cb) { 257 | const stream = this._stream 258 | this._removeStream() 259 | 260 | if (stream !== null) { 261 | if (stream.destroyed === true) return cb(err) 262 | destroyStream(stream, function () { 263 | cb(err) 264 | }) 265 | } else { 266 | cb(err) 267 | } 268 | } 269 | 270 | StreamChopper.prototype._final = function (cb) { 271 | if (this._stream === null) return cb() 272 | this._stream.end(cb) 273 | } 274 | 275 | function noop () {} 276 | 277 | function getBufferedSize (stream) { 278 | const buffer = stream.writableBuffer || stream._writableState.getBuffer() 279 | return buffer.reduce((total, b) => { 280 | 
return total + b.chunk.length 281 | }, 0) 282 | } 283 | 284 | // TODO: Make this work with all Node.js 6 streams. A Node.js 6 stream doesn't 285 | // have a destroyed flag because it doesn't have a .destroy() function. If the 286 | // stream is a zlib stream it will however have a _handle, which will be null 287 | // if the stream has been closed. We can check for that, but that coveres only 288 | // zlib streams 289 | function isDestroyed (stream) { 290 | return stream.destroyed === true || stream._handle === null 291 | } 292 | 293 | function destroyStream (stream, cb) { 294 | const emitClose = stream._writableState.emitClose 295 | if (emitClose) stream.once('close', cb) 296 | 297 | if (stream instanceof zlib.Gzip || 298 | stream instanceof zlib.Gunzip || 299 | stream instanceof zlib.Deflate || 300 | stream instanceof zlib.DeflateRaw || 301 | stream instanceof zlib.Inflate || 302 | stream instanceof zlib.InflateRaw || 303 | stream instanceof zlib.Unzip) { 304 | // Zlib streams doesn't have a destroy function in Node.js 6. On top of 305 | // that simply calling destroy on a zlib stream in Node.js 8+ will result 306 | // in a memory leak as the handle isn't closed (an operation normally done 307 | // by calling close). So until that is fixed, we need to manually close the 308 | // handle after destroying the stream. 
309 | // 310 | // PR: https://github.com/nodejs/node/pull/23734 311 | if (typeof stream.destroy === 'function') { 312 | // Manually close the stream instead of calling `close()` as that would 313 | // have emitted 'close' again when calling `destroy()` 314 | if (stream._handle && typeof stream._handle.close === 'function') { 315 | stream._handle.close() 316 | stream._handle = null 317 | } 318 | 319 | stream.destroy() 320 | } else if (typeof stream.close === 'function') { 321 | stream.close() 322 | } 323 | } else { 324 | // For other streams we assume calling destroy is enough 325 | if (typeof stream.destroy === 'function') stream.destroy() 326 | // Or if there's no destroy (which Node.js 6 will not have on regular 327 | // streams), emit `close` as that should trigger almost the same effect 328 | else if (typeof stream.emit === 'function') stream.emit('close') 329 | } 330 | 331 | // In case this stream doesn't emit 'close', just call the callback manually 332 | if (!emitClose) cb() 333 | } 334 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "stream-chopper", 3 | "version": "3.0.1", 4 | "description": "Chop a single stream of data into a series of readable streams", 5 | "main": "index.js", 6 | "dependencies": { 7 | "readable-stream": "^3.0.6" 8 | }, 9 | "devDependencies": { 10 | "codecov": "^3.0.4", 11 | "nyc": "^13.0.1", 12 | "standard": "^12.0.1", 13 | "tape": "^4.9.1" 14 | }, 15 | "scripts": { 16 | "coverage": "nyc report --reporter=text-lcov > coverage.lcov && codecov", 17 | "test": "standard && nyc node test.js" 18 | }, 19 | "repository": { 20 | "type": "git", 21 | "url": "git+https://github.com/watson/stream-chopper.git" 22 | }, 23 | "keywords": [ 24 | "chop", 25 | "choppa", 26 | "chopper", 27 | "split", 28 | "splitter", 29 | "cut", 30 | "cutter", 31 | "stream", 32 | "streams", 33 | "size", 34 | "length", 35 | 
"time", 36 | "timeout", 37 | "max", 38 | "max-size" 39 | ], 40 | "author": "Thomas Watson (https://twitter.com/wa7son)", 41 | "license": "MIT", 42 | "bugs": { 43 | "url": "https://github.com/watson/stream-chopper/issues" 44 | }, 45 | "homepage": "https://github.com/watson/stream-chopper#readme", 46 | "coordinates": [ 47 | 55.778265, 48 | 12.593146 49 | ] 50 | } 51 | -------------------------------------------------------------------------------- /test.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | const test = require('tape') 4 | const zlib = require('zlib') 5 | const crypto = require('crypto') 6 | const StreamChopper = require('./') 7 | 8 | const types = [ 9 | StreamChopper.split, 10 | StreamChopper.overflow, 11 | StreamChopper.underflow 12 | ] 13 | 14 | test('default values', function (t) { 15 | const chopper = new StreamChopper() 16 | t.equal(chopper.size, Infinity) 17 | t.equal(chopper.time, -1) 18 | t.equal(chopper.type, StreamChopper.split) 19 | t.equal(chopper._transform, undefined) 20 | t.equal(chopper._locked, false) 21 | t.equal(chopper._draining, false) 22 | t.end() 23 | }) 24 | 25 | test('throw on invalid config', function (t) { 26 | t.throws(function () { 27 | new StreamChopper({ // eslint-disable-line no-new 28 | type: StreamChopper.split, 29 | transform () {} 30 | }) 31 | }) 32 | t.throws(function () { 33 | new StreamChopper({ // eslint-disable-line no-new 34 | transform () {} 35 | }) 36 | }) 37 | t.end() 38 | }) 39 | 40 | types.forEach(function (type) { 41 | test(`write with no remainder and type:${type.toString()}`, function (t) { 42 | const sizeOfWrite = 'hello world 1'.length 43 | const chopper = new StreamChopper({ 44 | size: sizeOfWrite * 3, // allow for a length of exactly 3x of a single write 45 | type 46 | }) 47 | chopper.on('stream', assertOnStream(t, 3)) 48 | chopper.write('hello world 1') 49 | chopper.write('hello world 1') 50 | chopper.write('hello world 1') 51 | 
chopper.write('hello world 2') 52 | chopper.write('hello world 2') 53 | chopper.write('hello world 2') 54 | chopper.write('hello world 3') 55 | chopper.write('hello world 3') 56 | chopper.write('hello world 3') 57 | chopper.end() 58 | }) 59 | }) 60 | 61 | test('transform: very fast writes should not exceed size limit too much', function (t) { 62 | const writes = [ 63 | crypto.randomBytes(5 * 1024).toString('hex'), 64 | crypto.randomBytes(5 * 1024).toString('hex'), 65 | crypto.randomBytes(5 * 1024).toString('hex'), 66 | crypto.randomBytes(5 * 1024).toString('hex'), 67 | crypto.randomBytes(5 * 1024).toString('hex'), 68 | crypto.randomBytes(5 * 1024).toString('hex'), 69 | crypto.randomBytes(5 * 1024).toString('hex') 70 | ] 71 | let emits = 0 72 | 73 | const ZLIB_BUFFER_SIZE = 16 * 1024 74 | 75 | // 33k: two times the zlib buffer + a little extra 76 | const size = 2 * 16 * 1024 + 1024 77 | 78 | // The internals of zlib works in mysterious ways. The overshoot will in many 79 | // cases be more than the size of the zlib buffer, but so far we haven't seen 80 | // it be more than twice that. So we use 2x just to be safe. 81 | const maxOutputSize = size + 2 * ZLIB_BUFFER_SIZE 82 | 83 | const chopper = new StreamChopper({ 84 | size, 85 | type: StreamChopper.overflow, 86 | transform () { 87 | return zlib.createGzip({ 88 | level: zlib.constants ? 
zlib.constants.Z_NO_COMPRESSION : zlib.Z_NO_COMPRESSION 89 | }) 90 | } 91 | }) 92 | 93 | chopper.on('stream', function (stream, next) { 94 | const emit = ++emits 95 | const chunks = [] 96 | 97 | t.ok(stream instanceof zlib.Gzip, 'emitted stream should be of type Gzip') 98 | 99 | stream.on('data', chunks.push.bind(chunks)) 100 | stream.on('end', function () { 101 | const data = Buffer.concat(chunks) 102 | 103 | if (emit === 1) { 104 | t.ok(data.length >= size, `output should be within bounds (${data.length} >= ${size})`) 105 | } 106 | t.ok(data.length <= maxOutputSize, `output should be within bounds (${data.length} <= ${maxOutputSize})`) 107 | 108 | next() 109 | if (emit === 2) t.end() 110 | }) 111 | }) 112 | 113 | write() 114 | 115 | function write (index) { 116 | if (!index) index = 0 117 | if (index === writes.length) return chopper.end() 118 | chopper.write(writes[index]) 119 | setImmediate(write.bind(null, ++index)) 120 | } 121 | }) 122 | 123 | test('transform: shouldn\'t throw even if transform stream is set to null before first data event', function (t) { 124 | const chopper = new StreamChopper({ 125 | type: StreamChopper.overflow, 126 | transform () { 127 | return zlib.createGzip() 128 | } 129 | }) 130 | 131 | chopper.on('stream', function (stream, next) { 132 | stream.resume() 133 | }) 134 | 135 | chopper.write('hello') 136 | chopper.destroy() 137 | 138 | t.end() 139 | }) 140 | 141 | test('2nd write with remainder and type:split', function (t) { 142 | const streams = [ 143 | ['hello world', 'h'], 144 | ['ello world', 'he'], 145 | ['llo world', 'hel'], 146 | ['lo world'] 147 | ] 148 | 149 | const chopper = new StreamChopper({ 150 | size: 'hello world'.length + 1, 151 | type: StreamChopper.split 152 | }) 153 | 154 | chopper.on('stream', function (stream, next) { 155 | const chunks = streams.shift() 156 | const last = streams.length === 0 157 | t.ok(chunks) 158 | 159 | stream.on('data', function (chunk) { 160 | const expected = chunks.shift() 161 | 
t.ok(expected) 162 | t.equal(chunk.toString(), expected, `should receive '${expected}'`) 163 | }) 164 | 165 | stream.on('end', function () { 166 | next() 167 | if (last) t.end() 168 | }) 169 | }) 170 | 171 | chopper.write('hello world') 172 | chopper.write('hello world') 173 | chopper.write('hello world') 174 | chopper.write('hello world') 175 | chopper.end() 176 | }) 177 | 178 | test('2nd write with remainder and type:overflow', function (t) { 179 | const sizeOfWrite = 'hello world 1'.length 180 | const chopper = new StreamChopper({ 181 | size: Math.round(sizeOfWrite + sizeOfWrite / 2), // allow for a length of 1.5x of a single write 182 | type: StreamChopper.overflow 183 | }) 184 | chopper.on('stream', assertOnStream(t, 3)) 185 | chopper.write('hello world 1') // within limit 186 | chopper.write('hello world 1') // go 0.5 over the limit 187 | chopper.write('hello world 2') // within limit 188 | chopper.write('hello world 2') // go 0.5 over the limit 189 | chopper.write('hello world 3') // within limit 190 | chopper.write('hello world 3') // go 0.5 over the limit 191 | chopper.end() 192 | }) 193 | 194 | test('2nd write with remainder and type:underflow', function (t) { 195 | const sizeOfWrite = 'hello world 1'.length 196 | const chopper = new StreamChopper({ 197 | size: Math.round(sizeOfWrite + sizeOfWrite / 2), // allow for a length of 1.5x of a single write 198 | type: StreamChopper.underflow 199 | }) 200 | chopper.on('stream', assertOnStream(t, 4)) 201 | chopper.write('hello world 1') // within limit 202 | chopper.write('hello world 2') // go 0.5 over the limit 203 | chopper.write('hello world 3') // within limit 204 | chopper.write('hello world 4') // go 0.5 over the limit 205 | chopper.end() 206 | }) 207 | 208 | test('1st write with remainder and type:split', function (t) { 209 | const streams = [ 210 | ['hello'], 211 | [' worl'], 212 | ['d', 'hell'], 213 | ['o wor'], 214 | ['ld'] 215 | ] 216 | 217 | const chopper = new StreamChopper({ size: 5, type: 
StreamChopper.split }) 218 | 219 | chopper.on('stream', function (stream, next) { 220 | const chunks = streams.shift() 221 | const last = streams.length === 0 222 | t.ok(chunks) 223 | 224 | stream.on('data', function (chunk) { 225 | const expected = chunks.shift() 226 | t.ok(expected) 227 | t.equal(chunk.toString(), expected) 228 | }) 229 | 230 | stream.on('end', function () { 231 | next() 232 | if (last) t.end() 233 | }) 234 | }) 235 | 236 | chopper.write('hello world') 237 | chopper.write('hello world') 238 | chopper.end() 239 | }) 240 | 241 | test('1st write with remainder and type:overflow', function (t) { 242 | const chopper = new StreamChopper({ size: 5, type: StreamChopper.overflow }) 243 | chopper.on('stream', assertOnStream(t, 2)) 244 | chopper.write('hello world 1') 245 | chopper.write('hello world 2') 246 | chopper.end() 247 | }) 248 | 249 | test('1st write with remainder and type:underflow', function (t) { 250 | const chopper = new StreamChopper({ size: 4, type: StreamChopper.underflow }) 251 | 252 | chopper.on('stream', function (stream, next) { 253 | stream.resume() 254 | next() 255 | }) 256 | 257 | chopper.on('error', function (err) { 258 | t.equal(err.message, 'Cannot write 5 byte chunk - only 4 available') 259 | t.end() 260 | }) 261 | 262 | chopper.write('hello') 263 | chopper.end() 264 | }) 265 | 266 | test('if next() is not called, next stream should not be emitted', function (t) { 267 | let emitted = false 268 | const chopper = new StreamChopper({ 269 | size: 4, 270 | type: StreamChopper.overflow 271 | }) 272 | chopper.on('stream', function (stream, next) { 273 | t.equal(emitted, false) 274 | emitted = true 275 | stream.resume() 276 | }) 277 | chopper.write('hello') // indirect chop 278 | chopper.end('world') // indirect chop 279 | setTimeout(function () { 280 | t.end() 281 | }, 100) 282 | }) 283 | 284 | test('call next() with error', function (t) { 285 | t.plan(1) 286 | const err = new Error('foo') 287 | const chopper = new StreamChopper() 288 
| chopper.on('stream', function (stream, next) { 289 | next(err) 290 | }) 291 | chopper.on('error', function (_err) { 292 | t.equal(_err, err) 293 | }) 294 | chopper.on('close', function () { 295 | t.end() 296 | }) 297 | chopper.write('hello') 298 | }) 299 | 300 | test('chopper.destroy() - synchronously during write', function (t) { 301 | const chopper = new StreamChopper() 302 | chopper.on('stream', function (stream, next) { 303 | // this event is emitted synchronously during the chopper.write call below 304 | chopper.destroy() 305 | }) 306 | chopper.on('error', function (err) { 307 | t.error(err) 308 | }) 309 | chopper.on('close', function () { 310 | t.end() 311 | }) 312 | chopper.write('hello') 313 | }) 314 | 315 | test('chopper.destroy() - active stream', function (t) { 316 | t.plan(2) 317 | 318 | const chopper = new StreamChopper() 319 | 320 | chopper.on('stream', function (stream, next) { 321 | stream.on('data', function (chunk) { 322 | t.equal(chunk.toString(), 'hello', 'stream should get data') 323 | }) 324 | stream.on('error', function (err) { 325 | t.error(err, 'error on the stream') 326 | }) 327 | stream.on('close', function () { 328 | t.pass('stream should close') 329 | next() 330 | }) 331 | }) 332 | 333 | chopper.on('close', function () { 334 | t.end() 335 | }) 336 | 337 | chopper.on('error', function (err) { 338 | t.error(err, 'error on the chopper') 339 | }) 340 | 341 | chopper.write('hello') 342 | chopper.destroy() 343 | }) 344 | 345 | test('chopper.destroy(err) - active stream', function (t) { 346 | t.plan(3) 347 | 348 | const err = new Error('foo') 349 | const chopper = new StreamChopper() 350 | 351 | chopper.on('stream', function (stream, next) { 352 | stream.on('data', function (chunk) { 353 | t.equal(chunk.toString(), 'hello', 'stream should get data') 354 | }) 355 | stream.on('error', function (err) { 356 | t.error(err) 357 | }) 358 | stream.on('close', function () { 359 | t.pass('stream should close') 360 | next() 361 | }) 362 | }) 363 | 364 
| chopper.on('close', function () { 365 | t.end() 366 | }) 367 | 368 | chopper.on('error', function (_err) { 369 | t.equal(_err, err) 370 | }) 371 | 372 | chopper.write('hello') 373 | chopper.destroy(err) 374 | }) 375 | 376 | test('chopper.destroy() - no active stream', function (t) { 377 | t.plan(3) 378 | 379 | const chopper = new StreamChopper({ 380 | size: 4, 381 | type: StreamChopper.overflow 382 | }) 383 | 384 | chopper.on('stream', function (stream, next) { 385 | stream.on('data', function (chunk) { 386 | t.equal(chunk.toString(), 'hello', 'stream should get data') 387 | }) 388 | stream.on('error', function (err) { 389 | t.error(err, 'error on the stream') 390 | }) 391 | stream.on('end', function () { 392 | t.pass('stream should end') 393 | next() 394 | }) 395 | }) 396 | 397 | chopper.on('close', function () { 398 | t.pass('chopper should close') 399 | t.end() 400 | }) 401 | 402 | chopper.on('error', function (err) { 403 | t.error(err, 'error on the chopper') 404 | }) 405 | 406 | chopper.write('hello') // force chop to make sure there's no active stream 407 | chopper.destroy() 408 | }) 409 | 410 | test('chopper.destroy(err) - no active stream', function (t) { 411 | t.plan(4) 412 | 413 | const err = new Error('foo') 414 | const chopper = new StreamChopper({ 415 | size: 4, 416 | type: StreamChopper.overflow 417 | }) 418 | 419 | chopper.on('stream', function (stream, next) { 420 | stream.on('data', function (chunk) { 421 | t.equal(chunk.toString(), 'hello', 'stream should get data') 422 | }) 423 | stream.on('error', function (err) { 424 | t.error(err, 'error on the stream') 425 | }) 426 | stream.on('end', function () { 427 | t.pass('stream should end') 428 | next() 429 | }) 430 | }) 431 | 432 | chopper.on('close', function () { 433 | t.pass('chopper should close') 434 | t.end() 435 | }) 436 | 437 | chopper.on('error', function (_err) { 438 | t.equal(_err, err) 439 | }) 440 | 441 | chopper.write('hello') // force chop to make sure there's no active stream 442 | 
chopper.destroy(err) 443 | }) 444 | 445 | test('chopper.chop(callback)', function (t) { 446 | t.plan(8) 447 | 448 | let emits = 0 449 | const chunks = ['hello', 'world'] 450 | const chopper = new StreamChopper() 451 | 452 | chopper.on('stream', function (stream, next) { 453 | const emit = ++emits 454 | stream.on('data', function (chunk) { 455 | t.equal(emit, emits, 'should finish streaming current stream before emitting the next') 456 | t.equal(chunk.toString(), chunks.shift()) 457 | }) 458 | stream.on('end', function () { 459 | t.equal(emit, emits, 'should end current stream before emitting the next') 460 | t.pass(`stream ${emit} ended`) 461 | next() 462 | if (emit === 2) t.end() 463 | }) 464 | }) 465 | 466 | chopper.write('hello') 467 | chopper.chop(function () { 468 | chopper.write('world') 469 | chopper.end() 470 | }) 471 | }) 472 | 473 | test('chopper.chop(callback) - on destroyed stream', function (t) { 474 | const chopper = new StreamChopper() 475 | chopper.destroy() 476 | chopper.chop(function () { 477 | t.end() 478 | }) 479 | }) 480 | 481 | test('chopper.chop()', function (t) { 482 | t.plan(8) 483 | 484 | let emits = 0 485 | const chunks = ['hello', 'world'] 486 | const chopper = new StreamChopper() 487 | 488 | chopper.on('stream', function (stream, next) { 489 | const emit = ++emits 490 | stream.on('data', function (chunk) { 491 | t.equal(emit, emits, 'should finish streaming current stream before emitting the next') 492 | t.equal(chunk.toString(), chunks.shift()) 493 | }) 494 | stream.on('end', function () { 495 | t.equal(emit, emits, 'should end current stream before emitting the next') 496 | t.pass(`stream ${emit} ended`) 497 | next() 498 | if (emit === 2) t.end() 499 | }) 500 | }) 501 | 502 | chopper.write('hello') 503 | chopper.chop() 504 | chopper.write('world') 505 | chopper.end() 506 | }) 507 | 508 | test('chopper.chop() - twice with no write in between', function (t) { 509 | t.plan(8) 510 | 511 | let emits = 0 512 | const chunks = ['hello', 
'world'] 513 | const chopper = new StreamChopper() 514 | 515 | chopper.on('stream', function (stream, next) { 516 | const emit = ++emits 517 | stream.on('data', function (chunk) { 518 | t.equal(emit, emits, 'should finish streaming current stream before emitting the next') 519 | t.equal(chunk.toString(), chunks.shift()) 520 | }) 521 | stream.on('end', function () { 522 | t.equal(emit, emits, 'should end current stream before emitting the next') 523 | t.pass(`stream ${emit} ended`) 524 | next() 525 | if (emit === 2) t.end() 526 | }) 527 | }) 528 | 529 | chopper.write('hello') 530 | chopper.chop() 531 | chopper.chop() 532 | chopper.write('world') 533 | chopper.end() 534 | }) 535 | 536 | test('chopper.chop() - twice with write in between', function (t) { 537 | t.plan(8) 538 | 539 | let emits = 0 540 | const chunks = ['hello', 'world'] 541 | const chopper = new StreamChopper() 542 | 543 | chopper.on('stream', function (stream, next) { 544 | const emit = ++emits 545 | stream.on('data', function (chunk) { 546 | t.equal(emit, emits, 'should finish streaming current stream before emitting the next') 547 | t.equal(chunk.toString(), chunks.shift()) 548 | }) 549 | stream.on('end', function () { 550 | t.equal(emit, emits, 'should end current stream before emitting the next') 551 | t.pass(`stream ${emit} ended`) 552 | next() 553 | if (emit === 2) { 554 | t.end() 555 | chopper.destroy() 556 | } 557 | }) 558 | }) 559 | 560 | chopper.write('hello') 561 | chopper.chop() 562 | chopper.write('world') 563 | chopper.chop() 564 | }) 565 | 566 | test('should not chop if no size is given', function (t) { 567 | const bigString = new Array(10000).join('hello ') 568 | const totalWrites = 1000 569 | let emitted = false 570 | 571 | t.plan(totalWrites) 572 | 573 | const chopper = new StreamChopper() 574 | 575 | chopper.on('stream', function (stream, next) { 576 | if (emitted) t.fail('should not emit stream more than once') 577 | emitted = true 578 | stream.on('data', function (chunk) { 579 | 
t.equal(chunk.toString(), bigString) 580 | }) 581 | next() 582 | }) 583 | 584 | for (let n = 0; n < totalWrites; n++) { 585 | chopper.write(bigString) 586 | } 587 | }) 588 | 589 | test('should not chop if no time is given', function (t) { 590 | setTimeout(function () { 591 | t.end() 592 | }, 100) 593 | 594 | const origSetTimeout = global.setTimeout 595 | global.setTimeout = function () { 596 | t.fail('should not set a timeout') 597 | } 598 | t.on('end', function () { 599 | global.setTimeout = origSetTimeout 600 | }) 601 | 602 | const chopper = new StreamChopper() 603 | chopper.on('stream', function (stream, next) { 604 | stream.resume() 605 | next() 606 | }) 607 | chopper.write('test') 608 | }) 609 | 610 | test('should chop when timeout occurs', function (t) { 611 | const chopper = new StreamChopper({ time: 50 }) 612 | chopper.on('stream', assertOnStream(t, 2)) 613 | chopper.write('hello world 1') 614 | setTimeout(function () { 615 | chopper.write('hello world 2') 616 | chopper.end() 617 | }, 100) 618 | }) 619 | 620 | test('handle backpressure when current stream is full, but next() haven\'t been called yet', function (t) { 621 | t.plan(4) 622 | 623 | const chunks = ['foo', 'bar', 'baz'] 624 | let emits = 0 625 | let firstNext 626 | 627 | const chopper = new StreamChopper({ 628 | size: 2, 629 | type: StreamChopper.overflow 630 | }) 631 | 632 | chopper.on('stream', function (stream, next) { 633 | const emit = ++emits 634 | const expected = chunks.shift() 635 | 636 | if (emit === 1) firstNext = next 637 | 638 | stream.on('data', function (chunk) { 639 | t.equal(chunk.toString(), expected) 640 | }) 641 | 642 | stream.on('end', function () { 643 | if (emit > 1) next() 644 | if (emit === 3) t.end() 645 | }) 646 | }) 647 | 648 | chopper.write('foo') // indirect chop 649 | chopper.write('bar') // indirect chop 650 | chopper.end('baz') // indirect chop 651 | 652 | t.equal(emits, 1, 'should only have emitted the first stream') 653 | 654 | firstNext() 655 | }) 656 | 657 | 
// The tests below cover two scenario groups:
//  1. output streams destroyed by the consumer while the chopper is active
//  2. chopper config (size / type / time) being changed between chops
// NOTE(review): formatting reconstructed from a flattened dump line; code
// tokens are unchanged.

// Destroying the emitted stream from its 'data' handler should surface as a
// 'close' event on that stream.
test('output stream destroyed by user', function (t) {
  t.plan(2)

  let emits = 0
  const chopper = new StreamChopper()

  chopper.on('stream', function (stream, next) {
    const emit = ++emits

    stream.on('data', function (chunk) {
      t.equal(chunk.toString(), 'hello', 'stream should get data')
      stream.destroy() // force output stream to end unexpectedly
    })
    stream.on('close', function () {
      t.pass(`stream ${emit} closed`)
      next()
      t.end()
    })
  })

  chopper.write('hello')
})

// Writing again from the destroyed stream's 'close' handler should emit a
// fresh output stream that ends normally.
test('output stream destroyed by user followed by chopper.write() when stream emits end', function (t) {
  t.plan(4)

  let emits = 0
  const chunks = ['hello', 'world']
  const chopper = new StreamChopper()

  chopper.on('stream', function (stream, next) {
    const emit = ++emits

    stream.on('data', function (chunk) {
      t.equal(chunk.toString(), chunks.shift())
      if (emit === 1) stream.destroy() // force output stream to end unexpectedly
    })
    stream.on('close', function () {
      t.equal(emit, 1, 'stream 1 should close')
      next()
      chopper.end('world') // start writing before stream have emitted finish
    })
    stream.on('end', function () {
      t.equal(emit, 2, 'stream 2 should end')
      t.end()
    })
  })

  chopper.write('hello')
})

// Writing immediately after destroying the active stream: the buffered data
// should be chopped across the subsequent streams according to `size`.
test('output stream destroyed by user followed directly by chopper.write()', function (t) {
  t.plan(14)

  let emits = 0
  // expected data chunks per emitted stream ('baz' is split at size 4)
  const streams = [
    ['foo'],
    ['bar', 'b'],
    ['az']
  ]
  const chopper = new StreamChopper({ size: 4 })

  chopper.on('stream', function (stream, next) {
    const emit = ++emits
    const chunks = streams.shift()
    t.ok(chunks)

    stream.on('data', function (chunk) {
      const expected = chunks.shift()
      t.ok(expected)
      t.equal(chunk.toString(), expected, `should get '${expected}'`)
      if (emit === 1) {
        stream.destroy() // force output stream to end unexpectedly
        chopper.write('bar') // start writing while stream is in the process of being destroyed
        chopper.end('baz') // start writing while stream is locked
      }
    })
    stream.on('close', function () {
      t.equal(emit, 1, 'stream 1 should close')
      next()
    })
    stream.on('end', function () {
      t.ok(emit > 1, `stream 1 shouldn't end (was stream ${emit}`)
      next()
      if (emit === 3) t.end()
    })
  })

  chopper.write('foo')
})

// Changing `chopper.size` between chops should apply to subsequent streams.
test('change size midflight', function (t) {
  t.plan(12)

  let emits = 0
  const streams = [
    ['foo'],
    ['bar'],
    ['foobar']
  ]

  const chopper = new StreamChopper({ size: 3 })

  chopper.on('stream', function (stream, next) {
    const emit = ++emits
    const chunks = streams.shift()
    t.ok(chunks, `stream ${emit} should be expected`)

    stream.on('data', function (chunk) {
      const expected = chunks.shift()
      t.ok(expected, `chunk event should be expected on stream ${emit}`)
      t.equal(chunk.toString(), expected, `should get '${expected}'`)
      if (emit === 2) {
        // grow the size limit so the final 6-byte write fits in one stream
        chopper.size = 6
        chopper.write('foobar')
        chopper.end()
      }
    })
    stream.on('end', function () {
      t.pass(`stream ${emit} ended`)
      next()
      if (emit === 3) t.end()
    })
  })

  chopper.write('foobar')
})

// Changing `chopper.type` between chops should apply to subsequent streams
// (overflow allows the final 6-byte write to exceed size 3 un-split).
test('change type midflight', function (t) {
  t.plan(12)

  let emits = 0
  const streams = [
    ['foo'],
    ['bar'],
    ['foobar']
  ]

  const chopper = new StreamChopper({ size: 3 })

  chopper.on('stream', function (stream, next) {
    const emit = ++emits
    const chunks = streams.shift()
    t.ok(chunks, `stream ${emit} should be expected`)

    stream.on('data', function (chunk) {
      const expected = chunks.shift()
      t.ok(expected, `chunk event should be expected on stream ${emit}`)
      t.equal(chunk.toString(), expected, `should get '${expected}'`)
      if (emit === 2) {
        chopper.type = StreamChopper.overflow
        chopper.write('foobar')
        chopper.end()
      }
    })
    stream.on('end', function () {
      t.pass(`stream ${emit} ended`)
      next()
      if (emit === 3) t.end()
    })
  })

  chopper.write('foobar')
})

// Changing `chopper.time` between chops: stream 1 should time out per the
// original 200ms setting, stream 2 per the updated 500ms setting.
test('change time midflight', function (t) {
  t.plan(8)

  let start
  let emits = 0
  const streams = [
    ['foo'],
    ['bar']
  ]

  const chopper = new StreamChopper({ time: 200 })

  chopper.on('stream', function (stream, next) {
    const emit = ++emits
    const chunks = streams.shift()
    t.ok(chunks, `stream ${emit} should be expected`)

    stream.on('data', function (chunk) {
      const expected = chunks.shift()
      t.ok(expected, `chunk event should be expected on stream ${emit}`)
      t.equal(chunk.toString(), expected, `should get '${expected}'`)
    })
    stream.on('end', function () {
      const diff = Date.now() - start
      if (emit === 1) {
        t.ok(diff >= 200 && diff <= 400, `should end the stream witin a window of 200-400ms (was: ${diff})`)
        chopper.time = 500
        start = Date.now()
        chopper.write('bar')
        next()
      } else {
        t.ok(diff >= 500 && diff <= 700, `should end the stream witin a window of 500-700ms (was: ${diff})`)
        clearTimeout(timer)
        next()
        chopper.destroy()
        t.end()
      }
    })
  })

  // we need a timer on the event loop so the test doesn't exit too soon
  const timer = setTimeout(function () {
    t.fail('took too long')
  }, 1101)

  start = Date.now()
  chopper.write('foo')
})

// Tests for #chopper.resetTimer([time]) plus the shared assertOnStream
// helper used by the write/chop tests earlier in this file.
// NOTE(review): formatting reconstructed from a flattened dump line; code
// tokens are unchanged.

// resetTimer() while a stream is active should restart the `time` countdown
// from now (write at t=0, reset at t=100, time=200 → end ~300ms).
test('#chopper.resetTimer()', function (t) {
  t.plan(2)

  let start
  const chopper = new StreamChopper({ time: 200 })

  chopper.on('stream', function (stream, next) {
    stream.on('data', function (chunk) {
      t.equal(chunk.toString(), 'foo', 'stream should get data')
    })
    stream.on('end', function () {
      const diff = Date.now() - start
      t.ok(diff >= 300 && diff <= 500, `should end the stream witin a window of 300-500ms (was: ${diff})`)
      clearTimeout(timer)
      next()
      chopper.destroy()
      t.end()
    })
  })

  // we need a timer on the event loop so the test doesn't exit too soon
  const timer = setTimeout(function () {
    t.fail('took too long')
  }, 501)

  start = Date.now()
  chopper.write('foo')

  setTimeout(function () {
    chopper.resetTimer()
  }, 100)
})

// With no active stream there is nothing to time out: resetTimer() must not
// start a timer or emit a stream.
test('#chopper.resetTimer() - without an active stream', function (t) {
  const chopper = new StreamChopper({ time: 200 })

  chopper.on('stream', function (stream, next) {
    t.fail('should never emit a stream')
  })

  setTimeout(function () {
    chopper.destroy()
    t.end()
  }, 400)

  chopper.resetTimer()

  // pokes at the private _timer field to assert no timer was scheduled
  t.notOk(chopper._timer)
})

// resetTimer(time) should both restart the countdown and change `time`
// (write at t=0, reset to 400 at t=100 → end ~500ms).
test('#chopper.resetTimer(time) - with current time set to 200', function (t) {
  t.plan(2)

  let start
  const chopper = new StreamChopper({ time: 200 })

  chopper.on('stream', function (stream, next) {
    stream.on('data', function (chunk) {
      t.equal(chunk.toString(), 'foo', 'stream should get data')
    })
    stream.on('end', function () {
      const diff = Date.now() - start
      t.ok(diff >= 500 && diff <= 700, `should end the stream witin a window of 500-700ms (was: ${diff})`)
      clearTimeout(timer)
      next()
      chopper.destroy()
      t.end()
    })
  })

  // we need a timer on the event loop so the test doesn't exit too soon
  const timer = setTimeout(function () {
    t.fail('took too long')
  }, 701)

  start = Date.now()
  chopper.write('foo')

  setTimeout(function () {
    chopper.resetTimer(400)
  }, 100)
})

// resetTimer(time) should enable a timeout even when timing was disabled via
// time: -1 at construction.
test('#chopper.resetTimer(time) - with current time set to -1', function (t) {
  t.plan(2)

  let start
  const chopper = new StreamChopper({ time: -1 })

  chopper.on('stream', function (stream, next) {
    stream.on('data', function (chunk) {
      t.equal(chunk.toString(), 'foo', 'stream should get data')
    })
    stream.on('end', function () {
      const diff = Date.now() - start
      t.ok(diff >= 400 && diff <= 600, `should end the stream witin a window of 400-600ms (was: ${diff})`)
      clearTimeout(timer)
      next()
      chopper.destroy()
      t.end()
    })
  })

  // we need a timer on the event loop so the test doesn't exit too soon
  const timer = setTimeout(function () {
    t.fail('took too long')
  }, 601)

  start = Date.now()
  chopper.write('foo')
  chopper.resetTimer(400)
})

// resetTimer(-1) should disable a pending timeout, so the active stream must
// still be open when the checker fires at 200ms (original time was 100ms).
test('#chopper.resetTimer(-1)', function (t) {
  t.plan(2)

  let ended = false
  const chopper = new StreamChopper({ time: 100 })

  chopper.on('stream', function (stream, next) {
    stream.on('data', function (chunk) {
      t.equal(chunk.toString(), 'foo', 'stream should get data')
    })
    stream.on('end', function () {
      ended = true
    })
  })

  setTimeout(function () {
    t.equal(ended, false, 'should successfully have disabled the timer before the stream ended')
    chopper.destroy()
    t.end()
  }, 200)

  chopper.write('foo')

  setTimeout(function () {
    chopper.resetTimer(-1)
  }, 50)
})

// Returns a 'stream' listener asserting that each emitted stream receives
// 'hello world <n>' chunks (n = emit ordinal) and that exactly
// `expectedEmits` streams are emitted, calling t.end() after the last one.
function assertOnStream (t, expectedEmits) {
  let emits = 0
  return function (stream, next) {
    const emit = ++emits
    stream.on('data', function (chunk) {
      t.equal(chunk.toString(), 'hello world ' + emit, 'expected data for stream ' + emit)
    })
    stream.on('end', function () {
      next()
      if (emit >= expectedEmits) {
        t.equal(emits, expectedEmits)
        t.end()
      }
    })
  }
}
--------------------------------------------------------------------------------