├── .editorconfig ├── .eslintrc.json ├── .gitignore ├── LICENSE.md ├── README.md ├── package.json ├── src ├── main.js ├── transformations │ ├── Chunk.js │ ├── Filter.js │ ├── Map.js │ ├── Merge.js │ ├── Omit.js │ ├── Pick.js │ ├── Pluck.js │ ├── Uniq.js │ └── WeakSort.js └── utils.js └── test ├── _helper.js ├── main.js └── transformations ├── Chunk.js ├── Filter.js ├── Map.js ├── Merge.js ├── Omit.js ├── Pick.js ├── Pluck.js ├── Uniq.js └── WeakSort.js /.editorconfig: -------------------------------------------------------------------------------- 1 | root = true 2 | 3 | [*] 4 | indent_style = space 5 | indent_size = 4 6 | charset = utf-8 7 | trim_trailing_whitespace = true 8 | insert_final_newline = true 9 | end_of_line = lf 10 | # editorconfig-tools is unable to ignore longs strings or urls 11 | max_line_length = null 12 | -------------------------------------------------------------------------------- /.eslintrc.json: -------------------------------------------------------------------------------- 1 | { 2 | "extends": "airbnb-base", 3 | "plugins": [ 4 | "import", 5 | "promise" 6 | ], 7 | "rules": { 8 | "indent": ["error", 4], 9 | "no-underscore-dangle": [2, { 10 | "allow": ["_id"], 11 | "allowAfterThis": true 12 | }], 13 | "no-use-before-define": 0, 14 | "no-param-reassign": 0, 15 | "consistent-return": 0, 16 | "array-callback-return": 0, 17 | "object-curly-newline": 0, 18 | "arrow-body-style": 0, 19 | "no-plusplus": 0, 20 | "strict": ["error", "global"], 21 | "max-len": ["error", 150], 22 | "no-undef": 0, 23 | "func-names": 0, 24 | "import/prefer-default-export": 0, 25 | "import/no-absolute-path": 0, 26 | "import/no-extraneous-dependencies": ["error", { "devDependencies": ["**/test/**/*.js"] }], 27 | "no-await-in-loop": 0, 28 | "no-restricted-syntax": ["error", "ForInStatement", "LabeledStatement", "WithStatement"] 29 | } 30 | } 31 | -------------------------------------------------------------------------------- /.gitignore: 
-------------------------------------------------------------------------------- 1 | node_modules 2 | .DS_Store 3 | coverage 4 | .nyc_output 5 | .idea 6 | package-lock.json 7 | -------------------------------------------------------------------------------- /LICENSE.md: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 
34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. 
Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. 
You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 
122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. 
In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "{}" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. 
We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright 2018 Apify Technologies s.r.o. 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 202 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Flowage 2 | 3 | [![npm version](https://badge.fury.io/js/flowage.svg)](https://www.npmjs.com/package/flowage) 4 | 5 | ## Contents 6 | 7 | 8 | 9 | - [Motivation](#motivation) 10 | - [Basic usage](#basic-usage) 11 | - [Reference](#reference) 12 | * [merge `stream1.merge(stream2)`](#merge-stream1mergestream2) 13 | * [collect `stream.collect()`](#collect-streamcollect) 14 | * [filter `stream.filter(function)`](#filter-streamfilterfunction) 15 | * [chunk `stream.chunk(length)`](#chunk-streamchunklength) 16 | * [map `stream.map(function)`](#map-streammapfunction) 17 | * [omit `stream.omit(field1, field2, ...)`](#omit-streamomitfield1-field2-) 18 | * [pick `stream.pick(field1, field2, ...)`](#pick-streampickfield1-field2-) 19 | * [pluck `stream.pluck(field);`](#pluck-streampluckfield) 20 | * [uniq `stream.uniq(field)`](#uniq-streamuniqfield) 21 | * [weakSort `stream.weakSort(sortFunction, [bufferMinSize=75], 
[bufferMaxSize=100])`](#weaksort-streamweaksortsortfunction-bufferminsize75-buffermaxsize100) 22 | * [onSeries `stream.onSeries(async function, [concurrency=1])`](#onseries-streamonseriesasync-function-concurrency1) 23 | 24 | 25 | 26 | ## Motivation 27 | 28 | This package simplifies transformations and filtering of NodeJS object streams. Think about it as [Underscore.js](http://underscorejs.org) 29 | for streams. 30 | 31 | The basic use case I faced many times was a transformation of a large number of JSON objects that are finally stored in some database. 32 | Transformation is the quick part but then you have to then chunk data in size allowed by your database to limit the number of queries 33 | and control the flow of the whole stream based on how fast you are able to save the transformed data. 34 | 35 | ## Basic usage 36 | 37 | ```javascript 38 | const { Readable } = require('stream'); 39 | const Flowage = require('flowage'); 40 | 41 | // Let's have some stream that will output a series of objects { n: 0 }, { n: 1 }, { n: 2 }, { n: 3 }, ... 42 | const readable = new Readable({ objectMode: true }); 43 | let n = 0; 44 | setInterval(() => readable.push({ n: n++ }), 1000); 45 | 46 | // Pipe it thru Flowage() to get stream extended by helper methods. 47 | const flowage = readable.pipe(new Flowage()); 48 | 49 | // Split the stream into a stream of odd objects and even objects and extend them with some field is='odd' or is='even'. 50 | const oddStream = flowage 51 | .filter(obj => obj.n % 2) 52 | .map(obj => Object.assign({}, obj, { is: 'odd' })); 53 | 54 | const evenStream = flowage 55 | .filter(obj => obj.n % 2 === 0) 56 | .map(obj => Object.assign({}, obj, { is: 'even' })); 57 | 58 | // Then merge them back. 59 | const mergedStream = oddStream.merge(evenStream); 60 | 61 | // Chunk them by 100 records. 62 | const chunkedStream = mergedStream.chunk(100); 63 | 64 | // Save them to MongoDB in batches of 100 items with concurrency 2. 
65 | // This also pauses the stream whenever the max concurrency is reached. 66 | chunkedStream.onSeries(async (arrayOf100Items) => { 67 | await database.collection('test').insert(arrayOf100Items); 68 | }, { concurrency: 2 }); 69 | 70 | ``` 71 | 72 | ## Reference 73 | 74 | ### merge `stream1.merge(stream2)` 75 | 76 | Returns stream containing values merged from 2 given streams. Merged stream ends when both streams end. 77 | 78 | ```javascript 79 | const mergedStream = stream1.merge(stream2); 80 | ``` 81 | 82 | ### collect `stream.collect()` 83 | 84 | Returns Promise that gets resolved when stream ends to an array of all the values. 85 | 86 | ```javascript 87 | const data = await stream.collect(); 88 | ``` 89 | 90 | ### filter `stream.filter(function)` 91 | 92 | Returns stream containing filtered values. 93 | 94 | ```javascript 95 | // Keep only the even-indexed items from the stream. 96 | const filteredStream = stream.filter(val => val.index % 2 === 0); 97 | ``` 98 | 99 | ### chunk `stream.chunk(length)` 100 | 101 | Returns stream where each item is an array of the given number of items from the original stream. 102 | 103 | ```javascript 104 | // Chunk values into arrays of 10 items. 105 | const chunkedStream = stream.chunk(10); 106 | ``` 107 | 108 | ### map `stream.map(function)` 109 | 110 | Returns stream where original items are transformed using given function. 111 | 112 | ```javascript 113 | // Extend each object in the stream with `.foo = 'bar'` field. 114 | const mappedStream = stream.map(val => Object.assign({}, val, { foo: 'bar' })); 115 | ``` 116 | 117 | ### omit `stream.omit(field1, field2, ...)` 118 | 119 | Returns stream where the given fields were omitted. 120 | 121 | ```javascript 122 | // Omit field1 and field2 from stream objects. 123 | const resultingStream = stream.omit('field1', 'field2'); 124 | ``` 125 | 126 | ### pick `stream.pick(field1, field2, ...)` 127 | 128 | Returns stream where each item contains only the given fields. 
129 | 130 | ```javascript 131 | // Pick only field1 and field2 from stream objects. 132 | const resultingStream = stream.pick('field1', 'field2'); 133 | ``` 134 | 135 | ### pluck `stream.pluck(field);` 136 | 137 | Returns stream with given field picked from each item. 138 | 139 | ```javascript 140 | // Pluck field1 from each stream object. 141 | const resultingStream = stream.pluck('field1'); 142 | ``` 143 | 144 | ### uniq `stream.uniq(field)` 145 | 146 | Returns stream containing only unique items based on given field. 147 | You need enough memory to keep a set of all unique values hashed using sha256. 148 | 149 | ```javascript 150 | // Filter unique items based on id field. 151 | const uniquesStream = stream.uniq('id'); 152 | ``` 153 | 154 | ### weakSort `stream.weakSort(sortFunction, [bufferMinSize=75], [bufferMaxSize=100])` 155 | 156 | Returns stream containing values sorted using given function and floating buffer of a given size. 157 | 158 | This method is helpful when only a few neighboring items may have the wrong order. This may happen 159 | for example when a client is pushing data into the storage via API with concurrency higher than 1 and the 160 | requests reach the server in the wrong order. Or the API has multiple redundant instances that may process 161 | the incoming requests with different speed. 162 | 163 | This method uses a buffer for streamed items. Every time the buffer reaches `bufferMaxSize`, it gets 164 | sorted and `bufferMaxSize - bufferMinSize` items are outputted to the stream. 165 | 166 | ```javascript 167 | const sortFunction = (a, b) => a.index < b.index ? -1 : 1; 168 | const sortedStream = stream.weakSort(sortFunction, 75, 100); 169 | ``` 170 | 171 | ### onSeries `stream.onSeries(async function, [concurrency=1])` 172 | 173 | Returns a promise that gets resolved when given function gets finished for the last item of the stream. 174 | 175 | Every time the given concurrency is reached it pauses the stream. 
176 | 177 | ```javascript 178 | // Store items in MongoDB with concurrency 10. 179 | await stream.onSeries(async (item) => { 180 | await database.collection('items').insert(item); 181 | }, 10); 182 | ``` 183 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "flowage", 3 | "version": "0.0.5", 4 | "description": "Easy filtering and transformations for NodeJS streams.", 5 | "engines": { 6 | "node": ">=8.0.0" 7 | }, 8 | "main": "src/main.js", 9 | "keywords": [ 10 | "stream", 11 | "transformation", 12 | "filter", 13 | "underscore" 14 | ], 15 | "author": { 16 | "name": "Marek Trunkat", 17 | "url": "https://trunkat.eu" 18 | }, 19 | "license": "Apache-2.0", 20 | "repository": { 21 | "type": "git", 22 | "url": "git+https://github.com/mtrunkat/js-flowage" 23 | }, 24 | "bugs": { 25 | "url": "https://github.com/mtrunkat/js-flowage/issues" 26 | }, 27 | "homepage": "https://github.com/mtrunkat/js-flowage", 28 | "files": [ 29 | "src" 30 | ], 31 | "scripts": { 32 | "test": "nyc --reporter=html --reporter=text mocha --timeout 60000 --recursive", 33 | "lint": "npm run build && ./node_modules/.bin/eslint ./src ./test", 34 | "lint:fix": "./node_modules/.bin/eslint ./src ./test --ext .js,.jsx --fix", 35 | "build-toc": "./node_modules/.bin/markdown-toc README.md -i" 36 | }, 37 | "dependencies": { 38 | "underscore": "^1.9.1" 39 | }, 40 | "devDependencies": { 41 | "chai": "^4.2.0", 42 | "chai-as-promised": "^7.1.1", 43 | "eslint": "^5.5.0", 44 | "eslint-config-airbnb": "^17.1.0", 45 | "eslint-config-airbnb-base": "^13.1.0", 46 | "eslint-plugin-import": "^2.2.0", 47 | "eslint-plugin-jsx-a11y": "^6.0.2", 48 | "eslint-plugin-promise": "^4.0.1", 49 | "eslint-plugin-react": "^7.0.1", 50 | "markdown-toc": "^1.2.0", 51 | "mocha": "^3.5.3", 52 | "nyc": "^13.0.1" 53 | } 54 | } 55 | 
-------------------------------------------------------------------------------- /src/main.js: -------------------------------------------------------------------------------- 1 | const { ObjectIdentityTransform } = require('./utils'); 2 | const Map = require('./transformations/Map'); 3 | const Filter = require('./transformations/Filter'); 4 | const Pluck = require('./transformations/Pluck'); 5 | const Chunk = require('./transformations/Chunk'); 6 | const Merge = require('./transformations/Merge'); 7 | const Omit = require('./transformations/Omit'); 8 | const Pick = require('./transformations/Pick'); 9 | const Uniq = require('./transformations/Uniq'); 10 | const WeakSort = require('./transformations/WeakSort'); 11 | 12 | const METHODS = { 13 | map: Map, 14 | filter: Filter, 15 | pluck: Pluck, 16 | chunk: Chunk, 17 | omit: Omit, 18 | pick: Pick, 19 | uniq: Uniq, 20 | weakSort: WeakSort, 21 | }; 22 | 23 | class ObjectStreamUtilities extends ObjectIdentityTransform { 24 | constructor() { 25 | super(); 26 | for (const methodName of Object.keys(METHODS)) { 27 | this._registerTransformation(methodName, METHODS[methodName]); 28 | } 29 | } 30 | 31 | _registerTransformation(methodName, MethodClass) { 32 | this[methodName] = (...args) => { 33 | return this 34 | .pipe(new MethodClass(...args)) 35 | .pipe(new ObjectStreamUtilities()); 36 | }; 37 | } 38 | 39 | merge(stream) { 40 | return this 41 | .pipe(new Merge(this, stream), { end: false }) 42 | .pipe(new ObjectStreamUtilities()); 43 | } 44 | 45 | async onSeries(func, { concurrency = 1 } = {}) { 46 | let processing = 0; 47 | 48 | return new Promise((resolve, reject) => { 49 | let finishOnLast = false; 50 | 51 | this.on('data', async (chunk) => { 52 | try { 53 | processing++; 54 | if (processing === concurrency) this.pause(); 55 | await func(chunk); 56 | this.resume(); 57 | processing--; 58 | 59 | if (processing === 0 && finishOnLast) { 60 | setTimeout(() => { 61 | if (processing === 0) resolve(); 62 | }, 0); 63 | } 64 | } 
catch (err) { 65 | this.emit('error', err); 66 | } 67 | }); 68 | this.on('end', () => { 69 | if (processing === 0) resolve(); 70 | else finishOnLast = true; 71 | }); 72 | this.on('error', reject); 73 | }); 74 | } 75 | 76 | async collect() { 77 | return new Promise((resolve, reject) => { 78 | const data = []; 79 | 80 | this.on('data', chunk => data.push(chunk)); 81 | this.on('end', () => resolve(data)); 82 | this.on('error', reject); 83 | }); 84 | } 85 | } 86 | 87 | module.exports = ObjectStreamUtilities; 88 | -------------------------------------------------------------------------------- /src/transformations/Chunk.js: -------------------------------------------------------------------------------- 1 | const { ObjectTransform } = require('../utils'); 2 | 3 | module.exports = class Chunk extends ObjectTransform { 4 | constructor(size) { 5 | super(); 6 | this.size = size; 7 | this.buffer = []; 8 | } 9 | 10 | _final(callback) { 11 | this._emitBuffer(); 12 | callback(); 13 | } 14 | 15 | _emitBuffer() { 16 | this.push(this.buffer); 17 | this.buffer = []; 18 | } 19 | 20 | _transform(chunk, encoding, callback) { 21 | try { 22 | this.buffer.push(chunk); 23 | if (this.buffer.length === this.size) this._emitBuffer(); 24 | callback(); 25 | } catch (err) { 26 | callback(err); 27 | } 28 | } 29 | }; 30 | -------------------------------------------------------------------------------- /src/transformations/Filter.js: -------------------------------------------------------------------------------- 1 | const { ObjectTransform } = require('../utils'); 2 | 3 | module.exports = class Filter extends ObjectTransform { 4 | constructor(func) { 5 | super(); 6 | this.func = func; 7 | } 8 | 9 | _transform(chunk, encoding, callback) { 10 | try { 11 | if (this.func(chunk)) callback(null, chunk); 12 | else callback(); 13 | } catch (err) { 14 | callback(err); 15 | } 16 | } 17 | }; 18 | -------------------------------------------------------------------------------- /src/transformations/Map.js: 
-------------------------------------------------------------------------------- 1 | const { ObjectTransform } = require('../utils'); 2 | 3 | module.exports = class Map extends ObjectTransform { 4 | constructor(func) { 5 | super(); 6 | this.func = func; 7 | } 8 | 9 | _transform(chunk, encoding, callback) { 10 | try { 11 | callback(null, this.func(chunk)); 12 | } catch (err) { 13 | callback(err); 14 | } 15 | } 16 | }; 17 | -------------------------------------------------------------------------------- /src/transformations/Merge.js: -------------------------------------------------------------------------------- 1 | const { ObjectIdentityTransform } = require('../utils'); 2 | 3 | module.exports = class Pluck extends ObjectIdentityTransform { 4 | constructor(sourceStream, mergedStream) { 5 | super(); 6 | 7 | const sourceStreamEndPromise = new Promise((resolve => sourceStream.on('end', resolve))); 8 | const mergedStreamEndPromise = new Promise((resolve => mergedStream.on('end', resolve))); 9 | 10 | mergedStream.pipe(this, { end: false }); 11 | 12 | Promise 13 | .all([ 14 | sourceStreamEndPromise, 15 | mergedStreamEndPromise, 16 | ]) 17 | .then(() => this.end()); 18 | } 19 | }; 20 | -------------------------------------------------------------------------------- /src/transformations/Omit.js: -------------------------------------------------------------------------------- 1 | const _ = require('underscore'); 2 | const { ObjectTransform } = require('../utils'); 3 | 4 | module.exports = class Omit extends ObjectTransform { 5 | constructor(...fields) { 6 | super(); 7 | this.fields = fields; 8 | } 9 | 10 | _transform(chunk, encoding, callback) { 11 | try { 12 | callback(null, _.omit(chunk, ...this.fields)); 13 | } catch (err) { 14 | callback(err); 15 | } 16 | } 17 | }; 18 | -------------------------------------------------------------------------------- /src/transformations/Pick.js: -------------------------------------------------------------------------------- 1 | const _ 
= require('underscore'); 2 | const { ObjectTransform } = require('../utils'); 3 | 4 | module.exports = class Pick extends ObjectTransform { 5 | constructor(...fields) { 6 | super(); 7 | this.fields = fields; 8 | } 9 | 10 | _transform(chunk, encoding, callback) { 11 | try { 12 | callback(null, _.pick(chunk, ...this.fields)); 13 | } catch (err) { 14 | callback(err); 15 | } 16 | } 17 | }; 18 | -------------------------------------------------------------------------------- /src/transformations/Pluck.js: -------------------------------------------------------------------------------- 1 | const { ObjectTransform } = require('../utils'); 2 | 3 | module.exports = class Pluck extends ObjectTransform { 4 | constructor(property) { 5 | super(); 6 | this.property = property; 7 | } 8 | 9 | _transform(chunk, encoding, callback) { 10 | try { 11 | callback(null, chunk[this.property]); 12 | } catch (err) { 13 | callback(err); 14 | } 15 | } 16 | }; 17 | -------------------------------------------------------------------------------- /src/transformations/Uniq.js: -------------------------------------------------------------------------------- 1 | const crypto = require('crypto'); 2 | const { ObjectTransform } = require('../utils'); 3 | 4 | const HASHING_ALGORITHM = 'sha256'; 5 | 6 | const hash = string => crypto 7 | .createHash(HASHING_ALGORITHM) 8 | .update(string) 9 | .digest('base64'); 10 | 11 | const existing = {}; 12 | 13 | module.exports = class Uniq extends ObjectTransform { 14 | constructor(keyField) { 15 | super(); 16 | this.keyField = keyField; 17 | } 18 | 19 | _transform(chunk, encoding, callback) { 20 | try { 21 | const val = chunk[this.keyField]; 22 | const hashed = hash(val ? 
val.toString() : ''); 23 | if (existing[hashed]) return callback(); 24 | 25 | existing[hashed] = true; 26 | callback(null, chunk); 27 | } catch (err) { 28 | callback(err); 29 | } 30 | } 31 | }; 32 | -------------------------------------------------------------------------------- /src/transformations/WeakSort.js: -------------------------------------------------------------------------------- 1 | const { ObjectTransform } = require('../utils'); 2 | 3 | module.exports = class WeakSort extends ObjectTransform { 4 | constructor(sortFunction, bufferMinSize = 75, bufferMaxSize = 100) { 5 | super(); 6 | this.sortFunction = sortFunction; 7 | this.bufferMinSize = bufferMinSize; 8 | this.bufferMaxSize = bufferMaxSize; 9 | this.buffer = []; 10 | } 11 | 12 | _final(callback) { 13 | this._emitBuffer(this.buffer.length); 14 | callback(); 15 | } 16 | 17 | _emitBuffer(count) { 18 | this.buffer.sort(this.sortFunction); 19 | for (let i = 0; i < count; i++) this.push(this.buffer.shift()); 20 | } 21 | 22 | _transform(chunk, encoding, callback) { 23 | try { 24 | this.buffer.push(chunk); 25 | if (this.buffer.length >= this.bufferMaxSize) { 26 | this._emitBuffer(this.bufferMaxSize - this.bufferMinSize); 27 | } 28 | callback(); 29 | } catch (err) { 30 | callback(err); 31 | } 32 | } 33 | }; 34 | -------------------------------------------------------------------------------- /src/utils.js: -------------------------------------------------------------------------------- 1 | const { Transform } = require('stream'); 2 | 3 | exports.ObjectTransform = class ObjectTransform extends Transform { 4 | constructor() { 5 | super({ objectMode: true }); 6 | } 7 | }; 8 | 9 | exports.ObjectIdentityTransform = class ObjectIdentityTransform extends exports.ObjectTransform { 10 | // eslint-disable-next-line 11 | _transform(chunk, encoding, callback) { 12 | callback(null, chunk); 13 | } 14 | }; 15 | -------------------------------------------------------------------------------- /test/_helper.js: 
-------------------------------------------------------------------------------- 1 | const { PassThrough } = require('stream'); 2 | 3 | exports.getReadableStream = (data = [], pauseMillis = 25, initialDelatMillis = 0) => { 4 | const readable = new PassThrough({ objectMode: true }); 5 | 6 | data.forEach((item, index) => { 7 | setTimeout(() => { 8 | readable.push(item); 9 | if (index === data.length - 1) readable.push(null); 10 | }, initialDelatMillis + index * pauseMillis); 11 | }); 12 | 13 | return readable; 14 | }; 15 | 16 | exports.sleepMillis = (millis) => { 17 | return new Promise(resolve => setTimeout(resolve, millis)); 18 | }; 19 | -------------------------------------------------------------------------------- /test/main.js: -------------------------------------------------------------------------------- 1 | const _ = require('underscore'); 2 | const { expect } = require('chai'); 3 | const { Readable, Writable } = require('stream'); 4 | const { getReadableStream, sleepMillis } = require('./_helper'); 5 | const ObjectStreamUtilities = require('../src/main'); 6 | 7 | describe('ObjectStreamUtilities', () => { 8 | it('works', (done) => { 9 | const readable = new Readable({ objectMode: true }); 10 | const mapped = readable 11 | .pipe(new ObjectStreamUtilities()); 12 | 13 | const odd = mapped 14 | .filter(obj => obj.x % 2) 15 | .map(obj => Object.assign({}, obj, { is: 'odd' })); 16 | 17 | const even = mapped 18 | .filter(obj => obj.x % 2 === 0) 19 | .map(obj => Object.assign({}, obj, { is: 'even' })); 20 | 21 | const final = odd.merge(even); 22 | 23 | readable.push({ x: 1 }); 24 | readable.push({ x: 2 }); 25 | readable.push({ x: 3 }); 26 | readable.push({ x: 4 }); 27 | readable.push({ x: 5 }); 28 | readable.push({ x: 6 }); 29 | readable.push({ x: 7 }); 30 | readable.push(null); 31 | 32 | const collected = []; 33 | final.on('data', data => collected.push(data)); 34 | final.on('end', () => { 35 | expect(collected).to.be.eql([ 36 | { x: 1, is: 'odd' }, 37 | { x: 2, 
is: 'even' }, 38 | { x: 3, is: 'odd' }, 39 | { x: 4, is: 'even' }, 40 | { x: 5, is: 'odd' }, 41 | { x: 6, is: 'even' }, 42 | { x: 7, is: 'odd' }, 43 | ]); 44 | done(); 45 | }); 46 | }); 47 | 48 | describe('collect()', () => { 49 | it('works', async () => { 50 | const items = [ 51 | { foo: 'bar', x: 1 }, 52 | { foo: 'hotel', x: 2 }, 53 | { foo: 'restaurant', x: 3 }, 54 | { foo: 'hotel', x: 4 }, 55 | { foo: 'hotel', x: 5 }, 56 | ]; 57 | const readable = getReadableStream(items); 58 | const collected = await readable 59 | .pipe(new ObjectStreamUtilities()) 60 | .collect(); 61 | 62 | expect(collected).to.be.eql(items); 63 | }); 64 | 65 | it('rejects on error', async () => { 66 | const readable = getReadableStream([ 67 | { foo: 'bar', x: 1 }, 68 | { foo: 'hotel', x: 2 }, 69 | { foo: 'restaurant', x: 3 }, 70 | { foo: 'hotel', x: 4 }, 71 | { foo: 'hotel', x: 5 }, 72 | ]); 73 | const stream = readable.pipe(new ObjectStreamUtilities()); 74 | 75 | setTimeout(() => stream.emit('error', new Error('some-message')), 5); 76 | 77 | await stream 78 | .collect() 79 | .then( 80 | () => { throw new Error('This should have failed!'); }, 81 | err => expect(err.message).to.be.eql('some-message'), 82 | ); 83 | }); 84 | }); 85 | 86 | describe('onSeries()', () => { 87 | it('works', async () => { 88 | let current = 0; 89 | const readable = new Readable({ 90 | objectMode: true, 91 | read() { 92 | if (current >= 50) return this.destroy(); 93 | this.push({ index: current++ }); 94 | }, 95 | }); 96 | 97 | const processed = []; 98 | await readable 99 | .pipe(new ObjectStreamUtilities()) 100 | .onSeries(async (chunk) => { 101 | await new Promise(resolve => setTimeout(resolve, 55 - chunk.index)); 102 | processed.push(chunk.index); 103 | }); 104 | expect(processed).to.be.eql(_.range(0, 50)); 105 | }); 106 | 107 | it('works with concurrency > 1', async () => { 108 | let current = 0; 109 | 110 | const startedAt = Date.now(); 111 | const readable = new Readable({ 112 | objectMode: true, 113 | read() { 
114 | if (current >= 50) return this.destroy(); 115 | this.push({ index: current++ }); 116 | }, 117 | }); 118 | 119 | let concurrency = 0; 120 | const concurrencies = []; 121 | 122 | const processed = []; 123 | await readable 124 | .pipe(new ObjectStreamUtilities()) 125 | .onSeries(async (chunk) => { 126 | concurrency++; 127 | await new Promise(resolve => setTimeout(resolve, 55 - chunk.index)); 128 | processed.push(chunk.index); 129 | concurrencies.push(concurrency); 130 | concurrency--; 131 | }, { concurrency: 10 }); 132 | 133 | // Order is wrong. 134 | expect(processed).to.not.be.eql(_.range(0, 50)); 135 | 136 | // But contains all elements. 137 | expect(processed).to.be.have.members(_.range(0, 50)); 138 | 139 | // It was running in parrallel. 140 | expect(Date.now() - startedAt).to.be.below(_.range(55, 5).reduce((sum, num) => sum + num, 0) / 5); 141 | 142 | // It was running at the right concurrency. 143 | const fortyNineTimesTen = _.range(0, 41).map(() => 10); 144 | expect(concurrencies).to.be.eql([...fortyNineTimesTen, 9, 8, 7, 6, 5, 4, 3, 2, 1]); 145 | }); 146 | }); 147 | 148 | it(('should correctly cork and uncork streams (normal transformations)'), async () => { 149 | const items = _.range(0, 1000).map(index => ({ foo: index })); 150 | 151 | let readCount = 0; 152 | const readable = new Readable({ 153 | objectMode: true, 154 | read() { 155 | readCount++; 156 | this.push(items.shift() || null); 157 | }, 158 | }); 159 | const writable = new Writable({ 160 | objectMode: true, 161 | write(chunk, encoding, callback) { 162 | callback(); 163 | }, 164 | }); 165 | 166 | // Pipe stream. 167 | readable 168 | .pipe(new ObjectStreamUtilities()) 169 | .map(item => item) 170 | .pipe(writable); 171 | 172 | // Cork it. 173 | writable.cork(); 174 | 175 | // Wait 1 second. 176 | await sleepMillis(500); 177 | const currentReadCount = readCount; 178 | 179 | // Check that it's not reading any more. 
180 | await sleepMillis(500); 181 | expect(currentReadCount).to.be.above(0); 182 | expect(currentReadCount).to.be.below(1001); 183 | expect(currentReadCount).to.be.eql(readCount); 184 | 185 | // Lets start reading again. 186 | writable.uncork(); 187 | await new Promise((resolve) => { 188 | writable.on('finish', () => { 189 | expect(currentReadCount).to.be.below(readCount); 190 | expect(readCount).to.be.eql(1001); 191 | resolve(); 192 | }); 193 | }); 194 | }); 195 | }); 196 | -------------------------------------------------------------------------------- /test/transformations/Chunk.js: -------------------------------------------------------------------------------- 1 | const { expect } = require('chai'); 2 | const _ = require('underscore'); 3 | const ObjectStreamUtilities = require('../../src/main'); 4 | const { getReadableStream } = require('../_helper'); 5 | 6 | describe('Chunk', () => { 7 | it('works', async () => { 8 | const items = _.range(0, 1000).map(index => ({ index })); 9 | const readable = getReadableStream(items, 1); 10 | 11 | const data = await readable 12 | .pipe(new ObjectStreamUtilities()) 13 | .chunk(248) 14 | .collect(); 15 | 16 | expect(data).to.be.eql(_.chunk(items, 248)); 17 | expect(data).to.have.lengthOf(5); 18 | }); 19 | }); 20 | -------------------------------------------------------------------------------- /test/transformations/Filter.js: -------------------------------------------------------------------------------- 1 | const { expect } = require('chai'); 2 | const ObjectStreamUtilities = require('../../src/main'); 3 | const { getReadableStream } = require('../_helper'); 4 | 5 | describe('Filter', () => { 6 | it('works', async () => { 7 | const readable = getReadableStream([ 8 | { foo: 'bar', x: 1 }, 9 | { foo: 'hotel', x: 2 }, 10 | { foo: 'restaurant', x: 3 }, 11 | { foo: 'hotel', x: 4 }, 12 | { foo: 'hotel', x: 5 }, 13 | ]); 14 | 15 | const data = await readable 16 | .pipe(new ObjectStreamUtilities()) 17 | .filter(chunk => chunk.foo 
=== 'hotel') 18 | .collect(); 19 | 20 | expect(data).to.be.eql([ 21 | { foo: 'hotel', x: 2 }, 22 | { foo: 'hotel', x: 4 }, 23 | { foo: 'hotel', x: 5 }, 24 | ]); 25 | }); 26 | }); 27 | -------------------------------------------------------------------------------- /test/transformations/Map.js: -------------------------------------------------------------------------------- 1 | const { expect } = require('chai'); 2 | const ObjectStreamUtilities = require('../../src/main'); 3 | const { getReadableStream } = require('../_helper'); 4 | 5 | describe('Map', () => { 6 | it('works', async () => { 7 | const items = [ 8 | { foo: 'bar', x: 1 }, 9 | { foo: 'hotel', x: 2 }, 10 | { foo: 'restaurant', x: 3 }, 11 | { foo: 'hotel', x: 4 }, 12 | { foo: 'hotel', x: 5 }, 13 | ]; 14 | const readable = getReadableStream(items, 1); 15 | const mapFunc = chunk => Object.assign({}, chunk, { foo: 'bar' }); 16 | 17 | const data = await readable 18 | .pipe(new ObjectStreamUtilities()) 19 | .map(mapFunc) 20 | .collect(); 21 | 22 | expect(data).to.be.eql(items.map(mapFunc)); 23 | expect(data).to.have.lengthOf(5); 24 | }); 25 | }); 26 | -------------------------------------------------------------------------------- /test/transformations/Merge.js: -------------------------------------------------------------------------------- 1 | const _ = require('underscore'); 2 | const { expect } = require('chai'); 3 | const { Readable, Writable } = require('stream'); 4 | const ObjectStreamUtilities = require('../../src/main'); 5 | const { getReadableStream, sleepMillis } = require('../_helper'); 6 | 7 | describe('Map', () => { 8 | it('works', async () => { 9 | const items1 = [ 10 | { index: '1-1' }, 11 | { index: '1-2' }, 12 | { index: '1-3' }, 13 | { index: '1-4' }, 14 | { index: '1-5' }, 15 | ]; 16 | const items2 = [ 17 | { index: '2-1' }, 18 | { index: '2-2' }, 19 | { index: '2-3' }, 20 | { index: '2-4' }, 21 | { index: '2-5' }, 22 | ]; 23 | 24 | const readable1 = getReadableStream(items1); 25 | const 
readable2 = getReadableStream(items2); 26 | 27 | const data = await readable1 28 | .pipe(new ObjectStreamUtilities()) 29 | .merge(readable2) 30 | .collect(); 31 | 32 | expect(data).to.be.eql([ 33 | { index: '1-1' }, 34 | { index: '2-1' }, 35 | { index: '1-2' }, 36 | { index: '2-2' }, 37 | { index: '1-3' }, 38 | { index: '2-3' }, 39 | { index: '1-4' }, 40 | { index: '2-4' }, 41 | { index: '1-5' }, 42 | { index: '2-5' }, 43 | ]); 44 | }); 45 | 46 | it('should wait for slower merged stream', async () => { 47 | const items1 = [ 48 | { index: '1-1' }, 49 | { index: '1-2' }, 50 | { index: '1-3' }, 51 | { index: '1-4' }, 52 | { index: '1-5' }, 53 | ]; 54 | const items2 = [ 55 | { index: '2-1' }, 56 | { index: '2-2' }, 57 | { index: '2-3' }, 58 | { index: '2-4' }, 59 | { index: '2-5' }, 60 | ]; 61 | 62 | const readable1 = getReadableStream(items1, 1); 63 | const readable2 = getReadableStream(items2, 1, 100); 64 | 65 | const data = await readable1 66 | .pipe(new ObjectStreamUtilities()) 67 | .merge(readable2) 68 | .collect(); 69 | 70 | expect(data).to.be.eql([ 71 | { index: '1-1' }, 72 | { index: '1-2' }, 73 | { index: '1-3' }, 74 | { index: '1-4' }, 75 | { index: '1-5' }, 76 | { index: '2-1' }, 77 | { index: '2-2' }, 78 | { index: '2-3' }, 79 | { index: '2-4' }, 80 | { index: '2-5' }, 81 | ]); 82 | }); 83 | 84 | it('should wait for slower source stream', async () => { 85 | const items1 = [ 86 | { index: '1-1' }, 87 | { index: '1-2' }, 88 | { index: '1-3' }, 89 | { index: '1-4' }, 90 | { index: '1-5' }, 91 | ]; 92 | const items2 = [ 93 | { index: '2-1' }, 94 | { index: '2-2' }, 95 | { index: '2-3' }, 96 | { index: '2-4' }, 97 | { index: '2-5' }, 98 | ]; 99 | 100 | const readable1 = getReadableStream(items1, 1, 100); 101 | const readable2 = getReadableStream(items2, 1); 102 | 103 | const data = await readable1 104 | .pipe(new ObjectStreamUtilities()) 105 | .merge(readable2) 106 | .collect(); 107 | 108 | expect(data).to.be.eql([ 109 | { index: '2-1' }, 110 | { index: '2-2' }, 
111 | { index: '2-3' }, 112 | { index: '2-4' }, 113 | { index: '2-5' }, 114 | { index: '1-1' }, 115 | { index: '1-2' }, 116 | { index: '1-3' }, 117 | { index: '1-4' }, 118 | { index: '1-5' }, 119 | ]); 120 | }); 121 | 122 | it('should pass cork/uncork events to both source and merged streams', async () => { 123 | const items1 = _.range(0, 1000).map(index => ({ index: `1-${index}` })); 124 | const items2 = _.range(0, 1000).map(index => ({ index: `2-${index}` })); 125 | const allItems = _.range(0, 1000).reduce((all, index) => { 126 | all.push({ index: `1-${index}` }); 127 | all.push({ index: `2-${index}` }); 128 | 129 | return all; 130 | }, []); 131 | 132 | let readCount1 = 0; 133 | const readable1 = new Readable({ 134 | objectMode: true, 135 | read() { 136 | readCount1++; 137 | this.push(items1.shift() || null); 138 | }, 139 | }); 140 | let readCount2 = 0; 141 | const readable2 = new Readable({ 142 | objectMode: true, 143 | read() { 144 | readCount2++; 145 | this.push(items2.shift() || null); 146 | }, 147 | }); 148 | const collectedData = []; 149 | const writable = new Writable({ 150 | objectMode: true, 151 | write(chunk, encoding, callback) { 152 | collectedData.push(chunk); 153 | callback(); 154 | }, 155 | }); 156 | 157 | // Pipe stream. 158 | readable1 159 | .pipe(new ObjectStreamUtilities()) 160 | .merge(readable2) 161 | .pipe(writable); 162 | 163 | // Cork it. 164 | writable.cork(); 165 | 166 | // Wait 1 second. 167 | await sleepMillis(500); 168 | const currentReadCount1 = readCount1; 169 | const currentReadCount2 = readCount2; 170 | 171 | // Check that it's not reading any more. 172 | await sleepMillis(500); 173 | expect(currentReadCount1).to.be.above(0); 174 | expect(currentReadCount1).to.be.below(1001); 175 | expect(currentReadCount1).to.be.eql(readCount1); 176 | expect(currentReadCount2).to.be.above(0); 177 | expect(currentReadCount2).to.be.below(1001); 178 | expect(currentReadCount2).to.be.eql(readCount2); 179 | 180 | // Lets start reading again. 
181 | writable.uncork(); 182 | await new Promise((resolve, reject) => { 183 | writable.on('finish', () => { 184 | try { 185 | expect(currentReadCount1).to.be.below(readCount1); 186 | expect(readCount1).to.be.eql(1001); 187 | expect(currentReadCount2).to.be.below(readCount2); 188 | expect(readCount2).to.be.eql(1001); 189 | expect(collectedData).to.have.deep.members(allItems); 190 | resolve(); 191 | } catch (err) { 192 | reject(err); 193 | } 194 | }); 195 | writable.on('error', reject); 196 | }); 197 | }); 198 | }); 199 | -------------------------------------------------------------------------------- /test/transformations/Omit.js: -------------------------------------------------------------------------------- 1 | const { expect } = require('chai'); 2 | const ObjectStreamUtilities = require('../../src/main'); 3 | const { getReadableStream } = require('../_helper'); 4 | 5 | describe('Omit', () => { 6 | it('works', async () => { 7 | const items = [ 8 | { foo: 'bar', x: 1, y: 'xxx' }, 9 | { foo: 'hotel', x: 2, y: 'xxx' }, 10 | { foo: 'restaurant', x: 3, y: 'xxx' }, 11 | { foo: 'hotel', x: 4, y: 'xxx' }, 12 | { foo: 'hotel', x: 5, y: 'xxx' }, 13 | ]; 14 | const readable = getReadableStream(items, 1); 15 | 16 | const data = await readable 17 | .pipe(new ObjectStreamUtilities()) 18 | .omit('foo', 'y') 19 | .collect(); 20 | 21 | expect(data).to.be.eql(items.map(item => ({ x: item.x }))); 22 | }); 23 | }); 24 | -------------------------------------------------------------------------------- /test/transformations/Pick.js: -------------------------------------------------------------------------------- 1 | const { expect } = require('chai'); 2 | const ObjectStreamUtilities = require('../../src/main'); 3 | const { getReadableStream } = require('../_helper'); 4 | 5 | describe('Pick', () => { 6 | it('works', async () => { 7 | const items = [ 8 | { foo: 'bar', x: 1, y: 'xxx' }, 9 | { foo: 'hotel', x: 2, y: 'xxx' }, 10 | { foo: 'restaurant', x: 3, y: 'xxx' }, 11 | { foo: 'hotel', 
x: 4, y: 'xxx' }, 12 | { foo: 'hotel', x: 5, y: 'xxx' }, 13 | ]; 14 | const readable = getReadableStream(items, 1); 15 | 16 | const data = await readable 17 | .pipe(new ObjectStreamUtilities()) 18 | .pick('foo', 'y') 19 | .collect(); 20 | 21 | expect(data).to.be.eql(items.map(item => ({ foo: item.foo, y: item.y }))); 22 | }); 23 | }); 24 | -------------------------------------------------------------------------------- /test/transformations/Pluck.js: -------------------------------------------------------------------------------- 1 | const { expect } = require('chai'); 2 | const ObjectStreamUtilities = require('../../src/main'); 3 | const { getReadableStream } = require('../_helper'); 4 | 5 | describe('Pick', () => { 6 | it('works', async () => { 7 | const items = [ 8 | { foo: 'bar', x: 1, y: { something: 'else', x: 1 } }, 9 | { foo: 'hotel', x: 2, y: { something: 'else', x: 2 } }, 10 | { foo: 'restaurant', x: 3, y: { something: 'else', x: 3 } }, 11 | { foo: 'hotel', x: 4, y: { something: 'else', x: 4 } }, 12 | { foo: 'hotel', x: 5, y: { something: 'else', x: 5 } }, 13 | ]; 14 | const readable = getReadableStream(items, 1); 15 | 16 | const data = await readable 17 | .pipe(new ObjectStreamUtilities()) 18 | .pluck('y') 19 | .collect(); 20 | 21 | expect(data).to.be.eql(items.map(item => item.y)); 22 | }); 23 | }); 24 | -------------------------------------------------------------------------------- /test/transformations/Uniq.js: -------------------------------------------------------------------------------- 1 | const { expect } = require('chai'); 2 | const _ = require('underscore'); 3 | const ObjectStreamUtilities = require('../../src/main'); 4 | const { getReadableStream } = require('../_helper'); 5 | 6 | describe('Uniq', () => { 7 | it('works', async () => { 8 | const items = [ 9 | { foo: 1, x: 0 }, 10 | { foo: 1, x: 1 }, 11 | { foo: 'a', x: 2 }, 12 | { foo: 'b', x: 3 }, 13 | { foo: 'ab', x: 4 }, 14 | { foo: 'a', x: 5 }, 15 | { foo: null, x: 6 }, 16 | { foo: null, 
x: 7 }, 17 | { foo: undefined, x: 8 }, 18 | { x: 9 }, 19 | { foo: '', x: 10 }, 20 | ]; 21 | const readable = getReadableStream(items, 1); 22 | 23 | const data = await readable 24 | .pipe(new ObjectStreamUtilities()) 25 | .uniq('foo') 26 | .collect(); 27 | 28 | expect(_.pluck(data, 'x')).to.be.eql([0, 2, 3, 4, 6]); 29 | }); 30 | }); 31 | -------------------------------------------------------------------------------- /test/transformations/WeakSort.js: -------------------------------------------------------------------------------- 1 | const { expect } = require('chai'); 2 | const _ = require('underscore'); 3 | const ObjectStreamUtilities = require('../../src/main'); 4 | const { getReadableStream } = require('../_helper'); 5 | 6 | describe('WeakSort', () => { 7 | it('works', async () => { 8 | const items = [ 9 | { num: 0 }, 10 | { num: 4 }, 11 | { num: 19 }, 12 | { num: 5 }, 13 | { num: 1 }, 14 | { num: 2 }, 15 | { num: 3 }, 16 | { num: 6 }, 17 | { num: 7 }, 18 | { num: 8 }, 19 | { num: 9 }, 20 | { num: 10 }, 21 | { num: 17 }, 22 | { num: 18 }, 23 | { num: 11 }, 24 | { num: 12 }, 25 | { num: 15 }, 26 | { num: 13 }, 27 | { num: 14 }, 28 | { num: 16 }, 29 | { num: 20 }, 30 | ]; 31 | const readable = getReadableStream(items, 1); 32 | 33 | const data = await readable 34 | .pipe(new ObjectStreamUtilities()) 35 | .weakSort((a, b) => (a.num < b.num ? 
-1 : 1), 5, 9) 36 | .collect(); 37 | 38 | expect(_.pluck(data, 'num')).to.be.eql([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20]); 39 | }); 40 | 41 | it('will not sort perfectly', async () => { 42 | const items = [ 43 | { num: 19 }, 44 | { num: 16 }, 45 | { num: 20 }, 46 | { num: 13 }, 47 | { num: 14 }, 48 | { num: 0 }, 49 | { num: 4 }, 50 | { num: 5 }, 51 | { num: 1 }, 52 | { num: 2 }, 53 | { num: 3 }, 54 | { num: 6 }, 55 | { num: 7 }, 56 | { num: 8 }, 57 | { num: 9 }, 58 | { num: 10 }, 59 | { num: 12 }, 60 | { num: 17 }, 61 | { num: 18 }, 62 | { num: 11 }, 63 | { num: 15 }, 64 | ]; 65 | const readable = getReadableStream(items, 1); 66 | 67 | const data = await readable 68 | .pipe(new ObjectStreamUtilities()) 69 | .weakSort((a, b) => (a.num < b.num ? -1 : 1), 5, 10) 70 | .collect(); 71 | 72 | expect(_.pluck(data, 'num')).to.be.eql([0, 1, 2, 4, 5, 3, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20]); 73 | }); 74 | }); 75 | --------------------------------------------------------------------------------