├── .gitignore ├── .npmignore ├── LICENSE ├── README.md ├── examples ├── async-generator-stream.js ├── async-iteration-stream.js ├── callback-stream.js ├── mock-data.json └── server-example.js ├── index.js └── package.json /.gitignore: -------------------------------------------------------------------------------- 1 | # Logs 2 | logs 3 | *.log 4 | npm-debug.log* 5 | yarn-debug.log* 6 | yarn-error.log* 7 | 8 | # Runtime data 9 | pids 10 | *.pid 11 | *.seed 12 | *.pid.lock 13 | 14 | # Directory for instrumented libs generated by jscoverage/JSCover 15 | lib-cov 16 | 17 | # Coverage directory used by tools like istanbul 18 | coverage 19 | 20 | # nyc test coverage 21 | .nyc_output 22 | 23 | # Grunt intermediate storage (http://gruntjs.com/creating-plugins#storing-task-files) 24 | .grunt 25 | 26 | # Bower dependency directory (https://bower.io/) 27 | bower_components 28 | 29 | # node-waf configuration 30 | .lock-wscript 31 | 32 | # Compiled binary addons (https://nodejs.org/api/addons.html) 33 | build/Release 34 | 35 | # Dependency directories 36 | node_modules/ 37 | jspm_packages/ 38 | 39 | # TypeScript v1 declaration files 40 | typings/ 41 | 42 | # Optional npm cache directory 43 | .npm 44 | 45 | # Optional eslint cache 46 | .eslintcache 47 | 48 | # Optional REPL history 49 | .node_repl_history 50 | 51 | # Output of 'npm pack' 52 | *.tgz 53 | 54 | # Yarn Integrity file 55 | .yarn-integrity 56 | 57 | # dotenv environment variables file 58 | .env 59 | 60 | # next.js build output 61 | .next 62 | 63 | # macos bs 64 | .DS_Store -------------------------------------------------------------------------------- /.npmignore: -------------------------------------------------------------------------------- 1 | examples -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | The MIT License (MIT) 2 | 3 | Copyright (c) 2015 Matthew I. 
Metnetsky 4 | Copyright (c) 2018 Matt Morgis 5 | 6 | Permission is hereby granted, free of charge, to any person obtaining a copy of 7 | this software and associated documentation files (the "Software"), to deal in 8 | the Software without restriction, including without limitation the rights to 9 | use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of 10 | the Software, and to permit persons to whom the Software is furnished to do so, 11 | subject to the following conditions: 12 | 13 | The above copyright notice and this permission notice shall be included in all 14 | copies or substantial portions of the Software. 15 | 16 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 17 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS 18 | FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR 19 | COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER 20 | IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN 21 | CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # async-stream-generator 2 | 3 | Pipe ES6 Async Generators through Node.js [Streams](https://nodejs.org/api/stream.html). 4 | 5 | ## 10 Second Tutorial 6 | 7 | `streamify` is a function that takes an async generator function and when invoked, returns a Readable Stream. 
8 | 9 | ```javascript 10 | const fs = require("fs"); 11 | const streamify = require("async-stream-generator"); 12 | 13 | async function* generator(stream) { 14 | for await (const chunk of stream) { 15 | yield chunk; 16 | } 17 | } 18 | 19 | const main = () => { 20 | const readStream = fs.createReadStream("path-to-data.json"); 21 | streamify(generator(readStream)).pipe(process.stdout); 22 | }; 23 | 24 | main(); 25 | ``` 26 | 27 | ## What are Streams and Why Should I Care? 28 | 29 | I/O in node is asynchronous. The early days of Node.js required interacting with the disk and network by passing callbacks to functions. 30 | 31 | For example, here is code that serves up a file from disk: 32 | 33 | ```javascript 34 | const http = require("http"); 35 | const fs = require("fs"); 36 | 37 | const server = http.createServer((request, response) => { 38 | fs.readFile(__dirname + "/mock-data.json", (error, data) => { 39 | response.end(data); 40 | }); 41 | }); 42 | server.listen(8000); 43 | ``` 44 | 45 | This code works but it buffers up the entire file into memory for every request before writing the result back to clients. If the file is very large, your program could start eating a lot of memory as it serves lots of users concurrently, particularly for users on slow connections. 46 | 47 | The user experience is poor too because users will need to wait for the whole file to be buffered into memory on your server before they can start receiving any content. 48 | 49 | However, both `request` and `response` are **streams**. 50 | 51 | ```javascript 52 | const http = require("http"); 53 | const fs = require("fs"); 54 | 55 | const server = http.createServer((req, res) => { 56 | const stream = fs.createReadStream(__dirname + "/mock-data.json"); 57 | stream.pipe(res); 58 | }); 59 | server.listen(8000); 60 | ``` 61 | 62 | This is where Node.js shines. `.pipe()` will write to clients one chunk at a time immediately as they are received from disk. 
63 | 64 | Using `.pipe()` has other benefits too, like handling backpressure automatically so that node won't buffer chunks into memory needlessly when the remote client is on a really slow or high-latency connection. 65 | 66 | This is very much like what you might do on the command-line to pipe programs together except in node instead of the shell! 67 | 68 | ``` 69 | a | b | c | d 70 | ``` 71 | 72 | Once you learn the stream api, you can just snap together streaming modules like lego bricks instead of having to remember how to push data through non-streaming, custom APIs. 73 | 74 | Streams make programming in node simple, elegant, and composable. 75 | 76 | ## What are Async Iterators and Generators? 77 | 78 | Previously to read the contents of a stream asynchronously, you used callbacks: 79 | 80 | ```javascript 81 | const fs = require("fs"); 82 | 83 | const main = inputFilePath => { 84 | const readStream = fs.createReadStream(inputFilePath, { 85 | encoding: "utf8", 86 | highWaterMark: 256 87 | }); 88 | 89 | readStream.on("data", chunk => { 90 | console.log(">>> " + chunk); 91 | console.log("\n"); 92 | }); 93 | 94 | readStream.on("end", () => { 95 | console.log("### DONE ###"); 96 | }); 97 | }; 98 | 99 | main("./mock-data.json"); 100 | ``` 101 | 102 | As of Node.js v10, you can use [`asynchronous iteration`](https://github.com/tc39/proposal-async-iteration) to read the stream of a file, which enables the `for-await-of` syntax: 103 | 104 | ```javascript 105 | const fs = require("fs"); 106 | 107 | const main = async inputFilePath => { 108 | const readStream = fs.createReadStream(inputFilePath, { 109 | encoding: "utf8", 110 | highWaterMark: 256 111 | }); 112 | 113 | for await (const chunk of readStream) { 114 | console.log(">>> " + chunk); 115 | console.log("\n"); 116 | } 117 | 118 | console.log("### DONE ###"); 119 | }; 120 | 121 | main("./mock-data.json"); 122 | ``` 123 | 124 | Output for both: 125 | 126 | ``` 127 | ... 
128 | 129 | >>> ld":"Indonesia","customer_title":"Honorable"} 130 | {"guid":"bf62800e-b3b1-46f2-a3f2-dc17c66c90a1","car_make":"Ford","car_model":"Bronco II","car_model_year":1986,"car_color":"Pink","car_country_cold":"Philippines","customer_title":"Rev"} 131 | {"guid":"32a2f79b-5a0b- 132 | 133 | 134 | >>> 4072-9ebb-0e3600d0f714","car_make":"Toyota","car_model":"RAV4","car_model_year":2001,"car_color":"Purple","car_country_cold":"China","customer_title":"Mr"} 135 | {"guid":"6d52f031-c7e7-4167-81bc-e2879d6630e2","car_make":"Lexus","car_model":"SC","car_model_year": 136 | 137 | 138 | >>> 1998,"car_color":"Teal","car_country_cold":"Russia","customer_title":"Rev"} 139 | 140 | 141 | 142 | ### DONE ### 143 | ``` 144 | 145 | You can use async generators to process input similar to Unix piping. Generator functions use the `async` and `function*` keywords, consume an async iterator and use `yield` instead of `return`. 146 | 147 | Example of Generator #1, which will process our chunks of data into lines: 148 | 149 | ```javascript 150 | async function* chunksToLines(chunks) { 151 | let previous = ""; 152 | 153 | for await (const chunk of chunks) { 154 | previous += chunk; 155 | let eolIndex; 156 | 157 | while ((eolIndex = previous.indexOf("\n")) >= 0) { 158 | // this line includes the EOL 159 | const line = previous.slice(0, eolIndex + 1); 160 | yield line; 161 | previous = previous.slice(eolIndex + 1); 162 | } 163 | } 164 | 165 | if (previous.length > 0) { 166 | yield previous; 167 | } 168 | } 169 | ``` 170 | 171 | Example of Generator #2, which will number each line 172 | 173 | ```javascript 174 | async function* numberOfLines(lines) { 175 | let counter = 1; 176 | for await (const line of lines) { 177 | yield counter + ": " + line; 178 | counter++; 179 | } 180 | } 181 | ``` 182 | 183 | Now you can snap these generators together using function composition to stream the file to the console line by line. 
184 | 185 | The whole program will read in the file 256 bytes at a time (defined by `highWaterMark`). Break each chunk into lines, number them, print them, and repeat. 186 | 187 | ```javascript 188 | const printAsyncIterable = async numberedLines => { 189 | for await (const line of numberedLines) { 190 | console.log(line); 191 | } 192 | }; 193 | 194 | const main = () => { 195 | const readStream = fs.createReadStream("./mock-data.json", { 196 | encoding: "utf8", 197 | highWaterMark: 256 198 | }); 199 | printAsyncIterable(numberOfLines(chunksToLines(readStream))); 200 | }; 201 | 202 | main(); 203 | ``` 204 | 205 | Output 206 | 207 | ``` 208 | ... 209 | 3999: {"guid":"32a2f79b-5a0b-4072-9ebb-0e3600d0f714","car_make":"Toyota","car_model":"RAV4","car_model_year":2001,"car_color":"Purple","car_country_cold":"China","customer_title":"Mr"} 210 | 211 | 4000: {"guid":"6d52f031-c7e7-4167-81bc-e2879d6630e2","car_make":"Lexus","car_model":"SC","car_model_year":1998,"car_color":"Teal","car_country_cold":"Russia","customer_title":"Rev"} 212 | ``` 213 | 214 | ## Where Async Generators Fall Short 215 | 216 | These new tools are great for _reading_ streams, however, it's still not clear how to `write()` to another stream or create a processing pipeline with `pipe()`. 217 | 218 | This was discussed [here](https://github.com/tc39/proposal-async-iteration/issues/74). 219 | 220 | Enter this module. 221 | 222 | Using the same generators from above, we can `pipe()` the results to a writable stream. 
223 | 224 | ```javascript 225 | const http = require("http"); 226 | const fs = require("fs"); 227 | const streamify = require("async-stream-generator"); 228 | 229 | const server = http.createServer(async (req, res) => { 230 | const readStream = fs.createReadStream("./mock-data.json", { 231 | encoding: "utf8", 232 | highWaterMark: 256 233 | }); 234 | 235 | streamify(numberOfLines(chunksToLines(readStream))).pipe(res); 236 | }); 237 | 238 | server.listen(8000); 239 | ``` 240 | 241 | ## References and Thank Yous 242 | 243 | * All code can be found in the `examples` directory. 244 | 245 | * This was forked from [@mimetnet](https://github.com/mimetnet)'s module [stream-generators](https://github.com/mimetnet/node-stream-generators), which offers the same functionality to synchronous generators. 246 | 247 | * Early stream examples and a deeper dive into streams can be found at [@substack](https://github.com/substack)'s [Stream Handbook](https://github.com/substack/stream-handbook). 248 | 249 | * Async Generator and Iterator examples from [2ality.com](http://2ality.com/2018/04/async-iter-nodejs.html) 250 | 251 | * [TC39 Proposal](https://github.com/tc39/proposal-async-iteration). 252 | 253 | * Node.js Support for [Symbol.asyncIterator](https://github.com/nodejs/readable-stream/issues/254). 254 | 255 | * [Node.js Stream Meeting Notes - Async Iterators](https://github.com/tc39/proposal-async-iteration/issues/74). 256 | 257 | ## License 258 | 259 | [MIT](https://github.com/MattMorgis/async-stream-generator/blob/master/LICENSE) 260 | -------------------------------------------------------------------------------- /examples/async-generator-stream.js: -------------------------------------------------------------------------------- 1 | const fs = require("fs"); 2 | const streamify = require("../index"); 3 | /** 4 | * `function*` defines a generator function. 5 | * You can not define these as an arrow function. 
6 | * 7 | * These use `yield` instead of `return` 8 | * 9 | * More: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Statements/function* 10 | */ 11 | async function* chunksToLines(chunks) { 12 | let previous = ""; 13 | 14 | for await (const chunk of chunks) { 15 | previous += chunk; 16 | let eolIndex; 17 | 18 | while ((eolIndex = previous.indexOf("\n")) >= 0) { 19 | // this line includes the EOL 20 | const line = previous.slice(0, eolIndex + 1); 21 | yield line; 22 | previous = previous.slice(eolIndex + 1); 23 | } 24 | } 25 | 26 | if (previous.length > 0) { 27 | yield previous; 28 | } 29 | } 30 | 31 | async function* numberOfLines(lines) { 32 | let counter = 1; 33 | for await (const line of lines) { 34 | yield counter + ": " + line; 35 | counter++; 36 | } 37 | } 38 | 39 | const printAsyncIterable = async numberedLines => { 40 | for await (const line of numberedLines) { 41 | console.log(line); 42 | } 43 | }; 44 | 45 | const main = () => { 46 | const readStream = fs.createReadStream("./mock-data.json", { 47 | encoding: "utf8", 48 | highWaterMark: 256 49 | }); 50 | printAsyncIterable(numberOfLines(chunksToLines(readStream))); 51 | }; 52 | 53 | main(); 54 | -------------------------------------------------------------------------------- /examples/async-iteration-stream.js: -------------------------------------------------------------------------------- 1 | const fs = require("fs"); 2 | 3 | const main = async inputFilePath => { 4 | const readStream = fs.createReadStream(inputFilePath, { 5 | encoding: "utf8", 6 | highWaterMark: 256 7 | }); 8 | 9 | /** 10 | * Starting w/ Node.js v10, you can use `async iteration` 11 | * to read files asynchronously. It provides the `for-await-of` API 12 | * to iterate over the chunks. 
It is only available inside `async` functions 13 | */ 14 | for await (const chunk of readStream) { 15 | console.log(">>> " + chunk); 16 | console.log("\n"); 17 | } 18 | 19 | console.log("### DONE ###"); 20 | }; 21 | 22 | main("./mock-data.json"); 23 | -------------------------------------------------------------------------------- /examples/callback-stream.js: -------------------------------------------------------------------------------- 1 | const fs = require("fs"); 2 | 3 | const main = inputFilePath => { 4 | const readStream = fs.createReadStream(inputFilePath, { 5 | encoding: "utf8", 6 | highWaterMark: 256 7 | }); 8 | 9 | readStream.on("data", chunk => { 10 | console.log(">>> " + chunk); 11 | console.log("\n"); 12 | }); 13 | 14 | readStream.on("end", () => { 15 | console.log("### DONE ###"); 16 | }); 17 | }; 18 | 19 | main("./mock-data.json"); 20 | -------------------------------------------------------------------------------- /examples/server-example.js: -------------------------------------------------------------------------------- 1 | const http = require("http"); 2 | const fs = require("fs"); 3 | const streamify = require("./node-stream-generators"); 4 | 5 | /** 6 | * `function*` defines a generator function. 7 | * You can not define these as an arrow function. 
8 | * 9 | * These use `yield` instead of `return` 10 | * 11 | * More: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Statements/function* 12 | */ 13 | async function* chunksToLines(asyncChunks) { 14 | let previous = ""; 15 | 16 | for await (const chunk of asyncChunks) { 17 | previous += chunk; 18 | let eolIndex; 19 | 20 | while ((eolIndex = previous.indexOf("\n")) >= 0) { 21 | // this line includes the EOL 22 | const line = previous.slice(0, eolIndex + 1); 23 | yield line; 24 | previous = previous.slice(eolIndex + 1); 25 | } 26 | } 27 | 28 | if (previous.length > 0) { 29 | yield previous; 30 | } 31 | } 32 | 33 | async function* numberOfLines(asyncLines) { 34 | let counter = 1; 35 | for await (const line of asyncLines) { 36 | yield counter + ": " + line; 37 | counter++; 38 | } 39 | } 40 | 41 | const server = http.createServer(async (req, res) => { 42 | const readStream = fs.createReadStream("./mock-data.json", { 43 | encoding: "utf8", 44 | highWaterMark: 256 45 | }); 46 | 47 | streamify(numberOfLines(chunksToLines(readStream))).pipe(res); 48 | }); 49 | 50 | server.listen(8000); 51 | -------------------------------------------------------------------------------- /index.js: -------------------------------------------------------------------------------- 1 | const { Readable } = require("stream"); 2 | 3 | const isAsyncIterator = obj => { 4 | // checks for null and undefined 5 | if (obj == null) { 6 | return false; 7 | } 8 | return typeof obj[Symbol.asyncIterator] === "function"; 9 | }; 10 | 11 | function StreamGenerators(g) { 12 | if (!isAsyncIterator(g)) 13 | throw new TypeError("First argument must be a ES6 Async Generator"); 14 | 15 | Readable.call(this, { objectMode: true }); 16 | this._g = g; 17 | } 18 | 19 | StreamGenerators.prototype = Object.create(Readable.prototype, { 20 | constructor: { value: StreamGenerators } 21 | }); 22 | 23 | StreamGenerators.prototype._read = function(size) { 24 | try { 25 | this._g.next().then(r => { 26 | if (false 
=== r.done) { 27 | this.push(r.value); 28 | } else { 29 | this.push(null); 30 | } 31 | }).catch((e)=> { 32 | this.emit("error", e); 33 | }); 34 | } catch (e) { 35 | this.emit("error", e); 36 | } 37 | }; 38 | 39 | module.exports = list => { 40 | return new StreamGenerators(list); 41 | }; 42 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "async-stream-generator", 3 | "version": "1.0.3", 4 | "description": "Pipe ES6 Async Generators through Node.js Streams", 5 | "main": "index.js", 6 | "scripts": { 7 | "test": "echo \"Error: no test specified\" && exit 1" 8 | }, 9 | "repository": { 10 | "type": "git", 11 | "url": "git+https://github.com/MattMorgis/async-stream-generator.git" 12 | }, 13 | "keywords": ["streams", "async", "generators", "async", "iterators", "pipe"], 14 | "author": "Matt Morgis", 15 | "license": "MIT", 16 | "bugs": { 17 | "url": "https://github.com/MattMorgis/async-stream-generator/issues" 18 | }, 19 | "homepage": "https://github.com/MattMorgis/async-stream-generator#readme" 20 | } 21 | --------------------------------------------------------------------------------