├── .gitattributes ├── .gitignore ├── .npmignore ├── .travis.yml ├── .vscode └── settings.json ├── API.md ├── README.md ├── build.js ├── examples ├── config.json ├── echo._js ├── echo.js ├── oracleMongoPipe._js ├── pi._js ├── pi.js ├── pre._js ├── series._js └── wc._js ├── index.js ├── index.ts ├── package-lock.json ├── package.json ├── src ├── devices │ ├── array.md │ ├── array.ts │ ├── buffer.md │ ├── buffer.ts │ ├── child_process.md │ ├── child_process.ts │ ├── console.md │ ├── console.ts │ ├── file.md │ ├── file.ts │ ├── generic.md │ ├── generic.ts │ ├── http.md │ ├── http.ts │ ├── index.ts │ ├── net.md │ ├── net.ts │ ├── node.md │ ├── node.ts │ ├── queue.md │ ├── queue.ts │ ├── std.md │ ├── std.ts │ ├── string.md │ ├── string.ts │ ├── uturn.md │ └── uturn.ts ├── ez.ts ├── factory.ts ├── helpers │ ├── binary.md │ ├── binary.ts │ └── index.ts ├── index.js ├── mappers │ ├── convert.md │ ├── convert.ts │ ├── index.ts │ ├── json.md │ └── json.ts ├── node-wrappers.md ├── node-wrappers.ts ├── predicate.ts ├── reader.md ├── reader.ts ├── stop-exception.ts ├── transforms │ ├── csv.md │ ├── csv.ts │ ├── cut.md │ ├── cut.ts │ ├── index.ts │ ├── json.md │ ├── json.ts │ ├── lines.md │ ├── lines.ts │ ├── multipart.md │ ├── multipart.ts │ ├── xml.md │ └── xml.ts ├── util.ts ├── writer.md └── writer.ts ├── test ├── benchmarks │ └── bench._js ├── common │ └── predicate-test.ts ├── fixtures │ ├── exit2.cmd │ ├── exit2.sh │ └── rss-sample.xml ├── index.js ├── loader.js └── server │ ├── api-test.ts │ ├── binary-test.ts │ ├── child-process-test.ts │ ├── csv-test.ts │ ├── ez-test.ts │ ├── http-test.ts │ ├── json-test.ts │ ├── multipart-test.ts │ ├── newlines-test.ts │ ├── nodify-test.ts │ ├── queue-test.ts │ ├── stop-test.ts │ └── xml-test.ts └── tsconfig.json /.gitattributes: -------------------------------------------------------------------------------- 1 | # Set default behaviour, in case users don't have core.autocrlf set. 
2 | * text=auto 3 | 4 | # Declare files that will always have LF line endings on checkout. 5 | *.md text eol=lf 6 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | *.testlog 2 | examples/config-custom.* 3 | .DS_Store 4 | test/fixtures/rss-sample.zip 5 | /node_modules/ 6 | /lib/ 7 | /test-callbacks/ 8 | /test-fibers/ 9 | /coverage/ 10 | /out/ 11 | -------------------------------------------------------------------------------- /.npmignore: -------------------------------------------------------------------------------- 1 | *.testlog 2 | examples/config-custom.* 3 | .DS_Store 4 | test/fixtures/rss-sample.zip 5 | /node_modules/ 6 | /out/ 7 | /coverage/ 8 | .vscode 9 | -------------------------------------------------------------------------------- /.travis.yml: -------------------------------------------------------------------------------- 1 | language: node_js 2 | node_js: 3 | - 12 4 | - 10 5 | - 8 6 | sudo: false 7 | env: 8 | - CXX=g++-4.8 9 | addons: 10 | apt: 11 | sources: 12 | - ubuntu-toolchain-r-test 13 | packages: 14 | - g++-4.8 15 | -------------------------------------------------------------------------------- /.vscode/settings.json: -------------------------------------------------------------------------------- 1 | // Placez vos paramètres dans ce fichier pour remplacer les paramètres par défaut et les paramètres utilisateur. 
2 | { 3 | "editor.formatOnSave": true, 4 | "files.exclude": { 5 | "**/node_modules/**": true, 6 | "**/out/**": true, 7 | "**/lib/**": true 8 | } 9 | } -------------------------------------------------------------------------------- /API.md: -------------------------------------------------------------------------------- 1 | # ez-streams 2 | 3 | EZ streams for node.js 4 | 5 | * [ez-streams/src/devices/array](src/devices/array.md) 6 | Array readers and writers 7 | * [ez-streams/src/devices/buffer](src/devices/buffer.md) 8 | In-memory buffer streams 9 | * [ez-streams/src/devices/child_process](src/devices/child_process.md) 10 | EZ Stream wrappers for node child processes 11 | * [ez-streams/src/devices/console](src/devices/console.md) 12 | Console EZ streams 13 | * [ez-streams/src/devices/file](src/devices/file.md) 14 | File based EZ streams 15 | * [ez-streams/src/devices/generic](src/devices/generic.md) 16 | Generic stream constructors 17 | * [ez-streams/src/devices/http](src/devices/http.md) 18 | HTTP EZ Streams 19 | * [ez-streams/src/devices/net](src/devices/net.md) 20 | TCP and socket EZ Streams 21 | * [ez-streams/src/devices/node](src/devices/node.md) 22 | EZ Stream wrappers for native node streams 23 | * [ez-streams/src/devices/queue](src/devices/queue.md) 24 | Queue device 25 | * [ez-streams/src/devices/std](src/devices/std.md) 26 | EZ wrappers for standard I/O streams 27 | * [ez-streams/src/devices/string](src/devices/string.md) 28 | In-memory string streams 29 | * [ez-streams/src/devices/uturn](src/devices/uturn.md) 30 | Special device that transforms a writer into a reader 31 | * [ez-streams/src/helpers/binary](src/helpers/binary.md) 32 | helpers for binary streams 33 | * [ez-streams/src/mappers/convert](src/mappers/convert.md) 34 | Encoding mappers 35 | * [ez-streams/src/mappers/json](src/mappers/json.md) 36 | JSON mappers 37 | * [ez-streams/src/node-wrappers](src/node-wrappers.md) 38 | Wrappers for node.js streams 39 | * 
[ez-streams/src/reader](src/reader.md) 40 | EZ Streams core reader API 41 | * [ez-streams/src/transforms/csv](src/transforms/csv.md) 42 | Stream transform for CSV files 43 | * [ez-streams/src/transforms/cut](src/transforms/cut.md) 44 | Transform to cut string and binary streams 45 | * [ez-streams/src/transforms/json](src/transforms/json.md) 46 | "Simple" JSON streams 47 | * [ez-streams/src/transforms/lines](src/transforms/lines.md) 48 | Stream transform for line-oriented text streams 49 | * [ez-streams/src/transforms/multipart](src/transforms/multipart.md) 50 | Stream transform for MIME multipart 51 | * [ez-streams/src/transforms/xml](src/transforms/xml.md) 52 | Simple XML parser and formatter 53 | * [ez-streams/src/writer](src/writer.md) 54 | EZ Streams core writer API 55 | -------------------------------------------------------------------------------- /build.js: -------------------------------------------------------------------------------- 1 | "use strict"; 2 | 3 | // This script rebuilds the lib/builtins-*.js files 4 | var fs = require('fs'); 5 | var fsp = require('path'); 6 | var helpers = require('streamline-helpers'); 7 | function options(runtime, isTest) { 8 | return { 9 | plugins: ['transform-flow-comments', 'transform-class-properties', 'streamline'], 10 | runtime: runtime, 11 | isTest: isTest, 12 | }; 13 | } 14 | 15 | ['callbacks', 'fibers', 'generators'].forEach(function(runtime) { 16 | helpers.compileSync(fsp.join(__dirname, 'src'), fsp.join(__dirname, 'lib', runtime), options(runtime)); 17 | }); 18 | ['callbacks', 'fibers'].forEach(function(runtime) { 19 | helpers.compileSync(fsp.join(__dirname, 'test'), fsp.join(__dirname, 'test-' + runtime), options(runtime, true)); 20 | }); 21 | 22 | /*helpers.compileTypescript({ 23 | name: 'ez-streams', 24 | root: __dirname, 25 | main: 'out/ez.d.ts', 26 | dts: 'ez-streams.d.ts', 27 | });*/ -------------------------------------------------------------------------------- /examples/config.json: 
-------------------------------------------------------------------------------- 1 | { 2 | "oracle": { 3 | "hostname": "localhost", 4 | "database": "ORCL", 5 | "user": "oracle", 6 | "password": "oracle", 7 | "table": "test", 8 | "writable": false 9 | }, 10 | "mongodb": { 11 | "host": "localhost", 12 | "port": "27017", 13 | "database": "example", 14 | "collection": "test", 15 | "writable": true 16 | } 17 | } -------------------------------------------------------------------------------- /examples/echo._js: -------------------------------------------------------------------------------- 1 | "use strict"; 2 | 3 | var std = require('ez-streams').devices.std; 4 | 5 | std.in('utf8').map(function(_, line) { 6 | switch (process.argv[2]) { 7 | case '-u': return line.toUpperCase(); 8 | case '-l': return line.toLowerCase(); 9 | default: return line; 10 | } 11 | }).pipe(_, std.out('utf8')) -------------------------------------------------------------------------------- /examples/echo.js: -------------------------------------------------------------------------------- 1 | "use strict"; 2 | 3 | var std = require('ez-streams').devices.std; 4 | 5 | std.in('utf8').map(function(cb, line) { 6 | switch (process.argv[2]) { 7 | case '-u': return cb(null, line.toUpperCase()); 8 | case '-l': return cb(null, line.toLowerCase()); 9 | default: return cb(null, line); 10 | } 11 | }).pipe(function(err) { if (err) throw err; }, std.out('utf8')) -------------------------------------------------------------------------------- /examples/oracleMongoPipe._js: -------------------------------------------------------------------------------- 1 | try { 2 | var oracle = require('oracle'); 3 | } catch (ex) { 4 | console.log("this example requires node-oracle driver, install it with `npm install oracle`"); 5 | } 6 | 7 | try { 8 | var mongodb = require('mongodb'); 9 | } catch (ex) { 10 | console.log("this example requires mongodb driver, install it with `npm install mongodb`"); 11 | } 12 | 13 | try { 14 | 
var config = require("./config-custom"); 15 | } catch (ex) { 16 | console.log("WARNING: you did not create config-custom.json, running with the default config.json") 17 | config = require("./config"); 18 | } 19 | 20 | var ez = require('ez-streams'); 21 | 22 | var oracleConn = oracle.connect(config.oracle, _); 23 | oracleConn.setPrefetchRowCount(config.oracle.prefetchRowCount || 50); 24 | 25 | var mongodbConn = new mongodb.Db(config.mongodb.database, // 26 | new mongodb.Server(config.mongodb.host, config.mongodb.port, {}), { 27 | w: 1, 28 | }).open(_); 29 | 30 | if (config.mongodb.writable) { 31 | mongodbConn.collection(config.mongodb.collection, _).remove({}, _); 32 | var count = 0, 33 | t0 = Date.now(); 34 | ez.devices.oracle.reader(oracleConn.reader('select * from "' + config.oracle.table + '"', [])) // 35 | .map(function(_, obj) { 36 | count++; 37 | return obj; 38 | }).pipe(_, ez.devices.mongodb.writer(mongodbConn.collection(config.mongodb.collection, _))); 39 | console.log(count + " records transferred from oracle to mongodb in " + (Date.now() - t0) + " millis"); 40 | } else { 41 | console.log("skipping oracle -> mongodb transfer"); 42 | } 43 | 44 | if (config.oracle.writable) { 45 | oracleConn.execute('delete from "' + config.oracle.table + '"', [], _); 46 | var count = 0, 47 | t0 = Date.now(); 48 | ez.devices.mongodb.reader(mongodbConn.collection(config.mongodb.collection, _).find()) // 49 | .map(function(_, obj) { 50 | count++; 51 | delete obj._id; 52 | return obj; 53 | }).pipe(_, ez.devices.oracle.writer(oracleConn, config.oracle.table)); 54 | console.log(count + " records transferred from mongodb to oracle in " + (Date.now() - t0) + " millis"); 55 | } else { 56 | console.log("skipping mongodb -> oracle transfer"); 57 | } 58 | mongodbConn.close(); -------------------------------------------------------------------------------- /examples/pi._js: -------------------------------------------------------------------------------- 1 | "use strict"; 2 | var ez = 
require("ez-streams"); 3 | 4 | var numberReader = function(n) { 5 | var i = 0; 6 | return ez.devices.generic.reader(function read(_) { 7 | if (i < n) return i++; 8 | else return undefined; 9 | }); 10 | }; 11 | 12 | console.log("pi~=" + 4 * numberReader(10000).filter(function(_, n) { 13 | return n % 2; // keep only odd numbers 14 | }).map(function(_, n) { 15 | return n % 4 === 1 ? 1 / n : -1 / n; 16 | }).reduce(_, function(_, res, val) { 17 | return res + val; 18 | }, 0)); 19 | -------------------------------------------------------------------------------- /examples/pi.js: -------------------------------------------------------------------------------- 1 | "use strict"; 2 | var ez = require('ez-streams'); 3 | 4 | var numberReader = function(n) { 5 | var i = 0; 6 | return ez.devices.generic.reader(function read(cb) { 7 | if (i < n) cb(null, i++); 8 | else cb(); 9 | }); 10 | }; 11 | 12 | numberReader(10000).filter(function(cb, n) { 13 | cb(null, n % 2); 14 | }).map(function(cb, n) { 15 | cb(null, n % 4 === 1 ? 
1 / n : -1 / n); 16 | }).reduce(function(err, result) { 17 | if (err) throw err; 18 | console.log("pi~=" + 4 * result); 19 | }, function(cb, res, val) { 20 | cb(null, res + val); 21 | }, 0); -------------------------------------------------------------------------------- /examples/pre._js: -------------------------------------------------------------------------------- 1 | "use strict"; 2 | 3 | var std = require('ez-streams').devices.std; 4 | 5 | function upper(_, str) { 6 | return str.toUpperCase(); 7 | } 8 | 9 | var out = std.out('utf8').pre.map(upper); 10 | 11 | std.in('utf8').pipe(_, out); -------------------------------------------------------------------------------- /examples/series._js: -------------------------------------------------------------------------------- 1 | "use strict"; 2 | 3 | var ez = require('ez-streams'); 4 | var output = ez.devices.console.log; 5 | 6 | function numbers() { 7 | var i = 0; 8 | return ez.devices.generic.reader(function read(_) { 9 | return i++; 10 | }); 11 | } 12 | 13 | function wait(_, val) { 14 | setTimeout(_, 1000); 15 | return val; 16 | } 17 | 18 | function pow(n) { 19 | return function(_, val) { 20 | return Math.pow(val, n); 21 | } 22 | } 23 | 24 | function minJoiner(_, values) { 25 | var min = Math.min.apply(null, values); 26 | values.forEach(function(val, i) { 27 | if (val == min) values[i] = undefined; 28 | }); 29 | return min; 30 | } 31 | 32 | //numbers().map(pow(2)).join(numbers().map(pow(3)).limit(4)).rr().map(wait).limit(20).pipe(_, output); 33 | 34 | /*numbers().fork([ 35 | function(source) { return source.map(pow(2)).limit(4); }, 36 | function(source) { return source.map(pow(3)); }, 37 | ]).rr().map(wait).limit(30).pipe(_, output);*/ 38 | 39 | numbers().parallelize(5, function(source) { 40 | return source.map(pow(2)).map(wait); 41 | }).limit(30).pipe(_, output); -------------------------------------------------------------------------------- /examples/wc._js: 
-------------------------------------------------------------------------------- 1 | "use strict"; 2 | 3 | var fs = require('fs'); 4 | var path = require('path'); 5 | var ez = require('ez-streams'); 6 | var lines = ez.transforms.lines; 7 | var file = ez.devices.file; 8 | 9 | var root = process.argv[2] || '.'; 10 | 11 | fs.readdir(root, _).filter_(_, function(_, name) { 12 | return fs.stat(root + '/' + name, _).isFile(); 13 | }).forEach_(_, function(_, name) { 14 | var res = file.text.reader(root + '/' + name).transform(lines.parser()).reduce(_, function(_, r, line) { 15 | r.lines++; 16 | r.words += line.split(/\S+/).length - 1; 17 | r.chars += line.length + 1; 18 | return r; 19 | }, { 20 | lines: 0, 21 | words: 0, 22 | chars: 0, 23 | }); 24 | console.log('\t' + (res.lines - 1) + '\t' + res.words + '\t' + (res.chars - 1) + '\t' + name); 25 | }); -------------------------------------------------------------------------------- /index.js: -------------------------------------------------------------------------------- 1 | "use strict"; 2 | var fs = require('fs'); 3 | var fsp = require('path'); 4 | if (fs.existsSync(fsp.join(__dirname, 'lib'))) { 5 | module.exports = require('./lib/' + require('streamline-runtime').runtime); 6 | } else { 7 | module.exports = require('./src'); 8 | } -------------------------------------------------------------------------------- /index.ts: -------------------------------------------------------------------------------- 1 | import {} from 'streamline-node'; 2 | // this one is just a stub to get imports working in tests. 
3 | import { devices, helpers, mappers, transforms, factory, predicate, reader, writer, 4 | Reader, Writer, CompareOptions, ParallelOptions } from './src/ez'; 5 | export { devices, helpers, mappers, transforms, factory, predicate, reader, writer, 6 | Reader, Writer, CompareOptions, ParallelOptions }; -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "ez-streams", 3 | "description": "EZ streams for node.js", 4 | "version": "4.0.1", 5 | "license": "MIT", 6 | "author": { 7 | "name": "Bruno Jouhier" 8 | }, 9 | "repository": { 10 | "type": "git", 11 | "url": "git://github.com/Sage/ez-streams.git" 12 | }, 13 | "engines": { 14 | "node": ">=10.0.0" 15 | }, 16 | "dependencies": { 17 | "@types/node": "^12.12.12", 18 | "streamline-node": "^3.1.2", 19 | "streamline-runtime": "^2.0.0" 20 | }, 21 | "devDependencies": { 22 | "babel-core": "^6.26.3", 23 | "babel-plugin-streamline": "^2.0.26", 24 | "babel-plugin-transform-class-properties": "^6.24.1", 25 | "babel-plugin-transform-flow-comments": "^6.22.0", 26 | "babel-preset-es2015": "^6.24.1", 27 | "dts-bundle": "^0.7.3", 28 | "retyped-qunit-tsd-ambient": "^1.16.0-0", 29 | "streamline": "^3.0.0", 30 | "streamline-helpers": "^3.0.0", 31 | "typescript": "^3.7.2" 32 | }, 33 | "scripts": { 34 | "prepare": "node build.js || nodejs build.js", 35 | "test": "node test-callbacks && node test-fibers" 36 | }, 37 | "main": "index.js", 38 | "readmeFilename": "README.md", 39 | "bugs": { 40 | "url": "https://github.com/Sage/ez-streams/issues" 41 | }, 42 | "homepage": "https://github.com/Sage/ez-streams#readme" 43 | } 44 | -------------------------------------------------------------------------------- /src/devices/array.md: -------------------------------------------------------------------------------- 1 | ## Array readers and writers 2 | 3 | `import * as ez from 'ez-streams'` 4 | 5 | * `reader = 
ez.devices.array.reader(array, options)` 6 | creates an EZ reader that reads its entries from `array`. 7 | `reader.read(_)` will return its entries asynchronously by default. 8 | You can force synchronous delivery by setting `options.sync` to `true`. 9 | * `writer = ez.devices.array.writer(options)` 10 | creates an EZ writer that collects its entries into an array. 11 | `writer.write(_, value)` will write asynchronously by default. 12 | You can force synchronous write by setting `options.sync` to `true`. 13 | `writer.toArray()` returns the internal array into which the 14 | entries have been collected. 15 | -------------------------------------------------------------------------------- /src/devices/array.ts: -------------------------------------------------------------------------------- 1 | import { _ } from 'streamline-runtime'; 2 | import { Reader } from '../reader'; 3 | import { Writer } from '../writer'; 4 | import { nextTick } from '../util'; 5 | 6 | export interface Options { 7 | sync?: boolean; 8 | } 9 | 10 | export class ArrayWriter extends Writer { 11 | values: T[]; 12 | constructor(options: Options) { 13 | super((_: _, value: T) => { 14 | if (!options.sync) nextTick(_); 15 | if (value !== undefined) this.values.push(value); 16 | return this; 17 | }); 18 | this.values = []; 19 | } 20 | toArray(): T[] { 21 | return this.values; 22 | } 23 | get result(): T[] { 24 | return this.values; 25 | } 26 | } 27 | 28 | /// !doc 29 | /// ## Array readers and writers 30 | /// 31 | /// `import * as ez from 'ez-streams'` 32 | /// 33 | /// * `reader = ez.devices.array.reader(array, options)` 34 | /// creates an EZ reader that reads its entries from `array`. 35 | /// `reader.read(_)` will return its entries asynchronously by default. 36 | /// You can force synchronous delivery by setting `options.sync` to `true`. 
37 | export function reader(array: T[], options?: Options) { 38 | var opts = options || {}; 39 | const values = array.slice(0); 40 | return new Reader(function (_) { 41 | if (!opts.sync) nextTick(_); 42 | return values.shift(); 43 | }); 44 | } 45 | 46 | /// * `writer = ez.devices.array.writer(options)` 47 | /// creates an EZ writer that collects its entries into an array. 48 | /// `writer.write(_, value)` will write asynchronously by default. 49 | /// You can force synchronous write by setting `options.sync` to `true`. 50 | /// `writer.toArray()` returns the internal array into which the 51 | /// entries have been collected. 52 | export function writer(options?: Options) { 53 | var opts = options || {}; 54 | return new ArrayWriter(opts); 55 | }; -------------------------------------------------------------------------------- /src/devices/buffer.md: -------------------------------------------------------------------------------- 1 | ## In-memory buffer streams 2 | 3 | `import * as ez from 'ez-streams'` 4 | 5 | * `reader = ez.devices.buffer.reader(buffer, options)` 6 | creates an EZ reader that reads its entries from `buffer`. 7 | `reader.read(_)` will return its entries asynchronously by default. 8 | You can force synchronous delivery by setting `options.sync` to `true`. 9 | The default chunk size is 1024. You can override it by passing 10 | a `chunkSize` option. 11 | * `writer = ez.devices.buffer.writer(options)` 12 | creates an EZ writer that collects data into an buffer. 13 | `writer.write(_, data)` will write asynchronously by default. 14 | You can force synchronous write by setting `options.sync` to `true`. 15 | `writer.toBuffer()` returns the internal buffer into which the 16 | chunks have been collected. 
17 | -------------------------------------------------------------------------------- /src/devices/buffer.ts: -------------------------------------------------------------------------------- 1 | import { _ } from 'streamline-runtime'; 2 | import { Reader } from '../reader'; 3 | import { Writer } from '../writer'; 4 | import { nextTick } from '../util'; 5 | 6 | export interface Options { 7 | sync?: boolean; 8 | chunkSize?: number | (() => number); 9 | } 10 | 11 | export class BufferWriter extends Writer { 12 | chunks: Buffer[]; 13 | constructor(options: Options) { 14 | super((_: _, data: Buffer) => { 15 | if (!options.sync) nextTick(_); 16 | if (data !== undefined) this.chunks.push(data); 17 | return this; 18 | }); 19 | this.chunks = []; 20 | } 21 | toBuffer() { 22 | return Buffer.concat(this.chunks); 23 | } 24 | get result() { 25 | return this.toBuffer(); 26 | } 27 | } 28 | 29 | 30 | /// !doc 31 | /// ## In-memory buffer streams 32 | /// 33 | /// `import * as ez from 'ez-streams'` 34 | /// 35 | /// * `reader = ez.devices.buffer.reader(buffer, options)` 36 | /// creates an EZ reader that reads its entries from `buffer`. 37 | /// `reader.read(_)` will return its entries asynchronously by default. 38 | /// You can force synchronous delivery by setting `options.sync` to `true`. 39 | /// The default chunk size is 1024. You can override it by passing 40 | /// a `chunkSize` option. 41 | export function reader(buffer: Buffer, options?: Options | number) { 42 | var opts: Options; 43 | if (typeof options === "number") { 44 | opts = { 45 | chunkSize: options 46 | }; 47 | } 48 | else opts = options || {}; 49 | const chunkSize = opts.chunkSize || 1024; 50 | var pos = 0; 51 | return new Reader(function read(_) { 52 | if (!opts.sync) nextTick(_); 53 | if (pos >= buffer.length) return; 54 | const len = typeof chunkSize === "function" ? 
chunkSize() : chunkSize; 55 | const s = buffer.slice(pos, pos + len); 56 | pos += len; 57 | return s; 58 | }); 59 | } 60 | /// * `writer = ez.devices.buffer.writer(options)` 61 | /// creates an EZ writer that collects data into an buffer. 62 | /// `writer.write(_, data)` will write asynchronously by default. 63 | /// You can force synchronous write by setting `options.sync` to `true`. 64 | /// `writer.toBuffer()` returns the internal buffer into which the 65 | /// chunks have been collected. 66 | export function writer(options?: Options) { 67 | return new BufferWriter(options || {}); 68 | } 69 | -------------------------------------------------------------------------------- /src/devices/child_process.md: -------------------------------------------------------------------------------- 1 | ## EZ Stream wrappers for node child processes 2 | 3 | `import * as ez from 'ez-streams'` 4 | 5 | * `reader = ez.devices.child_process.reader(proc, options)` 6 | wraps a node.js child process as an EZ reader. 7 | For a full description of the options, see `ReadableStream` in 8 | https://github.com/Sage/ez-streams/blob/master/lib/node-wrappers.md 9 | * `writer = ez.devices.child_process.writer(proc, options)` 10 | wraps a node.js child process as an EZ writer. 
11 | For a full description of the options, see `WritableStream` in 12 | https://github.com/Sage/ez-streams/blob/master/lib/node-wrappers.md 13 | -------------------------------------------------------------------------------- /src/devices/child_process.ts: -------------------------------------------------------------------------------- 1 | /// !doc 2 | /// ## EZ Stream wrappers for node child processes 3 | /// 4 | /// `import * as ez from 'ez-streams'` 5 | /// 6 | import { _ } from 'streamline-runtime'; 7 | import { Reader } from '../reader'; 8 | import { Writer } from '../writer'; 9 | import * as generic from './generic'; 10 | import { parser as linesParser } from '../transforms/lines'; 11 | import * as node from './node'; 12 | import { stringify } from '../mappers/convert'; 13 | import { ChildProcess } from 'child_process'; 14 | 15 | /// * `reader = ez.devices.child_process.reader(proc, options)` 16 | /// wraps a node.js child process as an EZ reader. 17 | /// For a full description of the options, see `ReadableStream` in 18 | /// https://github.com/Sage/ez-streams/blob/master/lib/node-wrappers.md 19 | export interface ReaderOptions { 20 | acceptCode?: (code: number) => boolean; 21 | encoding?: string; 22 | dataHandler?: (reader: Reader) => Reader; 23 | errorHandler?: (reader: Reader) => Reader; 24 | errorPrefix?: string; 25 | errorThrow?: boolean; 26 | } 27 | 28 | export function reader(proc: ChildProcess, options?: ReaderOptions) { 29 | var opts = options || {}; 30 | var err: NodeJS.ErrnoException, closeCb: ((err: Error) => void) | null, closed: boolean; 31 | proc.on('close', (ec: number) => { 32 | closed = true; 33 | if (ec === -1) { 34 | proc.stdout.emit('end'); 35 | proc.stderr.emit('end'); 36 | } 37 | if (ec && !(opts.acceptCode && opts.acceptCode(ec))) { 38 | err = new Error("process exited with code:" + ec); 39 | err.errno = ec; 40 | // compat code 41 | var anyErr: any = err; 42 | anyErr.code = ec; 43 | } 44 | if (closeCb) 45 | closeCb(err); 46 | closeCb 
= null; 47 | }); 48 | proc.on('error', (e: NodeJS.ErrnoException) => { 49 | err = err || e; 50 | }); 51 | var stdout: Reader = node.reader(proc.stdout, opts); 52 | var stderr: Reader = node.reader(proc.stderr, opts); 53 | // node does not send close event if we remove all listeners on stdin and stdout 54 | // so we disable the stop methods and we call stop explicitly after the close. 55 | const stops = [stdout.stop.bind(stdout), stderr.stop.bind(stderr)]; 56 | stdout.stop = stderr.stop = (_: _) => { }; 57 | function stopStreams(_: _, arg?: any) { 58 | stops.forEach_(_, (_, stop) => { 59 | stop(_, arg); 60 | }); 61 | } 62 | if (opts.encoding !== 'buffer') { 63 | stdout = stdout.map(stringify()).transform(linesParser()); 64 | stderr = stderr.map(stringify()).transform(linesParser()); 65 | } 66 | if (opts.dataHandler) stdout = opts.dataHandler(stdout); 67 | if (opts.errorHandler) stderr = opts.errorHandler(stderr); 68 | if (opts.errorPrefix || opts.errorThrow) stderr = stderr.map(function (_, data) { 69 | if (opts.errorThrow) throw new Error((opts.errorPrefix || "") + data); 70 | return opts.errorPrefix + data; 71 | }); 72 | const rd = stdout.join(stderr); 73 | return generic.reader(function read(_) { 74 | if (err) throw err; 75 | const data = rd.read(_); 76 | if (data !== undefined) return data; 77 | // reached end of stream - worry about close event now. 78 | if (closed) { 79 | // already got close event 80 | if (err) throw err; 81 | } else { 82 | // wait for the close event 83 | _.cast(function (cb) { 84 | closeCb = cb; 85 | })(_); 86 | stopStreams(_); 87 | } 88 | return undefined; 89 | }, stopStreams); 90 | } 91 | /// * `writer = ez.devices.child_process.writer(proc, options)` 92 | /// wraps a node.js child process as an EZ writer. 
93 | /// For a full description of the options, see `WritableStream` in 94 | /// https://github.com/Sage/ez-streams/blob/master/lib/node-wrappers.md 95 | 96 | export function writer(proc: ChildProcess, options: node.NodeWriterOptions) { 97 | return node.writer(proc.stdin, options); 98 | } 99 | -------------------------------------------------------------------------------- /src/devices/console.md: -------------------------------------------------------------------------------- 1 | ## Console EZ streams 2 | 3 | `import * as ez from 'ez-streams'` 4 | 5 | * `ez.devices.console.log` 6 | * `ez.devices.console.info` 7 | * `ez.devices.console.warn` 8 | * `ez.devices.console.errors` 9 | EZ writers for console 10 | -------------------------------------------------------------------------------- /src/devices/console.ts: -------------------------------------------------------------------------------- 1 | import { _ } from "streamline-runtime"; 2 | import { Writer } from "../writer"; 3 | import * as generic from './generic'; 4 | 5 | function consoleWriter(fn: (message: string) => void) { 6 | return generic.writer(function (this: Writer, _: _, value: any) { 7 | if (value !== undefined) fn(value); 8 | return this; 9 | }); 10 | } 11 | 12 | /// !doc 13 | /// ## Console EZ streams 14 | /// 15 | /// `import * as ez from 'ez-streams'` 16 | /// 17 | /// * `ez.devices.console.log` 18 | /// * `ez.devices.console.info` 19 | /// * `ez.devices.console.warn` 20 | /// * `ez.devices.console.errors` 21 | /// EZ writers for console 22 | export const log: Writer = consoleWriter(console.log); 23 | export const info: Writer = consoleWriter(console.info); 24 | export const warn: Writer = consoleWriter(console.warn); 25 | export const error: Writer = consoleWriter(console.error); 26 | -------------------------------------------------------------------------------- /src/devices/file.md: -------------------------------------------------------------------------------- 1 | ## File based EZ streams 2 | 3 
| `import * as ez from 'ez-streams'` 4 | 5 | * `reader = ez.devices.file.text.reader(path, encoding)` 6 | creates an EZ reader that reads from a text file. 7 | `encoding` is optional. It defaults to `'utf8'`. 8 | * `writer = ez.devices.file.text.writer(path, encoding)` 9 | creates an EZ writer that writes to a text file. 10 | `encoding` is optional. It defaults to `'utf8'`. 11 | * `reader = ez.devices.file.binary.reader(path)` 12 | creates an EZ reader that reads from a binary file. 13 | * `writer = ez.devices.file.binary.writer(path)` 14 | creates an EZ writer that writes to a binary file. 15 | * `reader = ez.devices.file.list(path, options)` 16 | `reader = ez.devices.file.list(path, recurse, accept)` 17 | creates a reader that enumerates (recursively) directories and files. 18 | Returns the entries as `{ path: path, name: name, depth: depth, stat: stat }` objects. 19 | Two `options` may be specified: `recurse` and `accept`. 20 | If `recurse` is falsy, only the entries immediately under `path` are returned. 21 | If `recurse` is truthy, entries at all levels (including the root entry) are returned. 22 | If `recurse` is `"postorder"`, directories are returned after their children. 23 | `accept` is an optional function which will be called as `accept(_, entry)` and 24 | will control whether files or subdirectories will be included in the stream or not. 
25 | -------------------------------------------------------------------------------- /src/devices/file.ts: -------------------------------------------------------------------------------- 1 | import * as fs from 'fs'; 2 | import { _ } from 'streamline-runtime'; 3 | import * as node from './node'; 4 | import * as generic from './generic'; 5 | import { Reader } from '../reader'; 6 | import { Writer } from '../writer'; 7 | 8 | /// !doc 9 | /// ## File based EZ streams 10 | /// 11 | /// `import * as ez from 'ez-streams'` 12 | /// 13 | export const text = { 14 | /// * `reader = ez.devices.file.text.reader(path, encoding)` 15 | /// creates an EZ reader that reads from a text file. 16 | /// `encoding` is optional. It defaults to `'utf8'`. 17 | reader(path: string, encoding?: string): Reader { 18 | return node.reader(fs.createReadStream(path, { 19 | encoding: encoding || 'utf8' 20 | })); 21 | }, 22 | /// * `writer = ez.devices.file.text.writer(path, encoding)` 23 | /// creates an EZ writer that writes to a text file. 24 | /// `encoding` is optional. It defaults to `'utf8'`. 25 | writer(path: string, encoding?: string): Writer { 26 | return node.writer(fs.createWriteStream(path, { 27 | encoding: encoding || 'utf8' 28 | })); 29 | } 30 | } 31 | 32 | export const binary = { 33 | /// * `reader = ez.devices.file.binary.reader(path)` 34 | /// creates an EZ reader that reads from a binary file. 35 | reader(path: string): Reader { 36 | return node.reader(fs.createReadStream(path)); 37 | }, 38 | /// * `writer = ez.devices.file.binary.writer(path)` 39 | /// creates an EZ writer that writes to a binary file. 40 | writer(path: string): Writer { 41 | return node.writer(fs.createWriteStream(path)); 42 | } 43 | } 44 | 45 | /// * `reader = ez.devices.file.list(path, options)` 46 | /// `reader = ez.devices.file.list(path, recurse, accept)` 47 | /// creates a reader that enumerates (recursively) directories and files. 
///   Returns the entries as `{ path: path, name: name, depth: depth, stat: stat }` objects.
///   Two `options` may be specified: `recurse` and `accept`.
///   If `recurse` is falsy, only the entries immediately under `path` are returned.
///   If `recurse` is truthy, entries at all levels (including the root entry) are returned.
///   If `recurse` is `"postorder"`, directories are returned after their children.
///   `accept` is an optional function which will be called as `accept(_, entry)` and
///   will control whether files or subdirectories will be included in the stream or not.
export interface ListOptions {
	recurse?: boolean | 'preorder' | 'postorder';
	accept?: (_: _, entry: ListEntry) => boolean;
}

// One directory entry produced by `list`.
export interface ListEntry {
	path: string;   // full path ('/'-joined from the root argument)
	name: string;   // basename of the entry
	depth: number;  // 0 for the root entry itself, 1 for its direct children, ...
	stat: fs.Stats; // result of fs.stat on the entry
}

export function list(path: string, options?: ListOptions) {
	var recurse: boolean | 'preorder' | 'postorder', accept: ((_: _, entry: ListEntry) => boolean) | undefined;
	if (options && typeof options === 'object') {
		recurse = options.recurse || false;
		accept = options.accept;
	} else {
		// legacy positional signature: list(path, recurse, accept)
		recurse = arguments[1];
		accept = arguments[2];
	}
	const postorder = recurse === 'postorder';
	// Built on top of the empty reader: all entries are produced by the transform's writer.
	return generic.empty.reader.transform((_, reader, writer) => {
		// Depth-first walk. `fs.stat(p, _)` / `fs.readdir(p, _)` are streamline-style
		// calls: the `_` continuation makes the async call look synchronous.
		function process(_: _, p: string, name: string, depth: number) {
			const stat = fs.stat(p, _);
			const entry = {
				path: p,
				name: name,
				depth: depth,
				stat: stat,
			};
			// accept() can prune files AND whole subtrees (directories are not descended into).
			if (accept && !accept(_, entry)) return;
			// non-recursive mode (recurse falsy) only emits depth-1 entries, never the root
			if ((recurse || depth === 1) && !postorder) writer.write(_, entry);
			if ((recurse || depth === 0) && stat.isDirectory()) fs.readdir(p, _).forEach_(_, (_, pp) => {
				process(_, p + '/' + pp, pp, depth + 1);
			});
			// postorder: directory entries are emitted after their children
			if ((recurse || depth === 1) && postorder) writer.write(_, entry);
		}
		process(_, path, path.substring(path.lastIndexOf('/') + 1), 0);
	});
}
-------------------------------------------------------------------------------- /src/devices/generic.md: --------------------------------------------------------------------------------
## Generic stream constructors

`import * as ez from 'ez-streams'`

* `reader = ez.devices.generic.reader(read[, stop])`
  creates an EZ reader from a given `read(_)` function and an optional `stop(_, [arg])` function.
* `writer = ez.devices.generic.writer(write)`
  creates an EZ writer from a given `write(_, val)` function.
## Special streams

* `ez.devices.generic.empty`
  The empty stream. `empty.read(_)` returns `undefined`.
  It is also a null sink. It just discards anything you would write to it.
-------------------------------------------------------------------------------- /src/devices/generic.ts: --------------------------------------------------------------------------------
"use strict";
import { _ } from 'streamline-runtime';
import { Reader } from '../reader';
import { Writer } from '../writer';

/// ## Special streams
///
/// * `ez.devices.generic.empty`
///   The empty stream. `empty.read(_)` returns `undefined`.
///   It is also a null sink. It just discards anything you would write to it.
// Both callbacks are intentionally empty: read falls through (undefined = EOF),
// write discards its value.
export const empty = {
	reader: new Reader<any>(function (this: Reader<any>, _: _) { }),
	writer: new Writer<any>(function (this: Writer<any>, _: _, value: any) { }),
};

/// !doc
/// ## Generic stream constructors
///
/// `import * as ez from 'ez-streams'`
///
/// * `reader = ez.devices.generic.reader(read[, stop])`
///   creates an EZ reader from a given `read(_)` function and an optional `stop(_, [arg])` function.
// Wraps a bare `read(_)` function (and optional `stop`) into a full EZ Reader.
// `read` returns the next value, or undefined at end of stream.
export function reader<T>(read: (_: _) => T, stop?: (_: _, arg?: any) => void) {
	return new Reader(read, stop);
}

/// * `writer = ez.devices.generic.writer(write)`
///   creates an EZ writer from a given `write(_, val)` function.
// Wraps a bare `write(_, value)` function (and optional `stop`) into a full EZ Writer.
export function writer<T>(write: (_: _, value: T) => void, stop?: (_: _, arg?: any) => void) {
	return new Writer(write, stop);
}
-------------------------------------------------------------------------------- /src/devices/http.md: --------------------------------------------------------------------------------
## HTTP EZ Streams

`import * as ez from 'ez-streams'`

* `server = ez.devices.http.server(listener, options)`
  Creates an EZ HTTP server.
  The `listener` is called as `listener(request, response, _)`
  where `request` is an EZ reader and `response` an EZ writer.
  For a full description of this API, see `HttpServerRequest/Response` in
  https://github.com/Sage/ez-streams/blob/master/lib/node-wrappers.md
* `client = ez.devices.http.client(options)`
  Creates an EZ HTTP client.
  `client` is an EZ writer.
  The response object returned by `client.response(_)` is an EZ reader.
15 | For a full description of this API, see `HttpClientRequest/Response` in 16 | https://github.com/Sage/ez-streams/blob/master/lib/node-wrappers.md 17 | * `listener = ez.devices.http.listener(listener, options)` 18 | wraps an ez-streams listener as a vanilla node.js listener 19 | * `factory = ez.factory("http://user:pass@host:port/...")` 20 | Use reader for a GET request, writer for POST request 21 | * `reader = factory.reader(_)` 22 | * `writer = factory.writer(_)` 23 | -------------------------------------------------------------------------------- /src/devices/http.ts: -------------------------------------------------------------------------------- 1 | import { _ } from 'streamline-runtime'; 2 | import { fixOptions } from './node'; 3 | import { Reader } from '../reader'; 4 | import { Writer } from '../writer'; 5 | import * as http from 'http'; 6 | 7 | import { 8 | HttpProxyClientRequest, 9 | HttpClientRequest, 10 | HttpClientResponse, 11 | HttpClientOptions, 12 | HttpServer, 13 | HttpServerRequest, 14 | HttpServerResponse, 15 | HttpServerOptions, 16 | createHttpServer, 17 | httpRequest, 18 | httpListener, 19 | } from '../node-wrappers'; 20 | 21 | export { 22 | HttpProxyClientRequest, 23 | HttpClientRequest, 24 | HttpClientResponse, 25 | HttpClientOptions, 26 | HttpServer, 27 | HttpServerRequest, 28 | HttpServerResponse, 29 | HttpServerOptions, 30 | } 31 | 32 | function endWrite(_: _, cli: HttpClientRequest) { 33 | const resp = cli.end().response(_); 34 | if (resp.statusCode != 201) throw new Error("Request return status code: " + resp.statusCode); // TODO: better manage errors 35 | const data = resp.readAll(_); 36 | return (typeof data === 'string' && /^application\/json/.test(resp.headers['content-type'])) ? 
JSON.parse(data) : data; 37 | } 38 | 39 | function guessType(data: any) { 40 | if (!data) return null; 41 | if (Buffer.isBuffer(data)) return "application/octet-stream"; 42 | if (typeof data === "object") return "application/json"; 43 | if (typeof data !== "string") throw new TypeError("invalid data type: " + typeof data); 44 | const text = data; 45 | if (text[0] === "<") { 46 | if (text.slice(0, 9).toLowerCase() === " void, options?: HttpServerOptions) { 65 | return createHttpServer(listener, fixOptions(options)); 66 | } 67 | /// * `client = ez.devices.http.client(options)` 68 | /// Creates an EZ HTTP client. 69 | /// `client` is an EZ writer. 70 | /// The response object returned by `client.response(_)` is an EZ reader. 71 | /// For a full description of this API, see `HttpClientRequest/Response` in 72 | /// https://github.com/Sage/ez-streams/blob/master/lib/node-wrappers.md 73 | 74 | export function client(options?: HttpClientOptions) { 75 | return httpRequest(fixOptions(options)); 76 | } 77 | /// * `listener = ez.devices.http.listener(listener, options)` 78 | /// wraps an ez-streams listener as a vanilla node.js listener 79 | export interface HttpListenerOption { 80 | 81 | } 82 | export function listener(listener: (request: HttpServerRequest, response: HttpServerResponse) => void, options?: HttpListenerOption) { 83 | return httpListener(listener, fixOptions(options)); 84 | } 85 | /// * `factory = ez.factory("http://user:pass@host:port/...")` 86 | /// Use reader for a GET request, writer for POST request 87 | export type FactoryWriter = Writer & { _result: any }; 88 | 89 | export function factory(url: string) { 90 | return { 91 | /// * `reader = factory.reader(_)` 92 | reader(_: _) { 93 | var response = module.exports.client({ 94 | url: url, 95 | method: "GET" 96 | }).end().response(_); 97 | if (response.statusCode !== 200) { 98 | var payload = response.readAll(_); 99 | throw new Error("Error reading '" + url + "'; Status " + response.statusCode + ": " + 
payload); 100 | } 101 | return response; 102 | }, 103 | /// * `writer = factory.writer(_)` 104 | writer(_: _) { 105 | var cli: HttpClientRequest; 106 | var type: string | null; 107 | return { 108 | write(this: FactoryWriter, _: _, data: any) { 109 | const opt: HttpClientOptions = { 110 | url: url, 111 | method: "POST", 112 | headers: {} 113 | }; 114 | if (!cli) { 115 | type = guessType(data); 116 | if (type) opt.headers!["content-type"] = type; 117 | cli = client(opt).proxyConnect(_); 118 | } 119 | if (data === undefined) return this._result = endWrite(_, cli); 120 | else return cli.write(_, type === "application/json" ? JSON.stringify(data) : data); 121 | }, 122 | get result(this: FactoryWriter) { return this._result } 123 | }; 124 | } 125 | }; 126 | } -------------------------------------------------------------------------------- /src/devices/index.ts: -------------------------------------------------------------------------------- 1 | import * as array from './array'; 2 | import * as buffer from './buffer'; 3 | import * as child_process from './child_process'; 4 | import * as console from './console'; 5 | import * as file from './file'; 6 | import * as generic from './generic'; 7 | import * as http from './http'; 8 | import * as net from './net'; 9 | import * as node from './node'; 10 | import * as queue from './queue'; 11 | import * as std from './std'; 12 | import * as string from './string'; 13 | import * as uturn from './uturn'; 14 | export { array, buffer, child_process, console, file, generic, http, net, node, queue, std, string, uturn } -------------------------------------------------------------------------------- /src/devices/net.md: -------------------------------------------------------------------------------- 1 | ## TCP and socket EZ Streams 2 | 3 | `import * as ez from 'ez-streams'` 4 | 5 | * `server = ez.devices.net.server(serverOptions, listener, streamOptions)` 6 | Creates an EZ HTTP server. 
  The `listener` is called as `listener(stream, _)`
  where `stream` is an EZ reader and writer.
  For a full description of this API, see `NetServer` in
  https://github.com/Sage/ez-streams/blob/master/lib/node-wrappers.md
* `client = ez.devices.net.tcpClient(port, host, options)`
  Creates an EZ TCP client.
  The stream returned by `client.connect(_)` is an EZ reader and writer.
  For a full description of this API, see `tcpClient` in
  https://github.com/Sage/ez-streams/blob/master/lib/node-wrappers.md
* `client = ez.devices.net.socketClient(path, options)`
  Creates an EZ socket client.
  The stream returned by `client.connect(_)` is an EZ reader and writer.
  For a full description of this API, see `tcpClient` in
  https://github.com/Sage/ez-streams/blob/master/lib/node-wrappers.md
-------------------------------------------------------------------------------- /src/devices/net.ts: --------------------------------------------------------------------------------
import { _ } from 'streamline-runtime';
import { fixOptions } from './node';
import * as streams from '../node-wrappers';

/// !doc
/// ## TCP and socket EZ Streams
///
/// `import * as ez from 'ez-streams'`
///
/// * `server = ez.devices.net.server(serverOptions, listener, streamOptions)`
///   Creates an EZ TCP server.
///   The `listener` is called as `listener(stream, _)`
///   where `stream` is an EZ reader and writer.
///   For a full description of this API, see `NetServer` in
///   https://github.com/Sage/ez-streams/blob/master/lib/node-wrappers.md
export type SocketServer = streams.SocketServer;
export type SocketServerOptions = streams.SocketServerOptions;
export type SocketStream = streams.SocketStream;
export type SocketOptions = streams.SocketOptions;
export type SocketClient = streams.SocketClient;

// Creates a net (TCP/socket) server.
// Accepts two call orders:
//   server(listener, streamOptions, serverOptions)       -- this signature
//   server(serverOptions, listener, streamOptions)       -- documented legacy order,
// detected by the second argument being a function (the listener).
export function server(listener: (stream: SocketStream, _: _) => void, streamOptions?: SocketOptions, serverOptions?: SocketServerOptions) {
	// compat hack
	if (typeof streamOptions === "function")
		return streams.createNetServer(arguments[0], arguments[1], fixOptions(arguments[2]));
	return streams.createNetServer(serverOptions!, listener, fixOptions(streamOptions));
}
/// * `client = ez.devices.net.tcpClient(port, host, options)`
///   Creates an EZ TCP client.
///   The stream returned by `client.connect(_)` is an EZ reader and writer.
///   For a full description of this API, see `tcpClient` in
///   https://github.com/Sage/ez-streams/blob/master/lib/node-wrappers.md

// `host` and `options` handling is delegated to streams.tcpClient when undefined.
export function tcpClient(port: number, host?: string, options?: SocketOptions) {
	return streams.tcpClient(port, host, fixOptions(options));
}
/// * `client = ez.devices.net.socketClient(path, options)`
///   Creates an EZ socket client.
///   The stream returned by `client.connect(_)` is an EZ reader and writer.
40 | /// For a full description of this API, see `tcpClient` in 41 | /// https://github.com/Sage/ez-streams/blob/master/lib/node-wrappers.md 42 | 43 | export function socketClient(path: string, options: SocketOptions) { 44 | return streams.socketClient(path, fixOptions(options)); 45 | } 46 | -------------------------------------------------------------------------------- /src/devices/node.md: -------------------------------------------------------------------------------- 1 | ## EZ Stream wrappers for native node streams 2 | 3 | `import * as ez from 'ez-streams'` 4 | 5 | * `reader = ez.devices.node.reader(stream, options)` 6 | wraps a node.js stream as an EZ reader. 7 | For a full description of the options, see `ReadableStream` in 8 | https://github.com/Sage/ez-streams/blob/master/lib/node-wrappers.md 9 | * `writer = ez.devices.node.writer(stream, options)` 10 | wraps a node.js stream as an EZ writer. 11 | For a full description of the options, see `WritableStream` in 12 | https://github.com/Sage/ez-streams/blob/master/lib/node-wrappers.md 13 | -------------------------------------------------------------------------------- /src/devices/node.ts: -------------------------------------------------------------------------------- 1 | /// 2 | /// ## Native node.js streams 3 | /// 4 | import * as streams from '../node-wrappers'; 5 | import { Reader } from '../reader'; 6 | 7 | require('../reader').decorate(streams.ReadableStream.prototype); 8 | require('../writer').decorate(streams.WritableStream.prototype); 9 | 10 | /// !doc 11 | /// ## EZ Stream wrappers for native node streams 12 | /// 13 | /// `import * as ez from 'ez-streams'` 14 | /// 15 | /// * `reader = ez.devices.node.reader(stream, options)` 16 | /// wraps a node.js stream as an EZ reader. 
///   For a full description of the options, see `ReadableStream` in
///   https://github.com/Sage/ez-streams/blob/master/lib/node-wrappers.md

export interface NodeReaderOptions {
	encoding?: string;
}

// Normalizes the `options` argument: a plain string is shorthand for `{ encoding }`,
// undefined becomes an empty options object.
export function fixOptions(options: NodeReaderOptions | string | undefined) {
	var opts: NodeReaderOptions;
	if (typeof options === "string") {
		opts = {
			encoding: options,
		};
	} else {
		opts = options || {};
	}
	return opts;
}

// Wraps a native readable stream as an EZ reader.
// When an encoding is set, it is forwarded via setEncoding so reads yield
// decoded strings (node stream semantics); otherwise reads yield raw chunks.
export function reader(emitter: NodeJS.ReadableStream, options?: NodeReaderOptions | string): Reader<any> {
	var opts = fixOptions(options);
	const reader = new streams.ReadableStream(emitter, opts);
	if (opts.encoding) reader.setEncoding(opts.encoding);
	return reader.reader;
}
/// * `writer = ez.devices.node.writer(stream, options)`
///   wraps a node.js stream as an EZ writer.
///   For a full description of the options, see `WritableStream` in
///   https://github.com/Sage/ez-streams/blob/master/lib/node-wrappers.md

export interface NodeWriterOptions { };

// Wraps a native writable stream as an EZ writer.
export function writer(emitter: NodeJS.WritableStream, options?: NodeWriterOptions) {
	const writer = new streams.WritableStream(emitter, fixOptions(options));
	return writer.writer;
}
-------------------------------------------------------------------------------- /src/devices/queue.md: --------------------------------------------------------------------------------
## Queue device

The queue device can be used to desynchronize processing between one or several tasks that produce
data and a task that consumes queued data.

`import * as ez from 'ez-streams'`

* `queue = ez.devices.queue(options)`
  creates a queue device.
  The queue device has two properties:
  `queue.reader`: a reader from which you can read the data which has been queued.
  `queue.writer`: a writer to which you can write data.
  You can also interact with the queue with the following non-streaming API:
  `data = queue.get(_)` gets the next item from the queue.
  `ok = queue.put(data)` adds an item to the queue (synchronously).
  You can pass a `max` option through the `options` parameter when creating the queue.
  If you pass this option, `queue.put(data)` will return true if the data has been queued and false if
  the data has been discarded because the queue is full.
  Note that `queue.writer` will not discard the data but instead will wait for the queue to become available.
-------------------------------------------------------------------------------- /src/devices/queue.ts: --------------------------------------------------------------------------------
import { _ } from 'streamline-runtime';
import { Reader } from '../reader';
import { Writer } from '../writer';
import * as generic from './generic';

/// !doc
/// ## Queue device
///
/// The queue device can be used to desynchronize processing between one or several tasks that produce
/// data and a task that consumes queued data.
///
/// `import * as ez from 'ez-streams'`
///
/// * `queue = ez.devices.queue.create(options)`
///   creates a queue device.
///   The queue device has two properties:
///   `queue.reader`: a reader from which you can read the data which has been queued.
///   `queue.writer`: a writer to which you can write data.
///   You can also interact with the queue with the following non-streaming API:
///   `data = queue.get(_)` gets the next item from the queue.
///   `ok = queue.put(data)` adds an item to the queue (synchronously).
///   You can pass a `max` option through the `options` parameter when creating the queue.
///   If you pass this option, `queue.put(data)` will return true if the data has been queued and false if
///   the data has been discarded because the queue is full.
///   Note that `queue.writer` will not discard the data but instead will wait for the queue to become available.

// A paired reader/writer over the same underlying channel.
export interface Duplex<T> {
	reader: Reader<T>;
	writer: Writer<T>;
}

// any and type intersection to the rescue because queue is not an ES2015 class
// Builds on the streamline runtime queue: the reader pulls queued items
// (stopping the reader ends the queue) and the writer pushes into it.
export function create(max?: number): Streamline.Queue & Duplex<any> {
	const queue: any = _.queue(max);
	queue.reader = generic.reader(queue.read.bind(queue), function (_) { queue.end.call(queue); })
	queue.writer = generic.writer(queue.write.bind(queue))
	return queue;
}
-------------------------------------------------------------------------------- /src/devices/std.md: --------------------------------------------------------------------------------
## EZ wrappers for standard I/O streams

`import * as ez from 'ez-streams'`

* `reader = ez.devices.std.in(encoding)`
* `writer = ez.devices.std.out(encoding)`
* `writer = ez.devices.std.err(encoding)`
-------------------------------------------------------------------------------- /src/devices/std.ts: --------------------------------------------------------------------------------
import { Reader } from '../reader';
import { Writer } from '../writer';
import * as streams from '../node-wrappers';

/// !doc
/// ## EZ wrappers for standard I/O streams
///
/// `import * as ez from 'ez-streams'`
///
/// * `reader = ez.devices.std.in(encoding)`
/// * `writer = ez.devices.std.out(encoding)`
/// * `writer = ez.devices.std.err(encoding)`
// Reader over process.stdin; stdin is resumed so data starts flowing.
// With an encoding, setEncoding makes reads yield strings.
export const input: Input = function (encoding?: string) {
	const st = new streams.ReadableStream(process.stdin, {});
	st.setEncoding(encoding || null);
	process.stdin.resume();
	return st.reader;
}

// Writer over process.stdout.
export const output: Output = function (encoding?: string) {
	return new streams.WritableStream(process.stdout, {
		encoding: encoding,
	}).writer;
}

// Writer over process.stderr.
export const error: Output = function (encoding?: string) {
	return new streams.WritableStream(process.stderr, {
		encoding: encoding,
	}).writer;
}

// Overloaded call signatures: with an encoding the stream carries strings,
// without one it carries raw chunks.
// NOTE(review): the generic parameters on Reader/Writer were lost in extraction;
// string/Buffer below is the presumed original -- confirm against the repo.
export interface Input {
	(encoding: string): Reader<string>;
	(): Reader<Buffer>;
}
export interface Output {
	(encoding: string): Writer<string>;
	(): Writer<Buffer>;
}

// compat API (cannot export 'in' to TS because reserved word)
exports.in = input;
exports.out = output;
exports.err = error;
-------------------------------------------------------------------------------- /src/devices/string.md: --------------------------------------------------------------------------------
## In-memory string streams

`import * as ez from 'ez-streams'`

* `reader = ez.devices.string.reader(text, options)`
  creates an EZ reader that reads its chunks from `text`.
  `reader.read(_)` will return the chunks asynchronously by default.
  You can force synchronous delivery by setting `options.sync` to `true`.
  The default chunk size is 1024. You can override it by passing
  a `chunkSize` option.
* `writer = ez.devices.string.writer(options)`
  creates an EZ writer that collects strings into a text buffer.
  `writer.write(_, data)` will write asynchronously by default.
  You can force synchronous write by setting `options.sync` to `true`.
  `writer.toString()` returns the internal text buffer into which the
  strings have been collected.
-------------------------------------------------------------------------------- /src/devices/string.ts: --------------------------------------------------------------------------------
import { _ } from 'streamline-runtime';
import { Reader } from '../reader';
import { Writer } from '../writer';
import { nextTick } from '../util';

/// !doc
/// ## In-memory string streams
///
/// `import * as ez from 'ez-streams'`
///

export interface Options {
	// when true, read/write complete synchronously; otherwise they yield via nextTick
	sync?: boolean;
	// chunk size for the reader, or a function returning the next chunk size; default 1024
	chunkSize?: number | (() => number);
}

// Writer that accumulates every written string into an in-memory buffer,
// retrievable via toString() or the `result` getter.
export class StringWriter extends Writer<string> {
	buf: string;
	constructor(options: Options) {
		super((_: _, value: string) => {
			if (!options.sync) nextTick(_);
			// undefined marks end-of-stream and is not appended
			if (value !== undefined) this.buf += value;
			return this;
		});
		this.buf = '';
	}
	toString() {
		return this.buf;
	}
	get result() {
		return this.buf;
	}
}


/// * `reader = ez.devices.string.reader(text, options)`
///   creates an EZ reader that reads its chunks from `text`.
///   `reader.read(_)` will return the chunks asynchronously by default.
///   You can force synchronous delivery by setting `options.sync` to `true`.
///   The default chunk size is 1024. You can override it by passing
///   a `chunkSize` option.
export function reader(text: string, options?: Options | number) {
	var opts: Options;
	// a bare number is shorthand for { chunkSize }
	if (typeof options === "number") {
		opts = {
			chunkSize: options
		};
	}
	else opts = options || {};
	const chunkSize = opts.chunkSize || 1024;
	var pos = 0;
	return new Reader(function read(_) {
		if (!opts.sync) nextTick(_);
		// returning undefined past the end signals end-of-stream
		if (pos >= text.length) return;
		const len = typeof chunkSize === "function" ? chunkSize() : chunkSize;
		const s = text.substring(pos, pos + len);
		pos += len;
		return s;
	});
}
/// * `writer = ez.devices.string.writer(options)`
///   creates an EZ writer that collects strings into a text buffer.
///   `writer.write(_, data)` will write asynchronously by default.
///   You can force synchronous write by setting `options.sync` to `true`.
///   `writer.toString()` returns the internal text buffer into which the
///   strings have been collected.
export function writer(options?: Options) {
	return new StringWriter(options || {});
}

// URL factory for "string:..." URLs: the reader yields everything after the
// first ':'; the writer collects into a StringWriter.
export function factory(url: string) {
	return {
		/// * `reader = factory.reader(_)`
		reader: (_: _) => {
			return module.exports.reader(url.substring(url.indexOf(':') + 1));
		},
		/// * `writer = factory.writer(_)`
		writer: (_: _) => {
			return module.exports.writer();
		},
	};
}
-------------------------------------------------------------------------------- /src/devices/uturn.md: --------------------------------------------------------------------------------
## Special device that transforms a writer into a reader

`import * as ez from 'ez-streams'`

* `uturn = ez.devices.uturn.create()`
  creates a uturn device.
  The device has two properties: a `uturn.writer` to which you can write,
  and a `uturn.reader` from which you can read.
9 | -------------------------------------------------------------------------------- /src/devices/uturn.ts: -------------------------------------------------------------------------------- 1 | import { _ } from 'streamline-runtime'; 2 | import * as generic from './generic'; 3 | import * as stopException from '../stop-exception'; 4 | import { Reader } from '../reader'; 5 | import { Writer } from '../writer'; 6 | import { nextTick } from '../util'; 7 | 8 | var lastId = 0; 9 | var tracer: (...args: any[]) => void; // = console.error; 10 | 11 | /// !doc 12 | /// ## Special device that transforms a writer into a reader 13 | /// 14 | /// `import * as ez from 'ez-streams'` 15 | /// 16 | /// * `uturn = ez.devices.uturn.create()` 17 | /// creates a uturn device. 18 | /// The device has two properties: a `uturn.writer` to which you can write, 19 | /// and a `uturn.reader` from which you can read. 20 | export interface Uturn { 21 | reader: Reader; 22 | writer: Writer; 23 | end: (_: _) => void; 24 | } 25 | 26 | export function create(): Uturn { 27 | var state = 'idle', pendingData: T | undefined, error: any; 28 | const id = ++lastId; 29 | 30 | var pendingReaderCb: ((_: _, value: (T | undefined)) => void) | null; 31 | function bounceReader(err?: any, val?: T) { 32 | const lcb = pendingReaderCb; 33 | pendingReaderCb = null; 34 | if (lcb) lcb(err, val); 35 | } 36 | 37 | var pendingWriterCb: ((_: _, value?: Writer) => void) | null; 38 | function bounceWriter(err?: any, val?: Writer) { 39 | const lcb = pendingWriterCb; 40 | pendingWriterCb = null; 41 | if (lcb) lcb(err, val); 42 | } 43 | 44 | var pendingStopCb: ((_: _, value: any) => void) | null; 45 | function bounceStop(err?: any, val?: any) { 46 | const lcb = pendingStopCb; 47 | pendingStopCb = null; 48 | if (lcb) lcb(err, val); 49 | } 50 | 51 | const uturn = { 52 | reader: new Reader(_.cast(function read(cb: (err?: any, val?: T) => void) { 53 | nextTick(() => { 54 | tracer && tracer(id, "READ", state, pendingData); 55 | const 
st = state; 56 | switch (st) { 57 | case 'writing': 58 | state = pendingData === undefined ? 'done' : 'idle'; 59 | // acknowledge the write 60 | bounceWriter(null, uturn.writer); 61 | // return the data posted by the write 62 | cb(null, pendingData); 63 | pendingData = undefined; 64 | break; 65 | case 'idle': 66 | // remember it 67 | state = 'reading'; 68 | pendingReaderCb = cb; 69 | break; 70 | case 'readStopping': 71 | case 'writeStopping': 72 | state = 'done'; 73 | const arg = stopException.unwrap(error); 74 | // acknowledge the stop 75 | bounceStop(); 76 | // return undefined or throw 77 | cb(arg && arg !== true ? arg : null); 78 | break; 79 | case 'done': 80 | cb(error); 81 | break; 82 | default: 83 | state = 'done'; 84 | cb(error || new Error('invalid state ' + st)); 85 | break; 86 | } 87 | }); 88 | }), _.cast(function stop(cb: (err?: any, arg?: any) => void, arg?: any) { 89 | nextTick(() => { 90 | error = error || stopException.make(arg); 91 | tracer && tracer(id, "STOP READER", state, arg); 92 | const st = state; 93 | switch (st) { 94 | case 'reading': 95 | state = 'done'; 96 | // send undefined or exception to read 97 | bounceReader(arg && arg !== 1 ? arg : null); 98 | // acknowledge the stop 99 | cb(); 100 | break; 101 | case 'writing': 102 | state = 'done'; 103 | // send to write 104 | bounceWriter(error, uturn.writer); 105 | // acknowledge the stop 106 | cb(); 107 | break; 108 | case 'idle': 109 | // remember it 110 | state = 'readStopping'; 111 | pendingStopCb = cb; 112 | break; 113 | case 'done': 114 | cb(error); 115 | break; 116 | default: 117 | state = 'done'; 118 | cb(error || new Error('invalid state ' + st)); 119 | break; 120 | } 121 | }); 122 | })), 123 | writer: new Writer(_.cast(function write(this: Writer, cb: (err?: any, val?: Writer) => void, data: T) { 124 | nextTick(() => { 125 | tracer && tracer(id, "WRITE", state, data); 126 | const st = state; 127 | switch (st) { 128 | case 'reading': 129 | state = data === undefined ? 
'done' : 'idle'; 130 | // acknowledge the read 131 | bounceReader(error, data); 132 | // return the data posted by the write 133 | cb(null, this); 134 | break; 135 | case 'idle': 136 | // remember it 137 | state = 'writing'; 138 | pendingWriterCb = cb; 139 | pendingData = data; 140 | break; 141 | case 'readStopping': 142 | state = 'done'; 143 | // acknowledge the stop 144 | bounceStop(); 145 | // throw the error 146 | cb(error); 147 | break; 148 | case 'done': 149 | cb(error || 'invalid state ' + st); 150 | break; 151 | default: 152 | state = 'done'; 153 | cb(new Error('invalid state ' + st)); 154 | break; 155 | } 156 | }); 157 | }), _.cast(function stop(cb: (err?: any, val?: Writer) => void, arg?: any) { 158 | nextTick(() => { 159 | tracer && tracer(id, "STOP WRITER", state, arg); 160 | error = error || stopException.make(arg); 161 | const st = state; 162 | switch (st) { 163 | case 'reading': 164 | // send undefined or exception to read 165 | state = 'done'; 166 | bounceReader(arg && arg !== 1 ? arg : null); 167 | // acknowledge the stop 168 | cb(); 169 | break; 170 | case 'idle': 171 | // remember it 172 | state = 'writeStopping'; 173 | pendingStopCb = cb; 174 | break; 175 | case 'done': 176 | cb(error); 177 | break; 178 | default: 179 | state = 'done'; 180 | cb(new Error('invalid state ' + st)); 181 | break; 182 | } 183 | }); 184 | })), 185 | end: _.cast(function (err) { 186 | nextTick(() => { 187 | tracer && tracer(id, "END", state, err); 188 | err = stopException.unwrap(err); 189 | error = error || err; 190 | state = 'done'; 191 | // at most one of the pending callbacks should be active but we can safely bounce to all. 
				bounceReader(error);
				bounceWriter(error, uturn.writer)
				bounceStop(error);
			});
		}),
	};
	return uturn;
}
--------------------------------------------------------------------------------
/src/ez.ts:
--------------------------------------------------------------------------------
import * as devices from './devices/index';
import * as helpers from './helpers/index';
import * as mappers from './mappers/index';
import * as transforms from './transforms/index';
import * as predicate from './predicate';
import * as stopException from './stop-exception';

import * as EzReader from './reader';
import * as EzWriter from './writer';
import EzFactory from './factory';

// re-export the sub-modules under their public names
export {
	devices, helpers, mappers, transforms,
	predicate, stopException
};

export const factory = EzFactory;

export type Reader = EzReader.Reader;
export type CompareOptions = EzReader.CompareOptions;
export type ParallelOptions = EzReader.ParallelOptions;

export type Writer = EzWriter.Writer;

// Creates a reader from:
// - a string: treated as a URL and resolved through the protocol factory
//   (lazy: the underlying reader is only created on the first read/stop call);
// - an array: wrapped by the array device;
// - a Buffer: wrapped by the buffer device.
// Throws on any other argument type.
export function reader(arg: string | any[] | Buffer): Reader {
	if (typeof arg === 'string') {
		const f = factory(arg);
		let reader: Reader;
		return devices.generic.reader(function read(_) {
			if (!reader) reader = f.reader(_);
			return reader.read(_);
		}, function stop(_, arg) {
			if (!reader) reader = f.reader(_);
			return reader.stop(_, arg);
		})
	} else if (Array.isArray(arg)) {
		return devices.array.reader(arg);
	} else if (Buffer.isBuffer(arg)) {
		return devices.buffer.reader(arg);
	} else {
		throw new Error(`invalid argument ${arg && typeof arg}`);
	}
}

// Creates a writer from a URL string (lazy, via the protocol factory), an array
// or a Buffer. For the string form, the `result` property is forwarded from the
// underlying writer once it has been created.
export function writer(arg: string | any[] | Buffer): Writer {
	if (typeof arg === 'string') {
		const f = factory(arg);
		let writer: Writer;
		const wrapper = devices.generic.writer(function write(_, val) {
			if (!writer) writer = f.writer(_);
			return writer.write(_, val);
		}, function stop(_, arg) {
			if (!writer) writer = f.writer(_);
			return writer.stop(_, arg);
		});
		Object.defineProperty(wrapper, 'result', {
			get: () => {
				const anyWriter: any = writer;
				return anyWriter.result;
			}
		});
		return wrapper;
	} else if (Array.isArray(arg)) {
		return devices.array.writer(arg);
	} else if (Buffer.isBuffer(arg)) {
		return devices.buffer.writer(arg);
	} else {
		throw new Error(`invalid argument ${arg && typeof arg}`);
	}
}

// compatibility hacks
// expose legacy create/decorate entry points on the reader/writer functions,
// and flatten transforms.cut / devices.queue so that old call sites keep working
function anyfy(x: any) { return x; }
var readerHack: any = reader;
readerHack.create = EzReader.create;
readerHack.decorate = anyfy(EzReader).decorate;

var writerHack: any = writer;
writerHack.create = EzWriter.create;
writerHack.decorate = anyfy(EzWriter).decorate;

var transformHack: any = transforms.cut.transform;
(transforms as any).cut = transformHack;
transforms.cut.transform = transformHack;

var queueHack: any = devices.queue.create;
(devices as any).queue = queueHack;
devices.queue.create = queueHack;
--------------------------------------------------------------------------------
/src/factory.ts:
--------------------------------------------------------------------------------
"use strict";

import * as fs from 'fs';
import * as fsp from 'path';

// Shared protocol->module registry, stored on the global object under a unique key
// so that several copies of the library loaded side by side share the same map.
const glob: any = typeof global === "object" ? global : window;
const secret = "_6522f20750bf404ea2fbccd561613115";
const factories = (glob[secret] = (glob[secret] || {
	// standard factories
	"console": "./devices/console",
	"http": "./devices/http",
	"https": "./devices/http",
	"file": "./devices/file",
	"string": "./devices/string",
}));

export interface PackageFactory {
	protocol: string;
	module: string;
}

// Walks up the directory tree from `dir`, registering extra protocol factories
// declared under the `ez.factories` key of package.json files found in sibling
// node_modules packages (and in parent directories themselves).
function scanDirs(dir: string) {
	function tryPackage(pkgPath: string, fromDir?: string) {
		if (!fs.existsSync(pkgPath)) return;
		try {
			// add factories from package.json
			var pk = require(pkgPath);
			if (pk && pk.ez && pk.ez.factories) {
				pk.ez.factories.forEach((crt: PackageFactory) => {
					if (crt.protocol && crt.module) {
						// when scanning a parent dir, rebase the module path on that dir
						factories[crt.protocol] = fromDir ? crt.module.replace(/^.*([\\\/])/, fromDir + '$1') : crt.module;
					}
				});
			}
		} catch (e) {
			// a broken package.json must not prevent the other factories from loading
			console.error(e.message);
		}
	};
	const ndir = fsp.join(dir, "../node_modules");
	if (fs.existsSync(ndir)) {
		fs.readdirSync(ndir).forEach((pkg) => {
			tryPackage(fsp.join(ndir, pkg, "package.json"));
		});
	}
	const d = fsp.join(dir, '..');
	// try also package.json inside parent directory - for travis-ci
	tryPackage(fsp.join(d, "package.json"), d);
	if (d.length < dir.length) scanDirs(d);
}

scanDirs(__dirname);

// Resolves a "protocol:..." URL to the device factory exported by the module
// registered for that protocol; throws if the URL or the protocol is unknown.
export default function (url: string) {
	const parts = (url || "").split(":");
	if (parts.length < 2) throw new Error("invalid URL: " + url);
	const pp = parts[0];
	if (!pp) throw new Error("Missing protocol in url: " + url);
	if (!factories[pp]) throw new Error("Missing factory for protocol " + pp);
	//
	return require(factories[pp]).factory(url);
}
--------------------------------------------------------------------------------
/src/helpers/binary.md:
-------------------------------------------------------------------------------- 1 | ## helpers for binary streams 2 | 3 | `import * as ez from 'ez-streams'` 4 | 5 | ---- 6 | 7 | * `reader = ez.helpers.binary.reader(reader, options)` 8 | Wraps a raw buffer reader and returns a reader with additional API to handle binary streams. 9 | By default the reader is configured as big endian. 10 | You can configure it as little endian by setting the `endian` option to `"little"`. 11 | 12 | * `buf = reader.read(_, len)` 13 | returns the `len` next bytes of the stream. 14 | returns a buffer of length `len`, except at the end of the stream. 15 | The last chunk of the stream may have less than `len` bytes and afterwards the call 16 | returns `undefined`. 17 | If the `len` parameter is omitted, the call returns the next available chunk of data. 18 | 19 | * `buf = reader.peek(_, len)` 20 | Same as `read` but does not advance the read pointer. 21 | Another `read` would read the same data again. 22 | 23 | * `reader.unread(len)` 24 | Unread the last `len` bytes read. 25 | `len` cannot exceed the size of the last read. 26 | 27 | * `val = reader.readInt8(_)` 28 | * `val = reader.readUInt8(_)` 29 | * `val = reader.readInt16(_)` 30 | * `val = reader.readUInt16(_)` 31 | * `val = reader.readInt32(_)` 32 | * `val = reader.readUInt32(_)` 33 | * `val = reader.readFloat(_)` 34 | * `val = reader.readDouble(_)` 35 | Specialized readers for numbers. 36 | 37 | * `val = reader.peekInt8(_)` 38 | * `val = reader.peekUInt8(_)` 39 | * `val = reader.peekInt16(_)` 40 | * `val = reader.peekUInt16(_)` 41 | * `val = reader.peekInt32(_)` 42 | * `val = reader.peekUInt32(_)` 43 | * `val = reader.peekFloat(_)` 44 | * `val = reader.peekDouble(_)` 45 | Specialized peekers for numbers. 
46 | * `val = reader.unreadInt8()` 47 | * `val = reader.unreadUInt8()` 48 | * `val = reader.unreadInt16()` 49 | * `val = reader.unreadUInt16()` 50 | * `val = reader.unreadInt32()` 51 | * `val = reader.unreadUInt32()` 52 | * `val = reader.unreadFloat()` 53 | * `val = reader.unreadDouble()` 54 | Specialized unreaders for numbers. 55 | 56 | ---- 57 | 58 | * `writer = ez.helpers.binary.writer(writer, options)` 59 | Wraps a raw buffer writer and returns a writer with additional API to handle binary streams. 60 | By default the writer is configured as big endian. 61 | You can configure it as little endian by setting the `endian` option to `"little"`. 62 | The `bufSize` option controls the size of the intermediate buffer. 63 | 64 | * `writer.flush(_)` 65 | Flushes the buffer to the wrapped writer. 66 | 67 | * `writer.write(_, buf)` 68 | Writes `buf`. 69 | Note: writes are buffered. 70 | Use the `flush(_)` call if you need to flush before the end of the stream. 71 | 72 | * `writer.writeInt8(_, val)` 73 | * `writer.writeUInt8(_, val)` 74 | * `writer.writeInt16(_, val)` 75 | * `writer.writeUInt16(_, val)` 76 | * `writer.writeInt32(_, val)` 77 | * `writer.writeUInt32(_, val)` 78 | * `writer.writeFloat(_, val)` 79 | * `writer.writeDouble(_, val)` 80 | Specialized writers for numbers. 

--------------------------------------------------------------------------------
/src/helpers/binary.ts:
--------------------------------------------------------------------------------
/// !doc
/// ## helpers for binary streams
///
/// `import * as ez from 'ez-streams'`
import { _ } from "streamline-runtime";
import { Reader as BaseReader } from "../reader";
import { Writer as BaseWriter } from "../writer";

// (Buffer accessor name suffix, size in bytes) pairs used to generate the
// specialized readXxx/peekXxx/unreadXxx/writeXxx methods further down.
const NUMBERS: [string, number][] = [//
	['Int8', 1], ['UInt8', 1], //
	['Int16', 2], ['UInt16', 2], //
	['Int32', 4], ['UInt32', 4], //
	['Float', 4], ['Double', 8]];

///
/// ----
///
/// * `reader = ez.helpers.binary.reader(reader, options)`
///   Wraps a raw buffer reader and returns a reader with additional API to handle binary streams.
///   By default the reader is configured as big endian.
///   You can configure it as little endian by setting the `endian` option to `"little"`.
export interface ReaderOptions {
	endian?: 'big' | 'little';
}
export class Reader extends BaseReader {
	reader: BaseReader; // wrapped raw buffer reader
	options: ReaderOptions;
	pos: number; // read position inside `buf`
	buf: Buffer | undefined; // current buffered data; undefined once EOF has been returned
	constructor(reader: BaseReader, options: ReaderOptions) {
		///
		/// * `buf = reader.read(_, len)`
		///   returns the `len` next bytes of the stream.
		///   returns a buffer of length `len`, except at the end of the stream.
		///   The last chunk of the stream may have less than `len` bytes and afterwards the call
		///   returns `undefined`.
		///   If the `len` parameter is omitted, the call returns the next available chunk of data.
		// peekOnly is internal and not documented
		super((_) => { return this.readData(_); });
		this.reader = reader;
		this.options = options;
		this.pos = 0;
		this.buf = Buffer.alloc(0);
		// override read for compat
		this.read = this.readData;
	}
	// Backing implementation for read/peek.
	// - without `len`: returns the remainder of the current buffer, or the next raw chunk;
	// - with `len`: returns up to `len` bytes (less only at end of stream);
	// - `peekOnly` leaves the read position unchanged so the data can be read again.
	readData(_: _, len?: number, peekOnly?: boolean): Buffer | undefined {
		if (this.buf === undefined) return undefined;
		if (len === undefined) {
			if (this.pos < this.buf.length) return this.readData(_, this.buf.length - this.pos);
			else {
				this.buf = this.reader.read(_);
				// mark the fresh chunk as fully consumed, unless we are only peeking
				this.pos = this.buf && !peekOnly ? this.buf.length : 0;
				return this.buf;
			}
		}
		const l = this.ensure(_, len);
		if (l === 0 && len > 0) return undefined;
		const result = this.buf.slice(this.pos, this.pos + l);
		if (!peekOnly) this.pos += l;
		return result;
	}

	// internal API
	// Buffers data from the wrapped reader until at least `len` bytes are available
	// past `pos`. Returns the number of bytes actually available: `len` in the normal
	// case, less than `len` near the end of the stream, 0 when no data is left.
	ensure(_: _, len: number) {
		if (this.buf === undefined) return 0;
		if (this.pos + len <= this.buf.length) return len;
		var got = this.buf.length - this.pos;
		// keep the unread tail of the current buffer and append fresh chunks to it
		const bufs = got ? [this.buf.slice(this.pos)] : [];
		this.pos = 0;
		while (got < len) {
			var buf = this.reader.read(_);
			if (buf === undefined) {
				if (bufs.length === 0) return 0;
				else break;
			}
			bufs.push(buf);
			got += buf.length;
		}
		this.buf = Buffer.concat(bufs);
		return Math.min(this.buf.length, len);
	}

	///
	/// * `buf = reader.peek(_, len)`
	///   Same as `read` but does not advance the read pointer.
	///   Another `read` would read the same data again.
	peek(_: _, len: number) {
		return this.readData(_, len, true);
	}

	///
	/// * `reader.unread(len)`
	///   Unread the last `len` bytes read.
	///   `len` cannot exceed the size of the last read.
	unread(len: number) {
		if (!(len <= this.pos)) throw new Error("invalid unread: expected <= " + this.pos + ", got " + len);
		this.pos -= len;
	}
}
///
/// * `val = reader.readInt8(_)`
/// * `val = reader.readUInt8(_)`
/// * `val = reader.readInt16(_)`
/// * `val = reader.readUInt16(_)`
/// * `val = reader.readInt32(_)`
/// * `val = reader.readUInt32(_)`
/// * `val = reader.readFloat(_)`
/// * `val = reader.readDouble(_)`
///   Specialized readers for numbers.
///
/// * `val = reader.peekInt8(_)`
/// * `val = reader.peekUInt8(_)`
/// * `val = reader.peekInt16(_)`
/// * `val = reader.peekUInt16(_)`
/// * `val = reader.peekInt32(_)`
/// * `val = reader.peekUInt32(_)`
/// * `val = reader.peekFloat(_)`
/// * `val = reader.peekDouble(_)`
///   Specialized peekers for numbers.
// Builds a read/peek method that forwards to the named Buffer accessor
// (e.g. 'readInt16BE'). Returns undefined at end of stream, throws when the
// stream ends in the middle of a number; peek variants do not advance `pos`.
function numberReader(name: string, len: number, peekOnly?: boolean) {
	return function (this: Reader, _: _) {
		const got = this.ensure(_, len);
		if (got === 0) return undefined;
		if (got < len) throw new Error("unexpected EOF: expected " + len + ", got " + got);
		const result = (this.buf as any)[name](this.pos);
		if (!peekOnly) this.pos += len;
		return result;
	};
}

/// * `val = reader.unreadInt8()`
/// * `val = reader.unreadUInt8()`
/// * `val = reader.unreadInt16()`
/// * `val = reader.unreadUInt16()`
/// * `val = reader.unreadInt32()`
/// * `val = reader.unreadUInt32()`
/// * `val = reader.unreadFloat()`
/// * `val = reader.unreadDouble()`
///   Specialized unreaders for numbers.
141 | function numberUnreader(len: number) { 142 | return function (this: Reader) { 143 | return this.unread(len); 144 | }; 145 | } 146 | 147 | /// 148 | /// ---- 149 | /// 150 | /// * `writer = ez.helpers.binary.writer(writer, options)` 151 | /// Wraps a raw buffer writer and returns a writer with additional API to handle binary streams. 152 | /// By default the writer is configured as big endian. 153 | /// You can configure it as little endian by setting the `endian` option to `"little"`. 154 | /// The `bufSize` option controls the size of the intermediate buffer. 155 | export interface WriterOptions { 156 | endian?: 'big' | 'little'; 157 | bufSize?: number; 158 | } 159 | 160 | export class Writer extends BaseWriter { 161 | writer: BaseWriter; 162 | options: WriterOptions; 163 | pos: number; 164 | buf: Buffer; 165 | constructor(writer: BaseWriter, options?: WriterOptions) { 166 | super((_: _, buf: Buffer) => { 167 | this.writeDate(_, buf); 168 | return this; 169 | }); 170 | options = options || {}; 171 | this.writer = writer; 172 | this.options = options; 173 | this.pos = 0; 174 | this.buf = Buffer.alloc(options.bufSize && options.bufSize > 0 ? options.bufSize : 16384); 175 | } 176 | 177 | 178 | /// 179 | /// * `writer.flush(_)` 180 | /// Flushes the buffer to the wrapped writer. 181 | flush(_: _) { 182 | if (this.pos > 0) this.writer.write(_, this.buf.slice(0, this.pos)); 183 | // reallocate the buffer because existing buffer belongs to this.writer now. 184 | this.buf = Buffer.alloc(this.buf.length); 185 | this.pos = 0; 186 | } 187 | 188 | // internal call 189 | ensure(_: _, len: number) { 190 | if (this.pos + len > this.buf.length) { 191 | this.flush(_); 192 | if (len > this.buf.length) this.buf = Buffer.alloc(len); 193 | } 194 | } 195 | 196 | /// 197 | /// * `writer.write(_, buf)` 198 | /// Writes `buf`. 199 | /// Note: writes are buffered. 200 | /// Use the `flush(_)` call if you need to flush before the end of the stream. 
201 | writeDate(_: _, buf: Buffer) { 202 | if (buf === undefined || buf.length > this.buf.length) { 203 | this.flush(_); 204 | this.writer.write(_, buf); 205 | } else { 206 | this.ensure(_, buf.length); 207 | buf.copy(this.buf, this.pos); 208 | this.pos += buf.length; 209 | } 210 | } 211 | } 212 | 213 | /// 214 | /// * `writer.writeInt8(_, val)` 215 | /// * `writer.writeUInt8(_, val)` 216 | /// * `writer.writeInt16(_, val)` 217 | /// * `writer.writeUInt16(_, val)` 218 | /// * `writer.writeInt32(_, val)` 219 | /// * `writer.writeUInt32(_, val)` 220 | /// * `writer.writeFloat(_, val)` 221 | /// * `writer.writeDouble(_, val)` 222 | /// Specialized writers for numbers. 223 | function numberWriter(name: string, len: number) { 224 | return function (this: Writer, _: _, val: number) { 225 | this.ensure(_, len); 226 | (this.buf as any)[name](val, this.pos); 227 | this.pos += len; 228 | }; 229 | } 230 | 231 | NUMBERS.forEach(function (pair) { 232 | const len = pair[1]; 233 | const names = len > 1 ? 
[pair[0] + 'BE', pair[0] + 'LE'] : [pair[0]]; 234 | const readerProto: any = Reader.prototype; 235 | const writerProto: any = Writer.prototype; 236 | names.forEach(function (name) { 237 | readerProto['read' + name] = numberReader('read' + name, len, false); 238 | readerProto['peek' + name] = numberReader('read' + name, len, true); 239 | readerProto['unread' + name] = numberUnreader(len); 240 | writerProto['write' + name] = numberWriter('write' + name, len); 241 | }); 242 | }); 243 | 244 | function makeEndian(base: Function, verbs: string[], suffix: string) { 245 | const construct = function (this: Reader | Writer) { 246 | base.apply(this, arguments); 247 | } 248 | construct.prototype = Object.create(base.prototype); 249 | NUMBERS.slice(1).forEach(function (pair) { 250 | verbs.forEach(function (verb) { 251 | construct.prototype[verb + pair[0]] = base.prototype[verb + pair[0] + suffix]; 252 | }); 253 | }); 254 | return construct; 255 | } 256 | 257 | // TODO: add ambient definitions for all generated methods 258 | require('../reader').decorate(Reader.prototype); 259 | require('../writer').decorate(Writer.prototype); 260 | const ReaderLE = makeEndian(Reader, ['read', 'peek', 'unread'], 'LE'); 261 | const ReaderBE = makeEndian(Reader, ['read', 'peek', 'unread'], 'BE'); 262 | const WriterLE = makeEndian(Writer, ['write'], 'LE'); 263 | const WriterBE = makeEndian(Writer, ['write'], 'BE'); 264 | 265 | // Interfaces to get the specialized methods in TypeScript 266 | export interface BinaryReader extends Reader { 267 | read(_: _, len?: number): Buffer | undefined; 268 | readInt8(_: _): number; 269 | peekInt8(_: _): number; 270 | unreadInt8(): void; 271 | readInt16(_: _): number; 272 | peekInt16(_: _): number; 273 | unreadInt16(): void; 274 | readUInt16(_: _): number; 275 | peekUInt16(_: _): number; 276 | unreadUInt16(): void; 277 | readInt32(_: _): number; 278 | peekInt32(_: _): number; 279 | unreadInt32(): void; 280 | readUInt32(_: _): number; 281 | peekUInt32(_: _): 
number; 282 | unreadUInt32(): void; 283 | readFloat(_: _): number; 284 | peekFloat(_: _): number; 285 | unreadFloat(): void; 286 | readDouble(_: _): number; 287 | peekDouble(_: _): number; 288 | unreadDouble(): void; 289 | } 290 | 291 | export interface BinaryWriter extends Writer { 292 | writeInt8(_: _, val: number): void; 293 | writeInt16(_: _, val: number): void; 294 | writeUInt16(_: _, val: number): void; 295 | writeInt32(_: _, val: number): void; 296 | writeUInt32(_: _, val: number): void; 297 | writeFloat(_: _, val: number): void; 298 | writeDouble(_: _, val: number): void; 299 | } 300 | 301 | // Documentation above, next to the constructor 302 | export function reader(reader: BaseReader, options?: ReaderOptions): BinaryReader { 303 | options = options || {}; 304 | const constr: any = options.endian === 'little' ? ReaderLE : ReaderBE; 305 | return new constr(reader, options); 306 | } 307 | 308 | export function writer(writer: BaseWriter, options?: WriterOptions): BinaryWriter { 309 | options = options || {}; 310 | const constr: any = options.endian === 'little' ? 
WriterLE : WriterBE; 311 | return new constr(writer, options); 312 | } -------------------------------------------------------------------------------- /src/helpers/index.ts: -------------------------------------------------------------------------------- 1 | import * as binary from './binary'; 2 | export { binary } -------------------------------------------------------------------------------- /src/index.js: -------------------------------------------------------------------------------- 1 | "use strict"; 2 | module.exports = require('./ez'); -------------------------------------------------------------------------------- /src/mappers/convert.md: -------------------------------------------------------------------------------- 1 | ## Encoding mappers 2 | 3 | `import * as ez from 'ez-streams'` 4 | 5 | * `mapper = ez.mappers.convert.stringify(encoding)` 6 | returns a mapper that converts to string 7 | * `mapper = ez.mappers.convert.bufferify(encoding)` 8 | returns a mapper that converts to buffer 9 | -------------------------------------------------------------------------------- /src/mappers/convert.ts: -------------------------------------------------------------------------------- 1 | "use strict"; 2 | /// !doc 3 | /// ## Encoding mappers 4 | /// 5 | /// `import * as ez from 'ez-streams'` 6 | /// 7 | import { _ } from 'streamline-runtime'; 8 | 9 | /// * `mapper = ez.mappers.convert.stringify(encoding)` 10 | /// returns a mapper that converts to string 11 | export function stringify(encoding?: string) { 12 | encoding = encoding || 'utf8'; 13 | return (_: _, data: Buffer) => { 14 | return data.toString(encoding); 15 | } 16 | } 17 | /// * `mapper = ez.mappers.convert.bufferify(encoding)` 18 | /// returns a mapper that converts to buffer 19 | export function bufferify(encoding?: string) { 20 | encoding = encoding || 'utf8'; 21 | return (_: _, data: string) => { 22 | return Buffer.from(data, encoding); 23 | } 24 | } 
--------------------------------------------------------------------------------
/src/mappers/index.ts:
--------------------------------------------------------------------------------
import * as convert from './convert';
import * as json from './json';
export { convert, json }
--------------------------------------------------------------------------------
/src/mappers/json.md:
--------------------------------------------------------------------------------
## JSON mappers

`import * as ez from 'ez-streams'`

* `mapper = ez.mappers.json.parse()`
  returns a mapper that parses JSON string.
  It assumes that the stream has already been split on boundaries that delimit valid JSON strings,
  with an optional separator at the end.
* `mapper = ez.mappers.json.stringify()`
  returns a mapper that converts objects to JSON.
  You can use the `sep` option to specify a separator that will be added at the end of every item.
  By default, `sep` is `,\n`.
--------------------------------------------------------------------------------
/src/mappers/json.ts:
--------------------------------------------------------------------------------
"use strict";
/// !doc
/// ## JSON mappers
///
/// `import * as ez from 'ez-streams'`
///

import { _ } from 'streamline-runtime';

/// * `mapper = ez.mappers.json.parse()`
///   returns a mapper that parses JSON string.
///   It assumes that the stream has already been split on boundaries that delimit valid JSON strings,
///   with an optional separator at the end.
export interface ParseOptions {
	sep?: string; // trailing separator stripped before parsing (default: ',')
	encoding?: string; // encoding used to convert Buffer input (default: 'utf8')
}

export function parse(options?: ParseOptions) {
	const opts = options || {};
	const sep = opts.sep == null ? ',' : opts.sep;
	return (_: _, data: string | Buffer) => {
		var str: string;
		if (Buffer.isBuffer(data)) str = data.toString(opts.encoding || 'utf8');
		else str = data;
		// empty items map to undefined rather than raising a parse error
		if (str === '') return;
		// remove trailing separator, if any
		if (sep && str.substring(str.length - sep.length) === sep) str = str.substring(0, str.length - sep.length);
		return JSON.parse(str);
	}
}
/// * `mapper = ez.mappers.json.stringify()`
///   returns a mapper that converts objects to JSON.
///   You can use the `sep` option to specify a separator that will be added at the end of every item.
///   By default, `sep` is `,\n`.
export interface FormatterOptions {
	sep?: string; // separator appended after every item (default: ',\n')
	replacer?: (key: string, value: any) => any; // forwarded to JSON.stringify
	space?: string; // forwarded to JSON.stringify
}

export function stringify(options?: FormatterOptions) {
	const opts = options || {};
	const sep = opts.sep == null ? ',\n' : opts.sep;
	return (_: _, data: any) => {
		return JSON.stringify(data, opts.replacer, opts.space) + sep;
	}
}
--------------------------------------------------------------------------------
/src/node-wrappers.md:
--------------------------------------------------------------------------------

# Wrappers for node.js streams

These wrappers implement a _pull style_ API.
For readable streams, instead of having the stream _push_ the data to its consumer by emitting `data` and `end` events,
the wrapper lets the consumer _pull_ the data from the stream by calling asynchronous `read` methods.
The wrapper takes care of the low level `pause`/`resume` logic.

Similarly, for writable streams, the wrapper provides a simple asynchronous `write` method and takes
care of the low level `drain` logic.
11 | 
12 | For more information on this design, 
13 | see [this blog post](http://bjouhier.wordpress.com/2011/04/25/asynchronous-episode-3-adventures-in-event-land/) 
14 | 
15 | For a simple example of this API in action, 
16 | see the [google client example](../../../examples/streams/googleClient._js) 
17 | 
18 | ## Wrapper 
19 | 
20 | Base wrapper for all objects that emit an `end` or `close` event. 
21 | All stream wrappers derive from this wrapper. 
22 | 
23 | * `wrapper = new streams.Wrapper(stream)` 
24 |   creates a wrapper. 
25 | * `emitter = wrapper.emitter` 
26 |   returns the underlying emitter. The emitter stream can be used to attach additional observers. 
27 | * `closed = wrapper.closed` 
28 |   returns true if the `close` event has been received. 
29 | * `emitter = wrapper.unwrap()` 
30 |   unwraps and returns the underlying emitter. 
31 |   The wrapper should not be used after this call. 
32 | 
33 | ## ReadableStream 
34 | 
35 | All readable stream wrappers derive from this wrapper. 
36 | 
37 | * `stream = new streams.ReadableStream(stream[, options])` 
38 |   creates a readable stream wrapper. 
39 | * `stream.setEncoding(enc)` 
40 |   sets the encoding. 
41 |   returns `this` for chaining. 
42 | * `data = stream.read(_[, len])` 
43 |   reads asynchronously from the stream and returns a `string` or a `Buffer` depending on the encoding. 
44 |   If a `len` argument is passed, the `read` call returns when `len` characters or bytes 
45 |   (depending on encoding) have been read, or when the underlying stream has emitted its `end` event 
46 |   (so it may return less than `len` bytes or chars). 
47 |   Reads till the end if `len` is negative. 
48 |   Without `len`, the read call returns the data chunks as they have been emitted by the underlying stream. 
49 |   Once the end of stream has been reached, the `read` call returns `null`. 
50 | * `data = stream.readAll(_)` 
51 |   reads till the end of stream. 
52 |   Equivalent to `stream.read(_, -1)`. 
53 | * `stream.unread(chunk)` 
54 |   pushes the chunk back to the stream. 
55 | returns `this` for chaining. 56 | * `len = stream.available()` 57 | returns the number of bytes/chars that have been received and not read yet. 58 | * `reader = stream.reader` 59 | returns a clean ez reader. 60 | 61 | ## WritableStream 62 | 63 | All writable stream wrappers derive from this wrapper. 64 | 65 | * `stream = new streams.WritableStream(stream[, options])` 66 | creates a writable stream wrapper. 67 | * `stream.write(_, data[, enc])` 68 | Writes the data. 69 | This operation is asynchronous because it _drains_ the stream if necessary. 70 | Returns `this` for chaining. 71 | * `stream.end()` 72 | signals the end of the send operation. 73 | Returns `this` for chaining. 74 | * `writer = stream.writer` 75 | returns a clean ez writer. 76 | 77 | ## HttpServerRequest 78 | 79 | This is a wrapper around node's `http.ServerRequest`: 80 | This stream is readable (see `ReadableStream` above). 81 | 82 | * `request = new streams.HttpServerRequest(req[, options])` 83 | returns a wrapper around `req`, an `http.ServerRequest` object. 84 | The `options` parameter can be used to pass `lowMark` and `highMark` values, or 85 | to control encoding detection (see section below). 86 | * `method = request.method` 87 | * `url = request.url` 88 | * `headers = request.headers` 89 | * `trailers = request.trailers` 90 | * `httpVersion = request.httpVersion` 91 | * `connection = request.connection` 92 | * `socket = request.socket` 93 | (same as `http.ServerRequest`) 94 | 95 | ## HttpServerResponse 96 | 97 | This is a wrapper around node's `http.ServerResponse`. 98 | This stream is writable (see `WritableStream` above). 99 | 100 | * `response = new streams.HttpServerResponse(resp[, options])` 101 | returns a wrapper around `resp`, an `http.ServerResponse` object. 
102 | * `response.writeContinue()` 
103 | * `response.writeHead(head)` 
104 | * `response.setHeader(name, value)` 
105 | * `value = response.getHeader(head)` 
106 | * `response.removeHeader(name)` 
107 | * `response.addTrailers(trailers)` 
108 | * `response.statusCode = value` 
109 |   (same as `http.ServerResponse`) 
110 | 
111 | ## HttpServer 
112 | 
113 | This is a wrapper around node's `http.Server` object: 
114 | 
115 | * `server = streams.createHttpServer(requestListener[, options])` 
116 |   creates the wrapper. 
117 |   `requestListener` is called as `requestListener(request, response, _)` 
118 |   where `request` and `response` are wrappers around `http.ServerRequest` and `http.ServerResponse`. 
119 |   A fresh empty global context is set before every call to `requestListener`. See [Global context API](https://github.com/Sage/streamline-runtime/blob/master/index.md). 
120 | * `server.listen(_, port[, host])` 
121 | * `server.listen(_, path)` 
122 |   (same as `http.Server`) 
123 | 
124 | ## HttpClientResponse 
125 | 
126 | This is a wrapper around node's `http.ClientResponse` 
127 | 
128 | This stream is readable (see `ReadableStream` above). 
129 | 
130 | * `response = new HttpClientResponse(resp, options)` 
131 |   wraps a node response object. 
132 |   `options.detectEncoding` can be used to control encoding detection (see section below). 
133 | * `response = request.response(_)` 
134 |   returns the response stream. 
135 | * `status = response.statusCode` 
136 |   returns the HTTP status code. 
137 | * `version = response.httpVersion` 
138 |   returns the HTTP version. 
139 | * `headers = response.headers` 
140 |   returns the HTTP response headers. 
141 | * `trailers = response.trailers` 
142 |   returns the HTTP response trailers. 
143 | * `response.checkStatus(statuses)` 
144 |   throws an error if the status is not in the `statuses` array. 
145 |   If only one status is expected, it may be passed directly as an integer rather than as an array. 
146 |   Returns `this` for chaining. 
147 | 
148 | ## HttpClientRequest 
149 | 
150 | This is a wrapper around node's `http.ClientRequest`. 
151 | 
152 | This stream is writable (see `WritableStream` above). 
153 | 
154 | * `request = streams.httpRequest(options)` 
155 |   creates the wrapper. 
156 |   The options are the following: 
157 |   * `method`: the HTTP method, `'GET'` by default. 
158 |   * `headers`: the HTTP headers. 
159 |   * `url`: the requested URL (with query string if necessary). 
160 |   * `proxy.url`: the proxy URL. 
161 |   * `lowMark` and `highMark`: low and high water mark values for buffering (in bytes or characters depending 
162 |     on encoding). 
163 |     Note that these values are only hints as the data is received in chunks. 
164 | * `response = request.response(_)` 
165 |   returns the response. 
166 | * `request.abort()` 
167 |   aborts the request. 
168 | 
169 | ## NetStream 
170 | 
171 | This is a wrapper around streams returned by TCP and socket clients: 
172 | 
173 | These streams are both readable and writable (see `ReadableStream` and `WritableStream` above). 
174 | 
175 | * `stream = new streams.NetStream(stream[, options])` 
176 |   creates a network stream wrapper. 
177 | 
178 | ## TCP and Socket clients 
179 | 
180 | These are wrappers around node's `net.createConnection`: 
181 | 
182 | * `client = streams.tcpClient(port, host[, options])` 
183 |   returns a TCP connection client. 
184 | * `client = streams.socketClient(path[, options])` 
185 |   returns a socket client. 
186 |   The `options` parameter of the constructor provides options for the stream (`lowMark` and `highMark`). 
187 |   If you want different options for `read` and `write` operations, you can specify them by creating `options.read` and `options.write` sub-objects inside `options`. 
188 | * `stream = client.connect(_)` 
189 |   connects the client and returns a network stream. 
190 | 191 | ## NetServer 192 | 193 | This is a wrapper around node's `net.Server` object: 194 | 195 | * `server = streams.createNetServer([serverOptions,] connectionListener [, streamOptions])` 196 | creates the wrapper. 197 | `connectionListener` is called as `connectionListener(stream, _)` 198 | where `stream` is a `NetStream` wrapper around the native connection. 199 | A fresh empty global context is set before every call to `connectionListener`. See [Global context API](https://github.com/Sage/streamline-runtime/blob/master/index.md). 200 | * `server.listen(_, port[, host])` 201 | * `server.listen(_, path)` 202 | (same as `net.Server`) 203 | 204 | ## try/finally wrappers and pump 205 | 206 | * `result = streams.using(_, constructor, stream[, options], fn)` 207 | wraps `stream` with an instance of `constructor`; 208 | passes the wrapper to `fn(_, wrapper)` and closes the stream after `fn` returns. 209 | `fn` is called inside a `try/finally` block to guarantee that the stream is closed in all cases. 210 | Returns the value returned by `fn`. 211 | * `result = streams.usingReadable(_, stream[, options], fn)` 212 | shortcut for `streams.using(_, streams.ReadableStream, stream, options, fn)` 213 | * `result = streams.usingWritable(_, stream[, options], fn)` 214 | shortcut for `streams.using(_, streams.WritableStream, stream, options, fn)` 215 | * `streams.pump(_, inStream, outStream)` 216 | Pumps from `inStream` to `outStream`. 217 | Does not close the streams at the end. 218 | 219 | ## Encoding detection 220 | 221 | The `options.detectEncoding` option controls how the encoding is sent by the 222 | `HttpServerRequest` and `HttpClientResponse` constructors. 223 | This option can take the following values: 224 | 225 | * `strict`: the RFC-2616-7.2.1 rules are applied. 226 | * `default`: the default algorithm used by streamline v0.4 is used. 
227 | This algorithm is more lenient and sets the encoding to `utf8` when text content is detected, even 228 | if there is no charset indication. 229 | * `disable`: null is always returned and the stream is always handled in binary mode (buffers rather than strings). 230 | * a function. This is a hook for custom encoding detection. 231 | The function is called as `fn(headers)` and returns the encoding. 232 | 233 | -------------------------------------------------------------------------------- /src/predicate.ts: -------------------------------------------------------------------------------- 1 | import { _ } from "streamline-runtime"; 2 | 3 | export interface Options { 4 | allowEval?: boolean; 5 | } 6 | 7 | export type Predicate = (_: _, val: any) => boolean; 8 | export type Op = (val: any, parent?: any) => Predicate; 9 | 10 | export function converter(options?: Options) { 11 | const opts = options || {}; 12 | 13 | const pfalse: Predicate = (_, obj) => false; 14 | const ptrue: Predicate = (_, obj) => true; 15 | 16 | const ops: { [name: string]: Op } = { 17 | $eq: (val) => ((_, v) => v == val), 18 | $ne: (val) => ((_, v) => v != val), 19 | $gt: (val) => ((_, v) => v > val), 20 | $gte: (val) => ((_, v) => v >= val), 21 | $lt: (val) => ((_, v) => v < val), 22 | $lte: (val) => ((_, v) => v <= val), 23 | $in: (val) => ((_, v) => val.indexOf(v) >= 0), 24 | $nin: (val) => ((_, v) => val.indexOf(v) < 0), 25 | $and: (val) => and(val.map(convert)), 26 | $or: (val) => or(val.map(convert)), 27 | $nor: (val) => not(or(val.map(convert))), 28 | $not: (val) => not(convert(val)), 29 | $exists: (val) => ((_, v) => val in v), 30 | $type: (val) => ((_, v) => typeof v === val), 31 | $mod: (val) => ((_, v) => v % val[0] === val[1]), 32 | $regex: (val, parent) => { 33 | const re = new RegExp(val, parent.$options || ""); 34 | return (_, v) => re.test(v); 35 | }, 36 | $options: (val, parent) => { 37 | if (parent.$regex == null) throw new Error("$options without $regex"); 38 | return ptrue; 
39 | }, 40 | /*$text: (val) => { 41 | throw new Error("$text not supported"); 42 | },*/ 43 | /* $where: applies a JS predicate to the candidate value; string sources are compiled only when opts.allowEval is set, otherwise rejected */ $where: (val) => { 44 | if (typeof val !== "function") { 45 | if (opts.allowEval) val = new Function("return (" + val + ")"); 46 | else throw new Error("$where value is not a function"); 47 | } 48 | return (_, v) => val.call(v); 49 | }, 50 | /* $elemMatch: the sub-query must match at least one array element */ $elemMatch: (val) => { 51 | const pred = convert(val); 52 | return (_, v) => { 53 | // if v is not array, treat it as single element array 54 | if (!Array.isArray(v)) return pred(_, v); 55 | return v.some_(_, pred); 56 | }; 57 | }, 58 | /* $all: every listed sub-query must match some element (each entry goes through $elemMatch) */ $all: (val) => { 59 | if (!Array.isArray(val)) throw new Error("$all value is not an array"); 60 | return and(val.map(ops['$elemMatch'])); 61 | }, 62 | /* $size: matches on the value's length property */ $size: (val) => compose(ops['$eq'](val), deref('length')), 63 | 64 | // geospatial operators not supported 65 | } 66 | 67 | /* --- predicate combinators --- */ const re_test = (re: RegExp) => ((_: _, val: any) => re.test(val)); 68 | const not = (predicate: Predicate) => ((_: _, obj: any) => !predicate(_, obj)); 69 | 70 | /* empty $or matches nothing; some_/every_ are presumably streamline-runtime's async Array helpers — confirm */ const or = (predicates: Predicate[]) => { 71 | if (predicates.length === 0) return pfalse; 72 | if (predicates.length === 1) return predicates[0]; 73 | return (_: _, obj: any) => predicates.some_(_, (_, predicate) => predicate(_, obj)); 74 | } 75 | 76 | /* empty $and matches everything */ const and = (predicates: Predicate[]) => { 77 | if (predicates.length === 0) return ptrue; 78 | if (predicates.length === 1) return predicates[0]; 79 | return (_: _, obj: any) => predicates.every_(_, (_, predicate) => predicate(_, obj)); 80 | } 81 | 82 | /* compose(f, g): evaluates g first, then feeds its result to f */ const compose = (f: Predicate, g: Predicate) => ((_: _, obj: any) => f(_, g(_, obj))); 83 | 84 | /* deref: reads obj[key]; function-valued properties are invoked as async getters */ const deref = (key: string) => ((_: _, obj: any) => { 85 | if (obj == null) return undefined; 86 | const v = obj[key]; 87 | return typeof v === "function" ?
v(_) : v; 88 | }); 89 | 90 | const walk: (p: string) => Predicate = (p) => { 91 | const i = p.indexOf('.'); 92 | if (i >= 0) { 93 | return compose(walk(p.substring(i + 1)), walk(p.substring(0, i))); 94 | } else { 95 | return deref(p); 96 | } 97 | } 98 | 99 | const convert: (val: any) => Predicate = (val) => { 100 | if (val instanceof RegExp) { 101 | return re_test(val); 102 | } else if (typeof val === "object" && val) { 103 | return and(Object.keys(val).map((k) => { 104 | const v = val[k]; 105 | if (k[0] === '$') { 106 | if (!ops[k]) throw new Error("bad operator: " + k); 107 | return ops[k](v, val); 108 | } else { 109 | return compose(convert(v), walk(k)); 110 | } 111 | })); 112 | } else { 113 | return ops['$eq'](val); 114 | } 115 | }; 116 | return convert; 117 | }; 118 | 119 | 120 | export const convert = converter(); 121 | -------------------------------------------------------------------------------- /src/reader.md: -------------------------------------------------------------------------------- 1 | ## EZ Streams core reader API 2 | 3 | `import * as ez from 'ez-streams'` 4 | 5 | * `ez.reader.decorate(proto)` 6 | Adds the EZ streams reader API to an object. 7 | Usually the object is a prototype but it may be any object with a `read(_)` method. 8 | You do not need to call this function if you create your readers with 9 | the `ez.devices` modules. 10 | Returns `proto` for convenience. 11 | * `count = reader.forEach(_, fn, thisObj)` 12 | Similar to `forEach` on arrays. 13 | The `fn` function is called as `fn(_, elt, i)`. 14 | This call is asynchonous. It returns the number of entries processed when the end of stream is reached. 15 | * `reader = reader.map(fn, thisObj)` 16 | Similar to `map` on arrays. 17 | The `fn` function is called as `fn(_, elt, i)`. 18 | Returns another reader on which other operations may be chained. 19 | * `result = reader.every(_, fn, thisObj)` 20 | Similar to `every` on arrays. 21 | The `fn` function is called as `fn(_, elt)`. 
22 | Returns true at the end of stream if `fn` returned true on every entry. 23 | Stops streaming and returns false as soon as `fn` returns false on an entry. 24 | * `result = reader.some(_, fn, thisObj)` 25 | Similar to `some` on arrays. 26 | The `fn` function is called as `fn(_, elt)`. 27 | Returns false at the end of stream if `fn` returned false on every entry. 28 | Stops streaming and returns true as soon as `fn` returns true on an entry. 29 | * `result = reader.reduce(_, fn, initial, thisObj)` 30 | Similar to `reduce` on arrays. 31 | The `fn` function is called as `fn(_, current, elt)` where `current` is `initial` on the first entry and 32 | the result of the previous `fn` call otherwise. 33 | Returns the value returned by the last `fn` call. 34 | * `writer = reader.pipe(_, writer)` 35 | Pipes from `stream` to `writer`. 36 | Returns the writer for chaining. 37 | * `reader = reader.tee(writer)` 38 | Branches another writer on the chain`. 39 | Returns another reader on which other operations may be chained. 40 | * `readers = reader.dup()` 41 | Duplicates a reader and returns a pair of readers which can be read from independently. 42 | * `reader = reader.concat(reader1, reader2)` 43 | Concatenates reader with one or more readers. 44 | Works like array.concat: you can pass the readers as separate arguments, or pass an array of readers. 45 | * `result = reader.toArray(_)` 46 | Reads all entries and returns them to an array. 47 | Note that this call is an anti-pattern for streaming but it may be useful when working with small streams. 48 | * `result = reader.readAll(_)` 49 | Reads all entries and returns them as a single string or buffer. Returns undefined if nothing has been read. 50 | Note that this call is an anti-pattern for streaming but it may be useful when working with small streams. 51 | * `reader = reader.transform(fn)` 52 | Inserts an asynchronous transformation into chain. 
53 | This API is more powerful than `map` because the transformation function can combine results, split them, etc. 54 | The transformation function `fn` is called as `fn(_, reader, writer)` 55 | where `reader` is the `stream` to which `transform` is applied, 56 | and writer is a writer which is piped into the next element of the chain. 57 | Returns another reader on which other operations may be chained. 58 | * `result = reader.filter(fn, thisObj)` 59 | Similar to `filter` on arrays. 60 | The `fn` function is called as `fn(_, elt, i)`. 61 | Returns another reader on which other operations may be chained. 62 | * `result = reader.until(fn, testVal, thisObj, stopArg)` 63 | Cuts the stream by when the `fn` condition becomes true. 64 | The `fn` function is called as `fn(_, elt, i)`. 65 | `stopArg` is an optional argument which is passed to `stop` when `fn` becomes true. 66 | Returns another reader on which other operations may be chained. 67 | * `result = reader.while(fn, testVal, thisObj, stopArg)` 68 | Cuts the stream by when the `fn` condition becomes false. 69 | This is different from `filter` in that the result streams _ends_ when the condition 70 | becomes false, instead of just skipping the entries. 71 | The `fn` function is called as `fn(_, elt, i)`. 72 | `stopArg` is an optional argument which is passed to `stop` when `fn` becomes false. 73 | Returns another reader on which other operations may be chained. 74 | * `result = reader.limit(count, stopArg)` 75 | Limits the stream to produce `count` results. 76 | `stopArg` is an optional argument which is passed to `stop` when the limit is reached. 77 | Returns another reader on which other operations may be chained. 78 | * `result = reader.skip(count)` 79 | Skips the first `count` entries of the reader. 80 | Returns another reader on which other operations may be chained. 
81 | * `group = reader.fork(consumers)` 82 | Forks the steam and passes the values to a set of consumers, as if each consumer 83 | had its own copy of the stream as input. 84 | `consumers` is an array of functions with the following signature: `reader = consumer(source)` 85 | Returns a `StreamGroup` on which other operations can be chained. 86 | * `group = reader.parallel(count, consumer)` 87 | Parallelizes by distributing the values to a set of `count` identical consumers. 88 | `count` is the number of consumers that will be created. 89 | `consumer` is a function with the following signature: `reader = consumer(source)` 90 | Returns a `StreamGroup` on which other operations can be chained. 91 | Note: transformed entries may be delivered out of order. 92 | * `reader = reader.peekable()` 93 | Returns a stream which has been extended with two methods to support lookahead. 94 | The lookahead methods are: 95 | - `reader.peek(_)`: same as `read(_)` but does not consume the item. 96 | - `reader.unread(val)`: pushes `val` back so that it will be returned by the next `read(_)` 97 | * `reader = reader.buffer(max)` 98 | Returns a stream which is identical to the original one but in which up to `max` entries may have been buffered. 99 | * `stream = reader.nodify()` 100 | converts the reader into a native node Readable stream. 101 | * `reader = reader.nodeTransform(duplex)` 102 | pipes the reader into a node duplex stream. Returns another reader. 103 | * `cmp = reader1.compare(_, reader2)` 104 | compares reader1 and reader2 return 0 if equal, 105 | * `reader.stop(_, arg)` 106 | Informs the source that the consumer(s) has(ve) stopped reading. 107 | The source should override this method if it needs to free resources when the stream ends. 108 | `arg` is an optional argument. 109 | If `arg` is falsy and the reader has been forked (or teed) upstream, only this reader stops (silently). 
110 | If `arg` is true, readers that have been forked upstream are stopped silently (their `read` returns undefined). 111 | Otherwise `arg` should be an error object which will be thrown when readers that have been forked upstream try to read. 112 | The default `stop` function is a no-op. 113 | Note: `stop` is only called if reading stops before reaching the end of the stream. 114 | Sources should free their resources both on `stop` and on end-of-stream. 115 | ## StreamGroup API 116 | * `reader = group.dequeue()` 117 | Dequeues values in the order in which they are delivered by the readers. 118 | Returns a stream on which other operations may be chained. 119 | * `reader = group.rr()` 120 | Dequeues values in round robin fashion. 121 | Returns a stream on which other operations may be chained. 122 | * `reader = group.join(fn, thisObj)` 123 | Combines the values read from the readers to produce a single value. 124 | `fn` is called as `fn(_, values)` where `values` is the set of values produced by 125 | all the readers that are still active. 126 | `fn` returns the value which will be read from the joined stream. `fn` _must_ also reset to `undefined` the `values` entries 127 | that it has consumed. The next `read(_)` on the joined stream will fetch these values. 128 | Note that the length of the `values` array will decrease every time an input stream is exhausted. 129 | Returns a stream on which other operations may be chained. 
130 | -------------------------------------------------------------------------------- /src/stop-exception.ts: -------------------------------------------------------------------------------- 1 | "use strict"; 2 | 3 | // don't subclass Error because we don't want the overhead of a stack capture 4 | class StopException { 5 | arg: any; 6 | constructor(arg: any) { 7 | this.arg = arg; 8 | } 9 | get message() { 10 | return "stream stopped"; 11 | } 12 | get stack() { 13 | return "stream stopped\n\t"; 14 | } 15 | } 16 | 17 | 18 | export function unwrap(ex: any) { return ex instanceof StopException ? ex.arg : ex; } 19 | 20 | export function make(arg: any) { return (!arg || arg === true) ? new StopException(arg) : arg; } 21 | -------------------------------------------------------------------------------- /src/transforms/csv.md: -------------------------------------------------------------------------------- 1 | ## Stream transform for CSV files 2 | 3 | `import * as ez from 'ez-streams'` 4 | 5 | * `transform = ez.transforms.csv.parser(options)` 6 | creates a parser transform. The following options can be set: 7 | - `sep`: the field separator, comma by default 8 | * `transform = ez.transforms.csv.formatter(options)` 9 | creates a formatter transform. The following options can be set: 10 | - `sep`: the field separator, comma by default 11 | - `eol`: the end of line marker (`\n` or `\r\n`) 12 | -------------------------------------------------------------------------------- /src/transforms/csv.ts: -------------------------------------------------------------------------------- 1 | "use strict"; 2 | /// !doc 3 | /// ## Stream transform for CSV files 4 | /// 5 | /// `import * as ez from 'ez-streams'` 6 | /// 7 | import { _ } from "streamline-runtime"; 8 | import { Reader } from "../reader"; 9 | import { Writer } from "../writer"; 10 | import * as lines from './lines'; 11 | 12 | /// * `transform = ez.transforms.csv.parser(options)` 13 | /// creates a parser transform. 
The following options can be set: 14 | /// - `sep`: the field separator, comma by default 15 | export interface ParserOptions { 16 | sep?: string; 17 | encoding?: string; 18 | } 19 | 20 | export function parser(options?: ParserOptions) { 21 | const opts = options || {}; 22 | const sep = opts.sep || ','; 23 | return (_: _, reader: Reader, writer: Writer) => { 24 | const rd = reader.transform(lines.parser()); 25 | const keys = (rd.read(_) || '').split(sep); 26 | rd.forEach(_, (_, line) => { 27 | // ignore empty line (we get one at the end if file is terminated by newline) 28 | if (line.length === 0) return; 29 | const values = line.split(sep); 30 | const obj: any = {}; 31 | keys.forEach((key, i) => { 32 | obj[key] = values[i]; 33 | }); 34 | writer.write(_, obj); 35 | }); 36 | }; 37 | } 38 | /// * `transform = ez.transforms.csv.formatter(options)` 39 | /// creates a formatter transform. The following options can be set: 40 | /// - `sep`: the field separator, comma by default 41 | /// - `eol`: the end of line marker (`\n` or `\r\n`) 42 | export interface FormatterOptions { 43 | sep?: string; 44 | eol?: string; 45 | } 46 | 47 | export function formatter(options?: FormatterOptions) { 48 | const opts = options || {}; 49 | const sep = opts.sep || ','; 50 | const eol = opts.eol || '\n'; 51 | return (_: _, reader: Reader, writer: Writer) => { 52 | var obj = reader.read(_); 53 | if (!obj) return; 54 | const keys = Object.keys(obj); 55 | writer.write(_, keys.join(sep) + eol); 56 | do { 57 | var values = keys.map((key) => obj[key]); 58 | writer.write(_, values.join(sep) + eol); 59 | } while ((obj = reader.read(_)) !== undefined); 60 | }; 61 | } 62 | -------------------------------------------------------------------------------- /src/transforms/cut.md: -------------------------------------------------------------------------------- 1 | ## Transform to cut string and binary streams 2 | 3 | `import * as ez from 'ez-streams'` 4 | 5 | * `transform = ez.transforms.cut(options)` 6 | 
cuts a string or binary stream into chunks of equal size
export { csv, cut, json, lines, multipart, xml } -------------------------------------------------------------------------------- /src/transforms/json.md: -------------------------------------------------------------------------------- 1 | Stream transform for simple JSON streams 2 | 3 | ## "Simple" JSON streams 4 | 5 | A _simple JSON stream_ is a text stream with the following format: 6 | 7 | * the stream starts with `[` and ends with `]` 8 | * items are serialized in JSON format and separated by commas 9 | 10 | In other words, the whole stream is just a valid JSON array. 11 | 12 | There is no special constraint on spaces or line breaks, nor on items. Items are usually objects but they may also be simple values, arrays or even nulls. Items may or may not be separated by lines. Any valid JSON array is a valid _simple JSON stream_. 13 | 14 | For example the following is a valid simple JSON stream: 15 | 16 | ``` json 17 | [{ "firstName": "Jimy", "lastName": "Hendrix" }, 18 | { "firstName": "Jim", "lastName": "Morrison" }, 19 | "people are strange", 27, null, 20 | { "firstName": "Janis", 21 | "lastName": "Joplin" }, 22 | [1, 2, 3, 23 | 5, 8, 13], 24 | true] 25 | ``` 26 | 27 | ## Unbounded streams 28 | 29 | Sometimes it is preferable to omit the `[` and `]` delimiters and to systematically append a comma after every entry, even after the last one. For example this is a better format for log files as it makes it easy to append entries. 30 | 31 | This alternate format can be obtained by passing an `unbounded: true` option when creating the reader or the writer. 
32 | 33 | Here is an example of a normal, _bounded_, simple JSON stream: 34 | 35 | ``` 36 | [{ "firstName": "Jimy", "lastName": "Hendrix" }, 37 | { "firstName": "Jim", "lastName": "Morrison" }, 38 | { "firstName": "Janis", "lastName": "Joplin" }] 39 | ``` 40 | 41 | and the corresponding _unbounded_ stream: 42 | 43 | ``` 44 | { "firstName": "Jimy", "lastName": "Hendrix" }, 45 | { "firstName": "Jim", "lastName": "Morrison" }, 46 | { "firstName": "Janis", "lastName": "Joplin" }, 47 | ``` 48 | 49 | ## API 50 | 51 | `import * as ez from 'ez-streams'` 52 | 53 | * `transform = ez.transforms.json.parser(options)` 54 | creates a parser transform. The following options can be set: 55 | - `unbounded`: use _unbounded_ format 56 | - `reviver`: reviver function which is passed to [JSON.parse](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/JSON/parse) 57 | * `transform = ez.transforms.json.formatter(options)` 58 | creates a formatter transform. The following options can be set: 59 | - `unbounded`: use _unbounded_ format 60 | - `replacer`: replacer function which is passed to [JSON.stringify](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/JSON/parse) 61 | - `space`: space formatting directive which is passed to JSON.stringify. 62 | -------------------------------------------------------------------------------- /src/transforms/json.ts: -------------------------------------------------------------------------------- 1 | "use strict"; 2 | /// !doc 3 | /// Stream transform for simple JSON streams 4 | /// 5 | /// ## "Simple" JSON streams 6 | /// 7 | /// A _simple JSON stream_ is a text stream with the following format: 8 | /// 9 | /// * the stream starts with `[` and ends with `]` 10 | /// * items are serialized in JSON format and separated by commas 11 | /// 12 | /// In other words, the whole stream is just a valid JSON array. 13 | /// 14 | /// There is no special constraint on spaces or line breaks, nor on items. 
Items are usually objects but they may also be simple values, arrays or even nulls. Items may or may not be separated by lines. Any valid JSON array is a valid _simple JSON stream_. 15 | /// 16 | /// For example the following is a valid simple JSON stream: 17 | /// 18 | /// ``` json 19 | /// [{ "firstName": "Jimy", "lastName": "Hendrix" }, 20 | /// { "firstName": "Jim", "lastName": "Morrison" }, 21 | /// "people are strange", 27, null, 22 | /// { "firstName": "Janis", 23 | /// "lastName": "Joplin" }, 24 | /// [1, 2, 3, 25 | /// 5, 8, 13], 26 | /// true] 27 | /// ``` 28 | /// 29 | /// ## Unbounded streams 30 | /// 31 | /// Sometimes it is preferable to omit the `[` and `]` delimiters and to systematically append a comma after every entry, even after the last one. For example this is a better format for log files as it makes it easy to append entries. 32 | /// 33 | /// This alternate format can be obtained by passing an `unbounded: true` option when creating the reader or the writer. 34 | /// 35 | /// Here is an example of a normal, _bounded_, simple JSON stream: 36 | /// 37 | /// ``` 38 | /// [{ "firstName": "Jimy", "lastName": "Hendrix" }, 39 | /// { "firstName": "Jim", "lastName": "Morrison" }, 40 | /// { "firstName": "Janis", "lastName": "Joplin" }] 41 | /// ``` 42 | /// 43 | /// and the corresponding _unbounded_ stream: 44 | /// 45 | /// ``` 46 | /// { "firstName": "Jimy", "lastName": "Hendrix" }, 47 | /// { "firstName": "Jim", "lastName": "Morrison" }, 48 | /// { "firstName": "Janis", "lastName": "Joplin" }, 49 | /// ``` 50 | /// 51 | /// ## API 52 | /// 53 | /// `import * as ez from 'ez-streams'` 54 | /// 55 | import { _ } from "streamline-runtime"; 56 | import { Reader } from "../reader"; 57 | import { Writer } from "../writer"; 58 | /// * `transform = ez.transforms.json.parser(options)` 59 | /// creates a parser transform. 
The following options can be set: 60 | /// - `unbounded`: use _unbounded_ format 61 | /// - `reviver`: reviver function which is passed to [JSON.parse](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/JSON/parse) 62 | export interface ParserOptions { 63 | size?: number; 64 | encoding?: string; 65 | reviver?: (key: any, value: any) => any; 66 | unbounded?: boolean; 67 | } 68 | 69 | export function parser(options?: ParserOptions) { 70 | const opts = options || {}; 71 | 72 | return (_: _, reader: Reader, writer: Writer) => { 73 | function read(_: _) { 74 | const data = reader.read(_); 75 | return Buffer.isBuffer(data) ? data.toString(opts.encoding || 'utf8') : data; 76 | } 77 | var pos = 0, 78 | chunk = read(_), 79 | beg: number | undefined, collected = "", 80 | line = 1, 81 | depth = 1, 82 | quoted = false, 83 | escape = false, 84 | ch: string | undefined; 85 | 86 | function error(msg: string) { 87 | throw new Error(line + ": " + msg + " near " + (chunk ? chunk.substring(pos, pos + 20) : "")); 88 | } 89 | 90 | function peekch(_: _) { 91 | if (!chunk) return undefined; 92 | if (pos >= chunk.length) { 93 | if (beg !== undefined) { 94 | collected += chunk.substring(beg); 95 | beg = 0; 96 | } 97 | chunk = read(_); 98 | if (!chunk) return undefined; 99 | pos = 0; 100 | } 101 | return chunk[pos]; 102 | } 103 | 104 | function skipSpaces(_: _) { 105 | var ch: string | undefined; 106 | while ((ch = peekch(_)) !== undefined && /^\s/.test(ch)) { 107 | line += ch === '\n' ? 
1 : 0; 108 | pos++; 109 | } 110 | return ch; 111 | } 112 | 113 | 114 | function flush(_: _) { 115 | if (chunk === undefined || beg === undefined) return; 116 | collected += chunk.substring(beg, pos); 117 | const val = JSON.parse(collected, opts.reviver); 118 | writer.write(_, val); 119 | beg = undefined; 120 | collected = ""; 121 | } 122 | 123 | ch = skipSpaces(_); 124 | if (!opts.unbounded) { 125 | if (ch !== '[') throw error("expected [, got " + ch); 126 | pos++; 127 | } else { 128 | if (ch === undefined) return; 129 | } 130 | 131 | while (true) { 132 | ch = peekch(_); 133 | if (escape) { 134 | escape = false; 135 | } else if (quoted) { 136 | if (ch === '\\') escape = true; 137 | else if (ch === '"') { 138 | quoted = false; 139 | } 140 | } else { 141 | switch (ch) { 142 | case undefined: 143 | if (depth === 1 && opts.unbounded && beg === undefined) return; 144 | else throw error("unexpected EOF"); 145 | case '"': 146 | if (depth === 1 && beg === undefined) beg = pos; 147 | quoted = true; 148 | break; 149 | case '{': 150 | case '[': 151 | if (depth === 1 && beg === undefined) beg = pos; 152 | depth++; 153 | break; 154 | case '}': 155 | depth--; 156 | if (depth === 0) throw error("unexpected }"); 157 | break; 158 | case ']': 159 | depth--; 160 | if (depth === 0) { 161 | if (opts.unbounded) throw error("unexpected ]"); 162 | if (beg !== undefined) flush(_); 163 | return; 164 | } 165 | break; 166 | case ',': 167 | if (depth === 1) { 168 | if (beg === undefined) throw error("unexpected comma"); 169 | flush(_); 170 | } 171 | break; 172 | default: 173 | if (/^\s/.test(ch)) line += (ch === '\n') ? 1 : 0; 174 | else if (depth === 1 && beg === undefined) beg = pos; 175 | } 176 | } 177 | pos++; 178 | } 179 | } 180 | } 181 | 182 | /// * `transform = ez.transforms.json.formatter(options)` 183 | /// creates a formatter transform. 
The following options can be set: 184 | /// - `unbounded`: use _unbounded_ format 185 | /// - `replacer`: replacer function which is passed to [JSON.stringify](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/JSON/parse) 186 | /// - `space`: space formatting directive which is passed to JSON.stringify. 187 | export interface FormatterOptions { 188 | unbounded?: boolean; 189 | replacer?: (key: string, value: any) => any; 190 | space?: string; 191 | } 192 | 193 | export function formatter(options?: FormatterOptions) { 194 | const opts = options || {}; 195 | return (_: _, reader: Reader, writer: Writer) => { 196 | if (!opts.unbounded) writer.write(_, '['); 197 | reader.forEach(_, (_, obj, i) => { 198 | if (i > 0) writer.write(_, ',\n'); 199 | writer.write(_, JSON.stringify(obj, opts.replacer, opts.space)); 200 | }); 201 | writer.write(_, opts.unbounded ? ',' : ']'); 202 | writer.write(_, undefined); 203 | } 204 | } -------------------------------------------------------------------------------- /src/transforms/lines.md: -------------------------------------------------------------------------------- 1 | ## Stream transform for line-oriented text streams 2 | 3 | `import * as ez from 'ez-streams'` 4 | 5 | * `transform = ez.transforms.lines.parser(options)` 6 | creates a parser transform. 7 | `options` is reserved for future use. 8 | * `transform = ez.transforms.lines.formatter(options)` 9 | creates a formatter transform. 10 | `options.eol` defines the line separator. It is set to `\n` by default. 11 | `options.extra` indicates if an extra line separator must be emitted or not at the end. It is false by default. 
12 | -------------------------------------------------------------------------------- /src/transforms/lines.ts: -------------------------------------------------------------------------------- 1 | "use strict"; 2 | /// !doc 3 | /// ## Stream transform for line-oriented text streams 4 | /// 5 | /// `import * as ez from 'ez-streams'` 6 | /// 7 | /// * `transform = ez.transforms.lines.parser(options)` 8 | /// creates a parser transform. 9 | /// `options` is reserved for future use. 10 | import { _ } from "streamline-runtime"; 11 | import { Reader } from "../reader"; 12 | import { Writer } from "../writer"; 13 | 14 | export interface ParserOptions { 15 | sep?: string; 16 | encoding?: string; 17 | } 18 | 19 | export function parser(options?: ParserOptions): (_: _, reader: Reader, writer: Writer) => void { 20 | const opts = options || {}; 21 | 22 | function clean(line: string) { 23 | return (!opts.sep && line[line.length - 1] === '\r') ? line.substring(0, line.length - 1) : line; 24 | } 25 | return (_: _, reader: Reader, writer: Writer) => { 26 | var remain = ""; 27 | reader.forEach(_, (_, chunk) => { 28 | var str: string; 29 | if (typeof chunk === 'string') str = chunk; 30 | else if (Buffer.isBuffer(chunk)) str = chunk.toString(opts.encoding || 'utf8'); 31 | else if (chunk === undefined) return; 32 | else throw new Error("bad input: " + typeof chunk); 33 | const lines = str.split(opts.sep || '\n'); 34 | if (lines.length > 1) { 35 | writer.write(_, clean(remain + lines[0])); 36 | for (var i = 1; i < lines.length - 1; i++) writer.write(_, clean(lines[i])); 37 | remain = lines[i]; 38 | } else { 39 | remain += lines[0]; 40 | } 41 | }); 42 | if (remain) writer.write(_, remain); 43 | }; 44 | } 45 | 46 | /// * `transform = ez.transforms.lines.formatter(options)` 47 | /// creates a formatter transform. 48 | /// `options.eol` defines the line separator. It is set to `\n` by default. 49 | /// `options.extra` indicates if an extra line separator must be emitted or not at the end. 
It is false by default. 50 | export interface FormatterOptions { 51 | eol?: string; 52 | extra?: boolean; 53 | } 54 | 55 | export function formatter(options?: FormatterOptions) { 56 | const opts = options || {}; 57 | const eol = opts.eol || '\n'; 58 | return (_: _, reader: Reader, writer: Writer) => { 59 | if (opts.extra) { 60 | reader.forEach(_, (_, line) => { 61 | writer.write(_, line + eol); 62 | }); 63 | } else { 64 | reader.forEach(_, (_, line, i) => { 65 | writer.write(_, i > 0 ? eol + line : line); 66 | }); 67 | } 68 | } 69 | } -------------------------------------------------------------------------------- /src/transforms/multipart.md: -------------------------------------------------------------------------------- 1 | ## Stream transform for MIME multipart 2 | 3 | `import * as ez from 'ez-streams'` 4 | 5 | * `transform = ez.transforms.multipart.parser(options)` 6 | Creates a parser transform. 7 | The content type, which includes the boundary, 8 | is passed via `options['content-type']`. 9 | * `transform = ez.transforms.multipart.formatter(options)` 10 | Creates a formatter transform. 11 | The content type, which includes the boundary, 12 | is passed via `options['content-type']`. 
13 | -------------------------------------------------------------------------------- /src/transforms/multipart.ts: -------------------------------------------------------------------------------- 1 | "use strict"; 2 | /// !doc 3 | /// ## Stream transform for MIME multipart 4 | /// 5 | /// `import * as ez from 'ez-streams'` 6 | /// 7 | import { _ } from "streamline-runtime"; 8 | import { Reader } from "../reader"; 9 | import { Writer } from "../writer"; 10 | import * as binary from '../helpers/binary'; 11 | import * as generic from '../devices/generic'; 12 | 13 | function parseContentType(contentType?: string) { 14 | if (!contentType) throw new Error("content-type missing"); 15 | const match = /^multipart\/([\w\-]*)/.exec(contentType); 16 | if (!match) return null; 17 | const subType = match[1]; 18 | const atbs: any = contentType.split(/\s*;\s*/).reduce((r: any, s: string) => { 19 | const kv = s.split(/\s*=\s*/); 20 | r[kv[0]] = kv[1]; 21 | return r; 22 | }, {}); 23 | return { 24 | subType: subType, 25 | boundary: atbs.boundary, 26 | } 27 | } 28 | 29 | /// * `transform = ez.transforms.multipart.parser(options)` 30 | /// Creates a parser transform. 31 | /// The content type, which includes the boundary, 32 | /// is passed via `options['content-type']`. 
33 | export type ParserOptions = { 34 | [name: string]: string; 35 | } 36 | 37 | export function parser(options: ParserOptions) { 38 | const ct = parseContentType(options && options["content-type"]); 39 | const boundary = ct && ct.boundary; 40 | if (!boundary) throw new Error("multipart boundary missing"); 41 | 42 | return (_: _, reader: Reader, writer: Writer) => { 43 | const binReader = binary.reader(reader); 44 | const handshake = _.handshake(); 45 | while (true) { 46 | var buf = binReader.readData(_, 2048); 47 | if (!buf || !buf.length) return; 48 | var str = buf.toString("binary"); 49 | var i = str.indexOf(boundary); 50 | if (i < 0) throw new Error("boundary not found"); 51 | var lines = str.substring(0, i).split(/\r?\n/); 52 | var headers = lines.slice(0, lines.length - 2).reduce((h: any, l: string) => { 53 | const kv = l.split(/\s*:\s*/); 54 | h[kv[0].toLowerCase()] = kv[1]; 55 | return h; 56 | }, {}); 57 | i = str.indexOf('\n', i); 58 | binReader.unread(buf.length - i - 1); 59 | 60 | var read = (_: _) => { 61 | const len = Math.max(boundary.length, 256); 62 | const buf = binReader.readData(_, 32 * len); 63 | if (!buf || !buf.length) { 64 | handshake.notify(); 65 | return; 66 | } 67 | // would be nice if Buffer had an indexOf. Would avoid a conversion to string. 68 | // I could use node-buffertools but it introduces a dependency on a binary module. 
69 | const s = buf.toString("binary"); 70 | const i = s.indexOf(boundary); 71 | if (i === 0) { 72 | const j = s.indexOf('\n', boundary.length); 73 | if (j < 0) throw new Error("newline missing after boundary"); 74 | binReader.unread(buf.length - j - 1); 75 | handshake.notify(); 76 | return undefined; 77 | } else if (i > 0) { 78 | var j = s.lastIndexOf('\n', i); 79 | if (s[j - 1] === '\r') j--; 80 | binReader.unread(buf.length - i); 81 | return buf.slice(0, j); 82 | } else { 83 | binReader.unread(buf.length - 31 * len); 84 | return buf.slice(0, 31 * len); 85 | } 86 | }; 87 | const partReader = generic.reader(read); 88 | partReader.headers = headers; 89 | writer.write(_, partReader); 90 | handshake.wait(_); 91 | } 92 | }; 93 | } 94 | 95 | /// * `transform = ez.transforms.multipart.formatter(options)` 96 | /// Creates a formatter transform. 97 | /// The content type, which includes the boundary, 98 | /// is passed via `options['content-type']`. 99 | export interface FormatterOptions { 100 | [name: string]: string; 101 | } 102 | 103 | export function formatter(options?: FormatterOptions) { 104 | const ct = parseContentType(options && options["content-type"]); 105 | const boundary = ct && ct.boundary; 106 | if (!boundary) throw new Error("multipart boundary missing"); 107 | 108 | return (_: _, reader: Reader>, writer: Writer) => { 109 | var part: Reader | undefined; 110 | while ((part = reader.read(_)) !== undefined) { 111 | var headers = part.headers; 112 | if (!headers) throw new Error("part does not have headers"); 113 | Object.keys(part.headers).forEach_(_, (_, key) => { 114 | writer.write(_, Buffer.from(key + ": " + headers[key] + "\n", "binary")); 115 | }); 116 | writer.write(_, Buffer.from("\n" + boundary + "\n")); 117 | // cannot use pipe because pipe writes undefined at end. 
118 | part.forEach(_, (_, data) => { 119 | writer.write(_, data); 120 | }); 121 | writer.write(_, Buffer.from("\n" + boundary + "\n")); 122 | } 123 | } 124 | } -------------------------------------------------------------------------------- /src/transforms/xml.md: -------------------------------------------------------------------------------- 1 | 2 | # Simple XML parser and formatter 3 | 4 | Transforms back and forth between XML and JS. 5 | Tries to generate a JS object which is as simple as possible, without losing information. 6 | 7 | Uses the following rules when converting from XML to JS: 8 | * all values are returned as strings. No attempt to convert numbers and booleans 9 | * attributes are mapped to a `$` subobject. 10 | * simple values are mapped to an object with a `$value` property if the tag has attributes. 11 | * simple values are mapped to a string if the tag does not have attributes. 12 | * repeating tags are mapped to an array. 13 | * CDATA sections are mapped to an object with a `$cdata` property. 14 | * self-closing tags are returned as an empty object. 15 | 16 | Some examples: 17 | 18 | ``` 19 | hello world --> { a: "hello world" } 20 | world --> { a: { $: { x: "hello" }, $value: "world" } } 21 | helloworld --> { a: { b : "hello", c: "world" } } 22 | helloworld --> { a: { b : ["hello", "world"] } 23 | --> { a: "" } 24 | --> { a: {} } 25 | ``` 26 | 27 | See the `test/server/xml-test._js` unit test for more examples. 28 | 29 | ## API 30 | 31 | `import * as ez from 'ez-streams'` 32 | 33 | * `transform = ez.transforms.xml.parser(options)` 34 | creates a parser transform. The following options can be set: 35 | - `tags`: the list of tags that enclose each item returned by the reader 36 | * `transform = ez.transforms.xml.formatter(options)` 37 | creates a formatter transform. 
The following options can be set: 38 | - `tags`: the list of tags that enclose each item returned by the reader 39 | - `indent`: optional indentation string, should only contain spaces. 40 | -------------------------------------------------------------------------------- /src/transforms/xml.ts: -------------------------------------------------------------------------------- 1 | "use strict"; 2 | 3 | /// !doc 4 | /// 5 | /// # Simple XML parser and formatter 6 | /// 7 | /// Transforms back and forth between XML and JS. 8 | /// Tries to generate a JS object which is as simple as possible, without losing information. 9 | /// 10 | /// Uses the following rules when converting from XML to JS: 11 | /// * all values are returned as strings. No attempt to convert numbers and booleans 12 | /// * attributes are mapped to a `$` subobject. 13 | /// * simple values are mapped to an object with a `$value` property if the tag has attributes. 14 | /// * simple values are mapped to a string if the tag does not have attributes. 15 | /// * repeating tags are mapped to an array. 16 | /// * CDATA sections are mapped to an object with a `$cdata` property. 17 | /// * self-closing tags are returned as an empty object. 18 | /// 19 | /// Some examples: 20 | /// 21 | /// ``` 22 | /// hello world --> { a: "hello world" } 23 | /// world --> { a: { $: { x: "hello" }, $value: "world" } } 24 | /// helloworld --> { a: { b : "hello", c: "world" } } 25 | /// helloworld --> { a: { b : ["hello", "world"] } 26 | /// --> { a: "" } 27 | /// --> { a: {} } 28 | /// ``` 29 | /// 30 | /// See the `test/server/xml-test._js` unit test for more examples. 
31 | /// 32 | /// ## API 33 | /// 34 | /// `import * as ez from 'ez-streams'` 35 | /// 36 | import { _ } from "streamline-runtime"; 37 | import { Reader } from "../reader"; 38 | import { Writer } from "../writer"; 39 | 40 | const begWord: any = {}, 41 | inWord: any = {}, 42 | space: any = {}, 43 | LF = '\n'.charCodeAt(0), 44 | LT = '<'.charCodeAt(0), 45 | GT = '>'.charCodeAt(0), 46 | EXCLAM = '!'.charCodeAt(0), 47 | QMARK = '?'.charCodeAt(0), 48 | SLASH = '/'.charCodeAt(0), 49 | OBRA = '['.charCodeAt(0), 50 | EQ = '='.charCodeAt(0), 51 | DQUOTE = '"'.charCodeAt(0), 52 | DASH = '-'.charCodeAt(0), 53 | entitiesByChar: { [name: string]: string } = { 54 | '&': 'amp', 55 | '<': 'lt', 56 | '>': 'gt', 57 | '"': 'quot', 58 | "'": 'apos', 59 | }, 60 | entitiesByName: { [name: string]: string } = {}; 61 | 62 | (() => { 63 | function add(clas: any, chs: string, i?: number) { 64 | chs.split('').forEach((ch) => { 65 | clas[ch.charCodeAt(0) + (i || 0)] = true; 66 | }); 67 | } 68 | for (var i = 0; i <= 9; i++) add(inWord, '0', i); 69 | for (var i = 0; i < 26; i++) add(begWord, 'aA', i), add(inWord, 'aA', i); 70 | add(begWord, ':_'), add(inWord, ':_-.'); 71 | add(space, ' \t\r\n'); 72 | Object.keys(entitiesByChar).forEach((ch) => { 73 | entitiesByName[entitiesByChar[ch]] = ch; 74 | }); 75 | })(); 76 | 77 | function assert(cond: boolean, msg: string) { 78 | if (!cond) throw new Error(msg); 79 | } 80 | 81 | const MARKER = '689c93f7-0147-40e9-a172-5c6c1c12ba11'; 82 | 83 | /// * `transform = ez.transforms.xml.parser(options)` 84 | /// creates a parser transform. 
The following options can be set: 85 | /// - `tags`: the list of tags that enclose each item returned by the reader 86 | export interface ParserOptions { 87 | tags?: string; 88 | encoding?: string; 89 | } 90 | 91 | interface Element { 92 | $tag?: string; 93 | $parent?: Element; 94 | $childCount?: number; 95 | $cdata?: string; 96 | $value?: string; 97 | $index?: number; 98 | $emit?: boolean; 99 | $?: { [name: string]: any }; 100 | [name: string]: any; 101 | } 102 | 103 | export function parser(options?: ParserOptions) { 104 | const opts = options || {}; 105 | const ttags: any = typeof opts === "string" ? opts : opts.tags; 106 | const tags = typeof ttags === "string" ? ttags.split('/') : ttags; 107 | if (!tags) throw new Error("cannot transform XML: 'tags' option missing") 108 | 109 | function builder(error: (message: string) => Error) { 110 | const root: Element = { 111 | $childCount: 0 112 | }; 113 | var elt = root; 114 | 115 | function mustEmit(parent: Element, tag: string) { 116 | if (tag !== tags[tags.length - 1]) return false; 117 | // TS bugfix: for (var i = tags.length - 2; tag >= 0; tag--) { 118 | var p: Element | undefined = parent; 119 | for (var i = tags.length - 2; i >= 0; i--) { 120 | if (!p || p.$tag !== tags[i]) return false; 121 | p = p.$parent; 122 | } 123 | return true; 124 | } 125 | 126 | function clone(parent: Element, tag: string | undefined, child: Element): Element { 127 | const pp = Object.keys(parent).reduce((r: Element, k: string) => { 128 | if (k[0] !== '$' || k.length === 1) r[k] = tag === k ? child : parent[k]; 129 | return r; 130 | }, {}); 131 | return parent.$parent ? 
clone(parent.$parent, parent.$tag, pp) : pp; 132 | } 133 | 134 | return { 135 | push: (tag: string) => { 136 | if (elt.$cdata != null) throw error("cannot mix CDATA and children"); 137 | if (elt.$value != null) throw error("cannot mix value and children"); 138 | elt.$childCount++; 139 | const emit = mustEmit(elt, tag); 140 | const child: Element = { 141 | $tag: tag, 142 | $parent: elt, 143 | $childCount: 0, 144 | $emit: emit, 145 | }; 146 | if (!emit && elt[tag] != null) { 147 | if (!Array.isArray(elt[tag])) elt[tag] = [elt[tag]]; 148 | child.$index = elt[tag].length; 149 | elt[tag].push(child); 150 | } else { 151 | elt[tag] = child; 152 | } 153 | elt = child; 154 | }, 155 | pop: (_: _, writer: Writer, tag?: string) => { 156 | if (tag && tag !== elt.$tag) throw error("closing tag mismatch: expected " + elt.$tag + ", got " + tag); 157 | const parent = elt.$parent; 158 | const emit = elt.$emit; 159 | if (!parent) throw error("too many closing tags"); 160 | delete elt.$parent; 161 | // if elt does not have attributes, replace it by value in parent 162 | if (elt.$value !== undefined && !elt.$) { 163 | if (emit || elt.$index === undefined) parent[elt.$tag!] 
= elt.$value; 164 | else parent[elt.$tag!][elt.$index] = elt.$value; 165 | } else { 166 | delete elt.$tag; 167 | delete elt.$childCount; 168 | delete elt.$index; 169 | delete elt.$emit; 170 | } 171 | if (emit) writer.write(_, clone(parent, tag, elt)); 172 | elt = parent; 173 | }, 174 | attribute: (atb: string, val: any) => { 175 | elt.$ = elt.$ || {}; 176 | if (elt.$[atb] != null) throw error("duplicate attribute: " + atb); 177 | elt.$[atb] = val; 178 | }, 179 | value: (val: any) => { 180 | if (elt.$cdata != null) throw error("cannot mix CDATA and value"); 181 | if (elt.$childCount) throw error("cannot mix children and value"); 182 | elt.$value = val; 183 | }, 184 | cdata: (val: any) => { 185 | if (elt.$value != null) throw error("cannot mix value and CDATA"); 186 | if (elt.$childCount) throw error("cannot mix children and CDATA"); 187 | elt.$cdata = val; 188 | }, 189 | getResult: () => { 190 | if (elt !== root) throw error("tag not closed: " + elt.$tag); 191 | if (!root.$childCount) throw error("root tag not found"); 192 | if (root.$childCount !== 1) throw error("too many elements at top level"); 193 | delete root.$childCount; 194 | return root; 195 | }, 196 | }; 197 | } 198 | return (_: _, reader: Reader, writer: Writer) => { 199 | var data = reader.read(_); 200 | if (data === undefined) return; 201 | var str = Buffer.isBuffer(data) ? 
data.toString(opts.encoding || 'utf8') : data; 202 | var pos = 0, 203 | bld = builder(error); 204 | 205 | function forget() { 206 | str = str.substring(pos); 207 | pos = 0; 208 | } 209 | 210 | function fill(_: _, pat: string) { 211 | while (true) { 212 | var i = str.indexOf(pat, pos); 213 | // read at least 3 chars further to detect '); 300 | if (j < 0) throw error("unterminated comment"); 301 | pos = j + 3; 302 | fill(_, '<'); 303 | } else { 304 | break; 305 | } 306 | } 307 | val += clean(str.substring(pos, j)); 308 | if (str.charCodeAt(j + 1) === SLASH) bld.value(val), pos = j; 309 | else checkEmpty(val); 310 | break; 311 | } else { 312 | pos--; 313 | throw error("unexpected character: '" + str[pos] + "'"); 314 | } 315 | } 316 | } else if (ch === SLASH) { 317 | // closing tag - read optional tag name 318 | beg = pos; 319 | var ch = str.charCodeAt(pos); 320 | var tag: string | undefined; 321 | if (begWord[ch]) { 322 | pos++; 323 | while (inWord[str.charCodeAt(pos)]) pos++; 324 | tag = str.substring(beg, pos); 325 | } 326 | eatSpaces(); 327 | eat(GT); 328 | bld.pop(_, writer, tag); 329 | } else if (ch === EXCLAM) { 330 | // comment 331 | var ch = str.charCodeAt(pos++); 332 | if (ch === DASH && str.charCodeAt(pos++) === DASH) { 333 | var j = str.indexOf('-->', pos); 334 | if (j < 0) throw error("--> missing"); 335 | pos = j + 3; 336 | } else if (ch === OBRA && str.substring(pos, pos + 6) === 'CDATA[') { 337 | pos += 6; 338 | var j = fill(_, ']]>'); 339 | if (j < 0) throw error("]]> missing"); 340 | bld.cdata(str.substring(pos, j)); 341 | pos = j + 3; 342 | eatSpaces(); 343 | } else { 344 | throw error("invalid syntax after ', pos); 349 | if (j < 0) throw error("?> missing"); 350 | pos = j + 2; 351 | } else { 352 | throw error("unexpected character: " + str[beg]); 353 | } 354 | eatSpaces(); 355 | } while (npos >= 0); 356 | if (pos != str.length) throw error("unexpected trailing text: " + str.substring(pos)); 357 | }; 358 | } 359 | /// * `transform = 
ez.transforms.xml.formatter(options)` 360 | /// creates a formatter transform. The following options can be set: 361 | /// - `tags`: the list of tags that enclose each item returned by the reader 362 | /// - `indent`: optional indentation string, should only contain spaces. 363 | export interface FormatterOptions { 364 | tags?: string; 365 | indent?: string; 366 | } 367 | 368 | export interface Builder { 369 | beginTag: (tag: string) => void; 370 | addAttribute: (atb: string, val: any) => void; 371 | endTag: (close?: boolean) => void; 372 | closeTag: (tag: string, val?: any) => void; 373 | cdata: (data: string) => void; 374 | getResult: (extraIndent?: boolean) => string; 375 | } 376 | 377 | export function formatter(options?: FormatterOptions) { 378 | const opts = options || {}; 379 | const ttags: any = typeof opts === "string" ? opts : opts.tags; 380 | const tags = typeof ttags === "string" ? ttags.split('/') : ttags; 381 | if (!tags) throw new Error("cannot transform XML: 'tags' option missing") 382 | const ident = opts && opts.indent; 383 | 384 | function builder(depth: number): Builder { 385 | var str = ''; 386 | 387 | function indent() { 388 | str += '\n' + Array(depth + 1).join(opts.indent); 389 | } 390 | 391 | function escape(val: any) { 392 | return typeof (val) !== "string" ? "" + val : val.replace(/([&<>"']|[^ -~\u00a1-\ud7ff\ue000-\ufffd])/g, (ch: string) => { 393 | const ent = entitiesByChar[ch]; 394 | if (ent) return '&' + ent + ';'; 395 | var hex = ch.charCodeAt(0).toString(16); 396 | while (hex.length < 2) hex = '0' + hex; 397 | while (hex.length > 2 && hex.length < 4) hex = '0' + hex; 398 | return '&#x' + hex + ';'; 399 | }); 400 | } 401 | return { 402 | beginTag: (tag) => { 403 | opts.indent && indent(); 404 | str += '<' + tag; 405 | depth++; 406 | }, 407 | addAttribute: (atb, val) => { 408 | str += ' ' + atb + '="' + escape(val) + '"'; 409 | }, 410 | endTag: (close) => { 411 | close && depth--; 412 | str += close ? 
'/>' : '>'; 413 | }, 414 | closeTag: (tag, val) => { 415 | depth--; 416 | if (val != null) { 417 | str += escape(val); 418 | } else { 419 | opts.indent && indent(); 420 | } 421 | str += ''; 422 | }, 423 | cdata: (data) => { 424 | str += ''; 425 | }, 426 | getResult: (extraIndent) => { 427 | if (extraIndent) indent(); 428 | // indexOf to eliminate newline that indent may put before root 429 | return str.substring(str.indexOf('<')); 430 | } 431 | }; 432 | } 433 | 434 | return (_: _, reader: Reader, writer: Writer) => { 435 | function error(msg: string) { 436 | return new Error(msg); 437 | } 438 | 439 | function strfy(bld: Builder, elt: any, tag: string) { 440 | if (elt === undefined) return bld; 441 | if (Array.isArray(elt)) { 442 | elt.forEach((child: any) => { 443 | strfy(bld, child, tag); 444 | }); 445 | return bld; 446 | } 447 | bld.beginTag(tag); 448 | if (elt === null) { 449 | bld.addAttribute('xsi:nil', 'true'); 450 | bld.endTag(true); 451 | } else if (typeof elt !== "object") { 452 | bld.endTag(); 453 | bld.closeTag(tag, elt); 454 | } else { 455 | if (elt.$) { 456 | Object.keys(elt.$).forEach((atb) => { 457 | var v: any; 458 | if ((v = elt.$[atb]) != null) bld.addAttribute(atb, v); 459 | }); 460 | } 461 | const keys = Object.keys(elt).filter((key) => key[0] !== '$'); 462 | if (elt.$value !== undefined) { 463 | if (keys.length > 0) throw error("cannot mix $value and $children"); 464 | if (elt.$cdata) throw error("cannot mix $value and $cdata"); 465 | if (elt.$value === null) { 466 | bld.addAttribute('xsi:nil', 'true'); 467 | bld.endTag(true); 468 | } else { 469 | bld.endTag(); 470 | bld.closeTag(tag, elt.$value); 471 | } 472 | } else if (elt.$cdata != null) { 473 | if (keys.length > 0) throw error("cannot mix $cdata and $children"); 474 | bld.endTag(); 475 | bld.cdata(elt.$cdata); 476 | bld.closeTag(tag); 477 | } else if (keys.length > 0) { 478 | bld.endTag(); 479 | keys.forEach((key) => { 480 | strfy(bld, elt[key], key); 481 | }); 482 | bld.closeTag(tag); 483 
| } else { 484 | bld.endTag(true); 485 | } 486 | } 487 | return bld; 488 | } 489 | function getParent(elt: any) { 490 | for (var i = 0; i < tags.length - 1; i++) { 491 | elt = elt[tags[i]]; 492 | if (elt === null) throw new Error("tag not found: " + tags[i]); 493 | } 494 | return elt; 495 | } 496 | 497 | const rootTag = tags[0]; 498 | const parentTag = tags[tags.length - 1]; 499 | 500 | const elt = reader.read(_); 501 | if (elt === undefined) return; 502 | var parent = getParent(elt); 503 | const saved = parent[parentTag]; 504 | parent[parentTag] = MARKER; 505 | const envelope = strfy(builder(0), elt[rootTag], rootTag).getResult(); 506 | parent[parentTag] = saved; 507 | 508 | const marker = '<' + parentTag + '>' + MARKER + ''; 509 | const markerPos = envelope.indexOf(marker); 510 | if (markerPos < 0) throw new Error("internal error: marker not found"); 511 | 512 | const prologue = '' + (opts.indent ? '\n' : ''); 513 | writer.write(_, prologue + envelope.substring(0, markerPos)); 514 | while (true) { 515 | var xml = strfy(builder(tags.length - 1), parent[parentTag], parentTag).getResult(true); 516 | writer.write(_, xml); 517 | var parent = reader.read(_); 518 | if (parent === undefined) break; 519 | parent = getParent(parent); 520 | } 521 | writer.write(_, envelope.substring(markerPos + marker.length)); 522 | }; 523 | } -------------------------------------------------------------------------------- /src/util.ts: -------------------------------------------------------------------------------- 1 | import { _ } from 'streamline-runtime'; 2 | 3 | export function nextTick(cb: Function | _) { 4 | const anyCb: any = cb; 5 | if (/^0\./.test(process.versions.node)) setImmediate(cb); 6 | else process.nextTick(anyCb); 7 | } -------------------------------------------------------------------------------- /src/writer.md: -------------------------------------------------------------------------------- 1 | ## EZ Streams core writer API 2 | 3 | `import * as ez from 'ez-streams'` 4 
| 5 | * `ez.writer.decorate(proto)` 6 | Adds the EZ streams writer API to an object. 7 | Usually the object is a prototype but it may be any object with a `write(_, data)` method. 8 | You do not need to call this function if you create your readers with 9 | the `ez.devices` modules. 10 | Returns `proto` for convenience. 11 | 12 | * `writer = writer.writeAll(_, val)` 13 | writes `val` and ends the writer 14 | 15 | * `writer = writer.stop(_, err)` 16 | stops the writer. 17 | by default err is silently ignored 18 | 19 | * `writer = writer.end()` 20 | ends the writer - compatiblity call (errors won't be thrown to caller) 21 | * `writer = writer.pre.action(fn)` 22 | returns another writer which applies `action(fn)` before writing to the original writer. 23 | `action` may be any chainable action from the reader API: `map`, `filter`, `transform`, ... 24 | * `stream = writer.nodify()` 25 | converts the writer into a native node Writable stream. 26 | -------------------------------------------------------------------------------- /src/writer.ts: -------------------------------------------------------------------------------- 1 | "use strict"; 2 | /** 3 | * Copyright (c) 2013 Bruno Jouhier 4 | * 5 | * Permission is hereby granted, free of charge, to any person 6 | * obtaining a copy of this software and associated documentation 7 | * files (the "Software"), to deal in the Software without 8 | * restriction, including without limitation the rights to use, 9 | * copy, modify, merge, publish, distribute, sublicense, and/or sell 10 | * copies of the Software, and to permit persons to whom the 11 | * Software is furnished to do so, subject to the following 12 | * conditions: 13 | * 14 | * The above copyright notice and this permission notice shall be 15 | * included in all copies or substantial portions of the Software. 
16 | * 17 | * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, 18 | * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES 19 | * OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND 20 | * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT 21 | * HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, 22 | * WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING 23 | * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR 24 | * OTHER DEALINGS IN THE SOFTWARE. 25 | */ 26 | /// !doc 27 | /// ## EZ Streams core writer API 28 | /// 29 | /// `import * as ez from 'ez-streams'` 30 | /// 31 | import { _ } from "streamline-runtime"; 32 | import { Reader, ParallelOptions } from "./reader"; 33 | import { create as createUturn } from './devices/uturn'; 34 | import * as nodeStream from "stream"; 35 | import * as sys from 'util'; 36 | 37 | export class Writer { 38 | write: (this: Writer, _: _, value?: T) => this; 39 | ended: boolean; 40 | constructor(write: (_: _, value: T) => void, stop?: (_: _, arg?: any) => void) { 41 | if (typeof write !== 'function') throw new Error("invalid writer.write: " + (write && typeof write)); 42 | this.ended = false; 43 | this.write = (_, data) => { 44 | if (data === undefined) { 45 | if (!this.ended) write.call(this, _); 46 | this.ended = true; 47 | } else { 48 | if (this.ended) throw new Error("invalid attempt to write after end"); 49 | write.call(this, _, data); 50 | } 51 | return this; 52 | }; 53 | if (stop) { 54 | this.stop = (_: _, arg?: any) => { 55 | stop.call(this, _, arg); 56 | return this; 57 | }; 58 | } 59 | } 60 | 61 | 62 | /// 63 | /// * `writer = writer.writeAll(_, val)` 64 | /// writes `val` and ends the writer 65 | writeAll(_: _, val: T) { 66 | this.write(_, val); 67 | this.write(_, undefined); 68 | return this; 69 | }; 70 | 71 | /// 72 | /// * `writer = writer.stop(_, err)` 73 | /// stops the writer. 
74 | /// by default arg is silently ignored 75 | stop(_: _, arg?: any): Writer { 76 | this.write(_, undefined); 77 | return this; 78 | }; 79 | 80 | /// 81 | /// * `writer = writer.end()` 82 | /// ends the writer - compatiblity call (errors won't be thrown to caller) 83 | end() { 84 | if (arguments.length > 0) throw new Error("invalid end call: " + arguments.length + " arg(s)"); 85 | _.run(_ => this.write(_, undefined)); 86 | return this; 87 | }; 88 | 89 | /// * `writer = writer.pre.action(fn)` 90 | /// returns another writer which applies `action(fn)` before writing to the original writer. 91 | /// `action` may be any chainable action from the reader API: `map`, `filter`, `transform`, ... 92 | get pre(): Pre { 93 | return new PreImpl(this) as Pre; 94 | } 95 | 96 | /// * `stream = writer.nodify()` 97 | /// converts the writer into a native node Writable stream. 98 | nodify() { 99 | const self = this; 100 | const stream = new nodeStream.Writable(); 101 | // ES2015 does not let us override method directly but we do it! 102 | // This is fishy. Clean up later (should do it from end event). 103 | // also very fragile because of optional args. 
104 | const anyStream: any = stream; 105 | anyStream._write = function (chunk: any, encoding?: string, done?: Function) { 106 | if (chunk && encoding && encoding !== 'buffer') chunk = chunk.toString(encoding); 107 | _.run(_ => self.write(_, chunk), err => { 108 | if (err) return stream.emit('error', err) as never; 109 | if (done) done(); 110 | }); 111 | } 112 | // override end to emit undefined marker 113 | const end = stream.end; 114 | anyStream.end = function (chunk: any, encoding?: string, cb?: (err: any, val?: any) => any) { 115 | end.call(stream, chunk, encoding, (err: any) => { 116 | if (err) return stream.emit('error', err) as never; 117 | cb = cb || ((err) => { }); 118 | _.run(_ => self.write(_, undefined), cb); 119 | }); 120 | }; 121 | return stream; 122 | } 123 | // optional result getter - only implemneted in some subclasses 124 | get result(): any { 125 | throw new Error("result not supported"); 126 | } 127 | }; 128 | 129 | export function create(write: (_: _, value: T) => Writer, stop?: (_: _, arg?: any) => Writer) { 130 | return new Writer(write, stop); 131 | } 132 | 133 | /// * `ez.writer.decorate(proto)` 134 | /// Adds the EZ streams writer API to an object. 135 | /// Usually the object is a prototype but it may be any object with a `write(_, data)` method. 136 | /// You do not need to call this function if you create your readers with 137 | /// the `ez.devices` modules. 138 | /// Returns `proto` for convenience. 139 | // compat API - don't export in TS 140 | exports.decorate = function (proto: any) { 141 | const writerProto: any = Writer.prototype; 142 | Object.getOwnPropertyNames(Writer.prototype).forEach(k => { 143 | // compare with == is important here! 
144 | if (k == 'constructor' || k == 'result') return; 145 | if (k == 'pre') { 146 | Object.defineProperty(proto, k, { 147 | get(this: Writer) { return new PreImpl(this); } 148 | }); 149 | } else { 150 | if (!proto[k]) proto[k] = writerProto[k]; 151 | } 152 | }); 153 | return proto; 154 | } 155 | 156 | 157 | export class PreImpl { 158 | writer: Writer; 159 | constructor(writer: Writer) { 160 | if (typeof writer.write !== 'function') throw new Error("invalid pre writer: " + sys.inspect(writer)); 161 | this.writer = writer; 162 | } 163 | } 164 | 165 | export interface Pre extends PreImpl { 166 | map(fn: (_: _, elt: U, index?: number) => T, thisObj?: any): Writer; 167 | tee(writer: Writer): Writer; 168 | concat(readers: Reader[]): Writer; 169 | transform(fn: (_: _, reader: Reader, writer: Writer) => void, thisObj?: any): Writer; 170 | filter(fn: (_: _, elt: T, index?: number) => boolean, thisObj?: any): Writer; 171 | until(fn: (_: _, elt: T, index?: number) => boolean, thisObj?: any): Writer; 172 | while(fn: (_: _, elt: T, index?: number) => boolean, thisObj?: any): Writer; 173 | limit(n: number, stopArg?: any): Writer; 174 | skip(n: number): Writer; 175 | parallel(options: ParallelOptions | number, consumer: (source: any) => Reader): Writer; 176 | buffer(max: number): Writer; 177 | nodeTransform(duplex: nodeStream.Duplex): Writer; 178 | } 179 | 180 | // add reader methods to Pre.prototype 181 | // fragile but we'll fix later 182 | process.nextTick(() => { 183 | const preProto: any = PreImpl.prototype; 184 | const api: any = Reader.prototype; 185 | [ 186 | 'map', 187 | 'tee', 188 | 'concat', 189 | 'transform', 190 | 'filter', 191 | 'until', 192 | 'while', 193 | 'limit', 194 | 'skip', 195 | 'parallel', 196 | 'buffer', 197 | 'nodeTransform' 198 | ].forEach((name) => { 199 | preProto[name] = function (this: Pre, arg: any) { 200 | const uturn = require('./devices/uturn').create(); 201 | uturn.reader[name](arg).pipe(uturn.end, this.writer); 202 | return uturn.writer; 203 | 
} 204 | }); 205 | }) 206 | -------------------------------------------------------------------------------- /test/benchmarks/bench._js: -------------------------------------------------------------------------------- 1 | "use strict"; 2 | const fs = require("fs"); 3 | const ez = require('ez-streams'); 4 | 5 | function bench(_, name, fn) { 6 | var max = 1; 7 | while (true) { 8 | var t0 = Date.now(); 9 | var result = fn(_, max); 10 | if (result !== dummy(_, max - 1)) throw new Error(name + ": bad result: " + result); 11 | var dt = (Date.now() - t0); 12 | if (dt > 100) { 13 | dt = Math.round(dt * 1000 * 1000 / max); 14 | var s = dt.toString().replace(/\B(?=(\d{3})+(?!\d))/g, ","); 15 | console.log(name + "\t" + s + " ns"); 16 | return; 17 | } 18 | max *= 2; 19 | } 20 | } 21 | 22 | function dummy(_, i) { 23 | return 3 * i; 24 | } 25 | 26 | function myReader(max) { 27 | var i = 0; 28 | return ez.devices.generic.reader(function read(_) { 29 | return i++ < max ? dummy(_, i - 1) : undefined; 30 | }); 31 | } 32 | 33 | const benches = { 34 | 'streamline dummy loop': { 35 | fn: function (_, max) { 36 | var result; 37 | for (var i = 0; i < max; i++) result = dummy(_, i); 38 | return result; 39 | }, 40 | time: 2 41 | }, 42 | 'streamline bound dummy loop': { 43 | fn: function (_, max) { 44 | var result; 45 | for (var i = 0; i < max; i++) result = [dummy][0](_, i); 46 | return result; 47 | }, 48 | time: 24 49 | }, 50 | 'callbacks loop nextTick': { 51 | fn: function (cb, max) { 52 | var i = 0; 53 | 54 | function next() { 55 | if (++i < max) process.nextTick(next); 56 | else dummy(cb, i - 1); 57 | } 58 | next(); 59 | }, 60 | time: 24 61 | }, 62 | 'callbacks loop setImmediate': { 63 | fn: function (cb, max) { 64 | var i = 0; 65 | 66 | function next() { 67 | if (++i < max) setImmediate(next); 68 | else dummy(cb, i - 1); 69 | } 70 | next(); 71 | }, 72 | time: 24 73 | }, 74 | 'streamline loop nextTick': { 75 | fn: function (_, max) { 76 | var i = 0; 77 | for (i = 0; i < max; i++) 
process.nextTick(_); 78 | return dummy(_, i - 1); 79 | }, 80 | time: 681 81 | }, 82 | 'streamline loop setImmediate': { 83 | fn: function (_, max) { 84 | var i = 0; 85 | for (i = 0; i < max; i++) setImmediate(_); 86 | return dummy(_, i - 1); 87 | }, 88 | time: 681 89 | }, 90 | 'reader with read loop': { 91 | fn: function (_, max) { 92 | const rd = myReader(max * 2); 93 | var result; 94 | for (var i = 0; i < max; i++) result = rd.read(_); 95 | return result; 96 | }, 97 | time: 10 98 | }, 99 | 'reader with limit': { 100 | fn: function (_, max) { 101 | var result; 102 | myReader(max * 2).limit(max).forEach(_, function (_, val) { 103 | result = val; 104 | }); 105 | return result; 106 | }, 107 | time: 3326 108 | }, 109 | 'reader with filter': { 110 | fn: function (_, max) { 111 | var result; 112 | myReader(max).filter((_) => true).forEach(_, function (_, val) { 113 | result = val; 114 | }); 115 | return result; 116 | }, 117 | time: 1735 118 | }, 119 | 'reader with limit and filter': { 120 | fn: function (_, max) { 121 | var result; 122 | myReader(max * 2).limit(max).filter((_) => true).forEach(_, function (_, val) { 123 | result = val; 124 | }); 125 | return result; 126 | }, 127 | time: 3724 128 | }, 129 | 'reader with transform': { 130 | fn: function (_, max) { 131 | var result; 132 | myReader(max).transform((_, reader, writer) => reader.pipe(_, writer)).forEach(_, function (_, val) { 133 | result = val; 134 | }); 135 | return result; 136 | }, 137 | time: 3724 138 | }, 139 | }; 140 | 141 | Object.keys(benches).forEach_(_, function (_, name) { 142 | bench(_, name, benches[name].fn) 143 | }); -------------------------------------------------------------------------------- /test/common/predicate-test.ts: -------------------------------------------------------------------------------- 1 | /// 2 | declare function asyncTest(name: string, expected: number, test: (_: _) => any): any; 3 | 4 | import { _ } from "streamline-runtime"; 5 | import * as ez from "../../src/ez"; 6 | 7 
| QUnit.module(module.id); 8 | 9 | const safeConverter = ez.predicate.convert; 10 | const unsafeConverter = ez.predicate.converter({ 11 | allowEval: true 12 | }); 13 | 14 | function t(_: _, condition: any, obj: any, expected: any, unsafe?: boolean) { 15 | const got = (unsafe ? unsafeConverter : safeConverter)(condition)(_, obj); 16 | equal(got, expected, JSON.stringify(condition) + " with " + JSON.stringify(obj) + " => " + expected); 17 | } 18 | 19 | asyncTest("direct values", 6, (_) => { 20 | t(_, 5, 5, true); 21 | t(_, 5, 6, false); 22 | t(_, 'a', 'a', true); 23 | t(_, 'a', 'aa', false); 24 | t(_, true, true, true); 25 | t(_, true, false, false); 26 | start(); 27 | }); 28 | 29 | asyncTest("gt", 3, (_) => { 30 | t(_, { 31 | $gt: 4, 32 | }, 5, true); 33 | 34 | t(_, { 35 | $gt: 5, 36 | }, 5, false); 37 | 38 | t(_, { 39 | $gt: 6, 40 | }, 5, false); 41 | 42 | start(); 43 | }); 44 | 45 | asyncTest("gte", 3, (_) => { 46 | t(_, { 47 | $gte: 4, 48 | }, 5, true); 49 | 50 | t(_, { 51 | $gte: 5, 52 | }, 5, true); 53 | 54 | t(_, { 55 | $gte: 6, 56 | }, 5, false); 57 | 58 | start(); 59 | }); 60 | 61 | asyncTest("lt", 3, (_) => { 62 | t(_, { 63 | $lt: 4, 64 | }, 5, false); 65 | 66 | t(_, { 67 | $lt: 5, 68 | }, 5, false); 69 | 70 | t(_, { 71 | $lt: 6, 72 | }, 5, true); 73 | 74 | start(); 75 | }); 76 | 77 | asyncTest("lte", 3, (_) => { 78 | t(_, { 79 | $lte: 4, 80 | }, 5, false); 81 | 82 | t(_, { 83 | $lte: 5, 84 | }, 5, true); 85 | 86 | t(_, { 87 | $lte: 6, 88 | }, 5, true); 89 | 90 | start(); 91 | }); 92 | 93 | asyncTest("ne", 3, (_) => { 94 | t(_, { 95 | $ne: 4, 96 | }, 5, true); 97 | 98 | t(_, { 99 | $ne: 5, 100 | }, 5, false); 101 | 102 | t(_, { 103 | $ne: 6, 104 | }, 5, true); 105 | 106 | 107 | start(); 108 | }); 109 | 110 | asyncTest("range", 3, (_) => { 111 | t(_, { 112 | $gte: 3, 113 | $lte: 7, 114 | }, 2, false); 115 | 116 | t(_, { 117 | $gte: 3, 118 | $lte: 7, 119 | }, 5, true); 120 | 121 | t(_, { 122 | $gte: 3, 123 | $lte: 7, 124 | }, 8, false); 125 | 126 | start(); 
127 | }); 128 | 129 | asyncTest("regexp", 2, (_) => { 130 | t(_, /^hel/, 'hello', true); 131 | t(_, /^hel/, 'world', false); 132 | 133 | start(); 134 | }); 135 | 136 | asyncTest("and", 2, (_) => { 137 | t(_, { 138 | $and: [2, 5], 139 | }, 5, false); 140 | 141 | t(_, { 142 | $and: [5, 5], 143 | }, 5, true); 144 | 145 | start(); 146 | }); 147 | 148 | asyncTest("empty and", 2, (_) => { 149 | t(_, {}, {}, true); 150 | 151 | t(_, {}, { 152 | a: 5, 153 | }, true); 154 | 155 | start(); 156 | }); 157 | 158 | asyncTest("or", 2, (_) => { 159 | t(_, { 160 | $or: [2, 5], 161 | }, 5, true); 162 | 163 | t(_, { 164 | $or: [2, 6], 165 | }, 5, false); 166 | 167 | start(); 168 | }); 169 | 170 | asyncTest("empty or", 2, (_) => { 171 | t(_, { 172 | $or: [] 173 | }, {}, false); 174 | 175 | t(_, { 176 | $or: [] 177 | }, { 178 | a: 5, 179 | }, false); 180 | 181 | start(); 182 | }); 183 | 184 | asyncTest("nor", 2, (_) => { 185 | t(_, { 186 | $nor: [2, 5], 187 | }, 5, false); 188 | 189 | t(_, { 190 | $nor: [2, 6], 191 | }, 5, true); 192 | 193 | start(); 194 | }); 195 | 196 | asyncTest("not", 2, (_) => { 197 | t(_, { 198 | $not: { 199 | $gt: 2 200 | }, 201 | }, 5, false); 202 | 203 | t(_, { 204 | $not: { 205 | $lt: 2 206 | }, 207 | }, 5, true); 208 | 209 | start(); 210 | }); 211 | 212 | asyncTest("in", 3, (_) => { 213 | t(_, { 214 | $in: [2, 3, 5] 215 | }, 3, true); 216 | 217 | t(_, { 218 | $in: [2, 3, 5] 219 | }, 4, false); 220 | 221 | t(_, { 222 | $in: [2, 3, 5] 223 | }, 5, true); 224 | 225 | start(); 226 | }); 227 | 228 | asyncTest("not in", 3, (_) => { 229 | t(_, { 230 | $nin: [2, 3, 5] 231 | }, 3, false); 232 | 233 | t(_, { 234 | $nin: [2, 3, 5] 235 | }, 4, true); 236 | 237 | t(_, { 238 | $nin: [2, 3, 5] 239 | }, 5, false); 240 | 241 | start(); 242 | }); 243 | 244 | asyncTest("exists", 3, (_) => { 245 | t(_, { 246 | $exists: "a" 247 | }, { 248 | a: 5, 249 | }, true); 250 | 251 | t(_, { 252 | $exists: "a" 253 | }, { 254 | a: undefined, 255 | }, true); 256 | 257 | t(_, { 258 | $exists: 
"a" 259 | }, { 260 | b: 5, 261 | }, false); 262 | 263 | start(); 264 | }); 265 | 266 | asyncTest("type", 3, (_) => { 267 | t(_, { 268 | $type: "number" 269 | }, 5, true); 270 | 271 | t(_, { 272 | $type: "object" 273 | }, {}, true); 274 | 275 | t(_, { 276 | $type: "string" 277 | }, 5, false); 278 | 279 | start(); 280 | }); 281 | 282 | asyncTest("mod", 2, (_) => { 283 | t(_, { 284 | $mod: [3, 2] 285 | }, 5, true); 286 | 287 | t(_, { 288 | $mod: [4, 2] 289 | }, 5, false); 290 | 291 | start(); 292 | }); 293 | 294 | asyncTest("regex", 4, (_) => { 295 | t(_, { 296 | $regex: "^hel", 297 | }, "hello", true); 298 | 299 | t(_, { 300 | $regex: "^hel", 301 | }, "world", false); 302 | 303 | t(_, { 304 | $regex: "^hel", 305 | }, "HeLLo", false); 306 | 307 | t(_, { 308 | $regex: "^hel", 309 | $options: "i", 310 | }, "HeLLo", true); 311 | 312 | start(); 313 | }); 314 | 315 | asyncTest("where", 4, (_) => { 316 | t(_, { 317 | $where: "this.a === this.b", 318 | }, { 319 | a: 5, 320 | b: 5, 321 | }, true, true); 322 | 323 | t(_, { 324 | $where: "this.a === this.b", 325 | }, { 326 | a: 5, 327 | b: 6, 328 | }, false, true); 329 | 330 | t(_, { 331 | $where: function (this: any) { 332 | return this.a === this.b; 333 | }, 334 | }, { 335 | a: 5, 336 | b: 5, 337 | }, true); 338 | 339 | t(_, { 340 | $where: function (this: any) { 341 | return this.a === this.b; 342 | }, 343 | }, { 344 | a: 5, 345 | b: 6, 346 | }, false); 347 | 348 | start(); 349 | }); 350 | 351 | asyncTest("elemMatch", 2, (_) => { 352 | t(_, { 353 | $elemMatch: { 354 | $gte: 2, 355 | $lt: 5, 356 | }, 357 | }, [1, 3, 5], true); 358 | 359 | t(_, { 360 | $elemMatch: { 361 | $gte: 2, 362 | $lt: 5, 363 | }, 364 | }, [1, 5, 6], false); 365 | 366 | start(); 367 | }); 368 | 369 | asyncTest("all", 6, (_) => { 370 | t(_, { 371 | $all: [2, 4], 372 | }, [1, 2, 3, 4, 5], true); 373 | 374 | t(_, { 375 | $all: [2, 4], 376 | }, [1, 2, 3, 5], false); 377 | 378 | t(_, { 379 | tags: { 380 | $all: ["appliance", "school", "book"] 381 | } 382 | }, 
{ 383 | tags: ["school", "book", "bag", "headphone", "appliance"], 384 | }, true); 385 | 386 | t(_, { 387 | tags: { 388 | $all: ["appliance", "school", "book"] 389 | } 390 | }, { 391 | tags: ["school", "bag", "headphone", "appliance"], 392 | }, false); 393 | 394 | const cond = { 395 | items: { 396 | $all: [{ 397 | $elemMatch: { 398 | size: "M", 399 | num: { 400 | $gt: 50 401 | } 402 | } 403 | }, { 404 | $elemMatch: { 405 | num: 100, 406 | color: "green" 407 | } 408 | }] 409 | } 410 | }; 411 | t(_, cond, { 412 | items: [{ 413 | size: "S", 414 | num: 10, 415 | color: "blue" 416 | }, { 417 | size: "M", 418 | num: 100, 419 | color: "blue" 420 | }, { 421 | size: "L", 422 | num: 100, 423 | color: "green" 424 | }] 425 | }, true); 426 | t(_, cond, { 427 | items: [{ 428 | size: "S", 429 | num: 10, 430 | color: "blue" 431 | }, { 432 | size: "M", 433 | num: 100, 434 | color: "blue" 435 | }, { 436 | size: "L", 437 | num: 100, 438 | color: "red" 439 | }] 440 | }, false); 441 | 442 | start(); 443 | }); 444 | 445 | asyncTest("size", 2, (_) => { 446 | t(_, { 447 | $size: 2, 448 | }, [1, 2], true); 449 | 450 | t(_, { 451 | $size: 2, 452 | }, [1, 2, 3], false); 453 | 454 | start(); 455 | }); 456 | 457 | asyncTest("single property", 2, (_) => { 458 | t(_, { 459 | a: 5, 460 | }, { 461 | a: 5, 462 | b: 3, 463 | }, true); 464 | 465 | t(_, { 466 | a: 6, 467 | }, { 468 | a: 5, 469 | b: 3, 470 | }, false); 471 | start(); 472 | }); 473 | 474 | asyncTest("implicit and (multiple properties)", 2, (_) => { 475 | t(_, { 476 | a: 5, 477 | b: 3, 478 | }, { 479 | a: 5, 480 | b: 3, 481 | }, true); 482 | 483 | t(_, { 484 | a: 5, 485 | b: 3, 486 | }, { 487 | a: 5, 488 | }, false); 489 | 490 | start(); 491 | }); 492 | 493 | asyncTest("walk", 5, (_) => { 494 | t(_, { 495 | 'a.b': /^hel/, 496 | }, { 497 | a: { 498 | b: 'hello', 499 | } 500 | }, true); 501 | 502 | t(_, { 503 | 'a.b': /^hel/, 504 | }, { 505 | a: { 506 | c: 'hello', 507 | } 508 | }, false); 509 | 510 | t(_, { 511 | 'a.c': /^hel/, 512 | }, { 
513 | b: { 514 | c: 'hello', 515 | } 516 | }, false); 517 | 518 | t(_, { 519 | 'a.b.c': /^hel/, 520 | }, { 521 | a: { 522 | b: { 523 | c: 'hello', 524 | } 525 | } 526 | }, true); 527 | 528 | t(_, { 529 | 'a.b.c': /^hel/, 530 | }, { 531 | a: { 532 | b: { 533 | c: 'world', 534 | } 535 | } 536 | }, false); 537 | 538 | start(); 539 | }); -------------------------------------------------------------------------------- /test/fixtures/exit2.cmd: -------------------------------------------------------------------------------- 1 | echo launched 2 | exit 2 3 | -------------------------------------------------------------------------------- /test/fixtures/exit2.sh: -------------------------------------------------------------------------------- 1 | exit 2 -------------------------------------------------------------------------------- /test/fixtures/rss-sample.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | Yahoo! Finance: Top Stories 4 | http://biz.yahoo.com/top.html 5 | Top Stories from Yahoo! Finance 6 | en-us 7 | Thu, 2 Jan 14 22:30:10 GMT 8 | 9 | http://us.news2.yimg.com/us.yimg.com/p/fi/pr/40197.gif 10 | Yahoo! Finance 11 | http://finance.yahoo.com 12 | 144 13 | 17 14 | 15 | 16 | Wall Street ends down on first trading day of 2014 17 | http://us.rd.yahoo.com/finance/news/rss/story/SIG=159cqpnuf/*http%3A//us.rd.yahoo.com/finance/news/topfinstories/SIG=136ed25a3/*http%3A//finance.yahoo.com/news/futures-lower-ahead-claims-manufacturing-data-121351141--sector.html?l=1 18 | U.S. stocks fell on their first day of trading in 2014 as investors booked profits in the wake of the S&P 500's best yearly advance since 1997, with many of last year's strongest performers down on the day. 
19 | yahoo_finance/395449748 20 | Thu, 2 Jan 14 22:30:10 GMT 21 | 22 | 23 | Billionaires begin 2014 a half-trillion dollars richer 24 | http://us.rd.yahoo.com/finance/news/rss/story/SIG=16ou9iqu4/*http%3A//us.rd.yahoo.com/finance/news/topfinstories/SIG=14l5f7u0r/*http%3A//finance.yahoo.com/blogs/daily-ticker/billionaires-start-2014-a-half-trillion-dollars-richer--the-biggest-winners-and-losers-150840317.html?l=1 25 | The richest people in the world start 2014 worth $3.7 trillion after collectively adding $524 billion to their net worth in 2013. That’s according to the Bloomberg Billionaires Index, which ranks the 300 wealthiest people across the globe. 26 | yahoo_finance/230499411 27 | Thu, 2 Jan 14 22:30:09 GMT 28 | 29 | 30 | Company makes good on leaving Colorado over gun laws 31 | http://us.rd.yahoo.com/finance/news/rss/story/SIG=14juci3if/*http%3A//us.rd.yahoo.com/finance/news/topfinstories/SIG=12g47gb2h/*http%3A//finance.yahoo.com/news/company-makes-good-leaving-colo-205433398.html?l=1 32 | One of the country's largest producers of ammunition magazines for guns is leaving Colorado and moving operations to Wyoming and Texas because of new state laws that include restrictions on how many cartridges ... 33 | yahoo_finance/1058722038 34 | Thu, 2 Jan 14 22:30:08 GMT 35 | 36 | 37 | A new start for Chrysler — but the same old problem 38 | http://us.rd.yahoo.com/finance/news/rss/story/SIG=15rh0jkua/*http%3A//us.rd.yahoo.com/finance/news/topfinstories/SIG=13o7071nl/*http%3A//finance.yahoo.com/blogs/the-exchange/a-new-start-for-chrysler%E2%80%94but-the-same-old-problem-174112171.html?l=1 39 | The No. 3 automaker is profitable again--but may still be too small to compete. 
40 | yahoo_finance/1922799730 41 | Thu, 2 Jan 14 22:30:07 GMT 42 | 43 | 44 | Why you should date Apple, not marry it 45 | http://us.rd.yahoo.com/finance/news/rss/story/SIG=15ft7r4sv/*http%3A//us.rd.yahoo.com/finance/news/topfinstories/SIG=13crtve76/*http%3A//finance.yahoo.com/blogs/talking-numbers/why-date-apple-not-marry-portfolio-manager-191514232.html?l=1 46 | One portfolio manager says don't marry something cheap -- but it's okay to date it. In this case, he's talking about Apple stock. 47 | yahoo_finance/2486936109 48 | Thu, 2 Jan 14 22:30:06 GMT 49 | 50 | 51 | Walmart’s donkey meat recall shows challenge of expansion 52 | http://us.rd.yahoo.com/finance/news/rss/story/SIG=15eernqvq/*http%3A//us.rd.yahoo.com/finance/news/topfinstories/SIG=13bce4si8/*http%3A//finance.yahoo.com/blogs/daily-ticker/walmart-recalls-tainted-donkey-meat-in-china-162624316.html?l=1 53 | Walmart, the world's largest retailer, was forced to recall donkey meat in some of its stores in China after tests revealed that the meat contained DNA of other animals, including fox. 54 | yahoo_finance/4216418081 55 | Thu, 2 Jan 14 22:30:05 GMT 56 | 57 | 58 | New year, old strategies; Why the January barometer still works 59 | http://us.rd.yahoo.com/finance/news/rss/story/SIG=15thpu8uu/*http%3A//us.rd.yahoo.com/finance/news/topfinstories/SIG=13q5l3c37/*http%3A//finance.yahoo.com/blogs/breakout/new-year--old-strategies--why-the-january-barometer-still-works-181438050.html?l=1 60 | Can you depend on the "January barometer" and indicators like it? Sam Stovall of S&P Capital IQ thinks you can. 
61 | yahoo_finance/3069840236 62 | Thu, 2 Jan 14 22:30:04 GMT 63 | 64 | 65 | Wall Street starts 2014 with a whimper; Dow down triple digits 66 | http://us.rd.yahoo.com/finance/news/rss/story/SIG=14vk3leja/*http%3A//us.rd.yahoo.com/finance/news/topfinstories/SIG=12sbqts6m/*http%3A//finance.yahoo.com/news/stocks-open-2014-lower-wall-street-145306348--finance.html?l=1 67 | Stocks are lower on Wall Street as the market comes off of its biggest annual gain in nearly two decades. 68 | yahoo_finance/341723052 69 | Thu, 2 Jan 14 22:30:03 GMT 70 | 71 | 72 | Why gold could stage an epic comeback 73 | http://us.rd.yahoo.com/finance/news/rss/story/SIG=15uls07ks/*http%3A//us.rd.yahoo.com/finance/news/topfinstories/SIG=13rrmeuvv/*http%3A//finance.yahoo.com/blogs/breakout/from-worst-to-first--why-gold-is-about-to-stage-an-epic-comeback-154842675.html?l=1 74 | 2013 was a disastrous year for gold, but David Lutz of Stifel says 2014 will be the year the precious metal's luster returns. 75 | yahoo_finance/3815225807 76 | Thu, 2 Jan 14 22:30:02 GMT 77 | 78 | 79 | 2013's big winners abandoned 'safety' and bet on central bankers 80 | http://us.rd.yahoo.com/finance/news/rss/story/SIG=157opdttj/*http%3A//us.rd.yahoo.com/finance/news/topfinstories/SIG=13432hbcc/*http%3A//finance.yahoo.com/blogs/daily-ticker/what-2013-s-big-winners-had-in-common-144920545.html?l=1 81 | With the Dow up 27% and the S&P rising 30%, 2013 was a banner year for the stock market and an amazing year for a few select stock-pickers. What these investors had in common was "they put their faith in the most aggressive central banks - the U.S. and Japan - and believed in the economies in those markets," says Yahoo Finance's Michael Santoli. 
82 | yahoo_finance/2255394120 83 | Thu, 2 Jan 14 22:30:01 GMT 84 | 85 | 86 | 87 | 88 | 89 | -------------------------------------------------------------------------------- /test/index.js: -------------------------------------------------------------------------------- 1 | const fsp = require('path'); 2 | require('streamline-helpers').runTests({ 3 | root: __dirname, 4 | subdirs: ['common', 'server'], 5 | coverage: '../lib', 6 | }); -------------------------------------------------------------------------------- /test/loader.js: -------------------------------------------------------------------------------- 1 | "use strict"; 2 | // install streamline hooks (only if files are not precompiled) 3 | if (/[\\\/]test$/.test(__dirname)) require('streamline').register(); 4 | 5 | // patch asyncTest because streamline test function needs a callback. 6 | const original = global.asyncTest; 7 | global.asyncTest = function (name, expect, fn) { 8 | if (typeof expect === 'function') { 9 | fn = expect; 10 | expect = null; 11 | } 12 | original(name, expect, function () { 13 | fn(function (err) { 14 | if (err) throw err; 15 | }); 16 | }); 17 | } -------------------------------------------------------------------------------- /test/server/binary-test.ts: -------------------------------------------------------------------------------- 1 | /// 2 | declare function asyncTest(name: string, expected: number, test: (_: _) => any): any; 3 | 4 | import { _ } from "streamline-runtime"; 5 | import * as ez from "../.."; 6 | 7 | QUnit.module(module.id); 8 | 9 | const TESTBUF = Buffer.from([1, 4, 9, 16, 25, 36, 49, 64, 81, 100]); 10 | 11 | function eqbuf(b1: Buffer | undefined, b2: Buffer, msg: string) { 12 | if (!b1) throw new Error('unexpected EOF'); 13 | equal(b1.toString('hex'), b2.toString('hex'), msg); 14 | } 15 | 16 | asyncTest("roundtrip", 52, (_) => { 17 | [1, 4, 11, 1000].forEach_(_, function (_, size) { 18 | const dst = ez.devices.buffer.writer(); 19 | const writer = 
ez.helpers.binary.writer(dst, { 20 | bufSize: size 21 | }); 22 | writer.write(_, TESTBUF); 23 | writer.writeInt8(_, 1); 24 | writer.writeInt16(_, 2); 25 | writer.writeInt32(_, 3); 26 | writer.writeFloat(_, 0.5); 27 | writer.writeDouble(_, 0.125); 28 | writer.writeInt8(_, 5); 29 | writer.write(_); 30 | const result = dst.toBuffer(); 31 | 32 | const src = ez.devices.buffer.reader(result).transform(ez.transforms.cut.transform(5)); 33 | const reader = ez.helpers.binary.reader(src); 34 | eqbuf(reader.read(_, 7), TESTBUF.slice(0, 7), 'read 7 (size=' + size + ')'); 35 | reader.unread(3); 36 | eqbuf(reader.peek(_, 5), TESTBUF.slice(4, 9), 'unread 3 then peek 5'); 37 | eqbuf(reader.read(_, 6), TESTBUF.slice(4), 'read 6'); 38 | equal(reader.readInt8(_), 1, 'int8 roundtrip'); 39 | equal(reader.peekInt16(_), 2, 'int16 roundtrip (peek)'); 40 | equal(reader.readInt16(_), 2, 'int16 roundtrip'); 41 | equal(reader.readInt32(_), 3, 'int32 roundtrip'); 42 | equal(reader.readFloat(_), 0.5, 'float roundtrip'); 43 | equal(reader.peekDouble(_), 0.125, 'double roundtrip (peek)'); 44 | equal(reader.readDouble(_), 0.125, 'double roundtrip'); 45 | reader.unreadDouble(); 46 | equal(reader.readDouble(_), 0.125, 'double roundtrip (after unread)'); 47 | equal(reader.readInt8(_), 5, 'int8 roundtrip again'); 48 | equal(reader.read(_), undefined, 'EOF roundtrip'); 49 | }) 50 | start(); 51 | }); -------------------------------------------------------------------------------- /test/server/child-process-test.ts: -------------------------------------------------------------------------------- 1 | /// 2 | declare function asyncTest(name: string, expected: number, test: (_: _) => any): any; 3 | 4 | import { _ } from "streamline-runtime"; 5 | import * as ez from "../.."; 6 | 7 | import * as cp from "child_process"; 8 | import * as fsp from "path"; 9 | import * as os from "os"; 10 | 11 | QUnit.module(module.id); 12 | 13 | asyncTest("echo ok", 1, (_) => { 14 | if (os.type() === 'Windows_NT') { 15 | 
ok("Ignore on Windows"); 16 | } else { 17 | const proc = cp.spawn('echo', ['hello\nworld']); 18 | const got = ez.devices.child_process.reader(proc).toArray(_); 19 | deepEqual(got, ['hello', 'world']); 20 | } 21 | start(); 22 | }); 23 | 24 | asyncTest("bad command", 1, (_) => { 25 | const proc = cp.spawn(fsp.join(__dirname, 'foobar.zoo'), ['2']); 26 | try { 27 | const got = ez.devices.child_process.reader(proc).toArray(_); 28 | ok(false); 29 | } catch (ex) { 30 | ok(ex.code < 0); // -1 on node 0.10 but -2 on 0.12 31 | } 32 | start(); 33 | }); 34 | 35 | asyncTest("exit 2", 1, (_) => { 36 | const cmd = 'exit2' + (os.type() === 'Windows_NT' ? '.cmd' : '.sh'); 37 | const proc = cp.spawn(fsp.join(__dirname, '../../test/fixtures', cmd), ['2']); 38 | try { 39 | const got = ez.devices.child_process.reader(proc).toArray(_); 40 | ok(false); 41 | } catch (ex) { 42 | equal(ex.code, 2); 43 | } 44 | start(); 45 | }); -------------------------------------------------------------------------------- /test/server/csv-test.ts: -------------------------------------------------------------------------------- 1 | // 2 | declare function asyncTest(name: string, expected: number, test: (_: _) => any): any; 3 | 4 | import { _ } from "streamline-runtime"; 5 | import * as ez from "../.."; 6 | 7 | QUnit.module(module.id); 8 | const csv = ez.transforms.csv; 9 | const string = ez.devices.string; 10 | 11 | const legends = 'firstName,lastName,gender,dob\n' + // 12 | 'Jimi,Hendrix,M,27-11-1942\n' + // 13 | 'Janis,Joplin,F,19-01-1943\n' + // 14 | 'Jim,Morrison,M,08-12-1943\n' + // 15 | 'Kurt,Cobain,M,20-02-1967\n'; 16 | 17 | asyncTest("roundtrip", 1, (_) => { 18 | const sink = string.writer(); 19 | string.reader(legends).transform(csv.parser()).transform(csv.formatter()).pipe(_, sink); 20 | equal(sink.toString(), legends); 21 | start(); 22 | }); 23 | 24 | asyncTest("binary input", 1, (_) => { 25 | const sink = string.writer(); 26 | ez.devices.buffer.reader(Buffer.from(legends, 
'utf8')).transform(csv.parser()).transform(csv.formatter()).pipe(_, sink); 27 | equal(sink.toString(), legends); 28 | start(); 29 | }); 30 | -------------------------------------------------------------------------------- /test/server/ez-test.ts: -------------------------------------------------------------------------------- 1 | // 2 | declare function asyncTest(name: string, expected: number, test: (_: _) => any): any; 3 | 4 | import { _ } from "streamline-runtime"; 5 | import * as ez from "../.."; 6 | 7 | QUnit.module(module.id); 8 | 9 | var server: ez.devices.http.HttpServer; 10 | 11 | asyncTest("start echo server", 1, (_) => { 12 | server = ez.devices.http.server(function (req, res, _) { 13 | if (req.method === "POST") { 14 | const text = req.readAll(_); 15 | const ct = req.headers["content-type"]; 16 | if (ct === 'application/json') { 17 | res.writeHead(201, { 18 | 'content-type': ct, 19 | }); 20 | res.end('{"echo":' + text + '}'); 21 | } else { 22 | res.writeHead(201); 23 | res.end(ct + ': ' + text); 24 | } 25 | } 26 | if (req.method === "GET") { 27 | // query parameters 28 | var query = (req.url.split("?")[1] || "").split("&").reduce(function (prev, crt) { 29 | var parts = crt.split("="); 30 | if (parts[0]) prev[parts[0]] = parts[1]; 31 | return prev; 32 | }, {} as any); 33 | res.writeHead(query.status || 200, {}); 34 | res.end("reply for GET"); 35 | } 36 | }); 37 | server.listen(_, 3005); 38 | ok(true, "server started"); 39 | start(); 40 | }); 41 | 42 | asyncTest("http test", 2, (_) => { 43 | const reply = ez.reader("http://localhost:3005").readAll(_); 44 | strictEqual(reply, "reply for GET", "Get test: reader ok"); 45 | // try not found reader 46 | try { 47 | const reply404 = ez.reader("http://localhost:3005?status=404").readAll(_); 48 | ok(false, "Reader supposed to throw"); 49 | } catch (ex) { 50 | ok(/Status 404/.test(ex.message), "Reader throws ok"); 51 | } 52 | start(); 53 | }); 54 | 55 | asyncTest("http readers and writers", 1, (_) => { 56 | const 
writer = ez.writer("http://localhost:3005"); 57 | const result = writer.writeAll(_, "hello world").result; 58 | strictEqual(result, "text/plain: hello world"); 59 | start(); 60 | }); 61 | 62 | asyncTest("http JSON", 1, (_) => { 63 | const writer = ez.writer("http://localhost:3005"); 64 | const result = writer.writeAll(_, [2, 4]).result; 65 | deepEqual(result, { echo: [2, 4] }); 66 | start(); 67 | }); 68 | 69 | asyncTest("array test", 1, (_) => { 70 | const reply = ez.reader([2, 3, 4]).readAll(_); 71 | deepEqual(reply, [2, 3, 4]); 72 | start(); 73 | }); 74 | 75 | asyncTest("array readers and writers", 1, (_) => { 76 | const writer = ez.writer([]); 77 | ez.reader([2, 3, 4]).pipe(_, writer); 78 | deepEqual(writer.result, [2, 3, 4]); 79 | start(); 80 | }); 81 | 82 | asyncTest("string test", 1, (_) => { 83 | const reply = ez.reader("string:hello world").readAll(_); 84 | deepEqual(reply, "hello world"); 85 | start(); 86 | }); 87 | 88 | asyncTest("string readers and writers", 1, (_) => { 89 | const writer = ez.writer("string:"); 90 | ez.reader("string:hello world").pipe(_, writer); 91 | deepEqual(writer.result, "hello world"); 92 | start(); 93 | }); 94 | 95 | asyncTest("buffer test", 1, (_) => { 96 | const buf = Buffer.from('hello world', 'utf8'); 97 | const reply = ez.reader(buf).transform(ez.transforms.cut.transform(2)).readAll(_) as Buffer; 98 | deepEqual(reply.toString('utf8'), buf.toString('utf8')); 99 | start(); 100 | }); 101 | 102 | asyncTest("buffer reader and writer", 1, (_) => { 103 | const buf = Buffer.from('hello world', 'utf8'); 104 | const writer = ez.writer(Buffer.alloc(0)); 105 | const reply = ez.reader(buf).pipe(_, writer); 106 | deepEqual(writer.result.toString('utf8'), buf.toString('utf8')); 107 | start(); 108 | }); 109 | -------------------------------------------------------------------------------- /test/server/http-test.ts: -------------------------------------------------------------------------------- 1 | // 2 | declare function asyncTest(name: 
string, expected: number, test: (_: _) => any): any; 3 | 4 | import { _ } from "streamline-runtime"; 5 | import * as ez from "../.."; 6 | 7 | QUnit.module(module.id); 8 | 9 | var server: ez.devices.http.HttpServer; 10 | 11 | asyncTest("Echo service test", 6, (_) => { 12 | function _test(_: _, type: string, message: any) { 13 | const writer = ez.factory("http://localhost:3004").writer(_); 14 | writer.write(_, message); 15 | strictEqual(writer.write(_, undefined), type + ((type === "application/json") ? JSON.stringify(message) : message), "POST result ok for " + type); 16 | } 17 | server = ez.devices.http.server(function (req, res, _) { 18 | if (req.method === "POST") { 19 | const text = req.readAll(_); 20 | res.statusCode = 201; 21 | res.end(req.headers["content-type"] + text); 22 | } 23 | if (req.method === "GET") { 24 | // query parameters 25 | var query = (req.url.split("?")[1] || "").split("&").reduce(function (prev, crt) { 26 | var parts = crt.split("="); 27 | if (parts[0]) prev[parts[0]] = parts[1]; 28 | return prev; 29 | }, {} as any); 30 | res.writeHead(query.status || 200, {}); 31 | res.end("reply for GET"); 32 | } 33 | }); 34 | server.listen(_, 3004); 35 | _test(_, "text/plain", "post test"); 36 | _test(_, "application/json", { test: "post test" }); 37 | _test(_, "text/html", ""); 38 | _test(_, "application/xml", " 2 | declare function asyncTest(name: string, expected: number, test: (_: _) => any): any; 3 | 4 | import { _ } from "streamline-runtime"; 5 | import * as ez from "../.."; 6 | 7 | QUnit.module(module.id); 8 | 9 | const file = ez.devices.file; 10 | const jsonTrans = ez.transforms.json; 11 | 12 | const inputFile = require('os').tmpdir() + '/jsonInput.json'; 13 | const outputFile = require('os').tmpdir() + '/jsonOutput.json'; 14 | const fs = require('fs'); 15 | const string = ez.devices.string; 16 | 17 | const mixedData = '[' + // 18 | '{ "firstName": "Jimy", "lastName": "Hendrix" },' + // 19 | '\n { "firstName": "Jim", "lastName": "Morrison" },' + 
// 20 | '\n"\\\"escape\\ttest",' + // 21 | '\n"people are strange", 27, null,' + // 22 | '\n { "firstName": "Janis", ' + // 23 | '\n "lastName": "Joplin" },' + // 24 | '\n[1,2, 3, ' + // 25 | '\n 5, 8, 13],' + // 26 | '\n true]'; 27 | 28 | function nodeStream(_: _, text: string) { 29 | fs.writeFile(inputFile, text, "utf8", _); 30 | return file.text.reader(inputFile); 31 | } 32 | 33 | asyncTest("empty", 1, (_) => { 34 | const stream = nodeStream(_, '[]').transform(jsonTrans.parser()); 35 | strictEqual(stream.read(_), undefined, "undefined"); 36 | start(); 37 | }); 38 | 39 | asyncTest("mixed data with node node stream", 9, (_) => { 40 | const stream = nodeStream(_, mixedData); 41 | const expected = JSON.parse(mixedData); 42 | stream.transform(jsonTrans.parser()).forEach(_, function (_, elt, i) { 43 | deepEqual(elt, expected[i], expected[i]); 44 | }); 45 | start(); 46 | }); 47 | 48 | asyncTest("fragmented read", 9, (_) => { 49 | const stream = string.reader(mixedData, 2).transform(jsonTrans.parser()); 50 | const expected = JSON.parse(mixedData); 51 | stream.forEach(_, function (_, elt, i) { 52 | deepEqual(elt, expected[i], expected[i]); 53 | }); 54 | start(); 55 | }); 56 | 57 | asyncTest("binary input", 9, (_) => { 58 | const stream = ez.devices.buffer.reader(Buffer.from(mixedData, 'utf8')).transform(jsonTrans.parser()); 59 | const expected = JSON.parse(mixedData); 60 | stream.forEach(_, function (_, elt, i) { 61 | deepEqual(elt, expected[i], expected[i]); 62 | }); 63 | start(); 64 | }); 65 | 66 | asyncTest("roundtrip", 11, (_) => { 67 | const writer = string.writer(); 68 | nodeStream(_, mixedData).transform(jsonTrans.parser()).map(function (_, elt) { 69 | return (elt && elt.lastName) ? elt.lastName : elt; 70 | }).transform(jsonTrans.formatter()).pipe(_, writer); 71 | const result = JSON.parse(writer.toString()); 72 | const expected = JSON.parse(mixedData).map(function (elt: any) { 73 | return (elt && elt.lastName) ? 
elt.lastName : elt; 74 | }); 75 | ok(Array.isArray(result), "isArray"); 76 | equal(result.length, expected.length, "length=" + result.length) 77 | result.forEach(function (elt: any, i: number) { 78 | deepEqual(result[i], elt, elt); 79 | }); 80 | start(); 81 | }); 82 | -------------------------------------------------------------------------------- /test/server/multipart-test.ts: -------------------------------------------------------------------------------- 1 | /// 2 | declare function asyncTest(name: string, expected: number, test: (_: _) => any): any; 3 | 4 | import { _ } from "streamline-runtime"; 5 | import * as ez from "../.."; 6 | 7 | QUnit.module(module.id); 8 | 9 | const buffer = ez.devices.buffer; 10 | const multipart = ez.transforms.multipart 11 | 12 | const boundary = "-- my boundary --"; 13 | 14 | function headers(subType: string) { 15 | return { 16 | "content-type": "multipart/" + subType + ";atb1=val1; boundary=" + boundary + "; atb2=val2", 17 | }; 18 | } 19 | 20 | type Part = { 21 | headers: { [key: string]: string }; 22 | body: string; 23 | }; 24 | 25 | function testStream() { 26 | const parts = [{ 27 | headers: { 28 | A: "VA1", 29 | B: "VB1", 30 | "Content-Type": "text/plain", 31 | }, 32 | body: "C1", 33 | }, { 34 | headers: { 35 | "content-type": "text/plain", 36 | A: "VA2", 37 | B: "VB2" 38 | }, 39 | body: "C2", 40 | }]; 41 | 42 | function formatPart(part: Part) { 43 | return Object.keys(part.headers).map(function (name) { 44 | return name + ': ' + part.headers[name] 45 | }).join('\n') + '\n\n' + boundary + '\n' + part.body + '\n' + boundary + '\n'; 46 | } 47 | return buffer.reader(Buffer.from(parts.map(formatPart).join(''), "binary")); 48 | } 49 | 50 | asyncTest('basic multipart/mixed', 13, (_) => { 51 | const source = testStream(); 52 | const stream = source.transform(multipart.parser(headers("mixed"))); 53 | var part = stream.read(_); 54 | ok(part != null, "part != null"); 55 | strictEqual(part.headers.a, "VA1", "header A"); 56 | 
strictEqual(part.headers.b, "VB1", "header B"); 57 | strictEqual(part.headers["content-type"], "text/plain", "content-type"); 58 | var r = part.read(_); 59 | strictEqual(r.toString('binary'), 'C1', 'body C1'); 60 | r = part.read(_); 61 | strictEqual(r, undefined, "end of part 1"); 62 | 63 | part = stream.read(_); 64 | ok(part != null, "part != null"); 65 | strictEqual(part.headers.a, "VA2", "header A"); 66 | strictEqual(part.headers.b, "VB2", "header B"); 67 | strictEqual(part.headers["content-type"], "text/plain", "content-type"); 68 | r = part.read(_); 69 | strictEqual(r.toString('binary'), 'C2', 'body C2'); 70 | r = part.read(_); 71 | strictEqual(r, undefined, "end of part 2"); 72 | 73 | part = stream.read(_); 74 | equal(part, undefined, "read next part returns undefined"); 75 | start(); 76 | }); 77 | 78 | asyncTest('multipart/mixed roundtrip', 2, (_) => { 79 | const heads = headers("mixed"); 80 | const source = testStream(); 81 | const writer = buffer.writer(); 82 | source.transform(multipart.parser(heads)).transform(multipart.formatter(heads)).pipe(_, writer); 83 | const result = writer.toBuffer(); 84 | strictEqual(result.length, 158); 85 | const writer2 = buffer.writer(); 86 | buffer.reader(result).transform(multipart.parser(heads)).transform(multipart.formatter(heads)).pipe(_, writer2); 87 | strictEqual(result.toString("binary"), writer2.toBuffer().toString("binary")); 88 | start(); 89 | }); -------------------------------------------------------------------------------- /test/server/newlines-test.ts: -------------------------------------------------------------------------------- 1 | /// 2 | declare function asyncTest(name: string, expected: number, test: (_: _) => any): any; 3 | 4 | import { _ } from "streamline-runtime"; 5 | import * as ez from "../.."; 6 | 7 | QUnit.module(module.id); 8 | const lines = ez.transforms.lines; 9 | const file = ez.devices.file; 10 | 11 | const inputFile = require('os').tmpdir() + '/jsonInput.json'; 12 | const outputFile = 
require('os').tmpdir() + '/jsonOutput.json'; 13 | const fs = require('fs'); 14 | const string = ez.devices.string; 15 | 16 | function nodeStream(_: _, text: string) { 17 | fs.writeFile(inputFile, text, "utf8", _); 18 | return file.text.reader(inputFile); 19 | } 20 | 21 | asyncTest("empty", 1, (_) => { 22 | const stream = nodeStream(_, '').transform(lines.parser()); 23 | strictEqual(stream.read(_), undefined, "undefined"); 24 | start(); 25 | }); 26 | 27 | asyncTest("non empty line", 2, (_) => { 28 | const stream = nodeStream(_, 'a').transform(lines.parser()); 29 | strictEqual(stream.read(_), 'a', "a"); 30 | strictEqual(stream.read(_), undefined, "undefined"); 31 | start(); 32 | }); 33 | 34 | asyncTest("only newline", 2, (_) => { 35 | const stream = nodeStream(_, '\n').transform(lines.parser()); 36 | strictEqual(stream.read(_), '', "empty line"); 37 | strictEqual(stream.read(_), undefined, "undefined"); 38 | start(); 39 | }); 40 | 41 | asyncTest("mixed", 5, (_) => { 42 | const stream = nodeStream(_, 'abc\n\ndef\nghi').transform(lines.parser()); 43 | strictEqual(stream.read(_), 'abc', 'abc'); 44 | strictEqual(stream.read(_), '', "empty line"); 45 | strictEqual(stream.read(_), 'def', 'def'); 46 | strictEqual(stream.read(_), 'ghi', 'ghi'); 47 | strictEqual(stream.read(_), undefined, "undefined"); 48 | start(); 49 | }); 50 | 51 | asyncTest("roundtrip", 1, (_) => { 52 | const writer = string.writer(); 53 | const text = 'abc\n\ndef\nghi'; 54 | string.reader(text, 2).transform(lines.parser()).transform(lines.formatter()).pipe(_, writer); 55 | strictEqual(writer.toString(), text, text); 56 | start(); 57 | }); 58 | 59 | asyncTest("binary input", 1, (_) => { 60 | const writer = string.writer(); 61 | const text = 'abc\n\ndef\nghi'; 62 | ez.devices.buffer.reader(Buffer.from(text, 'utf8')).transform(lines.parser()).transform(lines.formatter()).pipe(_, writer); 63 | strictEqual(writer.toString(), text, text); 64 | start(); 65 | }); 66 | 
-------------------------------------------------------------------------------- /test/server/nodify-test.ts: -------------------------------------------------------------------------------- 1 | /// 2 | declare function asyncTest(name: string, expected: number, test: (_: _) => any): any; 3 | 4 | import { _ } from "streamline-runtime"; 5 | import * as ez from "../.."; 6 | 7 | QUnit.module(module.id); 8 | 9 | const sample = __dirname + '/../../test/fixtures/rss-sample.xml'; 10 | const zlib = require('zlib'); 11 | 12 | asyncTest("gzip roundtrip", 1, (_) => { 13 | const sampleReader1 = ez.devices.file.text.reader(sample); 14 | var sampleReader2 = ez.devices.file.text.reader(sample); 15 | const stringify = ez.mappers.convert.stringify(); 16 | const cutter = ez.transforms.cut.transform(10); 17 | const out = require('fs').createWriteStream(__dirname + '/../../test/fixtures/rss-sample.zip'); 18 | sampleReader2 = sampleReader2.nodeTransform(zlib.createGzip()).nodeTransform(zlib.createGunzip()).map(stringify); 19 | const cmp = sampleReader1.transform(cutter).compare(_, sampleReader2.transform(cutter)); 20 | equal(cmp, 0); 21 | start(); 22 | }); 23 | asyncTest("writer nodify", 1, (_) => { 24 | const sampleReader1 = ez.devices.file.text.reader(sample); 25 | const sampleReader2 = ez.devices.file.text.reader(sample); 26 | const dest = ez.devices.string.writer(); 27 | const expected = sampleReader2.toArray(_).join(''); 28 | const piped = sampleReader1.nodify().pipe(dest.nodify()); 29 | piped.on('finish', function () { 30 | equal(dest.toString(), expected); 31 | start(); 32 | }); 33 | }); 34 | -------------------------------------------------------------------------------- /test/server/queue-test.ts: -------------------------------------------------------------------------------- 1 | /// 2 | declare function asyncTest(name: string, expected: number, test: (_: _) => any): any; 3 | 4 | import { _ } from "streamline-runtime"; 5 | import * as ez from "../.."; 6 | 7 | 
QUnit.module(module.id); 8 | 9 | asyncTest("put (lossy)", 7, (_) => { 10 | const queue = ez.devices.queue.create(4); 11 | for (var i = 0; i < 6; i++) { 12 | var queued = queue.put(i); 13 | ok(queued === (i < 4), "put return value: " + queued); 14 | } 15 | queue.end(); 16 | const result = queue.reader.toArray(_); 17 | equal(result.join(','), "0,1,2,3", 'partial queue contents ok'); 18 | start(); 19 | }); 20 | 21 | asyncTest("write (lossless)", 1, (_) => { 22 | const queue = ez.devices.queue.create(4); 23 | const writeTask = _.future(_ => { 24 | for (var i = 0; i < 6; i++) queue.write(_, i); 25 | queue.write(_); 26 | }); 27 | const readTask = _.future(_ => { 28 | return queue.reader.toArray(_); 29 | }); 30 | 31 | writeTask(_); 32 | equal(readTask(_).join(','), "0,1,2,3,4,5", 'full queue contents ok'); 33 | start(); 34 | }); -------------------------------------------------------------------------------- /test/server/stop-test.ts: -------------------------------------------------------------------------------- 1 | /// 2 | declare function asyncTest(name: string, expected: number, test: (_: _) => any): any; 3 | 4 | import { _ } from "streamline-runtime"; 5 | import * as ez from "../.."; 6 | 7 | QUnit.module(module.id); 8 | 9 | interface TestReader extends ez.Reader { 10 | stoppedReason?: { 11 | at: number; 12 | arg: any; 13 | } 14 | } 15 | 16 | //interface TestReader extends ez.reader.Reader 17 | function numbers(limit: number): TestReader { 18 | var i = 0; 19 | return ez.devices.generic.reader(function read(this: TestReader, _: _) { 20 | if (this.stoppedReason) throw new Error("attempt to read after stop: " + i); 21 | return i >= limit ? 
undefined : i++; 22 | }, function stop(this: TestReader, _: _, arg: any) { 23 | this.stoppedReason = { 24 | at: i, 25 | arg: arg, 26 | }; 27 | }) as TestReader; 28 | } 29 | 30 | asyncTest("explicit stop", 2, (_) => { 31 | const source = numbers(100); 32 | var result = '' 33 | for (var i = 0; i < 5; i++) result += source.read(_); 34 | source.stop(_); 35 | strictEqual(result, "01234"); 36 | strictEqual(source.stoppedReason && source.stoppedReason.at, 5); 37 | start(); 38 | }); 39 | 40 | asyncTest("explicit stop with err", 2, (_) => { 41 | const source = numbers(100); 42 | var result = '' 43 | for (var i = 0; i < 5; i++) result += source.read(_); 44 | const err = new Error("testing"); 45 | source.stop(_, err); 46 | strictEqual(result, "01234"); 47 | strictEqual(source.stoppedReason && source.stoppedReason.arg, err); 48 | start(); 49 | }); 50 | 51 | // limit exercises transform 52 | asyncTest("limit stops", 2, (_) => { 53 | var source = numbers(100); 54 | const result = source.skip(2).limit(5).toArray(_).join(','); 55 | strictEqual(result, '2,3,4,5,6'); 56 | ok(source.stoppedReason, 'stopped'); 57 | start(); 58 | }); 59 | 60 | asyncTest("concat stops", 4, (_) => { 61 | const source1 = numbers(5); 62 | const source2 = numbers(5); 63 | const source3 = numbers(5); 64 | const result = source1.concat([source2, source3]).limit(7).toArray(_).join(','); 65 | strictEqual(result, '0,1,2,3,4,0,1'); 66 | ok(!source1.stoppedReason, 'source1 not stopped'); 67 | ok(source2.stoppedReason, 'source2 stopped'); 68 | ok(source3.stoppedReason, 'source3 stopped'); 69 | start(); 70 | }); 71 | 72 | asyncTest("dup stops on 0 and continues on 1", 3, (_) => { 73 | const source = numbers(5); 74 | const dups = source.dup(); 75 | const resultF = _.future(_ => dups[0].limit(2).toArray(_)); 76 | const altF = _.future(_ => dups[1].toArray(_)); 77 | const result = resultF(_).join(); 78 | const alt = altF(_).join(); 79 | strictEqual(result, '0,1'); 80 | strictEqual(alt, '0,1,2,3,4'); 81 | 
ok(!source.stoppedReason, 'source not stopped'); 82 | start(); 83 | }); 84 | 85 | asyncTest("dup stops on 1 and continues on 0", 3, (_) => { 86 | const source = numbers(5); 87 | const dups = source.dup(); 88 | const resultF = _.future(_ => dups[1].limit(2).toArray(_)); 89 | const altF = _.future(_ => dups[0].toArray(_)); 90 | const result = resultF(_).join(); 91 | const alt = altF(_).join(); 92 | strictEqual(result, '0,1'); 93 | strictEqual(alt, '0,1,2,3,4'); 94 | ok(!source.stoppedReason, 'source not stopped'); 95 | start(); 96 | }); 97 | 98 | asyncTest("dup stops both silently from 0", 3, (_) => { 99 | const source = numbers(5); 100 | const dups = source.dup(); 101 | const resultF = _.future(_ => dups[0].limit(2, true).toArray(_)); 102 | const altF = _.future(_ => dups[1].toArray(_)); 103 | const result = resultF(_).join(); 104 | const alt = altF(_).join(); 105 | strictEqual(result, '0,1'); 106 | strictEqual(alt, '0,1,2'); // 2 is already queued when we hit limit 107 | ok(source.stoppedReason, 'source stopped'); 108 | start(); 109 | }); 110 | 111 | asyncTest("dup stops both silently from 1", 3, (_) => { 112 | const source = numbers(5); 113 | const dups = source.dup(); 114 | const resultF = _.future(_ => dups[1].limit(2, true).toArray(_)); 115 | const altF = _.future(_ => dups[0].toArray(_)); 116 | const result = resultF(_).join(); 117 | const alt = altF(_).join(); 118 | strictEqual(result, '0,1'); 119 | strictEqual(alt, '0,1,2'); // 2 is already queued when we hit limit 120 | ok(source.stoppedReason, 'source stopped'); 121 | start(); 122 | }); 123 | 124 | asyncTest("dup stops with error from 0", 3, (_) => { 125 | const source = numbers(5); 126 | const dups = source.dup(); 127 | const resultF = _.future(_ => dups[0].limit(2, new Error("testing")).toArray(_)); 128 | const altF = _.future(_ => dups[1].toArray(_)); 129 | const result = resultF(_).join(); 130 | try { 131 | const alt = altF(_).join(); 132 | ok(false, "altF did not throw"); 133 | } catch (ex) { 134 | 
strictEqual(ex.message, "testing"); 135 | } 136 | strictEqual(result, '0,1'); 137 | ok(source.stoppedReason, 'source stopped'); 138 | start(); 139 | }); 140 | 141 | asyncTest("dup stops with error from 1", 3, (_) => { 142 | const source = numbers(5); 143 | const dups = source.dup(); 144 | const resultF = _.future(_ => dups[1].limit(2, new Error("testing")).toArray(_)); 145 | const altF = _.future(_ => dups[0].toArray(_)); 146 | const result = resultF(_).join(); 147 | try { 148 | const alt = altF(_).join(); 149 | ok(false, "altF did not throw"); 150 | } catch (ex) { 151 | strictEqual(ex.message, "testing"); 152 | } 153 | strictEqual(result, '0,1'); 154 | ok(source.stoppedReason, 'source stopped'); 155 | start(); 156 | }); 157 | 158 | asyncTest("dup stops 0 first, 1 later", 3, (_) => { 159 | const source = numbers(10); 160 | const dups = source.dup(); 161 | const resultF = _.future(_ => dups[0].limit(2).toArray(_)); 162 | const altF = _.future(_ => dups[1].limit(5).toArray(_)); 163 | const result = resultF(_).join(); 164 | const alt = altF(_).join(); 165 | strictEqual(result, '0,1'); 166 | strictEqual(alt, '0,1,2,3,4'); 167 | ok(source.stoppedReason, 'source stopped'); 168 | start(); 169 | }); 170 | 171 | asyncTest("dup stops 1 first, 0 later", 3, (_) => { 172 | const source = numbers(10); 173 | const dups = source.dup(); 174 | const resultF = _.future(_ => dups[1].limit(2).toArray(_)); 175 | const altF = _.future(_ => dups[0].limit(5).toArray(_)); 176 | const result = resultF(_).join(); 177 | const alt = altF(_).join(); 178 | setTimeout(_, 0); 179 | strictEqual(result, '0,1'); 180 | strictEqual(alt, '0,1,2,3,4'); 181 | ok(source.stoppedReason, 'source stopped'); 182 | start(); 183 | }); 184 | 185 | asyncTest("pre", 2, (_) => { 186 | const source = numbers(10); 187 | const target = ez.devices.array.writer(); 188 | source.pipe(_, target.pre.limit(5)); 189 | strictEqual(target.toArray().join(), '0,1,2,3,4'); 190 | ok(source.stoppedReason, 'source stopped'); 191 | 
start(); 192 | }); 193 | 194 | 195 | -------------------------------------------------------------------------------- /test/server/xml-test.ts: -------------------------------------------------------------------------------- 1 | /// 2 | declare function asyncTest(name: string, expected: number, test: (_: _) => any): any; 3 | 4 | import { _ } from "streamline-runtime"; 5 | import * as ez from "../.."; 6 | import * as fs from "fs"; 7 | 8 | QUnit.module(module.id); 9 | 10 | function short(s: string) { 11 | return s.length < 50 ? s : s.substring(0, 47) + '...'; 12 | } 13 | 14 | function parseTest(_: _, xml: string, js: any, skipRT?: boolean) { 15 | const full = '' + xml + "\n"; 16 | const parsed = ez.devices.string.reader(full).transform(ez.transforms.cut.transform(2)) // 17 | .transform(ez.transforms.xml.parser('root')).toArray(_); 18 | deepEqual(parsed[0].root, js, "parse " + short(xml)); 19 | if (!skipRT) { 20 | const rt = ez.devices.string.reader(full).transform(ez.transforms.cut.transform(2)) // 21 | .transform(ez.transforms.xml.parser('root')) // 22 | .transform(ez.transforms.xml.formatter('root')).toArray(_).join(''); 23 | strictEqual(rt, full, "roundtrip " + short(full)); 24 | } 25 | } 26 | 27 | function rtTest(_: _, name: string, xml: string, indent: string | undefined, result: any) { 28 | const full = '' + xml + "\n"; 29 | result = '' + (indent ? 
'\n' : '') + '' + (result || xml) + "\n"; 30 | const rt = ez.devices.string.reader(full).transform(ez.transforms.cut.transform(2)) // 31 | .transform(ez.transforms.xml.parser('root')) // 32 | .transform(ez.transforms.xml.formatter({ 33 | tags: 'root', 34 | indent: indent 35 | })).toArray(_).join(''); 36 | strictEqual(rt, result, "roundtrip " + full); 37 | } 38 | 39 | asyncTest('simple tag without attributes', 6, (_) => { 40 | parseTest(_, '', { 41 | a: {} 42 | }); 43 | parseTest(_, '', { 44 | a: "" 45 | }); 46 | parseTest(_, '5', { 47 | a: "5" 48 | }); 49 | start(); 50 | }); 51 | 52 | asyncTest('simple tag with attributes', 6, (_) => { 53 | parseTest(_, '5', { 54 | a: { 55 | $: { 56 | x: "3", 57 | y: "4" 58 | }, 59 | $value: "5" 60 | } 61 | }); 62 | parseTest(_, '', { 63 | a: { 64 | $: { 65 | x: "3" 66 | }, 67 | $value: "" 68 | } 69 | }); 70 | parseTest(_, '', { 71 | a: { 72 | $: { 73 | x: "3" 74 | }, 75 | } 76 | }); 77 | start(); 78 | }); 79 | 80 | asyncTest('entities', 4, (_) => { 81 | parseTest(_, '', { 82 | a: { 83 | $: { 84 | x: "a>b&c<" 85 | }, 86 | } 87 | }); 88 | parseTest(_, 'a>b&c<', { 89 | a: "a>b&c<" 90 | }); 91 | start(); 92 | }); 93 | 94 | asyncTest('children', 6, (_) => { 95 | parseTest(_, '34', { 96 | a: { 97 | b: "3", 98 | c: "4" 99 | } 100 | }); 101 | parseTest(_, '34', { 102 | a: { 103 | b: { 104 | $: { 105 | x: "2" 106 | }, 107 | $value: "3" 108 | }, 109 | c: "4" 110 | } 111 | }); 112 | parseTest(_, '345', { 113 | a: { 114 | b: ["3", "4"], 115 | c: "5" 116 | } 117 | }); 118 | start(); 119 | }); 120 | 121 | asyncTest('cdata', 4, (_) => { 122 | parseTest(_, ']]>', { 123 | a: { 124 | $cdata: "" 125 | } 126 | }); 127 | parseTest(_, '', { 128 | a: { 129 | $cdata: "" 130 | } 131 | }); 132 | start(); 133 | }); 134 | 135 | asyncTest('comments in text', 1, (_) => { 136 | parseTest(_, 'abc ghi', { 137 | a: "abc ghi" 138 | }, true); 139 | start(); 140 | }); 141 | 142 | asyncTest('reformatting', 7, (_) => { 143 | rtTest(_, 'spaces outside', ' \r\n\t \t', 
undefined, ''); 144 | rtTest(_, 'spaces inside tag', '', undefined, ''); 145 | rtTest(_, 'spaces around children', ' \n\t', undefined, ''); 146 | rtTest(_, 'spaces and cdata', ' \n\n\t]]>\t', undefined, '\n\t]]>'); 147 | rtTest(_, 'spaces in value', ' ', undefined, ' '); 148 | rtTest(_, 'more spaces in value', ' \r\n\t', undefined, ' '); 149 | rtTest(_, 'indentation', '5', '\t', '\n\t\n\t\t5\n\t\t\n\t\t\t\n\t\t\n\t\n'); 150 | start(); 151 | }); 152 | 153 | asyncTest('empty element in list', 1, (_) => { 154 | parseTest(_, 'x', { 155 | a: { 156 | b: ["", "x", ""] 157 | } 158 | }, true); 159 | start(); 160 | }); 161 | 162 | 163 | asyncTest("rss feed", 5, (_) => { 164 | const entries = ez.devices.file.text.reader(__dirname + '/../../test/fixtures/rss-sample.xml') // 165 | .transform(ez.transforms.cut.transform(2)) // 166 | .transform(ez.transforms.xml.parser("rss/channel/item")).toArray(_); 167 | strictEqual(entries.length, 10); 168 | strictEqual(entries[0].rss.channel.title, "Yahoo! Finance: Top Stories"); 169 | strictEqual(entries[0].rss.channel.item.title, "Wall Street ends down on first trading day of 2014"); 170 | strictEqual(entries[9].rss.channel.title, "Yahoo! Finance: Top Stories"); 171 | strictEqual(entries[9].rss.channel.item.title, "2013's big winners abandoned 'safety' and bet on central bankers"); 172 | start(); 173 | }); 174 | 175 | asyncTest("binary input", 5, (_) => { 176 | const entries = ez.devices.file.binary.reader(__dirname + '/../../test/fixtures/rss-sample.xml') // 177 | .transform(ez.transforms.cut.transform(2)) // 178 | .transform(ez.transforms.xml.parser("rss/channel/item")).toArray(_); 179 | strictEqual(entries.length, 10); 180 | strictEqual(entries[0].rss.channel.title, "Yahoo! Finance: Top Stories"); 181 | strictEqual(entries[0].rss.channel.item.title, "Wall Street ends down on first trading day of 2014"); 182 | strictEqual(entries[9].rss.channel.title, "Yahoo! 
Finance: Top Stories"); 183 | strictEqual(entries[9].rss.channel.item.title, "2013's big winners abandoned 'safety' and bet on central bankers"); 184 | start(); 185 | }); 186 | 187 | asyncTest("rss roundtrip", 1, (_) => { 188 | var expected = fs.readFile(__dirname + '/../../test/fixtures/rss-sample.xml', 'utf8', _); 189 | var result = ez.devices.file.text.reader(__dirname + '/../../test/fixtures/rss-sample.xml') // 190 | .transform(ez.transforms.cut.transform(5)) // 191 | .transform(ez.transforms.xml.parser("rss/channel/item")) // 192 | .transform(ez.transforms.xml.formatter({ 193 | tags: "rss/channel/item", 194 | indent: " " 195 | })) // 196 | .toArray(_).join(''); 197 | expected = expected.replace(/\r?\n */g, '').replace(/<\!--.*-->/g, ''); 198 | result = result.replace(/\r?\n */g, ''); 199 | strictEqual(result, expected); 200 | start(); 201 | }); 202 | 203 | asyncTest('escaping', 2, (_) => { 204 | var xml = ''; 205 | var js = ''; 206 | for (var i = 0; i < 0x10000; i++) { 207 | if (i > 300 && i % 100) continue; 208 | // tab, cr, lf, ' and " could be formatted verbatim but we escape them 209 | if ((i >= 0x20 && i <= 0x7e) || (i >= 0xa1 && i <= 0xd7ff) || (i >= 0xe000 && i <= 0xfffd)) { 210 | if (i >= 0x2000 && i < 0xd000) continue; // skip to speed up test 211 | var ch = String.fromCharCode(i); 212 | if (ch === '<') xml += '<' 213 | else if (ch === '>') xml += '>' 214 | else if (ch === '&') xml += '&' 215 | else if (ch === '"') xml += '"' 216 | else if (ch === "'") xml += ''' 217 | else xml += ch; 218 | } else { 219 | var hex = i.toString(16); 220 | while (hex.length < 2) hex = '0' + hex; 221 | while (hex.length > 2 && hex.length < 4) hex = '0' + hex; 222 | xml += '&#x' + hex + ';' 223 | } 224 | js += String.fromCharCode(i); 225 | } 226 | xml += ''; 227 | parseTest(_, xml, { 228 | a: js 229 | }); 230 | start(); 231 | }); 232 | -------------------------------------------------------------------------------- /tsconfig.json: 
-------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | "module": "commonjs", 4 | "moduleResolution": "node", 5 | "target": "es2015", 6 | "outDir": "out", 7 | "sourceMap": true, 8 | "noImplicitAny": true, 9 | "noImplicitThis": true, 10 | "noImplicitReturns": true, 11 | "strictNullChecks": true, 12 | "rootDir": ".", 13 | "baseUrl": "./node_modules", 14 | "paths": { 15 | "qunit": ["./node_modules/retyped-qunit-tsd-ambient/qunit.d.ts"] 16 | } 17 | }, 18 | "exclude": [ 19 | "lib", 20 | "node_modules" 21 | ] 22 | } --------------------------------------------------------------------------------