├── .dockerignore
├── .editorconfig
├── .eslintignore
├── .eslintrc
├── .gitattributes
├── .github
│   ├── ISSUE_TEMPLATE
│   │   ├── bug_report.md
│   │   ├── feature_request.md
│   │   └── task.md
│   ├── PULL_REQUEST_TEMPLATE
│   │   └── default.md
│   └── workflows
│       ├── coverage.yml
│       ├── nightlyBuild.yml
│       ├── release.yml
│       └── test.yml
├── .gitignore
├── .husky
│   └── pre-commit
├── .nvmrc
├── CODE_OF_CONDUCT.md
├── FUNDING.yml
├── LICENSE
├── README.md
├── cucumber.js
├── package-lock.json
├── package.json
├── scripts
│   ├── coverage-ignore.js
│   ├── prepare-package.js
│   └── prepare-readme.js
├── src
│   ├── decorators.ts
│   ├── ifca.ts
│   ├── ifca
│   │   ├── ifca-chain.ts
│   │   └── processing-queue.ts
│   ├── index.ts
│   ├── streams
│   │   ├── base-stream.ts
│   │   ├── data-stream.ts
│   │   ├── proxies
│   │   │   └── stream-node-writable-proxy.ts
│   │   └── string-stream.ts
│   ├── types.ts
│   └── utils.ts
├── test
│   ├── _assets
│   │   ├── long.txt
│   │   ├── sample.txt
│   │   └── short.txt
│   ├── _helpers
│   │   └── utils.ts
│   ├── bdd
│   │   └── features
│   │       ├── step_definitions
│   │       │   └── stepdefs.ts
│   │       └── string-stream.feature
│   ├── release
│   │   ├── index.js
│   │   ├── package.json
│   │   └── run.sh
│   ├── samples
│   │   └── generic-this-1.ts
│   └── unit
│       ├── ifca
│       │   ├── common.spec.ts
│       │   ├── ifca-chain.spec.ts
│       │   ├── oal.spec.ts
│       │   ├── pts.spec.ts
│       │   └── test.spec.ts
│       ├── pts
│       │   ├── README.md
│       │   ├── helpers
│       │   │   ├── mk-read.js
│       │   │   ├── mk-transform.js
│       │   │   ├── mk-write.js
│       │   │   ├── promise-transform-stream-ifca.js
│       │   │   ├── promise-transform-stream.js
│       │   │   ├── stream-errors.js
│       │   │   └── utils.js
│       │   ├── order.spec.js
│       │   └── scribble.spec.js
│       ├── streams
│       │   ├── data
│       │   │   ├── batch.spec.ts
│       │   │   ├── creation.spec.ts
│       │   │   ├── each.spec.ts
│       │   │   ├── filter.spec.ts
│       │   │   ├── flatmap.spec.ts
│       │   │   ├── map.spec.ts
│       │   │   ├── native-interface.spec.ts
│       │   │   ├── pipe.spec.ts
│       │   │   ├── read.spec.ts
│       │   │   ├── reduce.spec.ts
│       │   │   ├── run.spec.ts
│       │   │   ├── tofile.spec.ts
│       │   │   ├── use.spec.ts
│       │   │   └── write.spec.ts
│       │   └── string
│       │       ├── creation.spec.ts
│       │       ├── each.spec.ts
│       │       ├── grep.spec.ts
│       │       ├── match.spec.ts
│       │       ├── parse.spec.ts
│       │       └── split.spec.ts
│       └── utils.spec.ts
├── tsconfig.build.json
└── tsconfig.json
/.dockerignore:
--------------------------------------------------------------------------------
1 | **/node_modules/
2 | **/.git/
3 | **/.github/
4 | **/dist/
5 | **/Dockerfile*
6 | **/Jenkinsfile*
7 | **/docs/
--------------------------------------------------------------------------------
/.editorconfig:
--------------------------------------------------------------------------------
1 | root = true
2 |
3 | [*]
4 | indent_style = space
5 | indent_size = 4
6 | trim_trailing_whitespace = true
7 | insert_final_newline = true
8 | max_line_length = 120
9 | end_of_line = lf
10 | charset = utf-8
11 |
12 | [*{.json,.yml}]
13 | indent_style = space
14 | indent_size = 2
15 |
--------------------------------------------------------------------------------
/.eslintignore:
--------------------------------------------------------------------------------
1 | node_modules
2 | build
3 |
--------------------------------------------------------------------------------
/.eslintrc:
--------------------------------------------------------------------------------
1 | {
2 | "root": true,
3 | "parser": "@typescript-eslint/parser",
4 | "plugins": [
5 | "@typescript-eslint"
6 | ],
7 | "ignorePatterns": ["dist/"],
8 | "extends": [],
9 | "rules": {
10 | /* possible errors */
11 | "no-cond-assign" : 2,
12 | // "no-console" : 2,
13 | "no-constant-condition" : 2,
14 | "no-control-regex" : 2,
15 | "no-debugger" : 2,
16 | "no-dupe-args" : 2,
17 | "no-dupe-keys" : 2,
18 | "no-duplicate-case" : 2,
19 | "no-empty-character-class" : 2,
20 | "no-empty" : 2,
21 | "no-ex-assign" : 2,
22 | "no-extra-boolean-cast" : 2,
23 | // "no-extra-parens" : 1,
24 | "no-extra-semi" : 2,
25 | "no-func-assign" : 2,
26 | "no-inner-declarations" : 2,
27 | "no-invalid-regexp" : 2,
28 | "no-irregular-whitespace" : 2,
29 | "no-negated-in-lhs" : 2,
30 | "no-obj-calls" : 2,
31 | "no-regex-spaces" : 2,
32 | "no-sparse-arrays" : 2,
33 | "no-unreachable" : 2,
34 | "use-isnan" : 2,
35 | "valid-jsdoc" : [1, {
36 | "requireReturnType": false,
37 | "requireParamType": false,
38 | "requireReturn": false
39 | }],
40 | "valid-typeof" : 2,
41 | "no-unexpected-multiline" : 2,
42 |
43 | /* best practices */
44 | "accessor-pairs" : 1,
45 | "block-scoped-var" : 2,
46 | "complexity" : [1, 9],
47 | "consistent-return" : 2,
48 | "curly" : 0, //TODO
49 | "default-case" : 1,
50 | "dot-notation" : 1,
51 | "dot-location" : [1, "property"],
52 | "eqeqeq" : 2,
53 | "guard-for-in" : 1,
54 | "no-alert" : 2,
55 | "no-caller" : 1,
56 | "no-div-regex" : 1,
57 | "no-else-return" : 1,
58 | "no-eq-null" : 2,
59 | "no-eval" : 1,
60 | "no-extend-native" : 0,
61 |
62 | //"no-extra-bind" es6
63 | "no-fallthrough" : 2,
64 | "no-floating-decimal" : 1,
65 | "no-implicit-coercion" : 0,
66 | "no-implied-eval" : 2,
67 |
68 | //"no-invalid-this" es6
69 | "no-iterator" : 2,
70 | "no-labels" : 2,
71 | "no-lone-blocks" : 2,
72 | "no-loop-func" : 2,
73 | "no-multi-spaces" : 1,
74 | "no-multi-str" : 2,
75 | "no-native-reassign" : 2,
76 | "no-new-func" : 2,
77 | "no-new-wrappers" : 2,
78 | "no-new" : 0,
79 | "no-octal-escape" : 2,
80 | "no-octal" : 2,
81 | "no-param-reassign" : 0,
82 |
83 | //"no-process-env" node
84 | "no-proto" : 2,
85 | "no-redeclare" : 2,
86 | "no-return-assign" : 2,
87 | "no-script-url" : 2,
88 | "no-self-compare" : 2,
89 | "no-sequences" : 1,
90 | "no-throw-literal" : 1,
91 | "no-unused-expressions" : 2,
92 | "no-useless-concat" : 2,
93 | "no-void" : 2,
94 | "no-warning-comments" : 0,
95 | "no-with" : 1,
96 | "radix" : 2,
97 | "vars-on-top" : 0,
98 | "wrap-iife" : [2, "inside"],
99 | "yoda" : 2,
100 |
101 | /* variables */
102 | "init-declarations" : 0,
103 | "no-catch-shadow" : 2,
104 | "no-delete-var" : 2,
105 | "no-label-var" : 2,
106 | "no-shadow-restricted-names" : 2,
107 | "no-shadow" : 0,
108 | "no-undef-init" : 2,
109 | "no-undef" : 0,
110 | "no-undefined:" : 0,
111 | "no-use-before-define" : 2,
112 |
113 | /* stylistic issues */
114 | "array-bracket-spacing" : 1,
115 | "block-spacing" : 1,
116 | "brace-style" : [1, "1tbs", {"allowSingleLine": true}],
117 | "camelcase" : [1, {"properties": "never"}],
118 | "comma-spacing" : 1,
119 | "comma-style" : 1,
120 | "computed-property-spacing" : 1,
121 | "consistent-this": [1, "_this"],
122 | "eol-last" : 2,
123 | "func-names" : 0,
124 | "func-style" : 0,
125 | "id-length" : 0,
126 | "id-match": 0,
127 | "indent": [1, 4],
128 | "key-spacing" : [0, {"beforeColon" : true, "afterColon" : true}],
129 | "lines-around-comment" : 0,
130 | "linebreak-style" : [2, "unix"],
131 | "max-nested-callbacks" : [1, 3],
132 | "new-cap" : 0,
133 | "new-parens" : 1,
134 | "newline-after-var" : 0,
135 | "no-array-constructor" : 1,
136 | "no-continue" : 0,
137 | "no-inline-comments" : 0,
138 | "no-lonely-if" : 1,
139 | "no-mixed-spaces-and-tabs" : 1,
140 | "no-multiple-empty-lines" : 1,
141 | "no-nested-ternary" : 1,
142 | "no-negated-condition" : 0,
143 | "no-new-object" : 1,
144 | "no-restricted-syntax" : 0,
145 | "no-spaced-func" : 1,
146 | "no-ternary" : 0,
147 | "no-trailing-spaces" : [1, {"ignoreComments": true}],
148 | "no-underscore-dangle" : 0,
149 | "no-unneeded-ternary" : 1,
150 | "object-curly-spacing" : [2, "always"],
151 | "one-var" : 0,
152 | "operator-assignment" : 1,
153 | "operator-linebreak" : 1,
154 | "padded-blocks" : 0,
155 | "padding-line-between-statements": [2,
156 | { "blankLine": "always", "prev": ["const", "let"], "next": "*" },
157 | { "blankLine": "any", "prev": ["const"], "next": ["const"] },
158 | { "blankLine": "never", "prev": ["let"], "next": ["let"] },
159 | { "blankLine": "always", "prev": "directive", "next": "*" },
160 | { "blankLine": "any", "prev": "directive", "next": "directive" }
161 | ],
162 | "quote-props": [1, "as-needed"],
163 | "quotes": [ 2, "double"],
164 | "require-jsdo" : 0,
165 | "semi-spacing" : 1,
166 | "semi": ["error", "always"],
167 | "sort-vars" : 0,
168 | "keyword-spacing" : ["error", { "before": true, "after": true }],
169 | "space-before-function-paren" : [1, {"anonymous": "never", "named": "never", "asyncArrow": "always"}],
170 | "space-in-parens": [1, "never"],
171 | "space-infix-ops" : 1,
172 | "space-unary-ops" : 1,
173 | "spaced-comment" : 0,
174 | "wrap-regex" : 1,
175 | "@typescript-eslint/no-unused-vars": [2, {
176 | "varsIgnorePattern" : "^(_this|options|defaults)",
177 | "argsIgnorePattern" : "^(_|options|defaults)"}
178 | ],
179 | "@typescript-eslint/no-useless-constructor": "error",
180 | "@typescript-eslint/no-shadow": ["error"],
181 |
182 | "max-len": [
183 | "warn",
184 | {
185 | "code": 120,
186 | "tabWidth": 4,
187 | "comments": 120,
188 | "ignoreComments": false,
189 | "ignoreTrailingComments": true,
190 | "ignoreUrls": true,
191 | "ignoreStrings": true,
192 | "ignoreTemplateLiterals": true,
193 | "ignoreRegExpLiterals": true
194 | }
195 | ]
196 | }
197 | }
198 |
--------------------------------------------------------------------------------
/.gitattributes:
--------------------------------------------------------------------------------
1 | * text eol=lf
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE/bug_report.md:
--------------------------------------------------------------------------------
1 | ---
2 | name: 🐛 Bug report
3 | about: Report a bug or confusing behavior
4 | title: ''
5 | labels: ''
6 | assignees: ''
7 |
8 | ---
9 |
10 | ## Describe the bug
11 |
12 | A clear and concise description of what the bug is.
13 |
14 | ## Steps to reproduce
15 |
16 | 1. Go to ...
17 | 2. ...
18 |
19 | ### Actual behavior
20 |
21 | A clear and concise description of what was the result of the above steps.
22 |
23 | ### Expected behavior
24 |
25 | A clear and concise description of what you expected to happen.
26 |
27 | ### Reproduction repo/branch/code
28 |
29 | Provide a code sample where we can reproduce this issue.
30 |
31 | ## Additional details
32 |
33 | 1. node version:
34 | 1. os:
35 |
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE/feature_request.md:
--------------------------------------------------------------------------------
1 | ---
2 | name: 🚀 Feature request
3 | about: Suggest an idea or a new feature
4 | title: ''
5 | labels: ''
6 | assignees: ''
7 |
8 | ---
9 |
10 | ## Feature description
11 |
12 | A clear and concise description of the feature.
13 |
14 | ### Use case
15 |
16 | Describe your use case and how you are planning to use this feature.
17 |
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE/task.md:
--------------------------------------------------------------------------------
1 | ---
2 | name: 📝 Task
3 | about: Anything other than bug report / feature requests
4 | title: ''
5 | labels: ''
6 | assignees: ''
7 |
8 | ---
9 |
10 | ## Description
11 |
12 | A clear and concise description of what needs to be done.
13 |
--------------------------------------------------------------------------------
/.github/PULL_REQUEST_TEMPLATE/default.md:
--------------------------------------------------------------------------------
1 | # Description
2 |
3 | Please include a summary of the change.
4 |
5 | # Fixes/Implements
6 |
7 | Please provide here links to issues/features.
8 |
--------------------------------------------------------------------------------
/.github/workflows/coverage.yml:
--------------------------------------------------------------------------------
1 | name: Coverage
2 |
3 | on:
4 | push:
5 | branches: [ main, feature/**, task/**, fix/** ]
6 |
7 | jobs:
8 | check:
9 |
10 | runs-on: ubuntu-latest
11 |
12 | steps:
13 | - uses: actions/checkout@v2
14 | - uses: actions/setup-node@v2
15 | with:
16 | node-version: 14.x
17 | cache: npm
18 | - run: npm -v
19 | - run: node -v
20 | - run: npm ci
21 | - run: npm run coverage:check
22 |
--------------------------------------------------------------------------------
/.github/workflows/nightlyBuild.yml:
--------------------------------------------------------------------------------
1 | name: Nightly
2 |
3 | on:
4 | workflow_dispatch:
5 | push:
6 | branches: [ main ]
7 | jobs:
8 | nightly-build:
9 |
10 | runs-on: ubuntu-latest
11 |
12 | steps:
13 | - uses: actions/checkout@v2
14 | - uses: actions/setup-node@v2
15 | with:
16 | node-version: 14.x
17 | cache: npm
18 |
19 | - run: npm -v
20 |
21 | - run: node -v
22 |
23 | - run: npm ci
24 |
25 | - run: npm run dist
26 |
27 | - uses: EndBug/add-and-commit@v7
28 | with:
29 | add: 'dist --force'
30 | message: Nightly build from ${{ github.sha }}
31 | author_name: Nightly Build
32 | author_email: nb@scramjet.org
33 | push: origin main:nightly --force
34 |
--------------------------------------------------------------------------------
/.github/workflows/release.yml:
--------------------------------------------------------------------------------
1 | ---
2 | name: Release framework-js
3 |
4 | on:
5 | push:
6 | tags:
7 | - "v*"
8 |
9 | env:
10 | registry: https://registry.npmjs.org/
11 |
12 | jobs:
13 | run:
14 |
15 | runs-on: ubuntu-latest
16 | strategy:
17 | matrix:
18 | node-version: ["16.x"]
19 |
20 | steps:
21 | - uses: actions/checkout@v2
22 | with:
23 | fetch-depth: 0
24 |
25 | - name: Get tag branch
26 | run: |
27 | raw=$(git branch -r --contains ${{ github.ref }} |grep "main" )
28 | branch=${raw##*/}
29 | echo "BRANCH=$branch" >> $GITHUB_ENV
30 | echo ${{ env.BRANCH }}
31 |
32 | - name: Exit if tag not on main branch
33 | run: if [[ "${{ env.BRANCH }}" != 'main' ]];then exit 1; fi
34 |
35 | - uses: actions/setup-node@v2
36 | with:
37 | node-version: ${{ matrix.node-version }}
38 | registry-url: ${{ env.registry }}
39 |
40 | - name: Get the version
41 | id: get_version
42 | run: echo ::set-output name=VERSION::${GITHUB_REF/refs\/tags\//}
43 |
44 | - name: Install deps
45 | run: npm i
46 |
47 | - name: Build dist package
48 | run: npm run dist
49 |
50 | - name: Publish dist package as public one
51 | run: npm publish dist/ --access public
52 | env:
53 | NODE_AUTH_TOKEN: ${{secrets.NPM_TOKEN}}
54 |
55 | - name: Check npm package
56 | run: npm run test:release -- ${{ steps.get_version.outputs.VERSION }}
57 |
--------------------------------------------------------------------------------
/.github/workflows/test.yml:
--------------------------------------------------------------------------------
1 | name: Tests
2 |
3 | on:
4 | push:
5 | branches: [ main, feature/**, task/**, fix/** ]
6 |
7 | jobs:
8 | run:
9 |
10 | runs-on: ubuntu-latest
11 |
12 | strategy:
13 | matrix:
14 | node-version: [14.x, 16.x]
15 |
16 | steps:
17 | - uses: actions/checkout@v2
18 | - uses: actions/setup-node@v2
19 | with:
20 | node-version: ${{ matrix.node-version }}
21 | cache: npm
22 | - run: npm -v
23 | - run: node -v
24 | - run: npm ci
25 | - run: npm test
26 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Logs
2 | logs
3 | *.log
4 | npm-debug.log*
5 | yarn-debug.log*
6 | yarn-error.log*
7 | lerna-debug.log*
8 |
9 | # Diagnostic reports (https://nodejs.org/api/report.html)
10 | report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json
11 |
12 | # Runtime data
13 | pids
14 | *.pid
15 | *.seed
16 | *.pid.lock
17 |
18 | # Directory for instrumented libs generated by jscoverage/JSCover
19 | lib-cov
20 |
21 | # Coverage directory used by tools like istanbul
22 | coverage
23 | *.lcov
24 |
25 | # nyc test coverage
26 | .nyc_output
27 |
28 | # Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files)
29 | .grunt
30 |
31 | # Bower dependency directory (https://bower.io/)
32 | bower_components
33 |
34 | # node-waf configuration
35 | .lock-wscript
36 |
37 | # Compiled binary addons (https://nodejs.org/api/addons.html)
38 | build/Release
39 |
40 | # Dependency directories
41 | node_modules/
42 | jspm_packages/
43 |
44 | # Snowpack dependency directory (https://snowpack.dev/)
45 | web_modules/
46 |
47 | # TypeScript cache
48 | *.tsbuildinfo
49 |
50 | # Optional npm cache directory
51 | .npm
52 |
53 | # Optional eslint cache
54 | .eslintcache
55 |
56 | # Microbundle cache
57 | .rpt2_cache/
58 | .rts2_cache_cjs/
59 | .rts2_cache_es/
60 | .rts2_cache_umd/
61 |
62 | # Optional REPL history
63 | .node_repl_history
64 |
65 | # Output of 'npm pack'
66 | *.tgz
67 |
68 | # Yarn Integrity file
69 | .yarn-integrity
70 |
71 | # dotenv environment variables file
72 | .env
73 | .env.test
74 |
75 | # parcel-bundler cache (https://parceljs.org/)
76 | .cache
77 | .parcel-cache
78 |
79 | # Next.js build output
80 | .next
81 | out
82 |
83 | # Nuxt.js build / generate output
84 | .nuxt
85 | dist
86 |
87 | # Gatsby files
88 | .cache/
89 | # Comment in the public line in if your project uses Gatsby and not Next.js
90 | # https://nextjs.org/blog/next-9-1#public-directory-support
91 | # public
92 |
93 | # vuepress build output
94 | .vuepress/dist
95 |
96 | # Serverless directories
97 | .serverless/
98 |
99 | # FuseBox cache
100 | .fusebox/
101 |
102 | # DynamoDB Local files
103 | .dynamodb/
104 |
105 | # TernJS port file
106 | .tern-port
107 |
108 | # VSCode locale files
109 | .vscode/
110 |
111 | # Stores VSCode versions used for testing VSCode extensions
112 | .vscode-test
113 |
114 | # yarn v2, v3
115 | .pnp.*
116 | .yarn/*
117 | !.yarn/patches
118 | !.yarn/plugins
119 | !.yarn/releases
120 | !.yarn/sdks
121 | !.yarn/versions
122 |
123 | # Scramjet framework specific
124 | build
125 |
126 | *.DS_Store
127 |
--------------------------------------------------------------------------------
/.husky/pre-commit:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 | . "$(dirname "$0")/_/husky.sh"
3 |
4 | npm run lint
5 |
--------------------------------------------------------------------------------
/.nvmrc:
--------------------------------------------------------------------------------
1 | 14
2 |
--------------------------------------------------------------------------------
/CODE_OF_CONDUCT.md:
--------------------------------------------------------------------------------
1 | # Contributor Covenant Code of Conduct
2 |
3 | ## Our Pledge
4 |
5 | In the interest of fostering an open and welcoming environment, we as
6 | contributors and maintainers pledge to making participation in our project and
7 | our community a harassment-free experience for everyone, regardless of age, body
8 | size, disability, ethnicity, sex characteristics, gender identity and expression,
9 | level of experience, education, socio-economic status, nationality, personal
10 | appearance, race, religion, or sexual identity and orientation.
11 |
12 | ## Our Standards
13 |
14 | Examples of behavior that contributes to creating a positive environment
15 | include:
16 |
17 | * Using welcoming and inclusive language
18 | * Being respectful of differing viewpoints and experiences
19 | * Gracefully accepting constructive criticism
20 | * Focusing on what is best for the community
21 | * Showing empathy towards other community members
22 |
23 | Examples of unacceptable behavior by participants include:
24 |
25 | * The use of sexualized language or imagery and unwelcome sexual attention or
26 | advances
27 | * Trolling, insulting/derogatory comments, and personal or political attacks
28 | * Public or private harassment
29 | * Publishing others' private information, such as a physical or electronic
30 | address, without explicit permission
31 | * Other conduct which could reasonably be considered inappropriate in a
32 | professional setting
33 |
34 | ## Our Responsibilities
35 |
36 | Project maintainers are responsible for clarifying the standards of acceptable
37 | behavior and are expected to take appropriate and fair corrective action in
38 | response to any instances of unacceptable behavior.
39 |
40 | Project maintainers have the right and responsibility to remove, edit, or
41 | reject comments, commits, code, wiki edits, issues, and other contributions
42 | that are not aligned to this Code of Conduct, or to ban temporarily or
43 | permanently any contributor for other behaviors that they deem inappropriate,
44 | threatening, offensive, or harmful.
45 |
46 | ## Scope
47 |
48 | This Code of Conduct applies both within project spaces and in public spaces
49 | when an individual is representing the project or its community. Examples of
50 | representing a project or community include using an official project e-mail
51 | address, posting via an official social media account, or acting as an appointed
52 | representative at an online or offline event. Representation of a project may be
53 | further defined and clarified by project maintainers.
54 |
55 | ## Enforcement
56 |
57 | Instances of abusive, harassing, or otherwise unacceptable behavior may be
58 | reported by contacting the project team at opensource@scramjet.org. All
59 | complaints will be reviewed and investigated and will result in a response that
60 | is deemed necessary and appropriate to the circumstances. The project team is
61 | obligated to maintain confidentiality with regard to the reporter of an incident.
62 | Further details of specific enforcement policies may be posted separately.
63 |
64 | Project maintainers who do not follow or enforce the Code of Conduct in good
65 | faith may face temporary or permanent repercussions as determined by other
66 | members of the project's leadership.
67 |
68 | ## Attribution
69 |
70 | This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4,
71 | available at https://www.contributor-covenant.org/version/1/4/code-of-conduct.html
72 |
73 | [homepage]: https://www.contributor-covenant.org
74 |
75 | For answers to common questions about this code of conduct, see
76 | https://www.contributor-covenant.org/faq
77 |
--------------------------------------------------------------------------------
/FUNDING.yml:
--------------------------------------------------------------------------------
1 | github: scramjetorg
2 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2021 Scramjet
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | Scramjet Framework TypeScript
2 | ==================
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 | ⭐ Star us on GitHub — it motivates us a lot! 🚀
19 |
20 |
21 |
22 |
23 | Scramjet is a simple reactive stream programming framework. The code is written by chaining functions that transform the streamed data, including the well-known `map`, `filter` and `reduce`.
24 |
25 | The main advantage of Scramjet is running asynchronous operations on your data streams concurrently. It allows you to perform the transformations both synchronously and asynchronously using the same API - so now you can "map" your stream from whatever source and call any number of APIs consecutively.
26 |
27 | This is a pre-release of the next major version (v5) of [JavaScript Scramjet Framework](https://www.npmjs.com/package/scramjet).
28 |
29 | **We are open to your feedback!** We encourage you to report issues with any ideas, suggestions and features you would like to see in this version. You can also upvote (`+1`) existing ones to show us the direction we should take in developing Scramjet Framework.
30 |
31 | **Not interested in JavaScript/TypeScript version?** Check out [Scramjet Framework in Python](https://github.com/scramjetorg/framework-python)!
32 |
33 | ## Table of contents
34 |
35 | - [Installation](#installation)
36 | - [Usage](#usage)
37 | - [Requesting features](#requesting-features)
38 | - [Reporting bugs](#reporting-bugs)
39 | - [Contributing](#contributing)
40 | - [Development Setup](#development-setup)
41 |
42 | ## Installation
43 |
44 | Simply run:
45 |
46 | ```bash
47 | npm i @scramjet/framework
48 | ```
49 |
50 | And then you can import it in your JS/TS code like:
51 |
52 | _sample-file.ts_
53 | ```ts
54 | import { DataStream } from "@scramjet/framework";
55 | ```
56 |
57 |
58 |
59 | You can also use the nightly build as an npm dependency by referring to the `nightly` branch (which is the latest build) of this repository:
60 |
61 | _package.json_
62 | ```json
63 | {
64 | "dependencies": {
65 | "scramjet": "scramjetorg/framework-js#nightly"
66 | }
67 | }
68 | ```
69 |
70 | After adding Scramjet Framework as a dependency, it needs to be installed via `npm` (or similar):
71 |
72 | ```
73 | npm i
74 | ```
75 |
76 | You can also build Scramjet Framework yourself. Please refer to [Development Setup](#development-setup) section for more details.
77 |
78 |
79 |
80 | ## Usage
81 |
82 | Scramjet streams are similar to, and behave much like, native Node.js streams and streams in any programming language in general. They allow operating on streams of data (where each separate data part is called a `chunk`) and processing it in any way through transforms like mapping or filtering.
83 |
84 | Let's take a look at how to create and operate on Scramjet streams.
85 |
86 | _If you would like to dive deeper, please refer to [streams source files](https://github.com/scramjetorg/framework-js/tree/main/src/streams)_.
87 |
88 | ### Creating Scramjet streams
89 |
90 | The basic method for creating Scramjet streams is the static `from()` method. It accepts iterables (both sync and async) and native Node.js streams. An iterable can be a simple array, a generator or anything else iterable:
91 |
92 | ```ts
93 | import { DataStream } from "scramjet";
94 |
95 | const stream = DataStream.from(["foo", "bar", "baz"]);
96 | ```
97 |
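A generator can be used just as well - a minimal sketch:

```ts
import { DataStream } from "scramjet";

function* words() {
    yield "foo";
    yield "bar";
}

const stream = DataStream.from(words());
```
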
98 | Scramjet streams are asynchronous iterables themselves, which means one stream can be created from another:
99 |
100 | ```ts
101 | import { DataStream } from "scramjet";
102 |
103 | const stream1 = DataStream.from(["foo", "bar", "baz"]);
104 | const stream2 = DataStream.from(stream1);
105 | ```
106 |
107 | They can also be created from native Node.js `Readable`s:
108 |
109 | ```ts
110 | import { createReadStream } from "fs";
111 | import { DataStream } from "scramjet";
112 |
113 | const stream = DataStream.from(createReadStream("path/to/file"));
114 | ```
115 |
116 | The more "manual" approach is creating streams using the constructor:
117 |
118 | ```ts
119 | import { DataStream } from "scramjet";
120 |
121 | const stream = new DataStream();
122 | ```
123 |
124 | This approach is useful when one needs to manually write data to a stream or use it as a pipe destination:
125 |
126 | ```ts
127 | import { DataStream } from "scramjet";
128 |
129 | const stream = new DataStream();
130 | stream.write("foo");
131 |
132 | const stream2 = new DataStream();
133 | stream.pipe(stream2);
134 | ```
135 |
136 | ### Getting data from Scramjet streams
137 |
138 | Just as there are specific methods for creating Scramjet streams, there are specific methods for getting data out of them. These are sometimes called `sink` methods, as they allow data to flow through and out of the stream. Since these methods need to wait for the stream to end, they return a `Promise` which needs to be awaited and resolves when all data from the source is processed.
139 |
140 | ```ts
141 | import { DataStream } from "scramjet";
142 |
143 | const stream1 = DataStream.from(["foo", "bar", "baz"]);
144 | await stream1.toArray(); // ["foo", "bar", "baz"]
145 |
146 | const stream2 = DataStream.from(["foo", "bar", "baz"]);
147 | await stream2.toFile("path/to/file"); // Writes to a file, resolves when done.
148 |
149 | const stream3 = DataStream.from(["foo", "bar", "baz"]);
150 | await stream3.reduce(
151 | (prev, curr) => `${ prev }-${ curr }`,
152 | ""
153 | ); // "foo-bar-baz"
154 | ```
155 |
156 | As Scramjet streams are asynchronous iterables, they can be iterated too:
157 |
158 | ```ts
159 | import { DataStream } from "scramjet";
160 |
161 | const stream = DataStream.from(["foo", "bar", "baz"]);
162 |
163 | for await (const chunk of stream) {
164 | console.log(chunk);
165 | }
166 | // Logs:
167 | // "foo"
168 | // "bar"
169 | // "baz"
170 | ```
171 |
172 | Similar to writing, there is also a more "manual" way of reading from streams, using the `.read()` method:
173 |
174 | ```ts
175 | import { DataStream } from "scramjet";
176 |
177 | const stream = DataStream.from(["foo", "bar", "baz"]);
178 |
179 | await stream.read(); // "foo"
180 | await stream.read(); // "bar"
181 | ```
182 |
183 | `read()` returns a `Promise` which resolves as soon as there is something ready to be read from the stream.
184 |
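Once the stream has ended and nothing is left to read, `read()` resolves to `null` (following the end-of-stream semantics of the underlying processing queue in `src/ifca/processing-queue.ts`):

```ts
import { DataStream } from "scramjet";

const stream = DataStream.from(["foo"]);

await stream.read(); // "foo"
await stream.read(); // null - the stream has ended
```
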
185 | ### Basic operations
186 |
187 | The whole idea of stream processing is the ability to quickly and efficiently transform the data which flows through the stream. Let's take a look at the basic operations (called `transforms`) and what they do:
188 |
189 | #### Mapping
190 |
191 | Mapping stream data is basically the same as mapping an array. It allows mapping each `chunk` to a new value:
192 |
193 | ```ts
194 | import { DataStream } from "scramjet";
195 |
196 | DataStream
197 | .from(["foo", "bar", "baz"])
198 | .map(chunk => chunk.repeat(2))
199 | .toArray(); // ["foofoo", "barbar", "bazbaz"]
200 | ```
201 |
202 | The result of the map transform can be of a different type than the initial chunks:
203 |
204 | ```ts
205 | import { DataStream } from "scramjet";
206 |
207 | DataStream
208 | .from(["foo", "bar", "baz"])
209 | .map(chunk => chunk.charCodeAt(0))
210 | .toArray(); // [102, 98, 98]
211 |
212 | DataStream
213 | .from(["foo", "bar", "baz"])
214 | .map(chunk => chunk.split(""))
215 | .toArray(); // [["f", "o", "o"], ["b", "a", "r"], ["b", "a", "z"]]
216 | ```
217 |
218 | #### Filtering
219 |
220 | Filtering allows you to filter out any unnecessary chunks:
221 |
222 | ```ts
223 | import { DataStream } from "scramjet";
224 |
225 | DataStream
226 | .from([1, 2, 3, 4, 5, 6])
227 | .filter(chunk => chunk % 2 === 0)
228 | .toArray(); // [2, 4, 6]
229 | ```
230 |
231 | #### Grouping
232 |
233 | Batching allows grouping chunks into arrays, effectively changing the number of chunks flowing through the stream:
234 |
235 | ```ts
236 | import { DataStream } from "scramjet";
237 |
238 | DataStream
239 | .from([1, 2, 3, 4, 5, 6, 7, 8])
240 | .batch(chunk => chunk % 2 === 0)
241 | .toArray(); // [[1, 2], [3, 4], [5, 6], [7, 8]]
242 | ```
243 |
244 | Whenever the callback function passed to the `.batch()` call returns `true`, a new group is emitted.
245 |
246 | #### Flattening
247 |
248 | The operation opposite to batching is flattening. At the moment, Scramjet streams provide the `.flatMap()` method, which allows first mapping the chunks and then flattening the resulting arrays:
249 |
250 | ```ts
251 | import { DataStream } from "scramjet";
252 |
253 | DataStream
254 | .from(["foo", "bar", "baz"])
255 | .flatMap(chunk => chunk.split(""))
256 | .toArray(); // ["f", "o", "o", "b", "a", "r", "b", "a", "z"]
257 | ```
258 |
259 | But it can also be used just to flatten the stream, by providing a callback which only passes values through:
260 |
261 | ```ts
262 | import { DataStream } from "scramjet";
263 |
264 | DataStream
265 | .from([1, 2, 3, 4, 5, 6, 7, 8])
266 | .batch(chunk => chunk % 2 === 0)
267 | .flatMap(chunk => chunk)
268 | .toArray(); // [1, 2, 3, 4, 5, 6, 7, 8]
269 | ```
270 |
271 | #### Piping
272 |
273 | Piping is essential for operating on streams. Scramjet streams can be used both as a pipe source and as a destination. They can also be combined with native Node.js streams, with a native stream as either the pipe source or the destination.
274 |
275 | ```ts
276 | import { DataStream } from "scramjet";
277 |
278 | const stream1 = DataStream.from([1, 2, 3, 4, 5, 6, 7, 8]);
279 | const stream2 = new DataStream();
280 |
281 | stream1.pipe(stream2); // All data flowing through "stream1" will be passed to "stream2".
282 | ```
283 |
284 | ```ts
285 | import { createReadStream } from "fs";
286 | import { DataStream } from "scramjet";
287 |
288 | const readStream = createReadStream("path/to/file");
289 | const scramjetStream = new DataStream();
290 |
291 | readStream.pipe(scramjetStream); // All file contents read by native nodejs stream will be passed to "scramjetStream".
292 | ```
293 |
294 | ```ts
295 | import { createWriteStream } from "fs";
296 | import { DataStream } from "scramjet";
297 |
298 | const scramjetStream = DataStream.from([1, 2, 3, 4, 5, 6, 7, 8]);
299 |
300 | scramjetStream.pipe(createWriteStream("path/to/file")); // All data flowing through "scramjetStream" will be written to a file via native nodejs stream.
301 | ```
302 |
303 | ## Requesting Features
304 |
305 | Anything missing? Or maybe there is something which would make using Scramjet Framework much easier or more efficient? Don't hesitate to file a [new feature request](https://github.com/scramjetorg/framework-js/issues/new?assignees=&labels=&template=feature_request.md&title=)! We really appreciate all feedback.
306 |
307 | ## Reporting Bugs
308 |
309 | If you have found a bug or any inconsistent or confusing behavior, please file a [new bug report](https://github.com/scramjetorg/framework-js/issues/new?assignees=&labels=&template=bug_report.md&title=).
310 |
311 | ## Contributing
312 |
313 | You can contribute to this project by giving us feedback ([reporting bugs](#reporting-bugs) and [requesting features](#requesting-features)) and also by writing code yourself! We have some introductory issues labeled with `good first issue` which should be a perfect starter.
314 |
315 | The easiest way is to [create a fork](https://docs.github.com/en/get-started/quickstart/fork-a-repo) of this repository and then [create a pull request](https://docs.github.com/en/pull-requests/collaborating-with-pull-requests/proposing-changes-to-your-work-with-pull-requests/creating-a-pull-request-from-a-fork) with all your changes. In most cases, you should branch from and target `main` branch.
316 |
317 |
318 |
319 | Please refer to the [Development Setup](#development-setup) section on how to set up this project.
320 |
321 | ## Development Setup
322 |
323 | ### Project setup
324 |
325 | 1. Install Node.js (`14.x`).
326 |
327 | Refer to the [official docs](https://nodejs.org/en/download/). Alternatively, you may use a Node version manager like [nvm](https://github.com/nvm-sh/nvm).
328 |
329 | 2. Clone this repository:
330 |
331 | ```bash
332 | git clone git@github.com:scramjetorg/framework-js.git
333 | ```
334 |
335 | 3. Install project dependencies:
336 |
337 | ```bash
338 | npm i
339 | ```
340 |
341 | ### Commands
342 |
343 | There are multiple npm commands available which help to run tests, build the project and assist during development.
344 |
345 | #### Running tests
346 |
347 | ```bash
348 | npm run test
349 | ```
350 |
351 | Runs all tests from the `test` directory. It runs `build` internally, so it doesn't have to be run manually.
352 |
353 | ```bash
354 | npm run test:unit[:w]
355 | ```
356 |
357 | Runs all unit tests (the `test/unit` directory). It runs `build` internally, so it doesn't have to be run manually. When run with `:w`, it will watch for changes, rebuild and rerun tests automatically. To run unit tests without rebuilding the project, use `npm run test:run:unit`.
358 |
359 | ```bash
360 | npm run test:unit:d -- build/test/.../test.js [--host ...] [--port ...]
361 | ```
362 |
363 | Runs the specified test file in debug mode. It runs `build` internally, so it doesn't have to be run manually. This is the same as running
364 | `npm run build && npx ava debug --break build/test/.../test.js [--host ...] [--port ...]`. The test can then be inspected, e.g. via the Chrome inspector,
365 | by going to `chrome://inspect`.
366 |
367 | ```bash
368 | npm run test:bdd
369 | ```
370 |
371 | Runs all BDD tests (the `test/bdd` directory). It runs `build` internally, so it doesn't have to be run manually. To run BDD tests without rebuilding the project, use `npm run test:run:bdd`.
372 |
373 | **Running single test file or specific tests**
374 |
375 | A single test file can be run by passing its path to the `test` command:
376 |
377 | ```bash
378 | npm run test:unit -- build/test/ifca/common.spec.js
379 | ```
380 |
381 | Specific test cases can be run using the `-m` (match) option:
382 |
383 | ```bash
384 | npm run test:unit -- -m "*default*"
385 | ```
386 |
387 | Both can be mixed to run specific tests from a given file or folder:
388 |
389 | ```bash
390 | npm run test:unit -- build/test/ifca/common.spec.js -m "*default*"
391 | ```
392 |
393 | #### Building the project
394 |
395 | ```bash
396 | npm run build[:w]
397 | ```
398 |
399 | Transpiles `.ts` sources and tests (`src` and `test` directories) and outputs JS files to `build` directory. When run with `:w` it will watch for changes and rebuild automatically.
400 |
401 | ```bash
402 | npm run dist
403 | ```
404 |
405 | Builds the dist files - similar to `build`, but it skips the `test` directory and additionally generates source maps.
406 |
407 | #### Miscellaneous
408 |
409 | ```bash
410 | npm run lint
411 | ```
412 |
413 | Lints `src` and `test` directories. Used as a `pre-commit` hook.
414 |
415 | ```bash
416 | npm run lint:f
417 | ```
418 |
419 | Fixes lint warnings/errors in `src` and `test` files.
420 |
421 | ```bash
422 | npm run coverage
423 | ```
424 |
425 | Checks code coverage, generates an HTML report and serves it on port 8080.
426 |
427 | ```bash
428 | npm run coverage:check
429 | ```
430 |
431 | Checks code coverage. It will fail if coverage is below the threshold defined in `package.json`. Useful as a CI job.
432 |
433 | ```bash
434 | npm run coverage:generate
435 | ```
436 |
437 | Generates the code coverage report (HTML format) from the unit and BDD test runs, without serving it.
438 |
--------------------------------------------------------------------------------
/cucumber.js:
--------------------------------------------------------------------------------
1 | module.exports = {
2 | default: "--format-options '{\"snippetInterface\": \"synchronous\"}' --publish-quiet"
3 | };
4 |
--------------------------------------------------------------------------------
/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "@scramjet/framework",
3 | "version": "0.1.0",
4 | "description": "Simple yet powerful live data computation framework.",
5 | "main": "./dist/index.js",
6 | "files": [
7 | "dist"
8 | ],
9 | "scripts": {
10 | "build": "rm -rf ./build && npm run copy && tsc -p tsconfig.build.json",
11 | "build:w": "npm run build -- -w",
12 | "test": "npm run build && npm run test:run:unit && npm run test:run:bdd",
13 | "test:unit": "npm run build && npm run test:run:unit",
14 | "test:unit:w": "concurrently 'npm run build:w' 'sleep 8 && npm test:run:unit -- -w'",
15 | "test:unit:d": "npm run build && npm run test:run:unit -- debug --break",
16 | "test:bdd": "npm run build && npm run test:run:bdd",
17 | "test:release": "cd test/release && ./run.sh",
18 | "dist": "rm -rf ./dist && tsc && node scripts/prepare-readme.js && node scripts/prepare-package.js",
19 | "lint": "eslint src/ test/ --ext .ts",
20 | "lint:f": "eslint src/ test/ --ext .ts --fix",
21 | "prepare": "husky install",
22 | "coverage": "npm run coverage:generate && npx http-server coverage",
23 | "coverage:generate": "npm run build && npm run coverage:ignore && npm run coverage:unit && npm run coverage:bdd && npm run coverage:report",
24 | "coverage:check": "npm run build && npm run coverage:ignore && npm run coverage:unit && npm run coverage:bdd && nyc report --check-coverage",
25 | "coverage:ignore": "node scripts/coverage-ignore.js",
26 | "coverage:unit": "nyc --silent npm run test:run:unit && SCRAMJET_LOG=1 nyc --silent --no-clean npm run test:run:unit -- -m \"trace does not throw\"",
27 | "coverage:bdd": "nyc --silent --no-clean npm run test:run:bdd",
28 | "coverage:report": "nyc report --reporter=html",
29 | "copy": "copyfiles -f test/bdd/features/*.feature build/test/bdd/features && copyfiles -f test/_assets/* build/test/_assets",
30 | "test:run:unit": "npx ava",
31 | "test:run:bdd": "npx cucumber-js build/test/bdd/features"
32 | },
33 | "author": "Scramjet ",
34 | "license": "AGPL-3.0",
35 | "devDependencies": {
36 | "@cucumber/cucumber": "^8.0.0-rc.1",
37 | "@types/assert": "^1.5.5",
38 | "@types/node": "^15.14.9",
39 | "@typescript-eslint/eslint-plugin": "^5.0.0",
40 | "@typescript-eslint/parser": "^5.0.0",
41 | "assert": "^2.0.0",
42 | "ava": "^3.15.0",
43 | "concurrently": "^6.2.1",
44 | "copyfiles": "^2.4.1",
45 | "eslint": "^8.0.0",
46 | "http-server": "^13.0.2",
47 | "husky": "^7.0.2",
48 | "nyc": "^15.1.0",
49 | "typedoc": "0.20.28",
50 | "typedoc-plugin-markdown": "3.4.5",
51 | "typescript": "^4.3.5"
52 | },
53 | "ava": {
54 | "extensions": [
55 | "js"
56 | ],
57 | "files": [
58 | "build/test/unit/**/*.js",
59 | "!build/test/**/helpers"
60 | ]
61 | },
62 | "nyc": {
63 | "branches": 100,
64 | "lines": 100,
65 | "functions": 100,
66 | "statements": 100,
67 | "include": "build/src/",
68 | "exclude": "build/test/**/*.js"
69 | },
70 | "private": true,
71 | "keywords": [
72 | "react",
73 | "javascript",
74 | "typescript",
75 | "event-stream",
76 | "stream",
77 | "es6",
78 | "promise",
79 | "data-stream",
80 | "reactive-programming",
81 | "spark-streaming",
82 | "stream2",
83 | "transformations"
84 | ]
85 | }
86 |
--------------------------------------------------------------------------------
/scripts/coverage-ignore.js:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env node
2 | // Ignore TypeScript constructs (like decorator local functions) in coverage reports.
3 |
4 | const { join } = require("path");
5 | const { readFileSync, writeFileSync } = require("fs");
6 |
7 | const files = [
8 | "build/src/streams/data-stream.js",
9 | "build/src/streams/string-stream.js",
10 | "build/src/streams/proxies/stream-node-writable-proxy.js"
11 | ];
12 |
13 | const replacements = [
14 | "var __decorate = (",
15 | "var __metadata = (",
16 | "var __asyncValues = (",
17 | "var __importDefault = (",
18 | [/__decorate\(\[/g, "__decorate(["]
19 | ];
20 |
21 | for (const file of files) {
22 | const filePath = join(__dirname, "..", file);
23 | const fileContents = readFileSync(filePath, "utf8");
24 |
25 | console.log(`coverage-ignore: Processing ${filePath}`);
26 |
27 | let newContents = fileContents;
28 |
29 | for (const replacement of replacements) {
30 | if (Array.isArray(replacement)) {
31 | newContents = newContents.replace(replacement[0], `/* istanbul ignore next */\n${replacement[1]}`);
32 | } else {
33 | newContents = newContents.replace(replacement, `/* istanbul ignore next */\n${replacement}`);
34 | }
35 | }
36 |
37 | writeFileSync(filePath, newContents);
38 | }
39 |
--------------------------------------------------------------------------------
/scripts/prepare-package.js:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env node
2 | // Prepares package.json file for npm publishing and copies it to dist/ folder.
3 |
4 | const { join } = require("path");
5 | const { readFileSync, writeFileSync } = require("fs");
6 |
7 | const packagePath = join(__dirname, "..", "package.json");
8 | const packageContents = readFileSync(packagePath, "utf8");
9 |
10 | const { name, version, description, author, license, keywords } = JSON.parse(packageContents);
11 | const newPackage = {
12 | name,
13 | version,
14 | description,
15 | main: "index.js",
16 | author,
17 | license,
18 | keywords
19 | };
20 |
21 | writeFileSync(join(__dirname, "../dist/", "package.json"), JSON.stringify(newPackage, null, " "));
22 |
--------------------------------------------------------------------------------
/scripts/prepare-readme.js:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env node
2 | // Prepares README.md file for npm publishing and copies it to dist/ folder.
3 |
4 | const { join } = require("path");
5 | const { readFileSync, writeFileSync } = require("fs");
6 |
7 | const readmePath = join(__dirname, "..", "README.md");
8 | const readmeContents = readFileSync(readmePath, "utf8");
9 |
10 | let newContents = readmeContents;
11 |
12 | newContents = newContents.replace(/<!--(\s|.)*?-->/g, "");
13 | newContents = newContents.replace("- [Development Setup](#development-setup)", "");
14 |
15 | writeFileSync(join(__dirname, "../dist/", "README.md"), newContents);
16 |
--------------------------------------------------------------------------------
/src/decorators.ts:
--------------------------------------------------------------------------------
1 | export function checkTransformability(target: any, propertyKey: string, descriptor: PropertyDescriptor) {
2 | const originalValue = descriptor.value;
3 |
4 | descriptor.value = function(...args: any[]) {
5 | if (!(this as any).transformable) {
6 | throw new Error("Stream is not transformable.");
7 | }
8 |
9 | return originalValue.apply(this, args);
10 | };
11 | }
12 |
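13 | // Usage sketch (illustrative only - not part of the source): the decorator is
14 | // applied to stream methods that must not run once the stream is no longer
15 | // transformable; the `transformable` flag checked above is assumed to be
16 | // maintained by the stream classes (see `split()` in src/streams/string-stream.ts
17 | // for a real usage):
18 | //
19 | //     class ExampleStream {
20 | //         transformable = true;
21 | //
22 | //         @checkTransformability
23 | //         split(splitBy: string): ExampleStream {
24 | //             return this; // transform logic elided
25 | //         }
26 | //     }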
--------------------------------------------------------------------------------
/src/ifca/ifca-chain.ts:
--------------------------------------------------------------------------------
1 | import { IFCA } from "../ifca";
2 | import { MaybePromise, IFCAOptions } from "../types";
3 |
4 | export class IFCAChain<IN> {
5 | /**
6 | * All IFCA instances managed by this chain.
7 | */
8 | private chain: Array<IFCA<any, any, any>> = [];
9 |
10 | /**
11 | * @returns {number} Length of the IFCAChain (number of IFCA instances in a chain).
12 | */
13 | get length(): number {
14 | return this.chain.length;
15 | }
16 |
17 | /**
18 | * Creates and adds new IFCA to this chain.
19 | *
20 | * @param {IFCAOptions} options IFCA options.
21 | * @returns {IFCA} Newly created IFCA instance.
22 | */
23 | create<NEW_IN, NEW_OUT>(options: IFCAOptions): IFCA<NEW_IN, NEW_OUT, any> {
24 | const ifca = new IFCA<NEW_IN, NEW_OUT, any>(options);
25 |
26 | this.chain.push(ifca);
27 |
28 | return ifca;
29 | }
30 |
31 | /**
32 | * Adds passed IFCA instance to this chain. If the instance is already
33 | * the last item in a chain, it will be ignored.
34 | *
35 | * @param {IFCA} ifca IFCA instance to be added to a chain.
36 | */
37 | add<NEW_IN, NEW_OUT>(ifca: IFCA<NEW_IN, NEW_OUT, any>): void {
38 | if (this.chain[this.chain.length - 1] !== ifca) {
39 | this.chain.push(ifca);
40 | }
41 | }
42 |
43 | /**
44 | * Gets last IFCA instance from this chain.
45 | *
46 | * @returns {IFCA} IFCA instance.
47 | */
48 | get<NEW_IN, NEW_OUT>(): IFCA<NEW_IN, NEW_OUT, any> {
49 | return this.chain[this.chain.length - 1] as IFCA<NEW_IN, NEW_OUT, any>;
50 | }
51 |
52 | /**
53 | * Writes to IFCA chain (to first IFCA in a chain).
54 | *
55 | * @param {IN} chunk Chunk to be written.
56 | * @returns {MaybePromise} Drain value/promise.
57 | */
58 | write(chunk: IN): MaybePromise<void> {
59 | return this.chain[0].write(chunk);
60 | }
61 |
62 | /**
63 | * Reads from IFCA chain (from last IFCA in a chain).
64 | *
65 | * @returns {MaybePromise} Promise resolving to a chunk, chunk itself or null if there is nothing to read.
66 | */
67 | read<OUT>(): MaybePromise<OUT | null> {
68 | return this.chain[this.chain.length - 1].read() as MaybePromise<OUT | null>;
69 | }
70 |
71 | /**
72 | * Ends IFCA chain (ends first IFCA instance).
73 | *
74 | * @returns {MaybePromise} Promise resolving (or already resolved) when chain is ended.
75 | */
76 | end(): MaybePromise<void> {
77 | return this.chain[0].end();
78 | }
79 | }
80 |
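81 | // Usage sketch (illustrative only - not part of the source; the transform API
82 | // is assumed from ../ifca): a chain writes into its first IFCA and reads from
83 | // its last, so several transforms share one processing pipeline:
84 | //
85 | //     declare const options: IFCAOptions;
86 | //
87 | //     const chain = new IFCAChain<string>();
88 | //     const ifca = chain.create<string, string>(options);
89 | //
90 | //     ifca.addTransform((chunk: string) => chunk.toUpperCase());
91 | //
92 | //     await chain.write("foo");
93 | //     await chain.read<string>(); // "FOO"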
--------------------------------------------------------------------------------
/src/ifca/processing-queue.ts:
--------------------------------------------------------------------------------
1 | import { DroppedChunk, MaybePromise } from "../types";
2 | import { createResolvablePromiseObject, getId } from "../utils";
3 |
4 | export class ProcessingQueue<TYPE> {
5 | /**
6 | * Creates instance of ProcessingQueue.
7 | *
8 | * @param {Function} whenEmitted Callback function called each time any chunk leaves the queue.
9 | */
10 | constructor(whenEmitted: () => void) {
11 | this.whenEmitted = whenEmitted;
12 | }
13 |
14 | /**
15 | * Instance unique id.
16 | */
17 | public id = getId("IFCA:ProcessingQueue");
18 |
19 | /**
20 | * Ready chunks waiting to be read.
21 | */
22 | private _ready: TYPE[] = [];
23 |
24 | /**
25 | * Awaiting chunk requests.
26 | */
27 | private _requested: Object[] = [];
28 |
29 | /**
30 | * Number of chunks processed at the given moment.
31 | */
32 | private _pendingLength: number = 0;
33 |
34 | /**
35 | * Whether the queue is closed.
36 | */
37 | private _hasEnded: Boolean = false;
38 |
39 | /**
40 | * Last chunk which was pushed to the queue.
41 | */
42 | private previousChunk: Promise<TYPE | void> = Promise.resolve();
43 |
44 | /**
45 | * Callback function called each time any chunk leaves the queue.
46 | */
47 | private whenEmitted: () => void;
48 |
49 | /**
50 | * @returns {number} Number of chunks (both being processed and ready) in the queue at the given moment.
51 | */
52 | get length(): number {
53 | return this._pendingLength + this._ready.length;
54 | }
55 |
56 | /**
57 | * @returns {number} Number of chunks processed at the given moment.
58 | */
59 | get pendingLength(): number {
60 | return this._pendingLength;
61 | }
62 |
63 | /**
64 | * Last chunk which was pushed to the queue.
65 | * If there were no chunks pushed, resolved promise is returned.
66 | *
67 | * @returns {Promise} Last chunk from the queue.
68 | */
69 | get last(): Promise<TYPE | void> {
70 | return this.previousChunk;
71 | }
72 |
73 | /**
74 | * Adds chunk promise to the queue.
75 | *
76 | * @param {Promise} chunkResolver Promise resolving to a chunk.
77 | * @returns {void}
78 | */
79 | push(chunkResolver: Promise<TYPE>): void {
80 | // We don't need to worry about chunk resolution order since it is guaranteed
81 | // by IFCA with Promise.all([previousChunk, currentChunk]).
82 | chunkResolver.then((result: TYPE) => {
83 | this._pendingLength--;
84 |
85 | if (result as any !== DroppedChunk) {
86 | this._ready.push(result);
87 |
88 | // If there is any chunk requested (read awaiting) resolve it.
89 | if (this._requested.length) {
90 | const chunkRequest: any = this._requested.shift();
91 |
92 | chunkRequest.resolver(this._ready.shift() as TYPE);
93 |
94 | this.whenEmitted();
95 | }
96 | } else {
97 | // Dropped chunks also means queue length changes.
98 | this.whenEmitted();
99 | }
100 |
101 | // If the queue is closed and there are no more pending items, we need to make sure
102 | // to resolve all waiting chunk requests (with nulls, since there is no more data).
103 | if (this._hasEnded) {
104 | this.resolveAwaitingRequests();
105 | }
106 | });
107 |
108 | this._pendingLength++;
109 |
110 | this.previousChunk = chunkResolver;
111 | }
112 |
113 | /**
114 | * Reads chunk from the queue.
115 | *
116 | * If there are ready chunks waiting, a value is returned. If not, a promise
117 | * which will be resolved when the next chunk's processing completes is returned.
118 | *
119 | * If the queue is closed and no more data is available, `null` is returned.
120 | *
121 | * @returns {MaybePromise} Promise resolving to a chunk or chunk.
122 | */
123 | read(): MaybePromise<TYPE | null> {
124 | // If chunk is ready, simply return it.
125 | if (this._ready.length) {
126 | // TODO handle nulls?
127 |
128 | const chunk = this._ready.shift() as TYPE;
129 |
130 | this.whenEmitted();
131 |
132 | return chunk;
133 | }
134 |
135 | // Add chunk request to a queue if:
136 | // * queue is not closed and there are no ready chunks
137 | // * queue is closed but there are still pending chunks
138 | if (!this._hasEnded || this._hasEnded && this._pendingLength > 0) {
139 | const chunkRequest = createResolvablePromiseObject<TYPE>();
140 |
141 | this._requested.push(chunkRequest);
142 |
143 | return chunkRequest.promise as Promise<TYPE>;
144 | }
145 |
146 | return null;
147 | }
148 |
149 | /**
150 | * Closes the queue and resolves all awaiting chunk requests.
151 | *
152 | * @returns {void}
153 | */
154 | close() {
155 | this._hasEnded = true;
156 | this.resolveAwaitingRequests();
157 | }
158 |
159 | /**
160 | * Resolves all awaiting chunk requests which cannot be resolved due to end of data.
161 | *
162 | * @returns {void}
163 | */
164 | private resolveAwaitingRequests() {
165 | if (this._hasEnded && this._pendingLength === 0 && this._requested.length > 0) {
166 | for (const chunkRequest of this._requested) {
167 | (chunkRequest as any).resolver(null);
168 | }
169 | }
170 | }
171 | }
172 |
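173 | // Usage sketch (illustrative only - not part of the source): the queue accepts
174 | // promises of chunks and hands them out to readers in order; a read may happen
175 | // before the corresponding chunk resolves:
176 | //
177 | //     const queue = new ProcessingQueue<string>(() => { /* chunk emitted */ });
178 | //
179 | //     queue.push(Promise.resolve("foo"));
180 | //
181 | //     await queue.read(); // "foo"
182 | //
183 | //     queue.close();
184 | //     await queue.read(); // null - queue closed, nothing pending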
--------------------------------------------------------------------------------
/src/index.ts:
--------------------------------------------------------------------------------
1 | import { BaseStream } from "./streams/base-stream";
2 | import { DataStream } from "./streams/data-stream";
3 | import { StringStream } from "./streams/string-stream";
4 |
5 | export {
6 | BaseStream,
7 | DataStream,
8 | StringStream
9 | };
10 |
--------------------------------------------------------------------------------
/src/streams/base-stream.ts:
--------------------------------------------------------------------------------
1 | import { Writable } from "stream";
2 | import { TransformFunction, AnyIterable, MaybePromise } from "../types";
3 |
4 | export interface BaseStream<IN, OUT = IN> {
5 | write(chunk: IN): MaybePromise<void>;
6 | read(): MaybePromise<OUT | null>;
7 | pause(): void;
8 | resume(): void;
9 | end(): MaybePromise<void>;
10 |
11 | each<ARGS extends any[] = []>(callback: TransformFunction<OUT, void, ARGS>, ...args: ARGS): BaseStream<IN, OUT>;
12 | map<NEW_OUT, ARGS extends any[] = []>(
13 | callback: TransformFunction<OUT, NEW_OUT, ARGS>, ...args: ARGS): BaseStream<IN, NEW_OUT>;
14 | map<ARGS extends any[] = []>(
15 | callback: TransformFunction<OUT, OUT, ARGS>, ...args: ARGS): BaseStream<IN, OUT>;
16 | filter<ARGS extends any[] = []>(callback: TransformFunction<OUT, Boolean, ARGS>, ...args: ARGS): BaseStream<IN, OUT>;
17 | batch<ARGS extends any[] = []>(callback: TransformFunction<OUT, Boolean, ARGS>, ...args: ARGS): BaseStream<IN, OUT[]>;
18 | flatMap<NEW_OUT, ARGS extends any[] = []>(
19 | callback: TransformFunction<OUT, AnyIterable<NEW_OUT>, ARGS>, ...args: ARGS): BaseStream<IN, NEW_OUT>;
20 | flatMap<ARGS extends any[] = []>(
21 | callback: TransformFunction<OUT, AnyIterable<OUT>, ARGS>, ...args: ARGS): BaseStream<IN, OUT>;
22 | pipe<DEST extends BaseStream<OUT, any>>(destination: DEST, options: { end: boolean }): DEST;
23 | pipe<DEST extends Writable>(destination: DEST, options: { end: boolean }): DEST;
24 | use<NEW_OUT>(callback: (stream: BaseStream<IN, OUT>) => NEW_OUT): NEW_OUT;
25 | reduce<NEW_OUT = OUT>(
26 | callback: (previous: NEW_OUT, current: OUT) => MaybePromise<NEW_OUT>, initial?: NEW_OUT): Promise<NEW_OUT>;
27 | toArray(): Promise<OUT[]>;
28 | run(): Promise<void>;
29 | }
30 |
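31 | // Usage sketch (illustrative only - not part of the source): IN is the type
32 | // written into the stream and OUT the type read from it; transforms such as
33 | // map() produce a stream with a new OUT type, e.g.:
34 | //
35 | //     declare const stream: BaseStream<string, string>;
36 | //
37 | //     const lengths: BaseStream<string, number> = stream.map((s) => s.length);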
--------------------------------------------------------------------------------
/src/streams/proxies/stream-node-writable-proxy.ts:
--------------------------------------------------------------------------------
1 | import EventEmitter from "events";
2 | import { Readable, Writable } from "stream";
3 | import { BaseStream } from "../base-stream";
4 |
5 | export class StreamAsNodeWritableProxy<IN> extends EventEmitter {
6 |
7 | constructor(
8 | protected instance: BaseStream<IN, any>
9 | ) {
10 | super();
11 |
12 | this.attachListeners();
13 | }
14 |
15 | protected isPiped: boolean = false;
16 | protected orgOn?: Function;
17 |
18 | get writable(): Writable {
19 | return this.instance as any as Writable;
20 | }
21 |
22 | write(chunk: IN): boolean {
23 | const drain = this.instance.write(chunk);
24 |
25 | if (drain instanceof Promise) {
26 | drain.then(() => {
27 | this.emit("drain");
28 | });
29 |
30 | return false;
31 | }
32 |
33 | return true;
34 | }
35 |
36 | end(): Writable {
37 | this.instance.end();
38 |
39 | return this as any as Writable;
40 | }
41 |
42 | protected attachListeners(): void {
43 | const stream = this.instance as any;
44 |
45 | this.orgOn = stream.on;
46 |
47 | stream.on = (eventName: string, listener: (...args: any[]) => void): BaseStream<IN, any> => {
48 | this.on(eventName, listener);
49 |
50 | return this.instance;
51 | };
52 |
53 | stream.once = (eventName: string, listener: (...args: any[]) => void): BaseStream<IN, any> => {
54 | this.once(eventName, listener);
55 |
56 | return this.instance;
57 | };
58 |
59 | stream.removeListener = (eventName: string, listener: (...args: any[]) => void): BaseStream<IN, any> => {
60 | this.removeListener(eventName, listener);
61 |
62 | return this.instance;
63 | };
64 |
65 | stream.emit = this.getEmitProxy();
66 | }
67 |
68 | protected detachListeners(): void {
69 | const stream = this.instance as any;
70 |
71 | stream.on = this.orgOn;
72 | stream.once = undefined;
73 | stream.removeListener = undefined;
74 | stream.emit = undefined;
75 | }
76 |
77 | protected getEmitProxy() {
78 | return (eventName: string, ...args: any[]): boolean => {
79 | const hasListeners = this.emit(eventName, ...args);
80 | const source = args[0] as Readable;
81 | const oldDest = this.instance as any as Writable;
82 | const newDest = this as any as Writable;
83 |
84 | if (eventName === "pipe") {
85 | source.unpipe(oldDest); // detaching from the wrapped instance fires "unpipe", handled below
86 | } else if (eventName === "unpipe") {
87 | this.isPiped = true;
88 |
89 | source.pipe(newDest); // re-attach the source to this proxy so write()/"drain" govern backpressure
90 |
91 | this.detachListeners();
92 |
93 | const unpipe = source.unpipe;
94 |
95 | (source as any).unpipe = (...args1: any[]) => {
96 | if (args1[0] === oldDest) {
97 | args1[0] = newDest;
98 | }
99 |
100 | const cleanup = args1.length === 0 || args1[0] === newDest;
101 |
102 | unpipe.call(source, ...args1);
103 |
104 | if (cleanup) {
105 | source.unpipe = unpipe;
106 | }
107 | };
108 | }
109 |
110 | return hasListeners;
111 | };
112 | }
113 | }
114 |
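A usage sketch for the proxy (illustrative only; `target` stands for any scramjet stream, and the real wiring happens inside `DataStream`):

```ts
import { createReadStream } from "fs";
import { BaseStream } from "../base-stream";

declare const target: BaseStream<string, string>; // hypothetical scramjet stream

// pipe() first attaches to the wrapped instance; the intercepted "pipe"/"unpipe"
// events above then swap the destination to the proxy, whose write() translates
// a pending Promise into `false` plus a later "drain" event.
const proxy = new StreamAsNodeWritableProxy(target);

createReadStream("input.txt", { encoding: "utf8" }).pipe(proxy.writable);
```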
--------------------------------------------------------------------------------
/src/streams/string-stream.ts:
--------------------------------------------------------------------------------
1 | import { DataStream } from "./data-stream";
2 | import { AnyIterable, TransformFunction } from "../types";
3 | import { checkTransformability } from "../decorators";
4 |
5 | export class StringStream extends DataStream<string> {
6 |
7 | each<ARGS extends any[] = []>(callback: TransformFunction<string, void, ARGS>, ...args: ARGS): StringStream {
8 | return super.each(callback, ...args) as StringStream;
9 | }
10 |
11 | map<ARGS extends any[] = []>(callback: TransformFunction<string, string, ARGS>, ...args: ARGS): StringStream {
12 | return super.map(callback, ...args) as StringStream;
13 | }
14 |
15 | filter<ARGS extends any[] = []>(callback: TransformFunction<string, boolean, ARGS>, ...args: ARGS): StringStream {
16 | return super.filter(callback, ...args) as StringStream;
17 | }
18 |
19 | flatMap<ARGS extends any[] = []>(
20 | callback: TransformFunction<string, AnyIterable<string>, ARGS>,
21 | ...args: ARGS
22 | ): StringStream {
23 | return super.flatMap(callback, ...args) as StringStream;
24 | }
25 |
26 | use<NEW_OUT>(callback: (stream: StringStream) => NEW_OUT): NEW_OUT {
27 | return super.use(callback as (stream: DataStream<string>) => NEW_OUT);
28 | }
29 |
30 | split(splitBy: string): StringStream;
31 | split(splitBy: RegExp): StringStream;
32 |
33 | @checkTransformability
34 | split(splitBy: string | RegExp): StringStream {
35 | const result: any = {
36 | emitLastValue: false,
37 | lastValue: ""
38 | };
39 | const testFn = toString.call(splitBy) === "[object RegExp]"
40 | ? (chunk: string) => (splitBy as RegExp).test(chunk) : (chunk: string) => chunk.includes(splitBy as string);
41 |
42 | this.ifcaChain.create(this.options);
43 |
44 | const newStream = this.createChildStream();
45 | const callbacks = {
46 | onChunkCallback: async (chunk: string) => {
47 | const tmpChunk = `${result.lastValue}${chunk}`;
48 |
49 | result.emitLastValue = true;
50 |
51 | if (testFn(tmpChunk)) {
52 | const chunks = tmpChunk.split(splitBy);
53 |
54 | result.lastValue = chunks.pop() as string;
55 |
56 | for (const item of chunks) {
57 | await newStream.ifca.write(item);
58 | }
59 | } else {
60 | result.lastValue = tmpChunk;
61 | }
62 | },
63 | onEndCallback: async () => {
64 | if (result.emitLastValue) {
65 | await newStream.ifca.write(result.lastValue);
66 | }
67 |
68 | newStream.ifca.end();
69 | }
70 | };
71 |
72 | (this.getReaderAsyncCallback(false, callbacks))();
73 |
74 | return newStream;
75 | }
76 |
77 | parse<OUT, ARGS extends any[] = []>(
78 | callback: TransformFunction<string, OUT, ARGS>,
79 | ...args: ARGS
80 | ): DataStream<string, OUT> {
81 | return super.map(callback, ...args);
82 | }
83 |
84 | grep(pattern: RegExp): StringStream {
85 | return this.filter(chunk => pattern.test(chunk));
86 | }
87 |
88 | @checkTransformability
89 | match(pattern: RegExp): StringStream {
90 | this.ifcaChain.create(this.options);
91 |
92 | const regexpGroupsNr = pattern.source.match(/\((?!\?)/g)?.length || 0;
93 | const newStream = this.createChildStream();
94 |
95 | let onChunkCallback: (chunk: string) => Promise<void>;
96 |
97 | if (regexpGroupsNr === 0 || regexpGroupsNr === 1) {
98 | onChunkCallback = async (chunk: string) => {
99 | const matches = chunk.matchAll(pattern);
100 |
101 | for (const item of matches) {
102 | await newStream.ifca.write(item[regexpGroupsNr]);
103 | }
104 | };
105 | } else {
106 | onChunkCallback = async (chunk: string) => {
107 | const matches = chunk.matchAll(pattern);
108 |
109 | for (const item of matches) {
110 | for (let i = 1; i <= regexpGroupsNr; i++) {
111 | await newStream.ifca.write(item[i]);
112 | }
113 | }
114 | };
115 | }
116 |
117 | const callbacks = {
118 | onChunkCallback,
119 | onEndCallback: async () => {
120 | newStream.ifca.end();
121 | }
122 | };
123 |
124 | (this.getReaderAsyncCallback(false, callbacks))();
125 |
126 | return newStream;
127 | }
128 |
129 | protected createChildStream(): StringStream {
130 | this.readable = false;
131 | this.transformable = false;
132 |
133 | return new StringStream(this.options, this);
134 | }
135 | }
136 |
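A short composition sketch mirroring the BDD scenarios under test/bdd (the `countMatchingLines` helper is ad hoc):

```ts
import { StringStream } from "./string-stream";

// split() buffers the tail of each chunk until the delimiter appears, so lines
// spanning chunk boundaries are still emitted whole; grep() then filters them.
async function countMatchingLines(text: string, pattern: RegExp): Promise<number> {
    const lines = await StringStream.from([text])
        .split("\n")
        .grep(pattern)
        .toArray();

    return lines.length;
}
```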
--------------------------------------------------------------------------------
/src/types.ts:
--------------------------------------------------------------------------------
1 | export type AnyIterable<T> = T[] | Iterable<T> | AsyncIterable<T>;
2 |
3 | export type MaybePromise<S> = Promise<S> | S;
4 | export type ResolvablePromiseObject<T> = {promise: Promise<T>, resolver: () => (T)};
5 | export type ErrorWithReason = Error & { cause?: Error };
6 |
7 | export type TransformFunction<V, U, W extends any[] = []> = (chunk: V, ...args: W) => (Promise<U>|U);
8 | export type TransformErrorHandler<S, T> = (err: ErrorWithReason|undefined, chunk?: S) => MaybePromise<T|undefined>;
9 | export type TransformArray<S, T> = [TransformFunction<S, T>] | [
10 | TransformFunction<S, any>,
11 | TransformFunction<any, T>,
12 | ...TransformFunction<any, any>[]
13 | ];
14 | export type TransformHandler<S, T> =
15 | [TransformFunction<S, T>, TransformErrorHandler<S, T>?] |
16 | [undefined, TransformErrorHandler<S, T>];
17 |
18 | export type IFCAOptions = { maxParallel?: number, ordered?: boolean, strict?: boolean };
19 | export type StreamOptions = IFCAOptions & { [key: string]: any };
20 |
21 | export type StreamConstructor<T> = { new (options?: StreamOptions): T };
22 |
23 | export const DroppedChunk = Symbol("DroppedChunk");
24 |
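Two illustrative transforms against these types (the names are ad hoc, not part of the codebase): one showing the variadic `W` arguments, one showing how `DroppedChunk` doubles as a filtering signal.

```ts
import { DroppedChunk, TransformFunction } from "./types";

// W captures trailing arguments bound at call time, e.g. stream.map(multiply, 2).
const multiply: TransformFunction<number, number, [number]> =
    (chunk, factor) => chunk * factor;

// Returning the DroppedChunk symbol removes the chunk from the pipeline.
const dropEven: TransformFunction<number, number | symbol> =
    (chunk) => (chunk % 2 ? chunk : DroppedChunk);
```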
--------------------------------------------------------------------------------
/src/utils.ts:
--------------------------------------------------------------------------------
1 | import { ResolvablePromiseObject, TransformHandler } from "./types";
2 |
3 | const SCRAMJET_LOG = process.env.SCRAMJET_LOG;
4 |
5 | /**
6 | * Helper function that prints out debug messages
7 | *
8 | * @param {String} msg Debug message to be printed out
9 | * @param {*} [array] Optional array of objects
10 | */
11 | function trace(msg:any, ...array: any[]) {
12 | if (!SCRAMJET_LOG) return;
13 |
14 | const date = new Date();
15 |
16 | console.log(`${date.valueOf()}: ${msg}`, ...array);
17 | }
18 |
19 | function createResolvablePromiseObject<T>(): ResolvablePromiseObject<T> {
20 | let resolver: any;
21 |
22 | const promise = new Promise<T>(res => {
23 | resolver = res;
24 | });
25 |
26 | return { promise, resolver: resolver as () => (T) };
27 | }
28 |
29 | function isAsyncFunction(func: any): boolean {
30 | return func && func[Symbol.toStringTag] === "AsyncFunction";
31 | }
32 |
33 | function isAsyncTransformHandler(func: TransformHandler): boolean {
34 | return isAsyncFunction(func[0]) || isAsyncFunction(func[1]);
35 | }
36 |
37 | function getId(prefix: string): string {
38 | return `${ prefix }-${ Date.now() }${ (Math.random() * 100).toPrecision(2) }`;
39 | }
40 |
41 | export {
42 | trace,
43 | createResolvablePromiseObject,
44 | isAsyncFunction,
45 | isAsyncTransformHandler,
46 | getId
47 | };
48 |
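A small usage sketch for `createResolvablePromiseObject` (the one-shot gate below is hypothetical): the promise and its resolver are handed out separately, which is the deferred-resolution pattern this helper exists for.

```ts
import { createResolvablePromiseObject } from "./utils";

// Consumers await `promise`; a producer releases them by calling `resolver()`.
const gate = createResolvablePromiseObject<void>();

setTimeout(() => gate.resolver(), 100);

gate.promise.then(() => console.log("released"));
```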
--------------------------------------------------------------------------------
/test/_assets/sample.txt:
--------------------------------------------------------------------------------
1 | foo
2 | bar
3 | baz
4 | bax
5 |
--------------------------------------------------------------------------------
/test/_assets/short.txt:
--------------------------------------------------------------------------------
1 | Scramjet is a fast, simple, free and open source functional reactive
2 | stream programming framework written on top of node.js streams with
3 | multi-threading support. The code is written by chaining functions
4 | that transform data easily with ES7 async/await syntax. It is built
5 | upon the logic behind three well known javascript array operations:
6 | map, filter and reduce. Scramjet transforms are so standard and natural
7 | that we're sure you can start writing your code straight away.
8 |
--------------------------------------------------------------------------------
/test/_helpers/utils.ts:
--------------------------------------------------------------------------------
1 | import { IFCA } from "../../src/ifca";
2 | import { DroppedChunk } from "../../src/types";
3 |
4 | /**
5 | * Helper function that defers and optionally returns given output after waiting.
6 | *
7 | * @param {number} ts Number of milliseconds to wait
8 | * @param {Object} [out] Optional output
9 | * @returns {Promise} Promise resolved after given timeout
10 | */
11 | async function defer<X extends any>(ts: number, out?: X): Promise<X | undefined> {
12 | return new Promise((res) => setTimeout(() => res(out), ts));
13 | }
14 |
15 | async function deferReturn<X extends any>(ts: number, out: X): Promise<X> {
16 | return new Promise((res) => setTimeout(() => res(out), ts));
17 | }
18 |
19 | function writeInput(ifca: IFCA<any, any, any>, input: any[]): void {
20 | for (const i of input) {
21 | ifca.write(i);
22 | }
23 | }
24 |
25 | async function readNTimes(ifca: IFCA<any, any, any>, numberOfReads: number): Promise<any[]> {
26 | const reads = [];
27 |
28 | for (let i = 0; i < numberOfReads; i++) {
29 | reads.push(await ifca.read());
30 | }
31 |
32 | return reads;
33 | }
34 |
35 | async function readNTimesConcurrently(ifca: IFCA<any, any, any>, numberOfReads: number): Promise<any[]> {
36 | const reads = [];
37 |
38 | for (let i = 0; i < numberOfReads; i++) {
39 | reads.push(ifca.read());
40 | }
41 |
42 | return Promise.all(reads);
43 | }
44 |
45 | const transforms = {
46 | identity: (x: number) => x,
47 | prepend: (x: string) => `foo-${x}`,
48 | delay: async (x: number) => defer(x * 5, x),
49 | delayOdd: async (x: number) => { return x % 2 === 1 ? await defer(10 + x, x) : x; },
50 | filter: (x: number) => x % 2 ? x : DroppedChunk,
51 | filterAsync: async (x: number) => { await defer(5); return Promise.resolve(x % 2 ? x : DroppedChunk); },
52 | filterAll: () => DroppedChunk,
53 | logger: (into: any[]) => { return (x: number) => { into.push(x); return x; }; },
54 | loggerAsync: (into: any[]) => {
55 | return async (x: number) => {
56 | await defer(5);
57 | into.push(x);
58 | return Promise.resolve(x);
59 | };
60 | }
61 | };
62 |
63 | export {
64 | defer,
65 | deferReturn,
66 | writeInput,
67 | readNTimes,
68 | readNTimesConcurrently,
69 | transforms
70 | };
71 |
--------------------------------------------------------------------------------
/test/bdd/features/step_definitions/stepdefs.ts:
--------------------------------------------------------------------------------
1 | import assert from "assert";
2 | import { Given, When, Then, World, IWorld } from "@cucumber/cucumber";
3 | import { StringStream } from "../../../../src/streams/string-stream";
4 |
5 | class Context extends World {
6 | public stream!: StringStream;
7 | public asArray!: Array<string>;
8 | public results!: any;
9 | }
10 |
11 | function getContext(context: IWorld): Context {
12 | return context as Context;
13 | }
14 |
15 | Given("I have a StringStream created from", function(input) {
16 | getContext(this).stream = StringStream.from([input]);
17 | });
18 |
19 | Given("I have a StringStream created from a file {string}", function(path) {
20 | getContext(this).stream = StringStream.fromFile(path, { readStream: { encoding: "utf8" } });
21 | });
22 |
23 | When("I call split function with EOL character", function() {
24 | getContext(this).stream = getContext(this).stream.split("\n");
25 | });
26 |
27 | Then("It should result with {int} chunks as output", async function(expectedChunksNr) {
28 | const context = getContext(this);
29 |
30 | context.asArray = await context.stream.toArray();
31 |
32 | assert.strictEqual(context.asArray.length, expectedChunksNr);
33 | });
34 |
35 | Then("Chunk nr {int} is {string}", function(chunkNr, expectedValue) {
36 | assert.strictEqual(getContext(this).asArray[chunkNr - 1], expectedValue);
37 | });
38 |
39 | When("I split it into words", function() {
40 | getContext(this).stream = getContext(this).stream.split(" ");
41 | });
42 |
43 | When("I filter out all words longer than {int} characters", function(length) {
44 | getContext(this).stream = getContext(this).stream.filter(chunk => chunk.length <= length);
45 | });
46 |
47 | When("I aggregate words into sentences by {string} as sentence end", function(sentenceEnd) {
48 | const context = getContext(this);
49 |
50 | context.stream = context.stream
51 | .batch(chunk => chunk.endsWith(sentenceEnd))
52 | .map(chunk => chunk.join(" ")) as StringStream; // Should I use something like sequence here?
53 | });
54 |
55 | When("I count average sentence length by words", async function() {
56 | const context = getContext(this);
57 |
58 | context.results = await context.stream
59 | .reduce((acc, chunk) => {
60 | acc.sentences++;
61 | acc.wordsAvgSum += chunk.split(" ").length;
62 |
63 | return acc;
64 | }, { sentences: 0, wordsAvgSum: 0 });
65 |
66 | context.results.wordsAvg = context.results.wordsAvgSum / context.results.sentences;
67 | });
68 |
69 | Then("It should result with {int} sentences", function(expectedSentencesNr) {
70 | assert.strictEqual(getContext(this).results.sentences, expectedSentencesNr);
71 | });
72 |
73 | Then("It should have average sentence length of {float} words", function(expectedAvgWordsNr) {
74 | assert.strictEqual(getContext(this).results.wordsAvg.toFixed(2), expectedAvgWordsNr.toFixed(2));
75 | });
76 |
--------------------------------------------------------------------------------
/test/bdd/features/string-stream.feature:
--------------------------------------------------------------------------------
1 | Feature: StringStream
2 |
3 | Scenario: Chunks can be split by line endings (split method)
4 | Given I have a StringStream created from
5 | """
6 | Lorem ipsum dolor sit amet, consectetur adipiscing elit. Nam finibus odio
7 | euismod libero sagittis ultrices. Suspendisse at ornare odio. Phasellus nec
8 | magna massa. Duis in sapien id mi mollis pulvinar. Etiam maximus porttitor
9 | leo vitae facilisis. Aliquam cursus fermentum augue at mollis. Nulla nec leo
10 | id dolor tristique pellentesque.
11 |
12 | Vivamus leo dui, maximus sit amet consequat vitae, rhoncus id nisl. Mauris quam
13 | velit, tristique a ipsum eu, auctor mattis odio. Vestibulum vestibulum pharetra volutpat.
14 | Nulla dapibus ipsum vitae quam iaculis, non gravida magna efficitur.
15 | """
16 | When I call split function with EOL character
17 | Then It should result with 9 chunks as output
18 | And Chunk nr 5 is "id dolor tristique pellentesque."
19 |
20 | Scenario: File processing
21 | Given I have a StringStream created from a file "./build/test/_assets/short.txt"
22 | When I split it into words
23 | And I filter out all words longer than 8 characters
24 | And I aggregate words into sentences by "." as sentence end
25 | And I count average sentence length by words
26 | Then It should result with 4 sentences
27 | And It should have average sentence length of 15.5 words
28 |
29 | Scenario: Long file processing
30 | Given I have a StringStream created from a file "./build/test/_assets/long.txt"
31 | When I split it into words
32 | And I filter out all words longer than 8 characters
33 | And I aggregate words into sentences by "." as sentence end
34 | And I count average sentence length by words
35 | Then It should result with 12709 sentences
36 | And It should have average sentence length of 34.22 words
37 |
--------------------------------------------------------------------------------
/test/release/index.js:
--------------------------------------------------------------------------------
1 | import { DataStream } from "@scramjet/framework";
2 |
3 | (async () => {
4 | const result = await DataStream
5 | .from(["It", "works", "!"])
6 | .map(item => item.toUpperCase())
7 | .reduce((prev, next) => `${prev} ${next}`);
8 |
9 | console.log(result);
10 | })();
11 |
--------------------------------------------------------------------------------
/test/release/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "@scramjet/framework-release-test",
3 | "main": "index.js",
4 | "author": "Scramjet ",
5 | "license": "AGPL-3.0",
6 | "private": true,
7 | "type": "module"
8 | }
9 |
--------------------------------------------------------------------------------
/test/release/run.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | echo "Installing $1 version..."
4 |
5 | npm i @scramjet/framework@$1 --no-save
6 |
7 | echo "Starting test..."
8 |
9 | node index.js
10 |
--------------------------------------------------------------------------------
/test/samples/generic-this-1.ts:
--------------------------------------------------------------------------------
1 | // This is a sample file demonstrating the approach used for "polymorphic this"
2 | // for generic method inheritance. It can be run with:
3 | //
4 | // npm run build && node build/test/samples/generic-this-1.js
5 |
6 | class BaseClass<T> {
7 | public value: T;
8 |
9 | constructor(value: T) {
10 | this.value = value;
11 | }
12 |
13 | create(value: T): BaseClass<T>;
14 | create<U>(value: U): BaseClass<U>;
15 | create<U>(value: U): BaseClass<U> {
16 | return new BaseClass(value);
17 | }
18 |
19 | method1(): this {
20 | return this;
21 | }
22 |
23 | method2(): BaseClass<T> {
24 | return this;
25 | }
26 |
27 | method3(value: T): BaseClass<T> {
28 | return this.create(value);
29 | }
30 |
31 | method5(value: T): BaseClass<T>;
32 | method5<U>(value: U): BaseClass<U>;
33 | method5<U>(value: U): BaseClass<U> {
34 | return this.create(value);
35 | }
36 | }
37 |
38 | class DerivedClass<T> extends BaseClass<T> {
39 | create(value: T): DerivedClass<T>;
40 | create<U>(value: U): DerivedClass<U>;
41 | create<U>(value: U): DerivedClass<U> {
42 | return new DerivedClass(value);
43 | }
44 |
45 | method2(): DerivedClass<T> {
46 | return super.method2() as DerivedClass<T>;
47 | }
48 |
49 | method3(value: T): DerivedClass<T> {
50 | return super.method3(value) as DerivedClass<T>;
51 | }
52 |
53 | method5(value: T): DerivedClass<T>;
54 | method5<U>(value: U): DerivedClass<U>;
55 | method5<U>(value: U): DerivedClass<U> {
56 | return super.method5(value) as DerivedClass<U>;
57 | }
58 |
59 | ownMethod1() {
60 | console.log(this);
61 | }
62 | }
63 |
64 | class DerivedClassFixedType extends DerivedClass<number> {
65 | create(value: number): DerivedClassFixedType {
66 | return new DerivedClassFixedType(value);
67 | }
68 |
69 | method2(): DerivedClassFixedType {
70 | return super.method2() as DerivedClassFixedType;
71 | }
72 |
73 | method3(value: number): DerivedClassFixedType {
74 | return super.method3(value) as DerivedClassFixedType;
75 | }
76 |
77 | method5(value: number): DerivedClassFixedType {
78 | return super.method5(value) as DerivedClassFixedType;
79 | }
80 |
81 | ownMethod2() {
82 | console.log(this);
83 | }
84 | }
85 |
86 | class BaseClassFixedType extends BaseClass<string> {
87 | create(value: string): BaseClassFixedType {
88 | return new BaseClassFixedType(value);
89 | }
90 |
91 | method2(): BaseClassFixedType {
92 | return super.method2() as BaseClassFixedType;
93 | }
94 |
95 | method3(value: string): BaseClassFixedType {
96 | return super.method3(value) as BaseClassFixedType;
97 | }
98 |
99 | method5(value: string): BaseClassFixedType {
100 | return super.method5(value) as BaseClassFixedType;
101 | }
102 |
103 | ownMethod3() {
104 | console.log(this);
105 | }
106 | }
107 |
108 | // --- BaseClass
109 |
110 | const bcString = new BaseClass("foo");
111 | const bcString1 = bcString.method1();
112 | const bcString2 = bcString.method2();
113 | const bcString3 = bcString.method3("bar");
114 | const bcString5 = bcString.method5(123);
115 |
116 | for (const instance of [bcString, bcString1, bcString2, bcString3, bcString5]) {
117 | console.log(`instanceof: ${ instance instanceof BaseClass }; constructor.name: ${ instance.constructor.name }`);
118 | }
119 |
120 | // --- DerivedClass
121 |
122 | const dcString = new DerivedClass("foo");
123 | const dcString1 = dcString.method1();
124 | const dcString2 = dcString.method2();
125 | const dcString3 = dcString.method3("bar");
126 | const dcString5 = dcString.method5(123);
127 |
128 | for (const instance of [dcString, dcString1, dcString2, dcString3, dcString5]) {
129 | console.log(`instanceof: ${ instance instanceof DerivedClass }; constructor.name: ${ instance.constructor.name }`);
130 | }
131 |
132 | dcString.ownMethod1();
133 | dcString1.ownMethod1();
134 | dcString2.ownMethod1();
135 | dcString3.ownMethod1();
136 | dcString5.ownMethod1();
137 |
138 | // --- DerivedClassFixedType
139 |
140 | const dcftString = new DerivedClassFixedType(123);
141 | const dcftString1 = dcftString.method1();
142 | const dcftString2 = dcftString.method2();
143 | const dcftString3 = dcftString.method3(456);
144 | const dcftString5 = dcftString.method5(123);
145 |
146 | for (const instance of [dcftString, dcftString1, dcftString2, dcftString3, dcftString5]) {
147 | console.log(`instanceof: ${ instance instanceof DerivedClassFixedType }; constructor.name: ${ instance.constructor.name }`);
148 | }
149 |
150 | dcftString.ownMethod2();
151 | dcftString1.ownMethod2();
152 | dcftString2.ownMethod2();
153 | dcftString3.ownMethod2();
154 | dcftString5.ownMethod2();
155 |
156 | // --- BaseClassFixedType
157 |
158 | const bcftString = new BaseClassFixedType("foo");
159 | const bcftString1 = bcftString.method1();
160 | const bcftString2 = bcftString.method2();
161 | const bcftString3 = bcftString.method3("bar");
162 | const bcftString5 = bcftString.method5("baz");
163 | // const toBcStrign6 = bcftString.method5(123); // This won't work for now
164 |
165 | for (const instance of [bcftString, bcftString1, bcftString2, bcftString3, bcftString5]) {
166 | console.log(`instanceof: ${ instance instanceof BaseClassFixedType }; constructor.name: ${ instance.constructor.name }`);
167 | }
168 |
169 | bcftString.ownMethod3();
170 | bcftString1.ownMethod3();
171 | bcftString2.ownMethod3();
172 | bcftString3.ownMethod3();
173 | bcftString5.ownMethod3();
174 |
--------------------------------------------------------------------------------
/test/unit/ifca/ifca-chain.spec.ts:
--------------------------------------------------------------------------------
1 | import test from "ava";
2 | import { IFCA } from "../../../src/ifca";
3 | import { IFCAChain } from "../../../src/ifca/ifca-chain";
4 |
5 | test("Same IFCA instance is not duplicated at the end of the IFCAChain when added twice", async (t) => {
6 | const ifca = new IFCA<any, any, any>({});
7 | const ifcaChain = new IFCAChain<any>();
8 |
9 | t.is(ifcaChain.length, 0);
10 |
11 | ifcaChain.add(ifca);
12 | ifcaChain.add(ifca);
13 |
14 | t.is(ifcaChain.length, 1);
15 |
16 | ifcaChain.create({});
17 |
18 | t.is(ifcaChain.length, 2);
19 | });
20 |
--------------------------------------------------------------------------------
/test/unit/ifca/oal.spec.ts:
--------------------------------------------------------------------------------
1 | import test from "ava";
2 | import { IFCA } from "../../../src/ifca";
3 | import { defer } from "../../_helpers/utils";
4 |
5 | /**
6 | * How many items can be waiting to be flushed
7 | */
8 | const MAX_PARALLEL = 8;
9 |
10 | test("Acceptable latency test", async (t) => {
11 | let sum: bigint = BigInt(0);
12 | let cnt = BigInt(0);
13 |
14 | const ifca = new IFCA<{ i: number}, {i: number, ts: number}, any>({ maxParallel: MAX_PARALLEL })
15 | .addTransform(({ i }: {i: number}) => ({ i, ts: process.hrtime.bigint() }))
16 | .addTransform(({ i, ts }) => ({ i, latency: process.hrtime.bigint() - ts }));
17 |
18 | await Promise.all([
19 | (async () => {
20 | for (let i = 0; i < 4000; i++) {
21 | const ret = ifca.write({ i: i + 1 });
22 |
23 | await Promise.all([ret, defer(1)]);
24 | }
25 | ifca.end(); // TODO: test for correct end operation
26 | })().finally(() => t.log("Write done")),
27 | (async () => {
28 | await defer(10);
29 | let i = 0;
30 |
31 | while (++i) {
32 | const data = await ifca.read();
33 |
34 | if (data === null) {
35 | t.log("data done");
36 | return;
37 | }
38 | t.is(data?.i, i, "Data is in correct order");
39 | if (i < 2000) continue;
40 |
41 | cnt++;
42 | sum += data.latency;
43 | }
44 | })().finally(() => t.log("Read done"))
45 | ]);
46 |
47 | const latency = Number(sum * BigInt(1e6) / cnt) / 1e6;
48 |
49 | t.log("Latency:", latency);
50 | t.false(latency > 1e4, "Latency does not exceed 10us");
51 | });
52 |
--------------------------------------------------------------------------------
/test/unit/ifca/pts.spec.ts:
--------------------------------------------------------------------------------
1 | /* eslint-disable */
2 | import test from "ava";
3 | import { IFCA } from "../../../src/ifca";
4 | import { TransformFunction } from "../../../src/types";
5 | import { defer } from "../../_helpers/utils";
6 |
7 | type Dict = { [k: string]: number; };
8 |
9 | /**
10 | * Helper function that checks if passed argument is a promise.
11 | *
12 | * @param {Object} x Object to be checked
13 | * @returns {Boolean}
14 | */
15 | const isPromise = (x: any) => typeof x !== "undefined" && typeof x.then === "function";
16 |
17 | test("PTS", async (t) => {
18 |
19 | /**
20 | * How many items can be waiting to be flushed
21 | */
22 | const MAX_PARALLEL = 8;
23 |
24 | /**
25 | * How many elements should be tested
26 | */
27 | const ELEMENTS = 16;
28 |
29 | let a = 0;
30 | let x = 0;
31 | let y = 0;
32 | let z = 0;
33 |
34 | const input = Array.from(Array(ELEMENTS).keys()).map(() => {
35 | return { a: a++ };
36 | });
37 |
38 | const asyncPromiseTransform: TransformFunction<{a: number}, {[k: string]: number}> = async ({ a }: { a: number }) => {
39 | const out = { a, n: a % 2, x: x++ }
40 | if (a % 2) await defer(100, out);
41 | return out;
42 | };
43 | const syncPromiseTransform = ({ a, n, x }: Dict) => ({ a, n, x, y: y++ });
44 | const syncPromiseTransform2 = ({ a, n, x, y }: Dict) => ({ a, n, x, y, z: z++ });
45 |
46 | const ifca = new IFCA<{ [k: string]: number }, { a: number }, any>({ maxParallel: MAX_PARALLEL, strict: true })
47 | .addTransform(asyncPromiseTransform)
48 | .addTransform(syncPromiseTransform)
49 | .addTransform(syncPromiseTransform2);
50 |
51 | const writeNext = () => {
52 | const ref = input.shift();
53 | if (!ref) throw new Error("End of input");
54 | return ifca.write(ref);
55 | };
56 |
57 | t.false(isPromise(writeNext()), "Synchronous entry should resolve write immediately");
58 | await defer(10);
59 | const item1 = ifca.read(); // {"a":0,"n":0,"x":0,"y":0,"z":0}
60 | t.false(isPromise(item1), "Not a promise.");
61 |
62 | t.deepEqual(item1 as unknown as Dict, { a: 0, n: 0, x: 0, y: 0, z: 0 }, "Initial entry should be read immediately")
63 | t.false(isPromise(writeNext()), "1st entry should resolve write immediately");
64 | t.false(isPromise(writeNext()), "2nd entry should resolve write immediately");
65 | t.false(isPromise(writeNext()), "3rd entry should resolve write immediately");
66 | t.false(isPromise(writeNext()), "4th entry should resolve write immediately");
67 | t.false(isPromise(writeNext()), "5th entry should resolve write immediately");
68 | t.false(isPromise(writeNext()), "6th entry should resolve write immediately");
69 | t.false(isPromise(writeNext()), "7th entry should resolve write immediately");
70 | t.true(isPromise(writeNext()), "8th entry should fill up max parallel");
71 |
72 | // 8 chunks pending
73 | t.like(ifca.state, {pending: 8});
74 |
75 | // TODO: make this go 8 items beyond
76 | const item2 = ifca.read(); // {a: 1}
77 | const item3 = ifca.read(); // {a: 2}
78 | t.true(isPromise(item2), "Is a promise.");
79 | t.true(isPromise(item3), "Is a promise.");
80 |
81 | // 8 chunks pending (still since we haven't await on read() call)
82 | t.like(ifca.state, {pending: 8});
83 |
84 | await defer(20);
85 | // 8 chunks pending
86 | t.like(ifca.state, {pending: 8});
87 |
88 | t.true(isPromise(ifca.read()), "Is a promise."); // read {a: 3}.
89 |
90 | await defer(100);
91 | // All chunks should be processed by now.
92 | t.like(ifca.state, {pending: 0});
93 |
94 | t.false(isPromise(writeNext()), "After reading should allow to write immediately again"); // write {a: 9}
95 |
96 | const read8 = [
97 | ifca.read(), ifca.read(), ifca.read(), ifca.read(), ifca.read(), ifca.read(), ifca.read(), ifca.read()
98 | ];
99 |
100 | t.deepEqual(read8[0], { a:4, n: 0, x: 4, y: 2, z: 2 }, "Reads the 4th element");
101 | t.deepEqual(read8[4], { a:8, n: 0, x: 8, y: 4, z: 4 }, "Reads the 8th element");
102 | t.true(isPromise(read8[5]), "The 9th element is not resolved yet");
103 | t.deepEqual(await read8[5], { a:9, n: 1, x: 9, y: 9, z: 9 }, "The 9th element resolves");
104 |
105 |
106 | t.true(isPromise(read8[6]), "The 10th element is a promise");
107 |
108 | let wrote = false;
109 | defer(10).then(() => {
110 | writeNext();
111 | writeNext();
112 | ifca.end(); // without ifca.end() -> Error: Promise returned by test never resolved
113 | wrote = true;
114 | });
115 |
116 | t.deepEqual(await read8[6], { a:10, n: 0, x: 10, y: 10, z: 10 }, "The 10th element resolves when written");
117 | t.deepEqual(await read8[7], { a:11, n: 1, x: 11, y: 11, z: 11 }, "The 11th element resolves when written");
118 |
119 | t.true(wrote, "already wrote");
120 |
121 | t.is(await ifca.read(), null, "Reached the end."); // TODO: we need await since the read is resolved before end is.
122 |
123 | t.pass();
124 |
125 | });
126 |
127 | test("Simple order check", async (t) => {
128 |
129 | /**
130 | * How many items can be waiting to be flushed
131 | */
132 | const MAX_PARALLEL = 4;
133 |
134 | /**
135 | * How many elements should be tested
136 | */
137 | const ELEMENTS = 6;
138 |
139 | let a = 0;
140 | let x = 0;
141 | let y = 0;
142 | let z = 0;
143 |
144 | const input = Array.from(Array(ELEMENTS).keys()).map(() => {
145 | return { a: a++ };
146 | });
147 |
148 | const asyncPromiseTransform: TransformFunction<{a: number}, {[k: string]: number}> = async ({ a }: { a: number }) => {
149 | const out = { a, n: a % 2, x: x++ }
150 | if (a % 2) await defer(100, out);
151 | return out;
152 | };
153 | const syncPromiseTransform = ({ a, n, x }: Dict) => ({ a, n, x, y: y++ });
154 | const syncPromiseTransform2 = ({ a, n, x, y }: Dict) => ({ a, n, x, y, z: z++ });
155 |
156 | const ifca = new IFCA<{ [k: string]: number }, { a: number }, any>({ maxParallel: MAX_PARALLEL, strict: true })
157 | .addTransform(asyncPromiseTransform)
158 | .addTransform(syncPromiseTransform)
159 | .addTransform(syncPromiseTransform2);
160 |
161 | const writeNext = () => {
162 | const ref = input.shift();
163 | if (!ref) throw new Error("End of input");
164 | return ifca.write(ref);
165 | };
166 |
167 | t.false(isPromise(writeNext()), "Synchronous entry should resolve write immediately");
168 | await defer(10);
169 | const item1 = ifca.read(); // {"a":0,"n":0,"x":0,"y":0,"z":0}
170 |
171 | t.false(isPromise(item1), "Not a promise.");
172 | t.deepEqual(item1 as unknown as Dict, { a: 0, n: 0, x: 0, y: 0, z: 0 }, "Initial entry should be read immediately");
173 |
174 | t.false(isPromise(writeNext()), "1st entry should resolve write immediately");
175 | t.false(isPromise(writeNext()), "2nd entry should resolve write immediately");
176 | t.false(isPromise(writeNext()), "3rd entry should resolve write immediately");
177 | t.true(isPromise(writeNext()), "4th entry should fill up max parallel"); // As MAX_PARALLEL = 4 and we have 4 items in the queue.
178 | t.true(isPromise(writeNext()), "5th entry should return a promise as it's above max parallel");
179 |
180 | const item2 = ifca.read(); // {"a":1,"n":1,"x":1,"y":3,"z":3}
181 | const item3 = ifca.read(); // {"a":2,"n":0,"x":2,"y":1,"z":1}
182 | const item4 = ifca.read(); // {"a":3,"n":1,"x":3,"y":4,"z":4}
183 | const item5 = ifca.read(); // {"a":4,"n":0,"x":4,"y":2,"z":2}
184 | const item6 = ifca.read(); // {"a":5,"n":1,"x":5,"y":5,"z":5}
185 |
186 | t.true(isPromise(item2), "Is a promise.");
187 | t.true(isPromise(item3), "Is a promise.");
188 | t.true(isPromise(item4), "Is a promise.");
189 | t.true(isPromise(item5), "Is a promise.");
190 | t.true(isPromise(item6), "Is a promise.");
191 |
192 | // We need to add a .md description of the following occurrences where chained functions are called on items immediately, not waiting for previous resolutions of the same function.
193 | t.deepEqual(await item2 as unknown as Dict, { a: 1, n: 1, x: 1, y: 3, z: 3 }, "Queueing does not occur on async element");
194 | t.deepEqual(await item3 as unknown as Dict, { a: 2, n: 0, x: 2, y: 1, z: 1 }, "Queueing does not occur on sync element");
195 | t.deepEqual(await item4 as unknown as Dict, { a: 3, n: 1, x: 3, y: 4, z: 4 }, "Queueing does not occur on async element");
196 | t.deepEqual(await item5 as unknown as Dict, { a: 4, n: 0, x: 4, y: 2, z: 2 }, "Queueing does not occur on sync element");
197 | t.deepEqual(await item6 as unknown as Dict, { a: 5, n: 1, x: 5, y: 5, z: 5 }, "Overflowing element is in sync again");
198 | });
199 |
--------------------------------------------------------------------------------
/test/unit/pts/README.md:
--------------------------------------------------------------------------------
1 | # PTS test
2 |
3 | **IMPORTANT**: These tests are kept for legacy reasons for now. They check how the old and the new IFCA perform in the context of being used by PromiseTransformStream (the v4 way). Some information below may be outdated.
4 |
5 | ## How to run the tests?
6 |
7 | In the `pts` directory execute `npm run test`.
8 |
9 | ## Benchmarks
10 |
11 | In order to measure how fast each implementation works, the following commands and methods were considered:
12 |
13 | - `/usr/bin/time` - which measures how long given command is running
14 | - `console.time()` and `console.timeEnd()` - which starts and ends timer respectively
15 | - `performance.now()` - which returns the current high resolution millisecond timestamp
16 |
17 | > Unlike other timing data available to JavaScript (for example Date.now), the timestamps returned by performance.now() are not limited to one-millisecond resolution. Instead, they represent times as floating-point numbers with up to microsecond precision.
18 |
19 | > Also unlike Date.now(), the values returned by performance.now() always increase at a constant rate, independent of the system clock (which might be adjusted manually or skewed by software like NTP).
20 |
21 | Source: https://developer.mozilla.org/en-US/docs/Web/API/Performance/now
22 |
23 | Therefore, `performance.now()` was added to the `PTS` test in order to measure execution time. However, we may note that in our circumstances all three methods may produce the same or very similar results.
24 |
25 | Next, in `order.spec.js` the following line was swapped: `const { PromiseTransformStream } = require("../lib/promise-transform-stream-ifca");` (the new IFCA algorithm) with `const { PromiseTransformStream } = require("../lib/promise-transform-stream");` (the old code based on mk-transform), and the tests were run 5 times.
26 |
27 | After initial benchmarking, it was decided to run the tests serially rather than concurrently, so the `test.serial()` method was used. Moreover, before each test starts, a one-second `sleep()` is called in order to allow garbage collection.
28 |
29 | Finally, IFCA was compiled with the `tsc` compiler (`npm run build`) and the compiled JS code was used in the third set.
30 |
31 | Results are shown in the table below:
32 |
33 | | Algorithm | Number of test runs | Min | Max | Average |
34 | | ------------- | ------------------- | ------ | ------ | ------- |
35 | | mk-transform | 5 | 4.203s | 4.220s | 4.212s |
36 | | IFCA | 5 | 4.267s | 4.281s | 4.275s |
37 | | Compiled IFCA | 5 | 4.180s | 4.219s | 4.190s |
38 |
39 | ### Detailed results
40 |
41 | | Run | mk-transform | IFCA | Compiled IFCA |
42 | | ------- | ------------ | ------ | ------------- |
43 | | 1 | 4.220s | 4.278s | 4.181s |
44 | | 2 | 4.215s | 4.267s | 4.183s |
45 | | 3 | 4.205s | 4.271s | 4.186s |
46 | | 4 | 4.215s | 4.281s | 4.180s |
47 | | 5 | 4.203s | 4.279s | 4.219s |
48 | | | | | |
49 | | Average | 4.212s | 4.275s | 4.190s |
50 |
51 |
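For reference, the measurement boils down to a wrapper like the sketch below (`run` stands for whichever pipeline variant is being benchmarked):

```ts
import { performance } from "perf_hooks";

async function timed(label: string, run: () => Promise<void>): Promise<void> {
    const start = performance.now();

    await run();

    // Sub-millisecond resolution, monotonic regardless of system clock changes.
    console.log(`${label}: ${(performance.now() - start).toFixed(3)} ms`);
}
```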
--------------------------------------------------------------------------------
/test/unit/pts/helpers/mk-read.js:
--------------------------------------------------------------------------------
1 | const { StreamError } = require("./stream-errors");
2 |
3 | /**
4 | * Generate read methods on the stream class.
5 | *
6 | * @internal
7 | * @param {DataStreamOptions} newOptions Sanitized options passed to scramjet stream
8 | * @return {Boolean} returns true if creation of new stream is not necessary (promise can be pushed to queue)
9 | */
10 | module.exports = () =>
11 | function mkRead(newOptions) {
12 | console.log("MK READ");
13 | this.setOptions({
14 | // transforms: [],
15 | promiseRead: newOptions.promiseRead,
16 | });
17 |
18 | let chunks = [];
19 | let done = false;
20 | // TODO: implement the actual parallel logic - items can be promises and should be flushed when resolved.
21 | const pushSome = () =>
22 | Array.prototype.findIndex.call(chunks, (chunk) => {
23 | return !this.push(chunk);
24 | }) + 1;
25 |
26 | // let last = Promise.resolve();
27 | // let processing = [];
28 |
29 | // this.on("pipe", () => {
30 | // throw new Error("Cannot pipe to a Readable stream");
31 | // });
32 |
33 | // this._read = async (size) => {
34 | // console.log("MK-READ _read");
35 | // try {
36 | // let add = 0;
37 | // if (!done) {
38 | // const nw = await this._options.promiseRead(size);
39 | // chunks.push(...nw);
40 | // add = nw.length;
41 | // }
42 | // const pushed = pushSome();
43 | // chunks = chunks.slice(pushed || Infinity);
44 |
45 | // // Yes, it could be reassigned, but only with true so we don't need to care
46 | // // eslint-disable-next-line require-atomic-updates
47 | // done = done || !add;
48 |
49 | // if (done && !chunks.length) {
50 | // await new Promise((res, rej) => this._flush((err) => (err ? rej(err) : res())));
51 | // this.push(null);
52 | // }
53 |
54 | // // console.log("read", pushed, chunks, add, size);
55 | // // TODO: check for existence of transforms and push to transform directly.
56 | // // TODO: but in both cases transform methods must be there... which aren't there now.
57 | // // TODO: at least the subset that makes the transform - yes, otherwise all that transform stuff
58 | // // TODO: is useless and can be bypassed...
59 | // } catch (e) {
60 | // await this.raise(new StreamError(e, this));
61 | // return this._read(size);
62 | // }
63 | // };
64 | };
65 |
--------------------------------------------------------------------------------
/test/unit/pts/helpers/mk-transform.js:
--------------------------------------------------------------------------------
1 | const ignore = () => 0;
2 | const { StreamError } = require("./stream-errors");
3 |
4 | const { trace } = require("../../../../src/utils");
5 |
6 | /**
7 | * Generate transform methods on the stream class.
8 | *
9 | * @internal
10 | * @memberof PromiseTransformStream
11 | * @param {DataStreamOptions} newOptions Sanitized options passed to scramjet stream
12 | * @return {Boolean} returns true if creation of new stream is not necessary (promise can be pushed to queue)
13 | */
14 | module.exports = ({ filter }) =>
15 | function mkTransform(newOptions) {
16 | trace("mkTransform called!");
17 | trace(newOptions);
18 | this.setOptions({
19 | transforms: [],
20 | beforeTransform: newOptions.beforeTransform,
21 | afterTransform: newOptions.afterTransform,
22 | promiseFlush: newOptions.promiseFlush,
23 | });
24 |
25 | this.cork(); // https://nodejs.org/api/stream.html#stream_writable_cork
26 | if (
27 | newOptions.referrer instanceof this.constructor &&
28 | !newOptions.referrer._tapped &&
29 | !newOptions.referrer._options.promiseFlush
30 | ) {
31 | return true;
32 | }
33 | trace("mkTransform DONT RETURN TRUE");
34 |
35 | process.nextTick(this.uncork.bind(this)); // https://nodejs.org/api/stream.html#stream_writable_uncork
36 | trace(
37 | "mkTransform this.pushTransform. before: this._scramjet_options.transforms.length: " +
38 | this._scramjet_options.transforms.length
39 | );
40 | this.pushTransform(newOptions);
41 | trace("after push this._scramjet_options.transforms.length: " + this._scramjet_options.transforms.length);
42 |
43 | if (this._scramjet_options.transforms.length) {
44 | const processing = [];
45 | let last = Promise.resolve(); // Promise { undefined }
46 |
47 | trace("TRANSFORMS LENGTH > 0. LAST:");
48 | trace(last);
49 |
50 | /**
51 | * https://nodejs.org/api/stream.html#stream_transform_transform_chunk_encoding_callback
52 | * @param {*} chunk Chunk to be processed
53 | * @param {*} encoding Encoding not used
54 | * @param {*} callback Callback
55 | * @returns
56 | */
57 | this._transform = (chunk, encoding, callback) => {
58 | trace(
59 | "TRANSFORM this._transform chunk: " +
60 | JSON.stringify(chunk) +
61 | " length: " +
62 | this._scramjet_options.transforms.length
63 | );
64 | if (!this._scramjet_options.transforms.length) {
65 | return last.then(() => callback(null, chunk));
66 | }
67 |
68 | const prev = last;
69 | const ref = (last = Promise.all([
70 | this._scramjet_options.transforms
71 | .reduce((prev, transform) => prev.then(transform), Promise.resolve(chunk))
72 | .catch((err) => (err === filter ? filter : Promise.reject(err))),
73 | prev,
74 | ])
75 | .catch(async (e) => {
76 | if (e instanceof Error) {
77 | return Promise.all([this.raise(new StreamError(e, this, "EXTERNAL", chunk), chunk), prev]);
78 | } else {
79 | throw new Error("New stream error raised without cause!");
80 | }
81 | })
82 | .then((args) => {
83 | if (args && args[0] !== filter && typeof args[0] !== "undefined") {
84 | try {
85 | this.push(args[0]);
86 | } catch (e) {
87 | return this.raise(new StreamError(e, this, "INTERNAL", chunk), chunk);
88 | }
89 | }
90 | }));
91 |
92 | trace("PROCESSING BEFORE PUSH:");
93 | trace(
94 | "processing.length >= this._options.maxParallel: " +
95 | (processing.length >= this._options.maxParallel)
96 | );
97 | processing.push(ref); // append item to queue
98 | if (processing.length >= this._options.maxParallel) {
99 | processing[processing.length - this._options.maxParallel].then(() => callback()).catch(ignore);
100 | } else {
101 | trace("EXECUTE CALLBACK...");
102 | callback();
103 | }
104 |
105 | trace("PROCESSING AFTER PUSH:");
106 | trace(processing);
107 |
108 | ref.then(() => {
109 | trace("THEN...");
110 | trace(processing);
111 | const next = processing.shift(); // Take out the first element from processing obviously
112 |
113 | trace("NEXT: ");
114 | trace(next);
115 | trace(JSON.stringify(next));
116 |
117 | const result =
118 | ref !== next &&
119 | this.raise(
120 | new StreamError(
121 | new Error(`Promise resolved out of sequence in ${this.name}!`),
122 | this,
123 | "TRANSFORM_OUT_OF_SEQ",
124 | chunk
125 | ),
126 | chunk
127 | );
128 | trace("RESULT: " + result);
129 | return result;
130 | });
131 | };
132 |
133 | this._flush = (callback) => {
134 | trace("FLUSH");
135 | if (this._scramjet_options.runFlush) {
136 | last.then(this._scramjet_options.runFlush)
137 | .then(
138 | (data) => {
139 | if (Array.isArray(data)) data.forEach((item) => this.push(item));
140 | else if (data) this.push(data);
141 | },
142 | (e) => this.raise(e)
143 | )
144 | .then(() => callback());
145 | } else {
146 | last.then(() => callback());
147 | }
148 | };
149 | }
150 | };
151 |
--------------------------------------------------------------------------------
/test/unit/pts/helpers/mk-write.js:
--------------------------------------------------------------------------------
1 | const { StreamError } = require("./stream-errors");
2 |
3 | /**
4 | * Generate write methods on the stream class.
5 | *
6 | * @internal
7 | * @param {DataStreamOptions} newOptions Sanitized options passed to scramjet stream
8 | * @return {Boolean} returns true if creation of new stream is not necessary (promise can be pushed to queue)
9 | */
10 | module.exports = () =>
11 | function mkWrite(newOptions) {
12 | this.tap().setOptions({
13 | // transforms: [],
14 | promiseWrite: newOptions.promiseWrite,
15 | });
16 |
17 | this.pipe = () => {
18 | throw new Error("Method not allowed on a Writable only stream");
19 | };
20 |
21 | this._write = (chunk, encoding, callback) => {
22 | console.log(
23 | "PTS _write: " +
24 | JSON.stringify(chunk) +
25 | " encoding: " +
26 | encoding +
27 | " callback: " +
28 | JSON.stringify(callback)
29 | );
30 | Promise.resolve(chunk)
31 | .then((chunk) => this._options.promiseWrite(chunk, encoding))
32 | .then(() => callback())
33 | .catch((e) => this.raise(new StreamError(e, this, "EXTERNAL", chunk), chunk));
34 | };
35 | };
36 |
--------------------------------------------------------------------------------
/test/unit/pts/helpers/promise-transform-stream-ifca.js:
--------------------------------------------------------------------------------
1 | "use strict";
2 |
3 | const { Readable, Duplex } = require("stream");
4 | const DefaultHighWaterMark = require("os").cpus().length * 2;
5 | const { IFCA } = require("../../../../src/ifca");
6 | const { trace } = require("../../../../src/utils");
7 |
8 | let seq = 0;
9 |
10 | const rename = (ob, fr, to) => {
11 | if (ob[fr]) {
12 | ob[to] = ob[fr];
13 | delete ob[fr];
14 | }
15 | };
16 |
17 | const checkOptions = (options) => {
18 | rename(options, "parallelRead", "promiseRead");
19 | rename(options, "parallelWrite", "promiseWrite");
20 | rename(options, "parallelTransform", "promiseTransform");
21 | rename(options, "flushPromise", "promiseFlush");
22 |
23 | if (["promiseRead", "promiseWrite", "promiseTransform"].reduce((acc, key) => (acc += options[key] ? 1 : 0), 0) > 1)
24 | throw new Error("Scramjet stream can be either Read, Write or Transform");
25 | };
26 |
27 | class PromiseTransformStream extends Duplex {
28 | constructor(options = {}) {
29 | // Destructuring. Remove read and write
30 | const { read, write, ...rest } = options;
31 | const newOptions = Object.assign(
32 | {
33 | objectMode: true,
34 | promiseRead: null,
35 | promiseWrite: null,
36 | promiseTransform: null,
37 | promiseFlush: null,
38 | beforeTransform: null,
39 | afterTransform: null,
40 | },
41 | rest
42 | );
43 | checkOptions(newOptions);
44 |
45 | super(newOptions);
46 |
47 | this._tapped = false;
48 |
49 | this._error_handlers = [];
50 | this._scramjet_options = {
51 | referrer: options.referrer,
52 | constructed: new Error().stack,
53 | };
54 |
55 | this.seq = seq++;
56 |
57 | this.setMaxListeners(DefaultHighWaterMark);
58 | this.setOptions(newOptions);
59 | trace("NEW OPTIONS BEFORE IF:");
60 | trace(newOptions);
61 |
62 | // IFCA
63 | this.ifca = new IFCA(newOptions.maxParallel, newOptions.promiseTransform);
64 |
65 | if (read) {
66 | return Readable.from(read()).pipe(this);
67 | }
68 | }
69 |
70 | setOptions(...options) {
71 | Object.assign(this._scramjet_options, ...options);
72 |
73 | if (this._scramjet_options.maxParallel) this.setMaxListeners(this._scramjet_options.maxParallel);
74 |
75 | if (this._flushed) {
76 | options.forEach(({ promiseFlush }) =>
77 | Promise.resolve()
78 | .then(promiseFlush)
79 | .catch((e) => this.raise(e))
80 | );
81 | }
82 |
83 | return this;
84 | }
85 |
86 | pushTransform(options) {
87 | trace("PTS.pushTransform... options:");
88 | trace(JSON.stringify(options));
89 | if (typeof options.promiseTransform === "function") {
90 | this.ifca.addTransform(options.promiseTransform);
91 | }
92 |
93 | if (typeof options.promiseFlush === "function") {
94 | if (this._scramjet_options.runFlush) {
95 | throw new Error("Promised Flush cannot be overwritten!");
96 | } else {
97 | this._scramjet_options.runFlush = options.promiseFlush;
98 | }
99 | }
100 |
101 | return this;
102 | }
103 |
104 | /**
105 | * Add TransformFunction to PromiseTransformStream
106 | *
107 | * @param {TransformFunction} transform
108 | * @returns {PromiseTransformStream}
109 | */
110 | addTransform(transform) {
111 | this.ifca.addTransform(transform);
112 | return this;
113 | }
114 |
115 | /**
116 | * Add TransformErrorHandler to PromiseTransformStream
117 | *
118 | * @param {TransformErrorHandler} handler
119 | * @returns {PromiseTransformStream}
120 | */
121 | addErrorHandler(handler) {
122 | this.ifca.addErrorHandler(handler);
123 | return this;
124 | }
125 |
126 | async _final(callback) {
127 | trace("PTS-IFCA FINAL");
128 | await this.ifca.end();
129 | callback();
130 | }
131 |
132 | async _write(data, encoding, callback) {
133 | trace("PTS-IFCA WRITE data:" + JSON.stringify(data));
134 | await this.ifca.write(data);
135 | callback();
136 | }
137 |
138 | /**
139 | * https://nodejs.org/api/stream.html#stream_writable_writev_chunks_callback
140 | *
141 | * @param {Object[]} chunks
142 | * @param {Function} callback
143 | */
144 | async _writev(chunks, callback) {
145 | trace("WRITEV chunks: " + JSON.stringify(chunks));
146 |
147 | await this.ifca.writev(chunks.map((o) => o.chunk));
148 | callback();
149 | }
150 |
151 | /**
152 | *
153 | * @param {integer} size
154 | */
155 | async _read(size) {
156 | trace("PTS-IFCA _read size: " + size);
157 |
158 | const result = await this.ifca.read();
159 | trace("PTS.read result: " + JSON.stringify(result));
160 | this.push(result);
161 | }
162 |
163 | /**
164 | * Dummy generator used for testing.
165 | * TODO: Remove later
166 | *
167 | */
168 | *getGenerator() {
169 | const results = [1, 3, 5, 9, 11, 13, 15, 17, 19];
170 | console.log("INSIDE GENERATOR");
171 | let index = 0;
172 | while (true) {
173 | if (index === 9) throw new Error("Dummy Error");
174 | yield new Promise((resolve) => {
175 | setTimeout(() => {
176 | resolve(results[index++]);
177 | }, 100);
178 | });
179 | }
180 | }
181 | }
182 |
183 | module.exports = {
184 | PromiseTransformStream,
185 | };
186 |
--------------------------------------------------------------------------------
/test/unit/pts/helpers/promise-transform-stream.js:
--------------------------------------------------------------------------------
1 | const { Transform, Readable } = require("stream");
2 | const { EventEmitter } = require("events");
3 | const DefaultHighWaterMark = require("os").cpus().length * 2;
4 |
5 | const filter = Symbol("FILTER");
6 | const plgctor = Symbol("plgctor");
7 | const storector = Symbol("storector");
8 |
9 | let seq = 0;
10 |
11 | const shared = { filter, DefaultHighWaterMark, plgctor, storector };
12 | const mkTransform = require("./mk-transform")(shared);
13 | // const mkRead = require("./mk-read")(shared);
14 | // const mkWrite = require("./mk-write")(shared);
15 | const { StreamError } = require("./stream-errors");
16 | const { trace } = require("../../../../src/utils");
17 |
18 | const rename = (ob, fr, to) => {
19 | if (ob[fr]) {
20 | ob[to] = ob[fr];
21 | delete ob[fr];
22 | }
23 | };
24 |
25 | const checkOptions = (options) => {
26 | rename(options, "parallelRead", "promiseRead");
27 | rename(options, "parallelWrite", "promiseWrite");
28 | rename(options, "parallelTransform", "promiseTransform");
29 | rename(options, "flushPromise", "promiseFlush");
30 |
31 | if (["promiseRead", "promiseWrite", "promiseTransform"].reduce((acc, key) => (acc += options[key] ? 1 : 0), 0) > 1)
32 | throw new Error("Scramjet stream can be either Read, Write or Transform");
33 | };
34 |
35 | /**
36 | * This class is an underlying class for all Scramjet streams.
37 | *
38 | * It allows creation of simple transform streams that use async functions for transforms, reading or writing.
39 | *
40 | * @internal
41 | * @extends stream.PassThrough
42 | */
43 | class PromiseTransformStream extends Transform {
44 | constructor(options = {}) {
45 | const newOptions = Object.assign(
46 | {
47 | objectMode: true,
48 | promiseRead: null,
49 | promiseWrite: null,
50 | promiseTransform: null,
51 | promiseFlush: null,
52 | beforeTransform: null,
53 | afterTransform: null,
54 | },
55 | options
56 | );
57 | checkOptions(newOptions);
58 |
59 | super(newOptions);
60 |
61 | this._tapped = false;
62 |
63 | this._error_handlers = [];
64 | this._scramjet_options = {
65 | referrer: options.referrer,
66 | constructed: new Error().stack,
67 | };
68 |
69 | this.seq = seq++;
70 |
71 | this.setMaxListeners(DefaultHighWaterMark);
72 | this.setOptions(newOptions);
73 | trace("NEW OPTIONS BEFORE IF:");
74 | trace(newOptions);
75 | //
76 | // if (newOptions.promiseRead) {
77 | // trace("PTS constructor READ");
78 | // this.type = "Read";
79 | // mkRead.call(this, newOptions);
80 | // this.tap();
81 | // } else if (newOptions.promiseWrite) {
82 | // this.type = "Write";
83 | // mkWrite.call(this, newOptions);
84 | // } else if (newOptions.transform || !newOptions.promiseTransform) {
85 | // this.type = "Transform-";
86 | // this.tap();
87 | // } else {
88 | this.type = "Transform";
89 | trace("TRANSFORM...");
90 | // It's always false
91 | if (newOptions.promiseTransform && mkTransform.call(this, newOptions)) {
92 | // returns true if transform can be pushed to referring stream
93 | trace("RETURN AND PUSH TRANSFORM"); // Never executed
94 | return options.referrer.pushTransform(options);
95 | }
96 | // }
97 | // ^^^^^^^
98 | // What's this?
99 | // const pluginConstructors = this.constructor[plgctor].get();
100 | // if (pluginConstructors.length) {
101 | // let ret;
102 | // pluginConstructors.find((Ctor) => (ret = Ctor.call(this, options)));
103 |
104 | // if (typeof ret !== "undefined") {
105 | // return ret;
106 | // }
107 | // }
108 | }
109 |
110 | get name() {
111 | return `${this.constructor.name}(${this._options.name || this.seq})`;
112 | }
113 |
114 | set name(name) {
115 | this.setOptions({ name });
116 | }
117 |
118 | get constructed() {
119 | return this._scramjet_options.constructed;
120 | }
121 |
122 | get _options() {
123 | if (this._scramjet_options.referrer && this._scramjet_options.referrer !== this) {
124 | return Object.assign(
125 | { maxParallel: DefaultHighWaterMark },
126 | this._scramjet_options.referrer._options,
127 | this._scramjet_options
128 | );
129 | }
130 | return Object.assign({ maxParallel: DefaultHighWaterMark }, this._scramjet_options);
131 | }
132 |
133 | setOptions(...options) {
134 | Object.assign(this._scramjet_options, ...options);
135 |
136 | if (this._scramjet_options.maxParallel) this.setMaxListeners(this._scramjet_options.maxParallel);
137 |
138 | if (this._flushed) {
139 | options.forEach(({ promiseFlush }) =>
140 | Promise.resolve()
141 | .then(promiseFlush)
142 | .catch((e) => this.raise(e))
143 | );
144 | }
145 |
146 | return this;
147 | }
148 |
149 | setMaxListeners(value) {
150 | return super.setMaxListeners.call(this, value + EventEmitter.defaultMaxListeners);
151 | }
152 |
153 | static get [plgctor]() {
154 | const proto = Object.getPrototypeOf(this);
155 | return {
156 | ctors: (this[storector] = Object.prototype.hasOwnProperty.call(this, storector) ? this[storector] : []),
157 | get: () => (proto[plgctor] ? proto[plgctor].get().concat(this[storector]) : this[storector]),
158 | };
159 | }
160 |
161 | // async whenRead(count) {
162 | // trace("PTS.whenRead count: " + count);
163 | // return Promise.race([
164 | // new Promise((res) => {
165 | // const read = () => {
166 | // const ret = this.read(count);
167 | // if (ret !== null) {
168 | // return res(ret);
169 | // } else {
170 | // this.once("readable", read);
171 | // }
172 | // };
173 | // read();
174 | // }),
175 | // this.whenError(),
176 | // this.whenEnd(),
177 | // ]);
178 | // }
179 |
180 | // async whenDrained() {
181 | // return (
182 | // this._scramjet_drainPromise ||
183 | // (this._scramjet_drainPromise = new Promise((res, rej) =>
184 | // this.once("drain", () => {
185 | // this._scramjet_drainPromise = null;
186 | // res();
187 | // })
188 | // .whenError()
189 | // .then(rej)
190 | // ))
191 | // );
192 | // }
193 |
194 | async whenWrote(...data) {
195 | let ret;
196 | for (var item of data) ret = this.write(item);
197 |
198 | if (ret) {
199 | return;
200 | } else {
201 | return this.whenDrained();
202 | }
203 | }
204 |
205 | async whenError() {
206 | return (
207 | this._scramjet_errPromise ||
208 | (this._scramjet_errPromise = new Promise((res) => {
209 | this.once("error", (e) => {
210 | this._scramjet_errPromise = null;
211 | res(e);
212 | });
213 | }))
214 | );
215 | }
216 |
217 | async whenEnd() {
218 | return (
219 | this._scramjet_endPromise ||
220 | (this._scramjet_endPromise = new Promise((res, rej) => {
221 | this.whenError().then(rej);
222 | this.on("end", () => res());
223 | }))
224 | );
225 | }
226 |
227 | async whenFinished() {
228 | return (
229 | this._scramjet_finishPromise ||
230 | (this._scramjet_finishPromise = new Promise((res, rej) => {
231 | this.whenError().then(rej);
232 | this.on("finish", () => res());
233 | }))
234 | );
235 | }
236 |
237 | catch(callback) {
238 | this._error_handlers.push(callback);
239 | return this;
240 | }
241 |
242 | async raise(err, ...args) {
243 | return this._error_handlers
244 | .reduce(
245 | (promise, handler) =>
246 | promise.catch((lastError) =>
247 | handler(
248 | lastError instanceof StreamError
249 | ? lastError
250 | : new StreamError(lastError, this, err.code, err.chunk),
251 | ...args
252 | )
253 | ),
254 | Promise.reject(err)
255 | )
256 | .catch((err) => this.emit("error", err, ...args));
257 | }
258 |
259 | // pipe(to, options) {
260 | // trace("PTS.pipe()");
261 | // if (to === this) {
262 | // return this;
263 | // }
264 |
265 | // if (this !== to && to instanceof PromiseTransformStream) {
266 | // to.setOptions({ referrer: this });
267 | // this.on("error", (err) => to.raise(err));
268 | // this.tap().catch(async (err, ...args) => {
269 | // await to.raise(err, ...args);
270 | // return filter;
271 | // });
272 | // } else if (to instanceof Readable) {
273 | // this.on("error", (...err) => to.emit("error", ...err));
274 | // }
275 |
276 | // return super.pipe(to, options || { end: true });
277 | // }
278 |
279 | graph(func) {
280 | let referrer = this;
281 | const ret = [];
282 | while (referrer) {
283 | ret.push(referrer);
284 | referrer = referrer._options.referrer;
285 | }
286 | func(ret);
287 | return this;
288 | }
289 |
290 | tap() {
291 | trace("PTS.tap()");
292 | this._tapped = true;
293 | return this;
294 | }
295 |
296 | dropTransform(transform) {
297 | if (!this._scramjet_options.transforms) {
298 | if (!this._transform.currentTransform) return this;
299 |
300 | this._transform = this._transform.currentTransform;
301 | return this;
302 | }
303 | let i = 0;
304 | while (i++ < 1000) {
305 | const x = this._scramjet_options.transforms.findIndex((t) => t.ref === transform);
306 | if (x > -1) {
307 | this._scramjet_options.transforms.splice(x, 1);
308 | } else {
309 | return this;
310 | }
311 | }
312 | throw new Error("Maximum remove attempt count reached!");
313 | }
314 |
315 | pushTransform(options) {
316 | trace("PTS.pushTransform... options:");
317 | trace(options);
318 | if (typeof options.promiseTransform === "function") {
319 | if (!this._scramjet_options.transforms) {
320 | this._pushedTransform = options.promiseTransform;
321 | return this;
322 | }
323 |
324 | const markTransform = (bound) => {
325 | bound.ref = options.promiseTransform;
326 | return bound;
327 | };
328 |
329 | const before = typeof options.beforeTransform === "function";
330 | const after = typeof options.afterTransform === "function";
331 |
332 | if (before) this._scramjet_options.transforms.push(markTransform(options.beforeTransform.bind(this)));
333 |
334 | if (after)
335 | this._scramjet_options.transforms.push(
336 | markTransform(async (chunk) =>
337 | options.afterTransform.call(this, chunk, await options.promiseTransform.call(this, chunk))
338 | )
339 | );
340 | else this._scramjet_options.transforms.push(markTransform(options.promiseTransform.bind(this)));
341 | }
342 |
343 | if (typeof options.promiseFlush === "function") {
344 | if (this._scramjet_options.runFlush) {
345 | throw new Error("Promised Flush cannot be overwritten!");
346 | } else {
347 | this._scramjet_options.runFlush = options.promiseFlush;
348 | }
349 | }
350 |
351 | return this;
352 | }
353 |
354 | _selfInstance(...args) {
355 | return new this.constructor(...args);
356 | }
357 |
358 | // async _transform(chunk, encoding, callback) {
359 | // trace("PTS._transform. chunk: " + JSON.stringify(chunk));
360 | // trace(callback);
361 | // if (!this._delayed_first) {
362 | // await new Promise((res) => res());
363 | // this._delayed_first = 1;
364 | // }
365 |
366 | // try {
367 | // if (this._pushedTransform) chunk = await this._pushedTransform(chunk);
368 | // callback(null, chunk);
369 | // } catch (err) {
370 | // callback(err);
371 | // }
372 | // }
373 |
374 | _flush(callback) {
375 | trace("PTS._flush");
376 | const last = Promise.resolve();
377 |
378 | if (this._scramjet_options.runFlush) {
379 | last.then(this._scramjet_options.runFlush).then(
380 | (data) => {
381 | if (Array.isArray(data)) data.forEach((item) => this.push(item));
382 | else if (data) this.push(data);
383 |
384 | callback();
385 | },
386 | (e) => this.raise(e)
387 | );
388 | } else {
389 | last.then(() => callback());
390 | }
391 | }
392 |
393 | static get filter() {
394 | return filter;
395 | }
396 | }
397 |
398 | module.exports = {
399 | plgctor: plgctor,
400 | PromiseTransformStream,
401 | };
402 |
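403 | // Illustrative usage sketch: a minimal, non-authoritative combination of the
404 | // helpers above, following the constructor options used in order.spec.js.
405 | // Note that whenWrote() falls back to whenDrained(), which is commented out
406 | // above, so this sketch keeps its writes within maxParallel:
407 | //
408 | // const stream = new PromiseTransformStream({ maxParallel: 4, promiseTransform: (x) => x });
409 | // stream.pushTransform({ promiseTransform: async (chunk) => chunk * 2 });
410 | // stream.catch((err) => console.error("handled:", err)); // invoked via raise()
411 | // await stream.whenWrote(1, 2, 3); // resolves immediately while the buffer has room
412 | // stream.end();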
--------------------------------------------------------------------------------
/test/unit/pts/helpers/stream-errors.js:
--------------------------------------------------------------------------------
1 | const os = require("os");
2 | const combineStack = (stack, ...errors) => {
3 | return errors.reduce(
4 | (stack, trace) => {
5 | if (!trace) return stack;
6 | if (trace.indexOf("\n") >= 0) {
7 | return stack + os.EOL + trace.slice(trace.indexOf("\n") + 1);
8 | }
9 |
10 | return stack + os.EOL + trace;
11 | },
12 | stack
13 | );
14 | };
15 |
16 | class StreamError extends Error {
17 |
18 | constructor(cause, stream, code = "GENERAL", chunk = null) {
19 | code = cause.code || code;
20 | stream = cause.stream || stream;
21 | chunk = cause.chunk || chunk;
22 |
23 | super(cause.message);
24 |
25 | if (cause instanceof StreamError)
26 | return cause;
27 |
28 | this.chunk = chunk;
29 | this.stream = stream;
30 | this.code = "ERR_SCRAMJET_" + code;
31 | this.cause = cause;
32 |
33 | const stack = this.stack;
34 | Object.defineProperty(this, "stack", {
35 | get: function () {
36 | return combineStack(
37 | stack,
38 | " caused by:",
39 | cause.stack,
40 | ` --- raised in ${stream.name} constructed ---`,
41 | stream.constructed
42 | );
43 | }
44 | });
45 |
46 | /** Needed to fix babel errors. */
47 | this.constructor = StreamError;
48 | this.__proto__ = StreamError.prototype;
49 | }
50 |
51 | }
52 |
53 | /**
54 | * Stream errors class
55 | *
56 | * @module scramjet/errors
57 | * @prop {Class} StreamError
58 | * @prop {Function} combineStack
59 | */
60 | module.exports = {StreamError, combineStack};
61 |
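62 | // Illustrative usage sketch (an assumption mirroring how raise() in
63 | // promise-transform-stream.js wraps errors; `stream` and `chunk` are
64 | // hypothetical placeholders):
65 | //
66 | // const err = new StreamError(new Error("boom"), stream, "TRANSFORM", chunk);
67 | // err.code; // "ERR_SCRAMJET_TRANSFORM"
68 | // err.stack; // own stack + " caused by:" + the cause's stack, via combineStack()
69 | //
70 | // Wrapping an existing StreamError returns it unchanged (see the constructor above).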
--------------------------------------------------------------------------------
/test/unit/pts/helpers/utils.js:
--------------------------------------------------------------------------------
1 | const {dirname, resolve} = require("path");
2 |
3 | /** @ignore */
4 | const getCalleeDirname = function(depth) {
5 | const p = Error.prepareStackTrace;
6 | Error.prepareStackTrace = (dummy, stack) => stack;
7 | const e = new Error();
8 | Error.captureStackTrace(e, arguments.callee);
9 | const stack = e.stack;
10 | Error.prepareStackTrace = p;
11 | return dirname(stack[depth].getFileName());
12 | };
13 |
14 | const resolveCalleeRelative = function(depth, ...relatives) {
15 | return resolve(getCalleeDirname(depth + 1), ...relatives);
16 | };
17 |
18 | /** @ignore */
19 | const resolveCalleeBlackboxed = function() {
20 | const p = Error.prepareStackTrace;
21 | Error.prepareStackTrace = (dummy, stack) => stack;
22 | const e = new Error();
23 | Error.captureStackTrace(e, arguments.callee);
24 | const stack = e.stack;
25 | Error.prepareStackTrace = p;
26 |
27 | let pos = stack.find(entry => entry.getFileName().indexOf(resolve(__dirname, "..")) === -1);
28 |
29 | return resolve(dirname(pos.getFileName()), ...arguments);
30 | };
31 |
32 | /**
33 | * @external AsyncGeneratorFunction
34 | */
35 | let AsyncGeneratorFunction = function() {};
36 | try {
37 | AsyncGeneratorFunction = require("./async-generator-constructor");
38 | } catch (e) {} // eslint-disable-line
39 |
40 | /**
41 | * @external GeneratorFunction
42 | */
43 | const GeneratorFunction = Object.getPrototypeOf(function*(){}).constructor;
44 |
45 | /** @ignore */
46 | const pipeIfTarget = (stream, target) => (target ? stream.pipe(target) : stream);
47 |
48 | /** @ignore */
49 | const pipeThen = async (func, target) => Promise
50 | .resolve()
51 | .then(func)
52 | .then(x => x.pipe(target))
53 | .catch(e => target.raise(e));
54 |
55 | /**
56 | * @external stream.PassThrough
57 | * @see https://nodejs.org/api/stream.html#stream_class_stream_passthrough
58 | */
59 |
60 | module.exports = {
61 | AsyncGeneratorFunction,
62 | GeneratorFunction,
63 | getCalleeDirname,
64 | resolveCalleeRelative,
65 | resolveCalleeBlackboxed,
66 | pipeIfTarget,
67 | pipeThen
68 | };
69 |
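70 | // Illustrative examples (assumptions inferred from the implementations above;
71 | // `source`, `target` and `makeStream` are hypothetical placeholders):
72 | //
73 | // getCalleeDirname(0); // directory of the file that called this helper
74 | // resolveCalleeRelative(0, "fixtures", "a.txt"); // resolved against the caller's directory
75 | // pipeIfTarget(source, target); // pipes only when a target is given, else returns source
76 | // pipeThen(() => makeStream(), target); // pipes the resolved stream; failures go to target.raise()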
--------------------------------------------------------------------------------
/test/unit/pts/order.spec.js:
--------------------------------------------------------------------------------
1 | const test = require("ava");
2 | const { Readable } = require("stream");
3 | const { trace } = require("../../../src/utils");
4 |
5 | const { performance } = require("perf_hooks");
6 |
7 | const { PromiseTransformStream: PromiseTransformStreamIFCA } = require("./helpers/promise-transform-stream-ifca");
8 | const { PromiseTransformStream } = require("./helpers/promise-transform-stream");
9 |
10 | /**
11 | * Memory snapshot interval in milliseconds; defines how often memory usage is sampled.
12 | */
13 | const MEM_SNAPSHOT_INTERVAL = 10;
14 |
15 | /**
16 | * Push transform to PromiseTransformStream
17 | *
18 | * @param {PromiseTransformStream} str
19 | * @param {Function} promiseTransform
20 | */
21 | function pushTransformToStreamPTS(str, promiseTransform) {
22 | str.pushTransform({
23 | promiseTransform,
24 | });
25 | }
26 |
27 | /**
28 | * Get stream
29 | *
30 | * @param {PromiseTransformStream} PromiseTransformStream The PromiseTransformStream implementation to inject
31 | * @param {Function} promiseTransform
32 | * @param {number} maxParallel
33 | * @returns {PromiseTransformStream}
34 | */
35 | function getStreamInOrderPTS(PromiseTransformStream, promiseTransform, maxParallel) {
36 | trace("FN getStreamInOrderPTS:");
37 | trace(promiseTransform);
38 |
39 | return new PromiseTransformStream({
40 | maxParallel,
41 | promiseTransform,
42 | });
43 | }
44 |
45 | /**
46 | * Sleep helper function
47 | *
48 | * @param {number} ms
49 | * @returns {Promise}
50 | */
51 | function sleep(ms) {
52 | return new Promise((resolve) => setTimeout(resolve, ms));
53 | }
54 |
55 | /**
56 | * Common test code that allows injecting a PTS algorithm and measures execution time.
57 | *
58 | * @param {PromiseTransformStream} pts PromiseTransformStream
59 | * @param {String} name Test Name
60 | * @param {ExecutionContext} t Ava ExecutionContext
61 | */
62 | async function code(pts, name, t) {
63 | await sleep(1000);
64 |
65 | let rss = 0;
66 | const executionStartTime = performance.now();
67 | const memSnapshotTimer = setInterval(() => {
68 | const memoryUsage = process.memoryUsage();
69 | if (rss < memoryUsage.rss) rss = memoryUsage.rss;
70 | }, MEM_SNAPSHOT_INTERVAL);
71 | let a = 0;
72 | let x = 0;
73 | let y = 0;
74 | let z = 0;
75 |
76 | /**
77 | * How many elements should be tested
78 | */
79 | const ELEMENTS = 16;
80 |
81 | /**
82 | * How many items can be waiting to be flushed
83 | */
84 | const MAX_PARALLEL = 8;
85 |
86 | /**
87 | * Create input array
88 | */
89 | const input = Array.from(Array(ELEMENTS).keys()).map(() => {
90 | return { a: a++ };
91 | });
92 |
93 | const asyncPromiseTransform = async ({ a }) => {
94 | if (!(a % 2)) return { a, n: 0, x: x++ };
95 | return new Promise((res) => setTimeout(() => res({ a, n: 1, x: x++ }), 2000));
96 | };
97 | const syncPromiseTransform = ({ a, n, x }) => ({ a, n, x, y: y++ });
98 | const syncPromiseTransform2 = ({ a, n, x, y }) => ({ a, n, x, y, z: z++ });
99 |
100 | trace("Running with: ", { MAX_PARALLEL, ELEMENTS });
101 |
102 | const str = Readable.from(input).pipe(getStreamInOrderPTS(pts, asyncPromiseTransform, MAX_PARALLEL));
103 | trace("PromiseTransformStream:");
104 | trace(str);
105 |
106 | pushTransformToStreamPTS(str, syncPromiseTransform);
107 |
108 | pushTransformToStreamPTS(str, syncPromiseTransform2);
109 |
110 | // await defer(2000); // This defers read after write
111 |
112 | /**
113 | * Current results:
114 | *
115 | * ```
116 | * { a: 0, n: 0, x: 0, y: 0, z: 0 }
117 | * { a: 1, n: 1, x: 5, y: 5, z: 5 }
118 | * { a: 2, n: 0, x: 1, y: 1, z: 1 }
119 | * { a: 3, n: 1, x: 7, y: 7, z: 7 }
120 | * { a: 4, n: 0, x: 2, y: 2, z: 2 }
121 | * { a: 5, n: 1, x: 9, y: 9, z: 9 }
122 | * { a: 6, n: 0, x: 3, y: 3, z: 3 }
123 | * { a: 7, n: 1, x: 11, y: 11, z: 11 }
124 | * { a: 8, n: 0, x: 4, y: 4, z: 4 }
125 | * { a: 9, n: 1, x: 12, y: 12, z: 12 }
126 | * { a: 10, n: 0, x: 6, y: 6, z: 6 }
127 | * { a: 11, n: 1, x: 13, y: 13, z: 13 }
128 | * { a: 12, n: 0, x: 8, y: 8, z: 8 }
129 | * { a: 13, n: 1, x: 14, y: 14, z: 14 }
130 | * { a: 14, n: 0, x: 10, y: 10, z: 10 }
131 | * { a: 15, n: 1, x: 15, y: 15, z: 15 }
132 | * ```
133 | */
134 | let b = 0;
135 | for await (const result of str) {
136 | console.error("RESULT: " + JSON.stringify(result));
137 | t.is(result.a, b++, "Should work in order");
138 | t.is(result.y, result.z, "Should work in order");
139 | t.is(result.x, result.y, "Should work out of order");
140 | if (result.a > MAX_PARALLEL / 2 && result.a !== ELEMENTS - 1)
141 | t.not(result.a, result.x, `Should not be chained ${result.a}, ${result.x}`);
142 | }
143 | const executionEndTime = performance.now();
144 | clearInterval(memSnapshotTimer);
145 | console.log(
146 | `${name} Time taken: ${(executionEndTime - executionStartTime) / 1000}s Memory: ${rss / 1024 / 1024}MB`
147 | );
148 | }
149 |
150 | // Run tests serially, not concurrently. Increase the `repeat` value to measure average execution time over multiple runs.
151 | const algorithms = [
152 | { name: "IFCA", class: PromiseTransformStreamIFCA, repeat: 1 },
153 | { name: "MK-TRANSFORM", class: PromiseTransformStream, repeat: 1 },
154 | ];
155 |
156 | for (let algo of algorithms) {
157 | for (let count = 1; count <= algo.repeat; count++) {
158 | test.skip(`KM1.3 ${algo.name} run: ${count}`, code.bind(null, algo.class, `${algo.name}-${count}`));
159 | }
160 | }
161 |
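162 | // Reading the assertions above: `a` tracks input order, `x` increments as the
163 | // async transform settles (completion order), and `y`/`z` increment in the two
164 | // chained sync transforms. The expected-results block inside code() shows even
165 | // chunks (resolved instantly) settling before the 2s-delayed odd ones, which is
166 | // what the t.not(result.a, result.x) assertion relies on.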
--------------------------------------------------------------------------------
/test/unit/pts/scribble.spec.js:
--------------------------------------------------------------------------------
1 | const test = require("ava");
2 | const { PromiseTransformStream } = require("./helpers/promise-transform-stream-ifca");
3 |
4 | /**
5 | * How many items can be waiting to be flushed
6 | */
7 | const MAX_PARALLEL = 8;
8 |
9 | const gen = function* () {
10 | let i = 0;
11 | while (i < 25) yield { a: i++ };
12 | };
13 |
14 | const databaseSave = (x) => {
15 | console.log("DATABASE SAVE x: " + JSON.stringify(x));
16 | return new Promise((res) => setTimeout(() => res(x.b), 100));
17 | };
18 |
19 | test.skip("Error Handler", async (t) => {
20 | const str = new PromiseTransformStream({
21 | read: gen,
22 | maxParallel: MAX_PARALLEL,
23 | promiseTransform: (x) => (x.a % 2 ? x : Promise.reject(undefined)),
24 | })
25 | // If undefined don't move to the next step
26 | .addTransform((x) => {
27 | console.log("TRANSFORM - remove undefined: x: ");
28 | console.log(x);
29 | return {
30 | b: x.a,
31 | };
32 | })
33 | .addTransform((x) => {
34 | if (x.b % 10 === 7) {
35 | console.log("HANDLE ERROR");
36 | throw new Error("This should be handled"); // 7 and 17 throws Error
37 | }
38 | console.log("SECOND TRANSFORM RETURN x: " + JSON.stringify(x));
39 |
40 | return x;
41 | })
42 | .addErrorHandler((err, x) => {
43 | // TODO: Add addHandler
44 | console.log("ERROR HANDLER x: " + JSON.stringify(x) + " err: " + JSON.stringify(err));
45 | if (x.a === 7 || x.a === 21) return Promise.reject(undefined);
46 | if (err) throw err;
47 | })
48 | .addTransform(
49 | (x) => {
50 | console.log("ANOTHER TRANSFORM x: " + JSON.stringify(x));
51 | if (x.b === 17) throw new Error("This should be handled");
52 | if (x.b === 21) throw new Error("This should not be handled");
53 | return x;
54 | },
55 | (err, x) => {
56 | if (x.a === 17) return x;
57 | throw err;
58 | } // TODO: add.
59 | // Only applies to if (x.b === 17) throw new Error("This should be handled");
60 | // This drills down to IFCA to add such an option.
61 | )
62 | .addTransform((x) => databaseSave(x));
63 |
64 | let items = [];
65 | try {
66 | for await (const chunk of str) {
67 | console.log("FOR AWAIT chunk: " + JSON.stringify(chunk));
68 | items.push(chunk);
69 | }
70 | t.fail("Should throw error on 21");
71 | } catch (e) {
72 | // TypeError: Cannot set property 'cause' of undefined - remove that element completely
73 | console.log(e);
74 | console.log(items);
75 | t.deepEqual(items, [1, 3, 5, 9, 11, 13, 15, 17, 19]); // 21 will go to the next catch
76 | }
77 | });
78 |
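79 | // Intended semantics sketched by this scribble (per the TODOs above; not yet
80 | // implemented): addErrorHandler() would catch errors thrown by the transforms
81 | // added before it, while a handler passed as the second argument to
82 | // addTransform() would apply to that transform only, recovering the
83 | // x.b === 17 throw but letting x.b === 21 propagate to the for-await consumer.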
--------------------------------------------------------------------------------
/test/unit/streams/data/batch.spec.ts:
--------------------------------------------------------------------------------
1 | import test from "ava";
2 | import { DataStream } from "../../../../src/streams/data-stream";
3 | import { deferReturn } from "../../../_helpers/utils";
4 |
5 | test("DataStream batch can make sentences from words", async (t) => {
6 | const result = await DataStream
7 | .from(["foo", "bar.", "baz", "bax", ".", "foo"])
8 | .batch(chunk => chunk.endsWith("."))
9 | .toArray();
10 |
11 | t.deepEqual(result, [["foo", "bar."], ["baz", "bax", "."], ["foo"]]);
12 | });
13 |
14 | test("DataStream batch can make sentences from words (async)", async (t) => {
15 | const result = await DataStream
16 | .from(["foo", "bar.", "baz", "bax", ".", "foo"])
17 | .batch(async (chunk) => deferReturn(5, chunk.endsWith(".")))
18 | .toArray();
19 |
20 | t.deepEqual(result, [["foo", "bar."], ["baz", "bax", "."], ["foo"]]);
21 | });
22 |
23 | test("DataStream batch can bu used to batch by amount (via variadic arg counter)", async (t) => {
24 | const result = await DataStream
25 | .from([0, 1, 2, 3, 4, 5, 6, 7, 8, 9])
26 | .batch((chunk, counter) => { counter.i++; return counter.i % 3 === 0; }, { i: 0 })
27 | .toArray();
28 |
29 | t.deepEqual(result, [[0, 1, 2], [3, 4, 5], [6, 7, 8], [9]]);
30 | });
31 |
32 | test("DataStream batch does not deep copy chunks", async (t) => {
33 | const input = [{ id: 0, data: "foo" }, { id: 1, data: "bar" }, { id: 2, data: "baz" }, { id: 3, data: "bax" }];
34 | const result = await DataStream
35 | .from(input)
36 | .batch((chunk) => chunk.id % 2 !== 0)
37 | .toArray();
38 |
39 | t.deepEqual(result, [[{ id: 0, data: "foo" }, { id: 1, data: "bar" }], [{ id: 2, data: "baz" }, { id: 3, data: "bax" }]]);
40 |
41 | input[0].data = "changed1";
42 | input[3].data = "changed2";
43 |
44 | t.deepEqual(result, [[{ id: 0, data: "changed1" }, { id: 1, data: "bar" }], [{ id: 2, data: "baz" }, { id: 3, data: "changed2" }]]);
45 | });
46 |
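47 | // Semantics exercised above: batch() closes the current group on (and including)
48 | // the chunk for which the predicate returns true, emits any trailing partial
49 | // group when the stream ends, and groups hold references to the original chunks
50 | // rather than deep copies.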
--------------------------------------------------------------------------------
/test/unit/streams/data/creation.spec.ts:
--------------------------------------------------------------------------------
1 | import test from "ava";
2 | import { createReadStream } from "fs";
3 | import { defer } from "../../../_helpers/utils";
4 | import { DataStream } from "../../../../src/streams/data-stream";
5 |
6 | test("DataStream can be constructed", (t) => {
7 | const dsNumber = new DataStream();
8 | const dsString = new DataStream();
9 |
10 | t.true(dsNumber instanceof DataStream);
11 | t.true(dsString instanceof DataStream);
12 | });
13 |
14 | test("DataStream can be created via static from method", (t) => {
15 | const dsNumber = DataStream.from([1, 2, 3, 4]);
16 | const dsString = DataStream.from(["1", "2", "3", "4"]);
17 | const dsAny = DataStream.from([1, 2, "3", "4"]);
18 |
19 | t.true(dsNumber instanceof DataStream);
20 | t.true(dsString instanceof DataStream);
21 | t.true(dsAny instanceof DataStream);
22 | });
23 |
24 | test("DataStream can be created from an empty iterable", async (t) => {
25 | const input: number[] = [];
26 | const dsNumber = DataStream.from(input);
27 | const result = await dsNumber.toArray();
28 |
29 | t.deepEqual(result, input);
30 | });
31 |
32 | test("DataStream can read from iterable", async (t) => {
33 | const input = [1, 2, 3, 4, 5, 6, 7, 8];
34 | const dsNumber = DataStream.from(input);
35 | const result = await dsNumber.toArray();
36 |
37 | t.deepEqual(result, input);
38 | });
39 |
40 | test("DataStream can read from generator (iterable)", async (t) => {
41 | function* numbers() {
42 | for (let i = 0; i < 8; i++) {
43 | yield i;
44 | }
45 | }
46 |
47 | const dsNumber = DataStream.from(numbers());
48 | const result = await dsNumber.toArray();
49 |
50 | t.deepEqual(result, [0, 1, 2, 3, 4, 5, 6, 7]);
51 | });
52 |
53 | test("DataStream can read from async iterable", async (t) => {
54 | const words = {
55 | async*[Symbol.asyncIterator]() {
56 | yield "foo";
57 | yield "bar";
58 | yield "baz";
59 | yield "bax";
60 | yield "foo2";
61 | yield "bar2";
62 | yield "baz2";
63 | yield "bax2";
64 | }
65 | };
66 | const dsWords = DataStream.from<string, DataStream<string>>(words);
67 | const result = await dsWords.toArray();
68 |
69 | t.deepEqual(result, ["foo", "bar", "baz", "bax", "foo2", "bar2", "baz2", "bax2"]);
70 | });
71 |
72 | test("DataStream can read from another scramjet stream", async (t) => {
73 | const dsNumber = DataStream.from([1, 2, 3, 4]);
74 | const dsNumber2 = DataStream.from(dsNumber);
75 | const result = await dsNumber2.toArray();
76 |
77 | t.deepEqual(result, [1, 2, 3, 4]);
78 | });
79 |
80 | test("DataStream can read from readable", async (t) => {
81 | const readable = createReadStream("./build/test/_assets/sample.txt", "utf8");
82 | const dsString = DataStream.from<string, DataStream<string>>(readable);
83 | const result = await dsString.toArray();
84 |
85 | t.deepEqual(result, ["foo\nbar\nbaz\nbax\n"]);
86 | });
87 |
88 | test("DataStream can be constructed from file", async (t) => {
89 | const ds = DataStream.fromFile<string, DataStream<string>>("./build/test/_assets/sample.txt", { readStream: { encoding: "utf8" } });
90 |
91 | t.true(ds instanceof DataStream);
92 |
93 | const result = await ds.toArray();
94 |
95 | t.deepEqual(result, ["foo\nbar\nbaz\nbax\n"]);
96 | });
97 |
98 | test("DataStream will not start reading until 'output' transfomration is called (generator)", async (t) => {
99 | let startedReading = false;
100 |
101 | function* numbers() {
102 | for (let i = 0; i < 8; i++) {
103 | startedReading = true;
104 | yield i;
105 | }
106 | }
107 |
108 | const dsNumber = DataStream.from(numbers());
109 |
110 | t.false(startedReading);
111 |
112 | await dsNumber.toArray();
113 |
114 | t.true(startedReading);
115 | });
116 |
117 | test("DataStream will not start reading until 'output' transfomration is called (readable)", async (t) => {
118 | const readable = createReadStream("./build/test/_assets/sample.txt", "utf8");
119 | const dsString = DataStream.from<string, DataStream<string>>(readable);
120 |
121 | // Since the readable will be read all at once, if it has not ended, reading has not started yet.
122 | t.false(readable.readableEnded);
123 |
124 | await dsString.toArray();
125 |
126 | t.true(readable.readableEnded);
127 | });
128 |
129 | test("DataStream can be paused and resumed", async (t) => {
130 | const ref: any = {};
131 | const yielded: number[] = [];
132 |
133 | function* numbers() {
134 | for (let i = 0; i < 8; i++) {
135 | yield i;
136 | yielded.push(i);
137 | if (i % 3 === 1) {
138 | ref.dataStream.pause();
139 | }
140 | }
141 | }
142 |
143 | const dsNumber = DataStream.from(numbers());
144 |
145 | ref.dataStream = dsNumber as any;
146 |
147 | t.deepEqual(yielded, []);
148 |
149 | const resultPromise = dsNumber.toArray();
150 |
151 | await defer(0);
152 |
153 | t.deepEqual(yielded, [0, 1]);
154 |
155 | dsNumber.resume();
156 |
157 | await defer(0);
158 |
159 | t.deepEqual(yielded, [0, 1, 2, 3, 4]);
160 |
161 | dsNumber.resume();
162 |
163 | await defer(0);
164 |
165 | t.deepEqual(yielded, [0, 1, 2, 3, 4, 5, 6, 7]);
166 |
167 | const result = await resultPromise;
168 |
169 | t.deepEqual(result, [0, 1, 2, 3, 4, 5, 6, 7]);
170 | });
171 |
172 | test("Transforming intermediate streams throws an error (first stream)", async (t) => {
173 | const stream = new DataStream()
174 | .map(chunk => `foo${ chunk }`);
175 |
176 | stream.batch(chunk => chunk.endsWith("1"))
177 | .map(chunk => chunk.join(""))
178 | .map(chunk => ({ value: chunk }));
179 |
180 | t.throws(() => stream.map(chunk => `foo${ chunk }`), { message: "Stream is not transformable." });
181 | });
182 |
183 | test("Transforming intermediate streams throws an error (middle stream)", async (t) => {
184 | const stream = new DataStream()
185 | .map(chunk => `foo${ chunk }`);
186 | const stream2 = stream.batch(chunk => chunk.endsWith("1"))
187 | .map(chunk => chunk.join(""));
188 |
189 | stream2.map(chunk => ({ value: chunk }));
190 |
191 | t.throws(() => stream2.map(chunk => ({ value: chunk })), { message: "Stream is not transformable." });
192 | });
193 |
194 | test("Pausing DataStream multiple times does not throw", async (t) => {
195 | const dsNumber = DataStream.from([1, 2, 3, 4, 5, 6, 7, 8, 9]);
196 |
197 | try {
198 | dsNumber.pause();
199 | dsNumber.pause();
200 |
201 | await defer(0);
202 |
203 | dsNumber.pause();
204 |
205 | } catch (err: any) {
206 | t.fail(err.message);
207 | } finally {
208 | t.pass();
209 | }
210 | });
211 |
212 | test("Resuming DataStream multiple times does not throw", async (t) => {
213 | const dsNumber = DataStream.from([1, 2, 3, 4, 5, 6, 7, 8, 9]);
214 |
215 | try {
216 | dsNumber.resume();
217 | dsNumber.resume();
218 |
219 | await defer(0);
220 |
221 | dsNumber.resume();
222 |
223 | } catch (err: any) {
224 | t.fail(err.message);
225 | } finally {
226 | t.pass();
227 | }
228 | });
229 |
230 | test("Pausing DataStream with parent stream multiple times does not throw", async (t) => {
231 | const dsNumber = DataStream.from([1, 2, 3, 4, 5, 6, 7, 8, 9]);
232 | const childStream = new DataStream({}, dsNumber);
233 |
234 | try {
235 | childStream.pause();
236 | childStream.pause();
237 |
238 | await defer(0);
239 |
240 | childStream.pause();
241 |
242 | } catch (err: any) {
243 | t.fail(err.message);
244 | } finally {
245 | t.pass();
246 | }
247 | });
248 |
249 | test("DataStream can be written to in multiple ways (internal API too)", async (t) => {
250 | const dsNumber = DataStream.from([1, 2, 3, 4, 5]);
251 | const dsNumber2 = new DataStream({});
252 |
253 | dsNumber2.write(0);
254 |
255 | (dsNumber2 as any).readSource(dsNumber);
256 |
257 | dsNumber2.resume();
258 |
259 | const result = await dsNumber2.toArray();
260 |
261 | t.deepEqual(result, [0, 1, 2, 3, 4, 5]);
262 | });
263 |
264 | test("DataStream sync reader can handle first sync chunk", async (t) => {
265 | const dsNumber = new DataStream({});
266 | const reader = (dsNumber as any).getReader(false, {
267 | onFirstChunkCallback: ((chunk: number) => chunk),
268 | onChunkCallback: ((chunk: number) => chunk),
269 | onEndCallback: (() => {
270 | t.pass();
271 | }),
272 | });
273 |
274 | const data = ["foo", "bar", null];
275 |
276 | let index = -1;
277 |
278 | (dsNumber as any).ifca.read = () => {
279 | index++;
280 | return data[index];
281 | };
282 |
283 | reader();
284 | });
285 |
286 | test("DataStream sync reader can handle null chunk at the beginning", async (t) => {
287 | const dsNumber = new DataStream({});
288 | const reader = (dsNumber as any).getReader(false, {
289 | onFirstChunkCallback: ((chunk: number) => chunk),
290 | onChunkCallback: ((chunk: number) => chunk),
291 | onEndCallback: (() => {
292 | t.pass();
293 | }),
294 | });
295 |
296 | (dsNumber as any).ifca.read = () => {
297 | return null;
298 | };
299 |
300 | reader();
301 | });
302 |
303 | test("DataStream async reader can handle first sync chunk", async (t) => {
304 | const dsNumber = new DataStream({});
305 | const reader = (dsNumber as any).getReaderAsyncCallback(false, {
306 | onFirstChunkCallback: ((chunk: number) => chunk),
307 | onChunkCallback: ((chunk: number) => chunk),
308 | onEndCallback: (() => {
309 | t.pass();
310 | }),
311 | });
312 |
313 | const data = ["foo", "bar", null];
314 |
315 | let index = -1;
316 |
317 | (dsNumber as any).ifca.read = () => {
318 | index++;
319 | return data[index];
320 | };
321 |
322 | reader();
323 | });
324 |
325 | test("DataStream async reader can handle null chunk at the beginning", async (t) => {
326 | const dsNumber = new DataStream({});
327 | const reader = (dsNumber as any).getReaderAsyncCallback(false, {
328 | onFirstChunkCallback: ((chunk: number) => chunk),
329 | onChunkCallback: ((chunk: number) => chunk),
330 | onEndCallback: (() => {
331 | t.pass();
332 | }),
333 | });
334 |
335 | (dsNumber as any).ifca.read = () => {
336 | return null;
337 | };
338 |
339 | reader();
340 | });
341 |
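342 | // A common thread in the tests above: DataStream sources are lazy. Nothing is
343 | // pulled from the underlying iterable or readable until an output method such as
344 | // toArray(), read() or pipe() is invoked, and pause()/resume() gate that pull
345 | // without throwing when called repeatedly.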
--------------------------------------------------------------------------------
/test/unit/streams/data/each.spec.ts:
--------------------------------------------------------------------------------
1 | import test from "ava";
2 | import { DataStream } from "../../../../src/streams/data-stream";
3 | import { defer } from "../../../_helpers/utils";
4 |
5 | test("DataStream each runs for every chunk in correct order (sync)", async (t) => {
6 | const result: number[] = [];
7 | const stream = DataStream
8 | .from([1, 2, 3, 4, 5, 6])
9 | .each(chunk => { result.push(chunk); });
10 |
11 | t.deepEqual(await stream.toArray(), [1, 2, 3, 4, 5, 6]);
12 | t.deepEqual(result, [1, 2, 3, 4, 5, 6]);
13 | });
14 |
15 | test("DataStream each runs for every chunk in correct order (async)", async (t) => {
16 | const result: number[] = [];
17 | const stream = DataStream
18 | .from([3, 2, 1, 6, 4, 5])
19 | .each(async (chunk, timeout) => { result.push(chunk); await defer(chunk * timeout); }, 10);
20 |
21 | t.deepEqual(await stream.toArray(), [3, 2, 1, 6, 4, 5]);
22 | t.deepEqual(result, [3, 2, 1, 6, 4, 5]);
23 | });
24 |
--------------------------------------------------------------------------------
/test/unit/streams/data/filter.spec.ts:
--------------------------------------------------------------------------------
1 | import test from "ava";
2 | import { DataStream } from "../../../../src/streams/data-stream";
3 |
4 | test("DataStream can filter chunks via sync callback", async (t) => {
5 | const dsNumber = DataStream.from([1, 2, 3, 4, 5, 6, 7]);
6 | const result = await dsNumber.filter(chunk => !!(chunk % 2)).toArray();
7 |
8 | t.deepEqual(result, [1, 3, 5, 7]);
9 | });
10 |
11 | test("DataStream can filter chunks via async callback", async (t) => {
12 | const dsString = DataStream.from(["foo", "bar", "baz", "bax"]);
13 | const result = await dsString.filter(async chunk => {
14 | return new Promise(res => {
15 | setTimeout(() => {
16 | res(!!chunk.startsWith("b"));
17 | }, 10);
18 | });
19 | }).toArray();
20 |
21 | t.deepEqual(result, ["bar", "baz", "bax"]);
22 | });
23 |
24 | test("DataStream can apply multiple filter transforms", async (t) => {
25 | const dsString = DataStream.from(["1", "2", "3", "4", "10", "20", "30", "40", "100", "200", "300", "400"]);
26 | const result = await dsString
27 | .filter(chunk => chunk.length < 3)
28 | .filter(async chunk => {
29 | return new Promise(res => {
30 | setTimeout(() => {
31 | res(!!(chunk.startsWith("1") || chunk.startsWith("2") || chunk.startsWith("3")));
32 | }, 10);
33 | });
34 | })
35 | .filter(chunk => chunk.length === 2)
36 | .toArray();
37 |
38 | t.deepEqual(result, ["10", "20", "30"]);
39 | });
40 |
41 | test("DataStream filter passes variadic args", async (t) => {
42 | const dsString = DataStream.from(["1", "2", "3", "4", "10", "20", "30", "40", "100", "200", "300", "400"]);
43 | const result = await dsString
44 | .filter((chunk, limiter) => chunk.length < limiter, 3)
45 | .filter(async (chunk, prefix, prefixes) => {
46 | return new Promise(res => {
47 | setTimeout(() => {
48 | res(!!(chunk.startsWith(prefix) || chunk.startsWith(prefixes[0]) || chunk.startsWith(prefixes[1])));
49 | }, 10);
50 | });
51 | }, "1", ["2", "3"])
52 | .filter(chunk => chunk.length === 2)
53 | .toArray();
54 |
55 | t.deepEqual(result, ["10", "20", "30"]);
56 | });
57 |
58 | test("DataStream filter passes typed variadic args", async (t) => {
59 | const dsString = DataStream.from(["1", "2", "3", "4", "10", "20", "30", "40", "100", "200", "300", "400"]);
60 | const result = await dsString
61 | .filter((chunk, limiter) => chunk.length < limiter, 3)
62 | .filter(async (chunk, prefix, prefixes) => {
63 | return new Promise(res => {
64 | setTimeout(() => {
65 | res(!!(chunk.startsWith(prefix) || chunk.startsWith(prefixes[0]) || chunk.startsWith(prefixes[1])));
66 | }, 10);
67 | });
68 | }, "1", ["2", "3"])
69 | .filter(chunk => chunk.length === 2)
70 | .toArray();
71 |
72 | t.deepEqual(result, ["10", "20", "30"]);
73 | });
74 |
--------------------------------------------------------------------------------
/test/unit/streams/data/flatmap.spec.ts:
--------------------------------------------------------------------------------
1 | import test from "ava";
2 | import { DataStream } from "../../../../src/streams/data-stream";
3 | import { defer } from "../../../_helpers/utils";
4 |
5 | test("DataStream can flat-map chunks via sync callback (to same type)", async (t) => {
6 | const dsNumber = DataStream.from([1, 2, 3, 4]);
7 | const result = await dsNumber.flatMap(chunk => [chunk * 2]).toArray();
8 |
9 | t.deepEqual(result, [2, 4, 6, 8]);
10 | });
11 |
12 | test("DataStream can flat-map chunks via async callback (to same type)", async (t) => {
13 | const dsString = DataStream.from(["it's Sunny in", "", "California"]);
14 | const result = await dsString.flatMap(chunk => chunk.split(" ")).toArray();
15 |
16 | t.deepEqual(result, ["it's", "Sunny", "in", "", "California"]);
17 | });
18 |
19 | test("DataStream can flat-map chunks returned as sync iterator (to different type)", async (t) => {
20 | const dsNumber = DataStream.from([1, 2, 3, 4]);
21 | const result = await dsNumber.flatMap(chunk => {
22 | return (function * () {
23 | yield `${chunk * 2}`;
24 | yield `${chunk * 10}`;
25 | })();
26 | }).toArray();
27 |
28 | t.deepEqual(result, ["2", "10", "4", "20", "6", "30", "8", "40"]);
29 | });
30 |
31 | test("DataStream can flat-map chunks returned as async iterator (to different type)", async (t) => {
32 | const dsNumber = DataStream.from([1, 2, 3, 4]);
33 | const result = await dsNumber.flatMap(chunk => {
34 | return (async function * () {
35 | await defer(chunk * 2);
36 | yield `${chunk * 2}`;
37 | await defer(chunk * 10);
38 | yield `${chunk * 10}`;
39 | })();
40 | }).toArray();
41 |
42 | t.deepEqual(result, ["2", "10", "4", "20", "6", "30", "8", "40"]);
43 | });
44 |
45 | test("DataStream flatMap flattens only one level", async (t) => {
46 | const dsNumber = DataStream.from([1, 2, 3, 4]);
47 | const result = await dsNumber.flatMap(chunk => [[chunk * 2]]).toArray();
48 |
49 | t.deepEqual(result, [[2], [4], [6], [8]]);
50 | });
51 |
52 | test("DataStream flatMap filters and duplicates chunks correctly", async (t) => {
53 | const dsNumber = DataStream.from([5, 4, -3, 20, 17, -33, -4, 18]);
54 | const result = await dsNumber.flatMap(chunk => {
55 | if (chunk < 0) {
56 | return [];
57 | }
58 | return chunk % 2 === 0 ? [chunk] : [chunk - 1, 1];
59 | }).toArray();
60 |
61 | t.deepEqual(result, [4, 1, 4, 20, 16, 1, 18]);
62 | });
63 |
64 | test("DataStream flatMap passes typed variadic args", async (t) => {
65 | const dsNumber = DataStream.from([1, 2, 3, 4]);
66 | const result = await dsNumber
67 | .flatMap((chunk, multiplier) => [chunk * multiplier], 3)
68 | .flatMap(async (chunk, postfix, text) => {
69 | return new Promise(res => {
70 | setTimeout(() => {
71 | res([`${chunk}${postfix}`, text]);
72 | }, 10);
73 | });
74 | }, "00", "foo")
75 | .toArray();
76 |
77 | t.deepEqual(result, ["300", "foo", "600", "foo", "900", "foo", "1200", "foo"]);
78 | });
79 |
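80 | // As the tests above show, flatMap() flattens exactly one level: returning []
81 | // drops a chunk, returning multiple items duplicates it, and sync or async
82 | // iterators are drained in order before the next chunk's output is emitted.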
--------------------------------------------------------------------------------
/test/unit/streams/data/map.spec.ts:
--------------------------------------------------------------------------------
1 | import test from "ava";
2 | import { DataStream } from "../../../../src/streams/data-stream";
3 | import { deferReturn } from "../../../_helpers/utils";
4 |
5 | test("DataStream can map chunks via sync callback (to same type)", async (t) => {
6 | const dsNumber = DataStream.from([1, 2, 3, 4, 5]);
7 | const result = await dsNumber.map(chunk => chunk * 2).toArray();
8 |
9 | t.deepEqual(result, [2, 4, 6, 8, 10]);
10 | });
11 |
12 | test("DataStream can map chunks via sync callback (to different type)", async (t) => {
13 | const dsNumber = DataStream.from([1, 2, 3, 4, 5]);
14 | const result = await dsNumber.map(chunk => `foo-${chunk}`).toArray();
15 |
16 | t.deepEqual(result, ["foo-1", "foo-2", "foo-3", "foo-4", "foo-5"]);
17 | });
18 |
19 | test("DataStream can map chunks via async callback (to same type)", async (t) => {
20 | const dsNumber = DataStream.from([1, 2, 3, 4, 5]);
21 | const result = await dsNumber.map(async chunk => deferReturn(5, chunk * 2)).toArray();
22 |
23 | t.deepEqual(result, [2, 4, 6, 8, 10]);
24 | });
25 |
26 | test("DataStream can map chunks via async callback (to different type)", async (t) => {
27 | const dsNumber = DataStream.from([1, 2, 3, 4, 5]);
28 | const result = await dsNumber.map(async chunk => deferReturn(5, `foo-${chunk}`)).toArray();
29 |
30 | t.deepEqual(result, ["foo-1", "foo-2", "foo-3", "foo-4", "foo-5"]);
31 | });
32 |
33 | test("DataStream can apply multiple map transforms", async (t) => {
34 | const dsNumber = DataStream.from([1, 2, 3, 4, 5]);
35 | const result = await dsNumber
36 | .map(chunk => chunk * 2)
37 | .map(async chunk => deferReturn(5, `${chunk}00`))
38 | .map(chunk => parseInt(chunk, 10))
39 | .toArray();
40 |
41 | t.deepEqual(result, [200, 400, 600, 800, 1000]);
42 | });
43 |
44 | test("DataStream map passes variadic args", async (t) => {
45 | const dsNumber = DataStream.from([1, 2, 3, 4, 5]);
46 | const result = await dsNumber
47 | .map((chunk, multiplier) => chunk * multiplier, 3)
48 | .map(async (chunk, postfix) => deferReturn(5, `${chunk}${postfix}`), "00")
49 | .toArray();
50 |
51 | t.deepEqual(result, ["300", "600", "900", "1200", "1500"]);
52 | });
53 |
54 | test("DataStream map passes typed variadic args", async (t) => {
55 | const dsNumber = DataStream.from([1, 2, 3, 4, 5]);
56 | const result = await dsNumber
57 | .map((chunk, multiplier) => chunk * multiplier, 3)
58 | .map(async (chunk, postfix) => deferReturn(5, `${chunk}${postfix}`), "00")
59 | .toArray();
60 |
61 | t.deepEqual(result, ["300", "600", "900", "1200", "1500"]);
62 | });
63 |
64 | test("DataStream can map chunks via sync callback (to same type) 2", async (t) => {
65 | const dsNumber = DataStream.from([1, 2, 3, 4, 5]);
66 | const result = await dsNumber.map(chunk => chunk * 2, undefined).toArray();
67 |
68 | t.deepEqual(result, [2, 4, 6, 8, 10]);
69 | });
70 |
71 | // test("DataStream can map chunks via sync callback (to same type) 3", async (t) => {
72 | // const dsNumber = DataStream.from([1, 2, 3, 4, 5]);
73 | // const result = await dsNumber.map(chunk => chunk * 2, void).toArray();
74 |
75 | // t.deepEqual(result, [2, 4, 6, 8, 10]);
76 | // });
77 |
--------------------------------------------------------------------------------
/test/unit/streams/data/native-interface.spec.ts:
--------------------------------------------------------------------------------
1 | import test from "ava";
2 | import { createReadStream } from "fs";
3 | import { Writable } from "stream";
4 | import { DataStream } from "../../../../src/streams/data-stream";
5 | import { deferReturn, defer } from "../../../_helpers/utils";
6 |
7 | test("DataStream can be iterated with 'for await..of'", async (t) => {
8 | const result = [];
9 | const stream = DataStream
10 | .from(["1", "2", "3", "4", "5", "6"])
11 | .map(parseInt, 10)
12 | .filter(chunk => !!(chunk % 2));
13 |
14 |
15 | for await (const chunk of stream) {
16 | result.push(chunk);
17 | }
18 |
19 | t.deepEqual(result, [1, 3, 5]);
20 | });
21 |
22 | test("DataStream can piped from nodejs Readable stream", async (t) => {
23 | const readable = createReadStream("./build/test/_assets/sample.txt", "utf8");
24 | const stream = new DataStream();
25 |
26 | readable.pipe(stream.asWritable());
27 |
28 | const result = await stream.toArray();
29 |
30 | t.deepEqual(result, ["foo\nbar\nbaz\nbax\n"]);
31 | });
32 |
33 | test("DataStream can piped from nodejs Readable stream (intermediate stream)", async (t) => {
34 | const readable = createReadStream("./build/test/_assets/sample.txt", "utf8");
35 | const stream = new DataStream();
36 | const stream2 = stream.map(chunk => chunk.toUpperCase());
37 |
38 | readable.pipe(stream.asWritable());
39 |
40 | const result = await stream2.toArray();
41 |
42 | t.deepEqual(result, ["FOO\nBAR\nBAZ\nBAX\n"]);
43 | });
44 |
45 |
46 | test("DataStream cannot be piped directly from nodejs Readable stream", async (t) => {
47 | const readable = createReadStream("./build/test/_assets/sample.txt", "utf8");
48 | const stream = new DataStream();
49 |
50 | t.throws(() => readable.pipe(stream as any as Writable));
51 | });
52 |
53 | test("DataStream can piped from nodejs Readable stream (read one byte at a time)", async (t) => {
54 | const readable = createReadStream("./build/test/_assets/sample.txt", { encoding: "utf8", highWaterMark: 1 });
55 | const stream = new DataStream();
56 |
57 | readable.pipe(stream.asWritable());
58 |
59 | const result = await stream.toArray();
60 |
61 | t.deepEqual(result, ["f", "o", "o", "\n", "b", "a", "r", "\n", "b", "a", "z", "\n", "b", "a", "x", "\n"]);
62 | });
63 |
64 | test("DataStream can piped from nodejs Readable stream and keep correct backpressure", async (t) => {
65 | const assertBackpressure = (stream: any): void => {
66 | const state = stream.ifca.state;
67 |
68 | t.true(state.all <= state.maxParallel, `Number of chunks processed ${ state.all } should respect maxParallel of ${ state.maxParallel }`);
69 | };
70 | const readable = createReadStream("./build/test/_assets/sample.txt", { encoding: "utf8", highWaterMark: 2 });
71 | const stream = new DataStream({ maxParallel: 2 })
72 | .map(chunk => { assertBackpressure(stream); return deferReturn(25, chunk); });
73 |
74 | readable.pipe(stream.asWritable());
75 |
76 | const result = await stream.toArray();
77 |
78 | t.deepEqual(result, ["fo", "o\n", "ba", "r\n", "ba", "z\n", "ba", "x\n"]);
79 | });
80 |
81 | test("Piped DataStream can be unpiped via '.unpipe(instance)'", (t) => {
82 | const readable = createReadStream("./build/test/_assets/sample.txt", { encoding: "utf8", highWaterMark: 2 });
83 | const stream = new DataStream({ maxParallel: 2 })
84 | .map(chunk => deferReturn(20, chunk));
85 |
86 | return new Promise(resolve => {
87 | readable.once("readable", async () => {
88 | const writable = readable.pipe(stream.asWritable());
89 |
90 | // This call, in combination with the defer below, lets the first 2 ready chunks be read and leaves
91 | // the next 2 pending before unpiping occurs.
92 | const result = stream.toArray();
93 |
94 | // We need quite a significant delay to keep this test stable.
95 | await defer(30);
96 |
97 | readable.unpipe(writable);
98 |
99 | // Calling end will simply flush all pending chunks from the IFCA queue (so we will have 4 chunks in total).
100 | await stream.end();
101 |
102 | t.deepEqual(await result, ["fo", "o\n", "ba", "r\n"]);
103 |
104 | resolve();
105 | });
106 | });
107 | });
108 |
109 | test("Piped DataStream can be unpiped via '.unpipe(instance)' #2", (t) => {
110 | const readable = createReadStream("./build/test/_assets/sample.txt", { encoding: "utf8", highWaterMark: 2 });
111 | const stream = new DataStream({ maxParallel: 2 })
112 | .map(chunk => deferReturn(10, chunk));
113 |
114 | return new Promise(resolve => {
115 | readable.once("readable", async () => {
116 | const writable = readable.pipe(stream.asWritable());
117 |
118 | await defer(5);
119 |
120 | readable.unpipe(writable);
121 |
122 | // Calling end will simply flush all pending chunks from the IFCA queue (so we will have 2 chunks in total).
123 | await stream.end();
124 |
125 | t.deepEqual(await stream.toArray(), ["fo", "o\n"]);
126 |
127 | resolve();
128 | });
129 | });
130 | });
131 |
132 | test("Piped DataStream can be unpiped via '.unpipe()'", (t) => {
133 | const readable = createReadStream("./build/test/_assets/sample.txt", { encoding: "utf8", highWaterMark: 2 });
134 | const stream = new DataStream({ maxParallel: 2 })
135 | .map(chunk => deferReturn(20, chunk));
136 |
137 | return new Promise(resolve => {
138 | readable.once("readable", async () => {
139 | readable.pipe(stream.asWritable());
140 |
141 | // This call, in combination with the defer below, lets the first 2 ready chunks be read and leaves
142 | // the next 2 pending before unpiping occurs.
143 | const result = stream.toArray();
144 |
145 | // We need quite a significant delay to keep this test stable.
146 | await defer(30);
147 |
148 | readable.unpipe();
149 |
150 | // Calling end will simply flush all pending chunks from the IFCA queue (so we will have 4 chunks in total).
151 | await stream.end();
152 |
153 | t.deepEqual(await result, ["fo", "o\n", "ba", "r\n"]);
154 |
155 | resolve();
156 | });
157 | });
158 | });
159 |
160 | test("Piped DataStream can be unpiped via '.unpipe()' #2", (t) => {
161 | const readable = createReadStream("./build/test/_assets/sample.txt", { encoding: "utf8", highWaterMark: 2 });
162 | const stream = new DataStream({ maxParallel: 2 })
163 | .map(chunk => deferReturn(10, chunk));
164 |
165 | return new Promise(resolve => {
166 | readable.once("readable", async () => {
167 | readable.pipe(stream.asWritable());
168 |
169 | await defer(5);
170 |
171 | readable.unpipe();
172 |
173 | // Calling end will simply flush all pending chunks from the IFCA queue (so we will have 2 chunks in total).
174 | await stream.end();
175 |
176 | t.deepEqual(await stream.toArray(), ["fo", "o\n"]);
177 |
178 | resolve();
179 | });
180 | });
181 | });
182 |
183 | test("Native pipe with DataStream returns the same instance which was passed as an argument", async (t) => {
184 | const readable = createReadStream("./build/test/_assets/sample.txt", "utf8");
185 | const stream = new DataStream();
186 |
187 | const stream2 = readable.pipe(stream.asWritable());
188 |
189 | t.is(stream, stream2 as any as DataStream);
190 | });
191 |
192 | test("Calling 'DataStream.on()' should throw", async (t) => {
193 | const stream = new DataStream();
194 |
195 | t.throws(() => (stream as any).on("fake"));
196 | });
197 |
198 | test("After pipe is called DataStream has no event-emitter like methods", async (t) => {
199 | const readable = createReadStream("./build/test/_assets/sample.txt", "utf8");
200 | const stream = new DataStream();
201 |
202 | readable.pipe(stream.asWritable());
203 |
204 | t.true((stream as any).once === undefined);
205 | t.true((stream as any).removeListener === undefined);
206 | t.true((stream as any).emit === undefined);
207 | });
208 |
209 | test("DataStream writable proxy emit returns whether there are listeners attached (true)", async (t) => {
210 | const stream = new DataStream();
211 | const streamAsWritable = stream.asWritable();
212 |
213 | streamAsWritable.on("fake", () => {});
214 |
215 | t.true(streamAsWritable.emit("fake"));
216 | });
217 |
218 | test("DataStream writable proxy emit returns whether there are listeners attached (false)", async (t) => {
219 | const stream = new DataStream();
220 | const streamAsWritable = stream.asWritable();
221 |
222 | t.false(streamAsWritable.emit("fake"));
223 | });
224 |
225 | test("DataStream writable proxy emit cna handle unexpected unpipe data", async (t) => {
226 | const stream = new DataStream();
227 | const streamAsWritable = stream.asWritable();
228 |
229 | const streamMock = {
230 | pipe: () => {},
231 | // eslint-disable-next-line @typescript-eslint/no-unused-vars
232 | unpipe: (...args: any[]) => {},
233 | };
234 |
235 | t.notThrows(() => {
236 | streamAsWritable.emit("unpipe", streamMock);
237 | streamMock.unpipe(["fake", "data"]);
238 | });
239 | });
240 |
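241 | // Pattern used throughout this file: a DataStream is not itself a nodejs
242 | // Writable, so piping a native Readable into it goes through the asWritable()
243 | // proxy, i.e. readable.pipe(stream.asWritable()), which forwards writes,
244 | // honors unpipe and hands back the proxied stream from pipe().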
--------------------------------------------------------------------------------
/test/unit/streams/data/pipe.spec.ts:
--------------------------------------------------------------------------------
1 | import test from "ava";
2 | import fs from "fs";
3 | import { Writable } from "stream";
4 | import { DataStream } from "../../../../src/streams/data-stream";
5 | import { StringStream } from "../../../../src/streams/string-stream";
6 | import { deferReturn } from "../../../_helpers/utils";
7 |
8 | // Run tests for different sets of "maxParallel" values for each stream.
9 | const maxParallels = [
10 | // Constant
11 | [1, 1, 1, 1, 1],
12 | [2, 2, 2, 2, 2],
13 | [4, 4, 4, 4, 4],
14 | [8, 8, 8, 8, 8],
15 | // Mixed
16 | [2, 1, 5, 2, 9],
17 | [32, 1, 1, 10, 5],
18 | // Increasing
19 | [2, 4, 6, 8, 10],
20 | [1, 4, 16, 32, 64],
21 | // Decreasing
22 | [10, 8, 6, 4, 2],
23 | [64, 32, 16, 4, 1]
24 | ];
25 |
26 | for (const maxParallel of maxParallels) {
27 | test(`DataStream can be piped to another DataStream, ${ maxParallel.slice(0, 2) }`, async (t) => {
28 | const sourceStream = DataStream.from([1, 2, 3, 4, 5, 6, 7], { maxParallel: maxParallel[0] });
29 | const destStream = new DataStream({ maxParallel: maxParallel[1] });
30 |
31 | sourceStream.pipe(destStream);
32 |
33 | t.deepEqual(await destStream.toArray(), [1, 2, 3, 4, 5, 6, 7]);
34 | });
35 |
36 | test(`DataStream with transforms can be piped to another DataStream, ${ maxParallel.slice(0, 2) }`, async (t) => {
37 | const sourceStream = DataStream.from([1, 2, 3, 4, 5], { maxParallel: maxParallel[0] });
38 | const destStream = new DataStream({ maxParallel: maxParallel[1] });
39 |
40 | sourceStream.map((x) => x * 2).pipe(destStream);
41 |
42 | t.deepEqual(await destStream.toArray(), [2, 4, 6, 8, 10]);
43 | });
44 |
45 | test(`DataStream can be piped to another DataStream with transforms, ${ maxParallel.slice(0, 2) }`, async (t) => {
46 | const sourceStream = DataStream.from([1, 2, 3, 4, 5], { maxParallel: maxParallel[0] });
47 | const destStream = sourceStream.pipe(
48 | new DataStream({ maxParallel: maxParallel[1] }).map((x) => x * 2));
49 |
50 | t.deepEqual(await destStream.toArray(), [2, 4, 6, 8, 10]);
51 | });
52 |
53 | test(`DataStream with IFCA breaking transforms can be piped to another DataStream, ${ maxParallel.slice(0, 2) }`, async (t) => {
54 | const sourceStream = DataStream
55 | .from([1, 2, 3, 4, 5], { maxParallel: maxParallel[0] })
56 | .flatMap(x => [x, x + 10, x + 100]);
57 | const destStream = sourceStream.pipe(
58 | new DataStream({ maxParallel: maxParallel[1] }));
59 |
60 | t.deepEqual(await destStream.toArray(), [1, 11, 101, 2, 12, 102, 3, 13, 103, 4, 14, 104, 5, 15, 105]);
61 | });
62 |
63 | test(`DataStream using write can be piped to another DataStream (toArray), ${ maxParallel.slice(0, 2) }`, async (t) => {
64 | const sourceStream = new DataStream({ maxParallel: maxParallel[0] })
65 | .filter(x => x % 2 === 0)
66 | .map(x => x * 2);
67 | const destStream = new DataStream({ maxParallel: maxParallel[1] }).map((x) => `${ x }`);
68 |
69 | sourceStream.pipe(destStream);
70 |
71 | // We need to use a sinking method here without awaiting it, so it can consume sourceStream
72 | // chunks as they come; otherwise the sourceStream will fill up maxParallel and block.
73 | const result = destStream.toArray();
74 |
75 | for (const i of [1, 2, 3, 4, 5, 6, 7, 8]) {
76 | await sourceStream.write(i);
77 | }
78 |
79 | sourceStream.end();
80 |
81 | t.deepEqual(await result, ["4", "8", "12", "16"]);
82 | });
83 |
84 | test(`DataStream can be piped to multiple streams ${ maxParallel }`, async (t) => {
85 | const stream1 = DataStream
86 | .from([1, 2, 3, 4, 5, 6, 7, 8, 9, 10], { maxParallel: maxParallel[0] })
87 | .map(x => x * 2);
88 | const stream2 = new DataStream({ maxParallel: maxParallel[1] })
89 | .filter(x => x % 4 === 0)
90 | .map(x => `foo-${ x }-`);
91 | const stream3 = new DataStream({ maxParallel: maxParallel[2] })
92 | .map(x => ({ value: x }));
93 | const stream4 = new DataStream({ maxParallel: maxParallel[3] });
94 | const stringStream = new StringStream({ maxParallel: maxParallel[4] });
95 |
96 | stream1.pipe(stream2);
97 | stream1.pipe(stream3);
98 |
99 | stream2.pipe(stream4);
100 | stream2.pipe(stringStream);
101 |
102 | const [result3, result4, resultString] = await Promise.all([
103 | stream3.toArray(), // Result of: stream1 | stream3
104 | stream4.toArray(), // Result of: stream1 | stream2 | stream4
105 | stringStream.split("-").toArray() // Result of: stream1 | stream2 | stringStream
106 | ]);
107 |
108 | t.deepEqual(result3, [{ value: 2 }, { value: 4 }, { value: 6 }, { value: 8 },
109 | { value: 10 }, { value: 12 }, { value: 14 }, { value: 16 }, { value: 18 }, { value: 20 }]);
110 | t.deepEqual(result4, ["foo-4-", "foo-8-", "foo-12-", "foo-16-", "foo-20-"]);
111 | t.deepEqual(resultString, ["foo", "4", "foo", "8", "foo", "12", "foo", "16", "foo", "20", ""]);
112 | });
113 |
114 | test(`DataStream using write can be piped to another DataStream (read) #1, ${ maxParallel.slice(0, 2) }`, async (t) => {
115 | const sourceStream = new DataStream({ maxParallel: maxParallel[0] })
116 | .map(x => x * 2);
117 | const destStream = new DataStream({ maxParallel: maxParallel[1] }).map((x) => `${ x }`);
118 |
119 | sourceStream.pipe(destStream);
120 |
121 | const result = [];
122 |
123 | for (const i of [1, 2, 3, 4, 5, 6, 7, 8]) {
124 | const [, out] = await Promise.all([sourceStream.write(i), destStream.read()]);
125 |
126 | result.push(out);
127 | }
128 |
129 | t.deepEqual(result, ["2", "4", "6", "8", "10", "12", "14", "16"]);
130 | });
131 |
132 | test(`DataStream using write can be piped to another DataStream (read) #2, ${ maxParallel.slice(0, 2) }`, async (t) => {
133 | const sourceStream = new DataStream({ maxParallel: maxParallel[0] })
134 | .filter(x => x % 2 === 0)
135 | .map(x => x * 2);
136 | const destStream = new DataStream({ maxParallel: maxParallel[1] }).map((x) => `${ x }`);
137 |
138 | sourceStream.pipe(destStream);
139 |
140 | const result = [];
141 |
142 | for (const i of [1, 2, 3, 4, 5, 6, 7, 8]) {
143 | await sourceStream.write(i);
144 |
145 | // Since we filter out odd chunks, we need to read just half of initial chunks number.
146 | if (i % 2 === 0) {
147 | result.push(await destStream.read());
148 | }
149 | }
150 |
151 | t.deepEqual(result, ["4", "8", "12", "16"]);
152 | });
153 |
154 | test(`DataStream can pipe to nodejs Writable stream, ${ maxParallel.slice(0, 2) }`, (t) => {
155 | const filePath = `./build/test/_assets/tmp-pipe-${ maxParallel[0] }-${ maxParallel[1] }`;
156 | const writable = fs.createWriteStream(filePath, { highWaterMark: 4 });
157 | const sourceStream = DataStream
158 | .from([1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12], { maxParallel: maxParallel[1] })
159 | .map(x => `${ x }\n`);
160 |
161 | return new Promise(resolve => {
162 | writable.once("open", async () => {
163 | sourceStream.pipe(writable);
164 |
165 | const result = await StringStream
166 | .fromFile(filePath, { readStream: { encoding: "utf8" } })
167 | .split("\n")
168 | .toArray();
169 |
170 | t.deepEqual(result, ["1", "2", "3", "4", "5", "6", "7", "8", "9", "10", "11", "12", ""]);
171 |
172 | fs.unlinkSync(filePath);
173 |
174 | resolve();
175 | });
176 | });
177 | });
178 | }
179 |
180 | test("DataStream pipe ends destination stream", async (t) => {
181 | const sourceStream = DataStream.from([1, 2, 3, 4, 5, 6, 7]);
182 | const destStream = new DataStream();
183 |
184 | sourceStream.pipe(destStream);
185 |
186 | for (let i = 0; i < 7; i++) {
187 | await destStream.read();
188 | }
189 |
190 | t.throws(() => destStream.write(8), { message: "Write after end" }, "Throws if stream is ended.");
191 | });
192 |
193 | test("DataStream pipe does not end destination stream if end:false passed", async (t) => {
194 | const sourceStream = DataStream.from([1, 2, 3, 4, 5, 6, 7]);
195 | const destStream = new DataStream();
196 |
197 | sourceStream.pipe(destStream, { end: false });
198 |
199 | for (let i = 0; i < 7; i++) {
200 | await destStream.read();
201 | }
202 |
203 | t.notThrows(() => destStream.write(8), "Should not throw if stream is not ended.");
204 | });
205 |
206 | test("Pipe source can be read from", async (t) => {
207 | const sourceStream = DataStream.from([1, 2, 3, 4, 5, 6, 7]);
208 | const destStream = new DataStream();
209 |
210 | sourceStream.pipe(destStream);
211 |
212 | const read = await sourceStream.read();
213 |
214 | // The first chunk (1) will be sent to the piped stream,
215 | // so the result of the read will be the second chunk.
216 | t.deepEqual(read, 2);
217 | });
218 |
219 | test("Pipe source can be piped from again", async (t) => {
220 | const sourceStream = DataStream.from([1, 2, 3, 4, 5, 6, 7]);
221 | const destStream1 = new DataStream();
222 | const destStream2 = new DataStream();
223 |
224 | sourceStream.pipe(destStream1);
225 |
226 | t.notThrows(() => sourceStream.pipe(destStream2), "Should not throw.");
227 | });
228 |
229 | test("Pipe source cannot be transformed further", async (t) => {
230 | const sourceStream = DataStream.from([1, 2, 3, 4, 5, 6, 7]);
231 | const destStream = new DataStream();
232 |
233 | sourceStream.pipe(destStream);
234 |
235 | t.throws(() => sourceStream.map(x => x * 2), { message: "Stream is not transformable." }, "Should throw.");
236 | });
237 |
238 | test("Pipe keeps correct backpressure (1 destination)", async (t) => {
239 | const assert = (stream: any) => {
240 | t.true(stream.ifca.state.all <= stream.ifca.state.maxParallel,
241 | `Backpressure is not exceeded (${ stream.ifca.state }).`);
242 | };
243 | const stream1 = DataStream.from([1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12], { maxParallel: 5 });
244 | const stream2 = new DataStream({ maxParallel: 3 })
245 |         .map(x => { assert(stream2); return deferReturn(10, x); });
246 |
247 | stream1.pipe(stream2);
248 |
249 | await stream2.toArray();
250 | });
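// deferReturn comes from test/_helpers/utils.ts. A minimal sketch of what it is
// assumed to do here (resolve with the given value after a delay in ms):
//
//     const deferReturn = <T>(ms: number, value: T): Promise<T> =>
//         new Promise(resolve => setTimeout(() => resolve(value), ms));
//
// The delay keeps the slower destination busy so the backpressure assertions
// are actually exercised mid-flight.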
251 |
252 | test("Pipe keeps correct backpressure (2 destinations)", async (t) => {
253 | const state = {
254 | stream1: [0],
255 | stream2: [0],
256 | stream3: [0]
257 | };
258 | const assert = (name: string, stream: any) => {
259 | t.true(stream.ifca.state.all <= stream.ifca.state.maxParallel,
260 | `Backpressure is not exceeded (${ name }, ${ stream.ifca.state }).`);
261 |
262 | if (name === "stream3") {
263 |             t.true(state.stream3.length === state.stream2.length, "Stream3 has the same number of chunks done or in progress as stream2.");
264 | }
265 | };
266 | const stream1 = DataStream
267 | .from([1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20], { maxParallel: 7 })
268 | .map(x => { state.stream1.push(x); assert("stream1", stream1); return x; });
269 | const stream2 = new DataStream({ maxParallel: 3 })
270 | .map(x => { state.stream2.push(x); assert("stream2", stream2); return deferReturn(10, x); });
271 | const stream3 = new DataStream({ maxParallel: 5 })
272 | .map(x => { state.stream3.push(x); assert("stream3", stream3); return deferReturn(5, x); });
273 |
274 | stream1.pipe(stream2);
275 | stream1.pipe(stream3);
276 |
277 | await Promise.all([stream2.toArray(), stream3.toArray()]);
278 | });
279 |
280 | test("Piping from a non-pipeable stream throws", async (t) => {
281 | const stream1 = DataStream.from([1, 2, 3, 4, 5]);
282 | const stream2 = new DataStream();
283 |
284 | (stream1 as any).pipeable = false;
285 |
286 | t.throws(() => {
287 | stream1.pipe(stream2);
288 | });
289 | });
290 |
291 | test("Piping to a non-writable stream-like object does not throw", async (t) => {
292 | const stream1 = DataStream.from([1, 2, 3, 4, 5]);
293 | const streamLike = {
294 | writable: false,
295 | on: () => {},
296 | end: () => {},
297 | removeListener: () => {}
298 | };
299 |
300 | t.notThrows(() => {
301 | stream1.pipe(streamLike as any as Writable);
302 | });
303 | });
304 |
305 | test("Draining a non-writable stream-like object after piping does not throw", async (t) => {
306 | const stream1 = DataStream.from([1, 2, 3, 4, 5]);
307 | const streamLike = {
308 | writable: false,
309 | on: (evtName: string, callback: () => {}) => {
310 | if (evtName === "drain") {
311 | callback();
312 | }
313 | },
314 | end: () => {},
315 | removeListener: () => {}
316 | };
317 |
318 | t.notThrows(() => {
319 | stream1.pipe(streamLike as any as Writable);
320 | });
321 | });
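// The two tests above rely on pipe() treating its target as a duck-typed
// Writable: only "writable", "on", "end" and "removeListener" are needed, so a
// minimal stream-like object is enough to exercise the drain handling.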
322 |
--------------------------------------------------------------------------------
/test/unit/streams/data/read.spec.ts:
--------------------------------------------------------------------------------
1 | import test from "ava";
2 | import { DataStream } from "../../../../src/streams/data-stream";
3 |
4 | for (const maxParallel of [1, 2, 4, 8]) {
5 | test(`Can read from stream which was written to (maxParallel: ${ maxParallel })`, async (t) => {
6 | const stream = new DataStream({ maxParallel });
7 |
8 | stream.write(10);
9 | stream.write(20);
10 | stream.write(30);
11 |
12 | const result = await Promise.all([stream.read(), stream.read(), stream.read()]);
13 |
14 | t.deepEqual(result, [10, 20, 30]);
15 | });
16 |
17 | test(`Can read from ended stream which was written to (maxParallel: ${ maxParallel })`, async (t) => {
18 | const stream = new DataStream({ maxParallel });
19 |
20 | stream.write(10);
21 | stream.write(20);
22 | stream.write(30);
23 |
24 | stream.end();
25 |
26 | const result = await Promise.all([stream.read(), stream.read(), stream.read()]);
27 |
28 | t.deepEqual(result, [10, 20, 30]);
29 | });
30 |
31 | test(`Can read from stream created with from (maxParallel: ${ maxParallel })`, async (t) => {
32 | const stream = DataStream.from([10, 20, 30, 40, 50], { maxParallel });
33 | const result = await Promise.all([stream.read(), stream.read(), stream.read(), stream.read()]);
34 |
35 | t.deepEqual(result, [10, 20, 30, 40]);
36 | });
37 |
38 | test(`Can read from transformed stream (maxParallel: ${ maxParallel })`, async (t) => {
39 | const stream = DataStream.from([10, 11, 20, 21, 22, 30], { maxParallel })
40 | .map(chunk => `foo${ chunk }`)
41 | .batch(chunk => chunk.endsWith("1"))
42 | .map(chunk => chunk.join(""))
43 | .map(chunk => ({ value: chunk }));
44 | const result = await Promise.all([stream.read(), stream.read(), stream.read(), stream.read()]);
45 |
46 | t.deepEqual(result,
47 | [{ value: "foo10foo11" }, { value: "foo20foo21" }, { value: "foo22foo30" }, null]);
48 | });
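    // Note: read() resolves to null once the stream has ended, hence the
    // trailing null in the expected result above.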
49 | }
50 |
51 | test("Reading from intermediate streams throws an error (first stream)", async (t) => {
52 | const stream = new DataStream()
53 | .map(chunk => `foo${ chunk }`);
54 |
55 | stream.batch(chunk => chunk.endsWith("1"))
56 | .map(chunk => chunk.join(""))
57 | .map(chunk => ({ value: chunk }));
58 |
59 | t.throws(() => stream.read(), { message: "Stream is not readable." });
60 | });
61 |
62 | test("Reading from intermediate streams throws an error (middle stream)", async (t) => {
63 | const stream = new DataStream()
64 | .map(chunk => `foo${ chunk }`);
65 | const stream2 = stream.batch(chunk => chunk.endsWith("1"))
66 | .map(chunk => chunk.join(""));
67 |
68 | stream2.map(chunk => ({ value: chunk }));
69 |
70 | t.throws(() => stream.read(), { message: "Stream is not readable." });
71 | });
72 |
73 | test("Reading from intermediate streams with for..of loop throws an error (first stream)", async (t) => {
74 | const stream = new DataStream()
75 | .map(chunk => `foo${ chunk }`);
76 |
77 | stream.batch(chunk => chunk.endsWith("1"))
78 | .map(chunk => chunk.join(""))
79 | .map(chunk => ({ value: chunk }));
80 |
81 | let error: Error | undefined;
82 |
83 | try {
84 | for await (const chunk of stream) {
85 | console.log(chunk);
86 | t.fail();
87 | }
88 | } catch (err) {
89 | error = err as Error;
90 | } finally {
91 | t.is(error!.message, "Stream is not readable.");
92 | }
93 | });
94 |
95 | test("Reading from intermediate streams with for..of throws an error (middle stream)", async (t) => {
96 | const stream = new DataStream()
97 | .map(chunk => `foo${ chunk }`);
98 | const stream2 = stream.batch(chunk => chunk.endsWith("1"))
99 | .map(chunk => chunk.join(""));
100 |
101 | stream2.map(chunk => ({ value: chunk }));
102 |
103 | let error: Error | undefined;
104 |
105 | try {
106 | for await (const chunk of stream2) {
107 | console.log(chunk);
108 | t.fail();
109 | }
110 | } catch (err) {
111 | error = err as Error;
112 | } finally {
113 | t.is(error!.message, "Stream is not readable.");
114 | }
115 | });
116 |
--------------------------------------------------------------------------------
/test/unit/streams/data/reduce.spec.ts:
--------------------------------------------------------------------------------
1 | import test from "ava";
2 | import { DataStream } from "../../../../src/streams/data-stream";
3 | import { deferReturn } from "../../../_helpers/utils";
4 |
5 | test("DataStream reduce can be used to calculate a sum", async (t) => {
6 | const result = await DataStream
7 | .from([1, 2, 3, 4, 0, 0, 20, 10, 2, 2])
8 | .reduce((a, b) => a + b);
9 |
10 | t.deepEqual(result, 44);
11 | });
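// As with Array.prototype.reduce, when no initial value is provided the first
// chunk apparently seeds the accumulator (compare with the next test, which
// passes an explicit initial value).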
12 |
13 | test("DataStream reduce can be used to calculate a sum (initial provided)", async (t) => {
14 | const result = await DataStream
15 | .from([1, 2, 3, 4, 0, 0, 20, 10, 2, 2])
16 | .reduce((a, b) => a + b, 0);
17 |
18 | t.deepEqual(result, 44);
19 | });
20 |
21 | test("DataStream reduce can be used to concatenate numbers into a string", async (t) => {
22 | const result = await DataStream
23 | .from([1, 2, 3, 4, 5, 6, 7, 8, 9, 0])
24 | .reduce((a, b) => `${a}${b}`, "");
25 |
26 | t.deepEqual(result, "1234567890");
27 | });
28 |
29 | test("DataStream reduce can be used to calculate a sum (async)", async (t) => {
30 | const result = await DataStream
31 | .from([1, 2, 3, 4, 0, 0, 20, 10, 2, 2])
32 | .reduce(async (a, b) => deferReturn(5, a + b));
33 |
34 | t.deepEqual(result, 44);
35 | });
36 |
37 | test("DataStream reduce can be used to calculate a sum (initial provided, async)", async (t) => {
38 | const result = await DataStream
39 | .from([1, 2, 3, 4, 0, 0, 20, 10, 2, 2])
40 | .reduce(async (a, b) => deferReturn(5, a + b), 0);
41 |
42 | t.deepEqual(result, 44);
43 | });
44 |
45 | test("DataStream reduce can be used to concatenate numbers into a string (async)", async (t) => {
46 | const result = await DataStream
47 | .from([1, 2, 3, 4, 5, 6, 7, 8, 9, 0])
48 | .reduce(async (a, b) => deferReturn(5, `${a}${b}`), "");
49 |
50 | t.deepEqual(result, "1234567890");
51 | });
52 |
53 | // Tests below inspired by:
54 | // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Array/Reduce#examples
55 |
56 | test("DataStream can sum values in an object stream", async (t) => {
57 | const result = await DataStream
58 | .from([{ x: 1 }, { x: 2 }, { x: 3 }])
59 | .reduce((a, b) => a + b.x, 0);
60 |
61 | t.deepEqual(result, 6);
62 | });
63 |
64 | test("DataStream can flatten a stream of arrays (explicit initial)", async (t) => {
65 | const initial: number[] = [];
66 | const result = await DataStream
67 | .from([[0, 1], [2, 3], [4, 5]])
68 | .reduce((a, b) => a.concat(b), initial);
69 |
70 | t.deepEqual(result, [0, 1, 2, 3, 4, 5]);
71 | });
72 |
73 | test("DataStream can flatten a stream of arrays (explicit type)", async (t) => {
74 | const result = await DataStream
75 | .from([[0, 1], [2, 3], [4, 5]])
76 | .reduce((a, b) => a.concat(b), []);
77 |
78 | t.deepEqual(result, [0, 1, 2, 3, 4, 5]);
79 | });
80 |
81 | test("DataStream can count instances of values in an object stream", async (t) => {
82 | const initial: any = {};
83 | const result = await DataStream
84 | .from(["Alice", "Bob", "Tiff", "Bruce", "Alice"])
85 | .reduce((allNames, name) => {
86 | if (name in allNames) {
87 | allNames[name]++;
88 | } else {
89 | allNames[name] = 1;
90 | }
91 |
92 | return allNames;
93 | }, initial);
94 |
95 | t.deepEqual(result, { Alice: 2, Bob: 1, Tiff: 1, Bruce: 1 });
96 | });
97 |
--------------------------------------------------------------------------------
/test/unit/streams/data/run.spec.ts:
--------------------------------------------------------------------------------
1 | import test from "ava";
2 | import { DataStream } from "../../../../src/streams/data-stream";
3 |
4 | test("DataStream run consumes entire stream", async (t) => {
5 | const result: number[] = [];
6 |
7 | await DataStream
8 | .from([1, 2, 3, 4, 0, 0, 20, 10, 2, 2])
9 | .map(x => { result.push(x); return x; })
10 | .run();
11 |
12 | t.deepEqual(result, [1, 2, 3, 4, 0, 0, 20, 10, 2, 2]);
13 | });
14 |
--------------------------------------------------------------------------------
/test/unit/streams/data/tofile.spec.ts:
--------------------------------------------------------------------------------
1 | import test from "ava";
2 | import { readFileSync, unlinkSync } from "fs";
3 | import { DataStream } from "../../../../src/streams/data-stream";
4 |
5 | test("Can write stream data to file", async (t) => {
6 | const stream = DataStream.from(["foo", "bar", "baz", "bax"]);
7 |
8 | const filePath = `./build/test/tmp-tofile1-${ Date.now() }.txt`;
9 |
10 | await stream.toFile(filePath);
11 |
12 | const data = readFileSync(filePath, "utf8");
13 |
14 | t.deepEqual(data, "foobarbazbax");
15 |
16 | unlinkSync(filePath);
17 | });
18 |
19 | test("Can write stream data to file (with map)", async (t) => {
20 | const stream = DataStream
21 | .from(["foo", "bar", "baz", "bax"])
22 | .map(chunk => `${ chunk }\n`);
23 |
24 | const filePath = `./build/test/tmp-tofile2-${ Date.now() }.txt`;
25 |
26 | await stream.toFile(filePath);
27 |
28 | const data = readFileSync(filePath, "utf8");
29 |
30 | t.deepEqual(data, "foo\nbar\nbaz\nbax\n");
31 |
32 | unlinkSync(filePath);
33 | });
34 |
--------------------------------------------------------------------------------
/test/unit/streams/data/use.spec.ts:
--------------------------------------------------------------------------------
1 | import test from "ava";
2 | import { DataStream } from "../../../../src/streams/data-stream";
3 | import { StringStream } from "../../../../src/streams/string-stream";
4 |
5 | test("DataStream use correctly applies the passed callback (returns new stream instance)", async (t) => {
6 | const data = ["$8", "$25", "$3", "$14", "$20", "$9", "$13", "$16"];
7 | const stream = DataStream.from(data, { maxParallel: 4 });
8 |
9 | const parseSquareEvenDollars = (streamInstance: DataStream) => {
10 | return streamInstance
11 | .map(chunk => parseInt(chunk.replace("$", ""), 10))
12 | .filter(chunk => chunk % 2 === 0)
13 | .map(chunk => chunk ** 2)
14 | .map(chunk => `$${ chunk }`);
15 | };
16 |
17 | const newStream = stream.use(parseSquareEvenDollars);
18 |
19 | t.deepEqual(await newStream.toArray(), ["$64", "$196", "$400", "$256"]);
20 | });
21 |
22 | test("DataStream use correctly applies the passed callback (returns array)", async (t) => {
23 | const data = ["$8", "$25", "$3", "$14", "$20", "$9", "$13", "$16"];
24 | const stream = DataStream.from(data, { maxParallel: 4 });
25 |
26 | const result = await stream.use((streamInstance: DataStream) => streamInstance.toArray());
27 |
28 | t.deepEqual(result, ["$8", "$25", "$3", "$14", "$20", "$9", "$13", "$16"]);
29 | });
30 |
31 | test("DataStream use correctly applies the passed callback (returns new stream instance, StringStream)", async (t) => {
32 | const data = ["$8", "$25", "$3", "$14", "$20", "$9", "$13", "$16"];
33 | const stream = StringStream.from(data, { maxParallel: 4 });
34 |
35 | const newStream = stream
36 | .use((streamInstance: StringStream) => streamInstance.filter(chunk => chunk.length > 2))
37 | .match(/\$1\d+/g);
38 |
39 | t.deepEqual(await newStream.toArray(), ["$14", "$13", "$16"]);
40 | });
41 |
--------------------------------------------------------------------------------
/test/unit/streams/data/write.spec.ts:
--------------------------------------------------------------------------------
1 | import test from "ava";
2 | import { DataStream } from "../../../../src/streams/data-stream";
3 |
4 | for (const maxParallel of [1, 2, 4, 8]) {
5 | test(`Can write to stream created via constructor (maxParallel: ${ maxParallel })`, async (t) => {
6 | const stream = new DataStream({ maxParallel });
7 |
8 | stream.write(10);
9 | stream.write(20);
10 | stream.write(30);
11 |
12 | stream.end();
13 |
14 | t.deepEqual(await stream.toArray(), [10, 20, 30]);
15 | });
16 |
17 | test(`Can write to stream created with from (maxParallel: ${ maxParallel })`, async (t) => {
18 | const stream = DataStream.from([10, 20, 30], { maxParallel });
19 |
20 |         // Since ".from()" is async, written values will appear at the beginning of the stream data.
21 | stream.write(40);
22 | stream.write(50);
23 |
24 | t.deepEqual(await stream.toArray(), [40, 50, 10, 20, 30]);
25 | });
26 |
27 | test(`Can write to first stream in a chain (one internal stream, maxParallel: ${ maxParallel })`, async (t) => {
28 | const stream = new DataStream({ maxParallel });
29 | const transformedStream = stream
30 | .map(chunk => `foo${ chunk }`)
31 | .map(chunk => ({ value: chunk }));
32 |
33 | stream.write(10);
34 | stream.write(20);
35 | stream.write(30);
36 |
37 | stream.end();
38 |
39 | t.deepEqual(await transformedStream.toArray(), [{ value: "foo10" }, { value: "foo20" }, { value: "foo30" }]);
40 | });
41 |
42 | test(`Can write to first stream in a chain (multiple internal streams, maxParallel: ${ maxParallel })`, async (t) => {
43 | const stream = new DataStream({ maxParallel });
44 | const transformedStream = stream
45 | .map(chunk => `foo${ chunk }`)
46 | .batch(chunk => chunk.endsWith("1"))
47 | .map(chunk => chunk.join(""))
48 | .map(chunk => ({ value: chunk }));
49 |
50 | stream.write(10);
51 | stream.write(11);
52 | stream.write(20);
53 | stream.write(21);
54 | stream.write(30);
55 |
56 | const result = await Promise.all([transformedStream.read(), transformedStream.read()]);
57 |
58 | t.deepEqual(result, [{ value: "foo10foo11" }, { value: "foo20foo21" }]);
59 | });
60 |
61 | test(`Can write to first stream in a chain (multiple internal streams, toArray, maxParallel: ${ maxParallel })`, async (t) => {
62 | const stream = new DataStream({ maxParallel });
63 | const transformedStream = stream
64 | .map(chunk => `foo${ chunk }`)
65 | .batch(chunk => chunk.endsWith("1"))
66 | .map(chunk => chunk.join(""))
67 | .map(chunk => ({ value: chunk }));
68 |
69 | stream.write(10);
70 | stream.write(11);
71 | stream.write(20);
72 | stream.write(21);
73 | stream.write(30);
74 |
75 | stream.end();
76 |
77 | t.deepEqual(await transformedStream.toArray(), [{ value: "foo10foo11" }, { value: "foo20foo21" }, { value: "foo30" }]);
78 | });
79 |
80 | test(`Can write to first stream in a chain (multiple internal streams, read more than written, maxParallel: ${ maxParallel })`, async (t) => {
81 | const stream = new DataStream({ maxParallel });
82 | const transformedStream = stream
83 | .map(chunk => `foo${ chunk }`)
84 | .batch(chunk => chunk.endsWith("1"))
85 | .map(chunk => chunk.join(""))
86 | .map(chunk => ({ value: chunk }));
87 |
88 | stream.write(10);
89 | stream.write(11);
90 | stream.write(20);
91 | stream.write(21);
92 | stream.write(30);
93 |
94 |         // batch will wait for the next chunk ending with "1", or for the stream to end, before emitting the last piece.
95 | stream.end();
96 |
97 | const result = await Promise.all([
98 | transformedStream.read(), transformedStream.read(), transformedStream.read(), transformedStream.read()]);
99 |
100 | t.deepEqual(result, [{ value: "foo10foo11" }, { value: "foo20foo21" }, { value: "foo30" }, null]);
101 | });
102 |
103 | test(`Can write to last stream in a chain (one internal stream, maxParallel: ${ maxParallel })`, async (t) => {
104 | const stream = new DataStream({ maxParallel })
105 | .map(chunk => `foo${ chunk }`)
106 | .map(chunk => ({ value: chunk }));
107 |
108 | stream.write(10);
109 | stream.write(20);
110 | stream.write(30);
111 |
112 | stream.end();
113 |
114 | t.deepEqual(await stream.toArray(), [{ value: "foo10" }, { value: "foo20" }, { value: "foo30" }]);
115 | });
116 |
117 | test(`Can write to last stream in a chain (multiple internal streams, read more, maxParallel: ${ maxParallel })`, async (t) => {
118 | const stream = new DataStream({ maxParallel })
119 | .map(chunk => `foo${ chunk }`)
120 | .batch(chunk => chunk.endsWith("1"))
121 | .map(chunk => chunk.join(""))
122 | .map(chunk => ({ value: chunk }));
123 |
124 | stream.write(10);
125 | stream.write(11);
126 | stream.write(20);
127 | stream.write(21);
128 | stream.write(30);
129 |
130 | stream.end();
131 |
132 | t.deepEqual(await stream.toArray(), [{ value: "foo10foo11" }, { value: "foo20foo21" }, { value: "foo30" }]);
133 | });
134 |
135 |
136 | test(`Can write to any stream in a chain (multiple internal streams, maxParallel: ${ maxParallel })`, async (t) => {
137 | const stream = new DataStream({ maxParallel });
138 | const streamMap1 = stream.map(chunk => `foo${ chunk }`);
139 | const streamBatch1 = streamMap1.batch(chunk => chunk.endsWith("1"));
140 | const streamMap2 = streamBatch1.map(chunk => chunk.join(""));
141 | const streamFlatMap = streamMap2.flatMap(chunk => ([{ value: chunk }, { value: "flat" }]));
142 |
143 | // Creation order
144 | stream.write(10);
145 | streamMap1.write(11);
146 | streamBatch1.write(20);
147 | streamMap2.write(21);
148 | streamFlatMap.write(30);
149 |
150 | // Reverse order
151 | streamFlatMap.write(31);
152 | streamMap2.write(32);
153 | streamBatch1.write(33);
154 | streamMap1.write(41);
155 | stream.write(42);
156 |
157 | stream.end();
158 |
159 | t.deepEqual(await streamFlatMap.toArray(), [
160 | { value: "foo10foo11" }, { value: "flat" },
161 | { value: "foo20foo21" }, { value: "flat" },
162 | { value: "foo30foo31" }, { value: "flat" },
163 | { value: "foo32foo33foo41" }, { value: "flat" },
164 | { value: "foo42" }, { value: "flat" },
165 | ]);
166 | });
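    // Every write above, regardless of which stream in the chain received it,
    // went through the full transform chain in write order, so writes are
    // evidently routed to the chain's input.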
167 | }
168 |
169 | test("Throws error when writing to closed stream", async (t) => {
170 | const stream = new DataStream();
171 |
172 | await stream.end();
173 |
174 | t.throws(() => stream.write(4), { message: "Write after end" });
175 | });
176 |
177 | test("Throws error when writing to any closed stream", async (t) => {
178 | const stream = new DataStream()
179 | .map(chunk => `foo${ chunk }`);
180 | const stream2 = stream.batch(chunk => chunk.endsWith("1"))
181 | .map(chunk => chunk.join(""))
182 | .map(chunk => ({ value: chunk }));
183 |
184 | await stream.end();
185 |
186 | t.throws(() => stream2.write(4), { message: "Write after end" });
187 | });
188 |
--------------------------------------------------------------------------------
/test/unit/streams/string/creation.spec.ts:
--------------------------------------------------------------------------------
1 | import test from "ava";
2 | import { StringStream } from "../../../../src/streams/string-stream";
3 | import { DataStream } from "../../../../src/streams/data-stream";
4 |
5 | test("StringStream can be constructed", (t) => {
6 | const stringStream = new StringStream();
7 |
8 | t.true(stringStream instanceof StringStream);
9 | });
10 |
11 | test("StringStream can be created via static from method", (t) => {
12 | const stringStream = StringStream.from(["1", "2", "3", "4"]);
13 |
14 | t.true(stringStream instanceof StringStream);
15 | });
16 |
17 | test("StringStream split returns instance of StringStream", (t) => {
18 | const stringStream = StringStream.from(["1", "2", "3", "4"]);
19 | const newStream = stringStream.split("2");
20 |
21 | t.true(newStream instanceof StringStream, `Should be an instance of StringStream, not ${newStream.constructor.name}`);
22 | t.deepEqual(newStream.constructor.name, "StringStream");
23 |
24 | // This checks if "newStream" is a correct type in compile time.
25 | const newStream2 = newStream.split("1");
26 |
27 | t.true(newStream2 instanceof StringStream, `Should be an instance of StringStream, not ${newStream2.constructor.name}`);
28 | t.deepEqual(newStream2.constructor.name, "StringStream");
29 | });
30 |
31 | test("StringStream flatMap returns instance of StringStream", (t) => {
32 | const stringStream = StringStream.from(["1", "2", "3", "4"]);
33 | const newStream = stringStream.flatMap(chunk => [chunk]);
34 |
35 | t.true(newStream instanceof StringStream, `Should be an instance of StringStream, not ${newStream.constructor.name}`);
36 | t.deepEqual(newStream.constructor.name, "StringStream");
37 |
38 | // This checks if "newStream" is a correct type in compile time.
39 | const newStream2 = newStream.split("1");
40 |
41 | t.true(newStream2 instanceof StringStream, `Should be an instance of StringStream, not ${newStream2.constructor.name}`);
42 | t.deepEqual(newStream2.constructor.name, "StringStream");
43 | });
44 |
45 | test("StringStream filter returns instance of StringStream", (t) => {
46 | const stringStream = StringStream.from(["1", "2", "3", "4"]);
47 | const newStream = stringStream.filter(chunk => parseInt(chunk, 10) % 2 === 0);
48 |
49 | t.true(newStream instanceof StringStream, `Should be an instance of StringStream, not ${newStream.constructor.name}`);
50 | t.deepEqual(newStream.constructor.name, "StringStream");
51 |
52 | // This checks if "newStream" is a correct type in compile time.
53 | const newStream2 = newStream.split("1");
54 |
55 | t.true(newStream2 instanceof StringStream, `Should be an instance of StringStream, not ${newStream2.constructor.name}`);
56 | t.deepEqual(newStream2.constructor.name, "StringStream");
57 | });
58 |
59 | test("StringStream map returns instance of StringStream", (t) => {
60 | const stringStream = StringStream.from(["1", "2", "3", "4"]);
61 | const newStream = stringStream.map(chunk => `foo-${chunk}`);
62 |
63 | t.true(newStream instanceof StringStream, `Should be an instance of StringStream, not ${newStream.constructor.name}`);
64 | t.deepEqual(newStream.constructor.name, "StringStream");
65 |
66 | // This checks if "newStream" is a correct type in compile time.
67 | const newStream2 = newStream.split("1");
68 |
69 | t.true(newStream2 instanceof StringStream, `Should be an instance of StringStream, not ${newStream2.constructor.name}`);
70 | t.deepEqual(newStream2.constructor.name, "StringStream");
71 | });
72 |
73 | // Since batch returns "string[]", the result cannot be a StringStream.
74 | test("StringStream batch returns instance of DataStream", (t) => {
75 | const stringStream = StringStream.from(["11", "22", "31", "41"]);
76 | const newStream = stringStream.batch(chunk => chunk.endsWith("1"));
77 |
78 |     t.true(newStream instanceof DataStream, `Should be an instance of DataStream, not ${newStream.constructor.name}`);
79 | t.deepEqual(newStream.constructor.name, "DataStream");
80 | });
81 |
82 | test("Transforming intermediate streams throws an error (first stream)", async (t) => {
83 | const stream = new StringStream()
84 | .map(chunk => `foo${ chunk }`);
85 |
86 | stream.map(chunk => `foo${ chunk }`);
87 |
88 | t.throws(() => stream.split("foo"), { message: "Stream is not transformable." });
89 | });
90 |
91 | test("Transforming intermediate streams throws an error (middle stream)", async (t) => {
92 | const stream = new StringStream()
93 | .map(chunk => `foo${ chunk }`);
94 | const stream2 = stream.split("f");
95 |
96 | stream2.split("o");
97 |
98 | t.throws(() => stream2.map(chunk => chunk.repeat(2)), { message: "Stream is not transformable." });
99 | });
100 |
--------------------------------------------------------------------------------
/test/unit/streams/string/each.spec.ts:
--------------------------------------------------------------------------------
1 | import test from "ava";
2 | import { StringStream } from "../../../../src/streams/string-stream";
3 |
4 | test("StringStream each iterates over each chunk", async (t) => {
5 | const chunks: string[] = [];
6 | const stringStream = StringStream.from(["foo1bar", "baz111bax", "123", "345", "011", "201"]);
7 | const result = await stringStream.each(chunk => {
8 | chunks.push(chunk);
9 | }).toArray();
10 |
11 | t.deepEqual(result, ["foo1bar", "baz111bax", "123", "345", "011", "201"]);
12 | t.deepEqual(chunks, ["foo1bar", "baz111bax", "123", "345", "011", "201"]);
13 | });
14 |
--------------------------------------------------------------------------------
/test/unit/streams/string/grep.spec.ts:
--------------------------------------------------------------------------------
1 | import test from "ava";
2 | import { StringStream } from "../../../../src/streams/string-stream";
3 |
4 | test("StringStream grep filters out chunks not matching the passed pattern", async (t) => {
5 |     const stringStream = StringStream.from(["John", "Johnatan", "Johannes", "James", "Josh", "jelly", "joh"]);
6 |     const result = await stringStream.grep(/[Jj]ohn?/).toArray();
7 |
8 | t.deepEqual(result, ["John", "Johnatan", "Johannes", "joh"]);
9 | });
10 |
--------------------------------------------------------------------------------
/test/unit/streams/string/match.spec.ts:
--------------------------------------------------------------------------------
1 | import test from "ava";
2 | import { StringStream } from "../../../../src/streams/string-stream";
3 |
4 | test("Match returns only matched parts of each chunk (no regexp group)", async (t) => {
5 | const text = "Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et\n" +
6 | "dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea\n" +
7 | "commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla\n" +
8 | "pariatur. Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est\n" +
9 | "laborum.";
10 | const words4 = await StringStream.from([text])
11 | .match(/\b\w{4}[^\w]/g)
12 | .toArray();
13 |
14 | t.deepEqual(words4, ["amet,", "elit,", "enim ", "quis ", "nisi ", "Duis ", "aute ", "esse ", "sint ", "sunt ", "anim "]);
15 | });
16 |
17 | test("Match returns only matched parts of each chunk (single regexp group)", async (t) => {
18 | const text = "Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et\n" +
19 | "dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea\n" +
20 | "commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla\n" +
21 | "pariatur. Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est\n" +
22 | "laborum.";
23 | const words4 = await StringStream.from([text])
24 | .match(/\b(\w{4})[^\w]/g)
25 | .toArray();
26 |
27 | t.deepEqual(words4, ["amet", "elit", "enim", "quis", "nisi", "Duis", "aute", "esse", "sint", "sunt", "anim"]);
28 | });
29 |
30 | test("Match returns only matched parts of each chunk (multiple regexp groups)", async (t) => {
31 | const text = "Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et\n" +
32 | "dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea\n" +
33 | "commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla\n" +
34 | "pariatur. Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est\n" +
35 | "laborum.";
36 | const words4 = await StringStream.from([text])
37 | .match(/\b(\w{2})(\w{2})[^\w]/g)
38 | .toArray();
39 |
40 | t.deepEqual(words4, [
41 | "am", "et", "el", "it", "en", "im", "qu", "is", "ni", "si", "Du", "is", "au", "te", "es", "se", "si", "nt", "su", "nt", "an", "im"
42 | ]);
43 | });
44 |
--------------------------------------------------------------------------------
/test/unit/streams/string/parse.spec.ts:
--------------------------------------------------------------------------------
1 | import test from "ava";
2 | import { DataStream } from "../../../../src/streams/data-stream";
3 | import { StringStream } from "../../../../src/streams/string-stream";
4 |
5 | test("Parse correctly parses string chunks into DataStream", async (t) => {
6 | const data = [
7 | "AAL\tAmerican Airlines Group Inc\t46.26\t \t0.43\t0.94%", "AAPL\tApple Inc\t110.06\t \t0.11\t0.10%", "ADBE\tAdobe Systems Inc\t105.02\t \t-0.79\t-0.75%", "ADI\tAnalog Devices Inc\t68.47\t \t0.26\t0.38%", "ADP\tAutomatic Data Processing Inc\t94.39\t \t0.01\t0.01%",
8 | "ADSK\tAutodesk Inc\t76.90\t \t-1.56\t-1.99%", "AKAM\tAkamai Technologies Inc\t66.44\t \t-0.16\t-0.24%", "ALXN\tAlexion Pharmaceuticals Inc\t119.85\t \t-3.12\t-2.54%", "AMAT\tApplied Materials Inc\t30.74\t \t0.01\t0.03%", "AMGN\tAmgen Inc\t145.23\t \t-2.13\t-1.45%",
9 | "AMZN\tAmazon.com Inc\t760.16\t \t3.76\t0.50%", "ATVI\tActivision Blizzard Inc\t38.39\t \t-1.55\t-3.88%", "AVGO\tBroadcom Ltd\t168.16\t \t1.12\t0.67%", "BBBY\tBed Bath & Beyond Inc\t44.42\t \t-0.50\t-1.11%", "BIDU\tBaidu Inc\t164.38\t \t-1.83\t-1.10%",
10 | "BIIB\tBiogen Inc\t317.00\t \t-2.30\t-0.72%", "BMRN\tBiomarin Pharmaceutical Inc\t89.00\t \t-1.74\t-1.92%", "CA\tCA Inc\t31.01\t \t-0.47\t-1.49%", "CELG\tCelgene Corp\t121.97\t \t-0.11\t-0.09%", "CERN\tCerner Corp\t49.53\t \t-0.06\t-0.12%",
11 | "CHKP\tCheck Point Software Technologies Ltd\t83.41\t \t-0.39\t-0.47%", "CHTR\tCharter Communications Inc\t262.70\t \t-2.78\t-1.05%", "CMCSA\tComcast Corp\t68.34\t \t-0.15\t-0.22%", "COST\tCostco Wholesale Corp\t150.36\t \t-0.84\t-0.56%",
12 | "CSCO\tCisco Systems Inc\t30.18\t \t0.13\t0.43%", "CSX\tCSX Corp\t34.00\t \t0.04\t0.12%", "CTRP\tCtrip.Com International Ltd\t42.02\t \t-0.29\t-0.69%", "CTSH\tCognizant Technology Solutions Corp\t55.57\t \t-0.81\t-1.44%", "CTXS\tCitrix Systems Inc\t86.82\t \t-1.16\t-1.32%",
13 | "DISCA\tDiscovery Communications Inc\t27.50\t \t-0.53\t-1.89%", "DISCK\tDiscovery Communications Inc\t26.75\t \t-0.34\t-1.26%", "DISH\tDISH Network Corp\t55.85\t \t0.19\t0.34%", "DLTR\tDollar Tree Inc\t81.91\t \t0.26\t0.32%", "EA\tElectronic Arts\t78.99\t \t-0.63\t-0.79%",
14 | "EBAY\teBay Inc\t28.69\t \t-0.18\t-0.62%", "ESRX\tExpress Scripts Holding Co\t75.77\t \t-0.67\t-0.88%", "EXPE\tExpedia Inc\t125.67\t \t-0.91\t-0.72%", "FAST\tFastenal Co\t44.80\t \t-0.16\t-0.36%", "FB\tFacebook\t117.02\t \t-0.77\t-0.65%",
15 | "FISV\tFiserv Inc\t104.29\t \t-0.77\t-0.73%", "FOX\t21st Century Fox Class B\t27.69\t \t-0.12\t-0.43%", "FOXA\t21st Century Fox Class A\t27.82\t \t-0.10\t-0.36%", "GILD\tGilead Sciences Inc\t74.62\t \t-0.96\t-1.27%", "GOOG\tAlphabet Class C\t760.54\t \t-10.69\t-1.39%",
16 | "GOOGL\tAlphabet Class A\t775.97\t \t-10.19\t-1.30%", "HSIC\tHenry Schein Inc\t156.96\t \t-1.93\t-1.21%", "ILMN\tIllumina Inc\t131.87\t \t-2.22\t-1.66%", "INCY\tIncyte Corp\t103.63\t \t-1.92\t-1.82%", "INTC\tIntel Corp\t34.95\t \t-0.07\t-0.20%",
17 | "INTU\tIntuit Inc\t115.98\t \t2.18\t1.92%", "ISRG\tIntuitive Surgical Inc\t654.89\t \t0.29\t0.04%", "JD\tJD.com Inc\t26.45\t \t-0.30\t-1.12%", "KHC\tKraft Heinz Co\t82.53\t \t-0.31\t-0.37%", "LBTYA\tLiberty Global PLC\t32.77\t \t-0.14\t-0.43%",
18 | "LBTYK\tLiberty Global PLC\t31.76\t \t-0.24\t-0.75%", "LLTC\tLinear Technology Corp\t61.00\t \t0.19\t0.31%", "LRCX\tLam Research Corp\t104.71\t \t0.91\t0.88%", "LVNTA\tLiberty Interactive Corp\t39.76\t \t0.04\t0.10%", "MAR\tMarriott International Inc\t77.14\t \t-0.33\t-0.43%",
19 | "MAT\tMattel Inc\t30.52\t \t-0.91\t-2.90%", "MCHP\tMicrochip Technology Inc\t64.57\t \t-0.88\t-1.34%", "MDLZ\tMondelez International Inc\t42.92\t \t-0.07\t-0.16%", "MNST\tMonster Beverage Corp\t41.68\t \t-0.29\t-0.69%", "MSFT\tMicrosoft Corp\t60.35\t \t-0.29\t-0.48%",
20 | "MU\tMicron Technology Inc\t19.21\t \t0.03\t0.16%", "MXIM\tMaxim Integrated Products Inc\t40.09\t \t0.35\t0.88%", "MYL\tMylan NV\t36.47\t \t-1.09\t-2.90%", "NCLH\tNorwegian Cruise Line Holdings Ltd\t39.68\t \t-0.15\t-0.38%", "NFLX\tNetflix Inc\t115.21\t \t0.18\t0.16%",
21 | "NTAP\tNetApp Inc\t37.00\t \t0.10\t0.27%", "NTES\tNetEase Inc\t230.81\t \t-6.02\t-2.54%", "NVDA\tNVIDIA Corp\t93.36\t \t0.97\t1.05%", "NXPI\tNXP Semiconductors NV\t98.88\t \t0.82\t0.84%", "ORLY\tO Reilly Automotive Inc\t265.74\t \t-5.25\t-1.94%",
22 | "PAYX\tPaychex Inc\t55.93\t \t0.01\t0.02%", "PCAR\tPACCAR Inc\t59.78\t \t-0.24\t-0.40%", "PCLN\tThe Priceline Group\t1507.35\t \t-5.55\t-0.37%", "PYPL\tPayPal Holdings Inc\t40.08\t \t0.20\t0.50%", "QCOM\tQualcomm Inc\t67.31\t \t0.64\t0.96%",
23 | "QVCA\tLiberty Interactive Corp\t21.07\t \t-0.11\t-0.52%", "REGN\tRegeneron Pharmaceuticals Inc\t397.48\t \t-7.08\t-1.75%", "ROST\tRoss Stores Inc\t68.00\t \t2.47\t3.77%", "SBAC\tSBA Communications Corp\t100.75\t \t0.12\t0.12%", "SBUX\tStarbucks Corp\t55.77\t \t-0.08\t-0.14%",
24 | "SIRI\tSirius XM Holdings Inc\t4.56\t \t-0.02\t-0.44%", "SRCL\tStericycle Inc\t76.09\t \t0.82\t1.09%", "STX\tSeagate Technology PLC\t39.29\t \t0.06\t0.15%", "SWKS\tSkyworks Solutions Inc\t78.21\t \t0.26\t0.33%", "SYMC\tSymantec Corp\t23.75\t \t-0.08\t-0.34%",
25 | "TMUS\tT-Mobile US Inc\t53.59\t \t0.20\t0.37%", "TRIP\tTripAdvisor Inc\t50.79\t \t-0.18\t-0.35%", "TSCO\tTractor Supply Co\t72.91\t \t-0.25\t-0.34%", "TSLA\tTesla Motors Inc\t185.02\t \t-3.64\t-1.93%", "TXN\tTexas Instruments Inc\t72.60\t \t0.52\t0.72%",
26 | "ULTA\tUlta Salon Cosmetics and Fragrance Inc\t250.09\t \t-1.04\t-0.41%", "VIAB\tViacom Inc\t37.77\t \t-0.84\t-2.18%", "VOD\tVodafone Group PLC\t25.69\t \t-0.36\t-1.38%", "VRSK\tVerisk Analytics Inc\t83.16\t \t0.16\t0.19%", "VRTX\tVertex Pharmaceuticals Inc\t89.44\t \t-1.76\t-1.93%",
27 | "WBA\tWalgreens Boots Alliance Inc\t83.27\t \t-0.72\t-0.86%", "WDC\tWestern Digital Corp\t60.93\t \t2.13\t3.62%", "WFM\tWhole Foods Market Inc\t30.96\t \t0.02\t0.06%", "XLNX\tXilinx Inc\t52.98\t \t-0.26\t-0.49%", "YHOO\tYahoo Inc\t41.19\t \t-0.26\t-0.63%", "XRAY\tDentsply Sirona Inc\t60.16\t \t-0.79\t-1.30%"
28 | ];
29 | const stream = StringStream
30 | .from(data)
31 | .parse(
32 | (ticker) => {
33 | const parts = ticker.split("\t");
34 |
35 | return {
36 | symbol: parts[0],
37 | name: parts[1],
38 | price: +parts[2],
39 | change: +parts[4]
40 | };
41 | }
42 | );
43 |
44 | t.true(stream instanceof DataStream, `Should be an instance of DataStream, not ${stream.constructor.name}`);
45 |
46 | const result = await stream.toArray();
47 |
48 | t.deepEqual(result[0], { symbol: "AAL", name: "American Airlines Group Inc", price: 46.26, change: 0.43 }, "Should parse first record.");
49 | t.deepEqual(result[6], { symbol: "AKAM", name: "Akamai Technologies Inc", price: 66.44, change: -0.16 }, "Should parse 7th line.");
50 | t.deepEqual(result[104], { symbol: "XRAY", name: "Dentsply Sirona Inc", price: 60.16, change: -0.79 }, "Should parse last line.");
51 | });
52 |
--------------------------------------------------------------------------------
/test/unit/streams/string/split.spec.ts:
--------------------------------------------------------------------------------
1 | import test from "ava";
2 | import { StringStream } from "../../../../src/streams/string-stream";
3 |
4 | test("StringStream can split chunks by given split sequence #1", async (t) => {
5 | const stringStream = StringStream.from(["foo1bar", "baz111bax", "123", "345", "011", "201"]);
6 | const result = await stringStream.split("1").toArray();
7 |
8 | t.deepEqual(result, ["foo", "barbaz", "", "", "bax", "233450", "", "20", ""]);
9 | });
10 |
11 | test("StringStream can split chunks by given split sequence #2", async (t) => {
12 | const stringStream = StringStream.from(["foo1bar", "barbaz", "1bar3", "baxbar", "bar", "barbay"]);
13 | const result = await stringStream.split("bar").toArray();
14 |
15 | t.deepEqual(result, ["foo1", "", "baz1", "3bax", "", "", "bay"]);
16 | });
17 |
18 | test("StringStream split ends correctly if there is no split sequence in the input data", async (t) => {
19 | const stringStream = StringStream.from(["foo1bar", "baz11bax", "123", "345", "011", "201"]);
20 | const result = await stringStream.split("\n").toArray();
21 |
22 | t.deepEqual(result, ["foo1barbaz11bax123345011201"]);
23 | });
24 |
25 | test("StringStream split works correctly if there are only split sequences in the input data", async (t) => {
26 | const stringStream = StringStream.from(["foo", "foofoo", "foo"]);
27 | const result = await stringStream.split("foo").toArray();
28 |
29 | t.deepEqual(result, ["", "", "", "", ""]);
30 | });
31 |
32 | test("StringStream split works correctly if a single split sequence is split across multiple chunks", async (t) => {
33 | const stringStream = StringStream.from(["f", "o", "o", "barf", "oobaz123", "f", "ozfo", "ooobax", "12f", "oo"]);
34 | const result = await stringStream.split("foo").toArray();
35 |
36 | t.deepEqual(result, ["", "bar", "baz123foz", "oobax12", ""]);
37 | });
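// To handle sequences spanning chunk boundaries, split evidently buffers the
// tail of each chunk that could still be the start of the split sequence and
// only emits once the match is resolved.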
38 |
39 | test("StringStream split works correctly with a single chunk split into multiple ones", async (t) => {
40 | const stringStream = StringStream.from(["this-is-single-chunk"]);
41 | const result = await stringStream.split("-").toArray();
42 |
43 | t.deepEqual(result, ["this", "is", "single", "chunk"]);
44 | });
45 |
46 | test("StringStream split can handle empty stream (explicit array typing)", async (t) => {
47 |     const input: string[] = []; // explicit typing is needed here since TS can't deduce the type of an empty array
48 | const stringStream = StringStream.from(input);
49 | const result = await stringStream.split("-").toArray();
50 |
51 | t.deepEqual(result, []);
52 | });
53 |
54 | test("StringStream split can handle empty stream (explicit stream typing)", async (t) => {
55 |     const stringStream = StringStream.from([]); // without generic types this would result in a compile error, since TS can't deduce the empty array type
56 | const result = await stringStream.split("-").toArray();
57 |
58 | t.deepEqual(result, []);
59 | });
60 |
61 | test("StringStream split can handle a stream containing only one split value", async (t) => {
62 | const stringStream = StringStream.from(["-"]);
63 | const result = await stringStream.split("-").toArray();
64 |
65 | t.deepEqual(result, ["", ""]);
66 | });
67 |
68 | test("StringStream can split chunks by given regexp #1", async (t) => {
69 | const stringStream = StringStream.from(["foo1bar", "baz111bax", "123", "345", "011", "201"]);
70 | const result = await stringStream.split(/1+/).toArray();
71 |
72 | t.deepEqual(result, ["foo", "barbaz", "bax", "233450", "20", ""]);
73 | });
74 |
75 | test("StringStream can split chunks by given regexp #2", async (t) => {
76 | const stringStream = StringStream.from(["foo1bar", "baz111bax", "123", "345", "011", "201"]);
77 | const result = await stringStream.split(/1{1,2}/).toArray();
78 |
79 | t.deepEqual(result, ["foo", "barbaz", "", "bax", "233450", "20", ""]);
80 | });
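// split also accepts a RegExp. Note the difference above: /1+/ consumes a whole
// run of "1"s as one separator, while /1{1,2}/ splits "111" into "11" + "1",
// producing the extra empty string.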
81 |
--------------------------------------------------------------------------------
/test/unit/utils.spec.ts:
--------------------------------------------------------------------------------
1 | import test from "ava";
2 | import { isAsyncFunction, trace } from "../../src/utils";
3 | import { TransformFunction } from "../../src/types";
4 |
5 | test("isAsyncFunction correctly detects sync function (function declaration)", t => {
6 | function sync() {
7 | return 1;
8 | }
9 |
10 | t.false(isAsyncFunction(sync));
11 | });
12 |
13 | test("isAsyncFunction correctly detects sync function (const lambda)", t => {
14 | const sync = () => {
15 | return 1;
16 | };
17 |
18 | t.false(isAsyncFunction(sync));
19 | });
20 |
21 | test("isAsyncFunction correctly detects async function (function declaration)", t => {
22 | async function async() {
23 | return Promise.resolve(1);
24 | }
25 |
26 | t.true(isAsyncFunction(async));
27 | });
28 |
29 | test("isAsyncFunction correctly detects async function (const lambda)", t => {
30 | const async = async () => {
31 | return Promise.resolve(1);
32 | };
33 |
34 | t.true(isAsyncFunction(async));
35 | });
36 |
37 | test("isAsyncFunction correctly detects async function (TransformFunction)", t => {
38 | const apt: TransformFunction<{a: number}, {[k: string]: number}> = async ({ a }: { a: number }) => {
39 | return { a, n: a % 2, x: 1 };
40 | };
41 |
42 | t.true(isAsyncFunction(apt));
43 | });
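// isAsyncFunction (src/utils.ts) presumably inspects the function itself rather
// than its return value, e.g. something along the lines of:
//
//     const isAsyncFunction = (fn: unknown): boolean =>
//         typeof fn === "function" && fn.constructor.name === "AsyncFunction";
//
// which is consistent with the skipped test below: a sync function returning a
// Promise is not detected.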
44 |
45 | // This cannot really be detected without calling the function and checking its result.
46 | // We don't need this ATM, but it's good to keep in mind that such a function is a valid JS function too.
47 | test.skip("isAsyncFunction correctly detects sync function returning promise", t => {
48 | function syncPromise() {
49 | return Promise.resolve(1);
50 | }
51 |
52 | t.true(isAsyncFunction(syncPromise));
53 | });
54 |
55 | test("trace does not throw", t => {
56 | t.notThrows(() => {
57 | trace("foo");
58 | });
59 | });
60 |
--------------------------------------------------------------------------------
/tsconfig.build.json:
--------------------------------------------------------------------------------
1 | {
2 | "extends": "./tsconfig.json",
3 | "compilerOptions": {
4 | "sourceMap": false,
5 | "removeComments": false,
6 | "outDir": "build"
7 | },
8 | "watchOptions": {
9 | "watchFile": "useFsEvents",
10 | "watchDirectory": "useFsEvents",
11 | "excludeDirectories": ["node_modules", "build", "dist", "coverage", ".nyc_output", "scripts"],
12 | },
13 | "include": [
14 | "src",
15 | "test",
16 | ]
17 | }
18 |
--------------------------------------------------------------------------------
/tsconfig.json:
--------------------------------------------------------------------------------
1 | {
2 | "compilerOptions": {
3 | "module": "commonjs",
4 | "moduleResolution": "node",
5 | "declaration": true,
6 | "noImplicitAny": true,
7 | "noImplicitThis": true,
8 | "noUnusedLocals": true,
9 | "removeComments": true,
10 | "noLib": false,
11 | "emitDecoratorMetadata": true,
12 | "experimentalDecorators": true,
13 | "sourceMap": true,
14 | "allowJs": true,
15 | "target": "es2017",
16 | "lib": ["es2020", "dom"],
17 | "strict": true,
18 | "resolveJsonModule": true,
19 | "forceConsistentCasingInFileNames": true,
20 | "esModuleInterop": true,
21 | "incremental": false,
22 | "outDir": "dist"
23 | },
24 | "typeAcquisition": {
25 | "include": ["node"]
26 | },
27 | "include": [
28 | "src"
29 | ]
30 | }
31 |
--------------------------------------------------------------------------------