├── .eslintrc.js
├── .github
│   └── workflows
│       └── main.yml
├── .gitignore
├── .prettierignore
├── .prettierrc.js
├── CHANGELOG.md
├── CHANGES.md
├── LICENSE
├── Makefile
├── README.md
├── api.md
├── lib
│   └── index.js
├── package.json
├── test
│   ├── .eslintrc
│   ├── etc
│   │   ├── corrupt.json
│   │   └── small.json
│   └── index.js
└── tools
    └── githooks
        └── pre-push

/.eslintrc.js:
--------------------------------------------------------------------------------
'use strict';

module.exports = {
    // eslint: recommended automatically enables most/all rules from the
    // possible errors section and more:
    // http://eslint.org/docs/rules/#possible-errors
    extends: ['plugin:prettier/recommended'],
    env: {
        browser: false,
        node: true,
        es6: true,
        mocha: true
    },
    plugins: ['prettier'],
    rules: {
        'prettier/prettier': 'error',

        // possible errors
        'no-cond-assign': ['error'],
        'no-constant-condition': ['error'],
        'no-control-regex': ['error'],
        'no-debugger': ['error'],
        'no-dupe-args': ['error'],
        'no-dupe-keys': ['error'],
        'no-duplicate-case': ['error'],
        'no-empty': ['error'],
        'no-empty-character-class': ['error'],
        'no-ex-assign': ['error'],
        'no-extra-boolean-cast': ['error'],
        'no-extra-semi': ['error'],
        'no-func-assign': ['error'],
        // this is for variable hoisting, not necessary if we use block scoped declarations
        // "no-inner-declarations": ["error", "both" ],
        'no-invalid-regexp': ['error'],
        'no-irregular-whitespace': ['error'],
        'no-reserved-keys': ['off'],
        'no-regex-spaces': ['error'],
        'no-sparse-arrays': ['error'],
        'no-unreachable': ['error'],
        'no-unsafe-negation': ['error'],
        'use-isnan': ['error'],
        'valid-jsdoc': [
            'error',
            {
                requireReturnDescription: false
            }
        ],
        'valid-typeof': ['error'],

        // best practices
        'array-callback-return': ['error'],
        'block-scoped-var': ['error'],
        'class-methods-use-this': ['error'],
        complexity: ['warn'],
        'consistent-return': ['error'],
        curly: ['error'],
        'default-case': ['error'],
        'dot-notation': ['error', { allowKeywords: true }],
        eqeqeq: ['error'],
        'guard-for-in': ['error'],
        'no-alert': ['error'],
        'no-caller': ['error'],
        'no-case-declarations': ['error'],
        'no-div-regex': ['error'],
        'no-empty-function': ['error'],
        'no-empty-pattern': ['error'],
        'no-eq-null': ['error'],
        'no-eval': ['error'],
        'no-extend-native': ['error'],
        'no-extra-bind': ['error'],
        'no-extra-label': ['error'],
        'no-fallthrough': ['error'],
        'no-floating-decimal': ['error'],
        'no-global-assign': ['error'],
        'no-implicit-coercion': ['error'],
        'no-implied-eval': ['error'],
        'no-iterator': ['error'],
        'no-labels': ['error'],
        'no-lone-blocks': ['error'],
        'no-loop-func': ['error'],
        'no-magic-numbers': ['off'],
        'no-multi-spaces': ['off'],
        'no-new': ['error'],
        'no-new-func': ['error'],
        'no-new-wrappers': ['error'],
        'no-octal': ['error'],
        'no-octal-escape': ['error'],
        'no-param-reassign': ['error'],
        'no-proto': ['error'],
        'no-redeclare': ['error'],
        'no-return-assign': ['error'],
        'no-script-url': ['error'],
        'no-self-assign': ['error'],
        'no-self-compare': ['error'],
        'no-sequences': ['error'],
        'no-throw-literal': ['error'],
        'no-unmodified-loop-condition': ['error'],
        'no-unused-expressions': ['error'],
        'no-unused-labels': ['error'],
        'no-useless-call': ['error'],
        'no-useless-concat': ['error'],
        'no-var': ['error'],
        'no-void': ['error'],
        'no-warning-comments': ['warn'],
        'no-with': ['error'],
        'prefer-const': ['error'],
        'wrap-iife': ['error'],
        yoda: ['error', 'never'],

        // strict mode
        strict: ['error', 'global'],

        // variables
        'no-catch-shadow': ['error'],
        'no-delete-var': ['error'],
        'no-shadow': ['error'],
        'no-shadow-restricted-names': ['error'],
        'no-undef': ['error'],
        'no-undef-init': ['error'],
        'no-unused-vars': ['error', { vars: 'all', args: 'none' }],
        'no-use-before-define': ['error', 'nofunc'],

        // node.js
        'callback-return': [
            'error',
            ['callback', 'cb', 'cb1', 'cb2', 'cb3', 'next', 'innerCb', 'done']
        ],
        'global-require': ['error'],
        'handle-callback-err': ['error', '^.*(e|E)rr'],
        'no-mixed-requires': ['error'],
        'no-new-require': ['error'],
        'no-path-concat': ['error'],
        'no-process-exit': ['error']
    }
};
--------------------------------------------------------------------------------
/.github/workflows/main.yml:
--------------------------------------------------------------------------------
name: CI
on:
    - push
    - pull_request
permissions:
    contents: read
jobs:
    test:
        name: Use Node.js with ${{ matrix.node-version }}

        runs-on: ${{ matrix.os }}
        strategy:
            fail-fast: false
            matrix:
                os: [ubuntu-latest]
                node-version: [lts/-2, lts/-1, lts/*]
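                # lts/* resolves to the newest LTS release line; lts/-1 and
                # lts/-2 select the two lines before it (setup-node syntax).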
        steps:
            - uses: actions/checkout@v3
            - uses: actions/setup-node@v3
              with:
                  node-version: ${{ matrix.node-version }}
            - run: make clean
            - run: make coverage
            - run: make report-coverage

            - name: Coveralls
              uses: coverallsapp/github-action@master
              with:
                  github-token: ${{ secrets.GITHUB_TOKEN }}
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
# OS generated files #
######################
.DS_Store
.DS_Store?
._*
.Spotlight-V100
.Trashes
ehthumbs.db
Thumbs.db

/node_modules/
npm-debug.log
yarn.lock
package-lock.json

# Vim files #
#############
[._]*.s[a-w][a-z]
[._]s[a-w][a-z]
*.un~
Session.vim
.netrwhist
*~

# Unit Test Coverage #
######################
coverage/
.nyc_output/
--------------------------------------------------------------------------------
/.prettierignore:
--------------------------------------------------------------------------------
.githooks
--------------------------------------------------------------------------------
/.prettierrc.js:
--------------------------------------------------------------------------------
'use strict';

module.exports = {
    singleQuote: true,
    printWidth: 80,
    tabWidth: 4,
    semi: true,
    arrowParens: 'always'
};
--------------------------------------------------------------------------------
/CHANGELOG.md:
--------------------------------------------------------------------------------
## 3.2.0 (2022-10-10)


#### Features

* support auto detection of array as the primary root object (#27) ([98f306dc](git@github.com:DonutEspresso/big-json.git/commit/98f306dc))
* support parsing from buffer (#26) ([f33d60a6](git@github.com:DonutEspresso/big-json.git/commit/f33d60a6))


## 3.1.0 (2019-12-14)


#### Features

* **promises:** async methods now support cb and promises (#17) ([119af301](git@github.com:DonutEspresso/big-json.git/commit/119af301))


## 3.0.0 (2019-08-15)


#### Breaking Changes

* drop support for node 6
  ([0f14d027](git@github.com:DonutEspresso/big-json.git/commit/0f14d027))
--------------------------------------------------------------------------------
/CHANGES.md:
--------------------------------------------------------------------------------
## 2.0.2

#### Fix

* json-stream-stringify@1.6.1 to support noDecycle (#10) ([7971ad1](https://github.com/DonutEspresso/big-json/commit/7971ad1e02d64e549a030eb1b6de84bd4e8c1232))

## 2.0.1

#### Chore

* update dev deps. move to nyc (#8) ([0fb40b1](https://github.com/DonutEspresso/big-json/commit/0fb40b10e52d10c8fde671a4eafe88119414a329))

#### Upgrade

* swap mississippi for through2 (#9) ([6b95783](https://github.com/DonutEspresso/big-json/commit/6b9578329e285a30538a9c75b90a3e3e6c2e2dd4))

## 2.0.0

#### Breaking

* move to JSONStream.
  remove multibyte option (#6) ([43344be](https://github.com/DonutEspresso/big-json/commit/43344be1fd0f0adf6864b2e1faeba09ac3ede82d))

#### Upgrade

* rev dev dependencies (#5) ([58aaa92](https://github.com/DonutEspresso/big-json/commit/58aaa927b36c7f9577171293e16befdd18631e1f))

## 1.2.0

#### Chore

* minor changelog script fix ([a17a035](https://github.com/DonutEspresso/big-json/commit/a17a035e62d6d449a7ed69f59758dfa7425063d5))
* update changelog script ([73980ad](https://github.com/DonutEspresso/big-json/commit/73980adb898a8de4fc5a9d4ba5e5aff84420b3c7))

#### New

* add callback style APIs (#4) ([a12598f](https://github.com/DonutEspresso/big-json/commit/a12598fbfc70307427c9b34e3e05ccce59553919))

## 1.1.0

#### Chore

* fix changelog bug with versioning ([aa23d6b](https://github.com/DonutEspresso/big-json/commit/aa23d6b0cc70c4032625bac0bc6b0880cf603f97))

#### New

* add support for multibyte chars (#2) ([ec4bb36](https://github.com/DonutEspresso/big-json/commit/ec4bb3661ecde84c2d214e4b8d24a4f9725d8722))

## 1.0.0

#### Chore

* more changelog fixes ([d76911b](https://github.com/DonutEspresso/big-json/commit/d76911b0a76dc9fd09c0f89427781f96b53fcf7f))
* more fixes to changelog script ([3ff79fa](https://github.com/DonutEspresso/big-json/commit/3ff79fae51892eb71bfddb26e907386cc5d5ce18))
* update changelog script, add CHANGES.md ([d9a9ac1](https://github.com/DonutEspresso/big-json/commit/d9a9ac158b45340d34f6e9acb79f715a39f65801))
* update license in README ([2308c03](https://github.com/DonutEspresso/big-json/commit/2308c033cf46315a3539ed1a2054ab05fd131714))
* add gitignore file ([5b30220](https://github.com/DonutEspresso/big-json/commit/5b302207c69758eb8104ca9f5dc0718abd118930))

#### New

* first release (#1) ([d0997ee](https://github.com/DonutEspresso/big-json/commit/d0997ee20ee11a27e7813269cef15610e5eb9e74))
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
The MIT License (MIT)

Copyright (c) 2017 Alex Liu

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
--------------------------------------------------------------------------------
/Makefile:
--------------------------------------------------------------------------------
#
# Directories
#
ROOT_SLASH := $(dir $(realpath $(firstword $(MAKEFILE_LIST))))
ROOT := $(patsubst %/,%,$(ROOT_SLASH))
LIB := $(ROOT)/lib
TEST := $(ROOT)/test
TOOLS := $(ROOT)/tools
GITHOOKS_SRC := $(TOOLS)/githooks
GITHOOKS_DEST := $(ROOT)/.git/hooks


#
# Generated Files & Directories
#
NODE_MODULES := $(ROOT)/node_modules
NODE_BIN := $(NODE_MODULES)/.bin
COVERAGE := $(ROOT)/.nyc_output
COVERAGE_RES := $(ROOT)/coverage
PACKAGE_LOCK := $(ROOT)/package-lock.json


#
# Tools and binaries
#
DOCUMENT := $(NODE_BIN)/documentation
NPM := npm
ESLINT := $(NODE_BIN)/eslint
MOCHA := $(NODE_BIN)/mocha
NYC := $(NODE_BIN)/nyc
PRETTIER := $(NODE_BIN)/prettier
UNLEASH := $(NODE_BIN)/unleash
CONVENTIONAL_RECOMMENDED_BUMP := $(NODE_BIN)/conventional-recommended-bump


#
# Files and globs
#
PACKAGE_JSON := $(ROOT)/package.json
API_MD := $(ROOT)/api.md
GITHOOKS := $(wildcard $(GITHOOKS_SRC)/*)
LCOV := $(COVERAGE)/lcov.info
ALL_FILES := $(shell find $(ROOT) \
	-not \( -path $(NODE_MODULES) -prune \) \
	-not \( -path $(COVERAGE) -prune \) \
	-not \( -path $(COVERAGE_RES) -prune \) \
	-name '*.js' -type f)
TEST_FILES := $(shell find $(TEST) -name '*.js' -type f)

#
# Targets
#

$(NODE_MODULES): $(PACKAGE_JSON) ## Install node_modules
	@$(NPM) install
	@touch $(NODE_MODULES)


.PHONY: docs
docs: $(DOCUMENT) $(ALL_FILES)
	@$(DOCUMENT) build $(LIB) -f md -o $(API_MD)


.PHONY: help
help:
	@perl -nle'print $& if m{^[a-zA-Z_-]+:.*?## .*$$}' $(MAKEFILE_LIST) \
		| sort | awk 'BEGIN {FS = ":.*?## "}; \
		{printf "\033[36m%-30s\033[0m %s\n", $$1, $$2}'


.PHONY: githooks
githooks: $(GITHOOKS) ## Symlink githooks
	@$(foreach hook,\
		$(GITHOOKS),\
		ln -sf $(hook) $(GITHOOKS_DEST)/$(hook##*/);\
	)


.PHONY: release-dry
release-dry: $(NODE_MODULES) ## Dry run of `release` target
	@$(UNLEASH) -d --type=$(shell $(CONVENTIONAL_RECOMMENDED_BUMP) -p angular)


.PHONY: release
release: $(NODE_MODULES) ## Versions, tags, and updates changelog based on commit messages
	@$(UNLEASH) --type=$(shell $(CONVENTIONAL_RECOMMENDED_BUMP) -p angular) --no-publish
	@$(NPM) publish


.PHONY: lint
lint: $(NODE_MODULES) $(ESLINT) $(ALL_FILES) ## Run lint checker (eslint).
	@$(ESLINT) $(ALL_FILES)


.PHONY: lint-fix
lint-fix: $(NODE_MODULES) $(PRETTIER) $(ALL_FILES) ## Reprint code (prettier, eslint).
	@$(PRETTIER) --write $(ALL_FILES)
	@$(ESLINT) --fix $(ALL_FILES)


.PHONY: prepush
prepush: $(NODE_MODULES) lint coverage docs ## Git pre-push hook task. Run before committing and pushing.


.PHONY: test
test: $(NODE_MODULES) $(MOCHA) ## Run unit tests.
	@$(MOCHA) -R spec --full-trace --no-timeouts $(TEST_FILES)


.PHONY: coverage
coverage: $(NODE_MODULES) $(NYC) ## Run unit tests with coverage reporting. Generates reports into /coverage.
	@$(NYC) --reporter=html --reporter=text make test


.PHONY: report-coverage
report-coverage: $(NODE_MODULES) $(NYC) ## Report unit test coverage to coveralls. Used only in CI.
	@$(NYC) report --reporter=lcov


.PHONY: clean
clean: ## Cleans unit test coverage files and node_modules.
	@rm -rf $(NODE_MODULES) $(COVERAGE) $(COVERAGE_RES) $(PACKAGE_LOCK)


#
## Debug -- print out a variable via `make print-FOO`
#
print-% : ; @echo $* = $($*)
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
# big-json

[![NPM Version](https://img.shields.io/npm/v/big-json.svg)](https://npmjs.org/package/big-json)
[![CI](https://github.com/DonutEspresso/big-json/actions/workflows/main.yml/badge.svg)](https://github.com/DonutEspresso/big-json/actions/workflows/main.yml)
[![Coverage Status](https://coveralls.io/repos/github/DonutEspresso/big-json/badge.svg?branch=master)](https://coveralls.io/github/DonutEspresso/big-json?branch=master)

> A stream based implementation of JSON.parse and JSON.stringify for big POJOs

There exist many stream based implementations of JSON parsing or stringifying
for large data sets. These implementations typically target time-series data,
newline-delimited data, or other array-like data, e.g., logging records or
other continuously flowing data.

This module hopes to fill a gap in the ecosystem: parsing large JSON objects
that are just _really_ big objects. With large in-memory objects, it is
possible to run up against the V8 string length limitation, which is currently
(as of 9/2017) limited to 512MB. Thus, if your large object has enough keys
or values, it is possible to exceed the string length limit when calling
[JSON.stringify](https://github.com/nodejs/node/issues/10738).

Similarly, when retrieving stored JSON from disk or over the network, if the
JSON stringified representation of the object exceeds the string length limit,
the process will throw when attempting to convert the Buffer into a string.

The only way to work with such large objects is to use a streaming
implementation of both `JSON.parse` and `JSON.stringify`. This module does just
that by normalizing the APIs for different modules that have been previously
published, combining both parse and stringify functions into a single module.
These underlying modules are subject to change at any time.

The major caveat is that the reconstructed POJO must be able to fit in memory.
If the reconstructed POJO cannot be stored in memory, then it may be time to
reconsider the way these large objects are being transported and processed.

This module currently uses
[JSONStream](https://github.com/dominictarr/JSONStream) for parsing, and
[json-stream-stringify](https://github.com/Faleij/json-stream-stringify) for
stringification.
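
To make the failure mode concrete, here is a small illustrative sketch. The
exact ceiling depends on your Node.js/V8 build, so treat the numbers below as
assumptions rather than guarantees:

```js
// Illustration only: the maximum string length varies across V8 versions.
try {
    'x'.repeat(2 ** 30); // ask for ~1 billion characters in a single string
} catch (err) {
    console.error(err.message); // => "Invalid string length"
}

// JSON.stringify on a sufficiently large object fails the same way, since it
// must materialize the entire JSON text as one string.
```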

## Getting Started

Install the module with: `npm install big-json`

## Usage

To parse a big JSON payload coming from an external source:

```js
const fs = require('fs');
const path = require('path');
const json = require('big-json');

const readStream = fs.createReadStream('big.json');
const parseStream = json.createParseStream();

parseStream.on('data', function(pojo) {
    // => receive reconstructed POJO
});

readStream.pipe(parseStream);
```

To stringify JSON:

```js
const json = require('big-json');

const stringifyStream = json.createStringifyStream({
    body: BIG_POJO
});

stringifyStream.on('data', function(strChunk) {
    // => BIG_POJO will be sent out in JSON chunks as the object is traversed
});
```

## API

### createParseStream()
Parses an incoming stream and accumulates it into a POJO. Supports both
objects and arrays as the root object of the streamed data.

__Returns__: {Stream} a JSON.parse stream

### createStringifyStream(opts)

* `opts` {Object} an options object
* `opts.body` {Object | Array} an object or array to JSON.stringify

__Returns__: {Stream} a JSON.stringify stream

### parse(opts, [callback])
An async JSON.parse using the same underlying stream implementation. If a
callback is not passed, a promise is returned.

* `opts` {Object} an options object
* `opts.body` {String | Buffer} the string or buffer to be parsed
* `callback` {Function} a callback function

__Returns__: {Object | Array} the parsed JSON

### stringify(opts, [callback])
An async JSON.stringify using the same underlying stream implementation. If a
callback is not passed, a promise is returned.

* `opts` {Object} an options object
* `opts.body` {Object} the object to be stringified
* `callback` {Function} a callback function

__Returns__: {String} the stringified JSON
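
A quick sketch of the two async flavors described above, round-tripping a
deliberately tiny object (real inputs would be far larger):

```js
const json = require('big-json');

json.parse({ body: '{"a":[1,2,3]}' })
    .then(function(pojo) {
        // pojo => { a: [ 1, 2, 3 ] }
        return json.stringify({ body: pojo });
    })
    .then(function(str) {
        console.log(str); // => {"a":[1,2,3]}
    })
    .catch(console.error);
```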

## Contributing

Ensure that all linting and codestyle tasks are passing. Add unit tests for any
new or changed functionality.

To start contributing, install the git prepush hooks:

```sh
make githooks
```

Before committing, lint and test your code using the included Makefile:

```sh
make prepush
```

## License

Copyright (c) 2019 Alex Liu

Licensed under the MIT license.
--------------------------------------------------------------------------------
/api.md:
--------------------------------------------------------------------------------

### Table of Contents

- [createParseStream][1]
- [createStringifyStream][2]
  - [Parameters][3]
- [\_parse][4]
  - [Parameters][5]
- [parse][6]
  - [Parameters][7]
- [stringify][8]
  - [Parameters][9]

## createParseStream

Create a JSON.parse that uses a stream interface. The underlying
implementation is handled by JSONStream. This is merely a thin wrapper for
convenience that handles the reconstruction/accumulation of each
individually parsed field.

The advantage of this approach is that by also using a streams interface,
any JSON parsing or stringification of large objects won't block the CPU.

Returns **[Stream][10]**

## createStringifyStream

create a JSON.stringify readable stream.

### Parameters

- `opts` **[Object][11]** an options object
- `opts.body` **[Object][11]** the JS object to JSON.stringify

Returns **[Stream][10]**

## \_parse

stream based JSON.parse. async function signature to abstract over streams.

### Parameters

- `opts` **[Object][11]** options to pass to parse stream
- `opts.body` **([String][12] \| [Buffer][13])** string or buffer to parse
- `callback` **[Function][14]** a callback function

Returns **([Object][11] \| [Array][15])** the parsed JSON

## parse

stream based JSON.parse. async function signature to abstract over streams.
variadic arguments to support both promise and callback based usage.

### Parameters

- `opts` **[Object][11]** options to pass to parse stream
- `opts.body` **[String][12]** string to parse
- `callback` **[Function][14]?** a callback function. if empty, returns a
  promise.

Returns **([Object][11] \| [Array][15])** the parsed JSON

## stringify

stream based JSON.stringify. async function signature to abstract over
streams. variadic arguments to support both promise and callback based usage.

### Parameters

- `opts` **[Object][11]** options to pass to stringify stream
- `callback` **[Function][14]?** a callback function. if empty, returns a
  promise.

Returns **[String][12]** the stringified JSON

[1]: #createparsestream

[2]: #createstringifystream

[3]: #parameters

[4]: #_parse

[5]: #parameters-1

[6]: #parse

[7]: #parameters-2

[8]: #stringify

[9]: #parameters-3

[10]: https://nodejs.org/api/stream.html

[11]: https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Object

[12]: https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/String

[13]: https://nodejs.org/api/buffer.html

[14]: https://developer.mozilla.org/docs/Web/JavaScript/Reference/Statements/function

[15]: https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Array
--------------------------------------------------------------------------------
/lib/index.js:
--------------------------------------------------------------------------------
'use strict';

// core modules
const { PassThrough } = require('stream');
const util = require('util');

// external modules
const assert = require('assert-plus');
const intoStream = require('into-stream');
const JSONStream = require('JSONStream');
const through2 = require('through2');
const once = require('once').strict;
const { JsonStreamStringify } = require('json-stream-stringify');

// promisified implementations of callback APIs.
const _parsePromisified = util.promisify(_parse);
const _stringifyPromisified = util.promisify(_stringify);

/**
 * Create a JSON.parse that uses a stream interface. The underlying
 * implementation is handled by JSONStream. This is merely a thin wrapper for
 * convenience that handles the reconstruction/accumulation of each
 * individually parsed field.
 *
 * The advantage of this approach is that by also using a streams interface,
 * any JSON parsing or stringification of large objects won't block the CPU.
 * @public
 * @return {Stream}
 */
function createParseStream() {
    // when the parse stream gets chunks of data, it is an object with key/val
    // fields. accumulate the parsed fields.
    let accumulator = null;
    const parseStream = JSONStream.parse('$*');
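    // '$*' causes JSONStream to emit one { key, value } pair per top-level
    // property; the data handler below folds those back into the accumulator.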
    const wrapperStream = through2.obj(
        function write(chunk, enc, cb) {
            // try to be clever (oh noes). assume we parse objects by default.
            // if the stream starts and it looks like an array, set the
            // starting value of the accumulator to an array. we opt into the
            // array, with default accumulator as an object. this introduces
            // less risk with this feature for any unexpected circumstances
            // (hopefully).
            if (accumulator === null) {
                const chunkStr = chunk.toString(enc).trim();
                // if the trimmed chunk is an empty string, delay
                // initialization of the accumulator till we get something
                // meaningful
                if (chunkStr !== '') {
                    if (chunkStr.charAt(0) === '[') {
                        accumulator = [];
                    } else {
                        accumulator = {};
                    }
                }
            }
            parseStream.write(chunk);
            return cb();
        },
        function flush(cb) {
            parseStream.on('end', function() {
                return cb(null, accumulator);
            });
            parseStream.end();
        }
    );

    parseStream.on('data', function(chunk) {
        // this syntax should work when accumulator is object or array
        accumulator[chunk.key] = chunk.value;
    });

    // make sure error is forwarded on to wrapper stream.
    parseStream.on('error', function(err) {
        wrapperStream.emit('error', err);
    });

    return wrapperStream;
}

/**
 * create a JSON.stringify readable stream.
 * @public
 * @param {Object} opts an options object
 * @param {Object} opts.body the JS object to JSON.stringify
 * @function createStringifyStream
 * @return {Stream}
 */
function createStringifyStream(opts) {
    assert.object(opts, 'opts');
    assert.ok(
        Array.isArray(opts.body) || typeof opts.body === 'object',
        'opts.body must be an array or object'
    );

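    // constructor args are (value, replacer, space, cycle); cycle=false
    // assumes opts.body has no circular references, mirroring the behavior
    // of native JSON.stringify.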
    return new JsonStreamStringify(opts.body, null, null, false);
}

/**
 * stream based JSON.parse. async function signature to abstract over streams.
 * @public
 * @param {Object} opts options to pass to parse stream
 * @param {String|Buffer} opts.body string or buffer to parse
 * @param {Function} callback a callback function
 * @return {Object|Array} the parsed JSON
 */
function _parse(opts, callback) {
    assert.object(opts, 'opts');
    assert.ok(
        typeof opts.body === 'string' || Buffer.isBuffer(opts.body),
        'opts.body'
    );
    assert.func(callback, 'callback');

    const sourceStream = intoStream(opts.body);
    const parseStream = createParseStream();
    const cb = once(callback);

    parseStream.on('data', function(data) {
        return cb(null, data);
    });

    parseStream.on('error', function(err) {
        return cb(err);
    });

    sourceStream.pipe(parseStream);
}

/**
 * stream based JSON.parse. async function signature to abstract over streams.
 * variadic arguments to support both promise and callback based usage.
 * @public
 * @function parse
 * @param {Object} opts options to pass to parse stream
 * @param {String} opts.body string to parse
 * @param {Function} [callback] a callback function. if empty, returns a
 * promise.
 * @return {Object|Array} the parsed JSON
 */
function parse(opts, callback) {
    // if more than one argument was passed, assume it's a callback based usage.
    if (arguments.length > 1) {
        return _parse(opts, callback);
    }

    // otherwise, caller expects a promise.
    return _parsePromisified(opts);
}

/**
 * stream based JSON.stringify. async function signature to abstract over
 * streams.
 * @private
 * @param {Object} opts options to pass to stringify stream
 * @param {Function} callback a callback function
 * @return {String} the stringified JSON
 */
function _stringify(opts, callback) {
    assert.object(opts, 'opts');
    assert.func(callback, 'callback');

    let stringified = '';
    const stringifyStream = createStringifyStream(opts);
    const passthroughStream = new PassThrough();
    const cb = once(callback);

    // setup the passthrough stream as a sink
    passthroughStream.on('data', function(chunk) {
        stringified += chunk;
    });

    passthroughStream.on('end', function() {
        return cb(null, stringified);
    });

    // don't know what errors stringify stream may emit, but pass them back
    // up.
    stringifyStream.on('error', function(err) {
        return cb(err);
    });

    stringifyStream.pipe(passthroughStream);
}

/**
 * stream based JSON.stringify. async function signature to abstract over
 * streams. variadic arguments to support both promise and callback based usage.
 * @public
 * @function stringify
 * @param {Object} opts options to pass to stringify stream
 * @param {Function} [callback] a callback function. if empty, returns a
 * promise.
 * @return {String} the stringified JSON
 */
function stringify(opts, callback) {
    // if more than one argument was passed, assume it's a callback based usage.
    if (arguments.length > 1) {
        return _stringify(opts, callback);
    }

    // otherwise, caller expects a promise.
    return _stringifyPromisified(opts);
}

module.exports = {
    createParseStream,
    createStringifyStream,
    parse,
    stringify
};
--------------------------------------------------------------------------------
/package.json:
--------------------------------------------------------------------------------
{
    "name": "big-json",
    "version": "3.2.0",
    "main": "lib/index.js",
    "description": "A stream based implementation of JSON.parse and JSON.stringify for big POJOs",
    "homepage": "https://github.com/DonutEspresso/big-json",
    "author": {
        "name": "Alex Liu",
        "email": "donutespresso@gmail.com"
    },
    "repository": {
        "type": "git",
        "url": "git@github.com:DonutEspresso/big-json.git"
    },
    "license": "MIT",
    "files": [
        "lib"
    ],
    "keywords": [
        "big-json",
        "json",
        "streams",
        "parse",
        "stringify"
    ],
    "devDependencies": {
        "chai": "^4.2.0",
        "conventional-changelog-angular": "^5.0.3",
        "conventional-recommended-bump": "^6.0.0",
        "coveralls": "^3.0.6",
        "documentation": "^12.0.0",
        "eslint": "^6.1.0",
        "eslint-config-prettier": "^6.0.0",
        "eslint-plugin-prettier": "^3.1.0",
        "is-stream": "^2.0.0",
        "mocha": "^6.2.0",
        "nyc": "^15.1.0",
        "prettier": "^1.18.2",
        "unleash": "^2.0.1"
    },
    "dependencies": {
        "assert-plus": "^1.0.0",
        "into-stream": "^6.0.0",
        "json-stream-stringify": "^3.0.0",
        "JSONStream": "^1.3.1",
        "once": "^1.4.0",
        "through2": "^4.0.2"
    }
}
--------------------------------------------------------------------------------
/test/.eslintrc:
--------------------------------------------------------------------------------
{
    "env": {
        "mocha": true
    }
}
--------------------------------------------------------------------------------
/test/etc/corrupt.json:
--------------------------------------------------------------------------------
{
    "a": 1,
    "b": "foo
}
--------------------------------------------------------------------------------
/test/etc/small.json:
--------------------------------------------------------------------------------
{
    "artist": "The Beatles",
    "album": "Sgt Pepper Lonely Hearts Club Band",
    "year": 1967,
    "genre": "Rock",
    "tracks": [
        {
            "name": "Sgt Pepper's Lonely Hearts Club Band"
        },
        {
            "name": "With A Little Help From My Friends"
        }
    ],
    "comments": "What I can't even"
}
--------------------------------------------------------------------------------
/test/index.js:
--------------------------------------------------------------------------------
'use strict';

// core modules
const fs = require('fs');
const path = require('path');
const stream = require('stream');

// external modules
const assert = require('chai').assert;
const isStream = require('is-stream');

// local files
const json = require('../lib');
const POJO = require('./etc/small.json');
const STRINGIFIED_POJO = JSON.stringify(POJO);

describe('big-json', function() {
    describe('createStringifyStream', function() {
        it('should create a stringify stream', function(done) {
            const stringifyStream = json.createStringifyStream({
                body: POJO
            });

            assert.ok(stringifyStream);
            assert.isTrue(isStream(stringifyStream));

            return done();
        });

        it('should emit JSON string on data event', function(done) {
            const stringifyStream = json.createStringifyStream({
                body: POJO
            });
            const passthrough = new stream.PassThrough();
            let stringified = '';

            passthrough.on('data', function(chunk) {
                stringified += chunk;
            });

            passthrough.on('end', function() {
                assert.equal(stringified, JSON.stringify(POJO));
                return done();
            });

            stringifyStream.pipe(passthrough);
        });

        it('should serialize repeated references', function(done) {
            const foo = { foo: 'a' };
            const body = [foo, foo];
            const stringifyStream = json.createStringifyStream({
                body
            });
            let stringified = '';

            stringifyStream.on('data', function(chunk) {
                stringified += chunk;
            });

            stringifyStream.on('end', function() {
                assert.deepEqual(stringified, JSON.stringify(body));
                return done();
            });
        });
    });

    describe('createParseStream', function() {
        it('should create a parse stream', function(done) {
            const parseStream = json.createParseStream();

            assert.ok(parseStream);
            assert.isTrue(isStream(parseStream));

            return done();
        });

        it('should allow writing to parse stream', function(done) {
            const parseStream = json.createParseStream();
            let dataValidated = false;

            parseStream.on('data', function(data) {
                assert.deepEqual(data, POJO);
                dataValidated = true;
            });

            parseStream.on('error', done);
            parseStream.on('end', function() {
                if (dataValidated === false) {
                    assert.fail('test completed without verification!');
                }
                return done();
            });
            parseStream.end(STRINGIFIED_POJO);
        });

        it('should emit "data" with reconstructed POJO and "end"', function(done) {
            const readStream = fs.createReadStream(
                path.join(__dirname, './etc/small.json')
            );
            const parseStream = json.createParseStream();
            let dataValidated = false;

            parseStream.on('data', function(pojo) {
                assert.deepEqual(POJO, pojo);
                dataValidated = true;
            });

            parseStream.on('end', function(data) {
                assert.isTrue(dataValidated);
                return done();
            });

            readStream.pipe(parseStream);
        });

        it('should pipe to subsequent streams', function(done) {
            const readStream = fs.createReadStream(
                path.join(__dirname, './etc/small.json')
            );
            const parseStream = json.createParseStream();
            let dataValidated = false;

            const afterStream = new stream.PassThrough({
                objectMode: true
            });

            afterStream.on('data', function(chunk) {
                assert.deepEqual(chunk, POJO);
                dataValidated = true;
            });

            afterStream.on('end', function() {
                assert.isTrue(dataValidated);
                return done();
            });

            readStream.pipe(parseStream).pipe(afterStream);
        });

        it('should pipe to multiple output streams', function(done) {
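            // a single parse stream feeds two destinations below; both must
            // observe the same reconstructed POJO, and both must end.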
            const readStream = fs.createReadStream(
                path.join(__dirname, './etc/small.json')
            );
            const parseStream = json.createParseStream();
            const afterStream = new stream.PassThrough({
                objectMode: true
            });
            const afterStream2 = new stream.PassThrough({
                objectMode: true
            });

            let dataValidated = false;
            let streamsCompleted = 0;

            afterStream.on('data', function(chunk) {
                assert.deepEqual(chunk, POJO);
                dataValidated = true;
            });

            afterStream.on('end', function() {
                assert.isTrue(dataValidated);

                if (++streamsCompleted === 2) {
                    return done();
                }
                return null;
            });

            afterStream2.on('data', function(chunk) {
                assert.deepEqual(chunk, POJO);
                dataValidated = true;
            });

            afterStream2.on('end', function() {
                assert.isTrue(dataValidated);

                if (++streamsCompleted === 2) {
                    return done();
                }
                return null;
            });

            readStream.pipe(parseStream).pipe(afterStream);
            parseStream.pipe(afterStream2);
        });

        it('should emit "error" event when parsing bad JSON', function(done) {
            const readStream = fs.createReadStream(
                path.join(__dirname, './etc/corrupt.json')
            );
            const parseStream = json.createParseStream();

            parseStream.on('error', function(err) {
                assert.ok(err);
                assert.equal(err.name, 'Error');
                assert.include(err.message, 'Invalid JSON');
                return done();
            });

            readStream.pipe(parseStream);
        });

        it('should handle multibyte keys and vals', function(done) {
            const parseStream = json.createParseStream();

            parseStream.on('data', function(pojo) {
                assert.deepEqual(pojo, {
                    遙: '遙遠未來的事件'
                });
                return done();
            });

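            // the buffers below deliberately split multibyte UTF-8 sequences
            // across write() boundaries; the parser must buffer partial code
            // points rather than decode each chunk independently.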
            parseStream.write('{"');
            parseStream.write(Buffer.from([0xe9, 0x81]));
            parseStream.write(Buffer.from([0x99]));
            parseStream.write('":"');
            parseStream.write(Buffer.from([0xe9, 0x81]));
            parseStream.write(Buffer.from([0x99, 0xe9, 0x81, 0xa0, 0xe6]));
            parseStream.write(Buffer.from([0x9c, 0xaa, 0xe4, 0xbe]));
            parseStream.write(
                Buffer.from([0x86, 0xe7, 0x9a, 0x84, 0xe4, 0xba, 0x8b])
            );
            parseStream.write(Buffer.from([0xe4, 0xbb, 0xb6]));
            parseStream.end('"}');
        });
    });

    describe('async JSON', function() {
        it('should stringify async (callback)', function(done) {
            json.stringify(
                {
                    body: POJO
                },
                function(err, stringified) {
                    assert.ifError(err);
                    assert.deepEqual(stringified, JSON.stringify(POJO));
                    return done();
                }
            );
        });

        it('should stringify async (promise)', function(done) {
            json.stringify({
                body: POJO
            })
                .then(function(stringified) {
                    assert.deepEqual(stringified, JSON.stringify(POJO));
                    return done();
                })
                .catch(done);
        });

        it('should parse async (callback)', function(done) {
            json.parse(
                {
                    body: JSON.stringify(POJO)
                },
                function(err, pojo) {
                    assert.ifError(err);
                    assert.deepEqual(pojo, POJO);
                    return done();
                }
            );
        });

        it('should parse async (promise)', function(done) {
            json.parse({
                body: JSON.stringify(POJO)
            })
                .then(function(pojo) {
                    assert.deepEqual(pojo, POJO);
                    return done();
                })
                .catch(done);
        });

        it('should return err in parse async (callback)', function(done) {
            json.parse(
                {
                    body: fs
                        .readFileSync(
                            path.join(__dirname, './etc/corrupt.json')
                        )
                        .toString()
                },
                function(err, pojo) {
                    assert.ok(err);
                    assert.include(err.message, 'Invalid JSON (Unexpected');
                    return done();
                }
            );
        });

        it('should return err in parse async (promise)', function(done) {
            json.parse({
                body: fs
                    .readFileSync(path.join(__dirname, './etc/corrupt.json'))
                    .toString()
            }).catch(function(err) {
                assert.ok(err);
                assert.include(err.message, 'Invalid JSON (Unexpected');
                return done();
            });
        });

        it('should parse buffer (promise)', function(done) {
            json.parse({
                body: Buffer.from(JSON.stringify(POJO))
            })
                .then(function(pojo) {
                    assert.deepEqual(pojo, POJO);
                    return done();
                })
                .catch(done);
        });

        it('should return err if body is neither string nor buffer', function(done) {
            json.parse({
                body: POJO
            }).catch(function(err) {
                assert.ok(err);
                assert.include(err.message, 'opts.body');
                return done();
            });
        });

        it('should parse root JSON Object as Object', function(done) {
            const input = { 0: { key: 'value' }, 1: { key: null } };
            json.parse({
                body: JSON.stringify(input)
            })
                .then(function(pojo) {
                    assert.deepEqual(pojo, input);
                    return done();
                })
                .catch(done);
        });

        it('should parse root JSON Array as Array', function(done) {
            const input = [{ key: 'value' }, { key: null }];
            json.parse({
                body: JSON.stringify(input)
            })
                .then(function(pojo) {
                    assert.deepEqual(pojo, input);
                    return done();
                })
                .catch(done);
        });

        it('should determine correct root object with leading whitespace', function(done) {
            const parseStream = json.createParseStream();

            parseStream.on('data', function(pojo) {
                assert.deepEqual(pojo, {
                    foo: 'bar'
                });
                return done();
            });

            parseStream.write('\n\n \n');
            parseStream.write('\n\n {');
            parseStream.write('"foo": "bar"');
            parseStream.end('\n\n }"');
        });

        it('should determine correct root array with leading whitespace', function(done) {
            const parseStream = json.createParseStream();

            parseStream.on('data', function(pojo) {
                assert.deepEqual(pojo, [0, 1, 2]);
                return done();
            });

            parseStream.write('\n\n \n');
            parseStream.write('\n\n [');
            parseStream.write('0, 1, 2');
            parseStream.end('\n\n ]"');
        });
    });
});
--------------------------------------------------------------------------------
/tools/githooks/pre-push:
--------------------------------------------------------------------------------
#!/bin/sh

# pre-push hook: run the full prepush suite (lint, coverage, docs)
make prepush
prepushProcess=$?

# propagate make's exit status
exit $prepushProcess
--------------------------------------------------------------------------------