├── test-worker.js ├── .npmignore ├── assets └── performance.png ├── tests ├── deno.sh ├── recursive.msgpack ├── floats.json ├── .mocharc.json ├── example5.json ├── bun.js ├── index.html ├── deno.ts ├── example3.json ├── example2.json ├── example.json ├── test-incomplete.js ├── test-compatibility.cjs ├── test-node-iterators.js ├── test-node-stream.js ├── strings2.json ├── example4.json ├── benchmark.js ├── benchmark-stream.cjs ├── benchmark.cjs └── sample-large.json ├── unpack.d.ts ├── unitTests └── .mocharc.json ├── pack.d.ts ├── SECURITY.md ├── index.js ├── LICENSE ├── node-index.js ├── .gitignore ├── stream.js ├── rollup.config.js ├── package.json ├── iterators.js ├── index.d.ts ├── benchmark.md ├── struct.js ├── README.md └── unpack.js /test-worker.js: -------------------------------------------------------------------------------- 1 | setTimeout(() => { 2 | console.log('done'); 3 | }, 10000); -------------------------------------------------------------------------------- /.npmignore: -------------------------------------------------------------------------------- 1 | # Dependency directories 2 | node_modules/ 3 | tests/samples 4 | .vs 5 | build/ -------------------------------------------------------------------------------- /assets/performance.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/kriszyp/msgpackr/master/assets/performance.png -------------------------------------------------------------------------------- /tests/deno.sh: -------------------------------------------------------------------------------- 1 | deno run --unstable --allow-env --allow-read --allow-write --allow-ffi tests/deno.ts -------------------------------------------------------------------------------- /tests/recursive.msgpack: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/kriszyp/msgpackr/master/tests/recursive.msgpack 
-------------------------------------------------------------------------------- /tests/floats.json: -------------------------------------------------------------------------------- 1 | [0.53232,542.5325,3252200000,6643.2,0.000000432,1.992e20,5.1,9.3242e-20,525.235,8899.32,522.42,2342.43,12211.1,8888.3,0.000432] -------------------------------------------------------------------------------- /tests/.mocharc.json: -------------------------------------------------------------------------------- 1 | { 2 | "timeout": 0, 3 | "ui": "bdd", 4 | "extension": [ 5 | "js" 6 | ], 7 | "recursive": true, 8 | "exit": true 9 | } -------------------------------------------------------------------------------- /unpack.d.ts: -------------------------------------------------------------------------------- 1 | export { Unpackr, Decoder, unpack, unpackMultiple, decode, 2 | addExtension, FLOAT32_OPTIONS, Options, Extension, clearSource, roundFloat32 } from '.' 3 | -------------------------------------------------------------------------------- /unitTests/.mocharc.json: -------------------------------------------------------------------------------- 1 | { 2 | "timeout": 0, 3 | "ui": "bdd", 4 | "extension": [ 5 | "js" 6 | ], 7 | "recursive": true, 8 | "exit": true, 9 | } -------------------------------------------------------------------------------- /pack.d.ts: -------------------------------------------------------------------------------- 1 | export { Unpackr, Decoder, Packr, Encoder, pack, encode, unpack, decode, addExtension, FLOAT32_OPTIONS, REUSE_BUFFER_MODE, RESET_BUFFER_MODE, RESERVE_START_SPACE } from '.' 
2 | -------------------------------------------------------------------------------- /SECURITY.md: -------------------------------------------------------------------------------- 1 | # Security Policy 2 | 3 | ## Supported Versions 4 | 5 | | Version | Supported | 6 | | ------- | ------------------ | 7 | | 1.4.x | :white_check_mark: | 8 | 9 | ## Reporting a Vulnerability 10 | 11 | Please report security vulnerabilities to kriszyp@gmail.com. 12 | -------------------------------------------------------------------------------- /tests/example5.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "test", 3 | "greeting": "Hello, World!", 4 | "flag": true, 5 | "littleNum": 3, 6 | "biggerNum": 32254435, 7 | "decimal":1.33, 8 | "bigDecimal": 3.5522E35, 9 | "negative": -54, 10 | "aNull": null, 11 | "more": "another string" 12 | } 13 | -------------------------------------------------------------------------------- /tests/bun.js: -------------------------------------------------------------------------------- 1 | import { unpack, pack } from '../node-index.js'; 2 | import { readFileSync } from 'fs'; 3 | let sampleData = JSON.parse(readFileSync(new URL(`./example4.json`, import.meta.url))); 4 | 5 | var data = sampleData 6 | let structures = [] 7 | var serialized = pack(data) 8 | var deserialized = unpack(serialized) 9 | console.log(deserialized) 10 | var serialized = pack(data) 11 | var deserialized = unpack(serialized) 12 | console.log(deserialized) 13 | -------------------------------------------------------------------------------- /index.js: -------------------------------------------------------------------------------- 1 | export { Packr, Encoder, addExtension, pack, encode, NEVER, ALWAYS, DECIMAL_ROUND, DECIMAL_FIT, REUSE_BUFFER_MODE, RESET_BUFFER_MODE, RESERVE_START_SPACE } from './pack.js' 2 | export { Unpackr, Decoder, C1, unpack, unpackMultiple, decode, FLOAT32_OPTIONS, clearSource, roundFloat32, 
isNativeAccelerationEnabled } from './unpack.js' 3 | export { decodeIter, encodeIter } from './iterators.js' 4 | export const useRecords = false 5 | export const mapsAsObjects = true 6 | -------------------------------------------------------------------------------- /tests/index.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 14 | 15 | 16 | 17 | 18 | 21 | 22 | 23 |
24 | 27 | 28 | 29 | -------------------------------------------------------------------------------- /tests/deno.ts: -------------------------------------------------------------------------------- 1 | import { pack, unpack, isNativeAccelerationEnabled } from 'npm:msgpackr'; 2 | import chai from "https://cdn.skypack.dev/chai@4.3.4?dts"; 3 | import sampleData from './example4.json' assert { type: 'json'}; 4 | const { assert, should: loadShould } = chai; 5 | let should = loadShould(); 6 | console.log({isNativeAccelerationEnabled}) 7 | var data = sampleData 8 | let structures = [] 9 | var serialized = pack(data) 10 | var deserialized = unpack(serialized) 11 | sampleData.should.deep.equal(deserialized); 12 | var serialized = new Uint8Array(pack(data)); 13 | var deserialized = unpack(serialized) 14 | sampleData.should.deep.equal(deserialized); 15 | console.log('done') 16 | -------------------------------------------------------------------------------- /tests/example3.json: -------------------------------------------------------------------------------- 1 | { 2 | "glossary": { 3 | "title": "example glossary", 4 | "GlossDiv": { 5 | "title": "S", 6 | "GlossList": { 7 | "GlossEntry": { 8 | "ID": "SGML", 9 | "SortAs": "SGML", 10 | "GlossTerm": "Standard Generalized Markup Language", 11 | "Acronym": "SGML", 12 | "Abbrev": "ISO 8879:1986", 13 | "GlossDef": { 14 | "para": "A meta-markup language, used to create markup languages such as DocBook.", 15 | "GlossSeeAlso": ["GML", "XML"] 16 | }, 17 | "GlossSee": "markup" 18 | } 19 | } 20 | } 21 | } 22 | } -------------------------------------------------------------------------------- /tests/example2.json: -------------------------------------------------------------------------------- 1 | {"widget": { 2 | "debug": "on", 3 | "window": { 4 | "title": "Sample Konfabulator Widget", 5 | "name": "main_window", 6 | "width": 500, 7 | "height": 500 8 | }, 9 | "image": { 10 | "src": "Images/Sun.png", 11 | "name": "sun1", 12 | "hOffset": 250, 
13 | "vOffset": 250, 14 | "alignment": "center" 15 | }, 16 | "text": { 17 | "data": "Click Here", 18 | "size": 36, 19 | "style": "bold", 20 | "name": "text1", 21 | "hOffset": 250, 22 | "vOffset": 100, 23 | "alignment": "center", 24 | "onMouseUp": "sun1.opacity = (sun1.opacity / 100) * 90;" 25 | } 26 | }} -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2020 Kris Zyp 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 
22 | -------------------------------------------------------------------------------- /node-index.js: -------------------------------------------------------------------------------- 1 | export { Packr, Encoder, addExtension, pack, encode, NEVER, ALWAYS, DECIMAL_ROUND, DECIMAL_FIT, REUSE_BUFFER_MODE, RESET_BUFFER_MODE, RESERVE_START_SPACE } from './pack.js' 2 | export { Unpackr, Decoder, C1, unpack, unpackMultiple, decode, FLOAT32_OPTIONS, clearSource, roundFloat32, isNativeAccelerationEnabled } from './unpack.js' 3 | import './struct.js' 4 | export { PackrStream, UnpackrStream, PackrStream as EncoderStream, UnpackrStream as DecoderStream } from './stream.js' 5 | export { decodeIter, encodeIter } from './iterators.js' 6 | export const useRecords = false 7 | export const mapsAsObjects = true 8 | import { setExtractor } from './unpack.js' 9 | import { createRequire } from 'module' 10 | 11 | const nativeAccelerationDisabled = process.env.MSGPACKR_NATIVE_ACCELERATION_DISABLED !== undefined && process.env.MSGPACKR_NATIVE_ACCELERATION_DISABLED.toLowerCase() === 'true'; 12 | 13 | if (!nativeAccelerationDisabled) { 14 | let extractor 15 | try { 16 | if (typeof require == 'function') 17 | extractor = require('msgpackr-extract') 18 | else 19 | extractor = createRequire(import.meta.url)('msgpackr-extract') 20 | if (extractor) 21 | setExtractor(extractor.extractStrings) 22 | } catch (error) { 23 | // native module is optional 24 | } 25 | } -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Logs 2 | logs 3 | *.log 4 | npm-debug.log* 5 | yarn-debug.log* 6 | yarn-error.log* 7 | dist 8 | 9 | # Runtime data 10 | pids 11 | *.pid 12 | *.seed 13 | *.pid.lock 14 | 15 | # Directory for instrumented libs generated by jscoverage/JSCover 16 | lib-cov 17 | 18 | # Coverage directory used by tools like istanbul 19 | coverage 20 | 21 | # nyc test coverage 22 | 
.nyc_output 23 | 24 | # Grunt intermediate storage (http://gruntjs.com/creating-plugins#storing-task-files) 25 | .grunt 26 | 27 | # Bower dependency directory (https://bower.io/) 28 | bower_components 29 | 30 | # node-waf configuration 31 | .lock-wscript 32 | 33 | # Compiled binary addons (http://nodejs.org/api/addons.html) 34 | build/Release 35 | 36 | # Dependency directories 37 | node_modules/ 38 | jspm_packages/ 39 | 40 | package-lock.json 41 | yarn.lock 42 | # Typescript v1 declaration files 43 | typings/ 44 | index.d.cts 45 | pack.d.cts 46 | unpack.d.cts 47 | 48 | # Optional npm cache directory 49 | .npm 50 | 51 | # Optional eslint cache 52 | .eslintcache 53 | 54 | # Optional REPL history 55 | .node_repl_history 56 | 57 | # Output of 'npm pack' 58 | *.tgz 59 | 60 | # Yarn Integrity file 61 | .yarn-integrity 62 | 63 | # dotenv environment variables file 64 | .env 65 | tests/samples 66 | 67 | # Visual Studio Code directory 68 | .vscode 69 | .vs 70 | .idea 71 | 72 | build 73 | dist/test.js 74 | -------------------------------------------------------------------------------- /tests/example.json: -------------------------------------------------------------------------------- 1 | { 2 | "int0": 0, 3 | "int1": 1, 4 | "int1-": -1, 5 | "int8": 255, 6 | "int8-": -255, 7 | "int16": 256, 8 | "int16-": -256, 9 | "int32": 65536, 10 | "int32-": -65536, 11 | "nil": null, 12 | "true": true, 13 | "false": false, 14 | "float": 0.5, 15 | "float-": -0.5, 16 | "string0": "", 17 | "string1": "A", 18 | "string4": "foobarbaz", 19 | "string8": "Omnes viae Romam ducunt.", 20 | "string16": "L’homme n’est qu’un roseau, le plus faible de la nature ; mais c’est un roseau pensant. Il ne faut pas que l’univers entier s’arme pour l’écraser : une vapeur, une goutte d’eau, suffit pour le tuer. Mais, quand l’univers l’écraserait, l’homme serait encore plus noble que ce qui le tue, puisqu’il sait qu’il meurt, et l’avantage que l’univers a sur lui, l’univers n’en sait rien. 
Toute notre dignité consiste donc en la pensée. C’est de là qu’il faut nous relever et non de l’espace et de la durée, que nous ne saurions remplir. Travaillons donc à bien penser : voilà le principe de la morale.", 21 | "array0": [], 22 | "array1": [ 23 | "foo" 24 | ], 25 | "array8": [ 26 | 1, 27 | 2, 28 | 4, 29 | 8, 30 | 16, 31 | 32, 32 | 64, 33 | 128, 34 | 256, 35 | 512, 36 | 1024, 37 | 2048, 38 | 4096, 39 | 8192, 40 | 16384, 41 | 32768, 42 | 65536, 43 | 131072, 44 | 262144, 45 | 524288, 46 | 1048576 47 | ], 48 | "map0": {}, 49 | "map1": { 50 | "foo": "bar" 51 | } 52 | } -------------------------------------------------------------------------------- /stream.js: -------------------------------------------------------------------------------- 1 | import { Transform } from 'stream' 2 | import { Packr } from './pack.js' 3 | import { Unpackr } from './unpack.js' 4 | var DEFAULT_OPTIONS = {objectMode: true} 5 | 6 | export class PackrStream extends Transform { 7 | constructor(options) { 8 | if (!options) 9 | options = {} 10 | options.writableObjectMode = true 11 | super(options) 12 | options.sequential = true 13 | this.packr = options.packr || new Packr(options) 14 | } 15 | _transform(value, encoding, callback) { 16 | this.push(this.packr.pack(value)) 17 | callback() 18 | } 19 | } 20 | 21 | export class UnpackrStream extends Transform { 22 | constructor(options) { 23 | if (!options) 24 | options = {} 25 | options.objectMode = true 26 | super(options) 27 | options.structures = [] 28 | this.unpackr = options.unpackr || new Unpackr(options) 29 | } 30 | _transform(chunk, encoding, callback) { 31 | if (this.incompleteBuffer) { 32 | chunk = Buffer.concat([this.incompleteBuffer, chunk]) 33 | this.incompleteBuffer = null 34 | } 35 | let values 36 | try { 37 | values = this.unpackr.unpackMultiple(chunk) 38 | } catch(error) { 39 | if (error.incomplete) { 40 | this.incompleteBuffer = chunk.slice(error.lastPosition) 41 | values = error.values 42 | } 43 | else 44 | throw error 45 
| } finally { 46 | for (let value of values || []) { 47 | if (value === null) 48 | value = this.getNullValue() 49 | this.push(value) 50 | } 51 | } 52 | if (callback) callback() 53 | } 54 | getNullValue() { 55 | return Symbol.for(null) 56 | } 57 | } 58 | -------------------------------------------------------------------------------- /tests/test-incomplete.js: -------------------------------------------------------------------------------- 1 | import { encode } from '../index.js' 2 | import { assert } from 'chai' 3 | import { Encoder } from '../pack.js' 4 | 5 | const tests = { 6 | string: 'interesting string', 7 | number: 12345, 8 | buffer: Buffer.from('hello world'), 9 | bigint: 12345678910n, 10 | array: [1, 2, 3, 4, 5, 6, 7, 8, 9, 10], 11 | 'many-strings': [], 12 | set: new Set('abcdefghijklmnopqrstuvwxyz'.split('')), 13 | object: { a: 1, b: 2, c: 3, d: 4, e: 5, f: 6 } 14 | } 15 | for (let i = 0; i < 100; i++) { 16 | tests['many-strings'].push('test-data-' + i) 17 | } 18 | 19 | suite('encode and decode tests with partial values', function () { 20 | const encoder = new Encoder({ objectMode: true, structures: [], moreTypes: true, structuredClone: true }) 21 | 22 | for (const [label, testData] of Object.entries(tests)) { 23 | test(label, () => { 24 | const encoded = encoder.encode(testData) 25 | assert.isTrue(Buffer.isBuffer(encoded), 'encode returns a Buffer') 26 | assert.deepStrictEqual(encoder.decode(encoded, encoded.length, true), testData, 'full buffer decodes well') 27 | for (let length = Math.max(1, Math.ceil(encoded.length / 2) - 40); length < Math.ceil(encoded.length / 2); length++) { 28 | const firstHalf = encoded.slice(0, length) 29 | let value 30 | try { 31 | value = encoder.decode(firstHalf, firstHalf.length, true) 32 | } catch (err) { 33 | if (err.incomplete !== true) { 34 | assert.fail(`Should throw an error with .incomplete set to true, instead threw error <${err}>, for ${JSON.stringify(testData)} ${encoded.length}, ${length}`) 35 | } else { 36 | 
continue; // victory! correct outcome! 37 | } 38 | } 39 | assert.fail(`Should throw an error with .incomplete set to true, instead returned value ${JSON.stringify(value)}`) 40 | } 41 | }) 42 | } 43 | }) 44 | -------------------------------------------------------------------------------- /tests/test-compatibility.cjs: -------------------------------------------------------------------------------- 1 | const data = require('./example4.json'); 2 | const { pack, unpack, Packr } = require('msgpackr/pack'); 3 | const chai = require('chai'); 4 | 5 | function tryRequire(module) { 6 | try { 7 | return require(module) 8 | } catch(error) { 9 | console.log(error) 10 | } 11 | } 12 | //if (typeof chai === 'undefined') { chai = require('chai') } 13 | const assert = chai.assert 14 | //if (typeof msgpackr === 'undefined') { msgpackr = require('..') } 15 | var msgpack_msgpack = tryRequire('@msgpack/msgpack'); 16 | var msgpack_lite = tryRequire('msgpack-lite'); 17 | var msgpack = tryRequire('msgpack'); 18 | 19 | const addCompatibilitySuite = (data) => () => { 20 | if (msgpack_msgpack) { 21 | test('from @msgpack/msgpack', function(){ 22 | var serialized = msgpack_msgpack.encode(data) 23 | var deserialized = unpack(serialized) 24 | assert.deepEqual(deserialized, data) 25 | }) 26 | 27 | test('to @msgpack/msgpack', function(){ 28 | var serialized = pack(data) 29 | var deserialized = msgpack_msgpack.decode(serialized) 30 | assert.deepEqual(deserialized, data) 31 | }) 32 | } 33 | if (msgpack_lite) { 34 | test('from msgpack-lite', function(){ 35 | var serialized = msgpack_lite.encode(data) 36 | var deserialized = unpack(serialized) 37 | assert.deepEqual(deserialized, data) 38 | }) 39 | 40 | test('to msgpack-lite', function(){ 41 | var serialized = pack(data) 42 | var deserialized = msgpack_lite.decode(serialized) 43 | assert.deepEqual(deserialized, data) 44 | }) 45 | } 46 | if (msgpack) { 47 | test.skip('from msgpack', function(){ 48 | var serialized = msgpack.pack(data) 49 | var 
deserialized = unpack(serialized) 50 | assert.deepEqual(deserialized, data) 51 | }) 52 | 53 | test('to msgpack', function(){ 54 | var serialized = pack(data) 55 | var deserialized = msgpack.unpack(serialized) 56 | assert.deepEqual(deserialized, data) 57 | }) 58 | } 59 | } 60 | 61 | suite('msgpackr compatibility tests (example)', addCompatibilitySuite(require('./example.json'))) 62 | suite('msgpackr compatibility tests (example4)', addCompatibilitySuite(require('./example4.json'))) 63 | suite('msgpackr compatibility tests (example5)', addCompatibilitySuite(require('./example5.json'))) 64 | suite.skip('msgpackr compatibility tests with dates', addCompatibilitySuite({ date: new Date() })) -------------------------------------------------------------------------------- /rollup.config.js: -------------------------------------------------------------------------------- 1 | import terser from '@rollup/plugin-terser'; 2 | import json from "@rollup/plugin-json"; 3 | import replace from "@rollup/plugin-replace"; 4 | 5 | export default [ 6 | { 7 | input: "node-index.js", 8 | output: [ 9 | { 10 | file: "dist/node.cjs", 11 | format: "cjs", 12 | sourcemap: true 13 | } 14 | ] 15 | }, 16 | { 17 | input: "index.js", 18 | output: { 19 | file: "dist/index.js", 20 | format: "umd", 21 | name: "msgpackr", 22 | sourcemap: true 23 | } 24 | }, 25 | { 26 | input: "index.js", 27 | plugins: [ 28 | replace({ Function: 'BlockedFunction '}) 29 | ], 30 | output: { 31 | file: "dist/index-no-eval.cjs", 32 | format: "umd", 33 | name: "msgpackr", 34 | sourcemap: true 35 | }, 36 | }, 37 | { 38 | input: "unpack.js", 39 | plugins: [ 40 | replace({ Function: 'BlockedFunction '}) 41 | ], 42 | output: { 43 | file: "dist/unpack-no-eval.cjs", 44 | format: "umd", 45 | name: "msgpackr", 46 | sourcemap: true 47 | }, 48 | }, 49 | { 50 | input: "index.js", 51 | plugins: [ 52 | terser({}) 53 | ], 54 | output: { 55 | file: "dist/index.min.js", 56 | format: "umd", 57 | name: "msgpackr", 58 | sourcemap: true 59 | } 
60 | }, 61 | { 62 | input: "index.js", 63 | plugins: [ 64 | replace({ Function: 'BlockedFunction '}), 65 | terser({}) 66 | ], 67 | output: { 68 | file: "dist/index-no-eval.min.js", 69 | format: "umd", 70 | name: "msgpackr", 71 | sourcemap: true 72 | } 73 | }, 74 | { 75 | input: "tests/test.js", 76 | plugins: [json()], 77 | external: ['chai', '../index.js'], 78 | output: { 79 | file: "dist/test.js", 80 | format: "iife", 81 | sourcemap: true, 82 | globals: { 83 | chai: 'chai', 84 | './index.js': 'msgpackr', 85 | }, 86 | } 87 | } 88 | ]; 89 | -------------------------------------------------------------------------------- /tests/test-node-iterators.js: -------------------------------------------------------------------------------- 1 | import { encodeIter, decodeIter } from '../index.js' 2 | import { decode } from '../index.js' 3 | import { assert } from 'chai' 4 | 5 | const tests = [ 6 | null, 7 | false, 8 | true, 9 | 'interesting string', 10 | 12345, 11 | 123456789n, 12 | 123.456, 13 | Buffer.from('Hello World'), 14 | 'abcdefghijklmnopqrstuvwxyz'.split('') 15 | ] 16 | 17 | suite('msgpackr iterators interface tests', function () { 18 | test('sync encode iterator', () => { 19 | const encodings = [...encodeIter(tests)] 20 | const decodings = encodings.map(x => decode(x)) 21 | assert.deepStrictEqual(decodings, tests) 22 | }) 23 | 24 | test('async encode iterator', async () => { 25 | async function * generate () { 26 | for (const test of tests) { 27 | await new Promise((resolve, reject) => setImmediate(resolve)) 28 | yield test 29 | } 30 | } 31 | 32 | const chunks = [] 33 | for await (const chunk of encodeIter(generate())) { 34 | chunks.push(chunk) 35 | } 36 | 37 | const decodings = chunks.map(x => decode(x)) 38 | assert.deepStrictEqual(decodings, tests) 39 | }) 40 | 41 | test('sync encode and decode iterator', () => { 42 | const encodings = [...encodeIter(tests)] 43 | assert.isTrue(encodings.every(v => Buffer.isBuffer(v))) 44 | const decodings = 
[...decodeIter(encodings)] 45 | assert.deepStrictEqual(decodings, tests) 46 | 47 | // also test decodings work with buffers multiple values in a buffer 48 | const concatEncoding = Buffer.concat([...encodings]) 49 | const decodings2 = [...decodeIter([concatEncoding])] 50 | assert.deepStrictEqual(decodings2, tests) 51 | 52 | // also test decodings work with partial buffers that don't align to values perfectly 53 | const half1 = concatEncoding.slice(0, Math.floor(concatEncoding.length / 2)) 54 | const half2 = concatEncoding.slice(Math.floor(concatEncoding.length / 2)) 55 | const decodings3 = [...decodeIter([half1, half2])] 56 | assert.deepStrictEqual(decodings3, tests) 57 | }) 58 | 59 | test('async encode and decode iterator', async () => { 60 | async function * generator () { 61 | for (const obj of tests) { 62 | await new Promise((resolve, reject) => setImmediate(resolve)) 63 | yield obj 64 | } 65 | } 66 | const yields = [] 67 | for await (const value of decodeIter(encodeIter(generator()))) { 68 | yields.push(value) 69 | } 70 | assert.deepStrictEqual(yields, tests) 71 | }) 72 | }) -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "msgpackr", 3 | "author": "Kris Zyp", 4 | "version": "1.11.8", 5 | "description": "Ultra-fast MessagePack implementation with extensions for records and structured cloning", 6 | "license": "MIT", 7 | "types": "./index.d.ts", 8 | "main": "./dist/node.cjs", 9 | "module": "./index.js", 10 | "react-native": "./index.js", 11 | "keywords": [ 12 | "MessagePack", 13 | "msgpack", 14 | "performance", 15 | "structured", 16 | "clone" 17 | ], 18 | "repository": { 19 | "type": "git", 20 | "url": "http://github.com/kriszyp/msgpackr" 21 | }, 22 | "scripts": { 23 | "benchmark": "node ./tests/benchmark.cjs", 24 | "build": "rollup -c && cpy index.d.ts . --rename=index.d.cts && cpy pack.d.ts . 
--rename=pack.d.cts && cpy unpack.d.ts . --rename=unpack.d.cts", 25 | "dry-run": "npm publish --dry-run", 26 | "prepare": "npm run build", 27 | "test": "mocha tests/test**.*js -u tdd --experimental-json-modules" 28 | }, 29 | "type": "module", 30 | "exports": { 31 | ".": { 32 | "types": { 33 | "require": "./index.d.cts", 34 | "import": "./index.d.ts" 35 | }, 36 | "browser": "./index.js", 37 | "node": { 38 | "require": "./dist/node.cjs", 39 | "import": "./node-index.js" 40 | }, 41 | "bun": { 42 | "require": "./dist/node.cjs", 43 | "import": "./node-index.js" 44 | }, 45 | "default": "./index.js" 46 | }, 47 | "./pack": { 48 | "types": { 49 | "require": "./pack.d.cts", 50 | "import": "./pack.d.ts" 51 | }, 52 | "browser": "./pack.js", 53 | "node": { 54 | "import": "./index.js", 55 | "require": "./dist/node.cjs" 56 | }, 57 | "bun": { 58 | "import": "./index.js", 59 | "require": "./dist/node.cjs" 60 | }, 61 | "default": "./pack.js" 62 | }, 63 | "./unpack": { 64 | "types": { 65 | "require": "./unpack.d.cts", 66 | "import": "./unpack.d.ts" 67 | }, 68 | "browser": "./unpack.js", 69 | "node": { 70 | "import": "./index.js", 71 | "require": "./dist/node.cjs" 72 | }, 73 | "bun": { 74 | "import": "./index.js", 75 | "require": "./dist/node.cjs" 76 | }, 77 | "default": "./unpack.js" 78 | }, 79 | "./unpack-no-eval": "./dist/unpack-no-eval.cjs", 80 | "./index-no-eval": "./dist/index-no-eval.cjs" 81 | }, 82 | "files": [ 83 | "/dist", 84 | "*.md", 85 | "/*.js", 86 | "/*.ts", 87 | "/*.cts" 88 | ], 89 | "optionalDependencies": { 90 | "msgpackr-extract": "^3.0.2" 91 | }, 92 | "devDependencies": { 93 | "@rollup/plugin-json": "^5.0.1", 94 | "@rollup/plugin-replace": "^5.0.1", 95 | "@types/node": "latest", 96 | "async": "^3", 97 | "chai": "^4.3.4", 98 | "cpy-cli": "^4.1.0", 99 | "esm": "^3.2.25", 100 | "mocha": "^10.1.0", 101 | "rollup": "^3.2.5", 102 | "@rollup/plugin-terser": "^0.1.0" 103 | } 104 | } 105 | -------------------------------------------------------------------------------- 
/tests/test-node-stream.js: -------------------------------------------------------------------------------- 1 | import { PackrStream, UnpackrStream } from '../node-index.js' 2 | import stream from 'stream' 3 | import chai from 'chai' 4 | import util from 'util' 5 | import fs from 'fs' 6 | let allSampleData = []; 7 | for (let i = 1; i < 6; i++) { 8 | allSampleData.push(JSON.parse(fs.readFileSync(new URL(`./example${i > 1 ? i : ''}.json`, import.meta.url)))); 9 | } 10 | 11 | const finished = util.promisify(stream.finished) 12 | var assert = chai.assert 13 | 14 | suite('msgpackr node stream tests', function(){ 15 | test('serialize/parse stream', () => { 16 | const serializeStream = new PackrStream({ 17 | }) 18 | const parseStream = new UnpackrStream() 19 | serializeStream.pipe(parseStream) 20 | const received = [] 21 | parseStream.on('data', data => { 22 | received.push(data) 23 | }) 24 | const messages = [{ 25 | name: 'first' 26 | }, { 27 | name: 'second' 28 | }, { 29 | name: 'third' 30 | }, { 31 | name: 'third', 32 | extra: [1, 3, { foo: 'hi'}, 'bye'] 33 | }] 34 | for (const message of messages) 35 | serializeStream.write(message) 36 | return new Promise((resolve, reject) => { 37 | setTimeout(() => { 38 | assert.deepEqual(received, messages) 39 | resolve() 40 | }, 10) 41 | }) 42 | }) 43 | test('stream from buffer', () => new Promise(async resolve => { 44 | const parseStream = new UnpackrStream() 45 | let values = [] 46 | parseStream.on('data', (value) => { 47 | values.push(value) 48 | }) 49 | parseStream.on('end', () => { 50 | assert.deepEqual(values, [1, 2]) 51 | resolve() 52 | }) 53 | let bufferStream = new stream.Duplex() 54 | bufferStream.pipe(parseStream) 55 | bufferStream.push(new Uint8Array([1, 2])) 56 | bufferStream.push(null) 57 | })) 58 | test('serialize stream to file', async function() { 59 | const serializeStream = new PackrStream({ 60 | }) 61 | const fileStream = fs.createWriteStream('test-output.msgpack'); 62 | serializeStream.pipe(fileStream); 63 | 
const messages = [{ 64 | name: 'first' 65 | }, { 66 | name: 'second', 67 | extra: [1, 3, { foo: 'hi'}, 'bye'] 68 | }] 69 | for (const message of messages) 70 | serializeStream.write(message) 71 | setTimeout(() => serializeStream.end(), 10) 72 | 73 | await finished(serializeStream) 74 | }) 75 | test('stream with records and bundleStrings', async function() { 76 | const serializeStream = new PackrStream({ 77 | useRecords: true, 78 | bundleStrings: true, 79 | }) 80 | const parseStream = new UnpackrStream() 81 | serializeStream.pipe(parseStream) 82 | const received = [] 83 | parseStream.on('data', data => { 84 | received.push(data) 85 | }) 86 | const messages = allSampleData; 87 | for (const message of messages) 88 | serializeStream.write(message) 89 | return new Promise((resolve, reject) => { 90 | setTimeout(() => { 91 | assert.deepEqual(received, messages) 92 | resolve() 93 | }, 10) 94 | }) 95 | }) 96 | teardown(function() { 97 | try { 98 | fs.unlinkSync('test-output.msgpack') 99 | }catch(error){} 100 | }) 101 | }) 102 | 103 | -------------------------------------------------------------------------------- /iterators.js: -------------------------------------------------------------------------------- 1 | import { Packr } from './pack.js' 2 | import { Unpackr } from './unpack.js' 3 | 4 | /** 5 | * Given an Iterable first argument, returns an Iterable where each value is packed as a Buffer 6 | * If the argument is only Async Iterable, the return value will be an Async Iterable. 
7 | * @param {Iterable|Iterator|AsyncIterable|AsyncIterator} objectIterator - iterable source, like a Readable object stream, an array, Set, or custom object 8 | * @param {options} [options] - msgpackr pack options 9 | * @returns {IterableIterator|Promise.} 10 | */ 11 | export function packIter (objectIterator, options = {}) { 12 | if (!objectIterator || typeof objectIterator !== 'object') { 13 | throw new Error('first argument must be an Iterable, Async Iterable, or a Promise for an Async Iterable') 14 | } else if (typeof objectIterator[Symbol.iterator] === 'function') { 15 | return packIterSync(objectIterator, options) 16 | } else if (typeof objectIterator.then === 'function' || typeof objectIterator[Symbol.asyncIterator] === 'function') { 17 | return packIterAsync(objectIterator, options) 18 | } else { 19 | throw new Error('first argument must be an Iterable, Async Iterable, Iterator, Async Iterator, or a Promise') 20 | } 21 | } 22 | 23 | function * packIterSync (objectIterator, options) { 24 | const packr = new Packr(options) 25 | for (const value of objectIterator) { 26 | yield packr.pack(value) 27 | } 28 | } 29 | 30 | async function * packIterAsync (objectIterator, options) { 31 | const packr = new Packr(options) 32 | for await (const value of objectIterator) { 33 | yield packr.pack(value) 34 | } 35 | } 36 | 37 | /** 38 | * Given an Iterable/Iterator input which yields buffers, returns an IterableIterator which yields sync decoded objects 39 | * Or, given an Async Iterable/Iterator which yields promises resolving in buffers, returns an AsyncIterableIterator. 40 | * @param {Iterable|Iterator|AsyncIterable|AsyncIterableIterator} bufferIterator 41 | * @param {object} [options] - unpackr options 42 | * @returns {IterableIterator|Promise. 
{ 52 | let yields 53 | // if there's incomplete data from previous chunk, concatinate and try again 54 | if (incomplete) { 55 | chunk = Buffer.concat([incomplete, chunk]) 56 | incomplete = undefined 57 | } 58 | 59 | try { 60 | yields = unpackr.unpackMultiple(chunk) 61 | } catch (err) { 62 | if (err.incomplete) { 63 | incomplete = chunk.slice(err.lastPosition) 64 | yields = err.values 65 | } else { 66 | throw err 67 | } 68 | } 69 | return yields 70 | } 71 | 72 | if (typeof bufferIterator[Symbol.iterator] === 'function') { 73 | return (function * iter () { 74 | for (const value of bufferIterator) { 75 | yield * parser(value) 76 | } 77 | })() 78 | } else if (typeof bufferIterator[Symbol.asyncIterator] === 'function') { 79 | return (async function * iter () { 80 | for await (const value of bufferIterator) { 81 | yield * parser(value) 82 | } 83 | })() 84 | } 85 | } 86 | export const decodeIter = unpackIter 87 | export const encodeIter = packIter -------------------------------------------------------------------------------- /index.d.ts: -------------------------------------------------------------------------------- 1 | export enum FLOAT32_OPTIONS { 2 | NEVER = 0, 3 | ALWAYS = 1, 4 | DECIMAL_ROUND = 3, 5 | DECIMAL_FIT = 4 6 | } 7 | 8 | export interface Options { 9 | useFloat32?: FLOAT32_OPTIONS 10 | useRecords?: boolean | ((value:any)=> boolean) 11 | structures?: {}[] 12 | moreTypes?: boolean 13 | sequential?: boolean 14 | structuredClone?: boolean 15 | mapsAsObjects?: boolean 16 | variableMapSize?: boolean 17 | coercibleKeyAsNumber?: boolean 18 | copyBuffers?: boolean 19 | bundleStrings?: boolean 20 | useTimestamp32?: boolean 21 | largeBigIntToFloat?: boolean 22 | largeBigIntToString?: boolean 23 | useBigIntExtension?: boolean 24 | encodeUndefinedAsNil?: boolean 25 | maxSharedStructures?: number 26 | maxOwnStructures?: number 27 | mapAsEmptyObject?: boolean 28 | setAsEmptyObject?: boolean 29 | allowArraysInMapKeys?: boolean 30 | writeFunction?: () => any 31 | /** 
@deprecated use int64AsType: 'number' */ 32 | int64AsNumber?: boolean 33 | int64AsType?: 'bigint' | 'number' | 'string' 34 | shouldShareStructure?: (keys: string[]) => boolean 35 | getStructures?(): {}[] 36 | saveStructures?(structures: {}[]): boolean | void 37 | onInvalidDate?: () => any 38 | } 39 | interface Extension { 40 | Class?: Function 41 | type?: number 42 | pack?(value: any): Buffer | Uint8Array 43 | unpack?(messagePack: Buffer | Uint8Array): any 44 | read?(datum: any): any 45 | write?(instance: any): any 46 | } 47 | export type UnpackOptions = { start?: number; end?: number; lazy?: boolean; } | number; 48 | export class Unpackr { 49 | constructor(options?: Options) 50 | unpack(messagePack: Buffer | Uint8Array, options?: UnpackOptions): any 51 | decode(messagePack: Buffer | Uint8Array, options?: UnpackOptions): any 52 | unpackMultiple(messagePack: Buffer | Uint8Array): any[] 53 | unpackMultiple(messagePack: Buffer | Uint8Array, forEach: (value: any, start?: number, end?: number) => any): void 54 | } 55 | export class Decoder extends Unpackr {} 56 | export function unpack(messagePack: Buffer | Uint8Array, options?: UnpackOptions): any 57 | export function unpackMultiple(messagePack: Buffer | Uint8Array): any[] 58 | export function unpackMultiple(messagePack: Buffer | Uint8Array, forEach: (value: any, start?: number, end?: number) => any): void 59 | export function decode(messagePack: Buffer | Uint8Array, options?: UnpackOptions): any 60 | export function addExtension(extension: Extension): void 61 | export function clearSource(): void 62 | export function roundFloat32(float32Number: number): number 63 | export const C1: {} 64 | export let isNativeAccelerationEnabled: boolean 65 | 66 | export class Packr extends Unpackr { 67 | offset: number; 68 | position: number; 69 | pack(value: any, encodeOptions?: number): Buffer 70 | encode(value: any, encodeOptions?: number): Buffer 71 | useBuffer(buffer: Buffer | Uint8Array): void; 72 | clearSharedData(): void; 73 | 
} 74 | export class Encoder extends Packr {} 75 | export function pack(value: any, encodeOptions?: number): Buffer 76 | export function encode(value: any, encodeOptions?: number): Buffer 77 | 78 | export const REUSE_BUFFER_MODE: number; 79 | export const RESET_BUFFER_MODE: number; 80 | export const RESERVE_START_SPACE: number; 81 | 82 | import { Transform, Readable } from 'stream' 83 | 84 | export as namespace msgpackr; 85 | export class UnpackrStream extends Transform { 86 | constructor(options?: Options | { highWaterMark: number, emitClose: boolean, allowHalfOpen: boolean }) 87 | } 88 | export class PackrStream extends Transform { 89 | constructor(options?: Options | { highWaterMark: number, emitClose: boolean, allowHalfOpen: boolean }) 90 | } 91 | export { PackrStream as EncoderStream, UnpackrStream as DecoderStream }; -------------------------------------------------------------------------------- /benchmark.md: -------------------------------------------------------------------------------- 1 | Here are more comprehensive benchmarks. This is comparison with the next fastest JS projects using the benchmark tool from `msgpack-lite` (and data is from some clinical research data we use that has a good mix of different value types and structures). 
It also includes comparison to V8 native JSON functionality, and JavaScript Avro (`avsc`, a very optimized Avro implementation): 2 | 3 | operation | op | ms | op/s 4 | ---------------------------------------------------------- | ------: | ----: | -----: 5 | buf = Buffer(JSON.stringify(obj)); | 82000 | 5004 | 16386 6 | obj = JSON.parse(buf); | 88600 | 5000 | 17720 7 | require("msgpackr").pack(obj); | 161500 | 5002 | 32287 8 | require("msgpackr").unpack(buf); | 94600 | 5004 | 18904 9 | msgpackr w/ shared structures: packr.pack(obj); | 178400 | 5002 | 35665 10 | msgpackr w/ shared structures: packr.unpack(buf); | 376700 | 5000 | 75340 11 | buf = require("msgpack-lite").encode(obj); | 30100 | 5012 | 6005 12 | obj = require("msgpack-lite").decode(buf); | 16200 | 5001 | 3239 13 | buf = require("notepack").encode(obj); | 62600 | 5005 | 12507 14 | obj = require("notepack").decode(buf); | 32400 | 5007 | 6470 15 | require("what-the-pack")... encoder.encode(obj); | 63500 | 5002 | 12694 16 | require("what-the-pack")... encoder.decode(buf); | 32000 | 5001 | 6398 17 | require("avsc")...make schema/type...type.toBuffer(obj); | 84600 | 5003 | 16909 18 | require("avsc")...make schema/type...type.toBuffer(obj); | 99300 | 5001 | 19856 19 | 20 | (`avsc` is schema-based and more comparable in style to msgpackr with shared structures). 
21 | 22 | Here is a benchmark of streaming data (again borrowed from `msgpack-lite`'s benchmarking), where msgpackr is able to take advantage of the structured record extension and really pull away from other tools: 23 | 24 | operation (1000000 x 2) | op | ms | op/s 25 | ------------------------------------------------ | ------: | ----: | -----: 26 | new PackrStream().write(obj); | 1000000 | 372 | 2688172 27 | new UnpackrStream().write(buf); | 1000000 | 247 | 4048582 28 | stream.write(msgpack.encode(obj)); | 1000000 | 2898 | 345065 29 | stream.write(msgpack.decode(buf)); | 1000000 | 1969 | 507872 30 | stream.write(notepack.encode(obj)); | 1000000 | 901 | 1109877 31 | stream.write(notepack.decode(buf)); | 1000000 | 1012 | 988142 32 | msgpack.Encoder().on("data",ondata).encode(obj); | 1000000 | 1763 | 567214 33 | msgpack.createDecodeStream().write(buf); | 1000000 | 2222 | 450045 34 | msgpack.createEncodeStream().write(obj); | 1000000 | 1577 | 634115 35 | msgpack.Decoder().on("data",ondata).decode(buf); | 1000000 | 2246 | 445235 36 | 37 | 38 | 39 | These are the benchmarks from notepack package. 
The larger test data for these benchmarks is very heavily weighted with large binary/buffer data and objects with extreme numbers of keys (much more than I typically see with real-world data, but YMMV): 40 | 41 | node ./benchmarks/encode 42 | 43 | library | tiny | small | medium | large 44 | ---------------- | ----------------: | --------------: | ---------------| -------: 45 | notepack | 2,171,621 ops/sec | 546,905 ops/sec | 29,578 ops/sec | 265 ops/sec 46 | msgpack-js | 967,682 ops/sec | 184,455 ops/sec | 20,556 ops/sec | 259 ops/sec 47 | msgpackr | 2,392,826 ops/sec | 556,915 ops/sec | 70,573 ops/sec | 313 ops/sec 48 | msgpack-lite | 553,143 ops/sec | 132,318 ops/sec | 11,816 ops/sec | 186 ops/sec 49 | @msgpack/msgpack | 2,157,655 ops/sec | 573,236 ops/sec | 25,864 ops/sec | 90.26 ops/sec 50 | 51 | 52 | node ./benchmarks/decode 53 | 54 | library | tiny | small | medium | large 55 | ---------------- | ----------------: | --------------: | --------------- | -------: 56 | notepack | 2,220,904 ops/sec | 560,630 ops/sec | 28,177 ops/sec | 275 ops/sec 57 | msgpack-js | 965,719 ops/sec | 222,047 ops/sec | 21,431 ops/sec | 257 ops/sec 58 | msgpackr | 2,320,046 ops/sec | 589,167 ops/sec | 70,299 ops/sec | 329 ops/sec 59 | msgpackr records | 3,750,547 ops/sec | 912,419 ops/sec | 136,853 ops/sec | 733 ops/sec 60 | msgpack-lite | 569,222 ops/sec | 129,008 ops/sec | 12,424 ops/sec | 180 ops/sec 61 | @msgpack/msgpack | 2,089,697 ops/sec | 557,507 ops/sec | 20,256 ops/sec | 85.03 ops/sec 62 | 63 | This was run by adding the msgpackr to the benchmarks for notepack. 64 | 65 | All benchmarks were performed on Node 14.8.0 (Windows i7-4770 3.4Ghz). 
They can be run with: 66 | npm install --no-save msgpack msgpack-js @msgpack/msgpack msgpack-lite notepack avsc 67 | node tests/benchmark 68 | -------------------------------------------------------------------------------- /tests/strings2.json: -------------------------------------------------------------------------------- 1 | ["metadata","Designs","Randomized Controlled Trial","Types","BriefSummary","To determine the efficacy, long-term safety, and tolerability of alirocumab , mg every ,\n weeks (Q,W), in comparison with placebo, as well as its potential as a starting regimen. The\n dose regimen of , mg every , weeks (Q,W), as used in other studies, was added as a\n calibrator.","Abstract","To determine the efficacy, long-term safety, and tolerability of alirocumab , mg every ,\n weeks (Q,W), in comparison with placebo, as well as its potential as a starting regimen. The\n dose regimen of , mg every , weeks (Q,W), as used in other studies, was added as a\n calibrator.","Acronym","null","ArticleId","Qy,gwKWSoaWRmbmFEQA","Authors","null","CochraneID","null","Confidential","false","CorporateAuthor","null","Country","Bulgaria, Canada, Hungary, Israel, Norway, Slovakia, United Kingdom, United States","CustomData","null","DatabaseType","ClinicalTrials.gov","DOI","null","EmbaseAccessionNumber","null","Emtree","null","ErrataText","null","FullTextURL","null","Institution","null","ISSN","null","Issue","null","JournalTitle","null","MedlineID","null","MeSH","Hypercholesterolemia|Antibodies, Monoclonal","Pages","null","ParentChildStatus","null","ParentID","null","PublicationDate","March","PublicationYear","PubType","null","ReferenceStudy","null","SecondarySourceID","null","Source","Regeneron Pharmaceuticals","SourceReferenceId","NCT","TaStudyDesign","Randomized","Title","A Randomized, Double-Blind, Placebo-Controlled Study to Evaluate the Efficacy and Safety of an Every Four Weeks Treatment Regimen of Alirocumab in Patients With Primary 
Hypercholesterolemia","TrialOutcome","null","Volume","null","Id","Created","VersionNo","ExtractData","null","Digitized","null","IsRapidExtract","false","IsUploaded","false","design","Randomized Controlled Trial","conditions","label","Cholesterol Total Increased","id","SUE_c","phase","name","NCT","trialIds","NCT","acronyms","outcomeCount","id","groups","Id","RefId","B,|O,~Alirocumab , mg Q,W/Up , mg Q,W Without Concomitant Statin","OriginalName","Alirocumab , mg Q,W/Up , mg Q,W Without Concomitant Statin","N","age","ageSD","male","Interventions","termIds","SUBYEL","SUB_Oc","SUNUVb","analyzeAs","Alirocumab","analyzableScore","matchingScore","Id","zB","RefId","B,|O,~Alirocumab , mg Q,W/Up , mg Q,W Without Concomitant Statin","OriginalName","Alirocumab , mg Q,W/Up , mg Q,W Without Concomitant Statin","N","age","ageSD","male","Interventions","termIds","SUBYEL","SUB_Oc","analyzeAs","Statins","analyzableScore","matchingScore","Id","RefId","B,|O,~Placebo Q,W Without Concomitant Statin","OriginalName","Placebo Q,W Without Concomitant Statin","N","age","ageSD","male","Interventions","termIds","SUGeLS","SUBYEL","SUB_Oc","analyzeAs","Control","analyzableScore","matchingScore","Id","tv","RefId","OriginalName","Alirocumab , mg Q,W/Up , mg Q,W","Interventions","termIds","SUCO","SUNUVb","Id","jt","RefId","B,|O,~Alirocumab , mg Q,W/Up , mg Q,W With Concomitant Statin","OriginalName","Alirocumab , mg Q,W/Up , mg Q,W With Concomitant Statin","N","age","ageSD","male","Interventions","termIds","SUBYEL","SUB_Oc","Id","RefId","OriginalName","Alirocumab , mg Q,W/Up , mg Q,W","Interventions","termIds","SUNUVb","Id","RefId","B,|O,~Alirocumab , mg Q,W/Up , mg Q,W With Concomitant Statin","OriginalName","Alirocumab , mg Q,W/Up , mg Q,W With Concomitant 
Statin","N","age","ageSD","male","Interventions","termIds","SUBYEL","SUB_Oc","SUNUVb","Id","Interventions","Id","Ya","Name","Treatments","Id","((","Phase","k)","Type","Drug","termIds","SUGeLS","SUNUVb","terms","Placebo","Alirocumab","Id","o)","Name","Treatments","Id","Phase","k)","Type","Drug","termIds","SUBYEL","terms","Statins","RefId","E,|Placebo Q,W","OriginalName","Placebo Q,W","Id","Ls","RefId","B,|O,~Placebo Q,W With Concomitant Statin","OriginalName","Placebo Q,W With Concomitant Statin","N","age","ageSD","male","Interventions","termIds","SUGeLS","SUBYEL","SUB_Oc","hasDocData","null","hasRapidExtract","false","N","queryScore","matchingScore","score","outcomes","id","type","Change","unit","%","termIds","SUF,R","SUBskP","quantifiers","name","Calculated LDL-C in Not Receiving Concomitant Statin Therapy - On-Treatment Analysis","cells","number","unit","%","group","!","varType","se","N","se","sd","number","unit","%","group","varType","se","N","se","sd","number","unit","%","group","zB","varType","se","N","se","sd","time","Id","Low","Value","Baseline","High","Number","Unit","wk","Type","Total","days","description","wk","score","matchingTerm","SUF,R","suggestedPositive","false","sourceUnit","%","id","type","Change","unit","%","termIds","SUF,R","SUBskP","quantifiers","name","Calculated LDL-C in Receiving Concomitant Statin Therapy - On-Treatment Analysis","cells","number","unit","%","group","Ls","varType","se","N","se","sd","number","unit","%","group","varType","se","N","se","sd","number","unit","%","group","jt","varType","se","N","se","sd","time","Id","Low","Value","Baseline","High","Number","Unit","wk","Type","Total","days","description","wk","score","matchingTerm","SUF,R","suggestedPositive","false","sourceUnit","%","id","type","Change","unit","%","termIds","SUF,R","SUBskP","quantifiers","name","Calculated LDL-C in Not Receiving Concomitant Statin Therapy - On-Treatment 
Analysis","cells","number","unit","%","group","varType","se","N","se","sd","number","unit","%","group","varType","se","N","se","sd","number","unit","%","group","zB","varType","se","N","se","sd","time","Id","Low","Value","Baseline","High","Number","Unit","wk","Type","Total","days","description","score","matchingTerm","SUF,R","suggestedPositive","false","sourceUnit","%","id","type","Change","unit","%","termIds","SUF,R","SUBskP","quantifiers","name","Calculated LDL-C in Receiving Concomitant Statin Therapy - On-Treatment Analysis","cells","number","unit","%","group","Ls","varType","se","N","se","sd","number","unit","%","group","E","varType","se","N","se","sd","number","unit","%","group","jt","varType","se","N","se","sd","time","Id","Low","Value","Baseline","High","Number","Unit","wk","Type","Total","days","description","wk","score","matchingTerm","SUF,R","suggestedPositive","false","sourceUnit","%","characteristics","id","type","Binary","isCharacteristic","null","termIds","SUE_c","SUCbN","SUyJj","quantifiers","name","Patients not having adequate control of their hypercholesterolemia based on their individual level of CVD risk","cells","number","outcomesScore"] -------------------------------------------------------------------------------- /tests/example4.json: -------------------------------------------------------------------------------- 1 | {"metadata":{"Designs":["Randomized Controlled Trial"],"Types":[],"BriefSummary":"To determine the efficacy, long-term safety, and tolerability of alirocumab 300 mg every 4\n weeks (Q4W), in comparison with placebo, as well as its potential as a starting regimen. The\n dose regimen of 75 mg every 2 weeks (Q2W), as used in other studies, was added as a\n calibrator.","Abstract":"To determine the efficacy, long-term safety, and tolerability of alirocumab 300 mg every 4\n weeks (Q4W), in comparison with placebo, as well as its potential as a starting regimen. 
The\n dose regimen of 75 mg every 2 weeks (Q2W), as used in other studies, was added as a\n calibrator.","Acronym":null,"ArticleId":"Qy3gwKWSoaWRmbmFEQA","Authors":null,"CochraneID":null,"Confidential":false,"CorporateAuthor":null,"Country":"Bulgaria, Canada, Hungary, Israel, Norway, Slovakia, United Kingdom, United States","CustomData":null,"DatabaseType":"ClinicalTrials.gov","DOI":null,"EmbaseAccessionNumber":null,"Emtree":null,"ErrataText":null,"FullTextURL":null,"Institution":null,"ISSN":null,"Issue":null,"JournalTitle":null,"MedlineID":null,"MeSH":"Hypercholesterolemia|Antibodies, Monoclonal","Pages":null,"ParentChildStatus":null,"ParentID":null,"PublicationDate":"March 21, 2017","PublicationYear":2017,"PubType":null,"ReferenceStudy":null,"SecondarySourceID":null,"Source":"Regeneron Pharmaceuticals","SourceReferenceId":"NCT01926782","TaStudyDesign":"Randomized","Title":"A Randomized, Double-Blind, Placebo-Controlled Study to Evaluate the Efficacy and Safety of an Every Four Weeks Treatment Regimen of Alirocumab in Patients With Primary Hypercholesterolemia","TrialOutcome":null,"Volume":null,"Id":179246831,"Created":"2020-04-10T14:48:20.4384957Z","VersionNo":2,"ExtractData":null,"Digitized":true,"IsRapidExtract":false,"IsUploaded":false},"design":"Randomized Controlled Trial","conditions":[{"label":"Cholesterol Total Increased","id":"SUE_c"}],"phase":3,"name":"NCT01926782","trialIds":["NCT01926782"],"acronyms":[],"outcomeCount":156,"id":179246831,"groups":[{"Id":"4r","RefId":"B5|O2~Alirocumab 75 mg Q2W/Up 150 mg Q2W Without Concomitant Statin","OriginalName":"Alirocumab 75 mg Q2W/Up 150 mg Q2W Without Concomitant Statin","N":37,"age":59.3,"ageSD":11.3,"male":37.83783783783784,"Interventions":[{"termIds":[["SUBYEL","SUB_Oc"],["SUNUVb"]]}],"analyzeAs":"Alirocumab","analyzableScore":1.0717734625362931,"matchingScore":0},{"Id":"zB","RefId":"B6|O3~Alirocumab 300 mg Q4W/Up 150 mg Q2W Without Concomitant Statin","OriginalName":"Alirocumab 300 mg Q4W/Up 150 mg Q2W 
Without Concomitant Statin","N":146,"age":59.2,"ageSD":10.8,"male":45.205479452054796,"Interventions":[{"termIds":[["SUBYEL","SUB_Oc"]]}],"analyzeAs":"Statins","analyzableScore":1.0717734625362931,"matchingScore":0},{"Id":"3!","RefId":"B4|O1~Placebo Q2W Without Concomitant Statin","OriginalName":"Placebo Q2W Without Concomitant Statin","N":73,"age":59.4,"ageSD":10.2,"male":54.794520547945204,"Interventions":[{"termIds":[["SUGeLS"],["SUBYEL","SUB_Oc"]]}],"analyzeAs":"Control","analyzableScore":1.2020833333333334,"matchingScore":0},{"Id":"tv","RefId":"E3","OriginalName":"Alirocumab 300 mg Q4W/Up 150 mg Q2W","Interventions":[{"termIds":[["SUCO54","SUNUVb"]]}]},{"Id":"jt","RefId":"B3|O3~Alirocumab 300 mg Q4W/Up 150 mg Q2W With Concomitant Statin","OriginalName":"Alirocumab 300 mg Q4W/Up 150 mg Q2W With Concomitant Statin","N":312,"age":61.6,"ageSD":10,"male":60.8974358974359,"Interventions":[{"termIds":[["SUBYEL","SUB_Oc"]]}]},{"Id":"5!","RefId":"E2","OriginalName":"Alirocumab 75 mg Q2W/Up 150 mg Q2W","Interventions":[{"termIds":[["SUNUVb"]]}]},{"Id":"4E","RefId":"B2|O2~Alirocumab 75 mg Q2W/Up 150 mg Q2W With Concomitant Statin","OriginalName":"Alirocumab 75 mg Q2W/Up 150 mg Q2W With Concomitant Statin","N":78,"age":60.7,"ageSD":9.1,"male":65.38461538461539,"Interventions":[{"termIds":[["SUBYEL","SUB_Oc"],["SUNUVb"]]}]},{"Id":"i4","Interventions":[{"Id":"Ya","Name":178613599,"Treatments":[{"Id":"((","Phase":"k)"}],"Type":"Drug","termIds":[["SUGeLS"],["SUNUVb"]],"terms":[["Placebo"],["Alirocumab"]]},{"Id":"o)","Name":2159990,"Treatments":[{"Id":"1$","Phase":"k)"}],"Type":"Drug","termIds":[["SUBYEL"]],"terms":[["Statins"]]}],"RefId":"E1|Placebo Q2W","OriginalName":"Placebo Q2W"},{"Id":"Ls","RefId":"B1|O1~Placebo Q2W With Concomitant Statin","OriginalName":"Placebo Q2W With Concomitant 
Statin","N":157,"age":61.6,"ageSD":9.7,"male":64.3312101910828,"Interventions":[{"termIds":[["SUGeLS"],["SUBYEL","SUB_Oc"]]}]}],"hasDocData":true,"hasRapidExtract":false,"N":803,"queryScore":1.4868329805051381,"matchingScore":7.960635921410255,"score":22.084654254966498,"outcomes":[{"id":"179246387","type":"Change","unit":"%","termIds":[["SUF0R","SUBskP"]],"quantifiers":[],"name":"Calculated LDL-C in Not Receiving Concomitant Statin Therapy - On-Treatment Analysis","cells":[{"number":-0.4,"unit":"%","group":"3!","varType":"se","N":70,"se":2,"sd":16.73},{"number":-54.6,"unit":"%","group":"4r","varType":"se","N":37,"se":2.8,"sd":17.03},{"number":-59.4,"unit":"%","group":"zB","varType":"se","N":141,"se":1.4,"sd":16.62}],"time":{"Id":67122072,"Low":{"Value":"Baseline"},"High":{"Number":24,"Unit":"wk"},"Type":"Total","days":168,"description":"24wk"},"score":2.08,"matchingTerm":"SUF0R","suggestedPositive":false,"sourceUnit":"%"},{"id":"179246389","type":"Change","unit":"%","termIds":[["SUF0R","SUBskP"]],"quantifiers":[],"name":"Calculated LDL-C in Receiving Concomitant Statin Therapy - On-Treatment Analysis","cells":[{"number":-0.3,"unit":"%","group":"Ls","varType":"se","N":151,"se":2.1,"sd":25.81},{"number":-55.1,"unit":"%","group":"4E","varType":"se","N":75,"se":3,"sd":25.98},{"number":-62.3,"unit":"%","group":"jt","varType":"se","N":302,"se":1.5,"sd":26.07}],"time":{"Id":67122072,"Low":{"Value":"Baseline"},"High":{"Number":24,"Unit":"wk"},"Type":"Total","days":168,"description":"24wk"},"score":2.08,"matchingTerm":"SUF0R","suggestedPositive":false,"sourceUnit":"%"},{"id":"179246393","type":"Change","unit":"%","termIds":[["SUF0R","SUBskP"]],"quantifiers":[],"name":"Calculated LDL-C in Not Receiving Concomitant Statin Therapy - On-Treatment 
Analysis","cells":[{"number":-0.5,"unit":"%","group":"3!","varType":"se","N":70,"se":2,"sd":16.73},{"number":-53.9,"unit":"%","group":"4r","varType":"se","N":37,"se":2.7,"sd":16.42},{"number":-60,"unit":"%","group":"zB","varType":"se","N":141,"se":1.4,"sd":16.62}],"time":{"Id":67122069,"Low":{"Value":"Baseline"},"High":{"Number":12,"Unit":"wk"},"Type":"Total","days":84,"description":"12wk"},"score":2.08,"matchingTerm":"SUF0R","suggestedPositive":false,"sourceUnit":"%"},{"id":"179246394","type":"Change","unit":"%","termIds":[["SUF0R","SUBskP"]],"quantifiers":[],"name":"Calculated LDL-C in Receiving Concomitant Statin Therapy - On-Treatment Analysis","cells":[{"number":1.4,"unit":"%","group":"Ls","varType":"se","N":151,"se":1.9,"sd":23.35},{"number":-47.3,"unit":"%","group":"4E","varType":"se","N":75,"se":2.8,"sd":24.25},{"number":-58,"unit":"%","group":"jt","varType":"se","N":302,"se":1.4,"sd":24.33}],"time":{"Id":67122069,"Low":{"Value":"Baseline"},"High":{"Number":12,"Unit":"wk"},"Type":"Total","days":84,"description":"12wk"},"score":2.08,"matchingTerm":"SUF0R","suggestedPositive":false,"sourceUnit":"%"}],"characteristics":[{"id":"179246354","type":"Binary","isCharacteristic":true,"termIds":[["SUE_c","SUCbN","SUyJj"]],"quantifiers":[],"name":"Patients not having adequate control of their hypercholesterolemia based on their individual level of CVD risk","cells":[],"number":100}],"outcomesScore":18.97947630112307} -------------------------------------------------------------------------------- /tests/benchmark.js: -------------------------------------------------------------------------------- 1 | import * as msgpackr from "../index.js"; 2 | var msgpack_node = tryRequire("msgpack"); 3 | var msgpack_msgpack = tryRequire("@msgpack/msgpack"); 4 | var msgpack_lite = tryRequire("msgpack-lite"); 5 | var msgpack_js = tryRequire("msgpack-js"); 6 | var msgpack_js_v5 = tryRequire("msgpack-js-v5"); 7 | var msgpack5 = tryRequire("msgpack5"); 8 | var msgpack_unpack = 
tryRequire("msgpack-unpack"); 9 | var msgpack_codec = tryRequire("msgpack.codec"); 10 | var notepack = tryRequire("notepack"); 11 | var what_the_pack = tryRequire("what-the-pack"); 12 | var avro = tryRequire('avsc') 13 | var cbor = tryRequire('cbor') 14 | 15 | /*msgpack5 = msgpack5 && msgpack5(); 16 | msgpack_codec = msgpack_codec && msgpack_codec.msgpack; 17 | what_the_pack = what_the_pack && what_the_pack.initialize(2**20);*/ 18 | 19 | var pkg = require("../package.json"); 20 | var data = require("./example5.json"); 21 | //var packed = msgpack_lite && msgpack_lite.encode(data); 22 | var expected = JSON.stringify(data); 23 | 24 | var argv = Array.prototype.slice.call(process.argv, 2); 25 | console.log('msgpackr: ',msgpackr) 26 | if (argv[0] === "-v") { 27 | console.warn(pkg.name + " " + pkg.version); 28 | process.exit(0); 29 | } 30 | 31 | var limit = 5; 32 | if (argv[0] - 0) limit = argv.shift() - 0; 33 | limit *= 1000; 34 | 35 | var COL1 = 58; 36 | var COL2 = 7; 37 | var COL3 = 5; 38 | var COL4 = 6; 39 | 40 | console.log(rpad("operation", COL1), "|", " op ", "|", " ms ", "|", " op/s "); 41 | console.log(rpad("", COL1, "-"), "|", lpad(":", COL2, "-"), "|", lpad(":", COL3, "-"), "|", lpad(":", COL4, "-")); 42 | 43 | var buf, obj; 44 | 45 | if (msgpackr) { 46 | let packr 47 | packr = new msgpackr.Packr({ structures: [] }) 48 | buf = bench('msgpackr w/ shared structures: packr.pack(obj);', packr.pack.bind(packr), data); 49 | //buf = bench('msgpackr w/ shared structures: packr.pack(obj);', data => {let result = packr.pack(data); packr.resetMemory(); return result;}, data); 50 | 51 | obj = bench('msgpackr w/ shared structures: packr.unpack(buf);', packr.unpack.bind(packr), buf); 52 | test(obj); 53 | 54 | packr = new msgpackr.Packr({ useRecords: false }) 55 | buf = bench('require("msgpackr").pack(obj);', msgpackr.pack, data); 56 | //buf = bench('require("msgpackr").pack(obj);', data => {let result = packr.pack(data); packr.resetMemory(); return result;}, data); 57 | 58 
| obj = bench('require("msgpackr").unpack(buf);', msgpackr.unpack, buf); 59 | test(obj); 60 | 61 | packr = new msgpackr.Packr({ bundleStrings: true, structures: [] }) 62 | buf = bench('bundled strings packr.pack(obj);', packr.pack.bind(packr), data); 63 | //buf = bench('require("msgpackr").pack(obj);', data => {let result = packr.pack(data); packr.resetMemory(); return result;}, data); 64 | 65 | obj = bench('bundled strings packr.unpack(buf);', packr.unpack.bind(packr), buf); 66 | test(obj); 67 | 68 | } 69 | 70 | if (JSON) { 71 | buf = bench('buf = Buffer(JSON.stringify(obj));', JSON_stringify, data); 72 | obj = bench('obj = JSON.parse(buf);', JSON.parse, buf); 73 | test(obj); 74 | } 75 | 76 | if (msgpack_lite) { 77 | buf = bench('buf = require("msgpack-lite").encode(obj);', msgpack_lite.encode, data); 78 | obj = bench('obj = require("msgpack-lite").decode(buf);', msgpack_lite.decode, packed); // FIXME(review): 'packed' is only declared in the commented-out line 21 above, so this is an undeclared identifier when msgpack-lite is installed — should this be 'buf' (encoded on line 77)? Verify before running. 79 | test(obj); 80 | } 81 | 82 | if (msgpack_msgpack) { 83 | buf = bench('buf = require("@msgpack/msgpack").encode(obj);', msgpack_msgpack.encode, data); 84 | obj = bench('obj = require("@msgpack/msgpack").decode(buf);', msgpack_msgpack.decode, buf); 85 | test(obj); 86 | } 87 | 88 | if (msgpack_node) { 89 | buf = bench('buf = require("msgpack").pack(obj);', msgpack_node.pack, data); 90 | obj = bench('obj = require("msgpack").unpack(buf);', msgpack_node.unpack, buf); 91 | test(obj); 92 | } 93 | 94 | if (msgpack_codec) { 95 | buf = bench('buf = Buffer(require("msgpack.codec").msgpack.pack(obj));', msgpack_codec_pack, data); 96 | obj = bench('obj = require("msgpack.codec").msgpack.unpack(buf);', msgpack_codec.unpack, buf); 97 | test(obj); 98 | } 99 | 100 | if (msgpack_js_v5) { 101 | buf = bench('buf = require("msgpack-js-v5").encode(obj);', msgpack_js_v5.encode, data); 102 | obj = bench('obj = require("msgpack-js-v5").decode(buf);', msgpack_js_v5.decode, buf); 103 | test(obj); 104 | } 105 | 106 | if (msgpack_js) { 107 | buf = bench('buf = require("msgpack-js").encode(obj);', 
msgpack_js.encode, data); 108 | obj = bench('obj = require("msgpack-js").decode(buf);', msgpack_js.decode, buf); 109 | test(obj); 110 | } 111 | 112 | if (msgpack5) { 113 | buf = bench('buf = require("msgpack5")().encode(obj);', msgpack5.encode, data); 114 | obj = bench('obj = require("msgpack5")().decode(buf);', msgpack5.decode, buf); 115 | test(obj); 116 | } 117 | 118 | if (notepack) { 119 | buf = bench('buf = require("notepack").encode(obj);', notepack.encode, data); 120 | obj = bench('obj = require("notepack").decode(buf);', notepack.decode, buf); 121 | test(obj); 122 | } 123 | if (what_the_pack) { 124 | buf = bench('require("what-the-pack")... encoder.encode(obj);', what_the_pack.encode, data); 125 | obj = bench('require("what-the-pack")... encoder.decode(buf);', what_the_pack.decode, buf); 126 | test(obj); 127 | } 128 | 129 | if (msgpack_unpack) { 130 | obj = bench('obj = require("msgpack-unpack").decode(buf);', msgpack_unpack, packed); 131 | test(obj); 132 | } 133 | 134 | if (avro) { 135 | const type = avro.Type.forValue(data); 136 | buf = bench('require("avsc")...make schema/type...type.toBuffer(obj);', type.toBuffer.bind(type), data); 137 | obj = bench('require("avsc")...make schema/type...type.fromBuffer(obj);', type.fromBuffer.bind(type), buf); 138 | } 139 | if (cbor) { 140 | buf = bench('buf = require("cbor").encode(obj);', cbor.encode, data); 141 | obj = bench('obj = require("cbor").decode(buf);', cbor.decode, buf); 142 | test(obj); 143 | } 144 | 145 | function JSON_stringify(src) { 146 | return JSON.stringify(src); 147 | } 148 | 149 | function msgpack_codec_pack(data) { 150 | return Buffer(msgpack_codec.pack(data)); 151 | } 152 | 153 | function bench(name, func, src) { 154 | if (argv.length) { 155 | var match = argv.filter(function(grep) { 156 | return (name.indexOf(grep) > -1); 157 | }); 158 | if (!match.length) return SKIP; 159 | } 160 | var ret, duration; 161 | var start = new Date() - 0; 162 | var count = 0; 163 | while (1) { 164 | var end = new 
Date() - 0; 165 | duration = end - start; 166 | if (duration >= limit) break; 167 | while ((++count) % 100) ret = func(src); 168 | } 169 | name = rpad(name, COL1); 170 | var score = Math.floor(count / duration * 1000); 171 | count = lpad(count, COL2); 172 | duration = lpad(duration, COL3); 173 | score = lpad(score, COL4); 174 | console.log(name, "|", count, "|", duration, "|", score); 175 | return ret; 176 | } 177 | 178 | function rpad(str, len, chr) { 179 | if (!chr) chr = " "; 180 | while (str.length < len) str += chr; 181 | return str; 182 | } 183 | 184 | function lpad(str, len, chr) { 185 | if (!chr) chr = " "; 186 | str += ""; 187 | while (str.length < len) str = chr + str; 188 | return str; 189 | } 190 | 191 | function test(actual) { 192 | if (actual === SKIP) return; 193 | actual = JSON.stringify(actual); 194 | if (actual === expected) return; 195 | console.warn("expected: " + expected); 196 | console.warn("actual: " + actual); 197 | } 198 | 199 | function SKIP() { 200 | } 201 | 202 | function tryRequire(name) { 203 | try { 204 | return require(name); 205 | } catch (e) { 206 | // ignore 207 | } 208 | } 209 | -------------------------------------------------------------------------------- /tests/benchmark-stream.cjs: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env node 2 | 3 | var PassThrough = require("stream").PassThrough; 4 | var async = require("async"); 5 | 6 | let { PackrStream, UnpackrStream } = require(".."); 7 | var msgpack = require("msgpack-lite"); 8 | var Encoder = require("msgpack-lite/lib/encoder").Encoder; 9 | var Decoder = require("msgpack-lite/lib/decoder").Decoder; 10 | var notepack = require("notepack"); 11 | 12 | var pkg = require("../package.json"); 13 | 14 | // a sample fluentd message 15 | var data = ["tag", [[1440949922, {"message": "hi there"}]]]; 16 | var packed = msgpack.encode(data); // 30 bytes per message 17 | var packsize = packed.length; 18 | var opcount = 1000000; 19 | var 
joincount = 100; 20 | var packjoin = repeatbuf(packed, joincount); // 3KB per chunk 21 | var limit = 2; 22 | 23 | var blocksToJoin = [] 24 | var streamForJoin = new PackrStream(); 25 | streamForJoin.on("data", data => blocksToJoin.push(data)); 26 | for (var j = 0; j < joincount; j++) { 27 | streamForJoin.write(data); 28 | } 29 | var packjoinWithRecords = Buffer.concat(blocksToJoin) 30 | 31 | var argv = Array.prototype.slice.call(process.argv, 2); 32 | 33 | if (argv[0] === "-v") { 34 | console.warn(pkg.name + " " + pkg.version); 35 | process.exit(0); 36 | } 37 | 38 | if (argv[0] - 0) limit = argv.shift() - 0; 39 | 40 | var list = [ 41 | ['new PackrStream().write(obj);', encode5], 42 | ['new UnpackrStream().write(buf);', decode5], 43 | ['stream.write(msgpack.encode(obj));', encode1], 44 | ['stream.write(msgpack.decode(buf));', decode1], 45 | ['stream.write(notepack.encode(obj));', encode4], 46 | ['stream.write(notepack.decode(buf));', decode4], 47 | ['msgpack.Encoder().on("data",ondata).encode(obj);', encode2], 48 | ['msgpack.createDecodeStream().write(buf);', decode3], 49 | ['msgpack.createEncodeStream().write(obj);', encode3], 50 | ['msgpack.Decoder().on("data",ondata).decode(buf);', decode2], 51 | // ['stream.write(Buffer.from(JSON.stringify(obj)));', stringify], 52 | // ['stream.write(JSON.parse(buf));', parse] 53 | ]; 54 | 55 | function encode5(callback) { 56 | var stream = new PackrStream(); 57 | var cnt = counter(callback); 58 | stream.on("data", cnt.inc); 59 | stream.on("end", cnt.end); 60 | for (var j = 0; j < opcount; j++) { 61 | stream.write(data); 62 | } 63 | stream.end(); 64 | } 65 | 66 | function encode1(callback) { 67 | var stream = new PassThrough(); 68 | var cnt = counter(callback); 69 | stream.on("data", cnt.buf); 70 | stream.on("end", cnt.end); 71 | for (var j = 0; j < opcount; j++) { 72 | stream.write(msgpack.encode(data)); 73 | } 74 | stream.end(); 75 | } 76 | 77 | function encode2(callback) { 78 | var stream = new PassThrough(); 79 | var cnt = 
counter(callback); 80 | stream.on("data", cnt.buf); 81 | stream.on("end", cnt.end); 82 | var encoder = Encoder(); 83 | encoder.on("data", function(chunk) { 84 | stream.write(chunk); 85 | }); 86 | encoder.on("end", function() { 87 | stream.end(); 88 | }); 89 | for (var j = 0; j < opcount; j++) { 90 | encoder.encode(data); 91 | } 92 | encoder.end(); 93 | } 94 | 95 | function encode3(callback) { 96 | var stream = msgpack.createEncodeStream(); 97 | var cnt = counter(callback); 98 | stream.on("data", cnt.buf); 99 | stream.on("end", cnt.end); 100 | for (var j = 0; j < opcount; j++) { 101 | stream.write(data); 102 | } 103 | stream.end(); 104 | } 105 | 106 | function encode4(callback) { 107 | var stream = new PassThrough(); 108 | var cnt = counter(callback); 109 | stream.on("data", cnt.buf); 110 | stream.on("end", cnt.end); 111 | for (var j = 0; j < opcount; j++) { 112 | stream.write(notepack.encode(data)); 113 | } 114 | stream.end(); 115 | } 116 | 117 | function decode5(callback) { 118 | var stream = new UnpackrStream(); 119 | var cnt = counter(callback); 120 | stream.on("data", cnt.inc); 121 | stream.on("end", cnt.end); 122 | for (var j = 0; j < opcount / joincount; j++) { 123 | stream.write(packjoinWithRecords); 124 | } 125 | stream.end(); 126 | } 127 | 128 | function decode1(callback) { 129 | var stream = new PassThrough({objectMode: true}); 130 | var cnt = counter(callback); 131 | stream.on("data", cnt.inc); 132 | stream.on("end", cnt.end); 133 | for (var j = 0; j < opcount; j++) { 134 | stream.write(msgpack.decode(packed)); 135 | } 136 | stream.end(); 137 | } 138 | 139 | function decode2(callback) { 140 | var stream = new PassThrough({objectMode: true}); 141 | var cnt = counter(callback); 142 | stream.on("data", cnt.inc); 143 | stream.on("end", cnt.end); 144 | var decoder = Decoder(); 145 | decoder.on("data", function(chunk) { 146 | stream.write(chunk); 147 | }); 148 | decoder.on("end", function() { 149 | stream.end(); 150 | }); 151 | for (var j = 0; j < opcount / 
joincount; j++) { 152 | decoder.decode(packjoin); 153 | } 154 | decoder.end(); 155 | } 156 | 157 | function decode3(callback) { 158 | var stream = msgpack.createDecodeStream(); 159 | var cnt = counter(callback); 160 | stream.on("data", cnt.inc); 161 | stream.on("end", cnt.end); 162 | for (var j = 0; j < opcount / joincount; j++) { 163 | stream.write(packjoin); 164 | } 165 | stream.end(); 166 | } 167 | 168 | function decode4(callback) { 169 | var stream = new PassThrough({objectMode: true}); 170 | var cnt = counter(callback); 171 | stream.on("data", cnt.inc); 172 | stream.on("end", cnt.end); 173 | for (var j = 0; j < opcount; j++) { 174 | stream.write(notepack.decode(packed)); 175 | } 176 | stream.end(); 177 | } 178 | 179 | function rpad(str, len, chr) { 180 | if (!chr) chr = " "; 181 | str += ""; 182 | while (str.length < len) str += chr; 183 | return str; 184 | } 185 | 186 | function lpad(str, len, chr) { 187 | if (!chr) chr = " "; 188 | str += ""; 189 | while (str.length < len) str = chr + str; 190 | return str; 191 | } 192 | 193 | function repeatbuf(buf, cnt) { 194 | var array = []; 195 | for (var i = 0; i < cnt; i++) { 196 | array.push(buf); 197 | } 198 | return Buffer.concat(array); 199 | } 200 | 201 | function counter(callback) { 202 | var cnt = 0; 203 | return {buf: b, inc: i, end: e}; 204 | 205 | function b(buf) { 206 | cnt += buf.length / packsize; 207 | } 208 | 209 | function i() { 210 | cnt++; 211 | } 212 | 213 | function e() { 214 | cnt = Math.round(cnt); 215 | callback(null, cnt); 216 | } 217 | } 218 | 219 | function run() { 220 | // task filter 221 | if (argv.length) { 222 | list = list.filter(function(pair) { 223 | var name = pair[0]; 224 | var match = argv.filter(function(grep) { 225 | return (name.indexOf(grep) > -1); 226 | }); 227 | return match.length; 228 | }); 229 | } 230 | 231 | // run tasks repeatedly 232 | var tasks = []; 233 | for (var i = 0; i < limit; i++) { 234 | tasks.push(oneset); 235 | } 236 | async.series(tasks, end); 237 | 238 | // 
run a series of tasks 239 | function oneset(callback) { 240 | async.eachSeries(list, bench, callback); 241 | } 242 | 243 | // run a single benchmark 244 | function bench(pair, callback) { 245 | process.stdout.write("."); 246 | var func = pair[1]; 247 | var start = new Date() - 0; 248 | func(function(err, cnt) { 249 | var end = new Date() - 0; 250 | var array = pair[2] || (pair[2] = []); 251 | array.push(end - start); 252 | pair[3] = cnt; 253 | setTimeout(callback, 100); 254 | }); 255 | } 256 | 257 | // show result 258 | function end() { 259 | var title = "operation (" + opcount + " x " + limit + ")"; 260 | process.stdout.write("\n"); 261 | 262 | // table header 263 | var COL1 = 48; 264 | console.log(rpad(title, COL1), "|", " op ", "|", " ms ", "|", " op/s "); 265 | console.log(rpad("", COL1, "-"), "|", "------:", "|", "----:", "|", "-----:"); 266 | 267 | // table body 268 | list.forEach(function(pair) { 269 | var name = pair[0]; 270 | var op = pair[3]; 271 | var array = pair[2]; 272 | array = array.sort(function(a, b) { 273 | return a > b; 274 | }); 275 | var fastest = array[0]; 276 | var score = Math.floor(opcount / fastest * 1000); 277 | console.log(rpad(name, COL1), "|", lpad(op, 7), "|", lpad(fastest, 5), "|", lpad(score, 6)); 278 | }); 279 | } 280 | } 281 | 282 | run(); 283 | -------------------------------------------------------------------------------- /tests/benchmark.cjs: -------------------------------------------------------------------------------- 1 | var msgpackr = require("../dist/node.cjs"); 2 | console.log('isNativeAccelerationEnabled', msgpackr.isNativeAccelerationEnabled) 3 | var msgpack_node = tryRequire("msgpack"); 4 | var msgpack_msgpack = tryRequire("@msgpack/msgpack"); 5 | var msgpack_lite = tryRequire("msgpack-lite"); 6 | var msgpack_js = tryRequire("msgpack-js"); 7 | var msgpack_js_v5 = tryRequire("msgpack-js-v5"); 8 | var msgpack5 = tryRequire("msgpack5"); 9 | var msgpack_unpack = tryRequire("msgpack-unpack"); 10 | var msgpack_codec = 
tryRequire("msgpack.codec"); 11 | var notepack = tryRequire("notepack"); 12 | var what_the_pack = tryRequire("what-the-pack"); 13 | var avro = tryRequire('avsc') 14 | var cbor = tryRequire('cbor') 15 | var inspector = require('inspector'); 16 | //inspector.open(9229, null, true); debugger 17 | const LAZY = { lazy: true }; 18 | 19 | 20 | msgpack5 = msgpack5 && msgpack5(); 21 | msgpack_codec = msgpack_codec && msgpack_codec.msgpack; 22 | what_the_pack = what_the_pack && what_the_pack.initialize(2**20); 23 | 24 | var pkg = require("../package.json"); 25 | var data = require("./example4.json"); 26 | var packed = msgpack_lite && msgpack_lite.encode(data); 27 | var expected = JSON.stringify(data); 28 | 29 | var argv = Array.prototype.slice.call(process.argv, 2); 30 | 31 | if (argv[0] === "-v") { 32 | console.warn(pkg.name + " " + pkg.version); 33 | process.exit(0); 34 | } 35 | 36 | var limit = 5; 37 | if (argv[0] - 0) limit = argv.shift() - 0; 38 | limit *= 1000; 39 | 40 | var COL1 = 58; 41 | var COL2 = 7; 42 | var COL3 = 5; 43 | var COL4 = 6; 44 | 45 | console.log(rpad("operation", COL1), "|", " op ", "|", " ms ", "|", " op/s ", "|", "size"); 46 | console.log(rpad("", COL1, "-"), "|", lpad(":", COL2, "-"), "|", lpad(":", COL3, "-"), "|", lpad(":", COL4, "-"), "|", lpad(":", COL4, "-")); 47 | 48 | var buf, obj; 49 | 50 | if (msgpackr) { 51 | let packr, last 52 | let keys = ['littleNum'];//Object.keys(data); 53 | packr = new msgpackr.Packr({ structures: [] }) 54 | buf = bench('msgpackr w/ shared structures: packr.pack(obj);', packr.pack.bind(packr), data); 55 | console.log('buffer size', buf.length); 56 | //buf = bench('msgpackr w/ shared structures: packr.pack(obj);', data => {let result = packr.pack(data); packr.resetMemory(); return result;}, data); 57 | obj = bench('msgpackr w/ shared structures: packr.unpack(buf);', value => packr.unpack(value), buf); 58 | test(obj); 59 | 60 | packr = new msgpackr.Packr({ structures: [],randomAccessStructure: true, 
saveStructures(structures) { 61 | } }) 62 | buf = bench('msgpackr w/ random access structures: packr.pack(obj);', value => packr.pack(value), data); 63 | //buf = bench('msgpackr w/ shared structures: packr.pack(obj);', data => {let result = packr.pack(data); packr.resetMemory(); return result;}, data); 64 | console.log('buffer size', buf.length); 65 | obj = bench('msgpackr w/ random access structures: packr.unpack(buf);', value => packr.unpack(value), buf); 66 | test(obj); 67 | 68 | packr = new msgpackr.Packr({ useRecords: false }) 69 | buf = bench('require("msgpackr").pack(obj);', msgpackr.pack, data); 70 | //buf = bench('require("msgpackr").pack(obj);', data => {let result = packr.pack(data); packr.resetMemory(); return result;}, data); 71 | 72 | obj = bench('require("msgpackr").unpack(buf);', msgpackr.unpack, buf); 73 | test(obj); 74 | 75 | packr = new msgpackr.Packr({ bundleStrings: true, structures: [] }) 76 | buf = bench('bundled strings packr.pack(obj);', packr.pack.bind(packr), data); 77 | //buf = bench('require("msgpackr").pack(obj);', data => {let result = packr.pack(data); packr.resetMemory(); return result;}, data); 78 | 79 | obj = bench('bundled strings packr.unpack(buf);', packr.unpack.bind(packr), buf); 80 | test(obj); 81 | 82 | } 83 | 84 | if (JSON) { 85 | console.log('JSON') 86 | buf = bench('buf = Buffer(JSON.stringify(obj));', JSON_stringify, data); 87 | obj = bench('obj = JSON.parse(buf);', JSON.parse, buf); 88 | test(obj); 89 | } 90 | 91 | if (msgpack_lite) { 92 | buf = bench('buf = require("msgpack-lite").encode(obj);', msgpack_lite.encode, data); 93 | obj = bench('obj = require("msgpack-lite").decode(buf);', msgpack_lite.decode, packed); 94 | test(obj); 95 | } 96 | 97 | if (msgpack_msgpack) { 98 | buf = bench('buf = require("@msgpack/msgpack").encode(obj);', msgpack_msgpack.encode, data); 99 | obj = bench('obj = require("@msgpack/msgpack").decode(buf);', msgpack_msgpack.decode, buf); 100 | test(obj); 101 | } 102 | 103 | if (msgpack_node) { 
104 | buf = bench('buf = require("msgpack").pack(obj);', msgpack_node.pack, data); 105 | obj = bench('obj = require("msgpack").unpack(buf);', msgpack_node.unpack, buf); 106 | test(obj); 107 | } 108 | 109 | if (msgpack_codec) { 110 | buf = bench('buf = Buffer(require("msgpack.codec").msgpack.pack(obj));', msgpack_codec_pack, data); 111 | obj = bench('obj = require("msgpack.codec").msgpack.unpack(buf);', msgpack_codec.unpack, buf); 112 | test(obj); 113 | } 114 | 115 | if (msgpack_js_v5) { 116 | buf = bench('buf = require("msgpack-js-v5").encode(obj);', msgpack_js_v5.encode, data); 117 | obj = bench('obj = require("msgpack-js-v5").decode(buf);', msgpack_js_v5.decode, buf); 118 | test(obj); 119 | } 120 | 121 | if (msgpack_js) { 122 | buf = bench('buf = require("msgpack-js").encode(obj);', msgpack_js.encode, data); 123 | obj = bench('obj = require("msgpack-js").decode(buf);', msgpack_js.decode, buf); 124 | test(obj); 125 | } 126 | 127 | if (msgpack5) { 128 | buf = bench('buf = require("msgpack5")().encode(obj);', msgpack5.encode, data); 129 | obj = bench('obj = require("msgpack5")().decode(buf);', msgpack5.decode, buf); 130 | test(obj); 131 | } 132 | 133 | if (notepack) { 134 | buf = bench('buf = require("notepack").encode(obj);', notepack.encode, data); 135 | obj = bench('obj = require("notepack").decode(buf);', notepack.decode, buf); 136 | test(obj); 137 | } 138 | if (what_the_pack) { 139 | buf = bench('require("what-the-pack")... encoder.encode(obj);', what_the_pack.encode, data); 140 | obj = bench('require("what-the-pack")... 
encoder.decode(buf);', what_the_pack.decode, buf); 141 | test(obj); 142 | } 143 | 144 | if (msgpack_unpack) { 145 | obj = bench('obj = require("msgpack-unpack").decode(buf);', msgpack_unpack, packed); 146 | test(obj); 147 | } 148 | 149 | if (avro) { 150 | const type = avro.Type.forValue(data); 151 | buf = bench('require("avsc")...make schema/type...type.toBuffer(obj);', type.toBuffer.bind(type), data); 152 | obj = bench('require("avsc")...make schema/type...type.fromBuffer(obj);', type.fromBuffer.bind(type), buf); 153 | } 154 | if (cbor) { 155 | buf = bench('buf = require("cbor").encode(obj);', cbor.encode, data); 156 | obj = bench('obj = require("cbor").decode(buf);', cbor.decode, buf); 157 | test(obj); 158 | } 159 | 160 | function JSON_stringify(src) { 161 | return Buffer.from(JSON.stringify(src)); 162 | } 163 | 164 | function msgpack_codec_pack(data) { 165 | return Buffer.from(msgpack_codec.pack(data)); 166 | } 167 | 168 | function bench(name, func, src) { 169 | if (argv.length) { 170 | var match = argv.filter(function(grep) { 171 | return (name.indexOf(grep) > -1); 172 | }); 173 | if (!match.length) return SKIP; 174 | } 175 | var ret, duration; 176 | var start = new Date() - 0; 177 | var count = 0; 178 | while (1) { 179 | var end = new Date() - 0; 180 | duration = end - start; 181 | if (duration >= limit) break; 182 | while ((++count) % 100) ret = func(src); 183 | } 184 | name = rpad(name, COL1); 185 | var score = Math.floor(count / duration * 1000); 186 | count = lpad(count, COL2); 187 | duration = lpad(duration, COL3); 188 | score = lpad(score, COL4); 189 | console.log(name, "|", count, "|", duration, "|", score, "|", src.length); 190 | return ret; 191 | } 192 | 193 | function rpad(str, len, chr) { 194 | if (!chr) chr = " "; 195 | while (str.length < len) str += chr; 196 | return str; 197 | } 198 | 199 | function lpad(str, len, chr) { 200 | if (!chr) chr = " "; 201 | str += ""; 202 | while (str.length < len) str = chr + str; 203 | return str; 204 | } 205 | 
206 | function test(actual) { 207 | if (actual === SKIP) return; 208 | actual = JSON.stringify(actual); 209 | if (actual === expected) return; 210 | console.warn("expected: " + expected); 211 | console.warn("actual: " + actual); 212 | } 213 | 214 | function SKIP() { 215 | } 216 | 217 | function tryRequire(name) { 218 | try { 219 | return require(name); 220 | } catch (e) { 221 | // ignore 222 | } 223 | } 224 | -------------------------------------------------------------------------------- /tests/sample-large.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "_id":"56490c18d9275a0003000000", 4 | "author":null, 5 | "created_at":"2015-11-15T22:50:00.170Z", 6 | "description":"A weekly discussion by Ruby developers about programming, life, and careers.", 7 | "image":"https://s3.amazonaws.com/devchat.tv/ruby-rogues-thumb.jpg", 8 | "keywords":[ 9 | "Business", 10 | "Careers", 11 | "Technology", 12 | "Software How-To" 13 | ], 14 | "language":"en", 15 | "permalink":"http://rubyrogues.com/", 16 | "published":true, 17 | "title":"The Ruby Rogues", 18 | "updated_at":"2015-11-15T22:50:06.565Z", 19 | "url":"http://feeds.feedwrench.com/RubyRogues.rss" 20 | }, 21 | { 22 | "_id":"56490d6ad9275a00030000eb", 23 | "author":null, 24 | "created_at":"2015-11-15T22:55:38.074Z", 25 | "description":"Um podcast feito para programadores e empreendedores.", 26 | "image":"http://www.grokpodcast.com/images/logo_itunes_grande.png", 27 | "keywords":[ 28 | "Technology", 29 | "Podcasting", 30 | "Business", 31 | "Careers" 32 | ], 33 | "language":"pt-BR", 34 | "permalink":"http://www.grokpodcast.com/", 35 | "published":true, 36 | "title":"Grok Podcast", 37 | "updated_at":"2015-11-15T22:55:47.498Z", 38 | "url":"http://www.grokpodcast.com/atom.xml" 39 | }, 40 | { 41 | "_id":"564a1c30b1191d0003000000", 42 | "author":null, 43 | "created_at":"2015-11-16T18:10:56.610Z", 44 | "description":"The Web Platform Podcast is a developer discussion that dives deep 
into ‘all things’ web. We discuss everything from developing for mobile to building HDTV software. From wearables \u0026 robotics to user experience \u0026 mentoring, we bring to our listeners everything related to building products \u0026 services for The Web Platform of today, tomorrow, and beyond.", 45 | "image":"http://static.libsyn.com/p/assets/f/7/2/0/f7208dae16d0543e/twp-logo-flat-blue-square.png", 46 | "keywords":[ 47 | "Technology", 48 | "Software How-To", 49 | "Tech News" 50 | ], 51 | "language":"en", 52 | "permalink":"http://thewebplatform.libsyn.com/webpage", 53 | "published":true, 54 | "title":"The Web Platform Podcast", 55 | "updated_at":"2015-11-16T18:11:02.022Z", 56 | "url":"http://thewebplatform.libsyn.com//rss" 57 | }, 58 | { 59 | "_id":"564a1de3b1191d0003000047", 60 | "author":null, 61 | "created_at":"2015-11-16T18:18:11.854Z", 62 | "description":"Developer Tea is a podcast for web and software developers hosted by a developer that you can listen to in less than 10 minutes. The show will cover a wide variety of topics related to the career of being a developer. We hope you'll take the topics from this podcast and continue the conversation, either online or in person with your peers. The show is hosted by Jonathan Cutrell, Director of Technology at Whiteboard and the author of Hacking the Impossible, a developer's guide to working with visionaries. 
:: Twitter: @developertea @jcutrell :: Email: developertea@gmail.com", 63 | "image":"http://simplecast-media.s3.amazonaws.com/podcast/image/363/1440374119-artwork.jpg", 64 | "keywords":[ 65 | "Technology", 66 | "Business", 67 | "Careers", 68 | "Society \u0026 Culture" 69 | ], 70 | "language":"en-us", 71 | "permalink":"http://www.developertea.com/", 72 | "published":true, 73 | "title":"Developer Tea", 74 | "updated_at":"2015-11-16T23:00:23.224Z", 75 | "url":"http://feeds.feedburner.com/developertea" 76 | }, 77 | { 78 | "_id":"564a3163e51cc0000300004c", 79 | "author":null, 80 | "created_at":"2015-11-16T19:41:23.436Z", 81 | "description":"Conference talks from the Remote Conferences series put on by Devchat.tv", 82 | "image":"https://s3.amazonaws.com/devchat.tv/RemoteConfs.jpg", 83 | "keywords":[ 84 | 85 | ], 86 | "language":"en", 87 | "permalink":"http://remoteconfs.com/", 88 | "published":true, 89 | "title":"Remote Conferences - Audio", 90 | "updated_at":"2015-11-16T19:41:24.367Z", 91 | "url":"http://feeds.feedwrench.com/remoteconfs-audio.rss" 92 | }, 93 | { 94 | "_id":"564a315de51cc00003000000", 95 | "author":null, 96 | "created_at":"2015-11-16T19:41:17.492Z", 97 | "description":"Weekly discussion by freelancers and professionals about running a business, finding clients, marketing, and lifestyle related to being a freelancer.", 98 | "image":"https://s3.amazonaws.com/devchat.tv/freelancers_show_thumb.jpg", 99 | "keywords":[ 100 | "Business", 101 | "Careers", 102 | "Management \u0026amp; Marketing", 103 | "Education", 104 | "Training" 105 | ], 106 | "language":"en", 107 | "permalink":"http://www.freelancersshow.com/", 108 | "published":true, 109 | "title":"The Freelancers' Show", 110 | "updated_at":"2015-11-16T19:41:27.459Z", 111 | "url":"http://feeds.feedwrench.com/TheFreelancersShow.rss" 112 | }, 113 | { 114 | "_id":"564a3169e51cc000030000cd", 115 | "author":null, 116 | "created_at":"2015-11-16T19:41:29.686Z", 117 | "description":"React Native Radio Podcast", 118 
| "image":"https://s3.amazonaws.com/devchat.tv/react-native-radio-album-art.jpg", 119 | "keywords":[ 120 | 121 | ], 122 | "language":"en", 123 | "permalink":"http://devchat.tv/react-native-radio", 124 | "published":true, 125 | "title":"React Native Radio", 126 | "updated_at":"2015-11-16T19:41:29.999Z", 127 | "url":"http://feeds.feedwrench.com/react-native-radio.rss" 128 | }, 129 | { 130 | "_id":"564a316fe51cc000030000d4", 131 | "author":null, 132 | "created_at":"2015-11-16T19:41:35.937Z", 133 | "description":"The iOS Development Podcast", 134 | "image":"https://s3.amazonaws.com/devchat.tv/iPhreaks-thumb.jpg", 135 | "keywords":[ 136 | "Technology", 137 | "Tech News", 138 | "Software How-To" 139 | ], 140 | "language":"en", 141 | "permalink":"http://iphreaksshow.com/", 142 | "published":true, 143 | "title":"The iPhreaks Show", 144 | "updated_at":"2015-11-16T19:41:43.700Z", 145 | "url":"http://feeds.feedwrench.com/iPhreaks.rss" 146 | }, 147 | { 148 | "_id":"564a3184e51cc00003000156", 149 | "author":null, 150 | "created_at":"2015-11-16T19:41:56.874Z", 151 | "description":"Weekly podcast discussion about Javascript on the front and back ends. 
Also discuss programming practices, coding environments, and the communities related to the technology.", 152 | "image":"https://s3.amazonaws.com/devchat.tv/javascript_jabber_thumb.jpg", 153 | "keywords":[ 154 | "Education", 155 | "Training", 156 | "Technology", 157 | "Software How-To" 158 | ], 159 | "language":"en", 160 | "permalink":"http://javascriptjabber.com/", 161 | "published":true, 162 | "title":"JavaScript Jabber", 163 | "updated_at":"2015-11-16T19:42:24.692Z", 164 | "url":"http://feeds.feedwrench.com/JavaScriptJabber.rss" 165 | }, 166 | { 167 | "_id":"564a31dee51cc00003000210", 168 | "author":null, 169 | "created_at":"2015-11-16T19:43:26.390Z", 170 | "description":"Each week we explore an aspect of web security.", 171 | "image":"http://devchat.cachefly.net/websecwarriors/logo_3000x3000.jpeg", 172 | "keywords":[ 173 | 174 | ], 175 | "language":"en", 176 | "permalink":"http://websecuritywarriors.com/", 177 | "published":true, 178 | "title":"Web Security Warriors", 179 | "updated_at":"2015-11-16T19:43:28.133Z", 180 | "url":"http://feeds.feedwrench.com/websecwarriors.rss" 181 | }, 182 | { 183 | "_id":"564a3ddbe51cc00003000217", 184 | "author":null, 185 | "created_at":"2015-11-16T20:34:35.791Z", 186 | "description":"Podcasts produzidos de 2008 a 2010 sobre jogos e todos os tipos de assuntos relacionados ao universo e cultura dos vídeogames.", 187 | "image":"http://jogabilida.de/wp-content/uploads/2011/12/nl-podcast.png", 188 | "keywords":[ 189 | "Games \u0026 Hobbies", 190 | "Video Games" 191 | ], 192 | "language":"pt-BR", 193 | "permalink":"http://jogabilida.de/", 194 | "published":true, 195 | "title":"Podcast NowLoading", 196 | "updated_at":"2015-11-16T23:00:23.963Z", 197 | "url":"http://feeds.feedburner.com/podcastnowloading" 198 | }, 199 | { 200 | "_id":"564b9cfe08602e00030000fa", 201 | "author":null, 202 | "created_at":"2015-11-17T21:32:46.210Z", 203 | "description":"Being Boss is a podcast for creative entrepreneurs. 
From Emily Thompson and Kathleen Shannon. Get your business together. Being boss is hard. Making a dream job of your own isn't easy. But getting paid for it, becoming known for it, and finding purpose in it, is so doable - if you do the work.", 204 | "image":"http://www.lovebeingboss.com/img/skin/Header_WhiteLogo.png", 205 | "keywords":[ 206 | 207 | ], 208 | "language":null, 209 | "permalink":"http://www.lovebeingboss.com/", 210 | "published":true, 211 | "title":"Being Boss // A Podcast for Creative Entrepreneurs", 212 | "updated_at":"2015-11-17T21:32:50.672Z", 213 | "url":"http://www.lovebeingboss.com/RSSRetrieve.aspx?ID=18365\u0026Type=RSS20" 214 | }, 215 | { 216 | "_id":"564c5c8008602e0003000128", 217 | "author":null, 218 | "created_at":"2015-11-18T11:09:52.991Z", 219 | "description":"O mundo pop vira piada no Jovem Nerd", 220 | "image":"http://jovemnerd.ig.com.br/wp-content/themes/jovemnerd_v2b/images/NC_FEED.jpg", 221 | "keywords":[ 222 | "Society \u0026 Culture" 223 | ], 224 | "language":"pt-BR", 225 | "permalink":"http://jovemnerd.com.br/", 226 | "published":true, 227 | "title":"Nerdcast", 228 | "updated_at":"2015-11-18T11:11:20.034Z", 229 | "url":"http://jovemnerd.com.br/categoria/nerdcast/feed/" 230 | } 231 | ] -------------------------------------------------------------------------------- /struct.js: -------------------------------------------------------------------------------- 1 | 2 | /* 3 | 4 | For "any-data": 5 | 32-55 - record with record ids (-32) 6 | 56 - 8-bit record ids 7 | 57 - 16-bit record ids 8 | 58 - 24-bit record ids 9 | 59 - 32-bit record ids 10 | 250-255 - followed by typed fixed width values 11 | 64-250 msgpackr/cbor/paired data 12 | arrays and strings within arrays are handled by paired encoding 13 | 14 | Structure encoding: 15 | (type - string (using paired encoding))+ 16 | 17 | Type encoding 18 | encoding byte - fixed width byte - next reference+ 19 | 20 | Encoding byte: 21 | first bit: 22 | 0 - inline 23 | 1 - reference 24 | second 
bit: 25 | 0 - data or number 26 | 1 - string 27 | 28 | remaining bits: 29 | character encoding - ISO-8859-x 30 | 31 | 32 | null (0xff)+ 0xf6 33 | null (0xff)+ 0xf7 34 | 35 | */ 36 | 37 | 38 | import {setWriteStructSlots, RECORD_SYMBOL, addExtension} from './pack.js' 39 | import {setReadStruct, mult10, readString} from './unpack.js'; 40 | const ASCII = 3; // the MIBenum from https://www.iana.org/assignments/character-sets/character-sets.xhtml (and other character encodings could be referenced by MIBenum) 41 | const NUMBER = 0; 42 | const UTF8 = 2; 43 | const OBJECT_DATA = 1; 44 | const DATE = 16; 45 | const TYPE_NAMES = ['num', 'object', 'string', 'ascii']; 46 | TYPE_NAMES[DATE] = 'date'; 47 | const float32Headers = [false, true, true, false, false, true, true, false]; 48 | let evalSupported; 49 | try { 50 | new Function(''); 51 | evalSupported = true; 52 | } catch(error) { 53 | // if eval variants are not supported, do not create inline object readers ever 54 | } 55 | 56 | let updatedPosition; 57 | const hasNodeBuffer = typeof Buffer !== 'undefined' 58 | let textEncoder, currentSource; 59 | try { 60 | textEncoder = new TextEncoder() 61 | } catch (error) {} 62 | const encodeUtf8 = hasNodeBuffer ? function(target, string, position) { 63 | return target.utf8Write(string, position, target.byteLength - position) 64 | } : (textEncoder && textEncoder.encodeInto) ? 
65 | function(target, string, position) { 66 | return textEncoder.encodeInto(string, target.subarray(position)).written 67 | } : false 68 | 69 | const TYPE = Symbol('type'); 70 | const PARENT = Symbol('parent'); 71 | setWriteStructSlots(writeStruct, prepareStructures); 72 | function writeStruct(object, target, encodingStart, position, structures, makeRoom, pack, packr) { 73 | let typedStructs = packr.typedStructs || (packr.typedStructs = []); 74 | // note that we rely on pack.js to load stored structures before we get to this point 75 | let targetView = target.dataView; 76 | let refsStartPosition = (typedStructs.lastStringStart || 100) + position; 77 | let safeEnd = target.length - 10; 78 | let start = position; 79 | if (position > safeEnd) { 80 | target = makeRoom(position); 81 | targetView = target.dataView; 82 | position -= encodingStart; 83 | start -= encodingStart; 84 | refsStartPosition -= encodingStart; 85 | encodingStart = 0; 86 | safeEnd = target.length - 10; 87 | } 88 | 89 | let refOffset, refPosition = refsStartPosition; 90 | 91 | let transition = typedStructs.transitions || (typedStructs.transitions = Object.create(null)); 92 | let nextId = typedStructs.nextId || typedStructs.length; 93 | let headerSize = 94 | nextId < 0xf ? 1 : 95 | nextId < 0xf0 ? 2 : 96 | nextId < 0xf000 ? 3 : 97 | nextId < 0xf00000 ? 
4 : 0; 98 | if (headerSize === 0) 99 | return 0; 100 | position += headerSize; 101 | let queuedReferences = []; 102 | let usedAscii0; 103 | let keyIndex = 0; 104 | for (let key in object) { 105 | let value = object[key]; 106 | let nextTransition = transition[key]; 107 | if (!nextTransition) { 108 | transition[key] = nextTransition = { 109 | key, 110 | parent: transition, 111 | enumerationOffset: 0, 112 | ascii0: null, 113 | ascii8: null, 114 | num8: null, 115 | string16: null, 116 | object16: null, 117 | num32: null, 118 | float64: null, 119 | date64: null 120 | }; 121 | } 122 | if (position > safeEnd) { 123 | target = makeRoom(position); 124 | targetView = target.dataView; 125 | position -= encodingStart; 126 | start -= encodingStart; 127 | refsStartPosition -= encodingStart; 128 | refPosition -= encodingStart; 129 | encodingStart = 0; 130 | safeEnd = target.length - 10 131 | } 132 | switch (typeof value) { 133 | case 'number': 134 | let number = value; 135 | // first check to see if we are using a lot of ids and should default to wide/common format 136 | if (nextId < 200 || !nextTransition.num64) { 137 | if (number >> 0 === number && number < 0x20000000 && number > -0x1f000000) { 138 | if (number < 0xf6 && number >= 0 && (nextTransition.num8 && !(nextId > 200 && nextTransition.num32) || number < 0x20 && !nextTransition.num32)) { 139 | transition = nextTransition.num8 || createTypeTransition(nextTransition, NUMBER, 1); 140 | target[position++] = number; 141 | } else { 142 | transition = nextTransition.num32 || createTypeTransition(nextTransition, NUMBER, 4); 143 | targetView.setUint32(position, number, true); 144 | position += 4; 145 | } 146 | break; 147 | } else if (number < 0x100000000 && number >= -0x80000000) { 148 | targetView.setFloat32(position, number, true); 149 | if (float32Headers[target[position + 3] >>> 5]) { 150 | let xShifted 151 | // this checks for rounding of numbers that were encoded in 32-bit float to nearest significant decimal digit that 
could be preserved 152 | if (((xShifted = number * mult10[((target[position + 3] & 0x7f) << 1) | (target[position + 2] >> 7)]) >> 0) === xShifted) { 153 | transition = nextTransition.num32 || createTypeTransition(nextTransition, NUMBER, 4); 154 | position += 4; 155 | break; 156 | } 157 | } 158 | } 159 | } 160 | transition = nextTransition.num64 || createTypeTransition(nextTransition, NUMBER, 8); 161 | targetView.setFloat64(position, number, true); 162 | position += 8; 163 | break; 164 | case 'string': 165 | let strLength = value.length; 166 | refOffset = refPosition - refsStartPosition; 167 | if ((strLength << 2) + refPosition > safeEnd) { 168 | target = makeRoom((strLength << 2) + refPosition); 169 | targetView = target.dataView; 170 | position -= encodingStart; 171 | start -= encodingStart; 172 | refsStartPosition -= encodingStart; 173 | refPosition -= encodingStart; 174 | encodingStart = 0; 175 | safeEnd = target.length - 10 176 | } 177 | if (strLength > ((0xff00 + refOffset) >> 2)) { 178 | queuedReferences.push(key, value, position - start); 179 | break; 180 | } 181 | let isNotAscii 182 | let strStart = refPosition; 183 | if (strLength < 0x40) { 184 | let i, c1, c2; 185 | for (i = 0; i < strLength; i++) { 186 | c1 = value.charCodeAt(i) 187 | if (c1 < 0x80) { 188 | target[refPosition++] = c1 189 | } else if (c1 < 0x800) { 190 | isNotAscii = true; 191 | target[refPosition++] = c1 >> 6 | 0xc0 192 | target[refPosition++] = c1 & 0x3f | 0x80 193 | } else if ( 194 | (c1 & 0xfc00) === 0xd800 && 195 | ((c2 = value.charCodeAt(i + 1)) & 0xfc00) === 0xdc00 196 | ) { 197 | isNotAscii = true; 198 | c1 = 0x10000 + ((c1 & 0x03ff) << 10) + (c2 & 0x03ff) 199 | i++ 200 | target[refPosition++] = c1 >> 18 | 0xf0 201 | target[refPosition++] = c1 >> 12 & 0x3f | 0x80 202 | target[refPosition++] = c1 >> 6 & 0x3f | 0x80 203 | target[refPosition++] = c1 & 0x3f | 0x80 204 | } else { 205 | isNotAscii = true; 206 | target[refPosition++] = c1 >> 12 | 0xe0 207 | target[refPosition++] = c1 >> 
6 & 0x3f | 0x80 208 | target[refPosition++] = c1 & 0x3f | 0x80 209 | } 210 | } 211 | } else { 212 | refPosition += encodeUtf8(target, value, refPosition); 213 | isNotAscii = refPosition - strStart > strLength; 214 | } 215 | if (refOffset < 0xa0 || (refOffset < 0xf6 && (nextTransition.ascii8 || nextTransition.string8))) { 216 | // short strings 217 | if (isNotAscii) { 218 | if (!(transition = nextTransition.string8)) { 219 | if (typedStructs.length > 10 && (transition = nextTransition.ascii8)) { 220 | // we can safely change ascii to utf8 in place since they are compatible 221 | transition.__type = UTF8; 222 | nextTransition.ascii8 = null; 223 | nextTransition.string8 = transition; 224 | pack(null, 0, true); // special call to notify that structures have been updated 225 | } else { 226 | transition = createTypeTransition(nextTransition, UTF8, 1); 227 | } 228 | } 229 | } else if (refOffset === 0 && !usedAscii0) { 230 | usedAscii0 = true; 231 | transition = nextTransition.ascii0 || createTypeTransition(nextTransition, ASCII, 0); 232 | break; // don't increment position 233 | }// else ascii: 234 | else if (!(transition = nextTransition.ascii8) && !(typedStructs.length > 10 && (transition = nextTransition.string8))) 235 | transition = createTypeTransition(nextTransition, ASCII, 1); 236 | target[position++] = refOffset; 237 | } else { 238 | // TODO: Enable ascii16 at some point, but get the logic right 239 | //if (isNotAscii) 240 | transition = nextTransition.string16 || createTypeTransition(nextTransition, UTF8, 2); 241 | //else 242 | //transition = nextTransition.ascii16 || createTypeTransition(nextTransition, ASCII, 2); 243 | targetView.setUint16(position, refOffset, true); 244 | position += 2; 245 | } 246 | break; 247 | case 'object': 248 | if (value) { 249 | if (value.constructor === Date) { 250 | transition = nextTransition.date64 || createTypeTransition(nextTransition, DATE, 8); 251 | targetView.setFloat64(position, value.getTime(), true); 252 | position += 8; 253 
| } else { 254 | queuedReferences.push(key, value, keyIndex); 255 | } 256 | break; 257 | } else { // null 258 | nextTransition = anyType(nextTransition, position, targetView, -10); // match CBOR with this 259 | if (nextTransition) { 260 | transition = nextTransition; 261 | position = updatedPosition; 262 | } else queuedReferences.push(key, value, keyIndex); 263 | } 264 | break; 265 | case 'boolean': 266 | transition = nextTransition.num8 || nextTransition.ascii8 || createTypeTransition(nextTransition, NUMBER, 1); 267 | target[position++] = value ? 0xf9 : 0xf8; // match CBOR with these 268 | break; 269 | case 'undefined': 270 | nextTransition = anyType(nextTransition, position, targetView, -9); // match CBOR with this 271 | if (nextTransition) { 272 | transition = nextTransition; 273 | position = updatedPosition; 274 | } else queuedReferences.push(key, value, keyIndex); 275 | break; 276 | default: 277 | queuedReferences.push(key, value, keyIndex); 278 | } 279 | keyIndex++; 280 | } 281 | 282 | for (let i = 0, l = queuedReferences.length; i < l;) { 283 | let key = queuedReferences[i++]; 284 | let value = queuedReferences[i++]; 285 | let propertyIndex = queuedReferences[i++]; 286 | let nextTransition = transition[key]; 287 | if (!nextTransition) { 288 | transition[key] = nextTransition = { 289 | key, 290 | parent: transition, 291 | enumerationOffset: propertyIndex - keyIndex, 292 | ascii0: null, 293 | ascii8: null, 294 | num8: null, 295 | string16: null, 296 | object16: null, 297 | num32: null, 298 | float64: null 299 | }; 300 | } 301 | let newPosition; 302 | if (value) { 303 | /*if (typeof value === 'string') { // TODO: we could re-enable long strings 304 | if (position + value.length * 3 > safeEnd) { 305 | target = makeRoom(position + value.length * 3); 306 | position -= start; 307 | targetView = target.dataView; 308 | start = 0; 309 | } 310 | newPosition = position + target.utf8Write(value, position, 0xffffffff); 311 | } else { */ 312 | let size; 313 | refOffset = 
refPosition - refsStartPosition; 314 | if (refOffset < 0xff00) { 315 | transition = nextTransition.object16; 316 | if (transition) 317 | size = 2; 318 | else if ((transition = nextTransition.object32)) 319 | size = 4; 320 | else { 321 | transition = createTypeTransition(nextTransition, OBJECT_DATA, 2); 322 | size = 2; 323 | } 324 | } else { 325 | transition = nextTransition.object32 || createTypeTransition(nextTransition, OBJECT_DATA, 4); 326 | size = 4; 327 | } 328 | newPosition = pack(value, refPosition); 329 | //} 330 | if (typeof newPosition === 'object') { 331 | // re-allocated 332 | refPosition = newPosition.position; 333 | targetView = newPosition.targetView; 334 | target = newPosition.target; 335 | refsStartPosition -= encodingStart; 336 | position -= encodingStart; 337 | start -= encodingStart; 338 | encodingStart = 0; 339 | } else 340 | refPosition = newPosition; 341 | if (size === 2) { 342 | targetView.setUint16(position, refOffset, true); 343 | position += 2; 344 | } else { 345 | targetView.setUint32(position, refOffset, true); 346 | position += 4; 347 | } 348 | } else { // null or undefined 349 | transition = nextTransition.object16 || createTypeTransition(nextTransition, OBJECT_DATA, 2); 350 | targetView.setInt16(position, value === null ? 
-10 : -9, true); 351 | position += 2; 352 | } 353 | keyIndex++; 354 | } 355 | 356 | 357 | let recordId = transition[RECORD_SYMBOL]; 358 | if (recordId == null) { 359 | recordId = packr.typedStructs.length; 360 | let structure = []; 361 | let nextTransition = transition; 362 | let key, type; 363 | while ((type = nextTransition.__type) !== undefined) { 364 | let size = nextTransition.__size; 365 | nextTransition = nextTransition.__parent; 366 | key = nextTransition.key; 367 | let property = [type, size, key]; 368 | if (nextTransition.enumerationOffset) 369 | property.push(nextTransition.enumerationOffset); 370 | structure.push(property); 371 | nextTransition = nextTransition.parent; 372 | } 373 | structure.reverse(); 374 | transition[RECORD_SYMBOL] = recordId; 375 | packr.typedStructs[recordId] = structure; 376 | pack(null, 0, true); // special call to notify that structures have been updated 377 | } 378 | 379 | 380 | switch (headerSize) { 381 | case 1: 382 | if (recordId >= 0x10) return 0; 383 | target[start] = recordId + 0x20; 384 | break; 385 | case 2: 386 | if (recordId >= 0x100) return 0; 387 | target[start] = 0x38; 388 | target[start + 1] = recordId; 389 | break; 390 | case 3: 391 | if (recordId >= 0x10000) return 0; 392 | target[start] = 0x39; 393 | targetView.setUint16(start + 1, recordId, true); 394 | break; 395 | case 4: 396 | if (recordId >= 0x1000000) return 0; 397 | targetView.setUint32(start, (recordId << 8) + 0x3a, true); 398 | break; 399 | } 400 | 401 | if (position < refsStartPosition) { 402 | if (refsStartPosition === refPosition) 403 | return position; // no refs 404 | // adjust positioning 405 | target.copyWithin(position, refsStartPosition, refPosition); 406 | refPosition += position - refsStartPosition; 407 | typedStructs.lastStringStart = position - start; 408 | } else if (position > refsStartPosition) { 409 | if (refsStartPosition === refPosition) 410 | return position; // no refs 411 | typedStructs.lastStringStart = position - start; 412 | 
return writeStruct(object, target, encodingStart, start, structures, makeRoom, pack, packr); 413 | } 414 | return refPosition; 415 | } 416 | function anyType(transition, position, targetView, value) { 417 | let nextTransition; 418 | if ((nextTransition = transition.ascii8 || transition.num8)) { 419 | targetView.setInt8(position, value, true); 420 | updatedPosition = position + 1; 421 | return nextTransition; 422 | } 423 | if ((nextTransition = transition.string16 || transition.object16)) { 424 | targetView.setInt16(position, value, true); 425 | updatedPosition = position + 2; 426 | return nextTransition; 427 | } 428 | if (nextTransition = transition.num32) { 429 | targetView.setUint32(position, 0xe0000100 + value, true); 430 | updatedPosition = position + 4; 431 | return nextTransition; 432 | } 433 | // transition.float64 434 | if (nextTransition = transition.num64) { 435 | targetView.setFloat64(position, NaN, true); 436 | targetView.setInt8(position, value); 437 | updatedPosition = position + 8; 438 | return nextTransition; 439 | } 440 | updatedPosition = position; 441 | // TODO: can we do an "any" type where we defer the decision? 
442 | return; 443 | } 444 | function createTypeTransition(transition, type, size) { 445 | let typeName = TYPE_NAMES[type] + (size << 3); 446 | let newTransition = transition[typeName] || (transition[typeName] = Object.create(null)); 447 | newTransition.__type = type; 448 | newTransition.__size = size; 449 | newTransition.__parent = transition; 450 | return newTransition; 451 | } 452 | function onLoadedStructures(sharedData) { 453 | if (!(sharedData instanceof Map)) 454 | return sharedData; 455 | let typed = sharedData.get('typed') || []; 456 | if (Object.isFrozen(typed)) 457 | typed = typed.map(structure => structure.slice(0)); 458 | let named = sharedData.get('named'); 459 | let transitions = Object.create(null); 460 | for (let i = 0, l = typed.length; i < l; i++) { 461 | let structure = typed[i]; 462 | let transition = transitions; 463 | for (let [type, size, key] of structure) { 464 | let nextTransition = transition[key]; 465 | if (!nextTransition) { 466 | transition[key] = nextTransition = { 467 | key, 468 | parent: transition, 469 | enumerationOffset: 0, 470 | ascii0: null, 471 | ascii8: null, 472 | num8: null, 473 | string16: null, 474 | object16: null, 475 | num32: null, 476 | float64: null, 477 | date64: null, 478 | }; 479 | } 480 | transition = createTypeTransition(nextTransition, type, size); 481 | } 482 | transition[RECORD_SYMBOL] = i; 483 | } 484 | typed.transitions = transitions; 485 | this.typedStructs = typed; 486 | this.lastTypedStructuresLength = typed.length; 487 | return named; 488 | } 489 | var sourceSymbol = Symbol.for('source') 490 | function readStruct(src, position, srcEnd, unpackr) { 491 | let recordId = src[position++] - 0x20; 492 | if (recordId >= 24) { 493 | switch(recordId) { 494 | case 24: recordId = src[position++]; break; 495 | // little endian: 496 | case 25: recordId = src[position++] + (src[position++] << 8); break; 497 | case 26: recordId = src[position++] + (src[position++] << 8) + (src[position++] << 16); break; 498 | case 27: 
recordId = src[position++] + (src[position++] << 8) + (src[position++] << 16) + (src[position++] << 24); break; 499 | } 500 | } 501 | let structure = unpackr.typedStructs && unpackr.typedStructs[recordId]; 502 | if (!structure) { 503 | // copy src buffer because getStructures will override it 504 | src = Uint8Array.prototype.slice.call(src, position, srcEnd); 505 | srcEnd -= position; 506 | position = 0; 507 | if (!unpackr.getStructures) 508 | throw new Error(`Reference to shared structure ${recordId} without getStructures method`); 509 | unpackr._mergeStructures(unpackr.getStructures()); 510 | if (!unpackr.typedStructs) 511 | throw new Error('Could not find any shared typed structures'); 512 | unpackr.lastTypedStructuresLength = unpackr.typedStructs.length; 513 | structure = unpackr.typedStructs[recordId]; 514 | if (!structure) 515 | throw new Error('Could not find typed structure ' + recordId); 516 | } 517 | var construct = structure.construct; 518 | var fullConstruct = structure.fullConstruct; 519 | if (!construct) { 520 | construct = structure.construct = function LazyObject() { 521 | } 522 | fullConstruct = structure.fullConstruct = function LoadedObject() { 523 | } 524 | fullConstruct.prototype = unpackr.structPrototype || {}; 525 | var prototype = construct.prototype = unpackr.structPrototype ? 
Object.create(unpackr.structPrototype) : {}; 526 | let properties = []; 527 | let currentOffset = 0; 528 | let lastRefProperty; 529 | for (let i = 0, l = structure.length; i < l; i++) { 530 | let definition = structure[i]; 531 | let [ type, size, key, enumerationOffset ] = definition; 532 | if (key === '__proto__') 533 | key = '__proto_'; 534 | let property = { 535 | key, 536 | offset: currentOffset, 537 | } 538 | if (enumerationOffset) 539 | properties.splice(i + enumerationOffset, 0, property); 540 | else 541 | properties.push(property); 542 | let getRef; 543 | switch(size) { // TODO: Move into a separate function 544 | case 0: getRef = () => 0; break; 545 | case 1: 546 | getRef = (source, position) => { 547 | let ref = source.bytes[position + property.offset]; 548 | return ref >= 0xf6 ? toConstant(ref) : ref; 549 | }; 550 | break; 551 | case 2: 552 | getRef = (source, position) => { 553 | let src = source.bytes; 554 | let dataView = src.dataView || (src.dataView = new DataView(src.buffer, src.byteOffset, src.byteLength)); 555 | let ref = dataView.getUint16(position + property.offset, true); 556 | return ref >= 0xff00 ? toConstant(ref & 0xff) : ref; 557 | }; 558 | break; 559 | case 4: 560 | getRef = (source, position) => { 561 | let src = source.bytes; 562 | let dataView = src.dataView || (src.dataView = new DataView(src.buffer, src.byteOffset, src.byteLength)); 563 | let ref = dataView.getUint32(position + property.offset, true); 564 | return ref >= 0xffffff00 ? 
toConstant(ref & 0xff) : ref; 565 | }; 566 | break; 567 | } 568 | property.getRef = getRef; 569 | currentOffset += size; 570 | let get; 571 | switch(type) { 572 | case ASCII: 573 | if (lastRefProperty && !lastRefProperty.next) 574 | lastRefProperty.next = property; 575 | lastRefProperty = property; 576 | property.multiGetCount = 0; 577 | get = function(source) { 578 | let src = source.bytes; 579 | let position = source.position; 580 | let refStart = currentOffset + position; 581 | let ref = getRef(source, position); 582 | if (typeof ref !== 'number') return ref; 583 | 584 | let end, next = property.next; 585 | while(next) { 586 | end = next.getRef(source, position); 587 | if (typeof end === 'number') 588 | break; 589 | else 590 | end = null; 591 | next = next.next; 592 | } 593 | if (end == null) 594 | end = source.bytesEnd - refStart; 595 | if (source.srcString) { 596 | return source.srcString.slice(ref, end); 597 | } 598 | /*if (property.multiGetCount > 0) { 599 | let asciiEnd; 600 | next = firstRefProperty; 601 | let dataView = src.dataView || (src.dataView = new DataView(src.buffer, src.byteOffset, src.byteLength)); 602 | do { 603 | asciiEnd = dataView.getUint16(source.position + next.offset, true); 604 | if (asciiEnd < 0xff00) 605 | break; 606 | else 607 | asciiEnd = null; 608 | } while((next = next.next)); 609 | if (asciiEnd == null) 610 | asciiEnd = source.bytesEnd - refStart 611 | source.srcString = src.toString('latin1', refStart, refStart + asciiEnd); 612 | return source.srcString.slice(ref, end); 613 | } 614 | if (source.prevStringGet) { 615 | source.prevStringGet.multiGetCount += 2; 616 | } else { 617 | source.prevStringGet = property; 618 | property.multiGetCount--; 619 | }*/ 620 | return readString(src, ref + refStart, end - ref); 621 | //return src.toString('latin1', ref + refStart, end + refStart); 622 | }; 623 | break; 624 | case UTF8: case OBJECT_DATA: 625 | if (lastRefProperty && !lastRefProperty.next) 626 | lastRefProperty.next = property; 627 | 
lastRefProperty = property; 628 | get = function(source) { 629 | let position = source.position; 630 | let refStart = currentOffset + position; 631 | let ref = getRef(source, position); 632 | if (typeof ref !== 'number') return ref; 633 | let src = source.bytes; 634 | let end, next = property.next; 635 | while(next) { 636 | end = next.getRef(source, position); 637 | if (typeof end === 'number') 638 | break; 639 | else 640 | end = null; 641 | next = next.next; 642 | } 643 | if (end == null) 644 | end = source.bytesEnd - refStart; 645 | if (type === UTF8) { 646 | return src.toString('utf8', ref + refStart, end + refStart); 647 | } else { 648 | currentSource = source; 649 | try { 650 | return unpackr.unpack(src, { start: ref + refStart, end: end + refStart }); 651 | } finally { 652 | currentSource = null; 653 | } 654 | } 655 | }; 656 | break; 657 | case NUMBER: 658 | switch(size) { 659 | case 4: 660 | get = function (source) { 661 | let src = source.bytes; 662 | let dataView = src.dataView || (src.dataView = new DataView(src.buffer, src.byteOffset, src.byteLength)); 663 | let position = source.position + property.offset; 664 | let value = dataView.getInt32(position, true) 665 | if (value < 0x20000000) { 666 | if (value > -0x1f000000) 667 | return value; 668 | if (value > -0x20000000) 669 | return toConstant(value & 0xff); 670 | } 671 | let fValue = dataView.getFloat32(position, true); 672 | // this does rounding of numbers that were encoded in 32-bit float to nearest significant decimal digit that could be preserved 673 | let multiplier = mult10[((src[position + 3] & 0x7f) << 1) | (src[position + 2] >> 7)] 674 | return ((multiplier * fValue + (fValue > 0 ? 
0.5 : -0.5)) >> 0) / multiplier; 675 | }; 676 | break; 677 | case 8: 678 | get = function (source) { 679 | let src = source.bytes; 680 | let dataView = src.dataView || (src.dataView = new DataView(src.buffer, src.byteOffset, src.byteLength)); 681 | let value = dataView.getFloat64(source.position + property.offset, true); 682 | if (isNaN(value)) { 683 | let byte = src[source.position + property.offset]; 684 | if (byte >= 0xf6) 685 | return toConstant(byte); 686 | } 687 | return value; 688 | }; 689 | break; 690 | case 1: 691 | get = function (source) { 692 | let src = source.bytes; 693 | let value = src[source.position + property.offset]; 694 | return value < 0xf6 ? value : toConstant(value); 695 | }; 696 | break; 697 | } 698 | break; 699 | case DATE: 700 | get = function (source) { 701 | let src = source.bytes; 702 | let dataView = src.dataView || (src.dataView = new DataView(src.buffer, src.byteOffset, src.byteLength)); 703 | return new Date(dataView.getFloat64(source.position + property.offset, true)); 704 | }; 705 | break; 706 | 707 | } 708 | property.get = get; 709 | } 710 | // TODO: load the srcString for faster string decoding on toJSON 711 | if (evalSupported) { 712 | let objectLiteralProperties = []; 713 | let args = []; 714 | let i = 0; 715 | let hasInheritedProperties; 716 | for (let property of properties) { // assign in enumeration order 717 | if (unpackr.alwaysLazyProperty && unpackr.alwaysLazyProperty(property.key)) { 718 | // these properties are not eagerly evaluated and this can be used for creating properties 719 | // that are not serialized as JSON 720 | hasInheritedProperties = true; 721 | continue; 722 | } 723 | Object.defineProperty(prototype, property.key, { get: withSource(property.get), enumerable: true }); 724 | let valueFunction = 'v' + i++; 725 | args.push(valueFunction); 726 | objectLiteralProperties.push('o[' + JSON.stringify(property.key) + ']=' + valueFunction + '(s)'); 727 | } 728 | if (hasInheritedProperties) { 729 | 
objectLiteralProperties.push('__proto__:this'); 730 | } 731 | let toObject = (new Function(...args, 'var c=this;return function(s){var o=new c();' + objectLiteralProperties.join(';') + ';return o;}')).apply(fullConstruct, properties.map(prop => prop.get)); 732 | Object.defineProperty(prototype, 'toJSON', { 733 | value(omitUnderscoredProperties) { 734 | return toObject.call(this, this[sourceSymbol]); 735 | } 736 | }); 737 | } else { 738 | Object.defineProperty(prototype, 'toJSON', { 739 | value(omitUnderscoredProperties) { 740 | // return an enumerable object with own properties to JSON stringify 741 | let resolved = {}; 742 | for (let i = 0, l = properties.length; i < l; i++) { 743 | // TODO: check alwaysLazyProperty 744 | let key = properties[i].key; 745 | 746 | resolved[key] = this[key]; 747 | } 748 | return resolved; 749 | }, 750 | // not enumerable or anything 751 | }); 752 | } 753 | } 754 | var instance = new construct(); 755 | instance[sourceSymbol] = { 756 | bytes: src, 757 | position, 758 | srcString: '', 759 | bytesEnd: srcEnd 760 | } 761 | return instance; 762 | } 763 | function toConstant(code) { 764 | switch(code) { 765 | case 0xf6: return null; 766 | case 0xf7: return undefined; 767 | case 0xf8: return false; 768 | case 0xf9: return true; 769 | } 770 | throw new Error('Unknown constant'); 771 | } 772 | function withSource(get) { 773 | return function() { 774 | return get(this[sourceSymbol]); 775 | } 776 | } 777 | 778 | function saveState() { 779 | if (currentSource) { 780 | currentSource.bytes = Uint8Array.prototype.slice.call(currentSource.bytes, currentSource.position, currentSource.bytesEnd); 781 | currentSource.position = 0; 782 | currentSource.bytesEnd = currentSource.bytes.length; 783 | } 784 | } 785 | function prepareStructures(structures, packr) { 786 | if (packr.typedStructs) { 787 | let structMap = new Map(); 788 | structMap.set('named', structures); 789 | structMap.set('typed', packr.typedStructs); 790 | structures = structMap; 791 | } 792 | 
let lastTypedStructuresLength = packr.lastTypedStructuresLength || 0; 793 | structures.isCompatible = existing => { 794 | let compatible = true; 795 | if (existing instanceof Map) { 796 | let named = existing.get('named') || []; 797 | if (named.length !== (packr.lastNamedStructuresLength || 0)) 798 | compatible = false; 799 | let typed = existing.get('typed') || []; 800 | if (typed.length !== lastTypedStructuresLength) 801 | compatible = false; 802 | } else if (existing instanceof Array || Array.isArray(existing)) { 803 | if (existing.length !== (packr.lastNamedStructuresLength || 0)) 804 | compatible = false; 805 | } 806 | if (!compatible) 807 | packr._mergeStructures(existing); 808 | return compatible; 809 | }; 810 | packr.lastTypedStructuresLength = packr.typedStructs && packr.typedStructs.length; 811 | return structures; 812 | } 813 | 814 | setReadStruct(readStruct, onLoadedStructures, saveState); 815 | 816 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # msgpackr 2 | [![npm version](https://img.shields.io/npm/v/msgpackr.svg?style=flat-square)](https://www.npmjs.org/package/msgpackr) 3 | [![npm version](https://img.shields.io/npm/dw/msgpackr)](https://www.npmjs.org/package/msgpackr) 4 | [![encode](https://img.shields.io/badge/encode-1.5GB%2Fs-yellow)](benchmark.md) 5 | [![decode](https://img.shields.io/badge/decode-2GB%2Fs-yellow)](benchmark.md) 6 | [![types](https://img.shields.io/npm/types/msgpackr)](README.md) 7 | [![module](https://img.shields.io/badge/module-ESM%2FCJS-blue)](README.md) 8 | [![license](https://img.shields.io/badge/license-MIT-brightgreen)](LICENSE) 9 | 10 | 11 | 12 | The msgpackr package is an extremely fast MessagePack NodeJS/JavaScript implementation. 
Currently, it is significantly faster than any other known implementations, faster than Avro (for JS), and generally faster than native V8 JSON.stringify/parse, on NodeJS. It also includes an optional record extension (the `r` in msgpackr), for defining record structures that makes MessagePack even faster and more compact, often over twice as fast as even native JSON functions, several times faster than other JS implementations, and 15-50% more compact. See the performance section for more details. Structured cloning (with support for cyclical references) is also supported through optional extensions. 13 | 14 | ## Basic Usage 15 | 16 | Install with: 17 | 18 | ``` 19 | npm i msgpackr 20 | ``` 21 | And `import` or `require` it for basic standard serialization/encoding (`pack`) and deserialization/decoding (`unpack`) functions: 22 | ```js 23 | import { unpack, pack } from 'msgpackr'; 24 | let serializedAsBuffer = pack(value); 25 | let data = unpack(serializedAsBuffer); 26 | ``` 27 | This `pack` function will generate standard MessagePack without any extensions that should be compatible with any standard MessagePack parser/decoder. It will serialize JavaScript objects as MessagePack `map`s by default. The `unpack` function will deserialize MessagePack `map`s as an `Object` with the properties from the map. 28 | 29 | ## Node Usage 30 | The msgpackr package runs on any modern JS platform, but is optimized for NodeJS usage (and will use a node addon for performance boost as an optional dependency). 31 | 32 | ### Streams 33 | We can use the including streaming functionality (which further improves performance). 
The `PackrStream` is a NodeJS transform stream that can be used to serialize objects to a binary stream (writing to network/socket, IPC, etc.), and the `UnpackrStream` can be used to deserialize objects from a binary stream (reading from network/socket, etc.): 34 | 35 | ```js 36 | import { PackrStream } from 'msgpackr'; 37 | let stream = new PackrStream(); 38 | stream.write(myData); 39 | 40 | ``` 41 | Or for a full example of sending and receiving data on a stream: 42 | ```js 43 | import { PackrStream, UnpackrStream } from 'msgpackr'; 44 | let sendingStream = new PackrStream(); 45 | let receivingStream = new UnpackrStream(); 46 | // we are just piping to our own stream, but normally you would send and 47 | // receive over some type of inter-process or network connection. 48 | sendingStream.pipe(receivingStream); 49 | sendingStream.write(myData); 50 | receivingStream.on('data', (data) => { 51 | // received data 52 | }); 53 | ``` 54 | The `PackrStream` and `UnpackrStream` instances will also have the record structure extension enabled by default (see below). 55 | 56 | ## Deno and Bun Usage 57 | Msgpackr modules are standard ESM modules and can be loaded directly from the [deno.land registry for msgpackr](https://deno.land/x/msgpackr) for use in Deno or using the NPM module loader with `import { unpack } from 'npm:msgpackr'`. The standard pack/encode and unpack/decode functionality is available on Deno, like other platforms. msgpackr can be used like any other package on Bun. 58 | 59 | ## Browser Usage 60 | Msgpackr works as standalone JavaScript as well, and runs on modern browsers. It includes a bundled script, at `dist/index.js` for ease of direct loading: 61 | ```html 62 | 63 | ``` 64 | 65 | This is UMD based, and will register as a module if possible, or create a `msgpackr` global with all the exported functions. 
66 | 67 | For module-based development, it is recommended that you directly import the module of interest, to minimize dependencies that get pulled into your application: 68 | ```js 69 | import { unpack } from 'msgpackr/unpack' // if you only need to unpack 70 | ``` 71 | 72 | The package also includes a minified bundle in index.min.js. 73 | Additionally, the package includes a version that excludes dynamic code evaluation called index-no-eval.js, for situations where Content Security Policy (CSP) forbids eval/Function in code. The dynamic evaluation provides important performance optimizations (for records), so is not recommended unless required by CSP policy. 74 | 75 | ## Structured Cloning 76 | You can also use msgpackr for [structured cloning](https://html.spec.whatwg.org/multipage/structured-data.html). By enabling the `structuredClone` option, you can include references to other objects or cyclic references, and object identity will be preserved. Structured cloning also enables preserving certain typed objects like `Error`, `Set`, `RegExp` and TypedArray instances. For example: 77 | ```js 78 | let obj = { 79 | set: new Set(['a', 'b']), 80 | regular: /a\spattern/ 81 | }; 82 | obj.self = obj; 83 | let packr = new Packr({ structuredClone: true }); 84 | let serialized = packr.pack(obj); 85 | let copy = packr.unpack(serialized); 86 | copy.self === copy // true 87 | copy.set.has('a') // true 88 | 89 | ``` 90 | 91 | This option is disabled by default because it uses extensions and reference checking degrades performance (by about 25-30%). (Note this implementation doesn't serialize every class/type specified in the HTML specification since not all of them make sense for storing across platforms.) 
92 | 93 | ### Alternate Terminology 94 | If you prefer to use encoder/decode terminology, msgpackr exports aliases, so `decode` is equivalent to `unpack`, `encode` is `pack`, `Encoder` is `Packr`, `Decoder` is `Unpackr`, and `EncoderStream` and `DecoderStream` can be used as well. 95 | 96 | ## Record / Object Structures 97 | There is a critical difference between maps (or dictionaries) that hold an arbitrary set of keys and values (JavaScript `Map` is designed for these), and records or object structures that have a well-defined set of fields. Typical JS objects/records may have many instances re(use) the same structure. By using the record extension, this distinction is preserved in MessagePack and the encoding can reuse structures and not only provides better type preservation, but yield much more compact encodings and increase decoding performance by 2-3x. Msgpackr automatically generates record definitions that are reused and referenced by objects with the same structure. There are a number of ways to use this to our advantage. For large object structures with repeating nested objects with similar structures, simply serializing with the record extension can yield significant benefits. To use the record structures extension, we create a new `Packr` instance. By default a new `Packr` instance will have the record extension enabled: 98 | ```js 99 | import { Packr } from 'msgpackr'; 100 | let packr = new Packr(); 101 | packr.pack(bigDataWithLotsOfObjects); 102 | 103 | ``` 104 | 105 | Another way to further leverage the benefits of the msgpackr record structures is to use streams that naturally allow for data to reuse based on previous record structures. The stream classes have the record structure extension enabled by default and provide excellent out-of-the-box performance. 106 | 107 | When creating a new `Packr`, `Unpackr`, `PackrStream`, or `UnpackrStream` instance, we can enable or disable the record structure extension with the `useRecords` property. 
When this is `false`, the record structure extension will be disabled (standard/compatibility mode), and all objects will revert to being serialized using MessagePack `map`s, and all `map`s will be deserialized to JS `Object`s as properties (like the standalone `pack` and `unpack` functions). 108 | 109 | Streaming with record structures works by encoding a structure the first time it is seen in a stream and referencing the structure in later messages that are sent across that stream. When an encoder can expect a decoder to understand previous structure references, this can be configured using the `sequential: true` flag, which is auto-enabled by streams, but can also be used with Packr instances. 110 | 111 | ### Shared Record Structures 112 | Another useful way of using msgpackr, and the record extension, is for storing data in databases, files, or other storage systems. If a number of objects with common data structures are being stored, a shared structure can be used to greatly improve data storage and deserialization efficiency. In the simplest form, provide a `structures` array, which is updated if any new object structure is encountered: 113 | ```js 114 | import { Packr } from 'msgpackr'; 115 | let packr = new Packr({ 116 | structures: [... structures that were last generated ...] 117 | }); 118 | ``` 119 | If you are working with persisted data, you will need to persist the `structures` data when it is updated. 
Msgpackr provides an API for loading and saving the `structures` on demand (which is robust and can be used in multiple-process situations where other processes may be updating this same `structures` array), we just need to provide a way to store the generated shared structure so it is available to deserialize stored data in the future: 120 | ```js 121 | import { Packr } from 'msgpackr'; 122 | let packr = new Packr({ 123 | getStructures() { 124 | // storing our data in a file (but we could also store in a db or key-value store) 125 | return unpack(readFileSync('my-shared-structures.mp')) || []; 126 | }, 127 | saveStructures(structures) { 128 | writeFileSync('my-shared-structures.mp', pack(structures)); 129 | } 130 | }); 131 | ``` 132 | Msgpackr will automatically add and save structures as it encounters any new object structures (up to a limit of 32, by default). It will always add structures in an incremental/compatible way: Any object encoded with an earlier structure can be decoded with a later version (as long as it is persisted). 133 | 134 | #### Shared Structures Options 135 | By default there is a limit of 32 shared structures. This default is designed to record common shared structures, but also be resilient against sharing too many structures if there are many objects with dynamic properties that are likely to be repeated. This also allows for slightly more efficient one byte encoding. However, if your application has more structures that are commonly repeated, you can increase this limit by setting `maxSharedStructures` to a higher value. The maximum supported shared structures is 8160. 136 | 137 | You can also provide a `shouldShareStructure` function in the options if you want to specifically indicate which structures should be shared. This is called during the encoding process with the array of keys for a structure that is being considered for addition to the shared structure. 
For example, you might want: 138 | ``` 139 | maxSharedStructures: 100, 140 | shouldShareStructure(keys) { 141 | return !(keys[0] > 1) // don't share structures that consist of numbers as keys 142 | } 143 | ``` 144 | 145 | ### Reading Multiple Values 146 | If you have a buffer with multiple values sequentially encoded, you can choose to parse and read multiple values. This can be done using the `unpackMultiple` function/method, which can return an array of all the values it can sequentially parse within the provided buffer. For example: 147 | ```js 148 | let data = new Uint8Array([1, 2, 3]) // encodings of values 1, 2, and 3 149 | let values = unpackMultiple(data) // [1, 2, 3] 150 | ``` 151 | Alternatively, you can provide a callback function that is called as the parsing occurs with each value, and can optionally terminate the parsing by returning `false`: 152 | ```js 153 | let data = new Uint8Array([1, 2, 3]) // encodings of values 1, 2, and 3 154 | unpackMultiple(data, (value) => { 155 | // called for each value 156 | // return false if you wish to end the parsing 157 | }) 158 | ``` 159 | 160 | If you need to know the start and end offsets of the unpacked values, these are 161 | provided as optional parameters in the callback: 162 | ```js 163 | let data = new Uint8Array([1, 2, 3]) // encodings of values 1, 2, and 3 164 | unpackMultiple(data, (value,start,end) => { 165 | // called for each value 166 | // `start` is the data buffer offset where the value was read from 167 | // `end` is `start` plus the byte length of the encoded value 168 | // return false if you wish to end the parsing 169 | }) 170 | ``` 171 | 172 | ## Options 173 | The following options properties can be provided to the Packr or Unpackr constructor: 174 | 175 | * `useRecords` - Setting this to `false` disables the record extension and stores JavaScript objects as MessagePack maps, and unpacks maps as JavaScript `Object`s, which ensures compatibility with other decoders. 
Setting this to a function will use records for objects where `useRecords(object)` returns `true`. 176 | * `structures` - Provides the array of structures that is to be used for record extension, if you want the structures saved and used again. This array will be modified in place with new record structures that are serialized (if less than 32 structures are in the array). 177 | * `moreTypes` - Enable serialization of additional built-in types/classes including typed arrays, `Set`s, `Map`s, and `Error`s. 178 | * `structuredClone` - This enables the structured cloning extensions that will encode object/cyclic references. `moreTypes` is enabled by default when this is enabled. 179 | * `mapsAsObjects` - If `true`, this will decode MessagePack maps and JS `Object`s with the map entries decoded to object properties. If `false`, maps are decoded as JavaScript `Map`s. This is disabled by default if `useRecords` is enabled (which allows `Map`s to be preserved), and is enabled by default if `useRecords` is disabled. 180 | * `useFloat32` - This will enable msgpackr to encode non-integer numbers as `float32`. See next section for possible values. 181 | * `variableMapSize` - This will use varying map size definition (fixmap, map16, map32) based on the number of keys when encoding objects, which yields slightly more compact encodings (for small objects), but is typically 5-10% slower during encoding. This is necessary if you need to use objects with more than 65535 keys. This is only relevant when record extension is disabled. 182 | * `bundleStrings` - If `true` this uses a custom extension that bundles strings together, so that they can be decoded more quickly on browsers and Deno that do not have access to the NodeJS addon. This is a custom extension, so both encoder and decoder need to support this. This can yield significant decoding performance increases on browsers (30%-50%).
183 | * `copyBuffers` - When decoding a MessagePack with binary data (Buffers are encoded as binary data), copy the buffer rather than providing a slice/view of the buffer. If you want your input data to be collected or modified while the decoded embedded buffer continues to live on, you can use this option (there is extra overhead to copying). 184 | * `useTimestamp32` - Encode JS `Date`s in 32-bit format when possible by dropping the milliseconds. This is a more efficient encoding of dates. You can also cause dates to use 32-bit format by manually setting the milliseconds to zero (`date.setMilliseconds(0)`). 185 | * `sequential` - Encode structures in serialized data, and reference previously encoded structures with expectation that decoder will read the encoded structures in the same order as encoded, with `unpackMultiple`. 186 | * `largeBigIntToFloat` - If a bigint needs to be encoded that is larger than will fit in 64-bit integers, it will be encoded as a float-64 (otherwise will throw a RangeError). 187 | * `largeBigIntToString` - If a bigint needs to be encoded that is larger than will fit in 64-bit integers, it will be encoded as a string (otherwise will throw a RangeError). 188 | * `useBigIntExtension` - If a bigint needs to be encoded that is larger than will fit in 64-bit integers, it will be encoded using a custom extension that supports up to about 1000-bits of integer precision. 189 | * `encodeUndefinedAsNil` - Encodes a value of `undefined` as a MessagePack `nil`, the same as a `null`. 190 | * `int64AsType` - This will decode uint64 and int64 numbers as the specified type. The type can be `bigint` (default), `number`, `string`, or `auto` (where range [-2^53...2^53] is represented by number and everything else by a bigint). 191 | * `skipValues` - This can be an array of property values that will indicate properties that should be skipped when serializing objects. 
For example, to mimic `JSON.stringify`'s behavior of skipping properties with a value of `undefined`, you can provide `skipValues: [undefined]`. Note that this will only apply to serializing objects as standard MessagePack maps, not to records. Also, the array is checked by calling the `includes` method, so you can provide an object with an `includes` method if you want a custom function to skip values. 192 | * `onInvalidDate` - This can be provided as a function that will be called when an invalid date is provided. The function can throw an error, or return a value that will be encoded in place of the invalid date. If not provided, an invalid date will be encoded as an invalid timestamp (which decodes with msgpackr back to an invalid date). 193 | * `writeFunction` - This can be provided as a function that will be called when a function is encountered. The function can throw an error, or return a value that will be encoded in place of the function. If not provided, a function will be encoded as undefined (similar to `JSON.stringify`). 194 | * `mapAsEmptyObject` - Encodes JS `Map`s as empty objects (for back-compat with older libraries). 195 | * `setAsEmptyObject` - Encodes JS `Set`s as empty objects (for back-compat with older libraries). 196 | * `allowArraysInMapKeys` - Allows arrays to be used as keys in Maps, as long as all elements are strings, numbers, booleans, or bigints. When enabled, such arrays are flattened and converted to a string representation. 197 | 198 | ### 32-bit Float Options 199 | By default all non-integer numbers are serialized as 64-bit float (double). This is fast, and ensures maximum precision. However, often real-world data doesn't need 64-bits of precision, and using 32-bit encoding can be much more space efficient. There are several options that provide more efficient encodings.
Using the decimal rounding options for encoding and decoding provides lossless storage of common decimal representations like 7.99, in more efficient 32-bit format (rather than 64-bit). The `useFloat32` property has several possible options, available from the module as constants: 200 | ```js 201 | import { FLOAT32_OPTIONS } from 'msgpackr'; 202 | const { ALWAYS, DECIMAL_ROUND, DECIMAL_FIT } = FLOAT32_OPTIONS; 203 | ``` 204 | 205 | * `ALWAYS` (1) - Always will encode non-integers (absolute value less than 2147483648) as 32-bit float. 206 | * `DECIMAL_ROUND` (3) - Always will encode non-integers as 32-bit float, and when decoding 32-bit float, round to the significant decimal digits (usually 7, but 6 or 8 digits for some ranges). 207 | * `DECIMAL_FIT` (4) - Only encode non-integers as 32-bit float if all significant digits (usually up to 7) can be unambiguously encoded as a 32-bit float, and decode/unpack with decimal rounding (same as above). This will ensure round-trip encoding/decoding without loss in precision and uses 32-bit when possible. 208 | 209 | Note that the performance is decreased with decimal rounding by about 20-25%, although if only 5% of your values are floating point, that will only have about a 1% impact overall. 210 | 211 | In addition, msgpackr exports a `roundFloat32(number)` function that can be used to round floating point numbers to the maximum significant decimal digits that can be stored in 32-bit float, just as DECIMAL_ROUND does when decoding. This can be useful for determining how a number will be decoded prior to encoding it. 212 | 213 | ## Performance 214 | ### Native Acceleration 215 | Msgpackr employs an optional native node-addon to accelerate the parsing of strings. This should be automatically installed and utilized on NodeJS. However, you can verify this by checking the `isNativeAccelerationEnabled` property that is exported from msgpackr.
If this is `false`, the `msgpackr-extract` package may not have been properly installed, and you may want to verify that it is installed correctly: 216 | ```js 217 | import { isNativeAccelerationEnabled } from 'msgpackr' 218 | if (!isNativeAccelerationEnabled) 219 | console.warn('Native acceleration not enabled, verify that install finished properly') 220 | ``` 221 | 222 | ### Benchmarks 223 | Msgpackr is fast. Really fast. Here is comparison with the next fastest JS projects using the benchmark tool from `msgpack-lite` (and the sample data is from some clinical research data we use that has a good mix of different value types and structures). It also includes comparison to V8 native JSON functionality, and JavaScript Avro (`avsc`, a very optimized Avro implementation): 224 | 225 | operation | op | ms | op/s 226 | ---------------------------------------------------------- | ------: | ----: | -----: 227 | buf = Buffer(JSON.stringify(obj)); | 81600 | 5002 | 16313 228 | obj = JSON.parse(buf); | 90700 | 5004 | 18125 229 | require("msgpackr").pack(obj); | 169700 | 5000 | 33940 230 | require("msgpackr").unpack(buf); | 109700 | 5003 | 21926 231 | msgpackr w/ shared structures: packr.pack(obj); | 190400 | 5001 | 38072 232 | msgpackr w/ shared structures: packr.unpack(buf); | 422900 | 5000 | 84580 233 | buf = require("msgpack-lite").encode(obj); | 31300 | 5005 | 6253 234 | obj = require("msgpack-lite").decode(buf); | 15700 | 5007 | 3135 235 | buf = require("@msgpack/msgpack").encode(obj); | 103100 | 5003 | 20607 236 | obj = require("@msgpack/msgpack").decode(buf); | 59100 | 5004 | 11810 237 | buf = require("notepack").encode(obj); | 65500 | 5007 | 13081 238 | obj = require("notepack").decode(buf); | 33400 | 5009 | 6667 239 | obj = require("msgpack-unpack").decode(buf); | 6900 | 5036 | 1370 240 | require("avsc")...make schema/type...type.toBuffer(obj); | 89300 | 5005 | 17842 241 | require("avsc")...make schema/type...type.fromBuffer(obj); | 108400 | 5001 | 21675 242 | 243 | 
All benchmarks were performed on Node 15 / V8 8.6 (Windows i7-4770 3.4Ghz). 244 | (`avsc` is schema-based and more comparable in style to msgpackr with shared structures). 245 | 246 | Here is a benchmark of streaming data (again borrowed from `msgpack-lite`'s benchmarking), where msgpackr is able to take advantage of the structured record extension and really demonstrate its performance capabilities: 247 | 248 | operation (1000000 x 2) | op | ms | op/s 249 | ------------------------------------------------ | ------: | ----: | -----: 250 | new PackrStream().write(obj); | 1000000 | 372 | 2688172 251 | new UnpackrStream().write(buf); | 1000000 | 247 | 4048582 252 | stream.write(msgpack.encode(obj)); | 1000000 | 2898 | 345065 253 | stream.write(msgpack.decode(buf)); | 1000000 | 1969 | 507872 254 | stream.write(notepack.encode(obj)); | 1000000 | 901 | 1109877 255 | stream.write(notepack.decode(buf)); | 1000000 | 1012 | 988142 256 | msgpack.Encoder().on("data",ondata).encode(obj); | 1000000 | 1763 | 567214 257 | msgpack.createDecodeStream().write(buf); | 1000000 | 2222 | 450045 258 | msgpack.createEncodeStream().write(obj); | 1000000 | 1577 | 634115 259 | msgpack.Decoder().on("data",ondata).decode(buf); | 1000000 | 2246 | 445235 260 | 261 | See the [benchmark.md](benchmark.md) for more benchmarks and information about benchmarking. 262 | 263 | ## Custom Extensions 264 | You can add your own custom extensions, which can be used to encode specific types/classes in certain ways. This is done by using the `addExtension` function, and specifying the class, extension `type` code (should be a number from 1-100, reserving negatives for MessagePack, 101-127 for msgpackr), and your `pack` and `unpack` functions (or just the one you need). 
265 | ```js 266 | import { addExtension, Packr } from 'msgpackr'; 267 | 268 | class MyCustomClass {...} 269 | 270 | let extPackr = new Packr(); 271 | addExtension({ 272 | 	Class: MyCustomClass, 273 | 	type: 11, // register your own extension code (a type code from 1-100) 274 | 	pack(instance) { 275 | 		// define how your custom class should be encoded 276 | 		return Buffer.from([instance.myData]); // return a buffer 277 | 	}, 278 | 	unpack(buffer) { 279 | 		// define how your custom class should be decoded 280 | 		let instance = new MyCustomClass(); 281 | 		instance.myData = buffer[0]; 282 | 		return instance; // decoded value from buffer 283 | 	} 284 | }); 285 | ``` 286 | If you want to use msgpackr to encode and decode the data within your extensions, you can use the `read` and `write` functions and read and write data/objects that will be encoded and decoded by msgpackr, which can be easier and faster than creating and receiving separate buffers: 287 | 288 | ```js 289 | import { addExtension, Packr } from 'msgpackr'; 290 | 291 | class MyCustomClass {...} 292 | 293 | let extPackr = new Packr(); 294 | addExtension({ 295 | 	Class: MyCustomClass, 296 | 	type: 11, // register your own extension code (a type code from 1-100) 297 | 	write(instance) { 298 | 		// define how your custom class should be encoded 299 | 		return instance.myData; // return some data to be encoded 300 | 	}, 301 | 	read(data) { 302 | 		// define how your custom class should be decoded, 303 | 		// data will already be unpacked/decoded 304 | 		let instance = new MyCustomClass(); 305 | 		instance.myData = data; 306 | 		return instance; // return decoded value 307 | 	} 308 | }); 309 | ``` 310 | Note that you can just return the same object from `write`, and in this case msgpackr will encode it using the default object/array encoding: 311 | ```js 312 | addExtension({ 313 | 	Class: MyCustomClass, 314 | 	type: 12, 315 | 	read: function(data) { 316 | 		Object.setPrototypeOf(data, MyCustomClass.prototype) 317 | 		return data 318 | 	}, 319 | 	write:
function(data) { 320 | 		return data 321 | 	} 322 | }) 323 | ``` 324 | You can also create an extension with `Class` and `write` methods, but no `type` (or `read`), if you just want to customize how a class is serialized without using MessagePack extension encoding. 325 | 326 | ### Additional Performance Optimizations 327 | Msgpackr is already fast, but here are some tips for making it faster: 328 | 329 | #### Buffer Reuse 330 | Msgpackr is designed to work well with reusable buffers. Allocating new buffers can be relatively expensive, so if you have Node addons, it can be much faster to reuse buffers and use memcpy to copy data into existing buffers. Then msgpackr `unpack` can be executed on the same buffer, with new data, and optionally take a second parameter indicating the effective size of the available data in the buffer. 331 | 332 | #### Arena Allocation (`useBuffer()`) 333 | During the serialization process, data is written to buffers. Again, allocating new buffers is a relatively expensive process, and the `useBuffer` method can help allow reuse of buffers that will further improve performance. With the `useBuffer` method, you can provide a buffer, serialize data into it, and when it is known that you are done using that buffer, you can call `useBuffer` again to reuse it. The use of `useBuffer` is never required, buffers will still be handled and cleaned up through GC if not used, it just provides a small performance boost. 334 | 335 | ## Record Structure Extension Definition 336 | The record structure extension uses extension id 0x72 ("r") to declare the use of this functionality. The extension "data" byte (or bytes) identifies the byte or bytes used to identify the start of a record in the subsequent MessagePack block or stream. The identifier byte (or the first byte in a sequence) must be from 0x40 - 0x7f (and therefore replaces one byte representations of positive integers 64 - 127, which can alternately be represented with int or uint types).
The extension declaration must be immediately followed by a MessagePack array that defines the field names of the record structure. 337 | 338 | Once a record identifier and record field names have been defined, the parser/decoder should proceed to read the next value. Any subsequent use of the record identifier as a value in the block or stream should be parsed as a record instance, and the next n values, where n is the number of fields (as defined in the array of field names), should be read as the values of the fields. For example, here we have defined a structure with fields "foo" and "bar", with the record identifier 0x40, and then read a record instance that defines the field values of 4 and 2, respectively: 339 | ``` 340 | +--------+--------+--------+~~~~~~~~~~~~~~~~~~~~~~~~~+--------+--------+ 341 | |  0xd4  |  0x72  |  0x40  | array: [ "foo", "bar" ] |  0x04  |  0x02  | 342 | +--------+--------+--------+~~~~~~~~~~~~~~~~~~~~~~~~~+--------+--------+ 343 | ``` 344 | Which should generate an object that would correspond to JSON: 345 | ```js 346 | { "foo": 4, "bar": 2} 347 | ``` 348 | 349 | ## Additional value types 350 | msgpackr supports `undefined` (using fixext1 + type: 0 + data: 0 to match other JS implementations), `NaN`, `Infinity`, and `-Infinity` (using standard IEEE 754 representations with doubles/floats). 351 | 352 | ### Dates 353 | msgpackr saves all JavaScript `Date`s using the standard MessagePack date extension (type -1), using the smallest of 32-bit, 64-bit or 96-bit format needed to store the date without data loss (or using 32-bit if the useTimestamp32 option is specified). 354 | 355 | ### Structured Cloning 356 | With structured cloning enabled, msgpackr will also use extensions to store Set, Map, Error, RegExp, ArrayBufferView objects and preserve their types.
357 | 358 | ## Alternate Encoding/Package 359 | The high-performance serialization and deserialization algorithms in the msgpackr package are also available in the [cbor-x](https://github.com/kriszyp/cbor-x) package for the CBOR format, with the same API and design. A quick summary of the pros and cons of using MessagePack vs CBOR are: 360 | * MessagePack has wider adoption, and, at least with this implementation, is slightly more efficient (by roughly 1%). 361 | * CBOR has an [official IETF standardization track](https://tools.ietf.org/html/rfc7049), and the record extension is conceptually/philosophically a better fit for CBOR tags. 362 | 363 | ## License 364 | 365 | MIT 366 | 367 | ### Browser Consideration 368 | MessagePack can be a great choice for high-performance data delivery to browsers, as reasonable data size is possible without compression. And msgpackr works very well in modern browsers. However, it is worth noting that if you want highly compact data, brotli or gzip are most effective in compressing, and MessagePack's character frequency tends to defeat Huffman encoding used by these standard compression algorithms, resulting in less compact data than compressed JSON. 369 | 370 | ### Credits 371 | 372 | Various projects have been inspirations for this, and code has been borrowed from https://github.com/msgpack/msgpack-javascript and https://github.com/mtth/avsc.
373 | -------------------------------------------------------------------------------- /unpack.js: -------------------------------------------------------------------------------- 1 | var decoder 2 | try { 3 | decoder = new TextDecoder() 4 | } catch(error) {} 5 | var src 6 | var srcEnd 7 | var position = 0 8 | var alreadySet 9 | const EMPTY_ARRAY = [] 10 | var strings = EMPTY_ARRAY 11 | var stringPosition = 0 12 | var currentUnpackr = {} 13 | var currentStructures 14 | var srcString 15 | var srcStringStart = 0 16 | var srcStringEnd = 0 17 | var bundledStrings 18 | var referenceMap 19 | var currentExtensions = [] 20 | var dataView 21 | var defaultOptions = { 22 | useRecords: false, 23 | mapsAsObjects: true 24 | } 25 | export class C1Type {} 26 | export const C1 = new C1Type() 27 | C1.name = 'MessagePack 0xC1' 28 | var sequentialMode = false 29 | var inlineObjectReadThreshold = 2 30 | var readStruct, onLoadedStructures, onSaveState 31 | var BlockedFunction // we use search and replace to change the next call to BlockedFunction to avoid CSP issues for 32 | // no-eval build 33 | try { 34 | new Function('') 35 | } catch(error) { 36 | // if eval variants are not supported, do not create inline object readers ever 37 | inlineObjectReadThreshold = Infinity 38 | } 39 | 40 | export class Unpackr { 41 | constructor(options) { 42 | if (options) { 43 | if (options.useRecords === false && options.mapsAsObjects === undefined) 44 | options.mapsAsObjects = true 45 | if (options.sequential && options.trusted !== false) { 46 | options.trusted = true; 47 | if (!options.structures && options.useRecords != false) { 48 | options.structures = [] 49 | if (!options.maxSharedStructures) 50 | options.maxSharedStructures = 0 51 | } 52 | } 53 | if (options.structures) 54 | options.structures.sharedLength = options.structures.length 55 | else if (options.getStructures) { 56 | (options.structures = []).uninitialized = true // this is what we use to denote an uninitialized structures 57 | 
options.structures.sharedLength = 0 58 | } 59 | if (options.int64AsNumber) { 60 | options.int64AsType = 'number' 61 | } 62 | } 63 | Object.assign(this, options) 64 | } 65 | unpack(source, options) { 66 | if (src) { 67 | // re-entrant execution, save the state and restore it after we do this unpack 68 | return saveState(() => { 69 | clearSource() 70 | return this ? this.unpack(source, options) : Unpackr.prototype.unpack.call(defaultOptions, source, options) 71 | }) 72 | } 73 | if (!source.buffer && source.constructor === ArrayBuffer) 74 | source = typeof Buffer !== 'undefined' ? Buffer.from(source) : new Uint8Array(source); 75 | if (typeof options === 'object') { 76 | srcEnd = options.end || source.length 77 | position = options.start || 0 78 | } else { 79 | position = 0 80 | srcEnd = options > -1 ? options : source.length 81 | } 82 | stringPosition = 0 83 | srcStringEnd = 0 84 | srcString = null 85 | strings = EMPTY_ARRAY 86 | bundledStrings = null 87 | src = source 88 | // this provides cached access to the data view for a buffer if it is getting reused, which is a recommend 89 | // technique for getting data from a database where it can be copied into an existing buffer instead of creating 90 | // new ones 91 | try { 92 | dataView = source.dataView || (source.dataView = new DataView(source.buffer, source.byteOffset, source.byteLength)) 93 | } catch(error) { 94 | // if it doesn't have a buffer, maybe it is the wrong type of object 95 | src = null 96 | if (source instanceof Uint8Array) 97 | throw error 98 | throw new Error('Source must be a Uint8Array or Buffer but was a ' + ((source && typeof source == 'object') ? 
source.constructor.name : typeof source)) 99 | } 100 | if (this instanceof Unpackr) { 101 | currentUnpackr = this 102 | if (this.structures) { 103 | currentStructures = this.structures 104 | return checkedRead(options) 105 | } else if (!currentStructures || currentStructures.length > 0) { 106 | currentStructures = [] 107 | } 108 | } else { 109 | currentUnpackr = defaultOptions 110 | if (!currentStructures || currentStructures.length > 0) 111 | currentStructures = [] 112 | } 113 | return checkedRead(options) 114 | } 115 | unpackMultiple(source, forEach) { 116 | let values, lastPosition = 0 117 | try { 118 | sequentialMode = true 119 | let size = source.length 120 | let value = this ? this.unpack(source, size) : defaultUnpackr.unpack(source, size) 121 | if (forEach) { 122 | if (forEach(value, lastPosition, position) === false) return; 123 | while(position < size) { 124 | lastPosition = position 125 | if (forEach(checkedRead(), lastPosition, position) === false) { 126 | return 127 | } 128 | } 129 | } 130 | else { 131 | values = [ value ] 132 | while(position < size) { 133 | lastPosition = position 134 | values.push(checkedRead()) 135 | } 136 | return values 137 | } 138 | } catch(error) { 139 | error.lastPosition = lastPosition 140 | error.values = values 141 | throw error 142 | } finally { 143 | sequentialMode = false 144 | clearSource() 145 | } 146 | } 147 | _mergeStructures(loadedStructures, existingStructures) { 148 | if (onLoadedStructures) 149 | loadedStructures = onLoadedStructures.call(this, loadedStructures); 150 | loadedStructures = loadedStructures || [] 151 | if (Object.isFrozen(loadedStructures)) 152 | loadedStructures = loadedStructures.map(structure => structure.slice(0)) 153 | for (let i = 0, l = loadedStructures.length; i < l; i++) { 154 | let structure = loadedStructures[i] 155 | if (structure) { 156 | structure.isShared = true 157 | if (i >= 32) 158 | structure.highByte = (i - 32) >> 5 159 | } 160 | } 161 | loadedStructures.sharedLength = 
loadedStructures.length 162 | for (let id in existingStructures || []) { 163 | if (id >= 0) { 164 | let structure = loadedStructures[id] 165 | let existing = existingStructures[id] 166 | if (existing) { 167 | if (structure) 168 | (loadedStructures.restoreStructures || (loadedStructures.restoreStructures = []))[id] = structure 169 | loadedStructures[id] = existing 170 | } 171 | } 172 | } 173 | return this.structures = loadedStructures 174 | } 175 | decode(source, options) { 176 | return this.unpack(source, options) 177 | } 178 | } 179 | export function getPosition() { 180 | return position 181 | } 182 | export function checkedRead(options) { 183 | try { 184 | if (!currentUnpackr.trusted && !sequentialMode) { 185 | let sharedLength = currentStructures.sharedLength || 0 186 | if (sharedLength < currentStructures.length) 187 | currentStructures.length = sharedLength 188 | } 189 | let result 190 | if (currentUnpackr.randomAccessStructure && src[position] < 0x40 && src[position] >= 0x20 && readStruct) { 191 | result = readStruct(src, position, srcEnd, currentUnpackr) 192 | src = null // dispose of this so that recursive unpack calls don't save state 193 | if (!(options && options.lazy) && result) 194 | result = result.toJSON() 195 | position = srcEnd 196 | } else 197 | result = read() 198 | if (bundledStrings) { // bundled strings to skip past 199 | position = bundledStrings.postBundlePosition 200 | bundledStrings = null 201 | } 202 | if (sequentialMode) 203 | // we only need to restore the structures if there was an error, but if we completed a read, 204 | // we can clear this out and keep the structures we read 205 | currentStructures.restoreStructures = null 206 | 207 | if (position == srcEnd) { 208 | // finished reading this source, cleanup references 209 | if (currentStructures && currentStructures.restoreStructures) 210 | restoreStructures() 211 | currentStructures = null 212 | src = null 213 | if (referenceMap) 214 | referenceMap = null 215 | } else if (position > 
srcEnd) { 216 | // over read 217 | throw new Error('Unexpected end of MessagePack data') 218 | } else if (!sequentialMode) { 219 | let jsonView; 220 | try { 221 | jsonView = JSON.stringify(result, (_, value) => typeof value === "bigint" ? `${value}n` : value).slice(0, 100) 222 | } catch(error) { 223 | jsonView = '(JSON view not available ' + error + ')' 224 | } 225 | throw new Error('Data read, but end of buffer not reached ' + jsonView) 226 | } 227 | // else more to read, but we are reading sequentially, so don't clear source yet 228 | return result 229 | } catch(error) { 230 | if (currentStructures && currentStructures.restoreStructures) 231 | restoreStructures() 232 | clearSource() 233 | if (error instanceof RangeError || error.message.startsWith('Unexpected end of buffer') || position > srcEnd) { 234 | error.incomplete = true 235 | } 236 | throw error 237 | } 238 | } 239 | 240 | function restoreStructures() { 241 | for (let id in currentStructures.restoreStructures) { 242 | currentStructures[id] = currentStructures.restoreStructures[id] 243 | } 244 | currentStructures.restoreStructures = null 245 | } 246 | 247 | export function read() { 248 | let token = src[position++] 249 | if (token < 0xa0) { 250 | if (token < 0x80) { 251 | if (token < 0x40) 252 | return token 253 | else { 254 | let structure = currentStructures[token & 0x3f] || 255 | currentUnpackr.getStructures && loadStructures()[token & 0x3f] 256 | if (structure) { 257 | if (!structure.read) { 258 | structure.read = createStructureReader(structure, token & 0x3f) 259 | } 260 | return structure.read() 261 | } else 262 | return token 263 | } 264 | } else if (token < 0x90) { 265 | // map 266 | token -= 0x80 267 | if (currentUnpackr.mapsAsObjects) { 268 | let object = {} 269 | for (let i = 0; i < token; i++) { 270 | let key = readKey() 271 | if (key === '__proto__') 272 | key = '__proto_' 273 | object[key] = read() 274 | } 275 | return object 276 | } else { 277 | let map = new Map() 278 | for (let i = 0; i < 
token; i++) { 279 | map.set(read(), read()) 280 | } 281 | return map 282 | } 283 | } else { 284 | token -= 0x90 285 | let array = new Array(token) 286 | for (let i = 0; i < token; i++) { 287 | array[i] = read() 288 | } 289 | if (currentUnpackr.freezeData) 290 | return Object.freeze(array) 291 | return array 292 | } 293 | } else if (token < 0xc0) { 294 | // fixstr 295 | let length = token - 0xa0 296 | if (srcStringEnd >= position) { 297 | return srcString.slice(position - srcStringStart, (position += length) - srcStringStart) 298 | } 299 | if (srcStringEnd == 0 && srcEnd < 140) { 300 | // for small blocks, avoiding the overhead of the extract call is helpful 301 | let string = length < 16 ? shortStringInJS(length) : longStringInJS(length) 302 | if (string != null) 303 | return string 304 | } 305 | return readFixedString(length) 306 | } else { 307 | let value 308 | switch (token) { 309 | case 0xc0: return null 310 | case 0xc1: 311 | if (bundledStrings) { 312 | value = read() // followed by the length of the string in characters (not bytes!) 
313 | if (value > 0) 314 | return bundledStrings[1].slice(bundledStrings.position1, bundledStrings.position1 += value) 315 | else 316 | return bundledStrings[0].slice(bundledStrings.position0, bundledStrings.position0 -= value) 317 | } 318 | return C1; // "never-used", return special object to denote that 319 | case 0xc2: return false 320 | case 0xc3: return true 321 | case 0xc4: 322 | // bin 8 323 | value = src[position++] 324 | if (value === undefined) 325 | throw new Error('Unexpected end of buffer') 326 | return readBin(value) 327 | case 0xc5: 328 | // bin 16 329 | value = dataView.getUint16(position) 330 | position += 2 331 | return readBin(value) 332 | case 0xc6: 333 | // bin 32 334 | value = dataView.getUint32(position) 335 | position += 4 336 | return readBin(value) 337 | case 0xc7: 338 | // ext 8 339 | return readExt(src[position++]) 340 | case 0xc8: 341 | // ext 16 342 | value = dataView.getUint16(position) 343 | position += 2 344 | return readExt(value) 345 | case 0xc9: 346 | // ext 32 347 | value = dataView.getUint32(position) 348 | position += 4 349 | return readExt(value) 350 | case 0xca: 351 | value = dataView.getFloat32(position) 352 | if (currentUnpackr.useFloat32 > 2) { 353 | // this does rounding of numbers that were encoded in 32-bit float to nearest significant decimal digit that could be preserved 354 | let multiplier = mult10[((src[position] & 0x7f) << 1) | (src[position + 1] >> 7)] 355 | position += 4 356 | return ((multiplier * value + (value > 0 ? 
0.5 : -0.5)) >> 0) / multiplier 357 | } 358 | position += 4 359 | return value 360 | case 0xcb: 361 | value = dataView.getFloat64(position) 362 | position += 8 363 | return value 364 | // uint handlers 365 | case 0xcc: 366 | return src[position++] 367 | case 0xcd: 368 | value = dataView.getUint16(position) 369 | position += 2 370 | return value 371 | case 0xce: 372 | value = dataView.getUint32(position) 373 | position += 4 374 | return value 375 | case 0xcf: 376 | if (currentUnpackr.int64AsType === 'number') { 377 | value = dataView.getUint32(position) * 0x100000000 378 | value += dataView.getUint32(position + 4) 379 | } else if (currentUnpackr.int64AsType === 'string') { 380 | value = dataView.getBigUint64(position).toString() 381 | } else if (currentUnpackr.int64AsType === 'auto') { 382 | value = dataView.getBigUint64(position) 383 | if (value<=BigInt(2)<=BigInt(-2)<= position) { 454 | return srcString.slice(position - srcStringStart, (position += value) - srcStringStart) 455 | } 456 | return readString8(value) 457 | case 0xda: 458 | // str 16 459 | value = dataView.getUint16(position) 460 | position += 2 461 | if (srcStringEnd >= position) { 462 | return srcString.slice(position - srcStringStart, (position += value) - srcStringStart) 463 | } 464 | return readString16(value) 465 | case 0xdb: 466 | // str 32 467 | value = dataView.getUint32(position) 468 | position += 4 469 | if (srcStringEnd >= position) { 470 | return srcString.slice(position - srcStringStart, (position += value) - srcStringStart) 471 | } 472 | return readString32(value) 473 | case 0xdc: 474 | // array 16 475 | value = dataView.getUint16(position) 476 | position += 2 477 | return readArray(value) 478 | case 0xdd: 479 | // array 32 480 | value = dataView.getUint32(position) 481 | position += 4 482 | return readArray(value) 483 | case 0xde: 484 | // map 16 485 | value = dataView.getUint16(position) 486 | position += 2 487 | return readMap(value) 488 | case 0xdf: 489 | // map 32 490 | value = 
dataView.getUint32(position)
	position += 4
	return readMap(value)
default: // negative int
	// fixint negatives (0xe0-0xff) map to -32..-1
	if (token >= 0xe0)
		return token - 0x100
	if (token === undefined) {
		// reading past the end of the buffer yields undefined; mark the error so
		// streaming callers can tell "need more data" from a real parse failure
		let error = new Error('Unexpected end of MessagePack data')
		error.incomplete = true
		throw error
	}
	throw new Error('Unknown MessagePack token ' + token)

}
}
}
// keys matching this can be emitted as bare identifiers in generated reader code
const validName = /^[a-zA-Z_$][a-zA-Z\d_$]*$/
// Builds a reader function for a record structure (array of property keys).
// Starts with a generic interpreted loop; once the structure has been read more than
// inlineObjectReadThreshold times, compiles a specialized object-literal reader via new Function.
function createStructureReader(structure, firstId) {
	function readObject() {
		// This initial function is quick to instantiate, but runs slower. After several iterations pay the cost to build the faster function
		if (readObject.count++ > inlineObjectReadThreshold) {
			// '__proto__' is renamed to '__proto_' to block prototype pollution;
			// non-identifier keys are JSON-quoted into computed properties
			let readObject = structure.read = (new Function('r', 'return function(){return ' + (currentUnpackr.freezeData ? 'Object.freeze' : '') +
				'({' + structure.map(key => key === '__proto__' ? '__proto_:r()' : validName.test(key) ? key + ':r()' : ('[' + JSON.stringify(key) + ']:r()')).join(',') + '})}'))(read)
			if (structure.highByte === 0)
				structure.read = createSecondByteReader(firstId, structure.read)
			return readObject() // second byte is already read, if there is one so immediately read object
		}
		// slow interpreted path: read one value per structure key, in order
		let object = {}
		for (let i = 0, l = structure.length; i < l; i++) {
			let key = structure[i]
			if (key === '__proto__')
				key = '__proto_'
			object[key] = read()
		}
		if (currentUnpackr.freezeData)
			return Object.freeze(object);
		return object
	}
	readObject.count = 0
	if (structure.highByte === 0) {
		return createSecondByteReader(firstId, readObject)
	}
	return readObject
}

// Wraps a reader for two-byte record ids: a high byte of 0 means "same structure",
// otherwise the full id is reconstructed and its structure looked up (loading shared
// structures on demand).
const createSecondByteReader = (firstId, read0) => {
	return function() {
		let highByte = src[position++]
		if (highByte === 0)
			return read0()
		let id = firstId < 32 ? -(firstId + (highByte << 5)) : firstId + (highByte << 5)
		let structure = currentStructures[id] || loadStructures()[id]
		if (!structure) {
			throw new Error('Record id is not defined for ' + id)
		}
		if (!structure.read)
			structure.read = createStructureReader(structure, firstId)
		return structure.read()
	}
}

// Fetches shared structures from the user-provided getStructures() callback and merges
// them into currentStructures; runs inside saveState because the callback may touch our buffer.
export function loadStructures() {
	let loadedStructures = saveState(() => {
		// save the state in case getStructures modifies our buffer
		src = null
		return currentUnpackr.getStructures()
	})
	return currentStructures = currentUnpackr._mergeStructures(loadedStructures, currentStructures)
}

// String readers default to the pure-JS decoder; setExtractor swaps in native-accelerated ones.
var readFixedString = readStringJS
var readString8 = readStringJS
var readString16 = readStringJS
var readString32 = readStringJS
export let isNativeAccelerationEnabled = false

// Installs a native string extractor (e.g. from msgpackr-extract). Each reader knows its
// header length so it can pass the start of the string *data* to the extractor.
export function setExtractor(extractStrings) {
	isNativeAccelerationEnabled = true
	readFixedString = readString(1)
	readString8 = readString(2)
	readString16 = readString(3)
	readString32 = readString(5)
	function readString(headerLength) {
		return function readString(length) {
			// strings[] is a batch of pre-extracted strings; consume the next one if available
			let string = strings[stringPosition++]
			if (string == null) {
				if (bundledStrings)
					return readStringJS(length)
				let byteOffset = src.byteOffset
				let extraction = extractStrings(position - headerLength + byteOffset, srcEnd + byteOffset, src.buffer)
				if (typeof extraction == 'string') {
					string = extraction
					strings = EMPTY_ARRAY
				} else {
					strings = extraction
					stringPosition = 1
					srcStringEnd = 1 // even if a utf-8 string was decoded, must indicate we are in the midst of extracted strings and can't skip strings
					string = strings[0]
					if (string === undefined)
						throw new Error('Unexpected end of buffer')
				}
			}
			let srcStringLength = string.length
			if
(srcStringLength <= length) { 594 | position += length 595 | return string 596 | } 597 | srcString = string 598 | srcStringStart = position 599 | srcStringEnd = position + srcStringLength 600 | position += length 601 | return string.slice(0, length) // we know we just want the beginning 602 | } 603 | } 604 | } 605 | function readStringJS(length) { 606 | let result 607 | if (length < 16) { 608 | if (result = shortStringInJS(length)) 609 | return result 610 | } 611 | if (length > 64 && decoder) 612 | return decoder.decode(src.subarray(position, position += length)) 613 | const end = position + length 614 | const units = [] 615 | result = '' 616 | while (position < end) { 617 | const byte1 = src[position++] 618 | if ((byte1 & 0x80) === 0) { 619 | // 1 byte 620 | units.push(byte1) 621 | } else if ((byte1 & 0xe0) === 0xc0) { 622 | // 2 bytes 623 | const byte2 = src[position++] & 0x3f 624 | units.push(((byte1 & 0x1f) << 6) | byte2) 625 | } else if ((byte1 & 0xf0) === 0xe0) { 626 | // 3 bytes 627 | const byte2 = src[position++] & 0x3f 628 | const byte3 = src[position++] & 0x3f 629 | units.push(((byte1 & 0x1f) << 12) | (byte2 << 6) | byte3) 630 | } else if ((byte1 & 0xf8) === 0xf0) { 631 | // 4 bytes 632 | const byte2 = src[position++] & 0x3f 633 | const byte3 = src[position++] & 0x3f 634 | const byte4 = src[position++] & 0x3f 635 | let unit = ((byte1 & 0x07) << 0x12) | (byte2 << 0x0c) | (byte3 << 0x06) | byte4 636 | if (unit > 0xffff) { 637 | unit -= 0x10000 638 | units.push(((unit >>> 10) & 0x3ff) | 0xd800) 639 | unit = 0xdc00 | (unit & 0x3ff) 640 | } 641 | units.push(unit) 642 | } else { 643 | units.push(byte1) 644 | } 645 | 646 | if (units.length >= 0x1000) { 647 | result += fromCharCode.apply(String, units) 648 | units.length = 0 649 | } 650 | } 651 | 652 | if (units.length > 0) { 653 | result += fromCharCode.apply(String, units) 654 | } 655 | 656 | return result 657 | } 658 | export function readString(source, start, length) { 659 | let existingSrc = src; 660 | src = 
source;
	position = start;
	try {
		return readStringJS(length);
	} finally {
		src = existingSrc;
	}
}

// Reads `length` consecutive values into a new array.
function readArray(length) {
	let array = new Array(length)
	for (let i = 0; i < length; i++) {
		array[i] = read()
	}
	if (currentUnpackr.freezeData)
		return Object.freeze(array)
	return array
}

// Reads `length` key/value pairs, either into a plain object (mapsAsObjects, with
// '__proto__' renamed to '__proto_' to prevent prototype pollution) or a Map.
function readMap(length) {
	if (currentUnpackr.mapsAsObjects) {
		let object = {}
		for (let i = 0; i < length; i++) {
			let key = readKey()
			if (key === '__proto__')
				key = '__proto_';
			object[key] = read()
		}
		return object
	} else {
		let map = new Map()
		for (let i = 0; i < length; i++) {
			map.set(read(), read())
		}
		return map
	}
}

var fromCharCode = String.fromCharCode
// ASCII-only decode for longer strings: bails out (returns undefined and rewinds
// position) as soon as any byte has the high bit set.
function longStringInJS(length) {
	let start = position
	let bytes = new Array(length)
	for (let i = 0; i < length; i++) {
		const byte = src[position++];
		if ((byte & 0x80) > 0) {
			position = start
			return
		}
		bytes[i] = byte
	}
	return fromCharCode.apply(String, bytes)
}
// Fully-unrolled ASCII decode for strings of length 0-15. Each branch reads its bytes,
// and on any non-ASCII byte rewinds position by the number of bytes consumed and returns
// undefined so the caller falls back to the full UTF-8 decoder.
function shortStringInJS(length) {
	if (length < 4) {
		if (length < 2) {
			if (length === 0)
				return ''
			else {
				let a = src[position++]
				// NOTE(review): `> 1` here where every sibling branch uses `> 0`; equivalent in
				// practice since (a & 0x80) is only ever 0 or 0x80, but worth confirming upstream
				if ((a & 0x80) > 1) {
					position -= 1
					return
				}
				return fromCharCode(a)
			}
		} else {
			let a = src[position++]
			let b = src[position++]
			if ((a & 0x80) > 0 || (b & 0x80) > 0) {
				position -= 2
				return
			}
			if (length < 3)
				return fromCharCode(a, b)
			let c = src[position++]
			if ((c & 0x80) > 0) {
				position -= 3
				return
			}
			return fromCharCode(a, b, c)
		}
	} else {
		let a = src[position++]
		let b = src[position++]
		let c = src[position++]
		let d = src[position++]
		if ((a & 0x80) > 0 || (b & 0x80) > 0 || (c & 0x80) > 0 || (d & 0x80) > 0) {
			position -= 4
			return
		}
		if (length < 6) {
			if (length === 4)
				return fromCharCode(a, b, c, d)
			else {
				let e = src[position++]
				if ((e & 0x80) > 0) {
					position -= 5
					return
				}
				return fromCharCode(a, b, c, d, e)
			}
		} else if (length < 8) {
			let e = src[position++]
			let f = src[position++]
			if ((e & 0x80) > 0 || (f & 0x80) > 0) {
				position -= 6
				return
			}
			if (length < 7)
				return fromCharCode(a, b, c, d, e, f)
			let g = src[position++]
			if ((g & 0x80) > 0) {
				position -= 7
				return
			}
			return fromCharCode(a, b, c, d, e, f, g)
		} else {
			let e = src[position++]
			let f = src[position++]
			let g = src[position++]
			let h = src[position++]
			if ((e & 0x80) > 0 || (f & 0x80) > 0 || (g & 0x80) > 0 || (h & 0x80) > 0) {
				position -= 8
				return
			}
			if (length < 10) {
				if (length === 8)
					return fromCharCode(a, b, c, d, e, f, g, h)
				else {
					let i = src[position++]
					if ((i & 0x80) > 0) {
						position -= 9
						return
					}
					return fromCharCode(a, b, c, d, e, f, g, h, i)
				}
			} else if (length < 12) {
				let i = src[position++]
				let j = src[position++]
				if ((i & 0x80) > 0 || (j & 0x80) > 0) {
					position -= 10
					return
				}
				if (length < 11)
					return fromCharCode(a, b, c, d, e, f, g, h, i, j)
				let k = src[position++]
				if ((k & 0x80) > 0) {
					position -= 11
					return
				}
				return fromCharCode(a, b, c, d, e, f, g, h, i, j, k)
			} else {
				let i = src[position++]
				let j = src[position++]
				let k = src[position++]
				let l = src[position++]
				if ((i & 0x80) > 0 || (j & 0x80) > 0 || (k & 0x80) > 0 || (l & 0x80) > 0) {
					position -= 12
					return
				}
				if (length < 14) {
					if (length === 12)
						return fromCharCode(a, b, c, d, e, f, g, h, i, j, k, l)
					else {
						let m = src[position++]
						if ((m & 0x80) > 0) {
							position -= 13
							return
						}
						return fromCharCode(a, b, c, d, e, f, g, h, i, j, k, l, m)
					}
				} else {
					let m = src[position++]
					let n = src[position++]
					if ((m & 0x80) > 0 || (n & 0x80) > 0) {
						position -= 14
						return
					}
					if (length < 15)
						return fromCharCode(a, b, c, d, e, f, g, h, i, j, k, l, m, n)
					let o = src[position++]
					if ((o & 0x80) > 0) {
						position -= 15
						return
					}
					return fromCharCode(a, b, c, d, e, f, g, h, i, j, k, l, m, n, o)
				}
			}
		}
	}
}

// Reads a single MessagePack string (fixstr / str 8 / str 16 / str 32) at the current
// position, always via the JS decoder. Used for bundled-strings where the native
// extractor must not be consulted. Throws if the next token is not a string.
function readOnlyJSString() {
	let token = src[position++]
	let length
	if (token < 0xc0) {
		// fixstr
		length = token - 0xa0
	} else {
		switch(token) {
			case 0xd9:
				// str 8
				length = src[position++]
				break
			case 0xda:
				// str 16
				length = dataView.getUint16(position)
				position += 2
				break
			case 0xdb:
				// str 32
				length = dataView.getUint32(position)
				position += 4
				break
			default:
				throw new Error('Expected string')
		}
	}
	return readStringJS(length)
}


// Returns the raw bytes of a bin value: a copying slice when copyBuffers is set,
// otherwise a zero-copy subarray view into the source buffer.
function readBin(length) {
	return currentUnpackr.copyBuffers ?
// specifically use the copying slice (not the node one)
		Uint8Array.prototype.slice.call(src, position, position += length) :
		src.subarray(position, position += length)
}
// Dispatches an ext value to its registered handler. The handler receives the raw
// payload bytes and a callback that re-reads from an arbitrary position, restoring
// the post-extension position afterwards.
function readExt(length) {
	let type = src[position++]
	if (currentExtensions[type]) {
		let end
		return currentExtensions[type](src.subarray(position, end = (position += length)), (readPosition) => {
			position = readPosition;
			try {
				return read();
			} finally {
				position = end;
			}
		})
	}
	else
		throw new Error('Unknown extension type ' + type)
}

// Cache of recently-seen map keys, indexed by a 12-bit hash of length + first bytes,
// so repeated object keys skip string decoding entirely.
var keyCache = new Array(4096)
function readKey() {
	let length = src[position++]
	if (length >= 0xa0 && length < 0xc0) {
		// fixstr, potentially use key cache
		length = length - 0xa0
		if (srcStringEnd >= position) // if it has been extracted, must use it (and faster anyway)
			return srcString.slice(position - srcStringStart, (position += length) - srcStringStart)
		else if (!(srcStringEnd == 0 && srcEnd < 180))
			return readFixedString(length)
		// else: small standalone buffer with no extraction — fall through to the key cache below
	} else { // not cacheable, go back and do a standard read
		position--
		return asSafeString(read())
	}
	// 12-bit cache slot derived from the key length and its first one or two bytes
	let key = ((length << 5) ^ (length > 1 ? dataView.getUint16(position) : length > 0 ? src[position] : 0)) & 0xfff
	let entry = keyCache[key]
	let checkPosition = position
	let end = position + length - 3
	let chunk
	let i = 0
	if (entry && entry.bytes == length) {
		// compare cached entry against source, 4 bytes at a time, then the 0-3 byte tail
		while (checkPosition < end) {
			chunk = dataView.getUint32(checkPosition)
			if (chunk != entry[i++]) {
				checkPosition = 0x70000000 // sentinel: force mismatch
				break
			}
			checkPosition += 4
		}
		end += 3
		while (checkPosition < end) {
			chunk = src[checkPosition++]
			if (chunk != entry[i++]) {
				checkPosition = 0x70000000
				break
			}
		}
		if (checkPosition === end) {
			// cache hit: advance past the key bytes and return the cached string
			position = checkPosition
			return entry.string
		}
		end -= 3
		checkPosition = position
	}
	// cache miss: record the key's byte chunks, then decode and cache the string
	entry = []
	keyCache[key] = entry
	entry.bytes = length
	while (checkPosition < end) {
		chunk = dataView.getUint32(checkPosition)
		entry.push(chunk)
		checkPosition += 4
	}
	end += 3
	while (checkPosition < end) {
		chunk = src[checkPosition++]
		entry.push(chunk)
	}
	// for small blocks, avoiding the overhead of the extract call is helpful
	let string = length < 16 ? shortStringInJS(length) : longStringInJS(length)
	if (string != null)
		return entry.string = string
	return entry.string = readFixedString(length)
}

// Coerces a decoded value into a string safe to use as a record/object key.
function asSafeString(property) {
	// protect against expensive (DoS) string conversions
	if (typeof property === 'string') return property;
	if (typeof property === 'number' || typeof property === 'boolean' || typeof property === 'bigint') return property.toString();
	if (property == null) return property + '';
	if (currentUnpackr.allowArraysInMapKeys && Array.isArray(property) && property.flat().every(item => ['string', 'number', 'boolean', 'bigint'].includes(typeof item))) {
		return property.flat().toString();
	}
	throw new Error(`Invalid property type for record: ${typeof property}`);
}
// the registration of the record definition extension (as "r")
// Reads a structure (array of keys) for record id `id` (optionally two-byte via highByte),
// registers it in currentStructures, and immediately reads one object with it.
const recordDefinition = (id, highByte) => {
	let structure = read().map(asSafeString) // ensure that all keys are strings and
	// that the array is mutable
	let firstByte = id
	if (highByte !== undefined) {
		id = id < 32 ? -((highByte << 5) + id) : ((highByte << 5) + id)
		structure.highByte = highByte
	}
	let existingStructure = currentStructures[id]
	// If it is a shared structure, we need to restore any changes after reading.
	// Also in sequential mode, we may get incomplete reads and thus errors, and we need to restore
	// to the state prior to an incomplete read in order to properly resume.
if (existingStructure && (existingStructure.isShared || sequentialMode)) {
		// stash the structure being overwritten so it can be restored after this read
		(currentStructures.restoreStructures || (currentStructures.restoreStructures = []))[id] = existingStructure
	}
	currentStructures[id] = structure
	structure.read = createStructureReader(structure, firstByte)
	return structure.read()
}
currentExtensions[0] = () => {} // notepack defines extension 0 to mean undefined, so use that as the default here
currentExtensions[0].noBuffer = true

// ext 0x42: arbitrary-precision signed BigInt, stored as big-endian bytes.
// The first (byteLength % 8 || 8) bytes form a sign-extended head; the remainder is a
// whole number of 8-byte words combined via a divide-and-conquer shift/OR.
currentExtensions[0x42] = data => {
	let headLength = (data.byteLength % 8) || 8
	// sign-extend the first byte
	let head = BigInt(data[0] & 0x80 ? data[0] - 0x100 : data[0])
	for (let i = 1; i < headLength; i++) {
		head <<= BigInt(8)
		head += BigInt(data[i])
	}
	if (data.byteLength !== headLength) {
		let view = new DataView(data.buffer, data.byteOffset, data.byteLength)
		let decode = (start, end) => {
			let length = end - start
			if (length <= 40) {
				// small run: fold 8-byte words linearly
				let out = view.getBigUint64(start)
				for (let i = start + 8; i < end; i += 8) {
					out <<= BigInt(64n) // NOTE(review): BigInt(64n) is just 64n; redundant but harmless
					out |= view.getBigUint64(i)
				}
				return out
			}
			// if (length === 8) return view.getBigUint64(start)
			// large run: split near the midpoint (aligned to 8 bytes) to keep shift sizes balanced
			let middle = start + (length >> 4 << 3)
			let left = decode(start, middle)
			let right = decode(middle, end)
			return (left << BigInt((end - middle) * 8)) | right
		}
		head = (head << BigInt((view.byteLength - headLength) * 8)) | decode(headLength, view.byteLength)
	}
	return head
}

// Built-in error constructors addressable by name for ext 0x65.
let errors = {
	Error, EvalError, RangeError, ReferenceError, SyntaxError, TypeError, URIError, AggregateError: typeof AggregateError === 'function' ? AggregateError : null,
}
// ext 0x65: Error — payload is [name, message, cause]; unknown names become a plain
// Error with its name overridden.
currentExtensions[0x65] = () => {
	let data = read()
	if (!errors[data[0]]) {
		let error = Error(data[1], { cause: data[2] })
		error.name = data[0]
		return error
	}
	return errors[data[0]](data[1], { cause: data[2] })
}

currentExtensions[0x69] = (data) => {
	// id extension (for structured clones)
	if (currentUnpackr.structuredClone === false) throw new Error('Structured clone extension is disabled')
	// the 32-bit id was the ext payload just consumed, so it sits immediately before position
	let id = dataView.getUint32(position - 4)
	if (!referenceMap)
		referenceMap = new Map()
	// peek at the next token to pick a placeholder of the right container type,
	// so cyclic pointers (ext 0x70) resolved during read() get a usable target
	let token = src[position]
	let target
	// TODO: handle any other types that can cycle and make the code more robust if there are other extensions
	if (token >= 0x90 && token < 0xa0 || token == 0xdc || token == 0xdd)
		target = []
	else if (token >= 0x80 && token < 0x90 || token == 0xde || token == 0xdf)
		target = new Map()
	else if ((token >= 0xc7 && token <= 0xc9 || token >= 0xd4 && token <= 0xd8) && src[position + 1] === 0x73)
		target = new Set()
	else
		target = {}

	let refEntry = { target } // a placeholder object
	referenceMap.set(id, refEntry)
	let targetProperties = read() // read the next value as the target object to id
	if (!refEntry.used) {
		// no cycle, can just use the returned read object
		return refEntry.target = targetProperties // replace the placeholder with the real one
	} else {
		// there is a cycle, so we have to assign properties to original target
		Object.assign(target, targetProperties)
	}

	// copy over map/set entries if we're able to
	if (target instanceof Map)
		for (let [k, v] of targetProperties.entries()) target.set(k, v)
	if (target instanceof Set)
		for (let i of Array.from(targetProperties)) target.add(i)
	return target
}
currentExtensions[0x70] = (data) => {
	// pointer extension (for structured clones)
	if (currentUnpackr.structuredClone === false) throw new Error('Structured clone extension is disabled')
	// resolve a previously-registered id (ext 0x69); mark it used so cycles are patched up
	let id = dataView.getUint32(position - 4)
	let refEntry = referenceMap.get(id)
	refEntry.used = true
	return refEntry.target
}

// ext 0x73: Set — payload is the array of members
currentExtensions[0x73] = () => new Set(read())

export const typedArrays = ['Int8','Uint8','Uint8Clamped','Int16','Uint16','Int32','Uint32','Float32','Float64','BigInt64','BigUint64'].map(type => type + 'Array')

// constructor lookup host for typed arrays (window fallback for older browsers)
let glbl = typeof globalThis === 'object' ? globalThis : window;
// ext 0x74: typed array / ArrayBuffer / DataView — first byte is a type code indexing
// typedArrays (16 = raw ArrayBuffer, 17 = DataView), remaining bytes are the contents.
currentExtensions[0x74] = (data) => {
	let typeCode = data[0]
	// we always have to slice to get a new ArrayBuffer that is aligned
	let buffer = Uint8Array.prototype.slice.call(data, 1).buffer

	let typedArrayName = typedArrays[typeCode]
	if (!typedArrayName) {
		if (typeCode === 16) return buffer
		if (typeCode === 17) return new DataView(buffer)
		throw new Error('Could not find typed array for code ' + typeCode)
	}
	return new glbl[typedArrayName](buffer)
}
// ext 0x78: RegExp — payload is [source, flags]
currentExtensions[0x78] = () => {
	let data = read()
	return new RegExp(data[0], data[1])
}
const TEMP_BUNDLE = []
// ext 0x62: bundled strings — the payload header gives the total bundle size; two
// concatenated JS strings are read from the end of the bundle region, then decoding
// resumes at the original position with bundledStrings active.
currentExtensions[0x62] = (data) => {
	let dataSize = (data[0] << 24) + (data[1] << 16) + (data[2] << 8) + data[3]
	let dataPosition = position
	position += dataSize - data.length
	// NOTE(review): this TEMP_BUNDLE assignment is immediately overwritten on the next
	// line; it appears to only matter if readOnlyJSString() below consults bundledStrings
	bundledStrings = TEMP_BUNDLE
	bundledStrings = [readOnlyJSString(), readOnlyJSString()]
	bundledStrings.position0 = 0
	bundledStrings.position1 = 0
	bundledStrings.postBundlePosition = position
	position = dataPosition
	return read()
}

// ext 0xff (-1): MessagePack timestamp — 4-byte (seconds), 8-byte (34-bit seconds +
// 30-bit nanoseconds), or 12-byte (64-bit signed seconds + nanoseconds) forms.
currentExtensions[0xff] = (data) => {
	// 32-bit date extension
	if (data.length == 4)
		return new Date((data[0] * 0x1000000 + (data[1] << 16) + (data[2] << 8) + data[3]) * 1000)
	else if (data.length == 8)
		return new Date(
			((data[0] << 22) + (data[1] << 14) + (data[2] << 6) + (data[3] >> 2)) / 1000000 +
			((data[3] & 0x3) * 0x100000000 + data[4] * 0x1000000 + (data[5] << 16) + (data[6] << 8) + data[7]) * 1000)
	else if (data.length == 12)
		return new Date(
			((data[0] << 24) + (data[1] << 16) + (data[2] << 8) + data[3]) / 1000000 +
			// NOTE(review): data[5] is never used in this branch (the 64-bit seconds are built
			// from data[4] sign bit and data[6..11]) — confirm against upstream/spec
			(((data[4] & 0x80) ? -0x1000000000000 : 0) + data[6] * 0x10000000000 + data[7] * 0x100000000 + data[8] * 0x1000000 + (data[9] << 16) + (data[10] << 8) + data[11]) * 1000)
	else
		return new Date('invalid')
}
// registration of bulk record definition?
// currentExtensions[0x52] = () =>

// Snapshots all decoder module-state (including a defensive copy of the source bytes),
// runs `callback` (user code that may re-enter the decoder), then restores everything.
function saveState(callback) {
	if (onSaveState)
		onSaveState();
	let savedSrcEnd = srcEnd
	let savedPosition = position
	let savedStringPosition = stringPosition
	let savedSrcStringStart = srcStringStart
	let savedSrcStringEnd = srcStringEnd
	let savedSrcString = srcString
	let savedStrings = strings
	let savedReferenceMap = referenceMap
	let savedBundledStrings = bundledStrings

	// TODO: We may need to revisit this if we do more external calls to user code (since it could be slow)
	let savedSrc = new Uint8Array(src.slice(0, srcEnd)) // we copy the data in case it changes while external data is processed
	let savedStructures = currentStructures
	let savedStructuresContents = currentStructures.slice(0, currentStructures.length)
	let savedPackr = currentUnpackr
	let savedSequentialMode = sequentialMode
	let value = callback()
	srcEnd = savedSrcEnd
	position = savedPosition
	stringPosition = savedStringPosition
	srcStringStart = savedSrcStringStart
	srcStringEnd = savedSrcStringEnd
	srcString = savedSrcString
strings = savedStrings
	referenceMap = savedReferenceMap
	bundledStrings = savedBundledStrings
	src = savedSrc
	sequentialMode = savedSequentialMode
	currentStructures = savedStructures
	// restore structure contents in place so existing references stay valid
	currentStructures.splice(0, currentStructures.length, ...savedStructuresContents)
	currentUnpackr = savedPackr
	dataView = new DataView(src.buffer, src.byteOffset, src.byteLength)
	return value
}
// Drops references to the source buffer and per-decode state so they can be GC'd.
export function clearSource() {
	src = null
	referenceMap = null
	currentStructures = null
}

// Registers a user extension: either an object with an unpack() method or a bare handler.
export function addExtension(extension) {
	if (extension.unpack)
		currentExtensions[extension.type] = extension.unpack
	else
		currentExtensions[extension.type] = extension
}

export const mult10 = new Array(147) // this is a table matching binary exponents to the multiplier to determine significant digit rounding
for (let i = 0; i < 256; i++) {
	// NOTE(review): the loop writes 256 entries into an array constructed with length 147;
	// JS arrays grow, so this works, but the mismatch looks intentional upstream — confirm
	mult10[i] = +('1e' + Math.floor(45.15 - i * 0.30103))
}
export const Decoder = Unpackr
// default instance backing the module-level unpack/decode convenience exports
var defaultUnpackr = new Unpackr({ useRecords: false })
export const unpack = defaultUnpackr.unpack
export const unpackMultiple = defaultUnpackr.unpackMultiple
export const decode = defaultUnpackr.unpack
export const FLOAT32_OPTIONS = {
	NEVER: 0,
	ALWAYS: 1,
	DECIMAL_ROUND: 3,
	DECIMAL_FIT: 4
}
// scratch buffers used to extract the binary exponent of a float32 value
let f32Array = new Float32Array(1)
let u8Array = new Uint8Array(f32Array.buffer, 0, 4)
// Rounds a number to the nearest decimal that survives a 32-bit float round-trip,
// using mult10 indexed by the float's binary exponent (mirrors the 0xca decode path).
export function roundFloat32(float32Number) {
	f32Array[0] = float32Number
	let multiplier = mult10[((u8Array[3] & 0x7f) << 1) | (u8Array[2] >> 7)]
	return ((multiplier * float32Number + (float32Number > 0 ? 0.5 : -0.5)) >> 0) / multiplier
}
// Hook used by the struct/record acceleration module to install its readers and callbacks.
export function setReadStruct(updatedReadStruct, loadedStructs, saveState) {
	readStruct = updatedReadStruct;
	onLoadedStructures = loadedStructs;
	onSaveState = saveState;
}