├── .npmignore ├── assets ├── performance.png └── test-keymap.txt ├── encode.d.ts ├── tests ├── floats.json ├── example5.json ├── index.html ├── example3.json ├── example2.json ├── test-compatibility.cjs ├── example.json ├── test-incomplete.js ├── test-node-iterators.js ├── test-node-stream.js ├── test-keymap.js ├── strings2.json ├── example4.json ├── benchmark.cjs ├── benchmark-stream.cjs ├── sample-large.json ├── example-twitter.json └── test.js ├── decode.d.ts ├── SECURITY.md ├── index.js ├── browser.js ├── webpack.config.js ├── LICENSE ├── node-index.js ├── .gitignore ├── stream.js ├── rollup.config.js ├── package.json ├── index.d.ts ├── iterators.js ├── benchmark.md ├── README.md └── decode.js /.npmignore: -------------------------------------------------------------------------------- 1 | # Dependency directories 2 | node_modules/ 3 | tests/samples 4 | .vs 5 | build/ -------------------------------------------------------------------------------- /assets/performance.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/kriszyp/cbor-x/master/assets/performance.png -------------------------------------------------------------------------------- /encode.d.ts: -------------------------------------------------------------------------------- 1 | export { encode, encodeAsIterable, encodeAsAsyncIterable, Encoder, addExtension, FLOAT32_OPTIONS } from '.' 
2 | -------------------------------------------------------------------------------- /tests/floats.json: -------------------------------------------------------------------------------- 1 | [0.53232,542.5325,3252200000,6643.2,0.000000432,1.992e20,5.1,9.3242e-20,525.235,8899.32,522.42,2342.43,12211.1,8888.3,0.000432] -------------------------------------------------------------------------------- /decode.d.ts: -------------------------------------------------------------------------------- 1 | export { decode, decodeMultiple, Decoder, addExtension, clearSource,roundFloat32, isNativeAccelerationEnabled, 2 | Extension, Options, FLOAT32_OPTIONS, setMaxLimits, MAX_LIMITS_OPTIONS } from '.' 3 | -------------------------------------------------------------------------------- /SECURITY.md: -------------------------------------------------------------------------------- 1 | # Security Policy 2 | 3 | ## Supported Versions 4 | 5 | | Version | Supported | 6 | | ------- | ------------------ | 7 | | 0.9.x | :white_check_mark: | 8 | 9 | ## Reporting a Vulnerability 10 | 11 | Please report security vulnerabilities to kriszyp@gmail.com. 
12 | -------------------------------------------------------------------------------- /tests/example5.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "test", 3 | "greeting": "Hello, World!", 4 | "flag": true, 5 | "littleNum": 3, 6 | "biggerNum": 32254435, 7 | "decimal":1.332232, 8 | "bigDecimal": 3.5522E35, 9 | "negative": -54, 10 | "aNull": null, 11 | "more": "another string" 12 | } 13 | -------------------------------------------------------------------------------- /index.js: -------------------------------------------------------------------------------- 1 | export { Encoder, addExtension, encode, encodeAsIterable, encodeAsAsyncIterable, NEVER, ALWAYS, DECIMAL_ROUND, DECIMAL_FIT, REUSE_BUFFER_MODE } from './encode.js' 2 | export { Tag, Decoder, decodeMultiple, decode, FLOAT32_OPTIONS, clearSource, roundFloat32, isNativeAccelerationEnabled, setSizeLimits } from './decode.js' 3 | export { decodeIter, encodeIter } from './iterators.js' 4 | -------------------------------------------------------------------------------- /browser.js: -------------------------------------------------------------------------------- 1 | exports.Encoder = require('./encode').Encoder 2 | exports.Decoder = require('./decode').Decoder 3 | exports.addExtension = require('./encode').addExtension 4 | let encoder = new exports.Encoder({ useRecords: false }) 5 | exports.decode = encoder.decode 6 | exports.encode = encoder.encode 7 | Object.assign(exports, { 8 | ALWAYS:1, 9 | DECIMAL_ROUND: 3, 10 | DECIMAL_FIT: 4 11 | }) 12 | -------------------------------------------------------------------------------- /webpack.config.js: -------------------------------------------------------------------------------- 1 | var webpack = require('webpack') 2 | var path = require('path') 3 | module.exports = { 4 | entry: { 5 | index: './browser.js' 6 | }, 7 | output: { 8 | path: path.join(__dirname, 'dist'), 9 | library: 'CBOR', 10 | libraryTarget: 'umd' 
11 | }, 12 | node: { Buffer: false }, 13 | devtool: 'source-map', 14 | optimization: { 15 | minimize: true 16 | }, 17 | //mode: 'development' 18 | mode: 'production' 19 | }; 20 | -------------------------------------------------------------------------------- /tests/index.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 14 | 15 | 16 | 17 | 18 | 21 | 22 | 23 |
24 | 27 | 28 | 29 | -------------------------------------------------------------------------------- /tests/example3.json: -------------------------------------------------------------------------------- 1 | { 2 | "glossary": { 3 | "title": "example glossary", 4 | "GlossDiv": { 5 | "title": "S", 6 | "GlossList": { 7 | "GlossEntry": { 8 | "ID": "SGML", 9 | "SortAs": "SGML", 10 | "GlossTerm": "Standard Generalized Markup Language", 11 | "Acronym": "SGML", 12 | "Abbrev": "ISO 8879:1986", 13 | "GlossDef": { 14 | "para": "A meta-markup language, used to create markup languages such as DocBook.", 15 | "GlossSeeAlso": ["GML", "XML"] 16 | }, 17 | "GlossSee": "markup" 18 | } 19 | } 20 | } 21 | } 22 | } -------------------------------------------------------------------------------- /tests/example2.json: -------------------------------------------------------------------------------- 1 | {"widget": { 2 | "debug": "on", 3 | "window": { 4 | "title": "Sample Konfabulator Widget", 5 | "name": "main_window", 6 | "width": 500, 7 | "height": 500 8 | }, 9 | "image": { 10 | "src": "Images/Sun.png", 11 | "name": "sun1", 12 | "hOffset": 250, 13 | "vOffset": 250, 14 | "alignment": "center" 15 | }, 16 | "text": { 17 | "data": "Click Here", 18 | "size": 36, 19 | "style": "bold", 20 | "name": "text1", 21 | "hOffset": 250, 22 | "vOffset": 100, 23 | "alignment": "center", 24 | "onMouseUp": "sun1.opacity = (sun1.opacity / 100) * 90;" 25 | } 26 | }} -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2020 Kris Zyp 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | 
copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /node-index.js: -------------------------------------------------------------------------------- 1 | export { Encoder, addExtension, encode, encodeAsIterable, encodeAsAsyncIterable, NEVER, ALWAYS, DECIMAL_ROUND, DECIMAL_FIT, REUSE_BUFFER_MODE } from './encode.js' 2 | export { Tag, Decoder, decodeMultiple, decode, FLOAT32_OPTIONS, clearSource, roundFloat32, isNativeAccelerationEnabled, setSizeLimits } from './decode.js' 3 | export { EncoderStream, DecoderStream } from './stream.js' 4 | export { decodeIter, encodeIter } from './iterators.js' 5 | export const useRecords = false 6 | export const mapsAsObjects = true 7 | import { setExtractor } from './decode.js' 8 | import { createRequire } from 'module' 9 | 10 | const nativeAccelerationDisabled = process.env.CBOR_NATIVE_ACCELERATION_DISABLED !== undefined && process.env.CBOR_NATIVE_ACCELERATION_DISABLED.toLowerCase() === 'true'; 11 | 12 | if (!nativeAccelerationDisabled) { 13 | let extractor 14 | try { 15 | if (typeof require == 'function') 16 | extractor = require('cbor-extract') 17 | else 18 | extractor = createRequire(import.meta.url)('cbor-extract') 19 | if (extractor) 
20 | setExtractor(extractor.extractStrings) 21 | } catch (error) { 22 | // native module is optional 23 | } 24 | } 25 | -------------------------------------------------------------------------------- /tests/test-compatibility.cjs: -------------------------------------------------------------------------------- 1 | const data = require('./example4.json'); 2 | const cborX = require('..'); 3 | const chai = require('chai'); 4 | 5 | function tryRequire(module) { 6 | try { 7 | return require(module) 8 | } catch(error) { 9 | console.log(error) 10 | } 11 | } 12 | //if (typeof chai === 'undefined') { chai = require('chai') } 13 | const assert = chai.assert 14 | var cbor_module = tryRequire('cbor'); 15 | var decode = cborX.decode 16 | var encode = cborX.encode 17 | 18 | const addCompatibilitySuite = (data) => () => { 19 | if (cbor_module) { 20 | test('from cbor', function(){ 21 | var serialized = cbor_module.encode(data) 22 | var deserialized = decode(serialized) 23 | assert.deepEqual(deserialized, data) 24 | }) 25 | 26 | test('to cbor', function(){ 27 | var serialized = encode(data) 28 | var deserialized = cbor_module.decodeFirstSync(serialized) 29 | assert.deepEqual(deserialized, data) 30 | }) 31 | } 32 | } 33 | 34 | suite('cbor-x compatibility tests (example)', addCompatibilitySuite(require('./example.json'))) 35 | suite('cbor-x compatibility tests (example4)', addCompatibilitySuite(require('./example4.json'))) 36 | suite('cbor-x compatibility tests (example5)', addCompatibilitySuite(require('./example5.json'))) 37 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Logs 2 | logs 3 | *.log 4 | npm-debug.log* 5 | yarn-debug.log* 6 | yarn-error.log* 7 | dist 8 | 9 | # Runtime data 10 | pids 11 | *.pid 12 | *.seed 13 | *.pid.lock 14 | 15 | # Directory for instrumented libs generated by jscoverage/JSCover 16 | lib-cov 17 | 18 | # Coverage 
directory used by tools like istanbul 19 | coverage 20 | 21 | # nyc test coverage 22 | .nyc_output 23 | 24 | # Grunt intermediate storage (http://gruntjs.com/creating-plugins#storing-task-files) 25 | .grunt 26 | 27 | # Bower dependency directory (https://bower.io/) 28 | bower_components 29 | 30 | # node-waf configuration 31 | .lock-wscript 32 | 33 | # Compiled binary addons (http://nodejs.org/api/addons.html) 34 | build/Release 35 | 36 | # Dependency directories 37 | node_modules/ 38 | jspm_packages/ 39 | 40 | package-lock.json 41 | yarn.lock 42 | # Typescript v1 declaration files 43 | typings/ 44 | 45 | # Optional npm cache directory 46 | .npm 47 | 48 | # Optional eslint cache 49 | .eslintcache 50 | 51 | # Optional REPL history 52 | .node_repl_history 53 | 54 | # Output of 'npm pack' 55 | *.tgz 56 | 57 | # Yarn Integrity file 58 | .yarn-integrity 59 | 60 | # dotenv environment variables file 61 | .env 62 | tests/samples 63 | 64 | # Visual Studio Code directory 65 | .vscode 66 | .vs 67 | .idea 68 | 69 | build 70 | dist/test.js -------------------------------------------------------------------------------- /tests/example.json: -------------------------------------------------------------------------------- 1 | { 2 | "int0": 0, 3 | "int1": 1, 4 | "int1-": -1, 5 | "int8": 255, 6 | "int8-": -255, 7 | "int16": 256, 8 | "int16-": -256, 9 | "int32": 65536, 10 | "int32-": -65536, 11 | "nil": null, 12 | "true": true, 13 | "false": false, 14 | "float": 0.5, 15 | "float-": -0.5, 16 | "string0": "", 17 | "string1": "A", 18 | "string4": "foobarbaz", 19 | "string8": "Omnes viae Romam ducunt.", 20 | "string16": "L’homme n’est qu’un roseau, le plus faible de la nature ; mais c’est un roseau pensant. Il ne faut pas que l’univers entier s’arme pour l’écraser : une vapeur, une goutte d’eau, suffit pour le tuer. 
Mais, quand l’univers l’écraserait, l’homme serait encore plus noble que ce qui le tue, puisqu’il sait qu’il meurt, et l’avantage que l’univers a sur lui, l’univers n’en sait rien. Toute notre dignité consiste donc en la pensée. C’est de là qu’il faut nous relever et non de l’espace et de la durée, que nous ne saurions remplir. Travaillons donc à bien penser : voilà le principe de la morale.", 21 | "array0": [], 22 | "array1": [ 23 | "foo" 24 | ], 25 | "array8": [ 26 | 1, 27 | 2, 28 | 4, 29 | 8, 30 | 16, 31 | 32, 32 | 64, 33 | 128, 34 | 256, 35 | 512, 36 | 1024, 37 | 2048, 38 | 4096, 39 | 8192, 40 | 16384, 41 | 32768, 42 | 65536, 43 | 131072, 44 | 262144, 45 | 524288, 46 | 1048576 47 | ], 48 | "map0": {}, 49 | "map1": { 50 | "foo": "bar" 51 | } 52 | } -------------------------------------------------------------------------------- /tests/test-incomplete.js: -------------------------------------------------------------------------------- 1 | import { encode } from '../index.js' 2 | import { assert } from 'chai' 3 | import { Encoder } from '../encode.js' 4 | 5 | const tests = { 6 | string: 'interesting string', 7 | number: 12345, 8 | buffer: Buffer.from('hello world'), 9 | bigint: 12345678910n, 10 | array: [1, 2, 3, 4, 5, 6, 7, 8, 9, 10], 11 | 'many-strings': [], 12 | set: new Set('abcdefghijklmnopqrstuvwxyz'.split('')), 13 | object: { a: 1, b: 2, c: 3, d: 4, e: 5, f: 6 } 14 | } 15 | for (let i = 0; i < 100; i++) { 16 | tests['many-strings'].push('test-data-' + i) 17 | } 18 | 19 | suite('encode and decode tests with partial values', function () { 20 | const encoder = new Encoder({ objectMode: true, structures: [] }) 21 | 22 | for (const [label, testData] of Object.entries(tests)) { 23 | test(label, () => { 24 | const encoded = encoder.encode(testData) 25 | assert.isTrue(Buffer.isBuffer(encoded), 'encode returns a Buffer') 26 | assert.deepStrictEqual(encoder.decode(encoded, encoded.length, true), testData, 'full buffer decodes well') 27 | const firstHalf = 
encoded.slice(0, Math.floor(encoded.length / 2)) 28 | let value 29 | try { 30 | value = encoder.decode(firstHalf, firstHalf.length, true) 31 | } catch (err) { 32 | if (err.incomplete !== true) { 33 | assert.fail(`Should throw an error with .incomplete set to true, instead threw error <${err}>`) 34 | } else { 35 | return; // victory! correct outcome! 36 | } 37 | } 38 | assert.fail(`Should throw an error with .incomplete set to true, instead returned value ${JSON.stringify(value)}`) 39 | }) 40 | } 41 | }) 42 | -------------------------------------------------------------------------------- /assets/test-keymap.txt: -------------------------------------------------------------------------------- 1 | Basic No Recs: Small 2 | Buffer: 100% (92) 3 | Encode: 100% (0.01) 4 | Decode: 100% (0.01) 5 | PreMap No Recs: Small 6 | Buffer: 90% (83) 7 | Encode: 110% (0.011) 8 | Decode: 100% (0.01) 9 | KeyMap No Recs: Small 10 | Buffer: 84% (77) 11 | Encode: 90% (0.009) 12 | Decode: 70% (0.007) 13 | Optima No Recs: Small 14 | Buffer: 90% (83) 15 | Encode: 100% (0.01) 16 | Decode: 90% (0.009) 17 | Basic Wi Recs: Small 18 | Buffer: 98% (90) 19 | Encode: 100% (0.01) 20 | Decode: 90% (0.009) 21 | PreMap Wi Recs: Small 22 | Buffer: 71% (65) 23 | Encode: 110% (0.011) 24 | Decode: 80% (0.008) 25 | KeyMap Wi Recs: Small 26 | Buffer: 90% (83) 27 | Encode: 120% (0.012) 28 | Decode: 70% (0.007) 29 | Optima Wi Recs: Small 30 | Buffer: 71% (65) 31 | Encode: 110% (0.011) 32 | Decode: 60% (0.006) 33 | Basic No Recs: Large 34 | Buffer: 100% (24737) 35 | Encode: 100% (0.157) 36 | Decode: 100% (0.614) 37 | PreMap No Recs: Large 38 | Buffer: 88% (21737) 39 | Encode: 145% (0.227) 40 | Decode: 116% (0.714) 41 | KeyMap No Recs: Large 42 | Buffer: 84% (20737) 43 | Encode: 143% (0.224) 44 | Decode: 95% (0.586) 45 | Optima No Recs: Large 46 | Buffer: 88% (21737) 47 | Encode: 175% (0.274) 48 | Decode: 123% (0.753) 49 | Basic Wi Recs: Large 50 | Buffer: 76% (18748) 51 | Encode: 112% (0.176) 52 | Decode: 82% 
(0.502) 53 | PreMap Wi Recs: Large 54 | Buffer: 76% (18737) 55 | Encode: 136% (0.214) 56 | Decode: 115% (0.709) 57 | KeyMap Wi Recs: Large 58 | Buffer: 76% (18744) 59 | Encode: 166% (0.26) 60 | Decode: 81% (0.5) 61 | Optima Wi Recs: Large 62 | Buffer: 76% (18737) 63 | Encode: 185% (0.291) 64 | Decode: 118% (0.725) 65 | -------------------------------------------------------------------------------- /stream.js: -------------------------------------------------------------------------------- 1 | import { Transform } from 'stream' 2 | import { Encoder } from './encode.js' 3 | import { checkedRead, getPosition, Decoder, clearSource } from './decode.js' 4 | var DEFAULT_OPTIONS = {objectMode: true} 5 | 6 | export class EncoderStream extends Transform { 7 | constructor(options) { 8 | if (!options) 9 | options = {} 10 | options.writableObjectMode = true 11 | super(options) 12 | options.sequential = true 13 | this.encoder = options.encoder || new Encoder(options) 14 | } 15 | async _transform(value, encoding, callback) { 16 | try { 17 | for await (let chunk of this.encoder.encodeAsAsyncIterable(value)) { 18 | this.push(chunk) 19 | } 20 | callback() 21 | } catch(error) { callback (error) } 22 | } 23 | } 24 | 25 | export class DecoderStream extends Transform { 26 | constructor(options) { 27 | if (!options) 28 | options = {} 29 | options.objectMode = true 30 | super(options) 31 | options.structures = [] 32 | this.decoder = options.decoder || new Decoder(options) 33 | } 34 | _transform(chunk, encoding, callback) { 35 | if (this.incompleteBuffer) { 36 | chunk = Buffer.concat([this.incompleteBuffer, chunk]) 37 | this.incompleteBuffer = null 38 | } 39 | let values 40 | try { 41 | values = this.decoder.decodeMultiple(chunk) 42 | } catch(error) { 43 | if (error.incomplete) { 44 | this.incompleteBuffer = chunk.slice(error.lastPosition) 45 | values = error.values 46 | } else { 47 | return callback(error) 48 | } 49 | } finally { 50 | for (let value of values || []) { 51 | if (value === 
null) 52 | value = this.getNullValue() 53 | this.push(value) 54 | } 55 | } 56 | callback() 57 | } 58 | getNullValue() { 59 | return Symbol.for(null) 60 | } 61 | } 62 | -------------------------------------------------------------------------------- /rollup.config.js: -------------------------------------------------------------------------------- 1 | import terser from '@rollup/plugin-terser'; 2 | import json from "@rollup/plugin-json"; 3 | import replace from "@rollup/plugin-replace"; 4 | 5 | export default [ 6 | { 7 | input: "node-index.js", 8 | output: [ 9 | { 10 | file: "dist/node.cjs", 11 | format: "cjs", 12 | sourcemap: true 13 | } 14 | ] 15 | }, 16 | { 17 | input: "index.js", 18 | output: { 19 | file: "dist/index.js", 20 | format: "umd", 21 | name: "CBOR", 22 | sourcemap: true 23 | } 24 | }, 25 | { 26 | input: "index.js", 27 | plugins: [ 28 | replace({ Function: 'BlockedFunction '}) 29 | ], 30 | output: { 31 | file: "dist/index-no-eval.cjs", 32 | format: "umd", 33 | name: "CBOR", 34 | sourcemap: true 35 | }, 36 | }, 37 | { 38 | input: "decode.js", 39 | plugins: [ 40 | replace({ Function: 'BlockedFunction '}) 41 | ], 42 | output: { 43 | file: "dist/decode-no-eval.cjs", 44 | format: "umd", 45 | name: "CBOR", 46 | sourcemap: true 47 | }, 48 | }, 49 | { 50 | input: "index.js", 51 | plugins: [ 52 | terser({}) 53 | ], 54 | output: { 55 | file: "dist/index.min.js", 56 | format: "umd", 57 | name: "CBOR", 58 | sourcemap: true 59 | } 60 | }, 61 | { 62 | input: "index.js", 63 | plugins: [ 64 | replace({ Function: 'BlockedFunction '}), 65 | terser({}) 66 | ], 67 | output: { 68 | file: "dist/index-no-eval.min.js", 69 | format: "umd", 70 | name: "CBOR", 71 | sourcemap: true 72 | } 73 | }, 74 | { 75 | input: "tests/test.js", 76 | plugins: [json()], 77 | external: ['chai', '../index.js'], 78 | output: { 79 | file: "dist/test.js", 80 | format: "iife", 81 | sourcemap: true, 82 | globals: { 83 | chai: 'chai', 84 | './index.js': 'CBOR', 85 | }, 86 | } 87 | } 88 | ]; 89 | 
-------------------------------------------------------------------------------- /tests/test-node-iterators.js: -------------------------------------------------------------------------------- 1 | import { encodeIter, decodeIter } from '../index.js' 2 | import { decode } from '../index.js' 3 | import { assert } from 'chai' 4 | 5 | const tests = [ 6 | null, 7 | false, 8 | true, 9 | 'interesting string', 10 | 12345, 11 | 123456789n, 12 | 123.456, 13 | Buffer.from('Hello World'), 14 | new Set('abcdefghijklmnopqrstuvwxyz'.split('')) 15 | ] 16 | 17 | suite('cbor-x iterators interface tests', function () { 18 | test('sync encode iterator', () => { 19 | const encodings = [...encodeIter(tests)] 20 | const decodings = encodings.map(x => decode(x)) 21 | assert.deepStrictEqual(decodings, tests) 22 | }) 23 | 24 | test('async encode iterator', async () => { 25 | async function * generate () { 26 | for (const test of tests) { 27 | await new Promise((resolve, reject) => setImmediate(resolve)) 28 | yield test 29 | } 30 | } 31 | 32 | const chunks = [] 33 | for await (const chunk of encodeIter(generate())) { 34 | chunks.push(chunk) 35 | } 36 | 37 | const decodings = chunks.map(x => decode(x)) 38 | assert.deepStrictEqual(decodings, tests) 39 | }) 40 | 41 | test('sync encode and decode iterator', () => { 42 | const encodings = [...encodeIter(tests)] 43 | assert.isTrue(encodings.every(v => Buffer.isBuffer(v))) 44 | const decodings = [...decodeIter(encodings)] 45 | assert.deepStrictEqual(decodings, tests) 46 | 47 | // also test decodings work with buffers multiple values in a buffer 48 | const concatEncoding = Buffer.concat([...encodings]) 49 | const decodings2 = [...decodeIter([concatEncoding])] 50 | assert.deepStrictEqual(decodings2, tests) 51 | 52 | // also test decodings work with partial buffers that don't align to values perfectly 53 | const half1 = concatEncoding.slice(0, Math.floor(concatEncoding.length / 2)) 54 | const half2 = 
concatEncoding.slice(Math.floor(concatEncoding.length / 2)) 55 | const decodings3 = [...decodeIter([half1, half2])] 56 | assert.deepStrictEqual(decodings3, tests) 57 | }) 58 | 59 | test('async encode and decode iterator', async () => { 60 | async function * generator () { 61 | for (const obj of tests) { 62 | await new Promise((resolve, reject) => setImmediate(resolve)) 63 | yield obj 64 | } 65 | } 66 | const yields = [] 67 | for await (const value of decodeIter(encodeIter(generator()))) { 68 | yields.push(value) 69 | } 70 | assert.deepStrictEqual(yields, tests) 71 | }) 72 | }) -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "cbor-x", 3 | "author": "Kris Zyp", 4 | "version": "1.6.2", 5 | "description": "Ultra-fast and conformant CBOR (RFC 8949) implementation with support for numerous tag extensions including records and structured cloning", 6 | "license": "MIT", 7 | "types": "./index.d.ts", 8 | "main": "./dist/node.cjs", 9 | "module": "./index.js", 10 | "keywords": [ 11 | "CBOR", 12 | "performance", 13 | "structured", 14 | "clone" 15 | ], 16 | "repository": { 17 | "type": "git", 18 | "url": "http://github.com/kriszyp/cbor-x" 19 | }, 20 | "scripts": { 21 | "benchmark": "node ./tests/benchmark.cjs", 22 | "build": "rollup -c", 23 | "dry-run": "npm publish --dry-run", 24 | "prepare": "npm run build", 25 | "test": "mocha tests/test**.*js -u tdd --experimental-json-modules" 26 | }, 27 | "type": "module", 28 | "exports": { 29 | ".": { 30 | "node": { 31 | "require": "./dist/node.cjs", 32 | "import": "./node-index.js" 33 | }, 34 | "types": { 35 | "require": "./index.d.cts", 36 | "import": "./index.d.ts" 37 | }, 38 | "default": "./index.js" 39 | }, 40 | "./encode": { 41 | "node": { 42 | "import": "./index.js", 43 | "require": "./dist/node.cjs" 44 | }, 45 | "default": { 46 | "import": "./encode.js" 47 | } 48 | }, 49 | "./decode": { 
50 | "node": { 51 | "import": "./index.js", 52 | "require": "./dist/node.cjs" 53 | }, 54 | "default": { 55 | "import": "./decode.js" 56 | } 57 | }, 58 | "./decode-no-eval": { 59 | "types": "./decode.d.ts", 60 | "default": "./dist/decode-no-eval.cjs" 61 | }, 62 | "./index-no-eval": { 63 | "types": "./index.d.ts", 64 | "default": "./dist/index-no-eval.cjs" 65 | }, 66 | "./package.json": "./package.json" 67 | }, 68 | "files": [ 69 | "/dist", 70 | "*.md", 71 | "/*.js", 72 | "/*.ts" 73 | ], 74 | "browser": { 75 | "node:buffer": false 76 | }, 77 | "optionalDependencies": { 78 | "cbor-extract": "^2.2.0" 79 | }, 80 | "devDependencies": { 81 | "@rollup/plugin-json": "^5.0.1", 82 | "@rollup/plugin-replace": "^5.0.1", 83 | "@rollup/plugin-terser": "^0.1.0", 84 | "@types/node": "latest", 85 | "async": "^3", 86 | "cbor": "^5", 87 | "cbor-sync": "^1.0.4", 88 | "chai": "^4.3.4", 89 | "cpy-cli": "^4.1.0", 90 | "esm": "^3.2.25", 91 | "mocha": "^10.1.0", 92 | "rollup": "^3.2.5" 93 | } 94 | } 95 | -------------------------------------------------------------------------------- /index.d.ts: -------------------------------------------------------------------------------- 1 | export enum FLOAT32_OPTIONS { 2 | NEVER = 0, 3 | ALWAYS = 1, 4 | DECIMAL_ROUND = 3, 5 | DECIMAL_FIT = 4 6 | } 7 | export interface SizeLimitOptions { 8 | maxArraySize: number; 9 | maxMapSize: number; 10 | maxObjectSize: number; 11 | } 12 | export interface Options { 13 | alwaysUseFloat?: boolean 14 | useFloat32?: FLOAT32_OPTIONS 15 | useRecords?: boolean 16 | structures?: {}[] 17 | structuredClone?: boolean 18 | mapsAsObjects?: boolean 19 | variableMapSize?: boolean 20 | copyBuffers?: boolean 21 | bundleStrings?: boolean 22 | useTimestamp32?: boolean 23 | largeBigIntToFloat?: boolean 24 | encodeUndefinedAsNil?: boolean 25 | maxSharedStructures?: number 26 | maxOwnStructures?: number 27 | useSelfDescribedHeader?: boolean 28 | useToJSON?: boolean 29 | keyMap?: {} 30 | shouldShareStructure?: (keys: string[]) => 
boolean 31 | getStructures?(): {}[] 32 | saveStructures?(structures: {}[]): boolean | void 33 | onInvalidDate?: () => any 34 | tagUint8Array?: boolean 35 | pack?: boolean 36 | sequential?: boolean 37 | } 38 | type ClassOf = new (...args: any[]) => T; 39 | interface Extension { 40 | Class: ClassOf 41 | tag: number 42 | encode(value: T, encodeFn: (data: R) => Uint8Array): Buffer | Uint8Array 43 | decode(item: R): T 44 | } 45 | export class Decoder { 46 | constructor(options?: Options) 47 | decode(messagePack: Buffer | Uint8Array): any 48 | decodeMultiple(messagePack: Buffer | Uint8Array, forEach?: (value: any) => any): [] | void 49 | } 50 | export function setMaxLimits(options: SizeLimitOptions): void 51 | export function decode(messagePack: Buffer | Uint8Array): any 52 | export function decodeMultiple(messagePack: Buffer | Uint8Array, forEach?: (value: any) => any): [] | void 53 | export function addExtension(extension: Extension): void 54 | export function clearSource(): void 55 | export function roundFloat32(float32Number: number): number 56 | export let isNativeAccelerationEnabled: boolean 57 | 58 | export class Encoder extends Decoder { 59 | encode(value: any): Buffer 60 | } 61 | export function encode(value: any): Buffer 62 | export function encodeAsIterable(value: any): Iterable> 63 | export function encodeAsAsyncIterable(value: any): AsyncIterable 64 | 65 | import { Transform, Readable } from 'stream' 66 | 67 | export as namespace CBOR; 68 | export class DecoderStream extends Transform { 69 | constructor(options?: Options | { highWaterMark: number, emitClose: boolean, allowHalfOpen: boolean }) 70 | } 71 | export class EncoderStream extends Transform { 72 | constructor(options?: Options | { highWaterMark: number, emitClose: boolean, allowHalfOpen: boolean }) 73 | } 74 | 75 | export class Tag { 76 | constructor(value: any, tagNumber: number) 77 | value: any 78 | tag: number 79 | } -------------------------------------------------------------------------------- 
/tests/test-node-stream.js: -------------------------------------------------------------------------------- 1 | import { EncoderStream, DecoderStream } from '../node-index.js' 2 | import stream from 'stream' 3 | import chai from 'chai' 4 | import util from 'util' 5 | import fs from 'fs' 6 | const finished = util.promisify(stream.finished) 7 | var assert = chai.assert 8 | 9 | suite('cbor-x node stream tests', function(){ 10 | test('serialize/parse stream', () => { 11 | const serializeStream = new EncoderStream({ 12 | }) 13 | const parseStream = new DecoderStream() 14 | serializeStream.pipe(parseStream) 15 | const received = [] 16 | parseStream.on('data', data => { 17 | received.push(data) 18 | }) 19 | const messages = [{ 20 | name: 'first' 21 | }, { 22 | name: 'second' 23 | }, { 24 | name: 'third' 25 | }, { 26 | name: 'third', 27 | extra: [1, 3, { foo: 'hi'}, 'bye'] 28 | }] 29 | for (const message of messages) 30 | serializeStream.write(message) 31 | return new Promise((resolve, reject) => { 32 | setTimeout(() => { 33 | assert.deepEqual(received, messages) 34 | resolve() 35 | }, 10) 36 | }) 37 | }) 38 | test('stream from buffer', () => new Promise(async resolve => { 39 | const parseStream = new DecoderStream() 40 | let values = [] 41 | parseStream.on('data', (value) => { 42 | values.push(value) 43 | }) 44 | parseStream.on('end', () => { 45 | assert.deepEqual(values, [1, 2]) 46 | resolve() 47 | }) 48 | let bufferStream = new stream.Duplex() 49 | bufferStream.pipe(parseStream) 50 | bufferStream.push(new Uint8Array([1, 2])) 51 | bufferStream.push(null) 52 | })) 53 | test('stream to/from file', (done) => { 54 | const recordNum = 10000 55 | 56 | const enc = new EncoderStream({ 57 | //bundleStrings: true, // TODO: bundle strings is incompatible with stream right now 58 | }) 59 | 60 | const read = () => { 61 | console.time('READ') 62 | 63 | const dec = new DecoderStream({ 64 | //bundleStrings: true, 65 | }) 66 | 67 | fs.createReadStream('test.cbor') 68 | .on('data', (c) 
=> console.log(c.length)) 69 | .pipe(dec) 70 | .on('data', (data) => { 71 | assert.equal(data.str, 'TEST_STR'); 72 | }) 73 | .on('end', () => console.timeEnd('READ') || done()) 74 | 75 | } 76 | 77 | enc.pipe(fs.createWriteStream('test.cbor')) 78 | enc.on('end', () => console.timeEnd('GEN') || read()) 79 | 80 | console.log('Generating') 81 | 82 | console.time('GEN') 83 | 84 | const curr = Date.now() 85 | 86 | for (let i = 0; i < recordNum; ++i) { 87 | enc.write({ i, str: 'TEST_STR', ts: Date.now() }) 88 | } 89 | 90 | enc.end() 91 | }) 92 | 93 | teardown(function() { 94 | try { 95 | fs.unlinkSync('test.cbor') 96 | }catch(error){} 97 | }) 98 | }) 99 | 100 | -------------------------------------------------------------------------------- /tests/test-keymap.js: -------------------------------------------------------------------------------- 1 | import { Encoder } from '../index.js' 2 | import assert from 'assert' 3 | import { Console } from 'console' 4 | import cborExtract from 'cbor-extract' 5 | 6 | const small = [ 7 | { bn: '/3303/0/5700', bt: 1278887, v: 35.5 },{ t: 10, v: 34 },{ t: 20, v: 33 },{ t: 30, v: 32 },{ t: 40, v: 31 },{ t: 50, v: 30 } 8 | ] 9 | 10 | 11 | let large = [] 12 | 13 | for (let i = 0; i < 1000; i++) large.push({ t: 100+i, n: '1', vs: 'value-'+i } ) 14 | 15 | let senmlKeys = { bs: -6, bv: -5, bu: -4, bt: -3, bn: -2, bver: -1, n: 0, u: 1, v: 2, vs: 3, vb: 4, s: 5, t: 6, ut: 7, vd: 8 } 16 | 17 | function reverse(obj) { 18 | let rev = new Map() 19 | for (let [k,v] of Object.entries(obj)) rev.set(v,k) 20 | return rev 21 | } 22 | 23 | let senmlRevs = reverse(senmlKeys) 24 | 25 | function preMapDecode(maps) { 26 | let data = [] 27 | for (let map of maps) { 28 | let item = {} 29 | map.forEach((v,k) => item[senmlRevs.has(k) ? 
senmlRevs.get(k) : k] = v) 30 | data.push(item) 31 | } 32 | return data 33 | } 34 | 35 | function preMapEncode(data) { 36 | let maps = [] 37 | for (let r of data) { 38 | let map = new Map() 39 | Object.entries(r).map(([k,v]) => map.set(senmlKeys[k], v)) 40 | maps.push(map) 41 | } 42 | return maps 43 | } 44 | 45 | function perfTest(data, label) { 46 | let basic = test(data, {useRecords: false}) 47 | compare(`Basic No Recs: ${label}`, basic, basic) 48 | compare(`PreMap No Recs: ${label}`, test(data, {useRecords: false}, true), basic) 49 | compare(`KeyMap No Recs: ${label}`, test(data, {useRecords: false, keyMap: senmlKeys}), basic) 50 | compare(`Optima No Recs: ${label}`, test(data, {useRecords: false, _keyMap: senmlKeys}), basic) 51 | compare(`Basic Wi Recs: ${label}`, test(data, {useRecords: true}), basic) 52 | compare(`PreMap Wi Recs: ${label}`, test(data, {useRecords: true}, true), basic) 53 | compare(`KeyMap Wi Recs: ${label}`, test(data, {useRecords: true, keyMap: senmlKeys}), basic) 54 | compare(`Optima Wi Recs: ${label}`, test(data, {useRecords: true, _keyMap: senmlKeys}), basic) 55 | } 56 | 57 | function compare(label, r1, r2) { 58 | if (!r2) r2 = r1 59 | console.log(label) 60 | let pct = (n1, n2) => Math.round(100 * (n2/n1)) 61 | console.log(` Buffer: ${pct(r2.bufLen, r1.bufLen)}% \t(${r1.bufLen})`) 62 | console.log(` Encode: ${pct(r2.encAvg, r1.encAvg)}% \t(${r1.encAvg})`) 63 | console.log(` Decode: ${pct(r2.decAvg, r1.decAvg)}% \t(${r1.decAvg})`) 64 | } 65 | 66 | function test(data, opts, preMap, its=10) { 67 | let cbor = new Encoder(opts) 68 | let decode = (b) => preMap ? preMapDecode(cbor.decode(b)) : cbor.mapDecode(b) 69 | let encode = (d) => preMap ? 
cbor.encode(preMapEncode(d)) : cbor.mapEncode(d) 70 | let buff = encode(data) 71 | let t1 = Date.now() 72 | for (let i = 0; i < its; i++) assert.deepEqual(encode(data), buff) 73 | let t2 = Date.now() 74 | for (let i = 0; i < its; i++) assert.deepEqual(decode(buff), data) 75 | let t3 = Date.now() 76 | return {bufLen: buff.length, encAvg: (t2-t1)/its, decAvg: (t3-t2)/its } 77 | } 78 | 79 | perfTest(small, 'Small') 80 | perfTest(large, 'Large') 81 | 82 | -------------------------------------------------------------------------------- /iterators.js: -------------------------------------------------------------------------------- 1 | import { Encoder } from './encode.js' 2 | import { Decoder } from './decode.js' 3 | 4 | /** 5 | * Given an Iterable first argument, returns an Iterable where each value is encoded as a Buffer 6 | * If the argument is only Async Iterable, the return value will be an Async Iterable. 7 | * @param {Iterable|Iterator|AsyncIterable|AsyncIterator} objectIterator - iterable source, like a Readable object stream, an array, Set, or custom object 8 | * @param {options} [options] - cbor-x Encoder options 9 | * @returns {IterableIterator|Promise.} 10 | */ 11 | export function encodeIter (objectIterator, options = {}) { 12 | if (!objectIterator || typeof objectIterator !== 'object') { 13 | throw new Error('first argument must be an Iterable, Async Iterable, or a Promise for an Async Iterable') 14 | } else if (typeof objectIterator[Symbol.iterator] === 'function') { 15 | return encodeIterSync(objectIterator, options) 16 | } else if (typeof objectIterator.then === 'function' || typeof objectIterator[Symbol.asyncIterator] === 'function') { 17 | return encodeIterAsync(objectIterator, options) 18 | } else { 19 | throw new Error('first argument must be an Iterable, Async Iterable, Iterator, Async Iterator, or a Promise') 20 | } 21 | } 22 | 23 | function * encodeIterSync (objectIterator, options) { 24 | const encoder = new Encoder(options) 25 | for (const value 
of objectIterator) { 26 | yield encoder.encode(value) 27 | } 28 | } 29 | 30 | async function * encodeIterAsync (objectIterator, options) { 31 | const encoder = new Encoder(options) 32 | for await (const value of objectIterator) { 33 | yield encoder.encode(value) 34 | } 35 | } 36 | 37 | /** 38 | * Given an Iterable/Iterator input which yields buffers, returns an IterableIterator which yields sync decoded objects 39 | * Or, given an Async Iterable/Iterator which yields promises resolving in buffers, returns an AsyncIterableIterator. 40 | * @param {Iterable|Iterator|AsyncIterable|AsyncIterableIterator} bufferIterator 41 | * @param {object} [options] - Decoder options 42 | * @returns {IterableIterator|Promise. { 52 | let yields 53 | // if there's incomplete data from previous chunk, concatinate and try again 54 | if (incomplete) { 55 | chunk = Buffer.concat([incomplete, chunk]) 56 | incomplete = undefined 57 | } 58 | 59 | try { 60 | yields = decoder.decodeMultiple(chunk) 61 | } catch (err) { 62 | if (err.incomplete) { 63 | incomplete = chunk.slice(err.lastPosition) 64 | yields = err.values 65 | } else { 66 | throw err 67 | } 68 | } 69 | return yields 70 | } 71 | 72 | if (typeof bufferIterator[Symbol.iterator] === 'function') { 73 | return (function * iter () { 74 | for (const value of bufferIterator) { 75 | yield * parser(value) 76 | } 77 | })() 78 | } else if (typeof bufferIterator[Symbol.asyncIterator] === 'function') { 79 | return (async function * iter () { 80 | for await (const value of bufferIterator) { 81 | yield * parser(value) 82 | } 83 | })() 84 | } 85 | } 86 | -------------------------------------------------------------------------------- /benchmark.md: -------------------------------------------------------------------------------- 1 | Here are more comprehensive benchmarks. 
This is a comparison with the next fastest JS projects
27 | 28 | Here is a benchmark of streaming data (again borrowed from `msgpack-lite`'s benchmarking), where cbor-x is able to take advantage of the structured record extension and really pull away from other tools: 29 | 30 | operation (1000000 x 2) | op | ms | op/s 31 | ------------------------------------------------ | ------: | ----: | -----: 32 | new EncoderStream().write(obj); | 1000000 | 533 | 1876172 33 | new DecoderStream().write(buf); | 1000000 | 297 | 3367003 34 | stream.write(msgpack.encode(obj)); | 1000000 | 3179 | 314564 35 | stream.write(msgpack.decode(buf)); | 1000000 | 2151 | 464900 36 | stream.write(notepack.encode(obj)); | 1000000 | 944 | 1059322 37 | stream.write(notepack.decode(buf)); | 1000000 | 1131 | 884173 38 | msgpack.Encoder().on("data",ondata).encode(obj); | 1000000 | 1687 | 592768 39 | msgpack.createDecodeStream().write(buf); | 1000000 | 2084 | 479846 40 | msgpack.createEncodeStream().write(obj); | 1000000 | 1475 | 677966 41 | msgpack.Decoder().on("data",ondata).decode(buf); | 1000000 | 2264 | 441696 42 | 43 | 44 | 45 | These are the benchmarks from notepack package. 
The larger test data for these benchmarks is very heavily weighted with large binary/buffer data and objects with extreme numbers of keys (much more than I typically see with real-world data, but YMMV): 46 | 47 | node ./benchmarks/encode 48 | 49 | library | tiny | small | medium | large 50 | ---------------- | ----------------: | --------------: | ---------------| -------: 51 | notepack | 2,171,621 ops/sec | 546,905 ops/sec | 29,578 ops/sec | 265 ops/sec 52 | msgpack-js | 967,682 ops/sec | 184,455 ops/sec | 20,556 ops/sec | 259 ops/sec 53 | cbor-x | 2,392,826 ops/sec | 556,915 ops/sec | 70,573 ops/sec | 313 ops/sec 54 | msgpack-lite | 553,143 ops/sec | 132,318 ops/sec | 11,816 ops/sec | 186 ops/sec 55 | @msgpack/msgpack | 2,157,655 ops/sec | 573,236 ops/sec | 25,864 ops/sec | 90.26 ops/sec 56 | 57 | 58 | node ./benchmarks/decode 59 | 60 | library | tiny | small | medium | large 61 | ---------------- | ----------------: | --------------: | --------------- | -------: 62 | notepack | 2,220,904 ops/sec | 560,630 ops/sec | 28,177 ops/sec | 275 ops/sec 63 | msgpack-js | 965,719 ops/sec | 222,047 ops/sec | 21,431 ops/sec | 257 ops/sec 64 | cbor-x | 2,320,046 ops/sec | 589,167 ops/sec | 70,299 ops/sec | 329 ops/sec 65 | cbor-x records | 3,750,547 ops/sec | 912,419 ops/sec | 136,853 ops/sec | 733 ops/sec 66 | msgpack-lite | 569,222 ops/sec | 129,008 ops/sec | 12,424 ops/sec | 180 ops/sec 67 | @msgpack/msgpack | 2,089,697 ops/sec | 557,507 ops/sec | 20,256 ops/sec | 85.03 ops/sec 68 | 69 | This was run by adding the cbor-x to the benchmarks for notepack. 70 | 71 | All benchmarks were performed on Node 14.8.0 (Windows i7-4770 3.4Ghz). 
They can be run with: 72 | npm install --no-save msgpack msgpack-js @msgpack/msgpack msgpack-lite notepack avsc 73 | node tests/benchmark 74 | -------------------------------------------------------------------------------- /tests/strings2.json: -------------------------------------------------------------------------------- 1 | ["metadata","Designs","Randomized Controlled Trial","Types","BriefSummary","To determine the efficacy, long-term safety, and tolerability of alirocumab , mg every ,\n weeks (Q,W), in comparison with placebo, as well as its potential as a starting regimen. The\n dose regimen of , mg every , weeks (Q,W), as used in other studies, was added as a\n calibrator.","Abstract","To determine the efficacy, long-term safety, and tolerability of alirocumab , mg every ,\n weeks (Q,W), in comparison with placebo, as well as its potential as a starting regimen. The\n dose regimen of , mg every , weeks (Q,W), as used in other studies, was added as a\n calibrator.","Acronym","null","ArticleId","Qy,gwKWSoaWRmbmFEQA","Authors","null","CochraneID","null","Confidential","false","CorporateAuthor","null","Country","Bulgaria, Canada, Hungary, Israel, Norway, Slovakia, United Kingdom, United States","CustomData","null","DatabaseType","ClinicalTrials.gov","DOI","null","EmbaseAccessionNumber","null","Emtree","null","ErrataText","null","FullTextURL","null","Institution","null","ISSN","null","Issue","null","JournalTitle","null","MedlineID","null","MeSH","Hypercholesterolemia|Antibodies, Monoclonal","Pages","null","ParentChildStatus","null","ParentID","null","PublicationDate","March","PublicationYear","PubType","null","ReferenceStudy","null","SecondarySourceID","null","Source","Regeneron Pharmaceuticals","SourceReferenceId","NCT","TaStudyDesign","Randomized","Title","A Randomized, Double-Blind, Placebo-Controlled Study to Evaluate the Efficacy and Safety of an Every Four Weeks Treatment Regimen of Alirocumab in Patients With Primary 
Hypercholesterolemia","TrialOutcome","null","Volume","null","Id","Created","VersionNo","ExtractData","null","Digitized","null","IsRapidExtract","false","IsUploaded","false","design","Randomized Controlled Trial","conditions","label","Cholesterol Total Increased","id","SUE_c","phase","name","NCT","trialIds","NCT","acronyms","outcomeCount","id","groups","Id","RefId","B,|O,~Alirocumab , mg Q,W/Up , mg Q,W Without Concomitant Statin","OriginalName","Alirocumab , mg Q,W/Up , mg Q,W Without Concomitant Statin","N","age","ageSD","male","Interventions","termIds","SUBYEL","SUB_Oc","SUNUVb","analyzeAs","Alirocumab","analyzableScore","matchingScore","Id","zB","RefId","B,|O,~Alirocumab , mg Q,W/Up , mg Q,W Without Concomitant Statin","OriginalName","Alirocumab , mg Q,W/Up , mg Q,W Without Concomitant Statin","N","age","ageSD","male","Interventions","termIds","SUBYEL","SUB_Oc","analyzeAs","Statins","analyzableScore","matchingScore","Id","RefId","B,|O,~Placebo Q,W Without Concomitant Statin","OriginalName","Placebo Q,W Without Concomitant Statin","N","age","ageSD","male","Interventions","termIds","SUGeLS","SUBYEL","SUB_Oc","analyzeAs","Control","analyzableScore","matchingScore","Id","tv","RefId","OriginalName","Alirocumab , mg Q,W/Up , mg Q,W","Interventions","termIds","SUCO","SUNUVb","Id","jt","RefId","B,|O,~Alirocumab , mg Q,W/Up , mg Q,W With Concomitant Statin","OriginalName","Alirocumab , mg Q,W/Up , mg Q,W With Concomitant Statin","N","age","ageSD","male","Interventions","termIds","SUBYEL","SUB_Oc","Id","RefId","OriginalName","Alirocumab , mg Q,W/Up , mg Q,W","Interventions","termIds","SUNUVb","Id","RefId","B,|O,~Alirocumab , mg Q,W/Up , mg Q,W With Concomitant Statin","OriginalName","Alirocumab , mg Q,W/Up , mg Q,W With Concomitant 
Statin","N","age","ageSD","male","Interventions","termIds","SUBYEL","SUB_Oc","SUNUVb","Id","Interventions","Id","Ya","Name","Treatments","Id","((","Phase","k)","Type","Drug","termIds","SUGeLS","SUNUVb","terms","Placebo","Alirocumab","Id","o)","Name","Treatments","Id","Phase","k)","Type","Drug","termIds","SUBYEL","terms","Statins","RefId","E,|Placebo Q,W","OriginalName","Placebo Q,W","Id","Ls","RefId","B,|O,~Placebo Q,W With Concomitant Statin","OriginalName","Placebo Q,W With Concomitant Statin","N","age","ageSD","male","Interventions","termIds","SUGeLS","SUBYEL","SUB_Oc","hasDocData","null","hasRapidExtract","false","N","queryScore","matchingScore","score","outcomes","id","type","Change","unit","%","termIds","SUF,R","SUBskP","quantifiers","name","Calculated LDL-C in Not Receiving Concomitant Statin Therapy - On-Treatment Analysis","cells","number","unit","%","group","!","varType","se","N","se","sd","number","unit","%","group","varType","se","N","se","sd","number","unit","%","group","zB","varType","se","N","se","sd","time","Id","Low","Value","Baseline","High","Number","Unit","wk","Type","Total","days","description","wk","score","matchingTerm","SUF,R","suggestedPositive","false","sourceUnit","%","id","type","Change","unit","%","termIds","SUF,R","SUBskP","quantifiers","name","Calculated LDL-C in Receiving Concomitant Statin Therapy - On-Treatment Analysis","cells","number","unit","%","group","Ls","varType","se","N","se","sd","number","unit","%","group","varType","se","N","se","sd","number","unit","%","group","jt","varType","se","N","se","sd","time","Id","Low","Value","Baseline","High","Number","Unit","wk","Type","Total","days","description","wk","score","matchingTerm","SUF,R","suggestedPositive","false","sourceUnit","%","id","type","Change","unit","%","termIds","SUF,R","SUBskP","quantifiers","name","Calculated LDL-C in Not Receiving Concomitant Statin Therapy - On-Treatment 
Analysis","cells","number","unit","%","group","varType","se","N","se","sd","number","unit","%","group","varType","se","N","se","sd","number","unit","%","group","zB","varType","se","N","se","sd","time","Id","Low","Value","Baseline","High","Number","Unit","wk","Type","Total","days","description","score","matchingTerm","SUF,R","suggestedPositive","false","sourceUnit","%","id","type","Change","unit","%","termIds","SUF,R","SUBskP","quantifiers","name","Calculated LDL-C in Receiving Concomitant Statin Therapy - On-Treatment Analysis","cells","number","unit","%","group","Ls","varType","se","N","se","sd","number","unit","%","group","E","varType","se","N","se","sd","number","unit","%","group","jt","varType","se","N","se","sd","time","Id","Low","Value","Baseline","High","Number","Unit","wk","Type","Total","days","description","wk","score","matchingTerm","SUF,R","suggestedPositive","false","sourceUnit","%","characteristics","id","type","Binary","isCharacteristic","null","termIds","SUE_c","SUCbN","SUyJj","quantifiers","name","Patients not having adequate control of their hypercholesterolemia based on their individual level of CVD risk","cells","number","outcomesScore"] -------------------------------------------------------------------------------- /tests/example4.json: -------------------------------------------------------------------------------- 1 | {"metadata":{"Designs":["Randomized Controlled Trial"],"Types":[],"BriefSummary":"To determine the efficacy, long-term safety, and tolerability of alirocumab 300 mg every 4\n weeks (Q4W), in comparison with placebo, as well as its potential as a starting regimen. The\n dose regimen of 75 mg every 2 weeks (Q2W), as used in other studies, was added as a\n calibrator.","Abstract":"To determine the efficacy, long-term safety, and tolerability of alirocumab 300 mg every 4\n weeks (Q4W), in comparison with placebo, as well as its potential as a starting regimen. 
The\n dose regimen of 75 mg every 2 weeks (Q2W), as used in other studies, was added as a\n calibrator.","Acronym":null,"ArticleId":"Qy3gwKWSoaWRmbmFEQA","Authors":null,"CochraneID":null,"Confidential":false,"CorporateAuthor":null,"Country":"Bulgaria, Canada, Hungary, Israel, Norway, Slovakia, United Kingdom, United States","CustomData":null,"DatabaseType":"ClinicalTrials.gov","DOI":null,"EmbaseAccessionNumber":null,"Emtree":null,"ErrataText":null,"FullTextURL":null,"Institution":null,"ISSN":null,"Issue":null,"JournalTitle":null,"MedlineID":null,"MeSH":"Hypercholesterolemia|Antibodies, Monoclonal","Pages":null,"ParentChildStatus":null,"ParentID":null,"PublicationDate":"March 21, 2017","PublicationYear":2017,"PubType":null,"ReferenceStudy":null,"SecondarySourceID":null,"Source":"Regeneron Pharmaceuticals","SourceReferenceId":"NCT01926782","TaStudyDesign":"Randomized","Title":"A Randomized, Double-Blind, Placebo-Controlled Study to Evaluate the Efficacy and Safety of an Every Four Weeks Treatment Regimen of Alirocumab in Patients With Primary Hypercholesterolemia","TrialOutcome":null,"Volume":null,"Id":179246831,"Created":"2020-04-10T14:48:20.4384957Z","VersionNo":2,"ExtractData":null,"Digitized":true,"IsRapidExtract":false,"IsUploaded":false},"design":"Randomized Controlled Trial","conditions":[{"label":"Cholesterol Total Increased","id":"SUE_c"}],"phase":3,"name":"NCT01926782","trialIds":["NCT01926782"],"acronyms":[],"outcomeCount":156,"id":179246831,"groups":[{"Id":"4r","RefId":"B5|O2~Alirocumab 75 mg Q2W/Up 150 mg Q2W Without Concomitant Statin","OriginalName":"Alirocumab 75 mg Q2W/Up 150 mg Q2W Without Concomitant Statin","N":37,"age":59.3,"ageSD":11.3,"male":37.83783783783784,"Interventions":[{"termIds":[["SUBYEL","SUB_Oc"],["SUNUVb"]]}],"analyzeAs":"Alirocumab","analyzableScore":1.0717734625362931,"matchingScore":0},{"Id":"zB","RefId":"B6|O3~Alirocumab 300 mg Q4W/Up 150 mg Q2W Without Concomitant Statin","OriginalName":"Alirocumab 300 mg Q4W/Up 150 mg Q2W 
Without Concomitant Statin","N":146,"age":59.2,"ageSD":10.8,"male":45.205479452054796,"Interventions":[{"termIds":[["SUBYEL","SUB_Oc"]]}],"analyzeAs":"Statins","analyzableScore":1.0717734625362931,"matchingScore":0},{"Id":"3!","RefId":"B4|O1~Placebo Q2W Without Concomitant Statin","OriginalName":"Placebo Q2W Without Concomitant Statin","N":73,"age":59.4,"ageSD":10.2,"male":54.794520547945204,"Interventions":[{"termIds":[["SUGeLS"],["SUBYEL","SUB_Oc"]]}],"analyzeAs":"Control","analyzableScore":1.2020833333333334,"matchingScore":0},{"Id":"tv","RefId":"E3","OriginalName":"Alirocumab 300 mg Q4W/Up 150 mg Q2W","Interventions":[{"termIds":[["SUCO54","SUNUVb"]]}]},{"Id":"jt","RefId":"B3|O3~Alirocumab 300 mg Q4W/Up 150 mg Q2W With Concomitant Statin","OriginalName":"Alirocumab 300 mg Q4W/Up 150 mg Q2W With Concomitant Statin","N":312,"age":61.6,"ageSD":10,"male":60.8974358974359,"Interventions":[{"termIds":[["SUBYEL","SUB_Oc"]]}]},{"Id":"5!","RefId":"E2","OriginalName":"Alirocumab 75 mg Q2W/Up 150 mg Q2W","Interventions":[{"termIds":[["SUNUVb"]]}]},{"Id":"4E","RefId":"B2|O2~Alirocumab 75 mg Q2W/Up 150 mg Q2W With Concomitant Statin","OriginalName":"Alirocumab 75 mg Q2W/Up 150 mg Q2W With Concomitant Statin","N":78,"age":60.7,"ageSD":9.1,"male":65.38461538461539,"Interventions":[{"termIds":[["SUBYEL","SUB_Oc"],["SUNUVb"]]}]},{"Id":"i4","Interventions":[{"Id":"Ya","Name":178613599,"Treatments":[{"Id":"((","Phase":"k)"}],"Type":"Drug","termIds":[["SUGeLS"],["SUNUVb"]],"terms":[["Placebo"],["Alirocumab"]]},{"Id":"o)","Name":2159990,"Treatments":[{"Id":"1$","Phase":"k)"}],"Type":"Drug","termIds":[["SUBYEL"]],"terms":[["Statins"]]}],"RefId":"E1|Placebo Q2W","OriginalName":"Placebo Q2W"},{"Id":"Ls","RefId":"B1|O1~Placebo Q2W With Concomitant Statin","OriginalName":"Placebo Q2W With Concomitant 
Statin","N":157,"age":61.6,"ageSD":9.7,"male":64.3312101910828,"Interventions":[{"termIds":[["SUGeLS"],["SUBYEL","SUB_Oc"]]}]}],"hasDocData":true,"hasRapidExtract":false,"N":803,"queryScore":1.4868329805051381,"matchingScore":7.960635921410255,"score":22.084654254966498,"outcomes":[{"id":"179246387","type":"Change","unit":"%","termIds":[["SUF0R","SUBskP"]],"quantifiers":[],"name":"Calculated LDL-C in Not Receiving Concomitant Statin Therapy - On-Treatment Analysis","cells":[{"number":-0.4,"unit":"%","group":"3!","varType":"se","N":70,"se":2,"sd":16.73},{"number":-54.6,"unit":"%","group":"4r","varType":"se","N":37,"se":2.8,"sd":17.03},{"number":-59.4,"unit":"%","group":"zB","varType":"se","N":141,"se":1.4,"sd":16.62}],"time":{"Id":67122072,"Low":{"Value":"Baseline"},"High":{"Number":24,"Unit":"wk"},"Type":"Total","days":168,"description":"24wk"},"score":2.08,"matchingTerm":"SUF0R","suggestedPositive":false,"sourceUnit":"%"},{"id":"179246389","type":"Change","unit":"%","termIds":[["SUF0R","SUBskP"]],"quantifiers":[],"name":"Calculated LDL-C in Receiving Concomitant Statin Therapy - On-Treatment Analysis","cells":[{"number":-0.3,"unit":"%","group":"Ls","varType":"se","N":151,"se":2.1,"sd":25.81},{"number":-55.1,"unit":"%","group":"4E","varType":"se","N":75,"se":3,"sd":25.98},{"number":-62.3,"unit":"%","group":"jt","varType":"se","N":302,"se":1.5,"sd":26.07}],"time":{"Id":67122072,"Low":{"Value":"Baseline"},"High":{"Number":24,"Unit":"wk"},"Type":"Total","days":168,"description":"24wk"},"score":2.08,"matchingTerm":"SUF0R","suggestedPositive":false,"sourceUnit":"%"},{"id":"179246393","type":"Change","unit":"%","termIds":[["SUF0R","SUBskP"]],"quantifiers":[],"name":"Calculated LDL-C in Not Receiving Concomitant Statin Therapy - On-Treatment 
Analysis","cells":[{"number":-0.5,"unit":"%","group":"3!","varType":"se","N":70,"se":2,"sd":16.73},{"number":-53.9,"unit":"%","group":"4r","varType":"se","N":37,"se":2.7,"sd":16.42},{"number":-60,"unit":"%","group":"zB","varType":"se","N":141,"se":1.4,"sd":16.62}],"time":{"Id":67122069,"Low":{"Value":"Baseline"},"High":{"Number":12,"Unit":"wk"},"Type":"Total","days":84,"description":"12wk"},"score":2.08,"matchingTerm":"SUF0R","suggestedPositive":false,"sourceUnit":"%"},{"id":"179246394","type":"Change","unit":"%","termIds":[["SUF0R","SUBskP"]],"quantifiers":[],"name":"Calculated LDL-C in Receiving Concomitant Statin Therapy - On-Treatment Analysis","cells":[{"number":1.4,"unit":"%","group":"Ls","varType":"se","N":151,"se":1.9,"sd":23.35},{"number":-47.3,"unit":"%","group":"4E","varType":"se","N":75,"se":2.8,"sd":24.25},{"number":-58,"unit":"%","group":"jt","varType":"se","N":302,"se":1.4,"sd":24.33}],"time":{"Id":67122069,"Low":{"Value":"Baseline"},"High":{"Number":12,"Unit":"wk"},"Type":"Total","days":84,"description":"12wk"},"score":2.08,"matchingTerm":"SUF0R","suggestedPositive":false,"sourceUnit":"%"}],"characteristics":[{"id":"179246354","type":"Binary","isCharacteristic":true,"termIds":[["SUE_c","SUCbN","SUyJj"]],"quantifiers":[],"name":"Patients not having adequate control of their hypercholesterolemia based on their individual level of CVD risk","cells":[],"number":100}],"outcomesScore":18.97947630112307} -------------------------------------------------------------------------------- /tests/benchmark.cjs: -------------------------------------------------------------------------------- 1 | var cborX = tryRequire("../dist/node.cjs"); 2 | var msgpack_node = tryRequire("msgpack"); 3 | var msgpack_msgpack = tryRequire("@msgpack/msgpack"); 4 | var msgpack_lite = tryRequire("msgpack-lite"); 5 | var msgpack_js = tryRequire("msgpack-js"); 6 | var msgpack_js_v5 = tryRequire("msgpack-js-v5"); 7 | var msgpack5 = tryRequire("msgpack5"); 8 | var msgpack_unpack = 
tryRequire("msgpack-unpack"); 9 | var msgpack_codec = tryRequire("msgpack.codec"); 10 | var notepack = tryRequire("notepack"); 11 | var what_the_pack = tryRequire("what-the-pack"); 12 | var avro = tryRequire('avsc') 13 | var cbor = tryRequire('cbor') 14 | var cborSync = tryRequire('cbor-sync') 15 | var zlib = require('zlib') 16 | var deflateSync = zlib.deflateSync 17 | var inflateSync = zlib.inflateSync 18 | //var deflateSync = zlib.brotliCompressSync 19 | //var inflateSync = zlib.brotliDecompressSync 20 | var constants = zlib.constants 21 | //require('inspector').open(9330, null, true); 22 | msgpack5 = msgpack5 && msgpack5(); 23 | msgpack_codec = msgpack_codec && msgpack_codec.msgpack; 24 | what_the_pack = what_the_pack && what_the_pack.initialize(2**20); 25 | 26 | var pkg = require("../package.json"); 27 | var data = require("./example4.json"); 28 | var packed = msgpack_lite && msgpack_lite.encode(data); 29 | var expected = JSON.stringify(data); 30 | 31 | var argv = Array.prototype.slice.call(process.argv, 2); 32 | 33 | if (argv[0] === "-v") { 34 | console.warn(pkg.name + " " + pkg.version); 35 | process.exit(0); 36 | } 37 | 38 | var limit = 5; 39 | if (argv[0] - 0) limit = argv.shift() - 0; 40 | limit *= 1000; 41 | 42 | var COL1 = 58; 43 | var COL2 = 7; 44 | var COL3 = 5; 45 | var COL4 = 6; 46 | 47 | console.log(rpad("operation", COL1), "|", " op ", "|", " ms ", "|", " op/s "); 48 | console.log(rpad("", COL1, "-"), "|", lpad(":", COL2, "-"), "|", lpad(":", COL3, "-"), "|", lpad(":", COL4, "-")); 49 | 50 | var buf, obj; 51 | 52 | if (cborX) { 53 | var encoder = new cborX.Encoder({ structures: [] }) 54 | buf = bench('cbor-x w/ records: encoder.encode(obj);', encoder.encode.bind(encoder), data); 55 | console.log('size', buf.length) 56 | 57 | obj = bench('cbor-x w/ records: encoder.decode(buf);', encoder.decode.bind(encoder), buf); 58 | test(obj); 59 | 60 | encoder = new cborX.Encoder({ useRecords: false, pack: true }) 61 | buf = bench('cbor-x packed: 
encoder.encode(obj);', encoder.encode.bind(encoder), data); 62 | 63 | obj = bench('cbor-x packed: encoder.decode(buf);', encoder.decode.bind(encoder), buf); 64 | test(obj); 65 | console.log('size', buf.length) 66 | 67 | 68 | // buf = bench('require("cbor-x").encode(obj) and compress;', (data) => deflateSync(cborX.encode(data)), data); 69 | //console.log('size', buf.length) 70 | buf = bench('require("cbor-x").encode(obj);', cborX.encode, data); 71 | 72 | obj = bench('require("cbor-x").decode(buf);', cborX.decode, buf); 73 | test(obj); 74 | console.log('size', buf.length) 75 | 76 | } 77 | if (JSON) { 78 | buf = bench('buf = Buffer(JSON.stringify(obj));', JSON_stringify, data); 79 | obj = bench('obj = JSON.parse(buf);', JSON.parse, buf); 80 | test(obj); 81 | } 82 | 83 | 84 | if (JSON) { 85 | buf = bench('buf = Buffer(JSON.stringify(obj));', JSON_stringify, data); 86 | obj = bench('obj = JSON.parse(buf);', JSON.parse, buf); 87 | test(obj); 88 | } 89 | 90 | if (msgpack_lite) { 91 | buf = bench('buf = require("msgpack-lite").encode(obj);', msgpack_lite.encode, data); 92 | obj = bench('obj = require("msgpack-lite").decode(buf);', msgpack_lite.decode, packed); 93 | test(obj); 94 | } 95 | 96 | if (msgpack_msgpack) { 97 | buf = bench('buf = require("@msgpack/msgpack").encode(obj);', msgpack_msgpack.encode, data); 98 | obj = bench('obj = require("@msgpack/msgpack").decode(buf);', msgpack_msgpack.decode, buf); 99 | test(obj); 100 | } 101 | 102 | if (msgpack_node) { 103 | buf = bench('buf = require("msgpack").pack(obj);', msgpack_node.pack, data); 104 | obj = bench('obj = require("msgpack").unpack(buf);', msgpack_node.unpack, buf); 105 | test(obj); 106 | } 107 | 108 | if (msgpack_codec) { 109 | buf = bench('buf = Buffer(require("msgpack.codec").msgpack.pack(obj));', msgpack_codec_pack, data); 110 | obj = bench('obj = require("msgpack.codec").msgpack.unpack(buf);', msgpack_codec.unpack, buf); 111 | test(obj); 112 | } 113 | 114 | if (msgpack_js_v5) { 115 | buf = bench('buf = 
require("msgpack-js-v5").encode(obj);', msgpack_js_v5.encode, data); 116 | obj = bench('obj = require("msgpack-js-v5").decode(buf);', msgpack_js_v5.decode, buf); 117 | test(obj); 118 | } 119 | 120 | if (msgpack_js) { 121 | buf = bench('buf = require("msgpack-js").encode(obj);', msgpack_js.encode, data); 122 | obj = bench('obj = require("msgpack-js").decode(buf);', msgpack_js.decode, buf); 123 | test(obj); 124 | } 125 | 126 | if (msgpack5) { 127 | buf = bench('buf = require("msgpack5")().encode(obj);', msgpack5.encode, data); 128 | obj = bench('obj = require("msgpack5")().decode(buf);', msgpack5.decode, buf); 129 | test(obj); 130 | } 131 | 132 | if (notepack) { 133 | buf = bench('buf = require("notepack").encode(obj);', notepack.encode, data); 134 | obj = bench('obj = require("notepack").decode(buf);', notepack.decode, buf); 135 | test(obj); 136 | } 137 | if (what_the_pack) { 138 | buf = bench('require("what-the-pack")... encoder.encode(obj);', what_the_pack.encode, data); 139 | obj = bench('require("what-the-pack")... 
encoder.decode(buf);', what_the_pack.decode, buf); 140 | test(obj); 141 | } 142 | 143 | if (msgpack_unpack) { 144 | obj = bench('obj = require("msgpack-unpack").decode(buf);', msgpack_unpack, packed); 145 | test(obj); 146 | } 147 | 148 | if (avro) { 149 | const type = avro.Type.forValue(data); 150 | buf = bench('require("avsc")...make schema/type...type.toBuffer(obj);', type.toBuffer.bind(type), data); 151 | obj = bench('require("avsc")...make schema/type...type.fromBuffer(obj);', type.fromBuffer.bind(type), buf); 152 | } 153 | 154 | if (cbor) { 155 | buf = bench('buf = require("cbor").encode(obj);', cbor.encode, data); 156 | obj = bench('obj = require("cbor").decode(buf);', cbor.decode, buf); 157 | test(obj); 158 | } 159 | if (cborSync) { 160 | buf = bench('buf = require("cbor-sync").encode(obj);', cborSync.encode, data); 161 | obj = bench('obj = require("cbor-sync").decode(buf);', cborSync.decode, buf); 162 | test(obj); 163 | } 164 | 165 | function JSON_stringify(src) { 166 | return Buffer(JSON.stringify(src)); 167 | } 168 | 169 | function msgpack_codec_pack(data) { 170 | return Buffer(msgpack_codec.pack(data)); 171 | } 172 | 173 | function bench(name, func, src) { 174 | if (argv.length) { 175 | var match = argv.filter(function(grep) { 176 | return (name.indexOf(grep) > -1); 177 | }); 178 | if (!match.length) return SKIP; 179 | } 180 | var ret, duration; 181 | var start = new Date() - 0; 182 | var count = 0; 183 | while (1) { 184 | var end = new Date() - 0; 185 | duration = end - start; 186 | if (duration >= limit) break; 187 | while ((++count) % 100) ret = func(src); 188 | } 189 | name = rpad(name, COL1); 190 | var score = Math.floor(count / duration * 1000); 191 | count = lpad(count, COL2); 192 | duration = lpad(duration, COL3); 193 | score = lpad(score, COL4); 194 | console.log(name, "|", count, "|", duration, "|", score); 195 | return ret; 196 | } 197 | 198 | function rpad(str, len, chr) { 199 | if (!chr) chr = " "; 200 | while (str.length < len) str += chr; 
// Left-pad to width `len` with `chr` (default space).  The value is
// coerced to a string first so numbers line up in the table columns.
function lpad(str, len, chr) {
  if (!chr) chr = " ";
  str = "" + str;
  while (str.length < len) {
    str = chr + str;
  }
  return str;
}

// Compare a decoded result against the expected JSON snapshot and warn
// (without throwing) on mismatch.  The SKIP sentinel is ignored.
function test(actual) {
  if (actual === SKIP) return;
  var serialized = JSON.stringify(actual);
  if (serialized === expected) return;
  console.warn("expected: " + expected);
  console.warn("actual: " + serialized);
}

// Sentinel returned by bench() for rows filtered out by the CLI args.
function SKIP() {
}

// Load an optional module; returns undefined when it is not installed.
function tryRequire(name) {
  try {
    return require(name);
  } catch (e) {
    // dependency not installed — callers treat undefined as "skip it"
  }
}
if (argv[0] === "-v") {
  console.warn(pkg.name + " " + pkg.version);
  process.exit(0);
}

// First numeric argument overrides the repeat limit.
if (argv[0] - 0) limit = argv.shift() - 0;

// [display name, benchmark function] pairs.  bench() later appends a
// timings array at index 2 and the last operation count at index 3.
var list = [
  ['new EncoderStream().write(obj);', encode5],
  ['new DecoderStream().write(buf);', decode5],
  ['stream.write(msgpack.encode(obj));', encode1],
  ['stream.write(msgpack.decode(buf));', decode1],
  ['stream.write(notepack.encode(obj));', encode4],
  ['stream.write(notepack.decode(buf));', decode4],
  ['msgpack.Encoder().on("data",ondata).encode(obj);', encode2],
  ['msgpack.createDecodeStream().write(buf);', decode3],
  ['msgpack.createEncodeStream().write(obj);', encode3],
  ['msgpack.Decoder().on("data",ondata).decode(buf);', decode2],
  // ['stream.write(Buffer.from(JSON.stringify(obj)));', stringify],
  // ['stream.write(JSON.parse(buf));', parse]
];

// cbor-x EncoderStream: one object written per iteration.
function encode5(callback) {
  var stream = new EncoderStream();
  var cnt = counter(callback);
  stream.on("data", cnt.inc);
  stream.on("end", cnt.end);
  for (var j = 0; j < opcount; j++) {
    stream.write(data);
  }
  stream.end();
}

// msgpack-lite encode pushed through a plain PassThrough stream.
function encode1(callback) {
  var stream = new PassThrough();
  var cnt = counter(callback);
  stream.on("data", cnt.buf);
  stream.on("end", cnt.end);
  for (var j = 0; j < opcount; j++) {
    stream.write(msgpack.encode(data));
  }
  stream.end();
}

// msgpack-lite's event-based Encoder feeding a PassThrough stream.
function encode2(callback) {
  var stream = new PassThrough();
  var cnt = counter(callback);
  stream.on("data", cnt.buf);
  stream.on("end", cnt.end);
  var encoder = Encoder();
  encoder.on("data", function(chunk) {
    stream.write(chunk);
  });
  encoder.on("end", function() {
    stream.end();
  });
  for (var j = 0; j < opcount; j++) {
    encoder.encode(data);
  }
  encoder.end();
}

// msgpack-lite's own encode stream.
function encode3(callback) {
  var stream = msgpack.createEncodeStream();
  var cnt = counter(callback);
  stream.on("data", cnt.buf);
  stream.on("end", cnt.end);
  for (var j = 0; j < opcount; j++) {
    stream.write(data);
  }
  stream.end();
}

// notepack encode pushed through a plain PassThrough stream.
function encode4(callback) {
  var stream = new PassThrough();
  var cnt = counter(callback);
  stream.on("data", cnt.buf);
  stream.on("end", cnt.end);
  for (var j = 0; j < opcount; j++) {
    stream.write(notepack.encode(data));
  }
  stream.end();
}

// cbor-x DecoderStream fed pre-joined record chunks; each chunk holds
// `joincount` records, so the loop runs opcount / joincount times.
function decode5(callback) {
  var stream = new DecoderStream();
  var cnt = counter(callback);
  stream.on("data", cnt.inc);
  stream.on("end", cnt.end);
  for (var j = 0; j < opcount / joincount; j++) {
    stream.write(packjoinWithRecords);
  }
  stream.end();
}

// msgpack-lite decode of a single packed message per iteration.
function decode1(callback) {
  var stream = new PassThrough({objectMode: true});
  var cnt = counter(callback);
  stream.on("data", cnt.inc);
  stream.on("end", cnt.end);
  for (var j = 0; j < opcount; j++) {
    stream.write(msgpack.decode(packed));
  }
  stream.end();
}

// msgpack-lite's event-based Decoder fed joined chunks.
function decode2(callback) {
  var stream = new PassThrough({objectMode: true});
  var cnt = counter(callback);
  stream.on("data", cnt.inc);
  stream.on("end", cnt.end);
  var decoder = Decoder();
  decoder.on("data", function(chunk) {
    stream.write(chunk);
  });
  decoder.on("end", function() {
    stream.end();
  });
  for (var j = 0; j < opcount / joincount; j++) {
    decoder.decode(packjoin);
  }
  decoder.end();
}

// msgpack-lite's own decode stream fed joined chunks.
function decode3(callback) {
  var stream = msgpack.createDecodeStream();
  var cnt = counter(callback);
  stream.on("data", cnt.inc);
  stream.on("end", cnt.end);
  for (var j = 0; j < opcount / joincount; j++) {
    stream.write(packjoin);
  }
  stream.end();
}

// notepack decode of a single packed message per iteration.
function decode4(callback) {
  var stream = new PassThrough({objectMode: true});
  var cnt = counter(callback);
  stream.on("data", cnt.inc);
  stream.on("end", cnt.end);
  for (var j = 0; j < opcount; j++) {
    stream.write(notepack.decode(packed));
  }
  stream.end();
}

// Right-pad `str` with `chr` (default space) to width `len`.
function rpad(str, len, chr) {
  if (!chr) chr = " ";
  str += "";
  while (str.length < len) str += chr;
  return str;
}

// Left-pad `str` with `chr` (default space) to width `len`.
function lpad(str, len, chr) {
  if (!chr) chr = " ";
  str += "";
  while (str.length < len) str = chr + str;
  return str;
}

// Concatenate `cnt` copies of `buf` into one Buffer.
function repeatbuf(buf, cnt) {
  var array = [];
  for (var i = 0; i < cnt; i++) {
    array.push(buf);
  }
  return Buffer.concat(array);
}

/**
 * Build the shared result counter for one benchmark run.
 * - buf: accumulate by raw byte length, normalized to message count
 * - inc: accumulate one object at a time
 * - end: round and report the final count to the callback
 */
function counter(callback) {
  var cnt = 0;
  return {buf: b, inc: i, end: e};

  function b(buf) {
    cnt += buf.length / packsize;
  }

  function i() {
    cnt++;
  }

  function e() {
    cnt = Math.round(cnt);
    callback(null, cnt);
  }
}

// Filter the task list by CLI args, run every task `limit` times in
// series, then print a markdown results table.
function run() {
  // task filter
  if (argv.length) {
    list = list.filter(function(pair) {
      var name = pair[0];
      var match = argv.filter(function(grep) {
        return (name.indexOf(grep) > -1);
      });
      return match.length;
    });
  }

  // run tasks repeatedly
  var tasks = [];
  for (var i = 0; i < limit; i++) {
    tasks.push(oneset);
  }
  async.series(tasks, end);

  // run a series of tasks
  function oneset(callback) {
    async.eachSeries(list, bench, callback);
  }

  // run a single benchmark
  function bench(pair, callback) {
    process.stdout.write(".");
    var func = pair[1];
    var start = new Date() - 0;
    func(function(err, cnt) {
      var end = new Date() - 0;
      var array = pair[2] || (pair[2] = []);
      array.push(end - start);
      pair[3] = cnt;
      setTimeout(callback, 100);
    });
  }

  // show result
  function end() {
    var title = "operation (" + opcount + " x " + limit + ")";
    process.stdout.write("\n");

    // table header
    var COL1 = 48;
    console.log(rpad(title, COL1), "|", " op ", "|", " ms ", "|", " op/s ");
    console.log(rpad("", COL1, "-"), "|", "------:", "|", "----:", "|", "-----:");

    // table body
    list.forEach(function(pair) {
      var name = pair[0];
      var op = pair[3];
      var array = pair[2];
      // Numeric ascending sort.  A boolean comparator (a > b) violates
      // the sort contract and can mis-order the timings, picking the
      // wrong "fastest" run — subtract instead.
      array = array.sort(function(a, b) {
        return a - b;
      });
      var fastest = array[0];
      var score = Math.floor(opcount / fastest * 1000);
      console.log(rpad(name, COL1), "|", lpad(op, 7), "|", lpad(fastest, 5), "|", lpad(score, 6));
    });
  }
}

run();
"permalink":"http://www.grokpodcast.com/", 35 | "published":true, 36 | "title":"Grok Podcast", 37 | "updated_at":"2015-11-15T22:55:47.498Z", 38 | "url":"http://www.grokpodcast.com/atom.xml" 39 | }, 40 | { 41 | "_id":"564a1c30b1191d0003000000", 42 | "author":null, 43 | "created_at":"2015-11-16T18:10:56.610Z", 44 | "description":"The Web Platform Podcast is a developer discussion that dives deep into ‘all things’ web. We discuss everything from developing for mobile to building HDTV software. From wearables \u0026 robotics to user experience \u0026 mentoring, we bring to our listeners everything related to building products \u0026 services for The Web Platform of today, tomorrow, and beyond.", 45 | "image":"http://static.libsyn.com/p/assets/f/7/2/0/f7208dae16d0543e/twp-logo-flat-blue-square.png", 46 | "keywords":[ 47 | "Technology", 48 | "Software How-To", 49 | "Tech News" 50 | ], 51 | "language":"en", 52 | "permalink":"http://thewebplatform.libsyn.com/webpage", 53 | "published":true, 54 | "title":"The Web Platform Podcast", 55 | "updated_at":"2015-11-16T18:11:02.022Z", 56 | "url":"http://thewebplatform.libsyn.com//rss" 57 | }, 58 | { 59 | "_id":"564a1de3b1191d0003000047", 60 | "author":null, 61 | "created_at":"2015-11-16T18:18:11.854Z", 62 | "description":"Developer Tea is a podcast for web and software developers hosted by a developer that you can listen to in less than 10 minutes. The show will cover a wide variety of topics related to the career of being a developer. We hope you'll take the topics from this podcast and continue the conversation, either online or in person with your peers. The show is hosted by Jonathan Cutrell, Director of Technology at Whiteboard and the author of Hacking the Impossible, a developer's guide to working with visionaries. 
:: Twitter: @developertea @jcutrell :: Email: developertea@gmail.com", 63 | "image":"http://simplecast-media.s3.amazonaws.com/podcast/image/363/1440374119-artwork.jpg", 64 | "keywords":[ 65 | "Technology", 66 | "Business", 67 | "Careers", 68 | "Society \u0026 Culture" 69 | ], 70 | "language":"en-us", 71 | "permalink":"http://www.developertea.com/", 72 | "published":true, 73 | "title":"Developer Tea", 74 | "updated_at":"2015-11-16T23:00:23.224Z", 75 | "url":"http://feeds.feedburner.com/developertea" 76 | }, 77 | { 78 | "_id":"564a3163e51cc0000300004c", 79 | "author":null, 80 | "created_at":"2015-11-16T19:41:23.436Z", 81 | "description":"Conference talks from the Remote Conferences series put on by Devchat.tv", 82 | "image":"https://s3.amazonaws.com/devchat.tv/RemoteConfs.jpg", 83 | "keywords":[ 84 | 85 | ], 86 | "language":"en", 87 | "permalink":"http://remoteconfs.com/", 88 | "published":true, 89 | "title":"Remote Conferences - Audio", 90 | "updated_at":"2015-11-16T19:41:24.367Z", 91 | "url":"http://feeds.feedwrench.com/remoteconfs-audio.rss" 92 | }, 93 | { 94 | "_id":"564a315de51cc00003000000", 95 | "author":null, 96 | "created_at":"2015-11-16T19:41:17.492Z", 97 | "description":"Weekly discussion by freelancers and professionals about running a business, finding clients, marketing, and lifestyle related to being a freelancer.", 98 | "image":"https://s3.amazonaws.com/devchat.tv/freelancers_show_thumb.jpg", 99 | "keywords":[ 100 | "Business", 101 | "Careers", 102 | "Management \u0026amp; Marketing", 103 | "Education", 104 | "Training" 105 | ], 106 | "language":"en", 107 | "permalink":"http://www.freelancersshow.com/", 108 | "published":true, 109 | "title":"The Freelancers' Show", 110 | "updated_at":"2015-11-16T19:41:27.459Z", 111 | "url":"http://feeds.feedwrench.com/TheFreelancersShow.rss" 112 | }, 113 | { 114 | "_id":"564a3169e51cc000030000cd", 115 | "author":null, 116 | "created_at":"2015-11-16T19:41:29.686Z", 117 | "description":"React Native Radio Podcast", 118 
| "image":"https://s3.amazonaws.com/devchat.tv/react-native-radio-album-art.jpg", 119 | "keywords":[ 120 | 121 | ], 122 | "language":"en", 123 | "permalink":"http://devchat.tv/react-native-radio", 124 | "published":true, 125 | "title":"React Native Radio", 126 | "updated_at":"2015-11-16T19:41:29.999Z", 127 | "url":"http://feeds.feedwrench.com/react-native-radio.rss" 128 | }, 129 | { 130 | "_id":"564a316fe51cc000030000d4", 131 | "author":null, 132 | "created_at":"2015-11-16T19:41:35.937Z", 133 | "description":"The iOS Development Podcast", 134 | "image":"https://s3.amazonaws.com/devchat.tv/iPhreaks-thumb.jpg", 135 | "keywords":[ 136 | "Technology", 137 | "Tech News", 138 | "Software How-To" 139 | ], 140 | "language":"en", 141 | "permalink":"http://iphreaksshow.com/", 142 | "published":true, 143 | "title":"The iPhreaks Show", 144 | "updated_at":"2015-11-16T19:41:43.700Z", 145 | "url":"http://feeds.feedwrench.com/iPhreaks.rss" 146 | }, 147 | { 148 | "_id":"564a3184e51cc00003000156", 149 | "author":null, 150 | "created_at":"2015-11-16T19:41:56.874Z", 151 | "description":"Weekly podcast discussion about Javascript on the front and back ends. 
Also discuss programming practices, coding environments, and the communities related to the technology.", 152 | "image":"https://s3.amazonaws.com/devchat.tv/javascript_jabber_thumb.jpg", 153 | "keywords":[ 154 | "Education", 155 | "Training", 156 | "Technology", 157 | "Software How-To" 158 | ], 159 | "language":"en", 160 | "permalink":"http://javascriptjabber.com/", 161 | "published":true, 162 | "title":"JavaScript Jabber", 163 | "updated_at":"2015-11-16T19:42:24.692Z", 164 | "url":"http://feeds.feedwrench.com/JavaScriptJabber.rss" 165 | }, 166 | { 167 | "_id":"564a31dee51cc00003000210", 168 | "author":null, 169 | "created_at":"2015-11-16T19:43:26.390Z", 170 | "description":"Each week we explore an aspect of web security.", 171 | "image":"http://devchat.cachefly.net/websecwarriors/logo_3000x3000.jpeg", 172 | "keywords":[ 173 | 174 | ], 175 | "language":"en", 176 | "permalink":"http://websecuritywarriors.com/", 177 | "published":true, 178 | "title":"Web Security Warriors", 179 | "updated_at":"2015-11-16T19:43:28.133Z", 180 | "url":"http://feeds.feedwrench.com/websecwarriors.rss" 181 | }, 182 | { 183 | "_id":"564a3ddbe51cc00003000217", 184 | "author":null, 185 | "created_at":"2015-11-16T20:34:35.791Z", 186 | "description":"Podcasts produzidos de 2008 a 2010 sobre jogos e todos os tipos de assuntos relacionados ao universo e cultura dos vídeogames.", 187 | "image":"http://jogabilida.de/wp-content/uploads/2011/12/nl-podcast.png", 188 | "keywords":[ 189 | "Games \u0026 Hobbies", 190 | "Video Games" 191 | ], 192 | "language":"pt-BR", 193 | "permalink":"http://jogabilida.de/", 194 | "published":true, 195 | "title":"Podcast NowLoading", 196 | "updated_at":"2015-11-16T23:00:23.963Z", 197 | "url":"http://feeds.feedburner.com/podcastnowloading" 198 | }, 199 | { 200 | "_id":"564b9cfe08602e00030000fa", 201 | "author":null, 202 | "created_at":"2015-11-17T21:32:46.210Z", 203 | "description":"Being Boss is a podcast for creative entrepreneurs. 
From Emily Thompson and Kathleen Shannon. Get your business together. Being boss is hard. Making a dream job of your own isn't easy. But getting paid for it, becoming known for it, and finding purpose in it, is so doable - if you do the work.", 204 | "image":"http://www.lovebeingboss.com/img/skin/Header_WhiteLogo.png", 205 | "keywords":[ 206 | 207 | ], 208 | "language":null, 209 | "permalink":"http://www.lovebeingboss.com/", 210 | "published":true, 211 | "title":"Being Boss // A Podcast for Creative Entrepreneurs", 212 | "updated_at":"2015-11-17T21:32:50.672Z", 213 | "url":"http://www.lovebeingboss.com/RSSRetrieve.aspx?ID=18365\u0026Type=RSS20" 214 | }, 215 | { 216 | "_id":"564c5c8008602e0003000128", 217 | "author":null, 218 | "created_at":"2015-11-18T11:09:52.991Z", 219 | "description":"O mundo pop vira piada no Jovem Nerd", 220 | "image":"http://jovemnerd.ig.com.br/wp-content/themes/jovemnerd_v2b/images/NC_FEED.jpg", 221 | "keywords":[ 222 | "Society \u0026 Culture" 223 | ], 224 | "language":"pt-BR", 225 | "permalink":"http://jovemnerd.com.br/", 226 | "published":true, 227 | "title":"Nerdcast", 228 | "updated_at":"2015-11-18T11:11:20.034Z", 229 | "url":"http://jovemnerd.com.br/categoria/nerdcast/feed/" 230 | } 231 | ] -------------------------------------------------------------------------------- /tests/example-twitter.json: -------------------------------------------------------------------------------- 1 | { 2 | "statuses": [ 3 | { 4 | "created_at": "Sun Feb 25 18:11:01 +0000 2018", 5 | "id": 967824267948773377, 6 | "id_str": "967824267948773377", 7 | "text": "From pilot to astronaut, Robert H. 
Lawrence was the first African-American to be selected as an astronaut by any na… https://t.co/FjPEWnh804", 8 | "truncated": true, 9 | "entities": { 10 | "hashtags": [], 11 | "symbols": [], 12 | "user_mentions": [], 13 | "urls": [ 14 | { 15 | "url": "https://t.co/FjPEWnh804", 16 | "expanded_url": "https://twitter.com/i/web/status/967824267948773377", 17 | "display_url": "twitter.com/i/web/status/9…", 18 | "indices": [ 19 | 117, 20 | 140 21 | ] 22 | } 23 | ] 24 | }, 25 | "metadata": { 26 | "result_type": "popular", 27 | "iso_language_code": "en" 28 | }, 29 | "source": "Sprinklr", 30 | "in_reply_to_status_id": null, 31 | "in_reply_to_status_id_str": null, 32 | "in_reply_to_user_id": null, 33 | "in_reply_to_user_id_str": null, 34 | "in_reply_to_screen_name": null, 35 | "user": { 36 | "id": 11348282, 37 | "id_str": "11348282", 38 | "name": "NASA", 39 | "screen_name": "NASA", 40 | "location": "", 41 | "description": "Explore the universe and discover our home planet with @NASA. We usually post in EST (UTC-5)", 42 | "url": "https://t.co/TcEE6NS8nD", 43 | "entities": { 44 | "url": { 45 | "urls": [ 46 | { 47 | "url": "https://t.co/TcEE6NS8nD", 48 | "expanded_url": "http://www.nasa.gov", 49 | "display_url": "nasa.gov", 50 | "indices": [ 51 | 0, 52 | 23 53 | ] 54 | } 55 | ] 56 | }, 57 | "description": { 58 | "urls": [] 59 | } 60 | }, 61 | "protected": false, 62 | "followers_count": 28605561, 63 | "friends_count": 270, 64 | "listed_count": 90405, 65 | "created_at": "Wed Dec 19 20:20:32 +0000 2007", 66 | "favourites_count": 2960, 67 | "utc_offset": -18000, 68 | "time_zone": "Eastern Time (US & Canada)", 69 | "geo_enabled": false, 70 | "verified": true, 71 | "statuses_count": 50713, 72 | "lang": "en", 73 | "contributors_enabled": false, 74 | "is_translator": false, 75 | "is_translation_enabled": false, 76 | "profile_background_color": "000000", 77 | "profile_background_image_url": "http://pbs.twimg.com/profile_background_images/590922434682880000/3byPYvqe.jpg", 78 | 
"profile_background_image_url_https": "https://pbs.twimg.com/profile_background_images/590922434682880000/3byPYvqe.jpg", 79 | "profile_background_tile": false, 80 | "profile_image_url": "http://pbs.twimg.com/profile_images/188302352/nasalogo_twitter_normal.jpg", 81 | "profile_image_url_https": "https://pbs.twimg.com/profile_images/188302352/nasalogo_twitter_normal.jpg", 82 | "profile_banner_url": "https://pbs.twimg.com/profile_banners/11348282/1518798395", 83 | "profile_link_color": "205BA7", 84 | "profile_sidebar_border_color": "000000", 85 | "profile_sidebar_fill_color": "F3F2F2", 86 | "profile_text_color": "000000", 87 | "profile_use_background_image": true, 88 | "has_extended_profile": true, 89 | "default_profile": false, 90 | "default_profile_image": false, 91 | "following": null, 92 | "follow_request_sent": null, 93 | "notifications": null, 94 | "translator_type": "regular" 95 | }, 96 | "geo": null, 97 | "coordinates": null, 98 | "place": null, 99 | "contributors": null, 100 | "is_quote_status": false, 101 | "retweet_count": 988, 102 | "favorite_count": 3875, 103 | "favorited": false, 104 | "retweeted": false, 105 | "possibly_sensitive": false, 106 | "lang": "en" 107 | }, 108 | { 109 | "created_at": "Sun Feb 25 19:31:07 +0000 2018", 110 | "id": 967844427480911872, 111 | "id_str": "967844427480911872", 112 | "text": "A magnetic power struggle of galactic proportions - new research highlights the role of the Sun's magnetic landscap… https://t.co/29dZgga54m", 113 | "truncated": true, 114 | "entities": { 115 | "hashtags": [], 116 | "symbols": [], 117 | "user_mentions": [], 118 | "urls": [ 119 | { 120 | "url": "https://t.co/29dZgga54m", 121 | "expanded_url": "https://twitter.com/i/web/status/967844427480911872", 122 | "display_url": "twitter.com/i/web/status/9…", 123 | "indices": [ 124 | 117, 125 | 140 126 | ] 127 | } 128 | ] 129 | }, 130 | "metadata": { 131 | "result_type": "popular", 132 | "iso_language_code": "en" 133 | }, 134 | "source": "Sprinklr", 135 | 
"in_reply_to_status_id": null, 136 | "in_reply_to_status_id_str": null, 137 | "in_reply_to_user_id": null, 138 | "in_reply_to_user_id_str": null, 139 | "in_reply_to_screen_name": null, 140 | "user": { 141 | "id": 11348282, 142 | "id_str": "11348282", 143 | "name": "NASA", 144 | "screen_name": "NASA", 145 | "location": "", 146 | "description": "Explore the universe and discover our home planet with @NASA. We usually post in EST (UTC-5)", 147 | "url": "https://t.co/TcEE6NS8nD", 148 | "entities": { 149 | "url": { 150 | "urls": [ 151 | { 152 | "url": "https://t.co/TcEE6NS8nD", 153 | "expanded_url": "http://www.nasa.gov", 154 | "display_url": "nasa.gov", 155 | "indices": [ 156 | 0, 157 | 23 158 | ] 159 | } 160 | ] 161 | }, 162 | "description": { 163 | "urls": [] 164 | } 165 | }, 166 | "protected": false, 167 | "followers_count": 28605561, 168 | "friends_count": 270, 169 | "listed_count": 90405, 170 | "created_at": "Wed Dec 19 20:20:32 +0000 2007", 171 | "favourites_count": 2960, 172 | "utc_offset": -18000, 173 | "time_zone": "Eastern Time (US & Canada)", 174 | "geo_enabled": false, 175 | "verified": true, 176 | "statuses_count": 50713, 177 | "lang": "en", 178 | "contributors_enabled": false, 179 | "is_translator": false, 180 | "is_translation_enabled": false, 181 | "profile_background_color": "000000", 182 | "profile_background_image_url": "http://pbs.twimg.com/profile_background_images/590922434682880000/3byPYvqe.jpg", 183 | "profile_background_image_url_https": "https://pbs.twimg.com/profile_background_images/590922434682880000/3byPYvqe.jpg", 184 | "profile_background_tile": false, 185 | "profile_image_url": "http://pbs.twimg.com/profile_images/188302352/nasalogo_twitter_normal.jpg", 186 | "profile_image_url_https": "https://pbs.twimg.com/profile_images/188302352/nasalogo_twitter_normal.jpg", 187 | "profile_banner_url": "https://pbs.twimg.com/profile_banners/11348282/1518798395", 188 | "profile_link_color": "205BA7", 189 | "profile_sidebar_border_color": "000000", 190 
| "profile_sidebar_fill_color": "F3F2F2", 191 | "profile_text_color": "000000", 192 | "profile_use_background_image": true, 193 | "has_extended_profile": true, 194 | "default_profile": false, 195 | "default_profile_image": false, 196 | "following": null, 197 | "follow_request_sent": null, 198 | "notifications": null, 199 | "translator_type": "regular" 200 | }, 201 | "geo": null, 202 | "coordinates": null, 203 | "place": null, 204 | "contributors": null, 205 | "is_quote_status": false, 206 | "retweet_count": 2654, 207 | "favorite_count": 7962, 208 | "favorited": false, 209 | "retweeted": false, 210 | "possibly_sensitive": false, 211 | "lang": "en" 212 | }, 213 | { 214 | "created_at": "Mon Feb 26 19:21:43 +0000 2018", 215 | "id": 968204446625869827, 216 | "id_str": "968204446625869827", 217 | "text": "Someone's got to be first. In space, the first explorers beyond Mars were Pioneers 10 and 11, twin robots who chart… https://t.co/SUX30Y45mr", 218 | "truncated": true, 219 | "entities": { 220 | "hashtags": [], 221 | "symbols": [], 222 | "user_mentions": [], 223 | "urls": [ 224 | { 225 | "url": "https://t.co/SUX30Y45mr", 226 | "expanded_url": "https://twitter.com/i/web/status/968204446625869827", 227 | "display_url": "twitter.com/i/web/status/9…", 228 | "indices": [ 229 | 117, 230 | 140 231 | ] 232 | } 233 | ] 234 | }, 235 | "metadata": { 236 | "result_type": "popular", 237 | "iso_language_code": "en" 238 | }, 239 | "source": "Sprinklr", 240 | "in_reply_to_status_id": null, 241 | "in_reply_to_status_id_str": null, 242 | "in_reply_to_user_id": null, 243 | "in_reply_to_user_id_str": null, 244 | "in_reply_to_screen_name": null, 245 | "user": { 246 | "id": 11348282, 247 | "id_str": "11348282", 248 | "name": "NASA", 249 | "screen_name": "NASA", 250 | "location": "", 251 | "description": "Explore the universe and discover our home planet with @NASA. 
We usually post in EST (UTC-5)", 252 | "url": "https://t.co/TcEE6NS8nD", 253 | "entities": { 254 | "url": { 255 | "urls": [ 256 | { 257 | "url": "https://t.co/TcEE6NS8nD", 258 | "expanded_url": "http://www.nasa.gov", 259 | "display_url": "nasa.gov", 260 | "indices": [ 261 | 0, 262 | 23 263 | ] 264 | } 265 | ] 266 | }, 267 | "description": { 268 | "urls": [] 269 | } 270 | }, 271 | "protected": false, 272 | "followers_count": 28605561, 273 | "friends_count": 270, 274 | "listed_count": 90405, 275 | "created_at": "Wed Dec 19 20:20:32 +0000 2007", 276 | "favourites_count": 2960, 277 | "utc_offset": -18000, 278 | "time_zone": "Eastern Time (US & Canada)", 279 | "geo_enabled": false, 280 | "verified": true, 281 | "statuses_count": 50713, 282 | "lang": "en", 283 | "contributors_enabled": false, 284 | "is_translator": false, 285 | "is_translation_enabled": false, 286 | "profile_background_color": "000000", 287 | "profile_background_image_url": "http://pbs.twimg.com/profile_background_images/590922434682880000/3byPYvqe.jpg", 288 | "profile_background_image_url_https": "https://pbs.twimg.com/profile_background_images/590922434682880000/3byPYvqe.jpg", 289 | "profile_background_tile": false, 290 | "profile_image_url": "http://pbs.twimg.com/profile_images/188302352/nasalogo_twitter_normal.jpg", 291 | "profile_image_url_https": "https://pbs.twimg.com/profile_images/188302352/nasalogo_twitter_normal.jpg", 292 | "profile_banner_url": "https://pbs.twimg.com/profile_banners/11348282/1518798395", 293 | "profile_link_color": "205BA7", 294 | "profile_sidebar_border_color": "000000", 295 | "profile_sidebar_fill_color": "F3F2F2", 296 | "profile_text_color": "000000", 297 | "profile_use_background_image": true, 298 | "has_extended_profile": true, 299 | "default_profile": false, 300 | "default_profile_image": false, 301 | "following": null, 302 | "follow_request_sent": null, 303 | "notifications": null, 304 | "translator_type": "regular" 305 | }, 306 | "geo": null, 307 | "coordinates": 
null, 308 | "place": null, 309 | "contributors": null, 310 | "is_quote_status": false, 311 | "retweet_count": 729, 312 | "favorite_count": 2777, 313 | "favorited": false, 314 | "retweeted": false, 315 | "possibly_sensitive": false, 316 | "lang": "en" 317 | }, 318 | { 319 | "created_at": "Mon Feb 26 06:42:50 +0000 2018", 320 | "id": 968013469743288321, 321 | "id_str": "968013469743288321", 322 | "text": "宇宙ステーションでも、日本と9時間の時差で月曜日が始まりました。n今週は6人から3人にクルーのサイズダウンがありますが、しっかりと任されているタスクをこなしたいと思います。nn写真は、NASAの実験施設「ディスティニー」のグローブ… https://t.co/2CYoPV6Aqx", 323 | "truncated": true, 324 | "entities": { 325 | "hashtags": [], 326 | "symbols": [], 327 | "user_mentions": [], 328 | "urls": [ 329 | { 330 | "url": "https://t.co/2CYoPV6Aqx", 331 | "expanded_url": "https://twitter.com/i/web/status/968013469743288321", 332 | "display_url": "twitter.com/i/web/status/9…", 333 | "indices": [ 334 | 117, 335 | 140 336 | ] 337 | } 338 | ] 339 | }, 340 | "metadata": { 341 | "result_type": "popular", 342 | "iso_language_code": "ja" 343 | }, 344 | "source": "Twitter Web Client", 345 | "in_reply_to_status_id": null, 346 | "in_reply_to_status_id_str": null, 347 | "in_reply_to_user_id": null, 348 | "in_reply_to_user_id_str": null, 349 | "in_reply_to_screen_name": null, 350 | "user": { 351 | "id": 842625693733203968, 352 | "id_str": "842625693733203968", 353 | "name": "金井 宣茂", 354 | "screen_name": "Astro_Kanai", 355 | "location": "", 356 | "description": "宇宙飛行士。2017年12月19日から国際宇宙ステーションに長期滞在中。 応援いただいているフォロワーのみなさまと一緒に、宇宙滞在を楽しみたいと思います!", 357 | "url": "https://t.co/rWU6cxY9iL", 358 | "entities": { 359 | "url": { 360 | "urls": [ 361 | { 362 | "url": "https://t.co/rWU6cxY9iL", 363 | "expanded_url": "https://ameblo.jp/astro-kanai/", 364 | "display_url": "ameblo.jp/astro-kanai/", 365 | "indices": [ 366 | 0, 367 | 23 368 | ] 369 | } 370 | ] 371 | }, 372 | "description": { 373 | "urls": [] 374 | } 375 | }, 376 | "protected": false, 377 | "followers_count": 51512, 378 | "friends_count": 59, 379 | "listed_count": 
655, 380 | "created_at": "Fri Mar 17 06:36:35 +0000 2017", 381 | "favourites_count": 7075, 382 | "utc_offset": 32400, 383 | "time_zone": "Tokyo", 384 | "geo_enabled": false, 385 | "verified": true, 386 | "statuses_count": 1035, 387 | "lang": "ja", 388 | "contributors_enabled": false, 389 | "is_translator": false, 390 | "is_translation_enabled": false, 391 | "profile_background_color": "000000", 392 | "profile_background_image_url": "http://abs.twimg.com/images/themes/theme1/bg.png", 393 | "profile_background_image_url_https": "https://abs.twimg.com/images/themes/theme1/bg.png", 394 | "profile_background_tile": false, 395 | "profile_image_url": "http://pbs.twimg.com/profile_images/879071738625232901/u0nlrr4Y_normal.jpg", 396 | "profile_image_url_https": "https://pbs.twimg.com/profile_images/879071738625232901/u0nlrr4Y_normal.jpg", 397 | "profile_banner_url": "https://pbs.twimg.com/profile_banners/842625693733203968/1492509582", 398 | "profile_link_color": "E81C4F", 399 | "profile_sidebar_border_color": "000000", 400 | "profile_sidebar_fill_color": "000000", 401 | "profile_text_color": "000000", 402 | "profile_use_background_image": false, 403 | "has_extended_profile": true, 404 | "default_profile": false, 405 | "default_profile_image": false, 406 | "following": null, 407 | "follow_request_sent": null, 408 | "notifications": null, 409 | "translator_type": "none" 410 | }, 411 | "geo": null, 412 | "coordinates": null, 413 | "place": null, 414 | "contributors": null, 415 | "is_quote_status": false, 416 | "retweet_count": 226, 417 | "favorite_count": 1356, 418 | "favorited": false, 419 | "retweeted": false, 420 | "possibly_sensitive": false, 421 | "lang": "ja" 422 | }, 423 | { 424 | "created_at": "Mon Feb 26 01:07:05 +0000 2018", 425 | "id": 967928974960545793, 426 | "id_str": "967928974960545793", 427 | "text": "Congratulations to #Olympics athletes who won gold! 
Neutron stars like the one at the heart of the Crab Nebula may… https://t.co/vz4SnPupe2", 428 | "truncated": true, 429 | "entities": { 430 | "hashtags": [ 431 | { 432 | "text": "Olympics", 433 | "indices": [ 434 | 19, 435 | 28 436 | ] 437 | } 438 | ], 439 | "symbols": [], 440 | "user_mentions": [], 441 | "urls": [ 442 | { 443 | "url": "https://t.co/vz4SnPupe2", 444 | "expanded_url": "https://twitter.com/i/web/status/967928974960545793", 445 | "display_url": "twitter.com/i/web/status/9…", 446 | "indices": [ 447 | 116, 448 | 139 449 | ] 450 | } 451 | ] 452 | }, 453 | "metadata": { 454 | "result_type": "popular", 455 | "iso_language_code": "en" 456 | }, 457 | "source": "Media Studio", 458 | "in_reply_to_status_id": null, 459 | "in_reply_to_status_id_str": null, 460 | "in_reply_to_user_id": null, 461 | "in_reply_to_user_id_str": null, 462 | "in_reply_to_screen_name": null, 463 | "user": { 464 | "id": 19802879, 465 | "id_str": "19802879", 466 | "name": "NASA JPL", 467 | "screen_name": "NASAJPL", 468 | "location": "Pasadena, Calif.", 469 | "description": "NASA Jet Propulsion Laboratory manages many of NASA's robotic missions exploring Earth, the solar system and our universe. 
Tweets from JPL's News Office.", 470 | "url": "http://t.co/gcM9d1YLUB", 471 | "entities": { 472 | "url": { 473 | "urls": [ 474 | { 475 | "url": "http://t.co/gcM9d1YLUB", 476 | "expanded_url": "http://www.jpl.nasa.gov", 477 | "display_url": "jpl.nasa.gov", 478 | "indices": [ 479 | 0, 480 | 22 481 | ] 482 | } 483 | ] 484 | }, 485 | "description": { 486 | "urls": [] 487 | } 488 | }, 489 | "protected": false, 490 | "followers_count": 2566921, 491 | "friends_count": 379, 492 | "listed_count": 15065, 493 | "created_at": "Sat Jan 31 03:19:43 +0000 2009", 494 | "favourites_count": 1281, 495 | "utc_offset": -32400, 496 | "time_zone": "Alaska", 497 | "geo_enabled": false, 498 | "verified": true, 499 | "statuses_count": 6328, 500 | "lang": "en", 501 | "contributors_enabled": false, 502 | "is_translator": false, 503 | "is_translation_enabled": false, 504 | "profile_background_color": "0B090B", 505 | "profile_background_image_url": "http://pbs.twimg.com/profile_background_images/8479565/twitter_jpl_bkg.009.jpg", 506 | "profile_background_image_url_https": "https://pbs.twimg.com/profile_background_images/8479565/twitter_jpl_bkg.009.jpg", 507 | "profile_background_tile": false, 508 | "profile_image_url": "http://pbs.twimg.com/profile_images/2305452633/lg0hov3l8g4msxbdwv48_normal.jpeg", 509 | "profile_image_url_https": "https://pbs.twimg.com/profile_images/2305452633/lg0hov3l8g4msxbdwv48_normal.jpeg", 510 | "profile_banner_url": "https://pbs.twimg.com/profile_banners/19802879/1398298134", 511 | "profile_link_color": "0D1787", 512 | "profile_sidebar_border_color": "100F0E", 513 | "profile_sidebar_fill_color": "74A6CD", 514 | "profile_text_color": "0C0C0D", 515 | "profile_use_background_image": true, 516 | "has_extended_profile": false, 517 | "default_profile": false, 518 | "default_profile_image": false, 519 | "following": null, 520 | "follow_request_sent": null, 521 | "notifications": null, 522 | "translator_type": "none" 523 | }, 524 | "geo": null, 525 | "coordinates": null, 526 
| "place": null, 527 | "contributors": null, 528 | "is_quote_status": false, 529 | "retweet_count": 325, 530 | "favorite_count": 1280, 531 | "favorited": false, 532 | "retweeted": false, 533 | "possibly_sensitive": false, 534 | "lang": "en" 535 | } 536 | ], 537 | "search_metadata": { 538 | "completed_in": 0.057, 539 | "max_id": 0, 540 | "max_id_str": "0", 541 | "next_results": "?max_id=967574182522482687&q=nasa&include_entities=1&result_type=popular", 542 | "query": "nasa", 543 | "count": 3, 544 | "since_id": 0, 545 | "since_id_str": "0" 546 | } 547 | } -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # cbor-x 2 | [![license](https://img.shields.io/badge/license-MIT-brightgreen)](LICENSE) 3 | [![npm version](https://img.shields.io/npm/v/cbor-x.svg?style=flat-square)](https://www.npmjs.org/package/cbor-x) 4 | [![encode](https://img.shields.io/badge/encode-1.5GB%2Fs-yellow)](benchmark.md) 5 | [![decode](https://img.shields.io/badge/decode-2GB%2Fs-yellow)](benchmark.md) 6 | [![types](https://img.shields.io/npm/types/cbor-x)](README.md) 7 | [![module](https://img.shields.io/badge/module-ESM%2FCJS-blue)](README.md) 8 | [![license](https://img.shields.io/badge/license-MIT-brightgreen)](LICENSE) 9 | 10 | 11 | 12 | The cbor-x package is an extremely fast and conformant CBOR NodeJS/JavaScript implementation. Currently, it is over 3-10x faster than any other CBOR JS implementation (including cbor-js and cborg) and faster than most MessagePack encoders, Avro, and generally faster than native V8 JSON.stringify/parse, on NodeJS. 
It implements the CBOR format as specified in [RFC-8949](https://www.rfc-editor.org/rfc/rfc8949.html),
30 | 31 | ### Streams 32 | We can use the including streaming functionality (which further improves performance). The `EncoderStream` is a NodeJS transform stream that can be used to serialize objects to a binary stream (writing to network/socket, IPC, etc.), and the `DecoderStream` can be used to deserialize objects from a binary stream (reading from network/socket, etc.): 33 | 34 | ```JavaScript 35 | import { EncoderStream } from 'cbor-x'; 36 | let stream = new EncoderStream(); 37 | stream.write(myData); 38 | 39 | ``` 40 | Or for a full example of sending and receiving data on a stream: 41 | ```JavaScript 42 | import { EncoderStream } from 'cbor-x'; 43 | let sendingStream = new EncoderStream(); 44 | let receivingStream = new DecoderStream(); 45 | // we are just piping to our own stream, but normally you would send and 46 | // receive over some type of inter-process or network connection. 47 | sendingStream.pipe(receivingStream); 48 | sendingStream.write(myData); 49 | receivingStream.on('data', (data) => { 50 | // received data 51 | }); 52 | ``` 53 | The `EncoderStream` and `DecoderStream` instances will have also the record structure extension enabled by default (see below). 54 | 55 | ### Iterables 56 | In addition to using CBOR with streams, CBOR can also encode to an iterable that can be iterated as a sequence of binary chunks with `encodeAsIterable`, which facilitates progressive encoding: 57 | ```JavaScript 58 | import { encodeAsIterable } from 'cbor-x'; 59 | 60 | for (let binaryChunk of encodeAsIterable(data)){ 61 | // progressively get binary chunks as data is encoded 62 | } 63 | ``` 64 | And `encodeAsAsyncIterable` is also available, which returns an async iterable, and can be used to encode data from async iterables as well as Blob data. 65 | 66 | ```JavaScript 67 | import { encodeAsAsyncIterable } from 'cbor-x'; 68 | 69 | let data = { blob: new Blob(...) 
}; 70 | for await (let binaryChunk of encodeAsAsyncIterable(data)){ 71 | // progressively get binary chunks as asynchronous data source is encoded 72 | } 73 | ``` 74 | 75 | ## Deno Usage 76 | Cbor-x modules are standard ESM modules and can be loaded directly from the [deno.land registry for cbor](https://deno.land/x/cbor) for use in Deno. The standard pack/encode and unpack/decode functionality is available on Deno, like other platforms. 77 | 78 | ## Browser Usage 79 | Cbor-x works as standalone JavaScript as well, and runs on modern browsers. It includes a bundled script, at `dist/index.js` for ease of direct loading: 80 | ```HTML 81 | 82 | ``` 83 | 84 | This is UMD based, and will register as a module if possible, or create a `CBOR` global with all the exported functions. 85 | 86 | For module-based development, it is recommended that you directly import the module of interest, to minimize dependencies that get pulled into your application: 87 | ```JavaScript 88 | import { decode } from 'cbor-x/decode' // if you only need to decode 89 | ``` 90 | 91 | ## Structured Cloning 92 | You can also use cbor-x for [structured cloning](https://html.spec.whatwg.org/multipage/structured-data.html). By enabling the `structuredClone` option, you can include references to other objects or cyclic references, and object identity will be preserved.For example: 93 | ```JavaScript 94 | let obj = { 95 | }; 96 | obj.self = obj; 97 | let encoder = new Encoder({ structuredClone: true }); 98 | let serialized = encoder.encode(obj); 99 | let copy = encoder.decode(serialized); 100 | copy.self === copy // true 101 | 102 | ``` 103 | 104 | This option is disabled by default because reference checking degrades performance (by about 25-30%). (Note this implementation doesn't serialize every class/type specified in the HTML specification since not all of them make sense for storing across platforms.) 
105 | 106 | cbor-x also preserves certain typed objects like `Error`, `Set`, `RegExp` and TypedArray instances, using [registered CBOR tag extensions](https://www.iana.org/assignments/cbor-tags/cbor-tags.xhtml). This works with or without structured cloning enabled. 107 | 108 | ## Record / Object Structures 109 | There is a critical difference between maps (or dictionaries) that hold an arbitrary set of keys and values (JavaScript `Map` is designed for these), and records or object structures that have a well-defined set of fields. Typical JS objects/records may have many instances re(use) the same structure. By using the record extension, this distinction is preserved in CBOR and the encoding can reuse structures and not only provides better type preservation, but yield much more compact encodings and increase decoding performance by 2-3x. Cbor-x automatically generates record definitions that are reused and referenced by objects with the same structure. Records use CBOR's tags to align well CBOR's tag/extension mechanism. There are a number of ways to use this to our advantage. For large object structures with repeating nested objects with similar structures, simply serializing with the record extension can yield significant benefits. To use the record structures extension, we create a new `Encoder` instance. By default a new `Encoder` instance will have the record extension enabled: 110 | ```JavaScript 111 | import { Encoder } from 'cbor-x'; 112 | let encoder = new Encoder(); 113 | encoder.encode(myBigData); 114 | ``` 115 | 116 | Another way to further leverage the benefits of the cbor-x record structures is to use streams that naturally allow for data to reuse based on previous record structures. The stream classes have the record structure extension enabled by default and provide excellent out-of-the-box performance. 
117 | 118 | When creating a new `Encoder`, `EncoderStream`, or `DecoderStream` instance, we can enable or disable the record structure extension with the `objectsAsMaps` property. When this is `true`, the record structure extension will be disabled, and all objects will revert to being serialized using MessageMap `map`s, and all `map`s will be deserialized to JS `Object`s as properties (like the standalone `encode` and `decode` functions). 119 | 120 | Streaming with record structures works by encoding a structure the first time it is seen in a stream and referencing the structure in later messages that are sent across that stream. When an encoder can expect a decoder to understand previous structure references, this can be configured using the `sequential: true` flag, which is auto-enabled by streams, but can also be used with Packr instances. 121 | 122 | ### Shared Record Structures 123 | Another useful way of using cbor-x, and the record extension, is for storing data in a databases, files, or other storage systems. If a number of objects with common data structures are being stored, a shared structure can be used to greatly improve data storage and deserialization efficiency. In the simplest form, provide a `structures` array, which is updated if any new object structure is encountered: 124 | 125 | ```JavaScript 126 | import { Encoder } from 'cbor-x'; 127 | let encoder = new Encoder({ 128 | structures: [... structures that were last generated ...] 129 | }); 130 | ``` 131 | If you are working with persisted data, you will need to persist the `structures` data when it is updated. 
Cbor-x provides an API for loading and saving the `structures` on demand (which is robust and can be used in multiple-process situations where other processes may be updating this same `structures` array), we just need to provide a way to store the generated shared structure so it is available to deserialize stored data in the future: 132 | ```JavaScript 133 | import { Encoder } from 'cbor-x'; 134 | let encoder = new Encoder({ 135 | getStructures() { 136 | // storing our data in file (but we could also store in a db or key-value store) 137 | return decode(readFileSync('my-shared-structures.cbor')) || []; 138 | }, 139 | saveStructures(structures) { 140 | writeFileSync('my-shared-structures.cbor', encode(structures)) 141 | }, 142 | structures: [] 143 | }); 144 | ``` 145 | Cbor-x will automatically add and saves structures as it encounters any new object structures (up to a limit of 64). It will always add structures in incremental/compatible way: Any object encoded with an earlier structure can be decoded with a later version (as long as it is persisted). 146 | 147 | ### Reading Multiple Values 148 | If you have a buffer with multiple values sequentially encoded, you can choose to parse and read multiple values. This can be done using the `decodeMultiple` function/method, which can return an array of all the values it can sequentially parse within the provided buffer. 
For example: 149 | ```js 150 | let data = new Uint8Array([1, 2, 3]) // encodings of values 1, 2, and 3 151 | let values = decodeMultiple(data) // [1, 2, 3] 152 | ``` 153 | Alternately, you can provide a callback function that is called as the parsing occurs with each value, and can optionally terminate the parsing by returning `false`: 154 | ```js 155 | let data = new Uint8Array([1, 2, 3]) // encodings of values 1, 2, and 3 156 | decodeMultiple(data, (value) => { 157 | // called for each value 158 | // return false if you wish to end the parsing 159 | }) 160 | ``` 161 | ### KeyMaps for Senml 162 | KeyMaps can be used to remap properties of source Objects and Maps to numerical equivalents for more efficient encoding. 163 | The principle driver for this feature is to support `application/senml+cbor`content-encoding as defined in https://datatracker.ietf.org/doc/html/rfc8428#section-6 for use in LWM2M application (see http://www.openmobilealliance.org/release/LightweightM2M/V1_2-20201110-A/HTML-Version/OMA-TS-LightweightM2M_Core-V1_2-20201110-A.html#7-4-7-0-747-SenML-CBOR) 164 | 165 | Records are also supported in conjunction with keyMaps, but these are disabled by default when keyMaps are specified as use of the two features does not introduce any additional compression efficiency unless that the data arrays are quite large (> 10 items). 
166 | 167 | ```JavaScript 168 | import { Decoder, Encoder } from 'cbor-x' 169 | const data = [ 170 | { bn: '/3303/0/5700', bt: 1278887, v: 35.5 }, 171 | { t: 10, v: 34 }, 172 | { t: 20, v: 33 }, 173 | { t: 30, v: 32 }, 174 | { t: 40, v: 31 }, 175 | { t: 50, v: 30 } 176 | ] 177 | 178 | let senmlKeys = { bs: -6, bv: -5, bu: -4, bt: -3, bn: -2, bver: -1, n: 0, u: 1, v: 2, vs: 3, vb: 4, s: 5, t: 6, ut: 7, vd: 8}} 179 | let senmlCbor = new Encoder({ keyMap: senmlKeys }) 180 | let basicCbor = new Encoder() 181 | let senmlBuff = senmlCbor.encode(data) 182 | let basicBuff = basicCbor.encode(data) 183 | console.log('Senml CBOR size:', senmlBuff.length) // 77 184 | console.log('Basic CBOR size:', basicBuff.length) // 90 185 | let senmlDecoder = new Decoder({ keyMap: senmlKeys }); 186 | assert.deepEqual(senmlDecoder.decode(senmlBuff), data) 187 | 188 | ``` 189 | 190 | ### CBOR Packing 191 | [Packed CBOR](https://datatracker.ietf.org/doc/html/draft-ietf-cbor-packed) is additional specification for CBOR which allows for compact encoding of data that has repeated values. Cbor-x supports decoding packed CBOR, no flags or options needed. Cbor-x can also optionally generate packed CBOR (with the `pack` option), which will cause the encoder to look for repeated strings in a data structure that is being encoded, and store the strings in a packed table that can be referenced, to reduce encoding size. This involves extra overhead and reduces encoding performance, and generally does not yield as much compaction as standard compression tools. However, this is can be much faster than encoding plus compression, while still providing some level of reduction in encoding size. In addition to size reduction, packed CBOR is also usually faster to decode (assuming that some repetitive values could be found/packed). 192 | 193 | Cbor-x also has in-progress effort to support shared packed tables. 
194 | 195 | 196 | ## Options 197 | The following options properties can be provided to the Encoder or Decoder constructor: 198 | 199 | * `keyMap` - This can be set to an object which will be used to map keys in the source Object or Map to other keys including integers. This allows for more efficient encoding, and enables support for numeric cbar tag encodings such as used by `application/senml+cbor` (https://datatracker.ietf.org/doc/html/rfc8428#section-6) 200 | * `useRecords` - Setting this to `false` disables the record extension and stores JavaScript objects as CBOR maps (with tag 259), and decodes maps as JavaScript `Object`s, which ensures compatibilty with other decoders. 201 | * `structures` - Provides the array of structures that is to be used for record extension, if you want the structures saved and used again. This array will be modified in place with new record structures that are serialized (if less than 64 structures are in the array). 202 | * `structuredClone` - This enables the structured cloning extensions that will encode object/cyclic references and additional built-in types/classes. 203 | * `mapsAsObjects` - If `true`, this will decode CBOR maps and JS `Object`s with the map entries decoded to object properties. If `false`, maps are decoded as JavaScript `Map`s. This is disabled by default if `useRecords` is enabled (`Map`s are preserved since they are distinct from records), and is enabled by default if `useRecords` is disabled. 204 | * `useFloat32` - This will enable cbor-x to encode non-integer numbers as 32-bit (4 byte) floating point numbers. See next section for possible values. 205 | * `alwaysUseFloat` - This will force cbor-x to encode any number, including integers, as floating-point numbers. 206 | * `pack` - This will enable [CBOR packing](https://datatracker.ietf.org/doc/html/draft-ietf-cbor-packed) for encoding, as described above. 
207 | * `variableMapSize` - This will use varying map size definition (from single-byte to full 32-bit representation) based on the number of keys when encoding objects, which yields slightly more compact encodings (for small objects), but is typically 5-10% slower during encoding. This is only relevant when record extension is disabled. 208 | * `copyBuffers` - When decoding a CBOR message with binary data (Buffers are encoded as binary data), copy the buffer rather than providing a slice/view of the buffer. If you want your input data to be collected or modified while the decoded embedded buffer continues to live on, you can use this option (there is extra overhead to copying). 209 | * `bundleStrings` - If `true` this uses a custom extension that bundles strings together, so that they can be decoded more quickly on browsers and Deno that do not have access to the NodeJS addon. This a custom extension, so both encoder and decoder need to support this. This can yield significant decoding performance increases on browsers (30%-50%). 210 | * `useTimestamp32` - Encode JS `Date`s in 32-bit format when possible by dropping the milliseconds. This is a more efficient encoding of dates. You can also cause dates to use 32-bit format by manually setting the milliseconds to zero (`date.setMilliseconds(0)`). 211 | * `sequential` - Encode structures in serialized data, and reference previously encoded structures with expectation that decoder will read the encoded structures in the same order as encoded, with `unpackMultiple`. 212 | * `largeBigIntToFloat` - If a bigint needs to be encoded that is larger than will fit in 64-bit integers, it will be encoded as a float-64 (otherwise will throw a RangeError). 213 | * `useTag259ForMaps` - This flag indicates if [tag 259 (explicit maps)](https://github.com/shanewholloway/js-cbor-codec/blob/master/docs/CBOR-259-spec--explicit-maps.md) should be used to encode JS `Map`s. 
When using records is enabled, this is disabled by default, since plain objects are encoded with record structures and unambigiously differentiated from `Map`s, which are encoded as CBOR maps. Without using records, this enabled by default and is necessary to distinguish plain objects from `Map`s (but can be disabled by setting this to `false`). 214 | * `tagUint8Array` - Indicates if tag 64 should be used for `Uint8Array`s. 215 | * `int64AsNumber` - This will decode uint64 and int64 numbers as standard JS numbers rather than as bigint numbers. 216 | * `skipFunction` - This skip functions in encode object. 217 | 218 | ### 32-bit Float Options 219 | By default all non-integer numbers are serialized as 64-bit float (double). This is fast, and ensures maximum precision. However, often real-world data doesn't not need 64-bits of precision, and using 32-bit encoding can be much more space efficient. There are several options that provide more efficient encodings. Using the decimal rounding options for encoding and decoding provides lossless storage of common decimal representations like 7.99, in more efficient 32-bit format (rather than 64-bit). The `useFloat32` property has several possible options, available from the module as constants: 220 | ```JavaScript 221 | import { ALWAYS, DECIMAL_ROUND, DECIMAL_FIT } from 'cbor-x' 222 | ``` 223 | 224 | * `ALWAYS` (1) - Always will encode non-integers (absolute less than 2147483648) as 32-bit float. 225 | * `DECIMAL_ROUND` (3) - Always will encode non-integers as 32-bit float, and when decoding 32-bit float, round to the significant decimal digits (usually 7, but 6 or 8 digits for some ranges). 226 | * `DECIMAL_FIT` (4) - Only encode non-integers as 32-bit float if all significant digits (usually up to 7) can be unamiguously encoded as a 32-bit float, and decode with decimal rounding (same as above). This will ensure round-trip encoding/decoding without loss in precision and uses 32-bit when possible. 
227 | 228 | Note, that the performance is decreased with decimal rounding by about 20-25%, although if only 5% of your values are floating point, that will only have about a 1% impact overall. 229 | 230 | In addition, msgpackr exports a `roundFloat32(number)` function that can be used to round floating point numbers to the maximum significant decimal digits that can be stored in 32-bit float, just as DECIMAL_ROUND does when decoding. This can be useful for determining how a number will be decoded prior to encoding it. 231 | 232 | ### Setting Size Limits 233 | You can set size limits on objects, arrays, and maps to prevent resource exhaustion when decoding. This can be done by calling the setMaxLimits export. Each of the properties are optional (only provide 234 | the properties you want to change), for example (with the defaults): 235 | ```JavaScript 236 | import { setMaxLimits } from 'cbor-x'; 237 | setMaxLimits({ 238 | maxArraySize: 112810000, 239 | maxMapSize: 16810000, 240 | maxObjectSize : 16710000 241 | }); 242 | ``` 243 | 244 | ## Performance 245 | Cbor-x is fast. Really fast. Here is comparison with the next fastest JS projects using the benchmark tool from `msgpack-lite` (and the sample data is from some clinical research data we use that has a good mix of different value types and structures). It also includes comparison to V8 native JSON functionality, and JavaScript Avro (`avsc`, a very optimized Avro implementation): 246 | 247 | ### Native Acceleration 248 | Cbor-x employs an optional native node-addon to accelerate the parsing of strings. This should be automatically installed and utilized on NodeJS. However, you can verify this by checking the `isNativeAccelerationEnabled` property that is exported from cbor-x. 
If this is `false`, the `cbor-extract` package may not have been properly installed, and you may want to verify that it is installed correctly: 249 | ```js 250 | import { isNativeAccelerationEnabled } from 'cbor-x' 251 | if (!isNativeAccelerationEnabled) 252 | console.warn('Native acceleration not enabled, verify that install finished properly') 253 | ``` 254 | 255 | 256 | operation | op | ms | op/s 257 | ---------------------------------------------------------- | ------: | ----: | -----: 258 | buf = Buffer(JSON.stringify(obj)); | 78200 | 5004 | 15627 259 | obj = JSON.parse(buf); | 89600 | 5003 | 17909 260 | require("cbor-x").encode(obj); | 163100 | 5001 | 32613 261 | require("cbor-x").decode(buf); | 100200 | 5004 | 20023 262 | cbor-x w/ shared structures: packr.encode(obj); | 178300 | 5002 | 35645 263 | cbor-x w/ shared structures: packr.decode(buf); | 414000 | 5000 | 82800 264 | buf = require("cbor").encode(obj); | 7800 | 5016 | 1555 265 | obj = require("cbor").decode(buf); | 3200 | 5087 | 629 266 | buf = require("cbor-sync").encode(obj); | 18600 | 5012 | 3711 267 | obj = require("cbor-sync").decode(buf); | 20000 | 5020 | 3984 268 | buf = require("msgpack-lite").encode(obj); | 30900 | 5013 | 6163 269 | obj = require("msgpack-lite").decode(buf); | 15800 | 5012 | 3152 270 | buf = require("notepack").encode(obj); | 62600 | 5006 | 12504 271 | obj = require("notepack").decode(buf); | 33700 | 5007 | 6730 272 | require("avsc")...make schema/type...type.toBuffer(obj); | 86900 | 5002 | 17373 273 | require("avsc")...make schema/type...type.fromBuffer(obj); | 106100 | 5000 | 21220 274 | 275 | All benchmarks were performed on Node 14.8.0 (Windows i7-4770 3.4Ghz). 276 | (`avsc` is schema-based and more comparable in style to cbor-x with shared structures). 
277 | 278 | Here is a benchmark of streaming data (again borrowed from `msgpack-lite`'s benchmarking), where cbor-x is able to take advantage of the structured record extension and really demonstrate its performance capabilities: 279 | 280 | operation (1000000 x 2) | op | ms | op/s 281 | ------------------------------------------------ | ------: | ----: | -----: 282 | new EncoderStream().write(obj); | 1000000 | 372 | 2688172 283 | new DecoderStream().write(buf); | 1000000 | 247 | 4048582 284 | stream.write(msgpack.encode(obj)); | 1000000 | 2898 | 345065 285 | stream.write(msgpack.decode(buf)); | 1000000 | 1969 | 507872 286 | stream.write(notepack.encode(obj)); | 1000000 | 901 | 1109877 287 | stream.write(notepack.decode(buf)); | 1000000 | 1012 | 988142 288 | msgpack.Encoder().on("data",ondata).encode(obj); | 1000000 | 1763 | 567214 289 | msgpack.createDecodeStream().write(buf); | 1000000 | 2222 | 450045 290 | msgpack.createEncodeStream().write(obj); | 1000000 | 1577 | 634115 291 | msgpack.Decoder().on("data",ondata).decode(buf); | 1000000 | 2246 | 445235 292 | 293 | See the [benchmark.md](benchmark.md) for more benchmarks and information about benchmarking. 294 | 295 | ## Custom Extensions 296 | You can add your own custom extensions, which can be used to encode specific types/classes in certain ways. This is done by using the `addExtension` function, and specifying the class, extension type code (custom extensions should be a number greater than 40500, all others are reserved for CBOR or cbor-x), and your encode and decode functions (or just the one you need). 
You can use cbor-x encoding and decoding within your extensions: 297 | ```JavaScript 298 | import { addExtension, Encoder } from 'cbor-x'; 299 | 300 | class MyCustomClass {...} 301 | 302 | let extEncoder = new Encoder(); 303 | addExtension({ 304 | Class: MyCustomClass, 305 | tag: 43311, // register our own extension code (a tag code) 306 | encode(instance, encode) { 307 | // define how your custom class should be encoded 308 | encode(instance.myData); // return a buffer 309 | } 310 | decode(data) { 311 | // define how your custom class should be decoded 312 | let instance = new MyCustomClass(); 313 | instance.myData = data 314 | return instance; // decoded value from buffer 315 | } 316 | }); 317 | ``` 318 | 319 | ## Unknown Tags 320 | If no extension is registered for a tag, the decoder will return an instance of the `Tag` class, where the value provided for the tag will be available in the `value` property of the `Tag` instance. The `Tag` class is an export of the package and decode module. 321 | 322 | ### CBOR Compliance 323 | The cbor-x package is designed to encode and decode to the CBOR extended generic data model, implementing extensions to support the extended model, and will generally attempt to use preferred serializations where feasible. When duplicate keys are encountered in maps, previous entries will be lost, and the final entry is preserved. 324 | 325 | ### Additional Performance Optimizations 326 | Cbor-x is already fast, but here are some tips for making it faster. 327 | 328 | #### Arena Allocation (`useBuffer()`) 329 | During the serialization process, data is written to buffers. Again, allocating new buffers is a relatively expensive process, and the `useBuffer` method can help allow reuse of buffers that will further improve performance. With `useBuffer` method, you can provide a buffer, serialize data into it, and when it is known that you are done using that buffer, you can call `useBuffer` again to reuse it. 
The use of `useBuffer` is never required, buffers will still be handled and cleaned up through GC if not used, it just provides a small performance boost. 330 | 331 | ## Extensions 332 | Cbor-x currently uses tag id 105 and 26880-27135 for its [proposed extension for records](https://github.com/kriszyp/cbor-records). 333 | 334 | ### Dates 335 | Cbor-x saves all JavaScript `Date`s using the standard CBOR date extension (tag 1). 336 | 337 | ### Structured Cloning 338 | With structured cloning enabled, cbor-x will also use tags/extensions to store Set, Map, Error, RegExp, ArrayBufferView objects and preserve their types. 339 | 340 | ### List of supported tags for decoding 341 | Here is a list of CBOR tags that are supported for decoding: 342 | 343 | * 0 - String date 344 | * 1 - Numeric Date 345 | * 2 - BigInt 346 | * 3 - Negative BigInt 347 | * 6 - Packed string reference 348 | * 27 - Generic named objects (used for Error, RegExp) 349 | * 28, 29 - Value sharing/object referencing 350 | * 51 - Packed table 351 | * 64 - Uint8Array 352 | * 68 - Uint8ClampedArray 353 | * 69 - Uint16Array 354 | * 70 - Uint32Array 355 | * 71 - BigUint64Array 356 | * 72 - Int8Array 357 | * 77 - Int16Array 358 | * 78 - Int32Array 359 | * 79 - BigInt64Array 360 | * 81 - Float32Array 361 | * 82 - Float64Array 362 | * 105 - Records 363 | * 258 - Set 364 | * 259 - Map 365 | * 57344 - 57599 - Records 366 | 367 | ## Alternate Encoding/Package 368 | The high-performance serialization and deserialization algorithms in this package are also available in the [msgpackr](https://github.com/kriszyp/msgpackr) for the MessagePack format, with the same API and design. A quick summary of the pros and cons of using MessagePack vs CBOR are: 369 | * MessagePack has wider adoption and msgpackr has broader usage. 370 | * CBOR has an [official IETF standardization track](https://www.rfc-editor.org/rfc/rfc8949.html), and the record extensions is conceptually/philosophically a better fit for CBOR tags. 
371 | 372 | ## License 373 | 374 | MIT 375 | 376 | ### Browser Consideration 377 | CBOR can be a great choice for high-performance data delivery to browsers, as reasonable data size is possible without compression. And CBOR works very well in modern browsers. However, it is worth noting that if you want highly compact data, brotli or gzip are most effective in compressing, and CBOR's character frequency tends to defeat Huffman encoding used by these standard compression algorithms, often resulting in less compact data than compressed JSON. 378 | 379 | ### Credits 380 | 381 | Various projects have been inspirations for this, and code has been borrowed from https://github.com/msgpack/msgpack-javascript and https://github.com/mtth/avsc. 382 | -------------------------------------------------------------------------------- /tests/test.js: -------------------------------------------------------------------------------- 1 | import * as CBOR from '../node-index.js' 2 | import chai from 'chai' 3 | import { readFileSync } from 'fs' 4 | const sampleData = JSON.parse(readFileSync(new URL('./example4.json', import.meta.url))) 5 | 6 | const senmlData = [ 7 | { bn: '/3303/0/5700', bt: 1278887, v: 35.5 }, 8 | { t: 10, v: 34 }, 9 | { t: 20, v: 33 }, 10 | { t: 30, v: 32 }, 11 | { t: 40, v: 31 }, 12 | { t: 50, v: 30 } 13 | ] 14 | 15 | const senmlKeys = { bs: -6, bv: -5, bu: -4, bt: -3, bn: -2, n: 0, u: 1, v: 2, vs: 3, t: 6, ut: 7, vd: 8 } 16 | 17 | //import inspector from 'inspector'; inspector.open(9229, null, true); debugger 18 | function tryRequire(module) { 19 | try { 20 | return require(module) 21 | } catch(error) { 22 | return {} 23 | } 24 | } 25 | var assert = chai.assert 26 | 27 | var Encoder = CBOR.Encoder 28 | var EncoderStream = CBOR.EncoderStream 29 | var DecoderStream = CBOR.DecoderStream 30 | var decode = CBOR.decode 31 | var encode = CBOR.encode 32 | var encodeAsIterable = CBOR.encodeAsIterable 33 | var encodeAsAsyncIterable = CBOR.encodeAsAsyncIterable 34 | var 
DECIMAL_FIT = CBOR.DECIMAL_FIT 35 | 36 | var addExtension = CBOR.addExtension 37 | 38 | var zlib = tryRequire('zlib') 39 | var deflateSync = zlib.deflateSync 40 | var inflateSync = zlib.inflateSync 41 | var deflateSync = zlib.brotliCompressSync 42 | var inflateSync = zlib.brotliDecompressSync 43 | var constants = zlib.constants 44 | try { 45 | // var { decode, encode } = require('msgencode-lite') 46 | } catch (error) {} 47 | 48 | var ITERATIONS = 4000 49 | 50 | suite('CBOR basic tests', function(){ 51 | test('encode/decode with keyMaps (basic)', function() { 52 | var data = senmlData 53 | let cborSenml = new Encoder({ useRecords: false, keyMap: senmlKeys }) 54 | let cborBasic = new Encoder() 55 | var serialized = cborSenml.encode(data) 56 | var deserialized = cborSenml.decode(serialized) 57 | assert(serialized.length < cborBasic.encode(data).length) 58 | assert.deepEqual(deserialized, data) 59 | }) 60 | 61 | test('encode/decode with keyMaps and Records)', function() { 62 | var data = senmlData 63 | let cborSenml = new Encoder({ useRecords: true, keyMap: senmlKeys }) 64 | let cborBasic = new Encoder() 65 | var serialized = cborSenml.encode(data) 66 | var deserialized = cborSenml.decode(serialized) 67 | assert(serialized.length < cborBasic.encode(data).length) 68 | assert.deepEqual(deserialized, data) 69 | }) 70 | 71 | test('encode/decode data', function(){ 72 | var data = { 73 | data: [ 74 | { a: 1, name: 'one', type: 'odd', isOdd: true }, 75 | { a: 2, name: 'two', type: 'even'}, 76 | { a: 3, name: 'three', type: 'odd', isOdd: true }, 77 | { a: 4, name: 'four', type: 'even'}, 78 | { a: 5, name: 'five', type: 'odd', isOdd: true }, 79 | { a: 6, name: 'six', type: 'even', isOdd: null } 80 | ], 81 | description: 'some names', 82 | types: ['odd', 'even'], 83 | convertEnumToNum: [ 84 | { prop: 'test' }, 85 | { prop: 'test' }, 86 | { prop: 'test' }, 87 | { prop: 1 }, 88 | { prop: 2 }, 89 | { prop: [undefined] }, 90 | { prop: null } 91 | ] 92 | } 93 | let structures = [] 94 
| let encoder = new Encoder({ structures }) 95 | var serialized = encoder.encode(data) 96 | serialized = encoder.encode(data) 97 | var deserialized = encoder.decode(serialized) 98 | assert.deepEqual(deserialized, data) 99 | }) 100 | 101 | test('mixed structures, shared', function(){ 102 | let data1 = { a: 1, b: 2, c: 3 } 103 | let data2 = { a: 1, b: 2, d: 4 } 104 | let data3 = { a: 1, b: 2, e: 5 } 105 | let structures = [] 106 | let encoder = new Encoder({ structures }) 107 | var serialized = encoder.encode(data1) 108 | var deserialized = encoder.decode(serialized) 109 | assert.deepEqual(deserialized, data1) 110 | var serialized = encoder.encode(data2) 111 | var deserialized = encoder.decode(serialized) 112 | assert.deepEqual(deserialized, data2) 113 | var serialized = encoder.encode(data3) 114 | var deserialized = encoder.decode(serialized) 115 | assert.deepEqual(deserialized, data3) 116 | }) 117 | 118 | test('mixed structures, unshared', function(){ 119 | let data = [] 120 | let encoder = new Encoder({ }) 121 | for (let i = 0; i< 1000; i++) { 122 | data.push({a: 1, ['test' + i]: i}) 123 | } 124 | var serialized = encoder.encode(data) 125 | var deserialized = encoder.decode(serialized) 126 | assert.deepEqual(deserialized, data) 127 | serialized = encoder.encode(data) 128 | deserialized = encoder.decode(serialized) 129 | assert.deepEqual(deserialized, data) 130 | }) 131 | 132 | test('mixed array', function(){ 133 | var data = [ 134 | 'one', 135 | 'two', 136 | 'one', 137 | 10, 138 | 11, 139 | null, 140 | true, 141 | 'three', 142 | 'three', 143 | 'one', [ 144 | 3, -5, -50, -400,1.3, -5.3, true 145 | ] 146 | ] 147 | let structures = [] 148 | let encoder = new Encoder({ structures }) 149 | var serialized = encoder.encode(data) 150 | var deserialized = encoder.decode(serialized) 151 | assert.deepEqual(deserialized, data) 152 | }) 153 | 154 | test('255 chars', function() { 155 | const data = 
'RRZG9A6I7xupPeOZhxcOcioFsuhszGOdyDUcbRf4Zef2kdPIfC9RaLO4jTM5JhuZvTsF09fbRHMGtqk7YAgu3vespeTe9l61ziZ6VrMnYu2CamK96wCkmz0VUXyqaiUoTPgzk414LS9yYrd5uh7w18ksJF5SlC2e91rukWvNqAZJjYN3jpkqHNOFchCwFrhbxq2Lrv1kSJPYCx9blRg2hGmYqTbElLTZHv20iNqwZeQbRMgSBPT6vnbCBPnOh1W' 156 | var serialized = CBOR.encode(data) 157 | var deserialized = CBOR.decode(serialized) 158 | assert.equal(deserialized, data) 159 | }) 160 | 161 | test('encode/decode sample data', function(){ 162 | var data = sampleData 163 | var serialized = CBOR.encode(data) 164 | var deserialized = CBOR.decode(serialized) 165 | assert.deepEqual(deserialized, data) 166 | var serialized = CBOR.encode(data) 167 | var deserialized = CBOR.decode(serialized) 168 | assert.deepEqual(deserialized, data) 169 | }) 170 | test('encode/decode sample data with records', function(){ 171 | var data = sampleData 172 | let sharedSerialized 173 | let encoder = new Encoder({ getStructures() { return }, saveStructures(shared) { sharedSerialized = encode(shared) }, useRecords: true }) 174 | var serialized = encoder.encode(data) 175 | encoder = new Encoder({ getStructures() { return decode(sharedSerialized) }, saveStructures(shared) { sharedSerialized = encode(shared) }, useRecords: true }) 176 | var deserialized = encoder.decode(serialized) 177 | assert.deepEqual(deserialized, data) 178 | }) 179 | test('encode/decode sample data with packing', function(){ 180 | var data = sampleData 181 | let encoder = new Encoder({ pack: true, useRecords: false }) 182 | var serialized = encoder.encode(data) 183 | var deserialized = encoder.decode(serialized) 184 | assert.deepEqual(deserialized, data) 185 | }) 186 | test('encode/decode sample data with packing and records', function(){ 187 | var data = sampleData 188 | let structures = [] 189 | let encoder = new Encoder({ useStringRefs: true }) 190 | var serialized = encoder.encode(data) 191 | var deserialized = encoder.decode(serialized) 192 | assert.deepEqual(deserialized, data) 193 | }) 194 | 
test('encode/decode sample data with shared packing and records', function(){ 195 | let encoder = new Encoder({ useRecords: true }) 196 | let finishPack = encoder.findCommonStringsToPack() 197 | for (let i = 0; i < 20; i++) { 198 | let data = { 199 | shouldShare: 'same each time', 200 | shouldShare2: 'same each time 2', 201 | shouldntShare: 'different each time ' + i 202 | } 203 | if (i == 10) 204 | finishPack({}) 205 | var serialized = encoder.encode(data) 206 | var deserialized = encoder.decode(serialized) 207 | assert.deepEqual(deserialized, data) 208 | } 209 | }) 210 | test('encode/decode sample data with individual packing, shared packing and records', function(){ 211 | let encoder = new Encoder({ pack: true, useRecords: true }) 212 | let finishPack = encoder.findCommonStringsToPack() 213 | for (let i = 0; i < 20; i++) { 214 | let data = { 215 | shouldShare: 'same each time', 216 | shouldShare2: 'same each time', 217 | shouldntShare: 'different each time ' + i, 218 | shouldntShare2: 'different each time ' + i, 219 | noPack: 'no packing ' + i, 220 | } 221 | if (i == 10) 222 | finishPack({ threshold: 5 }) 223 | var serialized = encoder.encode(data) 224 | var deserialized = encoder.decode(serialized) 225 | assert.deepEqual(deserialized, data) 226 | } 227 | }) 228 | test('pack/unpack sample data with bundled strings', function(){ 229 | var data = sampleData 230 | let encoder = new Encoder({ /*structures,*/ useRecords: false, bundleStrings: true }) 231 | var serialized = encoder.encode(data) 232 | var deserialized = encoder.decode(serialized) 233 | assert.deepEqual(deserialized, data) 234 | }) 235 | test('pack/unpack sample data with self-descriptive header', function(){ 236 | var data = sampleData 237 | let encoder = new Encoder({ useSelfDescribedHeader: true }) 238 | var serialized = encoder.encode(data) 239 | var deserialized = encoder.decode(serialized) 240 | assert.deepEqual(deserialized, data) 241 | assert.equal(serialized[0], 0xd9) 242 | 
assert.equal(serialized[1], 0xd9) 243 | assert.equal(serialized[2], 0xf7) 244 | }) 245 | if (typeof Buffer != 'undefined') 246 | test('replace data', function(){ 247 | var data1 = { 248 | data: [ 249 | { a: 1, name: 'one', type: 'odd', isOdd: true, a: '13 characters' }, 250 | { a: 2, name: 'two', type: 'even', a: '11 characte' }, 251 | { a: 3, name: 'three', type: 'odd', isOdd: true, a: '12 character' }, 252 | { a: 4, name: 'four', type: 'even', a: '9 charact'}, 253 | { a: 5, name: 'five', type: 'odd', isOdd: true, a: '14 characters!' }, 254 | { a: 6, name: 'six', type: 'even', isOdd: null } 255 | ], 256 | } 257 | var data2 = { 258 | data: [ 259 | { foo: 7, name: 'one', type: 'odd', isOdd: true }, 260 | { foo: 8, name: 'two', type: 'even'}, 261 | { foo: 9, name: 'three', type: 'odd', isOdd: true }, 262 | { foo: 10, name: 'four', type: 'even'}, 263 | { foo: 11, name: 'five', type: 'odd', isOdd: true }, 264 | { foo: 12, name: 'six', type: 'even', isOdd: null } 265 | ], 266 | } 267 | var serialized1 = encode(data1) 268 | var serialized2 = encode(data2) 269 | var b = Buffer.alloc(8000) 270 | serialized1.copy(b) 271 | var deserialized1 = decode(b, serialized1.length) 272 | serialized2.copy(b) 273 | var deserialized2 = decode(b, serialized2.length) 274 | assert.deepEqual(deserialized1, data1) 275 | assert.deepEqual(deserialized2, data2) 276 | }) 277 | test('extended class encode/decode', function() { 278 | function Extended() { 279 | 280 | } 281 | 282 | Extended.prototype.getDouble = function() { 283 | return this.value * 2 284 | } 285 | var instance = new Extended() 286 | instance.value = 4 287 | instance.string = 'decode this: ᾜ' 288 | var data = { 289 | prop1: 'has multi-byte: ᾜ', 290 | extendedInstance: instance, 291 | prop2: 'more string', 292 | num: 3, 293 | } 294 | let encoder = new Encoder() 295 | addExtension({ 296 | Class: Extended, 297 | tag: 300, 298 | decode: function(data) { 299 | let e = new Extended() 300 | e.value = data[0] 301 | e.string = data[1] 302 | 
return e 303 | }, 304 | encode: function(instance) { 305 | return encoder.encode([instance.value, instance.string]) 306 | } 307 | }) 308 | }) 309 | test('extended class encode/decode with self reference in structered clone', function(){ 310 | function Extended() { 311 | 312 | } 313 | addExtension({ 314 | Class: Extended, 315 | tag: 301, 316 | decode: function(data) { 317 | let e = new Extended() 318 | e.value = data[0] 319 | e.string = data[1] 320 | return e 321 | }, 322 | encode: function(instance, encode) { 323 | return encode([instance.value, instance.string]) 324 | } 325 | }) 326 | var instance = new Extended() 327 | instance.value = instance; 328 | instance.string = 'hi' 329 | let data = { 330 | extended: instance 331 | } 332 | let encoder = new Encoder({ 333 | structuredClone: true, 334 | }) 335 | let serialized = encoder.encode(data) 336 | let deserialized = encoder.decode(serialized) 337 | assert(data.extended.value.value === data.extended) 338 | assert(data.extended instanceof Extended) 339 | }) 340 | 341 | test('addExtension with map', function(){ 342 | function Extended() { 343 | } 344 | var instance = new Extended() 345 | instance.value = 4 346 | instance.map = new Map(); 347 | instance.map.set('key', 'value'); 348 | var data = { 349 | extendedInstance: instance, 350 | } 351 | let encoder = new Encoder() 352 | addExtension({ 353 | Class: Extended, 354 | tag: 301, 355 | decode: function(data) { 356 | let e = new Extended() 357 | e.value = data[0] 358 | e.map = data[1] 359 | return e 360 | }, 361 | encode: function(instance, encode) { 362 | return encode([instance.value, instance.map]) 363 | } 364 | }) 365 | var serialized = encoder.encode(data) 366 | var deserialized = encoder.decode(serialized) 367 | assert.deepEqual(data, deserialized) 368 | }) 369 | 370 | test.skip('text decoder', function() { 371 | let td = new TextDecoder('ISO-8859-15') 372 | let b = Buffer.alloc(3) 373 | let total = 0 374 | for (var i = 0; i < 256; i++) { 375 | b[0] = i 376 | b[1] 
= 0 377 | b[2] = 0 378 | let s = td.decode(b) 379 | if (!require('CBOR-extract').isOneByte(s)) { 380 | console.log(i.toString(16), s.length) 381 | total++ 382 | } 383 | } 384 | }) 385 | 386 | test('structured cloning: self reference', function() { 387 | let object = { 388 | test: 'string', 389 | children: [ 390 | { name: 'child' } 391 | ] 392 | } 393 | object.self = object 394 | object.children[1] = object 395 | object.children[2] = object.children[0] 396 | object.childrenAgain = object.children 397 | let encoder = new Encoder({ 398 | structuredClone: true, 399 | }) 400 | var serialized = encoder.encode(object) 401 | var deserialized = encoder.decode(serialized) 402 | assert.equal(deserialized.self, deserialized) 403 | assert.equal(deserialized.children[0].name, 'child') 404 | assert.equal(deserialized.children[1], deserialized) 405 | assert.equal(deserialized.children[0], deserialized.children[2]) 406 | assert.equal(deserialized.children, deserialized.childrenAgain) 407 | }) 408 | test('nested same key', function() { 409 | const encoder = new Encoder(); 410 | const r_key = "key"; 411 | const d_key = "key"; 412 | const data = { [r_key]: { [d_key]: "foo" } }; 413 | const enc = encoder.encode(data); 414 | const dec = encoder.decode(enc); 415 | assert.deepEqual(dec, data); 416 | }); 417 | test('decode float 16', function() { 418 | assert.equal(decode(new Uint8Array([0xF9, 0x4A, 0x60])), 12.75); 419 | assert.equal(decode(new Uint8Array([0xF9, 0xC4, 0x80])), -4.5); 420 | assert.equal(decode(new Uint8Array([0xF9, 0x5A, 0xF9])), 223.125); 421 | assert.equal(decode(new Uint8Array([0xF9, 0x45, 0x80])), 5.5); 422 | assert.equal(decode(new Uint8Array([0xF9, 0x7C, 0])), Infinity); 423 | assert.equal(decode(new Uint8Array([0xF9, 0xFC, 0])), -Infinity); 424 | assert.isNaN(decode(new Uint8Array([0xF9, 0x7E, 0]))); 425 | }); 426 | test('structured cloning: types', function() { 427 | let b = typeof Buffer != 'undefined' ? 
Buffer.alloc(20) : new Uint8Array(20) 428 | let fa = new Float32Array(b.buffer, 8, 2) 429 | fa[0] = 2.25 430 | fa[1] = 6 431 | let f64a = new Float64Array([2.3, 4.7]) 432 | let map = new Map() 433 | map.set('key', 'value') 434 | let object = { 435 | error: new Error('test'), 436 | set: new Set(['a', 'b']), 437 | regexp: /test/gi, 438 | map, 439 | float32Array: fa, 440 | float64Array: f64a, 441 | uint16Array: new Uint16Array([3,4]) 442 | } 443 | let encoder = new Encoder({ 444 | structuredClone: true, 445 | }) 446 | var serialized = encoder.encode(object) 447 | var deserialized = encoder.decode(serialized) 448 | assert.deepEqual(Array.from(deserialized.set), Array.from(object.set)) 449 | assert.equal(deserialized.map.get('key'), 'value') 450 | assert.equal(deserialized.error.message, object.error.message) 451 | assert.equal(deserialized.regexp.test('TEST'), true) 452 | assert.equal(deserialized.float32Array.constructor.name, 'Float32Array') 453 | assert.equal(deserialized.float32Array[0], 2.25) 454 | assert.equal(deserialized.float32Array[1], 6) 455 | assert.equal(deserialized.float64Array[0], 2.3) 456 | assert.equal(deserialized.float64Array[1], 4.7) 457 | assert.equal(deserialized.uint16Array.constructor.name, 'Uint16Array') 458 | assert.equal(deserialized.uint16Array[0], 3) 459 | assert.equal(deserialized.uint16Array[1], 4) 460 | }) 461 | 462 | test('explicit maps and sets', function () { 463 | let map = new Map() 464 | map.set('key', { inside: 'value'}) 465 | let object = { 466 | set: new Set(['a', 'b']), 467 | map, 468 | } 469 | var serialized = encode(object) // default encoder 470 | var deserialized = decode(serialized) 471 | assert.deepEqual(Array.from(deserialized.set), Array.from(object.set)) 472 | assert.equal(deserialized.map.get('key').inside, 'value') 473 | }) 474 | 475 | test('object without prototype', function(){ 476 | var data = Object.create(null) 477 | data.test = 3 478 | var serialized = encode(data) 479 | var deserialized = decode(serialized) 
480 | assert.deepEqual(deserialized, data) 481 | }) 482 | test('object with __proto__', function(){ 483 | const data = { foo: 'bar', __proto__: { isAdmin: true } }; 484 | var serialized = encode(data) 485 | var deserialized = decode(serialized) 486 | assert.deepEqual(deserialized, { foo: 'bar' }); 487 | }) 488 | 489 | test('big buffer', function() { 490 | var size = 100000000 491 | var data = new Uint8Array(size).fill(1) 492 | var encoded = encode(data) 493 | var decoded = decode(encoded) 494 | assert.equal(decoded.length, size) 495 | }) 496 | test('little buffer', function() { 497 | var data = typeof Buffer == 'undefined' ? new Uint8Array(0) : Buffer.alloc(0) 498 | var encoded = encode(data) 499 | assert.equal(encoded.length, 1) // make sure to use canonical form 500 | var decoded = decode(encoded) 501 | assert.equal(decoded.length, 0) 502 | }) 503 | 504 | test('random strings', function(){ 505 | var data = [] 506 | for (var i = 0; i < 2000; i++) { 507 | var str = 'test' 508 | while (Math.random() < 0.7 && str.length < 0x100000) { 509 | str = str + String.fromCharCode(90/(Math.random() + 0.01)) + str 510 | } 511 | data.push(str) 512 | } 513 | var serialized = encode(data) 514 | var deserialized = decode(serialized) 515 | assert.deepEqual(deserialized, data) 516 | }) 517 | 518 | test('map/date', function(){ 519 | var map = new Map() 520 | map.set(4, 'four') 521 | map.set('three', 3) 522 | let year2039 = new Date('2039-07-05T16:22:35.792Z') 523 | let year2038 = new Date('2038-08-06T00:19:02.911Z') 524 | 525 | var data = { 526 | map: map, 527 | date: new Date(1532219539733), 528 | farFutureDate: new Date(3532219539133), 529 | ancient: new Date(-3532219539133), 530 | year2038, 531 | year2039, 532 | invalidDate: new Date('invalid') 533 | } 534 | let encoder = new Encoder() 535 | var serialized = encoder.encode(data) 536 | var deserialized = encoder.decode(serialized) 537 | assert.equal(deserialized.map.get(4), 'four') 538 | assert.equal(deserialized.map.get('three'), 
3) 539 | assert.equal(deserialized.date.getTime(), 1532219539733) 540 | assert.equal(deserialized.farFutureDate.getTime(), 3532219539133) 541 | assert.equal(deserialized.ancient.getTime(), -3532219539133) 542 | assert.equal(deserialized.year2038.getTime(), year2038.getTime()) 543 | assert.equal(deserialized.year2039.getTime(), year2039.getTime()) 544 | assert.equal(deserialized.invalidDate.toString(), 'Invalid Date') 545 | }) 546 | test('map/date with options', function(){ 547 | var map = new Map() 548 | map.set(4, 'four') 549 | map.set('three', 3) 550 | var data = { 551 | map: map, 552 | date: new Date(1532219539011), 553 | invalidDate: new Date('invalid') 554 | } 555 | let encoder = new Encoder({ 556 | mapsAsObjects: true, 557 | useTimestamp32: true, 558 | useTag259ForMaps: false, 559 | }) 560 | var serialized = encoder.encode(data) 561 | var deserialized = encoder.decode(serialized) 562 | assert.equal(deserialized.map[4], 'four') 563 | assert.equal(deserialized.map.three, 3) 564 | assert.equal(deserialized.date.getTime(), 1532219539000) 565 | assert.isTrue(isNaN(deserialized.invalidDate.getTime())) 566 | }) 567 | test('key caching', function() { 568 | var data = { 569 | foo: 2, 570 | bar: 'test', 571 | four: 4, 572 | seven: 7, 573 | foz: 3, 574 | } 575 | var serialized = CBOR.encode(data) 576 | var deserialized = CBOR.decode(serialized) 577 | assert.deepEqual(deserialized, data) 578 | // do multiple times to test caching 579 | var serialized = CBOR.encode(data) 580 | var deserialized = CBOR.decode(serialized) 581 | assert.deepEqual(deserialized, data) 582 | var serialized = CBOR.encode(data) 583 | var deserialized = CBOR.decode(serialized) 584 | assert.deepEqual(deserialized, data) 585 | }) 586 | test('strings', function() { 587 | var data = [''] 588 | var serialized = encode(data) 589 | var deserialized = decode(serialized) 590 | assert.deepEqual(deserialized, data) 591 | // do multiple times 592 | var serialized = encode(data) 593 | var deserialized = 
decode(serialized) 594 | assert.deepEqual(deserialized, data) 595 | data = 'decode this: ᾜ' 596 | var serialized = encode(data) 597 | var deserialized = decode(serialized) 598 | assert.deepEqual(deserialized, data) 599 | data = 'decode this that is longer but without any non-latin characters' 600 | var serialized = encode(data) 601 | var deserialized = decode(serialized) 602 | assert.deepEqual(deserialized, data) 603 | }) 604 | test('decimal float32', function() { 605 | var data = { 606 | a: 2.526, 607 | b: 0.0035235, 608 | c: 0.00000000000352501, 609 | d: 3252.77, 610 | } 611 | let encoder = new Encoder({ 612 | useFloat32: DECIMAL_FIT 613 | }) 614 | var serialized = encoder.encode(data) 615 | assert.equal(serialized.length, 36) 616 | var deserialized = encoder.decode(serialized) 617 | assert.deepEqual(deserialized, data) 618 | }) 619 | test('decimal alwaysUseFloat', function() { 620 | var data = 123 621 | let encoder = new Encoder({ 622 | alwaysUseFloat: true 623 | }) 624 | var serialized = encoder.encode(data) 625 | assert.equal(serialized.length, 9) 626 | var deserialized = encoder.decode(serialized) 627 | assert.equal(deserialized, data) 628 | }) 629 | test('bigint to float', function() { 630 | var data = { 631 | a: 325283295382932843n 632 | } 633 | let encoder = new Encoder({ 634 | int64AsNumber: true 635 | }) 636 | var serialized = encoder.encode(data) 637 | var deserialized = encoder.decode(serialized) 638 | assert.deepEqual(deserialized.a, 325283295382932843) 639 | }) 640 | test('numbers', function(){ 641 | var data = { 642 | bigEncodable: 48978578104322, 643 | dateEpoch: 1530886513200, 644 | realBig: 3432235352353255323, 645 | decimal: 32.55234, 646 | negative: -34.11, 647 | exponential: 0.234e123, 648 | tiny: 3.233e-120, 649 | zero: 0, 650 | //negativeZero: -0, 651 | Infinity: Infinity 652 | } 653 | var serialized = encode(data) 654 | var deserialized = decode(serialized) 655 | assert.deepEqual(deserialized, data) 656 | }) 657 | test('numbers are 
compact', function(){ 658 | assert.equal(encode(-256).length, 2) 659 | let encoding = encode(-4294967296) 660 | assert.equal(encoding.length, 5) 661 | assert.equal(decode(encoding), -4294967296) 662 | }) 663 | test('encode ArrayBuffer', function() { 664 | let ua = new Uint8Array([3, 4, 5]); 665 | let encoded = encode(ua.buffer); 666 | let decoded = decode(encoded); 667 | assert.equal(decoded[0], 3); 668 | assert.equal(decoded[0], 3); 669 | assert.equal(decoded[1], 4); 670 | assert.equal(decoded[2], 5); 671 | assert.equal(decoded.byteLength, 3); 672 | }) 673 | 674 | test('iterator/indefinite length array', function(){ 675 | class NotArray { 676 | } 677 | let data = ['a', 'b', 'c', ['d']] // iterable 678 | data.constructor = NotArray 679 | var serialized = encode(data) 680 | var deserialized = decode(serialized) 681 | assert.deepEqual(deserialized, data) 682 | }) 683 | test('bigint', function(){ 684 | var data = { 685 | bigintSmall: 352n, 686 | bigintSmallNegative: -333335252n, 687 | bigintBig: 2n**64n - 1n, // biggest 64-bit possible 688 | bigintBigNegative: -(2n**63n), // largest negative 689 | mixedWithNormal: 44, 690 | } 691 | var serialized = encode(data) 692 | var deserialized = decode(serialized) 693 | assert.deepEqual(deserialized, data) 694 | var evenBiggerInt = { 695 | big: 2n**66n, 696 | bigger: 53285732853728573289573289573289573289583725892358732859532n, 697 | negBig: -93025879203578903275903285903285903289502n, 698 | } 699 | var serialized = encode(evenBiggerInt) 700 | var deserialized = decode(serialized) 701 | assert.deepEqual(deserialized, evenBiggerInt) 702 | let encoder = new Encoder({ 703 | largeBigIntToFloat: true 704 | }) 705 | serialized = encoder.encode(evenBiggerInt) 706 | deserialized = decode(serialized) 707 | assert.isTrue(deserialized.bigger > 2n**65n) 708 | }) 709 | 710 | test('buffers', function() { 711 | var data = { 712 | buffer1: new Uint8Array([2,3,4]), 713 | buffer2: new Uint8Array(encode(sampleData)) 714 | } 715 | var serialized = 
encode(data) 716 | var deserialized = decode(serialized) 717 | assert.deepEqual(deserialized, data) 718 | let encoder = new Encoder({ tagUint8Array: true }) 719 | serialized = encoder.encode(new Uint8Array([2,3,4])) 720 | assert.equal(serialized[0], 0xd8); 721 | encoder = new Encoder({ tagUint8Array: false }) 722 | serialized = encoder.encode(new Uint8Array([2,3,4])) 723 | assert.equal(serialized[0], 0x43); 724 | }) 725 | 726 | test('noteencode test', function() { 727 | const data = { 728 | foo: 1, 729 | bar: [1, 2, 3, 4, 'abc', 'def'], 730 | foobar: { 731 | foo: true, 732 | bar: -2147483649, 733 | foobar: { 734 | foo: new Uint8Array([1, 2, 3, 4, 5]), 735 | bar: 1.5, 736 | foobar: [true, false, 'abcdefghijkmonpqrstuvwxyz'] 737 | } 738 | } 739 | }; 740 | var serialized = encode(data) 741 | var deserialized = decode(serialized) 742 | var deserialized = decode(serialized) 743 | var deserialized = decode(serialized) 744 | assert.deepEqual(deserialized, data) 745 | }) 746 | 747 | test('utf16 causing expansion', function() { 748 | this.timeout(10000) 749 | let data = {fixstr: 'ᾐᾑᾒᾓᾔᾕᾖᾗᾘᾙᾚᾛᾜᾝ', str8:'ᾐᾑᾒᾓᾔᾕᾖᾗᾘᾙᾚᾛᾜᾝᾐᾑᾒᾓᾔᾕᾖᾗᾘᾙᾚᾛᾜᾝᾐᾑᾒᾓᾔᾕᾖᾗᾘᾙᾚᾛᾜᾝᾐᾑᾒᾓᾔᾕᾖᾗᾘᾙᾚᾛᾜᾝᾐᾑᾒᾓᾔᾕᾖᾗᾘᾙᾚᾛᾜᾝᾐᾑᾒᾓᾔᾕᾖᾗᾘᾙᾚᾛᾜᾝᾐᾑᾒᾓᾔᾕᾖᾗᾘᾙᾚᾛᾜᾝᾐᾑᾒᾓᾔᾕᾖᾗᾘᾙᾚᾛᾜᾝᾐᾑᾒᾓᾔᾕᾖᾗᾘᾙᾚᾛᾜᾝᾐᾑᾒᾓᾔᾕᾖᾗᾘᾙᾚᾛᾜᾝᾐᾑᾒᾓᾔᾕᾖᾗᾘᾙᾚᾛᾜᾝᾐᾑᾒᾓᾔᾕᾖᾗᾘᾙᾚᾛᾜᾝᾐᾑᾒᾓᾔᾕᾖᾗᾘᾙᾚᾛᾜᾝᾐᾑᾒᾓᾔᾕᾖᾗᾘᾙᾚᾛᾜᾝᾐᾑᾒᾓᾔᾕᾖᾗᾘᾙᾚᾛᾜᾝᾐᾑᾒᾓᾔᾕᾖᾗᾘᾙᾚᾛᾜᾝ'} 750 | var serialized = encode(data) 751 | var deserialized = decode(serialized) 752 | assert.deepEqual(deserialized, data) 753 | }) 754 | test('decodeMultiple', () => { 755 | let values = CBOR.decodeMultiple(new Uint8Array([1, 2, 3, 4])) 756 | assert.deepEqual(values, [1, 2, 3, 4]) 757 | values = [] 758 | CBOR.decodeMultiple(new Uint8Array([1, 2, 3, 4]), value => values.push(value)) 759 | assert.deepEqual(values, [1, 2, 3, 4]) 760 | }) 761 | test('skipFunction', () => { 762 | var data = { 763 | a: 325283295382932843n, 764 | f: () => {}, 765 | } 766 | let encoder = new Encoder({ 767 | int64AsNumber: true, 768 | skipFunction: true, 769 | }) 770 | var 
serialized = encoder.encode(data) 771 | var deserialized = encoder.decode(serialized) 772 | assert.deepEqual(deserialized.a, 325283295382932843) 773 | assert.equal(Object.hasOwn(deserialized, "f"), false); 774 | }) 775 | test('bad input', function() { 776 | let badInput = Buffer.from('7b2273657269616c6e6f223a2265343a30222c226970223a223139322e3136382e312e3335222c226b6579223a226770735f736563726574227d', 'hex'); 777 | assert.throws(function(){ decode(badInput) }) // should throw, not crash 778 | }) 779 | test('buffer key', function() { 780 | let encoder = new Encoder({ mapsAsObjects: false }) 781 | let test = encoder.decode(Buffer.from('D87982A1446E616D654361626301', 'hex')); 782 | console.log(test); 783 | }); 784 | test('encode as iterator', function() { 785 | let hasIterables = { 786 | a: 1, 787 | iterator: (function*() { 788 | yield 2; 789 | yield { 790 | b: (function*() { 791 | yield 3; 792 | })(), 793 | }; 794 | })() 795 | }; 796 | let encodedIterable = encodeAsIterable(hasIterables); 797 | let result = [...encodedIterable]; 798 | result = Buffer.concat(result); 799 | let deserialized = decode(result); 800 | const expectedResult = { 801 | a: 1, 802 | iterator: [2, { b: [3]}] 803 | }; 804 | assert.deepEqual(deserialized, expectedResult); 805 | }); 806 | if (typeof Blob !== 'undefined') 807 | test('encode as iterator with async/blob parts', function() { 808 | let blob = new Blob([Buffer.from([4,5])]); 809 | let hasIterables = { 810 | a: 1, 811 | iterator: (async function*() { 812 | yield 2; 813 | yield { 814 | b: (function*() { 815 | yield 3; 816 | })(), 817 | }; 818 | })(), 819 | blob 820 | }; 821 | let encodedIterable = encodeAsIterable(hasIterables); 822 | let result = [...encodedIterable]; 823 | assert.equal(result[result.length - 1].constructor, Blob); 824 | }); 825 | if (typeof Blob !== 'undefined') 826 | test('encode as async iterator with async/blob parts', async function() { 827 | let blob = new Blob([Buffer.from([4, 5])]); 828 | let hasIterables = { 829 | 
a: 1, 830 | iterator: (async function* () { 831 | yield 2; 832 | yield { 833 | b: (function* () { 834 | yield 3; 835 | })(), 836 | }; 837 | })(), 838 | blob 839 | }; 840 | let encodedIterable = encodeAsAsyncIterable(hasIterables); 841 | let result = []; 842 | for await (let encodedPart of encodedIterable) { 843 | result.push(encodedPart) 844 | } 845 | let deserialized = decode(Buffer.concat(result)); 846 | const expectedResult = { 847 | a: 1, 848 | iterator: [2, { b: [3]}], 849 | blob: Buffer.from([4,5]), 850 | }; 851 | assert.deepEqual(deserialized, expectedResult); 852 | }); 853 | test.skip('encode as iterator performance', async function() { 854 | function* iterator() { 855 | for (let i = 0; i < 1000; i++) { 856 | yield { 857 | a: 1, 858 | b: 'hello, world', 859 | c: true, 860 | sub: { 861 | d: 'inside', 862 | e: 3 863 | } 864 | } 865 | } 866 | } 867 | let result; 868 | let start = performance.now(); 869 | for (let i = 0; i < 1000; i++) { 870 | let encodedIterable = encodeAsIterable(iterator()); 871 | result = [...encodedIterable]; 872 | } 873 | let deserialized = decode(Buffer.concat(result)); 874 | console.log(performance.now() - start, result.length); 875 | }); 876 | 877 | test('little-endian typed array with aligned data', function() { 878 | // array[1] { uint32-little-endian-typed-array { bytes <00 00 00 00> } } 879 | let data = new Uint8Array([ 129, 216, 70, 68, 0, 0, 0, 0 ]); 880 | assert.deepEqual(decode(data), [new Uint32Array([0])]); 881 | 882 | let value = {x: new Float32Array([1, 2, 3])}; 883 | assert.deepEqual(decode(encode(value)), value); 884 | }); 885 | }) 886 | suite('CBOR performance tests', function(){ 887 | test('performance JSON.parse', function() { 888 | var data = sampleData 889 | this.timeout(10000) 890 | let structures = [] 891 | var serialized = JSON.stringify(data) 892 | console.log('JSON size', serialized.length) 893 | for (var i = 0; i < ITERATIONS; i++) { 894 | var deserialized = JSON.parse(serialized) 895 | } 896 | }) 897 | 
test('performance JSON.stringify', function() { 898 | var data = sampleData 899 | this.timeout(10000) 900 | for (var i = 0; i < ITERATIONS; i++) { 901 | var serialized = JSON.stringify(data) 902 | } 903 | }) 904 | test('performance decode', function() { 905 | var data = sampleData 906 | this.timeout(10000) 907 | let structures = [] 908 | var serialized = encode(data) 909 | console.log('CBOR size', serialized.length) 910 | let encoder = new Encoder({ structures, bundleStrings: true }) 911 | var serialized = encoder.encode(data) 912 | console.log('CBOR w/ record ext size', serialized.length) 913 | for (var i = 0; i < ITERATIONS; i++) { 914 | var deserialized = encoder.decode(serialized) 915 | } 916 | }) 917 | test('performance encode', function() { 918 | var data = sampleData 919 | this.timeout(10000) 920 | let structures = [] 921 | let encoder = new Encoder({ structures, bundleStrings: true }) 922 | let buffer = typeof Buffer != 'undefined' ? Buffer.alloc(0x10000) : new Uint8Array(0x10000) 923 | 924 | for (var i = 0; i < ITERATIONS; i++) { 925 | //serialized = encode(data, { shared: sharedStructure }) 926 | encoder.useBuffer(buffer) 927 | var serialized = encoder.encode(data) 928 | //var serializedGzip = deflateSync(serialized) 929 | } 930 | //console.log('serialized', serialized.length, global.propertyComparisons) 931 | }) 932 | }) -------------------------------------------------------------------------------- /decode.js: -------------------------------------------------------------------------------- 1 | let decoder 2 | try { 3 | decoder = new TextDecoder() 4 | } catch(error) {} 5 | let src 6 | let srcEnd 7 | let position = 0 8 | let alreadySet 9 | const EMPTY_ARRAY = [] 10 | const LEGACY_RECORD_INLINE_ID = 105 11 | const RECORD_DEFINITIONS_ID = 0xdffe 12 | const RECORD_INLINE_ID = 0xdfff // temporary first-come first-serve tag // proposed tag: 0x7265 // 're' 13 | const BUNDLED_STRINGS_ID = 0xdff9 14 | const PACKED_TABLE_TAG_ID = 51 15 | const 
PACKED_REFERENCE_TAG_ID = 6
// sentinel returned by read() when it hits a 0xff "break" byte in indefinite-length items
const STOP_CODE = {}
// Decode-time size limits, guarding against maliciously huge length headers
let maxArraySize = 112810000 // This is the maximum array size in V8. We would potentially detect and set it higher
// for JSC, but this is pretty large and should be sufficient for most use cases
let maxMapSize = 16810000 // JavaScript has a fixed maximum map size of about 16710000, but JS itself enforces this,
// so we don't need to

let maxObjectSize = 16710000; // This is the maximum number of keys in a Map. It takes over a minute to create this
// many keys in an object, so also probably a reasonable choice there.
// More shared decode state: extracted-string cache, active decoder/options,
// record structures, bundled-string bookkeeping, shared-reference map, extensions.
let strings = EMPTY_ARRAY
let stringPosition = 0
let currentDecoder = {}
let currentStructures
let srcString
let srcStringStart = 0
let srcStringEnd = 0
let bundledStrings
let referenceMap
let currentExtensions = []
let currentExtensionRanges = []
let packedValues
let dataView
let restoreMapsAsObject
let defaultOptions = {
	useRecords: false,
	mapsAsObjects: true
}
let sequentialMode = false
// after this many slow reads of a structure, compile a fast object-literal reader
let inlineObjectReadThreshold = 2;
var BlockedFunction // we use search and replace to change the next call to BlockedFunction to avoid CSP issues for
// no-eval build
try {
	new Function('')
} catch(error) {
	// if eval variants are not supported, do not create inline object readers ever
	inlineObjectReadThreshold = Infinity
}



export class Decoder {
	// options: useRecords, mapsAsObjects, structures, getShared/getStructures,
	// keyMap/_keyMap (property-name compression), sharedValues, int64AsNumber, etc.
	constructor(options) {
		if (options) {
			if ((options.keyMap || options._keyMap) && !options.useRecords) {
				options.useRecords = false
				options.mapsAsObjects = true
			}
			if (options.useRecords === false && options.mapsAsObjects === undefined)
				options.mapsAsObjects = true
			if (options.getStructures)
				options.getShared = options.getStructures
			if (options.getShared && !options.structures)
				(options.structures = []).uninitialized = true // this is what we use to denote an uninitialized structures
			if (options.keyMap) {
				// build the inverse map once so decodeKey is a single Map lookup
				this.mapKey = new Map()
				for (let [k,v] of Object.entries(options.keyMap)) this.mapKey.set(v,k)
			}
		}
		Object.assign(this, options)
	}
	/*
	decodeKey(key) {
		return this.keyMap
			? Object.keys(this.keyMap)[Object.values(this.keyMap).indexOf(key)] || key
			: key
	}
	*/
	// Map an encoded (compressed) key back to its original property name.
	decodeKey(key) {
		return this.keyMap ? this.mapKey.get(key) || key : key
	}

	// Map an original property name to its encoded (compressed) key.
	encodeKey(key) {
		return this.keyMap && this.keyMap.hasOwnProperty(key) ? this.keyMap[key] : key
	}

	// Convert a record object into a Map whose keys have been run through _keyMap.
	encodeKeys(rec) {
		if (!this._keyMap) return rec
		let map = new Map()
		for (let [k,v] of Object.entries(rec)) map.set((this._keyMap.hasOwnProperty(k) ? this._keyMap[k] : k), v)
		return map
	}

	// Convert a decoded Map back into a plain object, reversing _keyMap; lazily
	// builds the inverse mapping on first use.
	decodeKeys(map) {
		if (!this._keyMap || map.constructor.name != 'Map') return map
		if (!this._mapKey) {
			this._mapKey = new Map()
			for (let [k,v] of Object.entries(this._keyMap)) this._mapKey.set(v,k)
		}
		let res = {}
		//map.forEach((v,k) => res[Object.keys(this._keyMap)[Object.values(this._keyMap).indexOf(k)] || k] = v)
		map.forEach((v,k) => res[safeKey(this._mapKey.has(k) ? this._mapKey.get(k) : k)] = v)
		return res
	}

	mapDecode(source, end) {

		let res = this.decode(source)
		if (this._keyMap) {
			//Experimental support for Optimised KeyMap decoding
			switch (res.constructor.name) {
				case 'Array': return res.map(r => this.decodeKeys(r))
				//case 'Map': return this.decodeKeys(res)
			}
		}
		return res
	}

	// Decode a single CBOR value from a Uint8Array/Buffer. `end` optionally limits
	// how far into the buffer to read. Sets up the module-level decode state.
	decode(source, end) {
		if (src) {
			// re-entrant execution, save the state and restore it after we do this decode
			return saveState(() => {
				clearSource()
				return this ? this.decode(source, end) : Decoder.prototype.decode.call(defaultOptions, source, end)
			})
		}
		srcEnd = end > -1 ? end : source.length
		position = 0
		stringPosition = 0
		srcStringEnd = 0
		srcString = null
		strings = EMPTY_ARRAY
		bundledStrings = null
		src = source
		// this provides cached access to the data view for a buffer if it is getting reused, which is a recommend
		// technique for getting data from a database where it can be copied into an existing buffer instead of creating
		// new ones
		try {
			dataView = source.dataView || (source.dataView = new DataView(source.buffer, source.byteOffset, source.byteLength))
		} catch(error) {
			// if it doesn't have a buffer, maybe it is the wrong type of object
			src = null
			if (source instanceof Uint8Array)
				throw error
			throw new Error('Source must be a Uint8Array or Buffer but was a ' + ((source && typeof source == 'object') ? source.constructor.name : typeof source))
		}
		if (this instanceof Decoder) {
			currentDecoder = this
			// with pack enabled, reserve slots for private packed values ahead of the shared ones
			packedValues = this.sharedValues &&
				(this.pack ? new Array(this.maxPrivatePackedValues || 16).concat(this.sharedValues) :
				this.sharedValues)
			if (this.structures) {
				currentStructures = this.structures
				return checkedRead()
			} else if (!currentStructures || currentStructures.length > 0) {
				currentStructures = []
			}
		} else {
			currentDecoder = defaultOptions
			if (!currentStructures || currentStructures.length > 0)
				currentStructures = []
			packedValues = null
		}
		return checkedRead()
	}
	// Decode a concatenated sequence of CBOR values. With `forEach`, each value is
	// passed to the callback (return false to stop); otherwise all values are
	// returned as an array. On error, attaches lastPosition/values for resumption.
	decodeMultiple(source, forEach) {
		let values, lastPosition = 0
		try {
			let size = source.length
			sequentialMode = true
			let value = this ? this.decode(source, size) : defaultDecoder.decode(source, size)
			if (forEach) {
				if (forEach(value) === false) {
					return
				}
				while(position < size) {
					lastPosition = position
					if (forEach(checkedRead()) === false) {
						return
					}
				}
			}
			else {
				values = [ value ]
				while(position < size) {
					lastPosition = position
					values.push(checkedRead())
				}
				return values
			}
		} catch(error) {
			error.lastPosition = lastPosition
			error.values = values
			throw error
		} finally {
			sequentialMode = false
			clearSource()
		}
	}
}
export function getPosition() {
	return position
}
// Wraps read() with end-of-buffer validation: flags truncated input with
// error.incomplete, rejects trailing garbage (unless in sequentialMode), and
// clears module state when the source is fully consumed.
export function checkedRead() {
	try {
		let result = read()
		if (bundledStrings) {
			if (position >= bundledStrings.postBundlePosition) {
				let error = new Error('Unexpected bundle position');
				error.incomplete = true;
				throw error
			}
			// bundled strings to skip past
			position = bundledStrings.postBundlePosition;
			bundledStrings = null;
		}

		if (position == srcEnd) {
			// finished reading this source, cleanup references
			currentStructures = null
			src = null
			if (referenceMap)
				referenceMap = null
		} else if (position > srcEnd) {
			// over read
			let error = new Error('Unexpected end of CBOR data')
			error.incomplete = true
			throw error
		} else if (!sequentialMode) {
			throw new Error('Data read, but end of buffer not reached')
		}
		// else more to read, but we are reading sequentially, so don't clear source yet
		return result
	} catch(error) {
		clearSource()
		if (error instanceof RangeError || error.message.startsWith('Unexpected end of buffer')) {
			error.incomplete = true
		}
		throw error
	}
}

// Core recursive-descent reader: decodes the next CBOR item at `position`.
export function read() {
	let token = src[position++]
	let
majorType = token >> 5 249 | token = token & 0x1f 250 | if (token > 0x17) { 251 | switch (token) { 252 | case 0x18: 253 | token = src[position++] 254 | break 255 | case 0x19: 256 | if (majorType == 7) { 257 | return getFloat16() 258 | } 259 | token = dataView.getUint16(position) 260 | position += 2 261 | break 262 | case 0x1a: 263 | if (majorType == 7) { 264 | let value = dataView.getFloat32(position) 265 | if (currentDecoder.useFloat32 > 2) { 266 | // this does rounding of numbers that were encoded in 32-bit float to nearest significant decimal digit that could be preserved 267 | let multiplier = mult10[((src[position] & 0x7f) << 1) | (src[position + 1] >> 7)] 268 | position += 4 269 | return ((multiplier * value + (value > 0 ? 0.5 : -0.5)) >> 0) / multiplier 270 | } 271 | position += 4 272 | return value 273 | } 274 | token = dataView.getUint32(position) 275 | position += 4 276 | if (majorType === 1) return -1 - token; // can't safely use negation operator here 277 | break 278 | case 0x1b: 279 | if (majorType == 7) { 280 | let value = dataView.getFloat64(position) 281 | position += 8 282 | return value 283 | } 284 | if (majorType > 1) { 285 | if (dataView.getUint32(position) > 0) 286 | throw new Error('JavaScript does not support arrays, maps, or strings with length over 4294967295') 287 | token = dataView.getUint32(position + 4) 288 | } else if (currentDecoder.int64AsNumber) { 289 | token = dataView.getUint32(position) * 0x100000000 290 | token += dataView.getUint32(position + 4) 291 | } else token = dataView.getBigUint64(position) 292 | position += 8 293 | break 294 | case 0x1f: 295 | // indefinite length 296 | switch(majorType) { 297 | case 2: // byte string 298 | case 3: // text string 299 | throw new Error('Indefinite length not supported for byte or text strings') 300 | case 4: // array 301 | let array = [] 302 | let value, i = 0 303 | while ((value = read()) != STOP_CODE) { 304 | if (i >= maxArraySize) throw new Error(`Array length exceeds 
${maxArraySize}`) 305 | array[i++] = value 306 | } 307 | return majorType == 4 ? array : majorType == 3 ? array.join('') : Buffer.concat(array) 308 | case 5: // map 309 | let key 310 | if (currentDecoder.mapsAsObjects) { 311 | let object = {} 312 | let i = 0; 313 | if (currentDecoder.keyMap) { 314 | while((key = read()) != STOP_CODE) { 315 | if (i++ >= maxMapSize) throw new Error(`Property count exceeds ${maxMapSize}`) 316 | object[safeKey(currentDecoder.decodeKey(key))] = read() 317 | } 318 | } 319 | else { 320 | while ((key = read()) != STOP_CODE) { 321 | if (i++ >= maxMapSize) throw new Error(`Property count exceeds ${maxMapSize}`) 322 | object[safeKey(key)] = read() 323 | } 324 | } 325 | return object 326 | } else { 327 | if (restoreMapsAsObject) { 328 | currentDecoder.mapsAsObjects = true 329 | restoreMapsAsObject = false 330 | } 331 | let map = new Map() 332 | if (currentDecoder.keyMap) { 333 | let i = 0; 334 | while((key = read()) != STOP_CODE) { 335 | if (i++ >= maxMapSize) { 336 | throw new Error(`Map size exceeds ${maxMapSize}`); 337 | } 338 | map.set(currentDecoder.decodeKey(key), read()) 339 | } 340 | } 341 | else { 342 | let i = 0; 343 | while ((key = read()) != STOP_CODE) { 344 | if (i++ >= maxMapSize) { 345 | throw new Error(`Map size exceeds ${maxMapSize}`); 346 | } 347 | map.set(key, read()) 348 | } 349 | } 350 | return map 351 | } 352 | case 7: 353 | return STOP_CODE 354 | default: 355 | throw new Error('Invalid major type for indefinite length ' + majorType) 356 | } 357 | default: 358 | throw new Error('Unknown token ' + token) 359 | } 360 | } 361 | switch (majorType) { 362 | case 0: // positive int 363 | return token 364 | case 1: // negative int 365 | return ~token 366 | case 2: // buffer 367 | return readBin(token) 368 | case 3: // string 369 | if (srcStringEnd >= position) { 370 | return srcString.slice(position - srcStringStart, (position += token) - srcStringStart) 371 | } 372 | if (srcStringEnd == 0 && srcEnd < 140 && token < 32) { 373 | 
// for small blocks, avoiding the overhead of the extract call is helpful 374 | let string = token < 16 ? shortStringInJS(token) : longStringInJS(token) 375 | if (string != null) 376 | return string 377 | } 378 | return readFixedString(token) 379 | case 4: // array 380 | if (token >= maxArraySize) throw new Error(`Array length exceeds ${maxArraySize}`) 381 | let array = new Array(token) 382 | //if (currentDecoder.keyMap) for (let i = 0; i < token; i++) array[i] = currentDecoder.decodeKey(read()) 383 | //else 384 | for (let i = 0; i < token; i++) array[i] = read() 385 | return array 386 | case 5: // map 387 | if (token >= maxMapSize) throw new Error(`Map size exceeds ${maxArraySize}`) 388 | if (currentDecoder.mapsAsObjects) { 389 | let object = {} 390 | if (currentDecoder.keyMap) for (let i = 0; i < token; i++) object[safeKey(currentDecoder.decodeKey(read()))] = read() 391 | else for (let i = 0; i < token; i++) object[safeKey(read())] = read() 392 | return object 393 | } else { 394 | if (restoreMapsAsObject) { 395 | currentDecoder.mapsAsObjects = true 396 | restoreMapsAsObject = false 397 | } 398 | let map = new Map() 399 | if (currentDecoder.keyMap) for (let i = 0; i < token; i++) map.set(currentDecoder.decodeKey(read()),read()) 400 | else for (let i = 0; i < token; i++) map.set(read(), read()) 401 | return map 402 | } 403 | case 6: // extension 404 | if (token >= BUNDLED_STRINGS_ID) { 405 | let structure = currentStructures[token & 0x1fff] // check record structures first 406 | // At some point we may provide an option for dynamic tag assignment with a range like token >= 8 && (token < 16 || (token > 0x80 && token < 0xc0) || (token > 0x130 && token < 0x4000)) 407 | if (structure) { 408 | if (!structure.read) structure.read = createStructureReader(structure) 409 | return structure.read() 410 | } 411 | if (token < 0x10000) { 412 | if (token == RECORD_INLINE_ID) { // we do a special check for this so that we can keep the 413 | // currentExtensions as densely stored 
array (v8 stores arrays densely under about 3000 elements) 414 | let length = readJustLength() 415 | let id = read() 416 | let structure = read() 417 | recordDefinition(id, structure) 418 | let object = {} 419 | if (currentDecoder.keyMap) for (let i = 2; i < length; i++) { 420 | let key = currentDecoder.decodeKey(structure[i - 2]) 421 | object[safeKey(key)] = read() 422 | } 423 | else for (let i = 2; i < length; i++) { 424 | let key = structure[i - 2] 425 | object[safeKey(key)] = read() 426 | } 427 | return object 428 | } 429 | else if (token == RECORD_DEFINITIONS_ID) { 430 | let length = readJustLength() 431 | let id = read() 432 | for (let i = 2; i < length; i++) { 433 | recordDefinition(id++, read()) 434 | } 435 | return read() 436 | } else if (token == BUNDLED_STRINGS_ID) { 437 | return readBundleExt() 438 | } 439 | if (currentDecoder.getShared) { 440 | loadShared() 441 | structure = currentStructures[token & 0x1fff] 442 | if (structure) { 443 | if (!structure.read) 444 | structure.read = createStructureReader(structure) 445 | return structure.read() 446 | } 447 | } 448 | } 449 | } 450 | let extension = currentExtensions[token] 451 | if (extension) { 452 | if (extension.handlesRead) 453 | return extension(read) 454 | else 455 | return extension(read()) 456 | } else { 457 | let input = read() 458 | for (let i = 0; i < currentExtensionRanges.length; i++) { 459 | let value = currentExtensionRanges[i](token, input) 460 | if (value !== undefined) 461 | return value 462 | } 463 | return new Tag(input, token) 464 | } 465 | case 7: // fixed value 466 | switch (token) { 467 | case 0x14: return false 468 | case 0x15: return true 469 | case 0x16: return null 470 | case 0x17: return; // undefined 471 | case 0x1f: 472 | default: 473 | let packedValue = (packedValues || getPackedValues())[token] 474 | if (packedValue !== undefined) 475 | return packedValue 476 | throw new Error('Unknown token ' + token) 477 | } 478 | default: // negative int 479 | if (isNaN(token)) { 480 | 
let error = new Error('Unexpected end of CBOR data') 481 | error.incomplete = true 482 | throw error 483 | } 484 | throw new Error('Unknown CBOR token ' + token) 485 | } 486 | } 487 | const validName = /^[a-zA-Z_$][a-zA-Z\d_$]*$/ 488 | function createStructureReader(structure) { 489 | if (!structure) throw new Error('Structure is required in record definition'); 490 | function readObject() { 491 | // get the array size from the header 492 | let length = src[position++] 493 | //let majorType = token >> 5 494 | length = length & 0x1f 495 | if (length > 0x17) { 496 | switch (length) { 497 | case 0x18: 498 | length = src[position++] 499 | break 500 | case 0x19: 501 | length = dataView.getUint16(position) 502 | position += 2 503 | break 504 | case 0x1a: 505 | length = dataView.getUint32(position) 506 | position += 4 507 | break 508 | default: 509 | throw new Error('Expected array header, but got ' + src[position - 1]) 510 | } 511 | } 512 | // This initial function is quick to instantiate, but runs slower. After several iterations pay the cost to build the faster function 513 | let compiledReader = this.compiledReader // first look to see if we have the fast compiled function 514 | while(compiledReader) { 515 | // we have a fast compiled object literal reader 516 | if (compiledReader.propertyCount === length) 517 | return compiledReader(read) // with the right length, so we use it 518 | compiledReader = compiledReader.next // see if there is another reader with the right length 519 | } 520 | if (this.slowReads++ >= inlineObjectReadThreshold) { // create a fast compiled reader 521 | let array = this.length == length ? this : this.slice(0, length) 522 | compiledReader = currentDecoder.keyMap 523 | ? new Function('r', 'return {' + array.map(k => currentDecoder.decodeKey(k)).map(k => validName.test(k) ? safeKey(k) + ':r()' : ('[' + JSON.stringify(k) + ']:r()')).join(',') + '}') 524 | : new Function('r', 'return {' + array.map(key => validName.test(key) ? 
safeKey(key) + ':r()' : ('[' + JSON.stringify(key) + ']:r()')).join(',') + '}') 525 | if (this.compiledReader) 526 | compiledReader.next = this.compiledReader // if there is an existing one, we store multiple readers as a linked list because it is usually pretty rare to have multiple readers (of different length) for the same structure 527 | compiledReader.propertyCount = length 528 | this.compiledReader = compiledReader 529 | return compiledReader(read) 530 | } 531 | let object = {} 532 | if (currentDecoder.keyMap) for (let i = 0; i < length; i++) object[safeKey(currentDecoder.decodeKey(this[i]))] = read() 533 | else for (let i = 0; i < length; i++) { 534 | object[safeKey(this[i])] = read(); 535 | } 536 | return object 537 | } 538 | structure.slowReads = 0 539 | return readObject 540 | } 541 | 542 | function safeKey(key) { 543 | // protect against prototype pollution 544 | if (typeof key === 'string') return key === '__proto__' ? '__proto_' : key 545 | if (typeof key === 'number' || typeof key === 'boolean' || typeof key === 'bigint') return key.toString(); 546 | if (key == null) return key + ''; 547 | // protect against expensive (DoS) string conversions 548 | throw new Error('Invalid property name type ' + typeof key); 549 | } 550 | 551 | let readFixedString = readStringJS 552 | let readString8 = readStringJS 553 | let readString16 = readStringJS 554 | let readString32 = readStringJS 555 | 556 | export let isNativeAccelerationEnabled = false 557 | export function setExtractor(extractStrings) { 558 | isNativeAccelerationEnabled = true 559 | readFixedString = readString(1) 560 | readString8 = readString(2) 561 | readString16 = readString(3) 562 | readString32 = readString(5) 563 | function readString(headerLength) { 564 | return function readString(length) { 565 | let string = strings[stringPosition++] 566 | if (string == null) { 567 | if (bundledStrings) 568 | return readStringJS(length) 569 | let extraction = extractStrings(position, srcEnd, length, src) 570 | 
				if (typeof extraction == 'string') {
					// single string extracted; invalidate the strings cache
					string = extraction
					strings = EMPTY_ARRAY
				} else {
					// batch of extracted strings; consume from index 0 onward
					strings = extraction
					stringPosition = 1
					srcStringEnd = 1 // even if a utf-8 string was decoded, must indicate we are in the midst of extracted strings and can't skip strings
					string = strings[0]
					if (string === undefined)
						throw new Error('Unexpected end of buffer')
				}
			}
			let srcStringLength = string.length
			if (srcStringLength <= length) {
				position += length
				return string
			}
			// extracted string spans multiple CBOR strings; cache it for subsequent slicing
			srcString = string
			srcStringStart = position
			srcStringEnd = position + srcStringLength
			position += length
			return string.slice(0, length) // we know we just want the beginning
		}
	}
}
// Pure-JS UTF-8 decoder for a string of `length` bytes at `position`. Tries the
// ASCII fast path for short strings, delegates to TextDecoder for long ones, and
// otherwise decodes UTF-8 (incl. surrogate pairs) manually.
function readStringJS(length) {
	let result
	if (length < 16) {
		if (result = shortStringInJS(length))
			return result
	}
	if (length > 64 && decoder)
		return decoder.decode(src.subarray(position, position += length))
	const end = position + length
	const units = []
	result = ''
	while (position < end) {
		const byte1 = src[position++]
		if ((byte1 & 0x80) === 0) {
			// 1 byte
			units.push(byte1)
		} else if ((byte1 & 0xe0) === 0xc0) {
			// 2 bytes
			const byte2 = src[position++] & 0x3f
			units.push(((byte1 & 0x1f) << 6) | byte2)
		} else if ((byte1 & 0xf0) === 0xe0) {
			// 3 bytes
			const byte2 = src[position++] & 0x3f
			const byte3 = src[position++] & 0x3f
			units.push(((byte1 & 0x1f) << 12) | (byte2 << 6) | byte3)
		} else if ((byte1 & 0xf8) === 0xf0) {
			// 4 bytes
			const byte2 = src[position++] & 0x3f
			const byte3 = src[position++] & 0x3f
			const byte4 = src[position++] & 0x3f
			let unit = ((byte1 & 0x07) << 0x12) | (byte2 << 0x0c) | (byte3 << 0x06) | byte4
			if (unit > 0xffff) {
				// encode as a UTF-16 surrogate pair
				unit -= 0x10000
				units.push(((unit >>> 10) & 0x3ff) | 0xd800)
				unit = 0xdc00 | (unit & 0x3ff)
			}
			units.push(unit)
		} else {
			units.push(byte1)
		}

		// flush periodically to avoid fromCharCode argument-count limits
		if (units.length >= 0x1000) {
			result += fromCharCode.apply(String, units)
			units.length = 0
		}
	}

	if (units.length > 0) {
		result += fromCharCode.apply(String, units)
	}

	return result
}
let fromCharCode = String.fromCharCode
// ASCII-only fast path for strings of arbitrary length; bails out (restoring
// position) on any non-ASCII byte so the caller can fall back to full decoding.
function longStringInJS(length) {
	let start = position
	let bytes = new Array(length)
	for (let i = 0; i < length; i++) {
		const byte = src[position++];
		if ((byte & 0x80) > 0) {
			position = start
			return
		}
		bytes[i] = byte
	}
	return fromCharCode.apply(String, bytes)
}
// Hand-unrolled ASCII fast path for strings up to 15 bytes; returns undefined
// (after rolling position back) if any byte is non-ASCII.
function shortStringInJS(length) {
	if (length < 4) {
		if (length < 2) {
			if (length === 0)
				return ''
			else {
				let a = src[position++]
				if ((a & 0x80) > 1) {
					position -= 1
					return
				}
				return fromCharCode(a)
			}
		} else {
			let a = src[position++]
			let b = src[position++]
			if ((a & 0x80) > 0 || (b & 0x80) > 0) {
				position -= 2
				return
			}
			if (length < 3)
				return fromCharCode(a, b)
			let c = src[position++]
			if ((c & 0x80) > 0) {
				position -= 3
				return
			}
			return fromCharCode(a, b, c)
		}
	} else {
		let a = src[position++]
		let b = src[position++]
		let c = src[position++]
		let d = src[position++]
		if ((a & 0x80) > 0 || (b & 0x80) > 0 || (c & 0x80) > 0 || (d & 0x80) > 0) {
			position -= 4
			return
		}
		if (length < 6) {
			if (length === 4)
				return fromCharCode(a, b, c, d)
			else {
				let e = src[position++]
				if ((e & 0x80) > 0) {
					position -= 5
					return
				}
				return fromCharCode(a, b, c, d, e)
			}
		} else if (length < 8) {
			let e = src[position++]
			let f = src[position++]
			if ((e & 0x80) > 0 || (f & 0x80) > 0) {
				position -= 6
				return
			}
			if (length < 7)
				return fromCharCode(a, b, c, d, e, f)
			let g = src[position++]
			if ((g & 0x80) > 0) {
				position -= 7
				return
			}
			return fromCharCode(a, b, c, d, e, f, g)
		} else {
			let e = src[position++]
			let f = src[position++]
			let g = src[position++]
			let h = src[position++]
			if ((e & 0x80) > 0 || (f & 0x80) > 0 || (g & 0x80) > 0 || (h & 0x80) > 0) {
				position -= 8
				return
			}
			if (length < 10) {
				if (length === 8)
					return fromCharCode(a, b, c, d, e, f, g, h)
				else {
					let i = src[position++]
					if ((i & 0x80) > 0) {
						position -= 9
						return
					}
					return fromCharCode(a, b, c, d, e, f, g, h, i)
				}
			} else if (length < 12) {
				let i = src[position++]
				let j = src[position++]
				if ((i & 0x80) > 0 || (j & 0x80) > 0) {
					position -= 10
					return
				}
				if (length < 11)
					return fromCharCode(a, b, c, d, e, f, g, h, i, j)
				let k = src[position++]
				if ((k & 0x80) > 0) {
					position -= 11
					return
				}
				return fromCharCode(a, b, c, d, e, f, g, h, i, j, k)
			} else {
				let i = src[position++]
				let j = src[position++]
				let k = src[position++]
				let l = src[position++]
				if ((i & 0x80) > 0 || (j & 0x80) > 0 || (k & 0x80) > 0 || (l & 0x80) > 0) {
					position -= 12
					return
				}
				if (length < 14) {
					if (length === 12)
						return fromCharCode(a, b, c, d, e, f, g, h, i, j, k, l)
					else {
						let m = src[position++]
						if ((m & 0x80) > 0) {
							position -= 13
							return
						}
						return fromCharCode(a, b, c, d, e, f, g, h, i, j, k, l, m)
					}
				} else {
					let m = src[position++]
					let n = src[position++]
					if ((m & 0x80) > 0 || (n & 0x80) > 0) {
						position -= 14
						return
					}
					if (length < 15)
						return fromCharCode(a, b, c, d, e, f, g, h, i, j, k, l, m, n)
					let o = src[position++]
					if ((o & 0x80) > 0) {
						position -= 15
						return
					}
					return fromCharCode(a, b, c, d, e, f, g, h, i, j, k, l, m, n, o)
				}
			}
		}
	}
}

// Read a byte string of `length` bytes; returns a subarray view unless the
// decoder's copyBuffers option requests an owned copy.
function readBin(length) {
	return currentDecoder.copyBuffers ?
		// specifically use the copying slice (not the node one)
		Uint8Array.prototype.slice.call(src, position, position += length) :
		src.subarray(position, position += length)
}
// Dispatch a length-prefixed extension payload to its registered handler by type byte.
function readExt(length) {
	let type = src[position++]
	if (currentExtensions[type]) {
		return currentExtensions[type](src.subarray(position, position += length))
	}
	else
		throw new Error('Unknown extension type ' + type)
}
// Scratch buffers for converting half-precision bit patterns via float32
let f32Array = new Float32Array(1)
let u8Array = new Uint8Array(f32Array.buffer, 0, 4)
// Decode an IEEE 754 half-precision (float16) value from the next two bytes.
function getFloat16() {
	let byte0 = src[position++]
	let byte1 = src[position++]
	let exponent = (byte0 & 0x7f) >> 2;
	if (exponent === 0x1f) { // specials
		if (byte1 || (byte0 & 3))
			return NaN;
		return (byte0 & 0x80) ? -Infinity : Infinity;
	}
	if (exponent === 0) { // sub-normals
		// significand with 10 fractional bits and divided by 2^14
		let abs = (((byte0 & 3) << 8) | byte1) / (1 << 24)
		return (byte0 & 0x80) ?
-abs : abs
	}

	// normal numbers: rebuild the bit pattern as a float32 and read it back
	u8Array[3] = (byte0 & 0x80) | // sign bit
		((exponent >> 1) + 56) // 4 of 5 of the exponent bits, re-offset-ed
	u8Array[2] = ((byte0 & 7) << 5) | // last exponent bit and first two mantissa bits
		(byte1 >> 3) // next 5 bits of mantissa
	u8Array[1] = byte1 << 5; // last three bits of mantissa
	u8Array[0] = 0;
	return f32Array[0];
}

// Cache of recently seen short property-name strings, indexed by a 12-bit hash
// of length and first bytes, to avoid re-decoding the same keys repeatedly.
let keyCache = new Array(4096)
// Read a map key, using keyCache for short fixed-length strings (0x60-0x77 headers).
// Compares cached byte chunks (4 bytes at a time, then the remainder) before
// falling back to decoding and populating a new cache entry.
function readKey() {
	let length = src[position++]
	if (length >= 0x60 && length < 0x78) {
		// fixstr, potentially use key cache
		length = length - 0x60
		if (srcStringEnd >= position) // if it has been extracted, must use it (and faster anyway)
			return srcString.slice(position - srcStringStart, (position += length) - srcStringStart)
		else if (!(srcStringEnd == 0 && srcEnd < 180))
			return readFixedString(length)
	} else { // not cacheable, go back and do a standard read
		position--
		return read()
	}
	// 12-bit hash from the length and first one/two bytes
	let key = ((length << 5) ^ (length > 1 ? dataView.getUint16(position) : length > 0 ? src[position] : 0)) & 0xfff
	let entry = keyCache[key]
	let checkPosition = position
	let end = position + length - 3
	let chunk
	let i = 0
	if (entry && entry.bytes == length) {
		// compare 4-byte chunks against the cached entry
		while (checkPosition < end) {
			chunk = dataView.getUint32(checkPosition)
			if (chunk != entry[i++]) {
				checkPosition = 0x70000000 // sentinel: mismatch, abort comparison
				break
			}
			checkPosition += 4
		}
		end += 3
		// compare the remaining 0-3 bytes individually
		while (checkPosition < end) {
			chunk = src[checkPosition++]
			if (chunk != entry[i++]) {
				checkPosition = 0x70000000
				break
			}
		}
		if (checkPosition === end) {
			// full match: reuse the cached decoded string
			position = checkPosition
			return entry.string
		}
		end -= 3
		checkPosition = position
	}
	// cache miss: record the key's byte chunks, then decode and cache the string
	entry = []
	keyCache[key] = entry
	entry.bytes = length
	while (checkPosition < end) {
		chunk = dataView.getUint32(checkPosition)
		entry.push(chunk)
		checkPosition += 4
	}
	end += 3
	while (checkPosition < end) {
		chunk = src[checkPosition++]
		entry.push(chunk)
	}
	// for small blocks, avoiding the overhead of the extract call is helpful
	let string = length < 16 ? shortStringInJS(length) : longStringInJS(length)
	if (string != null)
		return entry.string = string
	return entry.string = readFixedString(length)
}

// Wrapper for CBOR tags with no registered extension handler.
export class Tag {
	constructor(value, tag) {
		this.value = value
		this.tag = tag
	}
}

// Built-in tag handlers (RFC 8949 standard tags 0-5, plus cbor-x extensions)
currentExtensions[0] = (dateString) => {
	// string date extension
	return new Date(dateString)
}

currentExtensions[1] = (epochSec) => {
	// numeric date extension
	return new Date(Math.round(epochSec * 1000))
}

currentExtensions[2] = (buffer) => {
	// bigint extension
	let value = BigInt(0)
	for (let i = 0, l = buffer.byteLength; i < l; i++) {
		value = BigInt(buffer[i]) + (value << BigInt(8))
	}
	return value
}

currentExtensions[3] = (buffer) => {
	// negative bigint extension
	return BigInt(-1) - currentExtensions[2](buffer)
}
currentExtensions[4] = (fraction) => {
	// decimal fraction [exponent, mantissa]; best to reparse to maintain accuracy
	return +(fraction[1] + 'e' + fraction[0])
}

currentExtensions[5] = (fraction) => {
	// bigfloat [exponent, mantissa]; probably not sufficiently accurate
	return fraction[1] * Math.exp(fraction[0] * Math.log(2))
}

// the registration of the record definition extension
const recordDefinition = (id, structure) => {
	id = id - 0xe000
	let existingStructure = currentStructures[id]
	if (existingStructure && existingStructure.isShared) {
		// remember the shared structure so it can be restored after this decode
		(currentStructures.restoreStructures || (currentStructures.restoreStructures = []))[id] = existingStructure
	}
	currentStructures[id] = structure

	structure.read = createStructureReader(structure)
}
// legacy inline record: [id, structure, ...values] in a single tagged array
currentExtensions[LEGACY_RECORD_INLINE_ID] = (data) => {
	let length = data.length
	let structure = data[1]
	recordDefinition(data[0], structure)
	let object = {}
	for (let i = 2; i < length; i++) {
let key = structure[i - 2] 964 | object[safeKey(key)] = data[i] 965 | } 966 | return object 967 | } 968 | currentExtensions[14] = (value) => { 969 | if (bundledStrings) 970 | return bundledStrings[0].slice(bundledStrings.position0, bundledStrings.position0 += value) 971 | return new Tag(value, 14) 972 | } 973 | currentExtensions[15] = (value) => { 974 | if (bundledStrings) 975 | return bundledStrings[1].slice(bundledStrings.position1, bundledStrings.position1 += value) 976 | return new Tag(value, 15) 977 | } 978 | let glbl = { Error, RegExp } 979 | currentExtensions[27] = (data) => { // http://cbor.schmorp.de/generic-object 980 | return (glbl[data[0]] || Error)(data[1], data[2]) 981 | } 982 | const packedTable = (read) => { 983 | if (src[position++] != 0x84) { 984 | let error = new Error('Packed values structure must be followed by a 4 element array') 985 | if (src.length < position) 986 | error.incomplete = true 987 | throw error 988 | } 989 | let newPackedValues = read() // packed values 990 | if (!newPackedValues || !newPackedValues.length) { 991 | let error = new Error('Packed values structure must be followed by a 4 element array') 992 | error.incomplete = true 993 | throw error 994 | } 995 | packedValues = packedValues ? newPackedValues.concat(packedValues.slice(newPackedValues.length)) : newPackedValues 996 | packedValues.prefixes = read() 997 | packedValues.suffixes = read() 998 | return read() // read the rump 999 | } 1000 | packedTable.handlesRead = true 1001 | currentExtensions[51] = packedTable 1002 | 1003 | currentExtensions[PACKED_REFERENCE_TAG_ID] = (data) => { // packed reference 1004 | if (!packedValues) { 1005 | if (currentDecoder.getShared) 1006 | loadShared() 1007 | else 1008 | return new Tag(data, PACKED_REFERENCE_TAG_ID) 1009 | } 1010 | if (typeof data == 'number') 1011 | return packedValues[16 + (data >= 0 ? 
2 * data : (-2 * data - 1))] 1012 | let error = new Error('No support for non-integer packed references yet') 1013 | if (data === undefined) 1014 | error.incomplete = true 1015 | throw error 1016 | } 1017 | 1018 | // The following code is an incomplete implementation of http://cbor.schmorp.de/stringref 1019 | // the real thing would need to implemennt more logic to populate the stringRefs table and 1020 | // maintain a stack of stringRef "namespaces". 1021 | // 1022 | // currentExtensions[25] = (id) => { 1023 | // return stringRefs[id] 1024 | // } 1025 | // currentExtensions[256] = (read) => { 1026 | // stringRefs = [] 1027 | // try { 1028 | // return read() 1029 | // } finally { 1030 | // stringRefs = null 1031 | // } 1032 | // } 1033 | // currentExtensions[256].handlesRead = true 1034 | 1035 | currentExtensions[28] = (read) => { 1036 | // shareable http://cbor.schmorp.de/value-sharing (for structured clones) 1037 | if (!referenceMap) { 1038 | referenceMap = new Map() 1039 | referenceMap.id = 0 1040 | } 1041 | let id = referenceMap.id++ 1042 | let startingPosition = position 1043 | let token = src[position] 1044 | let target 1045 | // TODO: handle Maps, Sets, and other types that can cycle; this is complicated, because you potentially need to read 1046 | // ahead past references to record structure definitions 1047 | if ((token >> 5) == 4) 1048 | target = [] 1049 | else 1050 | target = {} 1051 | 1052 | let refEntry = { target } // a placeholder object 1053 | referenceMap.set(id, refEntry) 1054 | let targetProperties = read() // read the next value as the target object to id 1055 | if (refEntry.used) {// there is a cycle, so we have to assign properties to original target 1056 | if (Object.getPrototypeOf(target) !== Object.getPrototypeOf(targetProperties)) { 1057 | // this means that the returned target does not match the targetProperties, so we need rerun the read to 1058 | // have the correctly create instance be assigned as a reference, then we do the copy the 
properties back to the 1059 | // target 1060 | // reset the position so that the read can be repeated 1061 | position = startingPosition 1062 | // the returned instance is our new target for references 1063 | target = targetProperties 1064 | referenceMap.set(id, { target }) 1065 | targetProperties = read() 1066 | } 1067 | return Object.assign(target, targetProperties) 1068 | } 1069 | refEntry.target = targetProperties // the placeholder wasn't used, replace with the deserialized one 1070 | return targetProperties // no cycle, can just use the returned read object 1071 | } 1072 | currentExtensions[28].handlesRead = true 1073 | 1074 | currentExtensions[29] = (id) => { 1075 | // sharedref http://cbor.schmorp.de/value-sharing (for structured clones) 1076 | let refEntry = referenceMap.get(id) 1077 | refEntry.used = true 1078 | return refEntry.target 1079 | } 1080 | 1081 | currentExtensions[258] = (array) => new Set(array); // https://github.com/input-output-hk/cbor-sets-spec/blob/master/CBOR_SETS.md 1082 | (currentExtensions[259] = (read) => { 1083 | // https://github.com/shanewholloway/js-cbor-codec/blob/master/docs/CBOR-259-spec 1084 | // for decoding as a standard Map 1085 | if (currentDecoder.mapsAsObjects) { 1086 | currentDecoder.mapsAsObjects = false 1087 | restoreMapsAsObject = true 1088 | } 1089 | return read() 1090 | }).handlesRead = true 1091 | function combine(a, b) { 1092 | if (typeof a === 'string') 1093 | return a + b 1094 | if (a instanceof Array) 1095 | return a.concat(b) 1096 | return Object.assign({}, a, b) 1097 | } 1098 | function getPackedValues() { 1099 | if (!packedValues) { 1100 | if (currentDecoder.getShared) 1101 | loadShared() 1102 | else 1103 | throw new Error('No packed values available') 1104 | } 1105 | return packedValues 1106 | } 1107 | const SHARED_DATA_TAG_ID = 0x53687264 // ascii 'Shrd' 1108 | currentExtensionRanges.push((tag, input) => { 1109 | if (tag >= 225 && tag <= 255) 1110 | return combine(getPackedValues().prefixes[tag - 224], 
input) 1111 | if (tag >= 28704 && tag <= 32767) 1112 | return combine(getPackedValues().prefixes[tag - 28672], input) 1113 | if (tag >= 1879052288 && tag <= 2147483647) 1114 | return combine(getPackedValues().prefixes[tag - 1879048192], input) 1115 | if (tag >= 216 && tag <= 223) 1116 | return combine(input, getPackedValues().suffixes[tag - 216]) 1117 | if (tag >= 27647 && tag <= 28671) 1118 | return combine(input, getPackedValues().suffixes[tag - 27639]) 1119 | if (tag >= 1811940352 && tag <= 1879048191) 1120 | return combine(input, getPackedValues().suffixes[tag - 1811939328]) 1121 | if (tag == SHARED_DATA_TAG_ID) {// we do a special check for this so that we can keep the currentExtensions as densely stored array (v8 stores arrays densely under about 3000 elements) 1122 | return { 1123 | packedValues: packedValues, 1124 | structures: currentStructures.slice(0), 1125 | version: input, 1126 | } 1127 | } 1128 | if (tag == 55799) // self-descriptive CBOR tag, just return input value 1129 | return input 1130 | }) 1131 | 1132 | const isLittleEndianMachine = new Uint8Array(new Uint16Array([1]).buffer)[0] == 1 1133 | export const typedArrays = [Uint8Array, Uint8ClampedArray, Uint16Array, Uint32Array, 1134 | typeof BigUint64Array == 'undefined' ? { name:'BigUint64Array' } : BigUint64Array, Int8Array, Int16Array, Int32Array, 1135 | typeof BigInt64Array == 'undefined' ? 
{ name:'BigInt64Array' } : BigInt64Array, Float32Array, Float64Array] 1136 | const typedArrayTags = [64, 68, 69, 70, 71, 72, 77, 78, 79, 85, 86] 1137 | for (let i = 0; i < typedArrays.length; i++) { 1138 | registerTypedArray(typedArrays[i], typedArrayTags[i]) 1139 | } 1140 | function registerTypedArray(TypedArray, tag) { 1141 | let dvMethod = 'get' + TypedArray.name.slice(0, -5) 1142 | let bytesPerElement; 1143 | if (typeof TypedArray === 'function') 1144 | bytesPerElement = TypedArray.BYTES_PER_ELEMENT; 1145 | else 1146 | TypedArray = null; 1147 | for (let littleEndian = 0; littleEndian < 2; littleEndian++) { 1148 | if (!littleEndian && bytesPerElement == 1) 1149 | continue 1150 | let sizeShift = bytesPerElement == 2 ? 1 : bytesPerElement == 4 ? 2 : bytesPerElement == 8 ? 3 : 0 1151 | currentExtensions[littleEndian ? tag : (tag - 4)] = (bytesPerElement == 1 || littleEndian == isLittleEndianMachine) ? (buffer) => { 1152 | if (!TypedArray) 1153 | throw new Error('Could not find typed array for code ' + tag) 1154 | if (!currentDecoder.copyBuffers) { 1155 | // try provide a direct view, but will only work if we are byte-aligned 1156 | if (bytesPerElement === 1 || 1157 | bytesPerElement === 2 && !(buffer.byteOffset & 1) || 1158 | bytesPerElement === 4 && !(buffer.byteOffset & 3) || 1159 | bytesPerElement === 8 && !(buffer.byteOffset & 7)) 1160 | return new TypedArray(buffer.buffer, buffer.byteOffset, buffer.byteLength >> sizeShift); 1161 | } 1162 | // we have to slice/copy here to get a new ArrayBuffer, if we are not word/byte aligned 1163 | return new TypedArray(Uint8Array.prototype.slice.call(buffer, 0).buffer) 1164 | } : buffer => { 1165 | if (!TypedArray) 1166 | throw new Error('Could not find typed array for code ' + tag) 1167 | let dv = new DataView(buffer.buffer, buffer.byteOffset, buffer.byteLength) 1168 | let elements = buffer.length >> sizeShift 1169 | let ta = new TypedArray(elements) 1170 | let method = dv[dvMethod] 1171 | for (let i = 0; i < elements; i++) 
{ 1172 | ta[i] = method.call(dv, i << sizeShift, littleEndian) 1173 | } 1174 | return ta 1175 | } 1176 | } 1177 | } 1178 | 1179 | function readBundleExt() { 1180 | let length = readJustLength() 1181 | let bundlePosition = position + read() 1182 | for (let i = 2; i < length; i++) { 1183 | // skip past bundles that were already read 1184 | let bundleLength = readJustLength() // this will increment position, so must add to position afterwards 1185 | position += bundleLength 1186 | } 1187 | let dataPosition = position 1188 | position = bundlePosition 1189 | bundledStrings = [readStringJS(readJustLength()), readStringJS(readJustLength())] 1190 | bundledStrings.position0 = 0 1191 | bundledStrings.position1 = 0 1192 | bundledStrings.postBundlePosition = position 1193 | position = dataPosition 1194 | return read() 1195 | } 1196 | 1197 | function readJustLength() { 1198 | let token = src[position++] & 0x1f 1199 | if (token > 0x17) { 1200 | switch (token) { 1201 | case 0x18: 1202 | token = src[position++] 1203 | break 1204 | case 0x19: 1205 | token = dataView.getUint16(position) 1206 | position += 2 1207 | break 1208 | case 0x1a: 1209 | token = dataView.getUint32(position) 1210 | position += 4 1211 | break 1212 | } 1213 | } 1214 | return token 1215 | } 1216 | 1217 | function loadShared() { 1218 | if (currentDecoder.getShared) { 1219 | let sharedData = saveState(() => { 1220 | // save the state in case getShared modifies our buffer 1221 | src = null 1222 | return currentDecoder.getShared() 1223 | }) || {} 1224 | let updatedStructures = sharedData.structures || [] 1225 | currentDecoder.sharedVersion = sharedData.version 1226 | packedValues = currentDecoder.sharedValues = sharedData.packedValues 1227 | if (currentStructures === true) 1228 | currentDecoder.structures = currentStructures = updatedStructures 1229 | else 1230 | currentStructures.splice.apply(currentStructures, [0, updatedStructures.length].concat(updatedStructures)) 1231 | } 1232 | } 1233 | 1234 | function 
saveState(callback) { 1235 | let savedSrcEnd = srcEnd 1236 | let savedPosition = position 1237 | let savedStringPosition = stringPosition 1238 | let savedSrcStringStart = srcStringStart 1239 | let savedSrcStringEnd = srcStringEnd 1240 | let savedSrcString = srcString 1241 | let savedStrings = strings 1242 | let savedReferenceMap = referenceMap 1243 | let savedBundledStrings = bundledStrings 1244 | 1245 | // TODO: We may need to revisit this if we do more external calls to user code (since it could be slow) 1246 | let savedSrc = new Uint8Array(src.slice(0, srcEnd)) // we copy the data in case it changes while external data is processed 1247 | let savedStructures = currentStructures 1248 | let savedDecoder = currentDecoder 1249 | let savedSequentialMode = sequentialMode 1250 | let value = callback() 1251 | srcEnd = savedSrcEnd 1252 | position = savedPosition 1253 | stringPosition = savedStringPosition 1254 | srcStringStart = savedSrcStringStart 1255 | srcStringEnd = savedSrcStringEnd 1256 | srcString = savedSrcString 1257 | strings = savedStrings 1258 | referenceMap = savedReferenceMap 1259 | bundledStrings = savedBundledStrings 1260 | src = savedSrc 1261 | sequentialMode = savedSequentialMode 1262 | currentStructures = savedStructures 1263 | currentDecoder = savedDecoder 1264 | dataView = new DataView(src.buffer, src.byteOffset, src.byteLength) 1265 | return value 1266 | } 1267 | export function clearSource() { 1268 | src = null 1269 | referenceMap = null 1270 | currentStructures = null 1271 | } 1272 | 1273 | export function addExtension(extension) { 1274 | currentExtensions[extension.tag] = extension.decode 1275 | } 1276 | 1277 | export function setSizeLimits(limits) { 1278 | if (limits.maxMapSize) maxMapSize = limits.maxMapSize; 1279 | if (limits.maxArraySize) maxArraySize = limits.maxArraySize; 1280 | if (limits.maxObjectSize) maxObjectSize = limits.maxObjectSize; 1281 | } 1282 | 1283 | export const mult10 = new Array(147) // this is a table matching binary 
exponents to the multiplier to determine significant digit rounding 1284 | for (let i = 0; i < 256; i++) { 1285 | mult10[i] = +('1e' + Math.floor(45.15 - i * 0.30103)) 1286 | } 1287 | let defaultDecoder = new Decoder({ useRecords: false }) 1288 | export const decode = defaultDecoder.decode 1289 | export const decodeMultiple = defaultDecoder.decodeMultiple 1290 | export const FLOAT32_OPTIONS = { 1291 | NEVER: 0, 1292 | ALWAYS: 1, 1293 | DECIMAL_ROUND: 3, 1294 | DECIMAL_FIT: 4 1295 | } 1296 | export function roundFloat32(float32Number) { 1297 | f32Array[0] = float32Number 1298 | let multiplier = mult10[((u8Array[3] & 0x7f) << 1) | (u8Array[2] >> 7)] 1299 | return ((multiplier * float32Number + (float32Number > 0 ? 0.5 : -0.5)) >> 0) / multiplier 1300 | } 1301 | --------------------------------------------------------------------------------