├── .npmignore ├── assets ├── performance.png └── test-keymap.txt ├── encode.d.ts ├── tests ├── floats.json ├── example5.json ├── index.html ├── example3.json ├── example2.json ├── test-compatibility.cjs ├── example.json ├── test-incomplete.js ├── test-node-iterators.js ├── test-node-stream.js ├── test-keymap.js ├── strings2.json ├── example4.json ├── benchmark.cjs ├── benchmark-stream.cjs ├── sample-large.json ├── example-twitter.json └── test.js ├── decode.d.ts ├── SECURITY.md ├── index.js ├── browser.js ├── webpack.config.js ├── LICENSE ├── node-index.js ├── .gitignore ├── stream.js ├── rollup.config.js ├── package.json ├── index.d.ts ├── iterators.js ├── benchmark.md ├── README.md └── decode.js /.npmignore: -------------------------------------------------------------------------------- 1 | # Dependency directories 2 | node_modules/ 3 | tests/samples 4 | .vs 5 | build/ -------------------------------------------------------------------------------- /assets/performance.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/kriszyp/cbor-x/master/assets/performance.png -------------------------------------------------------------------------------- /encode.d.ts: -------------------------------------------------------------------------------- 1 | export { encode, encodeAsIterable, encodeAsAsyncIterable, Encoder, addExtension, FLOAT32_OPTIONS } from '.' 
2 | -------------------------------------------------------------------------------- /tests/floats.json: -------------------------------------------------------------------------------- 1 | [0.53232,542.5325,3252200000,6643.2,0.000000432,1.992e20,5.1,9.3242e-20,525.235,8899.32,522.42,2342.43,12211.1,8888.3,0.000432] -------------------------------------------------------------------------------- /decode.d.ts: -------------------------------------------------------------------------------- 1 | export { decode, decodeMultiple, Decoder, addExtension, clearSource,roundFloat32, isNativeAccelerationEnabled, 2 | Extension, Options, FLOAT32_OPTIONS, setMaxLimits, MAX_LIMITS_OPTIONS } from '.' 3 | -------------------------------------------------------------------------------- /SECURITY.md: -------------------------------------------------------------------------------- 1 | # Security Policy 2 | 3 | ## Supported Versions 4 | 5 | | Version | Supported | 6 | | ------- | ------------------ | 7 | | 0.9.x | :white_check_mark: | 8 | 9 | ## Reporting a Vulnerability 10 | 11 | Please report security vulnerabilities to kriszyp@gmail.com. 
12 | -------------------------------------------------------------------------------- /tests/example5.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "test", 3 | "greeting": "Hello, World!", 4 | "flag": true, 5 | "littleNum": 3, 6 | "biggerNum": 32254435, 7 | "decimal":1.332232, 8 | "bigDecimal": 3.5522E35, 9 | "negative": -54, 10 | "aNull": null, 11 | "more": "another string" 12 | } 13 | -------------------------------------------------------------------------------- /index.js: -------------------------------------------------------------------------------- 1 | export { Encoder, addExtension, encode, encodeAsIterable, encodeAsAsyncIterable, NEVER, ALWAYS, DECIMAL_ROUND, DECIMAL_FIT, REUSE_BUFFER_MODE } from './encode.js' 2 | export { Tag, Decoder, decodeMultiple, decode, FLOAT32_OPTIONS, clearSource, roundFloat32, isNativeAccelerationEnabled, setSizeLimits } from './decode.js' 3 | export { decodeIter, encodeIter } from './iterators.js' 4 | -------------------------------------------------------------------------------- /browser.js: -------------------------------------------------------------------------------- 1 | exports.Encoder = require('./encode').Encoder 2 | exports.Decoder = require('./decode').Decoder 3 | exports.addExtension = require('./encode').addExtension 4 | let encoder = new exports.Encoder({ useRecords: false }) 5 | exports.decode = encoder.decode 6 | exports.encode = encoder.encode 7 | Object.assign(exports, { 8 | ALWAYS:1, 9 | DECIMAL_ROUND: 3, 10 | DECIMAL_FIT: 4 11 | }) 12 | -------------------------------------------------------------------------------- /webpack.config.js: -------------------------------------------------------------------------------- 1 | var webpack = require('webpack') 2 | var path = require('path') 3 | module.exports = { 4 | entry: { 5 | index: './browser.js' 6 | }, 7 | output: { 8 | path: path.join(__dirname, 'dist'), 9 | library: 'CBOR', 10 | libraryTarget: 'umd' 
11 | }, 12 | node: { Buffer: false }, 13 | devtool: 'source-map', 14 | optimization: { 15 | minimize: true 16 | }, 17 | //mode: 'development' 18 | mode: 'production' 19 | }; 20 | -------------------------------------------------------------------------------- /tests/index.html: -------------------------------------------------------------------------------- 1 | 2 |
3 | 14 | 15 | 16 | 17 | 18 | 21 | 22 | 23 | 24 | 27 | 28 | 29 | -------------------------------------------------------------------------------- /tests/example3.json: -------------------------------------------------------------------------------- 1 | { 2 | "glossary": { 3 | "title": "example glossary", 4 | "GlossDiv": { 5 | "title": "S", 6 | "GlossList": { 7 | "GlossEntry": { 8 | "ID": "SGML", 9 | "SortAs": "SGML", 10 | "GlossTerm": "Standard Generalized Markup Language", 11 | "Acronym": "SGML", 12 | "Abbrev": "ISO 8879:1986", 13 | "GlossDef": { 14 | "para": "A meta-markup language, used to create markup languages such as DocBook.", 15 | "GlossSeeAlso": ["GML", "XML"] 16 | }, 17 | "GlossSee": "markup" 18 | } 19 | } 20 | } 21 | } 22 | } -------------------------------------------------------------------------------- /tests/example2.json: -------------------------------------------------------------------------------- 1 | {"widget": { 2 | "debug": "on", 3 | "window": { 4 | "title": "Sample Konfabulator Widget", 5 | "name": "main_window", 6 | "width": 500, 7 | "height": 500 8 | }, 9 | "image": { 10 | "src": "Images/Sun.png", 11 | "name": "sun1", 12 | "hOffset": 250, 13 | "vOffset": 250, 14 | "alignment": "center" 15 | }, 16 | "text": { 17 | "data": "Click Here", 18 | "size": 36, 19 | "style": "bold", 20 | "name": "text1", 21 | "hOffset": 250, 22 | "vOffset": 100, 23 | "alignment": "center", 24 | "onMouseUp": "sun1.opacity = (sun1.opacity / 100) * 90;" 25 | } 26 | }} -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2020 Kris Zyp 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, 
publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /node-index.js: -------------------------------------------------------------------------------- 1 | export { Encoder, addExtension, encode, encodeAsIterable, encodeAsAsyncIterable, NEVER, ALWAYS, DECIMAL_ROUND, DECIMAL_FIT, REUSE_BUFFER_MODE } from './encode.js' 2 | export { Tag, Decoder, decodeMultiple, decode, FLOAT32_OPTIONS, clearSource, roundFloat32, isNativeAccelerationEnabled, setSizeLimits } from './decode.js' 3 | export { EncoderStream, DecoderStream } from './stream.js' 4 | export { decodeIter, encodeIter } from './iterators.js' 5 | export const useRecords = false 6 | export const mapsAsObjects = true 7 | import { setExtractor } from './decode.js' 8 | import { createRequire } from 'module' 9 | 10 | const nativeAccelerationDisabled = process.env.CBOR_NATIVE_ACCELERATION_DISABLED !== undefined && process.env.CBOR_NATIVE_ACCELERATION_DISABLED.toLowerCase() === 'true'; 11 | 12 | if (!nativeAccelerationDisabled) { 13 | let extractor 14 | try { 15 | if (typeof require == 'function') 16 | extractor = require('cbor-extract') 17 | else 18 | extractor = 
createRequire(import.meta.url)('cbor-extract') 19 | if (extractor) 20 | setExtractor(extractor.extractStrings) 21 | } catch (error) { 22 | // native module is optional 23 | } 24 | } 25 | -------------------------------------------------------------------------------- /tests/test-compatibility.cjs: -------------------------------------------------------------------------------- 1 | const data = require('./example4.json'); 2 | const cborX = require('..'); 3 | const chai = require('chai'); 4 | 5 | function tryRequire(module) { 6 | try { 7 | return require(module) 8 | } catch(error) { 9 | console.log(error) 10 | } 11 | } 12 | //if (typeof chai === 'undefined') { chai = require('chai') } 13 | const assert = chai.assert 14 | var cbor_module = tryRequire('cbor'); 15 | var decode = cborX.decode 16 | var encode = cborX.encode 17 | 18 | const addCompatibilitySuite = (data) => () => { 19 | if (cbor_module) { 20 | test('from cbor', function(){ 21 | var serialized = cbor_module.encode(data) 22 | var deserialized = decode(serialized) 23 | assert.deepEqual(deserialized, data) 24 | }) 25 | 26 | test('to cbor', function(){ 27 | var serialized = encode(data) 28 | var deserialized = cbor_module.decodeFirstSync(serialized) 29 | assert.deepEqual(deserialized, data) 30 | }) 31 | } 32 | } 33 | 34 | suite('cbor-x compatibility tests (example)', addCompatibilitySuite(require('./example.json'))) 35 | suite('cbor-x compatibility tests (example4)', addCompatibilitySuite(require('./example4.json'))) 36 | suite('cbor-x compatibility tests (example5)', addCompatibilitySuite(require('./example5.json'))) 37 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Logs 2 | logs 3 | *.log 4 | npm-debug.log* 5 | yarn-debug.log* 6 | yarn-error.log* 7 | dist 8 | 9 | # Runtime data 10 | pids 11 | *.pid 12 | *.seed 13 | *.pid.lock 14 | 15 | # Directory for instrumented libs 
generated by jscoverage/JSCover 16 | lib-cov 17 | 18 | # Coverage directory used by tools like istanbul 19 | coverage 20 | 21 | # nyc test coverage 22 | .nyc_output 23 | 24 | # Grunt intermediate storage (http://gruntjs.com/creating-plugins#storing-task-files) 25 | .grunt 26 | 27 | # Bower dependency directory (https://bower.io/) 28 | bower_components 29 | 30 | # node-waf configuration 31 | .lock-wscript 32 | 33 | # Compiled binary addons (http://nodejs.org/api/addons.html) 34 | build/Release 35 | 36 | # Dependency directories 37 | node_modules/ 38 | jspm_packages/ 39 | 40 | package-lock.json 41 | yarn.lock 42 | # Typescript v1 declaration files 43 | typings/ 44 | 45 | # Optional npm cache directory 46 | .npm 47 | 48 | # Optional eslint cache 49 | .eslintcache 50 | 51 | # Optional REPL history 52 | .node_repl_history 53 | 54 | # Output of 'npm pack' 55 | *.tgz 56 | 57 | # Yarn Integrity file 58 | .yarn-integrity 59 | 60 | # dotenv environment variables file 61 | .env 62 | tests/samples 63 | 64 | # Visual Studio Code directory 65 | .vscode 66 | .vs 67 | .idea 68 | 69 | build 70 | dist/test.js -------------------------------------------------------------------------------- /tests/example.json: -------------------------------------------------------------------------------- 1 | { 2 | "int0": 0, 3 | "int1": 1, 4 | "int1-": -1, 5 | "int8": 255, 6 | "int8-": -255, 7 | "int16": 256, 8 | "int16-": -256, 9 | "int32": 65536, 10 | "int32-": -65536, 11 | "nil": null, 12 | "true": true, 13 | "false": false, 14 | "float": 0.5, 15 | "float-": -0.5, 16 | "string0": "", 17 | "string1": "A", 18 | "string4": "foobarbaz", 19 | "string8": "Omnes viae Romam ducunt.", 20 | "string16": "L’homme n’est qu’un roseau, le plus faible de la nature ; mais c’est un roseau pensant. Il ne faut pas que l’univers entier s’arme pour l’écraser : une vapeur, une goutte d’eau, suffit pour le tuer. 
Mais, quand l’univers l’écraserait, l’homme serait encore plus noble que ce qui le tue, puisqu’il sait qu’il meurt, et l’avantage que l’univers a sur lui, l’univers n’en sait rien. Toute notre dignité consiste donc en la pensée. C’est de là qu’il faut nous relever et non de l’espace et de la durée, que nous ne saurions remplir. Travaillons donc à bien penser : voilà le principe de la morale.", 21 | "array0": [], 22 | "array1": [ 23 | "foo" 24 | ], 25 | "array8": [ 26 | 1, 27 | 2, 28 | 4, 29 | 8, 30 | 16, 31 | 32, 32 | 64, 33 | 128, 34 | 256, 35 | 512, 36 | 1024, 37 | 2048, 38 | 4096, 39 | 8192, 40 | 16384, 41 | 32768, 42 | 65536, 43 | 131072, 44 | 262144, 45 | 524288, 46 | 1048576 47 | ], 48 | "map0": {}, 49 | "map1": { 50 | "foo": "bar" 51 | } 52 | } -------------------------------------------------------------------------------- /tests/test-incomplete.js: -------------------------------------------------------------------------------- 1 | import { encode } from '../index.js' 2 | import { assert } from 'chai' 3 | import { Encoder } from '../encode.js' 4 | 5 | const tests = { 6 | string: 'interesting string', 7 | number: 12345, 8 | buffer: Buffer.from('hello world'), 9 | bigint: 12345678910n, 10 | array: [1, 2, 3, 4, 5, 6, 7, 8, 9, 10], 11 | 'many-strings': [], 12 | set: new Set('abcdefghijklmnopqrstuvwxyz'.split('')), 13 | object: { a: 1, b: 2, c: 3, d: 4, e: 5, f: 6 } 14 | } 15 | for (let i = 0; i < 100; i++) { 16 | tests['many-strings'].push('test-data-' + i) 17 | } 18 | 19 | suite('encode and decode tests with partial values', function () { 20 | const encoder = new Encoder({ objectMode: true, structures: [] }) 21 | 22 | for (const [label, testData] of Object.entries(tests)) { 23 | test(label, () => { 24 | const encoded = encoder.encode(testData) 25 | assert.isTrue(Buffer.isBuffer(encoded), 'encode returns a Buffer') 26 | assert.deepStrictEqual(encoder.decode(encoded, encoded.length, true), testData, 'full buffer decodes well') 27 | const firstHalf = 
encoded.slice(0, Math.floor(encoded.length / 2)) 28 | let value 29 | try { 30 | value = encoder.decode(firstHalf, firstHalf.length, true) 31 | } catch (err) { 32 | if (err.incomplete !== true) { 33 | assert.fail(`Should throw an error with .incomplete set to true, instead threw error <${err}>`) 34 | } else { 35 | return; // victory! correct outcome! 36 | } 37 | } 38 | assert.fail(`Should throw an error with .incomplete set to true, instead returned value ${JSON.stringify(value)}`) 39 | }) 40 | } 41 | }) 42 | -------------------------------------------------------------------------------- /assets/test-keymap.txt: -------------------------------------------------------------------------------- 1 | Basic No Recs: Small 2 | Buffer: 100% (92) 3 | Encode: 100% (0.01) 4 | Decode: 100% (0.01) 5 | PreMap No Recs: Small 6 | Buffer: 90% (83) 7 | Encode: 110% (0.011) 8 | Decode: 100% (0.01) 9 | KeyMap No Recs: Small 10 | Buffer: 84% (77) 11 | Encode: 90% (0.009) 12 | Decode: 70% (0.007) 13 | Optima No Recs: Small 14 | Buffer: 90% (83) 15 | Encode: 100% (0.01) 16 | Decode: 90% (0.009) 17 | Basic Wi Recs: Small 18 | Buffer: 98% (90) 19 | Encode: 100% (0.01) 20 | Decode: 90% (0.009) 21 | PreMap Wi Recs: Small 22 | Buffer: 71% (65) 23 | Encode: 110% (0.011) 24 | Decode: 80% (0.008) 25 | KeyMap Wi Recs: Small 26 | Buffer: 90% (83) 27 | Encode: 120% (0.012) 28 | Decode: 70% (0.007) 29 | Optima Wi Recs: Small 30 | Buffer: 71% (65) 31 | Encode: 110% (0.011) 32 | Decode: 60% (0.006) 33 | Basic No Recs: Large 34 | Buffer: 100% (24737) 35 | Encode: 100% (0.157) 36 | Decode: 100% (0.614) 37 | PreMap No Recs: Large 38 | Buffer: 88% (21737) 39 | Encode: 145% (0.227) 40 | Decode: 116% (0.714) 41 | KeyMap No Recs: Large 42 | Buffer: 84% (20737) 43 | Encode: 143% (0.224) 44 | Decode: 95% (0.586) 45 | Optima No Recs: Large 46 | Buffer: 88% (21737) 47 | Encode: 175% (0.274) 48 | Decode: 123% (0.753) 49 | Basic Wi Recs: Large 50 | Buffer: 76% (18748) 51 | Encode: 112% (0.176) 52 | Decode: 82% 
(0.502) 53 | PreMap Wi Recs: Large 54 | Buffer: 76% (18737) 55 | Encode: 136% (0.214) 56 | Decode: 115% (0.709) 57 | KeyMap Wi Recs: Large 58 | Buffer: 76% (18744) 59 | Encode: 166% (0.26) 60 | Decode: 81% (0.5) 61 | Optima Wi Recs: Large 62 | Buffer: 76% (18737) 63 | Encode: 185% (0.291) 64 | Decode: 118% (0.725) 65 | -------------------------------------------------------------------------------- /stream.js: -------------------------------------------------------------------------------- 1 | import { Transform } from 'stream' 2 | import { Encoder } from './encode.js' 3 | import { checkedRead, getPosition, Decoder, clearSource } from './decode.js' 4 | var DEFAULT_OPTIONS = {objectMode: true} 5 | 6 | export class EncoderStream extends Transform { 7 | constructor(options) { 8 | if (!options) 9 | options = {} 10 | options.writableObjectMode = true 11 | super(options) 12 | options.sequential = true 13 | this.encoder = options.encoder || new Encoder(options) 14 | } 15 | async _transform(value, encoding, callback) { 16 | try { 17 | for await (let chunk of this.encoder.encodeAsAsyncIterable(value)) { 18 | this.push(chunk) 19 | } 20 | callback() 21 | } catch(error) { callback (error) } 22 | } 23 | } 24 | 25 | export class DecoderStream extends Transform { 26 | constructor(options) { 27 | if (!options) 28 | options = {} 29 | options.objectMode = true 30 | super(options) 31 | options.structures = [] 32 | this.decoder = options.decoder || new Decoder(options) 33 | } 34 | _transform(chunk, encoding, callback) { 35 | if (this.incompleteBuffer) { 36 | chunk = Buffer.concat([this.incompleteBuffer, chunk]) 37 | this.incompleteBuffer = null 38 | } 39 | let values 40 | try { 41 | values = this.decoder.decodeMultiple(chunk) 42 | } catch(error) { 43 | if (error.incomplete) { 44 | this.incompleteBuffer = chunk.slice(error.lastPosition) 45 | values = error.values 46 | } else { 47 | return callback(error) 48 | } 49 | } finally { 50 | for (let value of values || []) { 51 | if (value === 
null) 52 | value = this.getNullValue() 53 | this.push(value) 54 | } 55 | } 56 | callback() 57 | } 58 | getNullValue() { 59 | return Symbol.for(null) 60 | } 61 | } 62 | -------------------------------------------------------------------------------- /rollup.config.js: -------------------------------------------------------------------------------- 1 | import terser from '@rollup/plugin-terser'; 2 | import json from "@rollup/plugin-json"; 3 | import replace from "@rollup/plugin-replace"; 4 | 5 | export default [ 6 | { 7 | input: "node-index.js", 8 | output: [ 9 | { 10 | file: "dist/node.cjs", 11 | format: "cjs", 12 | sourcemap: true 13 | } 14 | ] 15 | }, 16 | { 17 | input: "index.js", 18 | output: { 19 | file: "dist/index.js", 20 | format: "umd", 21 | name: "CBOR", 22 | sourcemap: true 23 | } 24 | }, 25 | { 26 | input: "index.js", 27 | plugins: [ 28 | replace({ Function: 'BlockedFunction '}) 29 | ], 30 | output: { 31 | file: "dist/index-no-eval.cjs", 32 | format: "umd", 33 | name: "CBOR", 34 | sourcemap: true 35 | }, 36 | }, 37 | { 38 | input: "decode.js", 39 | plugins: [ 40 | replace({ Function: 'BlockedFunction '}) 41 | ], 42 | output: { 43 | file: "dist/decode-no-eval.cjs", 44 | format: "umd", 45 | name: "CBOR", 46 | sourcemap: true 47 | }, 48 | }, 49 | { 50 | input: "index.js", 51 | plugins: [ 52 | terser({}) 53 | ], 54 | output: { 55 | file: "dist/index.min.js", 56 | format: "umd", 57 | name: "CBOR", 58 | sourcemap: true 59 | } 60 | }, 61 | { 62 | input: "index.js", 63 | plugins: [ 64 | replace({ Function: 'BlockedFunction '}), 65 | terser({}) 66 | ], 67 | output: { 68 | file: "dist/index-no-eval.min.js", 69 | format: "umd", 70 | name: "CBOR", 71 | sourcemap: true 72 | } 73 | }, 74 | { 75 | input: "tests/test.js", 76 | plugins: [json()], 77 | external: ['chai', '../index.js'], 78 | output: { 79 | file: "dist/test.js", 80 | format: "iife", 81 | sourcemap: true, 82 | globals: { 83 | chai: 'chai', 84 | './index.js': 'CBOR', 85 | }, 86 | } 87 | } 88 | ]; 89 | 
-------------------------------------------------------------------------------- /tests/test-node-iterators.js: -------------------------------------------------------------------------------- 1 | import { encodeIter, decodeIter } from '../index.js' 2 | import { decode } from '../index.js' 3 | import { assert } from 'chai' 4 | 5 | const tests = [ 6 | null, 7 | false, 8 | true, 9 | 'interesting string', 10 | 12345, 11 | 123456789n, 12 | 123.456, 13 | Buffer.from('Hello World'), 14 | new Set('abcdefghijklmnopqrstuvwxyz'.split('')) 15 | ] 16 | 17 | suite('cbor-x iterators interface tests', function () { 18 | test('sync encode iterator', () => { 19 | const encodings = [...encodeIter(tests)] 20 | const decodings = encodings.map(x => decode(x)) 21 | assert.deepStrictEqual(decodings, tests) 22 | }) 23 | 24 | test('async encode iterator', async () => { 25 | async function * generate () { 26 | for (const test of tests) { 27 | await new Promise((resolve, reject) => setImmediate(resolve)) 28 | yield test 29 | } 30 | } 31 | 32 | const chunks = [] 33 | for await (const chunk of encodeIter(generate())) { 34 | chunks.push(chunk) 35 | } 36 | 37 | const decodings = chunks.map(x => decode(x)) 38 | assert.deepStrictEqual(decodings, tests) 39 | }) 40 | 41 | test('sync encode and decode iterator', () => { 42 | const encodings = [...encodeIter(tests)] 43 | assert.isTrue(encodings.every(v => Buffer.isBuffer(v))) 44 | const decodings = [...decodeIter(encodings)] 45 | assert.deepStrictEqual(decodings, tests) 46 | 47 | // also test decodings work with buffers multiple values in a buffer 48 | const concatEncoding = Buffer.concat([...encodings]) 49 | const decodings2 = [...decodeIter([concatEncoding])] 50 | assert.deepStrictEqual(decodings2, tests) 51 | 52 | // also test decodings work with partial buffers that don't align to values perfectly 53 | const half1 = concatEncoding.slice(0, Math.floor(concatEncoding.length / 2)) 54 | const half2 = 
concatEncoding.slice(Math.floor(concatEncoding.length / 2)) 55 | const decodings3 = [...decodeIter([half1, half2])] 56 | assert.deepStrictEqual(decodings3, tests) 57 | }) 58 | 59 | test('async encode and decode iterator', async () => { 60 | async function * generator () { 61 | for (const obj of tests) { 62 | await new Promise((resolve, reject) => setImmediate(resolve)) 63 | yield obj 64 | } 65 | } 66 | const yields = [] 67 | for await (const value of decodeIter(encodeIter(generator()))) { 68 | yields.push(value) 69 | } 70 | assert.deepStrictEqual(yields, tests) 71 | }) 72 | }) -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "cbor-x", 3 | "author": "Kris Zyp", 4 | "version": "1.6.2", 5 | "description": "Ultra-fast and conformant CBOR (RFC 8949) implementation with support for numerous tag extensions including records and structured cloning", 6 | "license": "MIT", 7 | "types": "./index.d.ts", 8 | "main": "./dist/node.cjs", 9 | "module": "./index.js", 10 | "keywords": [ 11 | "CBOR", 12 | "performance", 13 | "structured", 14 | "clone" 15 | ], 16 | "repository": { 17 | "type": "git", 18 | "url": "http://github.com/kriszyp/cbor-x" 19 | }, 20 | "scripts": { 21 | "benchmark": "node ./tests/benchmark.cjs", 22 | "build": "rollup -c", 23 | "dry-run": "npm publish --dry-run", 24 | "prepare": "npm run build", 25 | "test": "mocha tests/test**.*js -u tdd --experimental-json-modules" 26 | }, 27 | "type": "module", 28 | "exports": { 29 | ".": { 30 | "node": { 31 | "require": "./dist/node.cjs", 32 | "import": "./node-index.js" 33 | }, 34 | "types": { 35 | "require": "./index.d.cts", 36 | "import": "./index.d.ts" 37 | }, 38 | "default": "./index.js" 39 | }, 40 | "./encode": { 41 | "node": { 42 | "import": "./index.js", 43 | "require": "./dist/node.cjs" 44 | }, 45 | "default": { 46 | "import": "./encode.js" 47 | } 48 | }, 49 | "./decode": { 
50 | "node": { 51 | "import": "./index.js", 52 | "require": "./dist/node.cjs" 53 | }, 54 | "default": { 55 | "import": "./decode.js" 56 | } 57 | }, 58 | "./decode-no-eval": { 59 | "types": "./decode.d.ts", 60 | "default": "./dist/decode-no-eval.cjs" 61 | }, 62 | "./index-no-eval": { 63 | "types": "./index.d.ts", 64 | "default": "./dist/index-no-eval.cjs" 65 | }, 66 | "./package.json": "./package.json" 67 | }, 68 | "files": [ 69 | "/dist", 70 | "*.md", 71 | "/*.js", 72 | "/*.ts" 73 | ], 74 | "browser": { 75 | "node:buffer": false 76 | }, 77 | "optionalDependencies": { 78 | "cbor-extract": "^2.2.0" 79 | }, 80 | "devDependencies": { 81 | "@rollup/plugin-json": "^5.0.1", 82 | "@rollup/plugin-replace": "^5.0.1", 83 | "@rollup/plugin-terser": "^0.1.0", 84 | "@types/node": "latest", 85 | "async": "^3", 86 | "cbor": "^5", 87 | "cbor-sync": "^1.0.4", 88 | "chai": "^4.3.4", 89 | "cpy-cli": "^4.1.0", 90 | "esm": "^3.2.25", 91 | "mocha": "^10.1.0", 92 | "rollup": "^3.2.5" 93 | } 94 | } 95 | -------------------------------------------------------------------------------- /index.d.ts: -------------------------------------------------------------------------------- 1 | export enum FLOAT32_OPTIONS { 2 | NEVER = 0, 3 | ALWAYS = 1, 4 | DECIMAL_ROUND = 3, 5 | DECIMAL_FIT = 4 6 | } 7 | export interface SizeLimitOptions { 8 | maxArraySize: number; 9 | maxMapSize: number; 10 | maxObjectSize: number; 11 | } 12 | export interface Options { 13 | alwaysUseFloat?: boolean 14 | useFloat32?: FLOAT32_OPTIONS 15 | useRecords?: boolean 16 | structures?: {}[] 17 | structuredClone?: boolean 18 | mapsAsObjects?: boolean 19 | variableMapSize?: boolean 20 | copyBuffers?: boolean 21 | bundleStrings?: boolean 22 | useTimestamp32?: boolean 23 | largeBigIntToFloat?: boolean 24 | encodeUndefinedAsNil?: boolean 25 | maxSharedStructures?: number 26 | maxOwnStructures?: number 27 | useSelfDescribedHeader?: boolean 28 | useToJSON?: boolean 29 | keyMap?: {} 30 | shouldShareStructure?: (keys: string[]) => 
boolean 31 | getStructures?(): {}[] 32 | saveStructures?(structures: {}[]): boolean | void 33 | onInvalidDate?: () => any 34 | tagUint8Array?: boolean 35 | pack?: boolean 36 | sequential?: boolean 37 | } 38 | type ClassOf
11 |
12 | The cbor-x package is an extremely fast and conformant CBOR NodeJS/JavaScript implementation. Currently, it is over 3-10x faster than any other CBOR JS implementation (including cbor-js and cborg) and faster than most MessagePack encoders, Avro, and generally faster than native V8 JSON.stringify/parse, on NodeJS. It implements the CBOR format as specified in [RFC-8949](https://www.rfc-editor.org/rfc/rfc8949.html), [RFC-8746](https://tools.ietf.org/html/rfc8746), [RFC-8742](https://datatracker.ietf.org/doc/html/rfc8742), [Packed CBOR](https://datatracker.ietf.org/doc/html/draft-ietf-cbor-packed), numerous [registered IANA tag extensions](https://www.iana.org/assignments/cbor-tags/cbor-tags.xhtml) (the `x` in cbor-x), and proposed optional [record extension](https://github.com/kriszyp/cbor-records), for defining record structures that makes CBOR even faster and more compact, often over twice as fast as even native JSON functions, and 15-50% more compact. See the performance section for more details. Structured cloning (with support for cyclical references) is supported through these tag extensions.
13 |
14 | ## Basic Usage
15 | Install on NodeJS with:
16 |
17 | ```
18 | npm i cbor-x
19 | ```
20 | And `import` or `require` it for basic standard serialization/encoding (`encode`) and deserialization/decoding (`decode`) functions:
21 | ```JavaScript
22 | import { decode, encode } from 'cbor-x';
23 | let serializedAsBuffer = encode(value);
24 | let data = decode(serializedAsBuffer);
25 | ```
26 | This `encode` function will generate standard CBOR without any extensions that should be compatible with any standard CBOR parser/decoder. It will serialize JavaScript objects as CBOR `map`s by default. The `decode` function will deserialize CBOR `map`s as an `Object` with the properties from the map. The cbor-x package runs on any modern JS platform, but does have additional optimizations for NodeJS usage (and will use a node addon for performance boost as an optional dependency).
27 |
28 | ## Deno Usage
29 | Cbor-x modules are standard ESM modules and can be loaded directly from the [deno.land registry for cbor](https://deno.land/x/cbor) for use in Deno. The standard encode and decode functionality is available on Deno, like other platforms.
30 |
31 | ### Streams
32 | We can use the included streaming functionality (which further improves performance). The `EncoderStream` is a NodeJS transform stream that can be used to serialize objects to a binary stream (writing to network/socket, IPC, etc.), and the `DecoderStream` can be used to deserialize objects from a binary stream (reading from network/socket, etc.):
33 |
34 | ```JavaScript
35 | import { EncoderStream } from 'cbor-x';
36 | let stream = new EncoderStream();
37 | stream.write(myData);
38 |
39 | ```
40 | Or for a full example of sending and receiving data on a stream:
41 | ```JavaScript
42 | import { EncoderStream, DecoderStream } from 'cbor-x';
43 | let sendingStream = new EncoderStream();
44 | let receivingStream = new DecoderStream();
45 | // we are just piping to our own stream, but normally you would send and
46 | // receive over some type of inter-process or network connection.
47 | sendingStream.pipe(receivingStream);
48 | sendingStream.write(myData);
49 | receivingStream.on('data', (data) => {
50 | // received data
51 | });
52 | ```
53 | The `EncoderStream` and `DecoderStream` instances will also have the record structure extension enabled by default (see below).
54 |
55 | ### Iterables
56 | In addition to using CBOR with streams, CBOR can also encode to an iterable that can be iterated as a sequence of binary chunks with `encodeAsIterable`, which facilitates progressive encoding:
57 | ```JavaScript
58 | import { encodeAsIterable } from 'cbor-x';
59 |
60 | for (let binaryChunk of encodeAsIterable(data)){
61 | // progressively get binary chunks as data is encoded
62 | }
63 | ```
64 | And `encodeAsAsyncIterable` is also available, which returns an async iterable, and can be used to encode data from async iterables as well as Blob data.
65 |
66 | ```JavaScript
67 | import { encodeAsAsyncIterable } from 'cbor-x';
68 |
69 | let data = { blob: new Blob(...) };
70 | for await (let binaryChunk of encodeAsAsyncIterable(data)){
71 | // progressively get binary chunks as asynchronous data source is encoded
72 | }
73 | ```
74 |
75 | ## Deno Usage
76 | Cbor-x modules are standard ESM modules and can be loaded directly from the [deno.land registry for cbor](https://deno.land/x/cbor) for use in Deno. The standard encode and decode functionality is available on Deno, like other platforms.
77 |
78 | ## Browser Usage
79 | Cbor-x works as standalone JavaScript as well, and runs on modern browsers. It includes a bundled script, at `dist/index.js` for ease of direct loading:
80 | ```HTML
81 |
82 | ```
83 |
84 | This is UMD based, and will register as a module if possible, or create a `CBOR` global with all the exported functions.
85 |
86 | For module-based development, it is recommended that you directly import the module of interest, to minimize dependencies that get pulled into your application:
87 | ```JavaScript
88 | import { decode } from 'cbor-x/decode' // if you only need to decode
89 | ```
90 |
91 | ## Structured Cloning
You can also use cbor-x for [structured cloning](https://html.spec.whatwg.org/multipage/structured-data.html). By enabling the `structuredClone` option, you can include references to other objects or cyclic references, and object identity will be preserved. For example:
93 | ```JavaScript
94 | let obj = {
95 | };
96 | obj.self = obj;
97 | let encoder = new Encoder({ structuredClone: true });
98 | let serialized = encoder.encode(obj);
99 | let copy = encoder.decode(serialized);
100 | copy.self === copy // true
101 |
102 | ```
103 |
104 | This option is disabled by default because reference checking degrades performance (by about 25-30%). (Note this implementation doesn't serialize every class/type specified in the HTML specification since not all of them make sense for storing across platforms.)
105 |
106 | cbor-x also preserves certain typed objects like `Error`, `Set`, `RegExp` and TypedArray instances, using [registered CBOR tag extensions](https://www.iana.org/assignments/cbor-tags/cbor-tags.xhtml). This works with or without structured cloning enabled.
107 |
108 | ## Record / Object Structures
There is a critical difference between maps (or dictionaries) that hold an arbitrary set of keys and values (JavaScript `Map` is designed for these), and records or object structures that have a well-defined set of fields. Typical JS objects/records may have many instances that reuse the same structure. By using the record extension, this distinction is preserved in CBOR and the encoding can reuse structures and not only provides better type preservation, but yields much more compact encodings and increases decoding performance by 2-3x. Cbor-x automatically generates record definitions that are reused and referenced by objects with the same structure. Records use CBOR's tags to align well with CBOR's tag/extension mechanism. There are a number of ways to use this to our advantage. For large object structures with repeating nested objects with similar structures, simply serializing with the record extension can yield significant benefits. To use the record structures extension, we create a new `Encoder` instance. By default a new `Encoder` instance will have the record extension enabled:
110 | ```JavaScript
111 | import { Encoder } from 'cbor-x';
112 | let encoder = new Encoder();
113 | encoder.encode(myBigData);
114 | ```
115 |
116 | Another way to further leverage the benefits of the cbor-x record structures is to use streams that naturally allow for data to reuse based on previous record structures. The stream classes have the record structure extension enabled by default and provide excellent out-of-the-box performance.
117 |
When creating a new `Encoder`, `EncoderStream`, or `DecoderStream` instance, we can enable or disable the record structure extension with the `objectsAsMaps` property. When this is `true`, the record structure extension will be disabled, and all objects will revert to being serialized using CBOR `map`s, and all `map`s will be deserialized to JS `Object`s as properties (like the standalone `encode` and `decode` functions).
119 |
Streaming with record structures works by encoding a structure the first time it is seen in a stream and referencing the structure in later messages that are sent across that stream. When an encoder can expect a decoder to understand previous structure references, this can be configured using the `sequential: true` flag, which is auto-enabled by streams, but can also be used with Encoder instances.
121 |
122 | ### Shared Record Structures
123 | Another useful way of using cbor-x, and the record extension, is for storing data in a databases, files, or other storage systems. If a number of objects with common data structures are being stored, a shared structure can be used to greatly improve data storage and deserialization efficiency. In the simplest form, provide a `structures` array, which is updated if any new object structure is encountered:
124 |
125 | ```JavaScript
126 | import { Encoder } from 'cbor-x';
127 | let encoder = new Encoder({
128 | structures: [... structures that were last generated ...]
129 | });
130 | ```
131 | If you are working with persisted data, you will need to persist the `structures` data when it is updated. Cbor-x provides an API for loading and saving the `structures` on demand (which is robust and can be used in multiple-process situations where other processes may be updating this same `structures` array), we just need to provide a way to store the generated shared structure so it is available to deserialize stored data in the future:
132 | ```JavaScript
133 | import { Encoder } from 'cbor-x';
134 | let encoder = new Encoder({
135 | getStructures() {
136 | // storing our data in file (but we could also store in a db or key-value store)
137 | return decode(readFileSync('my-shared-structures.cbor')) || [];
138 | },
139 | saveStructures(structures) {
140 | writeFileSync('my-shared-structures.cbor', encode(structures))
141 | },
142 | structures: []
143 | });
144 | ```
Cbor-x will automatically add and save structures as it encounters any new object structures (up to a limit of 64). It will always add structures in an incremental/compatible way: Any object encoded with an earlier structure can be decoded with a later version (as long as it is persisted).
146 |
147 | ### Reading Multiple Values
148 | If you have a buffer with multiple values sequentially encoded, you can choose to parse and read multiple values. This can be done using the `decodeMultiple` function/method, which can return an array of all the values it can sequentially parse within the provided buffer. For example:
149 | ```js
150 | let data = new Uint8Array([1, 2, 3]) // encodings of values 1, 2, and 3
151 | let values = decodeMultiple(data) // [1, 2, 3]
152 | ```
153 | Alternately, you can provide a callback function that is called as the parsing occurs with each value, and can optionally terminate the parsing by returning `false`:
154 | ```js
155 | let data = new Uint8Array([1, 2, 3]) // encodings of values 1, 2, and 3
156 | decodeMultiple(data, (value) => {
157 | // called for each value
158 | // return false if you wish to end the parsing
159 | })
160 | ```
161 | ### KeyMaps for Senml
162 | KeyMaps can be used to remap properties of source Objects and Maps to numerical equivalents for more efficient encoding.
The principal driver for this feature is to support `application/senml+cbor` content-encoding as defined in https://datatracker.ietf.org/doc/html/rfc8428#section-6 for use in LWM2M applications (see http://www.openmobilealliance.org/release/LightweightM2M/V1_2-20201110-A/HTML-Version/OMA-TS-LightweightM2M_Core-V1_2-20201110-A.html#7-4-7-0-747-SenML-CBOR)
164 |
Records are also supported in conjunction with keyMaps, but these are disabled by default when keyMaps are specified as use of the two features does not introduce any additional compression efficiency unless the data arrays are quite large (> 10 items).
166 |
167 | ```JavaScript
168 | import { Decoder, Encoder } from 'cbor-x'
169 | const data = [
170 | { bn: '/3303/0/5700', bt: 1278887, v: 35.5 },
171 | { t: 10, v: 34 },
172 | { t: 20, v: 33 },
173 | { t: 30, v: 32 },
174 | { t: 40, v: 31 },
175 | { t: 50, v: 30 }
176 | ]
177 |
let senmlKeys = { bs: -6, bv: -5, bu: -4, bt: -3, bn: -2, bver: -1, n: 0, u: 1, v: 2, vs: 3, vb: 4, s: 5, t: 6, ut: 7, vd: 8 }
179 | let senmlCbor = new Encoder({ keyMap: senmlKeys })
180 | let basicCbor = new Encoder()
181 | let senmlBuff = senmlCbor.encode(data)
182 | let basicBuff = basicCbor.encode(data)
183 | console.log('Senml CBOR size:', senmlBuff.length) // 77
184 | console.log('Basic CBOR size:', basicBuff.length) // 90
185 | let senmlDecoder = new Decoder({ keyMap: senmlKeys });
186 | assert.deepEqual(senmlDecoder.decode(senmlBuff), data)
187 |
188 | ```
189 |
190 | ### CBOR Packing
[Packed CBOR](https://datatracker.ietf.org/doc/html/draft-ietf-cbor-packed) is additional specification for CBOR which allows for compact encoding of data that has repeated values. Cbor-x supports decoding packed CBOR, no flags or options needed. Cbor-x can also optionally generate packed CBOR (with the `pack` option), which will cause the encoder to look for repeated strings in a data structure that is being encoded, and store the strings in a packed table that can be referenced, to reduce encoding size. This involves extra overhead and reduces encoding performance, and generally does not yield as much compaction as standard compression tools. However, this can be much faster than encoding plus compression, while still providing some level of reduction in encoding size. In addition to size reduction, packed CBOR is also usually faster to decode (assuming that some repetitive values could be found/packed).
192 |
193 | Cbor-x also has in-progress effort to support shared packed tables.
194 |
195 |
196 | ## Options
197 | The following options properties can be provided to the Encoder or Decoder constructor:
198 |
* `keyMap` - This can be set to an object which will be used to map keys in the source Object or Map to other keys including integers. This allows for more efficient encoding, and enables support for numeric CBOR tag encodings such as used by `application/senml+cbor` (https://datatracker.ietf.org/doc/html/rfc8428#section-6)
* `useRecords` - Setting this to `false` disables the record extension and stores JavaScript objects as CBOR maps (with tag 259), and decodes maps as JavaScript `Object`s, which ensures compatibility with other decoders.
201 | * `structures` - Provides the array of structures that is to be used for record extension, if you want the structures saved and used again. This array will be modified in place with new record structures that are serialized (if less than 64 structures are in the array).
202 | * `structuredClone` - This enables the structured cloning extensions that will encode object/cyclic references and additional built-in types/classes.
203 | * `mapsAsObjects` - If `true`, this will decode CBOR maps and JS `Object`s with the map entries decoded to object properties. If `false`, maps are decoded as JavaScript `Map`s. This is disabled by default if `useRecords` is enabled (`Map`s are preserved since they are distinct from records), and is enabled by default if `useRecords` is disabled.
204 | * `useFloat32` - This will enable cbor-x to encode non-integer numbers as 32-bit (4 byte) floating point numbers. See next section for possible values.
205 | * `alwaysUseFloat` - This will force cbor-x to encode any number, including integers, as floating-point numbers.
206 | * `pack` - This will enable [CBOR packing](https://datatracker.ietf.org/doc/html/draft-ietf-cbor-packed) for encoding, as described above.
207 | * `variableMapSize` - This will use varying map size definition (from single-byte to full 32-bit representation) based on the number of keys when encoding objects, which yields slightly more compact encodings (for small objects), but is typically 5-10% slower during encoding. This is only relevant when record extension is disabled.
208 | * `copyBuffers` - When decoding a CBOR message with binary data (Buffers are encoded as binary data), copy the buffer rather than providing a slice/view of the buffer. If you want your input data to be collected or modified while the decoded embedded buffer continues to live on, you can use this option (there is extra overhead to copying).
209 | * `bundleStrings` - If `true` this uses a custom extension that bundles strings together, so that they can be decoded more quickly on browsers and Deno that do not have access to the NodeJS addon. This a custom extension, so both encoder and decoder need to support this. This can yield significant decoding performance increases on browsers (30%-50%).
210 | * `useTimestamp32` - Encode JS `Date`s in 32-bit format when possible by dropping the milliseconds. This is a more efficient encoding of dates. You can also cause dates to use 32-bit format by manually setting the milliseconds to zero (`date.setMilliseconds(0)`).
211 | * `sequential` - Encode structures in serialized data, and reference previously encoded structures with expectation that decoder will read the encoded structures in the same order as encoded, with `unpackMultiple`.
212 | * `largeBigIntToFloat` - If a bigint needs to be encoded that is larger than will fit in 64-bit integers, it will be encoded as a float-64 (otherwise will throw a RangeError).
* `useTag259ForMaps` - This flag indicates if [tag 259 (explicit maps)](https://github.com/shanewholloway/js-cbor-codec/blob/master/docs/CBOR-259-spec--explicit-maps.md) should be used to encode JS `Map`s. When using records is enabled, this is disabled by default, since plain objects are encoded with record structures and unambiguously differentiated from `Map`s, which are encoded as CBOR maps. Without using records, this is enabled by default and is necessary to distinguish plain objects from `Map`s (but can be disabled by setting this to `false`).
214 | * `tagUint8Array` - Indicates if tag 64 should be used for `Uint8Array`s.
215 | * `int64AsNumber` - This will decode uint64 and int64 numbers as standard JS numbers rather than as bigint numbers.
* `skipFunction` - This skips functions when encoding objects.
217 |
218 | ### 32-bit Float Options
By default all non-integer numbers are serialized as 64-bit float (double). This is fast, and ensures maximum precision. However, often real-world data doesn't need 64-bits of precision, and using 32-bit encoding can be much more space efficient. There are several options that provide more efficient encodings. Using the decimal rounding options for encoding and decoding provides lossless storage of common decimal representations like 7.99, in more efficient 32-bit format (rather than 64-bit). The `useFloat32` property has several possible options, available from the module as constants:
220 | ```JavaScript
221 | import { ALWAYS, DECIMAL_ROUND, DECIMAL_FIT } from 'cbor-x'
222 | ```
223 |
* `ALWAYS` (1) - Always will encode non-integers (absolute value less than 2147483648) as 32-bit float.
225 | * `DECIMAL_ROUND` (3) - Always will encode non-integers as 32-bit float, and when decoding 32-bit float, round to the significant decimal digits (usually 7, but 6 or 8 digits for some ranges).
* `DECIMAL_FIT` (4) - Only encode non-integers as 32-bit float if all significant digits (usually up to 7) can be unambiguously encoded as a 32-bit float, and decode with decimal rounding (same as above). This will ensure round-trip encoding/decoding without loss in precision and uses 32-bit when possible.
227 |
228 | Note, that the performance is decreased with decimal rounding by about 20-25%, although if only 5% of your values are floating point, that will only have about a 1% impact overall.
229 |
In addition, cbor-x exports a `roundFloat32(number)` function that can be used to round floating point numbers to the maximum significant decimal digits that can be stored in 32-bit float, just as DECIMAL_ROUND does when decoding. This can be useful for determining how a number will be decoded prior to encoding it.
231 |
232 | ### Setting Size Limits
233 | You can set size limits on objects, arrays, and maps to prevent resource exhaustion when decoding. This can be done by calling the setMaxLimits export. Each of the properties are optional (only provide
234 | the properties you want to change), for example (with the defaults):
235 | ```JavaScript
236 | import { setMaxLimits } from 'cbor-x';
237 | setMaxLimits({
238 | maxArraySize: 112810000,
239 | maxMapSize: 16810000,
240 | maxObjectSize : 16710000
241 | });
242 | ```
243 |
244 | ## Performance
245 | Cbor-x is fast. Really fast. Here is comparison with the next fastest JS projects using the benchmark tool from `msgpack-lite` (and the sample data is from some clinical research data we use that has a good mix of different value types and structures). It also includes comparison to V8 native JSON functionality, and JavaScript Avro (`avsc`, a very optimized Avro implementation):
246 |
247 | ### Native Acceleration
248 | Cbor-x employs an optional native node-addon to accelerate the parsing of strings. This should be automatically installed and utilized on NodeJS. However, you can verify this by checking the `isNativeAccelerationEnabled` property that is exported from cbor-x. If this is `false`, the `cbor-extract` package may not have been properly installed, and you may want to verify that it is installed correctly:
249 | ```js
250 | import { isNativeAccelerationEnabled } from 'cbor-x'
251 | if (!isNativeAccelerationEnabled)
252 | console.warn('Native acceleration not enabled, verify that install finished properly')
253 | ```
254 |
255 |
256 | operation | op | ms | op/s
257 | ---------------------------------------------------------- | ------: | ----: | -----:
258 | buf = Buffer(JSON.stringify(obj)); | 78200 | 5004 | 15627
259 | obj = JSON.parse(buf); | 89600 | 5003 | 17909
260 | require("cbor-x").encode(obj); | 163100 | 5001 | 32613
261 | require("cbor-x").decode(buf); | 100200 | 5004 | 20023
262 | cbor-x w/ shared structures: packr.encode(obj); | 178300 | 5002 | 35645
263 | cbor-x w/ shared structures: packr.decode(buf); | 414000 | 5000 | 82800
264 | buf = require("cbor").encode(obj); | 7800 | 5016 | 1555
265 | obj = require("cbor").decode(buf); | 3200 | 5087 | 629
266 | buf = require("cbor-sync").encode(obj); | 18600 | 5012 | 3711
267 | obj = require("cbor-sync").decode(buf); | 20000 | 5020 | 3984
268 | buf = require("msgpack-lite").encode(obj); | 30900 | 5013 | 6163
269 | obj = require("msgpack-lite").decode(buf); | 15800 | 5012 | 3152
270 | buf = require("notepack").encode(obj); | 62600 | 5006 | 12504
271 | obj = require("notepack").decode(buf); | 33700 | 5007 | 6730
272 | require("avsc")...make schema/type...type.toBuffer(obj); | 86900 | 5002 | 17373
273 | require("avsc")...make schema/type...type.fromBuffer(obj); | 106100 | 5000 | 21220
274 |
275 | All benchmarks were performed on Node 14.8.0 (Windows i7-4770 3.4Ghz).
276 | (`avsc` is schema-based and more comparable in style to cbor-x with shared structures).
277 |
278 | Here is a benchmark of streaming data (again borrowed from `msgpack-lite`'s benchmarking), where cbor-x is able to take advantage of the structured record extension and really demonstrate its performance capabilities:
279 |
280 | operation (1000000 x 2) | op | ms | op/s
281 | ------------------------------------------------ | ------: | ----: | -----:
282 | new EncoderStream().write(obj); | 1000000 | 372 | 2688172
283 | new DecoderStream().write(buf); | 1000000 | 247 | 4048582
284 | stream.write(msgpack.encode(obj)); | 1000000 | 2898 | 345065
285 | stream.write(msgpack.decode(buf)); | 1000000 | 1969 | 507872
286 | stream.write(notepack.encode(obj)); | 1000000 | 901 | 1109877
287 | stream.write(notepack.decode(buf)); | 1000000 | 1012 | 988142
288 | msgpack.Encoder().on("data",ondata).encode(obj); | 1000000 | 1763 | 567214
289 | msgpack.createDecodeStream().write(buf); | 1000000 | 2222 | 450045
290 | msgpack.createEncodeStream().write(obj); | 1000000 | 1577 | 634115
291 | msgpack.Decoder().on("data",ondata).decode(buf); | 1000000 | 2246 | 445235
292 |
293 | See the [benchmark.md](benchmark.md) for more benchmarks and information about benchmarking.
294 |
295 | ## Custom Extensions
296 | You can add your own custom extensions, which can be used to encode specific types/classes in certain ways. This is done by using the `addExtension` function, and specifying the class, extension type code (custom extensions should be a number greater than 40500, all others are reserved for CBOR or cbor-x), and your encode and decode functions (or just the one you need). You can use cbor-x encoding and decoding within your extensions:
297 | ```JavaScript
298 | import { addExtension, Encoder } from 'cbor-x';
299 |
300 | class MyCustomClass {...}
301 |
302 | let extEncoder = new Encoder();
303 | addExtension({
304 | Class: MyCustomClass,
305 | tag: 43311, // register our own extension code (a tag code)
306 | encode(instance, encode) {
307 | // define how your custom class should be encoded
308 | encode(instance.myData); // return a buffer
309 | }
310 | decode(data) {
311 | // define how your custom class should be decoded
312 | let instance = new MyCustomClass();
313 | instance.myData = data
314 | return instance; // decoded value from buffer
315 | }
316 | });
317 | ```
318 |
319 | ## Unknown Tags
320 | If no extension is registered for a tag, the decoder will return an instance of the `Tag` class, where the value provided for the tag will be available in the `value` property of the `Tag` instance. The `Tag` class is an export of the package and decode module.
321 |
322 | ### CBOR Compliance
323 | The cbor-x package is designed to encode and decode to the CBOR extended generic data model, implementing extensions to support the extended model, and will generally attempt to use preferred serializations where feasible. When duplicate keys are encountered in maps, previous entries will be lost, and the final entry is preserved.
324 |
325 | ### Additional Performance Optimizations
326 | Cbor-x is already fast, but here are some tips for making it faster.
327 |
328 | #### Arena Allocation (`useBuffer()`)
329 | During the serialization process, data is written to buffers. Again, allocating new buffers is a relatively expensive process, and the `useBuffer` method can help allow reuse of buffers that will further improve performance. With `useBuffer` method, you can provide a buffer, serialize data into it, and when it is known that you are done using that buffer, you can call `useBuffer` again to reuse it. The use of `useBuffer` is never required, buffers will still be handled and cleaned up through GC if not used, it just provides a small performance boost.
330 |
331 | ## Extensions
332 | Cbor-x currently uses tag id 105 and 26880-27135 for its [proposed extension for records](https://github.com/kriszyp/cbor-records).
333 |
334 | ### Dates
335 | Cbor-x saves all JavaScript `Date`s using the standard CBOR date extension (tag 1).
336 |
337 | ### Structured Cloning
338 | With structured cloning enabled, cbor-x will also use tags/extensions to store Set, Map, Error, RegExp, ArrayBufferView objects and preserve their types.
339 |
340 | ### List of supported tags for decoding
341 | Here is a list of CBOR tags that are supported for decoding:
342 |
343 | * 0 - String date
344 | * 1 - Numeric Date
345 | * 2 - BigInt
346 | * 3 - Negative BigInt
347 | * 6 - Packed string reference
348 | * 27 - Generic named objects (used for Error, RegExp)
349 | * 28, 29 - Value sharing/object referencing
350 | * 51 - Packed table
351 | * 64 - Uint8Array
352 | * 68 - Uint8ClampedArray
353 | * 69 - Uint16Array
354 | * 70 - Uint32Array
355 | * 71 - BigUint64Array
356 | * 72 - Int8Array
357 | * 77 - Int16Array
358 | * 78 - Int32Array
359 | * 79 - BigInt64Array
360 | * 81 - Float32Array
361 | * 82 - Float64Array
362 | * 105 - Records
363 | * 258 - Set
364 | * 259 - Map
365 | * 57344 - 57599 - Records
366 |
367 | ## Alternate Encoding/Package
368 | The high-performance serialization and deserialization algorithms in this package are also available in the [msgpackr](https://github.com/kriszyp/msgpackr) for the MessagePack format, with the same API and design. A quick summary of the pros and cons of using MessagePack vs CBOR are:
369 | * MessagePack has wider adoption and msgpackr has broader usage.
370 | * CBOR has an [official IETF standardization track](https://www.rfc-editor.org/rfc/rfc8949.html), and the record extensions is conceptually/philosophically a better fit for CBOR tags.
371 |
372 | ## License
373 |
374 | MIT
375 |
376 | ### Browser Consideration
377 | CBOR can be a great choice for high-performance data delivery to browsers, as reasonable data size is possible without compression. And CBOR works very well in modern browsers. However, it is worth noting that if you want highly compact data, brotli or gzip are most effective in compressing, and CBOR's character frequency tends to defeat Huffman encoding used by these standard compression algorithms, often resulting in less compact data than compressed JSON.
378 |
379 | ### Credits
380 |
381 | Various projects have been inspirations for this, and code has been borrowed from https://github.com/msgpack/msgpack-javascript and https://github.com/mtth/avsc.
382 |
--------------------------------------------------------------------------------
/tests/test.js:
--------------------------------------------------------------------------------
import { createRequire } from 'module'
import { readFileSync } from 'fs'
import chai from 'chai'
import * as CBOR from '../node-index.js'
// Shared fixture: a sizeable JSON document with a mix of value types and
// repeated object shapes, used throughout the round-trip tests.
const sampleData = JSON.parse(readFileSync(new URL('./example4.json', import.meta.url)))

// Sample SenML (RFC 8428) measurement records used by the keyMap tests.
const senmlData = [
	{ bn: '/3303/0/5700', bt: 1278887, v: 35.5 },
	{ t: 10, v: 34 },
	{ t: 20, v: 33 },
	{ t: 30, v: 32 },
	{ t: 40, v: 31 },
	{ t: 50, v: 30 }
]

// SenML label -> integer key mapping (RFC 8428 section 6); negative
// values are the base-field keys (bn, bt, ...).
const senmlKeys = { bs: -6, bv: -5, bu: -4, bt: -3, bn: -2, n: 0, u: 1, v: 2, vs: 3, t: 6, ut: 7, vd: 8 }
16 |
17 | //import inspector from 'inspector'; inspector.open(9229, null, true); debugger
/**
 * Best-effort loader for optional CommonJS dependencies.
 *
 * This file is an ES module, so the CommonJS `require` global does not
 * exist here; the original body called the undefined `require`, which
 * always threw a ReferenceError and made every optional module silently
 * resolve to `{}`. Using `createRequire` actually loads the module.
 *
 * @param {string} module - module id to load
 * @returns {object} the loaded module, or an empty object if unavailable
 */
function tryRequire(module) {
	try {
		// Build a require function anchored at this test file's location.
		return createRequire(import.meta.url)(module)
	} catch (error) {
		// Optional dependency: swallow resolution/load errors and signal
		// absence with an empty object (callers probe for properties).
		return {}
	}
}
// Chai's assert flavor, used for all expectations below.
var assert = chai.assert

// Local aliases for the cbor-x API surface under test.
var Encoder = CBOR.Encoder
var EncoderStream = CBOR.EncoderStream
var DecoderStream = CBOR.DecoderStream
var decode = CBOR.decode
var encode = CBOR.encode
var encodeAsIterable = CBOR.encodeAsIterable
var encodeAsAsyncIterable = CBOR.encodeAsAsyncIterable
// One of the 32-bit float (useFloat32) mode constants.
var DECIMAL_FIT = CBOR.DECIMAL_FIT

var addExtension = CBOR.addExtension
37 |
// Optional zlib support for compression-related tests/benchmarks.
// NOTE(review): `tryRequire` may return `{}` if zlib is unavailable, in
// which case these helpers are undefined — users must tolerate that.
var zlib = tryRequire('zlib')
// The brotli variants are the ones actually used: the original code first
// assigned zlib.deflateSync/inflateSync and then immediately re-declared
// the same names with the brotli functions, so the deflate assignments
// were dead stores and have been removed.
var deflateSync = zlib.brotliCompressSync
var inflateSync = zlib.brotliDecompressSync
var constants = zlib.constants
// (removed an empty try/catch that only wrapped commented-out code)

// Iteration count for repeated encode/decode stress tests.
var ITERATIONS = 4000
49 |
50 | suite('CBOR basic tests', function(){
// keyMap without records: the mapped encoding must round-trip losslessly
// and be smaller than the default encoding of the same data.
test('encode/decode with keyMaps (basic)', function() {
	const keyMapEncoder = new Encoder({ useRecords: false, keyMap: senmlKeys })
	const plainEncoder = new Encoder()
	const encoded = keyMapEncoder.encode(senmlData)
	const roundTripped = keyMapEncoder.decode(encoded)
	assert(encoded.length < plainEncoder.encode(senmlData).length)
	assert.deepEqual(roundTripped, senmlData)
})
60 |
// keyMap combined with record structures: still round-trips and still
// beats the default encoding in size.
test('encode/decode with keyMaps and Records)', function() {
	const keyMapEncoder = new Encoder({ useRecords: true, keyMap: senmlKeys })
	const plainEncoder = new Encoder()
	const encoded = keyMapEncoder.encode(senmlData)
	const roundTripped = keyMapEncoder.decode(encoded)
	assert(encoded.length < plainEncoder.encode(senmlData).length)
	assert.deepEqual(roundTripped, senmlData)
})
70 |
// Round-trips a nested structure with repeated record shapes through an
// Encoder that accumulates record structures into a caller-owned array.
test('encode/decode data', function(){
	var data = {
		data: [
			// Repeated { a, name, type, isOdd } shapes exercise record reuse;
			// the null/absent isOdd values cover optional-field handling.
			{ a: 1, name: 'one', type: 'odd', isOdd: true },
			{ a: 2, name: 'two', type: 'even'},
			{ a: 3, name: 'three', type: 'odd', isOdd: true },
			{ a: 4, name: 'four', type: 'even'},
			{ a: 5, name: 'five', type: 'odd', isOdd: true },
			{ a: 6, name: 'six', type: 'even', isOdd: null }
		],
		description: 'some names',
		types: ['odd', 'even'],
		convertEnumToNum: [
			// Same key with string, number, array and null values.
			{ prop: 'test' },
			{ prop: 'test' },
			{ prop: 'test' },
			{ prop: 1 },
			{ prop: 2 },
			{ prop: [undefined] },
			{ prop: null }
		]
	}
	let structures = []
	let encoder = new Encoder({ structures })
	// Encode twice: the second pass should reuse the record structures
	// learned during the first pass.
	var serialized = encoder.encode(data)
	serialized = encoder.encode(data)
	var deserialized = encoder.decode(serialized)
	assert.deepEqual(deserialized, data)
})
100 |
// Objects sharing a key prefix but diverging in the last key must each
// round-trip while the encoder grows its shared structures array.
test('mixed structures, shared', function(){
	const samples = [
		{ a: 1, b: 2, c: 3 },
		{ a: 1, b: 2, d: 4 },
		{ a: 1, b: 2, e: 5 }
	]
	const encoder = new Encoder({ structures: [] })
	for (const sample of samples) {
		const buffer = encoder.encode(sample)
		assert.deepEqual(encoder.decode(buffer), sample)
	}
})
117 |
// 1000 objects with distinct shapes exceed the record-structure limit;
// encoding must still round-trip on both a cold and a warm encoder.
test('mixed structures, unshared', function(){
	const encoder = new Encoder({ })
	const records = Array.from({ length: 1000 }, (_, index) => ({ a: 1, ['test' + index]: index }))
	for (let pass = 0; pass < 2; pass++) {
		const encoded = encoder.encode(records)
		assert.deepEqual(encoder.decode(encoded), records)
	}
})
131 |
// A heterogeneous array (repeated strings, ints, null, booleans, and a
// nested array with negative ints and floats) must round-trip exactly.
test('mixed array', function(){
	var data = [
		'one',
		'two',
		'one',
		10,
		11,
		null,
		true,
		'three',
		'three',
		'one', [
			3, -5, -50, -400,1.3, -5.3, true
		]
	]
	let structures = []
	let encoder = new Encoder({ structures })
	var serialized = encoder.encode(data)
	var deserialized = encoder.decode(serialized)
	assert.deepEqual(deserialized, data)
})
153 |
// A 255-character ASCII string exercises the short-string length
// boundary in the encoder.
test('255 chars', function() {
	const text = 'RRZG9A6I7xupPeOZhxcOcioFsuhszGOdyDUcbRf4Zef2kdPIfC9RaLO4jTM5JhuZvTsF09fbRHMGtqk7YAgu3vespeTe9l61ziZ6VrMnYu2CamK96wCkmz0VUXyqaiUoTPgzk414LS9yYrd5uh7w18ksJF5SlC2e91rukWvNqAZJjYN3jpkqHNOFchCwFrhbxq2Lrv1kSJPYCx9blRg2hGmYqTbElLTZHv20iNqwZeQbRMgSBPT6vnbCBPnOh1W'
	const decoded = CBOR.decode(CBOR.encode(text))
	assert.equal(decoded, text)
})
160 |
// Round-trip the large sample document twice with the module-level
// encode/decode to cover repeated use of the default encoder state.
test('encode/decode sample data', function(){
	for (let attempt = 0; attempt < 2; attempt++) {
		const encoded = CBOR.encode(sampleData)
		assert.deepEqual(CBOR.decode(encoded), sampleData)
	}
})
// Simulates shared/persisted structures: the first encoder saves its
// structures via saveStructures (serialized to sharedSerialized), and a
// second, fresh encoder loads them via getStructures to decode.
test('encode/decode sample data with records', function(){
	var data = sampleData
	let sharedSerialized
	// First encoder starts with no stored structures (getStructures
	// returns undefined) and persists any it generates.
	let encoder = new Encoder({ getStructures() { return }, saveStructures(shared) { sharedSerialized = encode(shared) }, useRecords: true })
	var serialized = encoder.encode(data)
	// Second encoder reloads the persisted structures to decode the
	// buffer produced by the first one.
	encoder = new Encoder({ getStructures() { return decode(sharedSerialized) }, saveStructures(shared) { sharedSerialized = encode(shared) }, useRecords: true })
	var deserialized = encoder.decode(serialized)
	assert.deepEqual(deserialized, data)
})
// String packing (without record structures) must preserve the sample
// document exactly through a round-trip.
test('encode/decode sample data with packing', function(){
	const packingEncoder = new Encoder({ pack: true, useRecords: false })
	const packed = packingEncoder.encode(sampleData)
	assert.deepEqual(packingEncoder.decode(packed), sampleData)
})
// NOTE(review): despite the test name, this only sets `useStringRefs`;
// neither `pack` nor explicit record structures are configured — confirm
// whether `{ pack: true, useRecords: true }` was intended here.
test('encode/decode sample data with packing and records', function(){
	var data = sampleData
	// (removed an unused `structures` array that was never passed to the encoder)
	let encoder = new Encoder({ useStringRefs: true })
	var serialized = encoder.encode(data)
	var deserialized = encoder.decode(serialized)
	assert.deepEqual(deserialized, data)
})
// Shared string packing: findCommonStringsToPack() starts observing
// repeated strings; calling the returned finishPack() mid-stream freezes
// the shared table. Every message must round-trip before and after.
test('encode/decode sample data with shared packing and records', function(){
	let encoder = new Encoder({ useRecords: true })
	let finishPack = encoder.findCommonStringsToPack()
	for (let i = 0; i < 20; i++) {
		let data = {
			// These two values repeat on every iteration and are candidates
			// for the shared pack table; the third is unique each time.
			shouldShare: 'same each time',
			shouldShare2: 'same each time 2',
			shouldntShare: 'different each time ' + i
		}
		// Finalize the shared table halfway through the message sequence.
		if (i == 10)
			finishPack({})
		var serialized = encoder.encode(data)
		var deserialized = encoder.decode(serialized)
		assert.deepEqual(deserialized, data)
	}
})
210 | test('encode/decode sample data with individual packing, shared packing and records', function(){
211 | let encoder = new Encoder({ pack: true, useRecords: true })
212 | let finishPack = encoder.findCommonStringsToPack()
213 | for (let i = 0; i < 20; i++) {
214 | let data = {
215 | shouldShare: 'same each time',
216 | shouldShare2: 'same each time',
217 | shouldntShare: 'different each time ' + i,
218 | shouldntShare2: 'different each time ' + i,
219 | noPack: 'no packing ' + i,
220 | }
221 | if (i == 10)
222 | finishPack({ threshold: 5 })
223 | var serialized = encoder.encode(data)
224 | var deserialized = encoder.decode(serialized)
225 | assert.deepEqual(deserialized, data)
226 | }
227 | })
228 | test('pack/unpack sample data with bundled strings', function(){
229 | var data = sampleData
230 | let encoder = new Encoder({ /*structures,*/ useRecords: false, bundleStrings: true })
231 | var serialized = encoder.encode(data)
232 | var deserialized = encoder.decode(serialized)
233 | assert.deepEqual(deserialized, data)
234 | })
235 | test('pack/unpack sample data with self-descriptive header', function(){
236 | var data = sampleData
237 | let encoder = new Encoder({ useSelfDescribedHeader: true })
238 | var serialized = encoder.encode(data)
239 | var deserialized = encoder.decode(serialized)
240 | assert.deepEqual(deserialized, data)
241 | assert.equal(serialized[0], 0xd9)
242 | assert.equal(serialized[1], 0xd9)
243 | assert.equal(serialized[2], 0xf7)
244 | })
// Decoding two different payloads from the same reused buffer: the second
// decode (with an explicit end position) must not see leftovers from the
// first. Node-only because it relies on Buffer.copy.
if (typeof Buffer != 'undefined')
test('replace data', function(){
	// NOTE(review): each record below defines `a` twice; the later string
	// value silently overwrites the number — presumably intentional to vary
	// string lengths across records, but confirm.
	var data1 = {
		data: [
			{ a: 1, name: 'one', type: 'odd', isOdd: true, a: '13 characters' },
			{ a: 2, name: 'two', type: 'even', a: '11 characte' },
			{ a: 3, name: 'three', type: 'odd', isOdd: true, a: '12 character' },
			{ a: 4, name: 'four', type: 'even', a: '9 charact'},
			{ a: 5, name: 'five', type: 'odd', isOdd: true, a: '14 characters!' },
			{ a: 6, name: 'six', type: 'even', isOdd: null }
		],
	}
	var data2 = {
		data: [
			{ foo: 7, name: 'one', type: 'odd', isOdd: true },
			{ foo: 8, name: 'two', type: 'even'},
			{ foo: 9, name: 'three', type: 'odd', isOdd: true },
			{ foo: 10, name: 'four', type: 'even'},
			{ foo: 11, name: 'five', type: 'odd', isOdd: true },
			{ foo: 12, name: 'six', type: 'even', isOdd: null }
		],
	}
	var serialized1 = encode(data1)
	var serialized2 = encode(data2)
	var b = Buffer.alloc(8000)
	serialized1.copy(b)
	var deserialized1 = decode(b, serialized1.length)
	serialized2.copy(b)
	var deserialized2 = decode(b, serialized2.length)
	assert.deepEqual(deserialized1, data1)
	assert.deepEqual(deserialized2, data2)
})
// Custom class serialized through a registered CBOR extension (tag 300).
// The original test only registered the extension and never exercised it;
// it now round-trips the data and verifies the prototype survives.
test('extended class encode/decode', function() {
	function Extended() {

	}

	Extended.prototype.getDouble = function() {
		return this.value * 2
	}
	var instance = new Extended()
	instance.value = 4
	instance.string = 'decode this: ᾜ'
	var data = {
		prop1: 'has multi-byte: ᾜ',
		extendedInstance: instance,
		prop2: 'more string',
		num: 3,
	}
	let encoder = new Encoder()
	addExtension({
		Class: Extended,
		tag: 300,
		decode: function(data) {
			let e = new Extended()
			e.value = data[0]
			e.string = data[1]
			return e
		},
		encode: function(instance) {
			return encoder.encode([instance.value, instance.string])
		}
	})
	// Round-trip and verify both the plain properties and the extension
	// instance (including a method call through the restored prototype).
	var serialized = encoder.encode(data)
	var deserialized = encoder.decode(serialized)
	assert.equal(deserialized.prop1, 'has multi-byte: ᾜ')
	assert.equal(deserialized.prop2, 'more string')
	assert.equal(deserialized.num, 3)
	assert(deserialized.extendedInstance instanceof Extended)
	assert.equal(deserialized.extendedInstance.getDouble(), 8)
})
// Extension (tag 301) combined with structuredClone: a self-referencing
// instance must round-trip with its reference cycle restored.
test('extended class encode/decode with self reference in structered clone', function(){
	function Extended() {

	}
	addExtension({
		Class: Extended,
		tag: 301,
		decode: function(data) {
			let e = new Extended()
			e.value = data[0]
			e.string = data[1]
			return e
		},
		encode: function(instance, encode) {
			return encode([instance.value, instance.string])
		}
	})
	var instance = new Extended()
	instance.value = instance;
	instance.string = 'hi'
	let data = {
		extended: instance
	}
	let encoder = new Encoder({
		structuredClone: true,
	})
	let serialized = encoder.encode(data)
	let deserialized = encoder.decode(serialized)
	// The original asserted on the input `data`, which is trivially true and
	// verifies nothing about the round-trip; assert on the decoded value.
	assert(deserialized.extended.value.value === deserialized.extended)
	assert(deserialized.extended instanceof Extended)
	assert.equal(deserialized.extended.string, 'hi')
})
340 |
// Extension whose payload contains a Map, encoded via the callback-style
// encode(instance, encode) form.
// NOTE(review): tag 301 is also registered by the preceding self-reference
// test for a different Extended class; the later registration replaces the
// earlier one — confirm the tag reuse is intentional.
test('addExtension with map', function(){
	function Extended() {
	}
	var instance = new Extended()
	instance.value = 4
	instance.map = new Map();
	instance.map.set('key', 'value');
	var data = {
		extendedInstance: instance,
	}
	let encoder = new Encoder()
	addExtension({
		Class: Extended,
		tag: 301,
		decode: function(data) {
			let e = new Extended()
			e.value = data[0]
			e.map = data[1]
			return e
		},
		encode: function(instance, encode) {
			return encode([instance.value, instance.map])
		}
	})
	var serialized = encoder.encode(data)
	var deserialized = encoder.decode(serialized)
	assert.deepEqual(data, deserialized)
})
369 |
// Skipped diagnostic: probes which single-byte values decode (via
// ISO-8859-15 TextDecoder) into strings the native helper considers
// one-byte; used for exploring fast-path string decoding, not a regression
// test. NOTE(review): requires the optional 'CBOR-extract' native module.
test.skip('text decoder', function() {
	let td = new TextDecoder('ISO-8859-15')
	let b = Buffer.alloc(3)
	let total = 0
	for (var i = 0; i < 256; i++) {
		b[0] = i
		b[1] = 0
		b[2] = 0
		let s = td.decode(b)
		if (!require('CBOR-extract').isOneByte(s)) {
			console.log(i.toString(16), s.length)
			total++
		}
	}
})
385 |
// structuredClone must preserve object identity: a self reference, the same
// object appearing twice in an array, and a shared array must all decode to
// the same instances rather than copies.
test('structured cloning: self reference', function() {
	let object = {
		test: 'string',
		children: [
			{ name: 'child' }
		]
	}
	object.self = object
	object.children[1] = object
	object.children[2] = object.children[0]
	object.childrenAgain = object.children
	let encoder = new Encoder({
		structuredClone: true,
	})
	var serialized = encoder.encode(object)
	var deserialized = encoder.decode(serialized)
	assert.equal(deserialized.self, deserialized)
	assert.equal(deserialized.children[0].name, 'child')
	assert.equal(deserialized.children[1], deserialized)
	assert.equal(deserialized.children[0], deserialized.children[2])
	assert.equal(deserialized.children, deserialized.childrenAgain)
})
// The same property name at two nesting levels must round-trip cleanly
// (exercises key caching / record structures with repeated keys).
test('nested same key', function() {
	const outerKey = "key";
	const innerKey = "key";
	const original = { [outerKey]: { [innerKey]: "foo" } };
	const codec = new Encoder();
	const encoded = codec.encode(original);
	const decoded = codec.decode(encoded);
	assert.deepEqual(decoded, original);
});
// Half-precision float decoding (0xF9 prefix, two payload bytes), including
// the infinities and NaN.
test('decode float 16', function() {
	assert.equal(decode(new Uint8Array([0xF9, 0x4A, 0x60])), 12.75);
	assert.equal(decode(new Uint8Array([0xF9, 0xC4, 0x80])), -4.5);
	assert.equal(decode(new Uint8Array([0xF9, 0x5A, 0xF9])), 223.125);
	assert.equal(decode(new Uint8Array([0xF9, 0x45, 0x80])), 5.5);
	assert.equal(decode(new Uint8Array([0xF9, 0x7C, 0])), Infinity);
	assert.equal(decode(new Uint8Array([0xF9, 0xFC, 0])), -Infinity);
	assert.isNaN(decode(new Uint8Array([0xF9, 0x7E, 0])));
});
// structuredClone should round-trip built-in types: Error, Set, RegExp, Map,
// and typed arrays (including a Float32Array view at a non-zero byte offset
// into a shared buffer).
test('structured cloning: types', function() {
	let b = typeof Buffer != 'undefined' ? Buffer.alloc(20) : new Uint8Array(20)
	let fa = new Float32Array(b.buffer, 8, 2)
	fa[0] = 2.25
	fa[1] = 6
	let f64a = new Float64Array([2.3, 4.7])
	let map = new Map()
	map.set('key', 'value')
	let object = {
		error: new Error('test'),
		set: new Set(['a', 'b']),
		regexp: /test/gi,
		map,
		float32Array: fa,
		float64Array: f64a,
		uint16Array: new Uint16Array([3,4])
	}
	let encoder = new Encoder({
		structuredClone: true,
	})
	var serialized = encoder.encode(object)
	var deserialized = encoder.decode(serialized)
	assert.deepEqual(Array.from(deserialized.set), Array.from(object.set))
	assert.equal(deserialized.map.get('key'), 'value')
	assert.equal(deserialized.error.message, object.error.message)
	assert.equal(deserialized.regexp.test('TEST'), true)
	assert.equal(deserialized.float32Array.constructor.name, 'Float32Array')
	assert.equal(deserialized.float32Array[0], 2.25)
	assert.equal(deserialized.float32Array[1], 6)
	assert.equal(deserialized.float64Array[0], 2.3)
	assert.equal(deserialized.float64Array[1], 4.7)
	assert.equal(deserialized.uint16Array.constructor.name, 'Uint16Array')
	assert.equal(deserialized.uint16Array[0], 3)
	assert.equal(deserialized.uint16Array[1], 4)
})

// Map and Set round-trip with the default (module-level) encode/decode,
// without structuredClone.
test('explicit maps and sets', function () {
	let map = new Map()
	map.set('key', { inside: 'value'})
	let object = {
		set: new Set(['a', 'b']),
		map,
	}
	var serialized = encode(object) // default encoder
	var deserialized = decode(serialized)
	assert.deepEqual(Array.from(deserialized.set), Array.from(object.set))
	assert.equal(deserialized.map.get('key').inside, 'value')
})
474 |
// Null-prototype objects must encode like plain objects.
test('object without prototype', function(){
	var data = Object.create(null)
	data.test = 3
	var serialized = encode(data)
	var deserialized = decode(serialized)
	assert.deepEqual(deserialized, data)
})
// Prototype-pollution guard: a __proto__ key in the input must not leak into
// the decoded object's prototype; only own properties survive.
test('object with __proto__', function(){
	const data = { foo: 'bar', __proto__: { isAdmin: true } };
	var serialized = encode(data)
	var deserialized = decode(serialized)
	assert.deepEqual(deserialized, { foo: 'bar' });
})

// A ~100MB byte string should round-trip (stresses buffer growth paths).
test('big buffer', function() {
	var size = 100000000
	var data = new Uint8Array(size).fill(1)
	var encoded = encode(data)
	var decoded = decode(encoded)
	assert.equal(decoded.length, size)
})
// An empty byte string must use the canonical single-byte encoding.
test('little buffer', function() {
	var data = typeof Buffer == 'undefined' ? new Uint8Array(0) : Buffer.alloc(0)
	var encoded = encode(data)
	assert.equal(encoded.length, 1) // make sure to use canonical form
	var decoded = decode(encoded)
	assert.equal(decoded.length, 0)
})
503 |
// Fuzz-style round-trip of 2000 randomly grown strings (up to ~1MB each,
// mixing latin and non-latin code points). NOTE(review): inputs are random,
// so a failure here may not reproduce on re-run.
test('random strings', function(){
	var data = []
	for (var i = 0; i < 2000; i++) {
		var str = 'test'
		while (Math.random() < 0.7 && str.length < 0x100000) {
			str = str + String.fromCharCode(90/(Math.random() + 0.01)) + str
		}
		data.push(str)
	}
	var serialized = encode(data)
	var deserialized = decode(serialized)
	assert.deepEqual(deserialized, data)
})
517 |
// Map and Date round-trips, including dates straddling the 2038/2039
// 32-bit-seconds boundary, far-future/ancient dates, and an invalid Date.
test('map/date', function(){
	var map = new Map()
	map.set(4, 'four')
	map.set('three', 3)
	let year2039 = new Date('2039-07-05T16:22:35.792Z')
	let year2038 = new Date('2038-08-06T00:19:02.911Z')

	var data = {
		map: map,
		date: new Date(1532219539733),
		farFutureDate: new Date(3532219539133),
		ancient: new Date(-3532219539133),
		year2038,
		year2039,
		invalidDate: new Date('invalid')
	}
	let encoder = new Encoder()
	var serialized = encoder.encode(data)
	var deserialized = encoder.decode(serialized)
	assert.equal(deserialized.map.get(4), 'four')
	assert.equal(deserialized.map.get('three'), 3)
	assert.equal(deserialized.date.getTime(), 1532219539733)
	assert.equal(deserialized.farFutureDate.getTime(), 3532219539133)
	assert.equal(deserialized.ancient.getTime(), -3532219539133)
	assert.equal(deserialized.year2038.getTime(), year2038.getTime())
	assert.equal(deserialized.year2039.getTime(), year2039.getTime())
	assert.equal(deserialized.invalidDate.toString(), 'Invalid Date')
})
// With mapsAsObjects, Maps decode to plain objects (numeric keys become
// strings); useTimestamp32 truncates dates to whole seconds — note the
// expected ...539000 below.
test('map/date with options', function(){
	var map = new Map()
	map.set(4, 'four')
	map.set('three', 3)
	var data = {
		map: map,
		date: new Date(1532219539011),
		invalidDate: new Date('invalid')
	}
	let encoder = new Encoder({
		mapsAsObjects: true,
		useTimestamp32: true,
		useTag259ForMaps: false,
	})
	var serialized = encoder.encode(data)
	var deserialized = encoder.decode(serialized)
	assert.equal(deserialized.map[4], 'four')
	assert.equal(deserialized.map.three, 3)
	assert.equal(deserialized.date.getTime(), 1532219539000)
	assert.isTrue(isNaN(deserialized.invalidDate.getTime()))
})
// Encoding the same object shape repeatedly should keep working once key
// caching kicks in.
test('key caching', function() {
	var data = {
		foo: 2,
		bar: 'test',
		four: 4,
		seven: 7,
		foz: 3,
	}
	var serialized = CBOR.encode(data)
	var deserialized = CBOR.decode(serialized)
	assert.deepEqual(deserialized, data)
	// do multiple times to test caching
	var serialized = CBOR.encode(data)
	var deserialized = CBOR.decode(serialized)
	assert.deepEqual(deserialized, data)
	var serialized = CBOR.encode(data)
	var deserialized = CBOR.decode(serialized)
	assert.deepEqual(deserialized, data)
})
// String round-trips: empty string (twice, for caching), multi-byte UTF-8,
// and a longer pure-latin string.
test('strings', function() {
	var data = ['']
	var serialized = encode(data)
	var deserialized = decode(serialized)
	assert.deepEqual(deserialized, data)
	// do multiple times
	var serialized = encode(data)
	var deserialized = decode(serialized)
	assert.deepEqual(deserialized, data)
	data = 'decode this: ᾜ'
	var serialized = encode(data)
	var deserialized = decode(serialized)
	assert.deepEqual(deserialized, data)
	data = 'decode this that is longer but without any non-latin characters'
	var serialized = encode(data)
	var deserialized = decode(serialized)
	assert.deepEqual(deserialized, data)
})
// DECIMAL_FIT: values that survive float32 rounding are stored as 32-bit
// floats; the expected total size (36 bytes) pins that behavior.
test('decimal float32', function() {
	var data = {
		a: 2.526,
		b: 0.0035235,
		c: 0.00000000000352501,
		d: 3252.77,
	}
	let encoder = new Encoder({
		useFloat32: DECIMAL_FIT
	})
	var serialized = encoder.encode(data)
	assert.equal(serialized.length, 36)
	var deserialized = encoder.decode(serialized)
	assert.deepEqual(deserialized, data)
})
// alwaysUseFloat forces 64-bit float encoding even for small integers
// (1 head byte + 8 payload bytes).
test('decimal alwaysUseFloat', function() {
	var data = 123
	let encoder = new Encoder({
		alwaysUseFloat: true
	})
	var serialized = encoder.encode(data)
	assert.equal(serialized.length, 9)
	var deserialized = encoder.decode(serialized)
	assert.equal(deserialized, data)
})
// int64AsNumber: 64-bit integers decode to (lossy) JS numbers.
test('bigint to float', function() {
	var data = {
		a: 325283295382932843n
	}
	let encoder = new Encoder({
		int64AsNumber: true
	})
	var serialized = encoder.encode(data)
	var deserialized = encoder.decode(serialized)
	assert.deepEqual(deserialized.a, 325283295382932843)
})
// Mixed numeric round-trip: large safe integers, decimals, negatives,
// extreme exponents, zero, and Infinity.
test('numbers', function(){
	var data = {
		bigEncodable: 48978578104322,
		dateEpoch: 1530886513200,
		realBig: 3432235352353255323,
		decimal: 32.55234,
		negative: -34.11,
		exponential: 0.234e123,
		tiny: 3.233e-120,
		zero: 0,
		//negativeZero: -0,
		Infinity: Infinity
	}
	var serialized = encode(data)
	var deserialized = decode(serialized)
	assert.deepEqual(deserialized, data)
})
// Negative integers should use the shortest encoding form available.
test('numbers are compact', function(){
	// -256 fits the one-byte argument form: head byte + 1 byte
	let smallNegative = encode(-256)
	assert.equal(smallNegative.length, 2)
	// -4294967296 needs the four-byte argument form: head byte + 4 bytes
	let largeNegative = encode(-4294967296)
	assert.equal(largeNegative.length, 5)
	assert.equal(decode(largeNegative), -4294967296)
})
// An ArrayBuffer encodes as a CBOR byte string and decodes back with the
// same contents and length. (Removed an accidental duplicate assertion on
// decoded[0] in the original.)
test('encode ArrayBuffer', function() {
	let ua = new Uint8Array([3, 4, 5]);
	let encoded = encode(ua.buffer);
	let decoded = decode(encoded);
	assert.equal(decoded[0], 3);
	assert.equal(decoded[1], 4);
	assert.equal(decoded[2], 5);
	assert.equal(decoded.byteLength, 3);
})
673 |
// An iterable that is not recognized as an Array (constructor overridden)
// should be encoded as an indefinite-length array and still round-trip.
test('iterator/indefinite length array', function(){
	class NotArray {
	}
	let data = ['a', 'b', 'c', ['d']] // iterable
	data.constructor = NotArray
	var serialized = encode(data)
	var deserialized = decode(serialized)
	assert.deepEqual(deserialized, data)
})
// BigInt round-trips: values within int64/uint64 range, values beyond it
// (arbitrary-precision tags), and largeBigIntToFloat which lossily converts
// oversized bigints to floats on encode.
test('bigint', function(){
	var data = {
		bigintSmall: 352n,
		bigintSmallNegative: -333335252n,
		bigintBig: 2n**64n - 1n, // biggest 64-bit possible
		bigintBigNegative: -(2n**63n), // largest negative
		mixedWithNormal: 44,
	}
	var serialized = encode(data)
	var deserialized = decode(serialized)
	assert.deepEqual(deserialized, data)
	var evenBiggerInt = {
		big: 2n**66n,
		bigger: 53285732853728573289573289573289573289583725892358732859532n,
		negBig: -93025879203578903275903285903285903289502n,
	}
	var serialized = encode(evenBiggerInt)
	var deserialized = decode(serialized)
	assert.deepEqual(deserialized, evenBiggerInt)
	let encoder = new Encoder({
		largeBigIntToFloat: true
	})
	serialized = encoder.encode(evenBiggerInt)
	deserialized = decode(serialized)
	assert.isTrue(deserialized.bigger > 2n**65n)
})
709 |
// Uint8Array round-trips, plus the tagUint8Array option: tagged form starts
// with 0xd8 (tag), untagged form starts with 0x43 (3-byte byte string).
test('buffers', function() {
	var data = {
		buffer1: new Uint8Array([2,3,4]),
		buffer2: new Uint8Array(encode(sampleData))
	}
	var serialized = encode(data)
	var deserialized = decode(serialized)
	assert.deepEqual(deserialized, data)
	let encoder = new Encoder({ tagUint8Array: true })
	serialized = encoder.encode(new Uint8Array([2,3,4]))
	assert.equal(serialized[0], 0xd8);
	encoder = new Encoder({ tagUint8Array: false })
	serialized = encoder.encode(new Uint8Array([2,3,4]))
	assert.equal(serialized[0], 0x43);
})

// Deeply nested mixed structure; decoded three times from the same bytes to
// exercise any decode-side caching.
test('noteencode test', function() {
	const data = {
		foo: 1,
		bar: [1, 2, 3, 4, 'abc', 'def'],
		foobar: {
			foo: true,
			bar: -2147483649,
			foobar: {
				foo: new Uint8Array([1, 2, 3, 4, 5]),
				bar: 1.5,
				foobar: [true, false, 'abcdefghijkmonpqrstuvwxyz']
			}
		}
	};
	var serialized = encode(data)
	var deserialized = decode(serialized)
	var deserialized = decode(serialized)
	var deserialized = decode(serialized)
	assert.deepEqual(deserialized, data)
})
746 |
// A long string of code points whose UTF-8 form is larger than its UTF-16
// form; exercises the encoder's buffer-expansion path.
test('utf16 causing expansion', function() {
	this.timeout(10000)
	let data = {fixstr: 'ᾐᾑᾒᾓᾔᾕᾖᾗᾘᾙᾚᾛᾜᾝ', str8:'ᾐᾑᾒᾓᾔᾕᾖᾗᾘᾙᾚᾛᾜᾝᾐᾑᾒᾓᾔᾕᾖᾗᾘᾙᾚᾛᾜᾝᾐᾑᾒᾓᾔᾕᾖᾗᾘᾙᾚᾛᾜᾝᾐᾑᾒᾓᾔᾕᾖᾗᾘᾙᾚᾛᾜᾝᾐᾑᾒᾓᾔᾕᾖᾗᾘᾙᾚᾛᾜᾝᾐᾑᾒᾓᾔᾕᾖᾗᾘᾙᾚᾛᾜᾝᾐᾑᾒᾓᾔᾕᾖᾗᾘᾙᾚᾛᾜᾝᾐᾑᾒᾓᾔᾕᾖᾗᾘᾙᾚᾛᾜᾝᾐᾑᾒᾓᾔᾕᾖᾗᾘᾙᾚᾛᾜᾝᾐᾑᾒᾓᾔᾕᾖᾗᾘᾙᾚᾛᾜᾝᾐᾑᾒᾓᾔᾕᾖᾗᾘᾙᾚᾛᾜᾝᾐᾑᾒᾓᾔᾕᾖᾗᾘᾙᾚᾛᾜᾝᾐᾑᾒᾓᾔᾕᾖᾗᾘᾙᾚᾛᾜᾝᾐᾑᾒᾓᾔᾕᾖᾗᾘᾙᾚᾛᾜᾝᾐᾑᾒᾓᾔᾕᾖᾗᾘᾙᾚᾛᾜᾝᾐᾑᾒᾓᾔᾕᾖᾗᾘᾙᾚᾛᾜᾝ'}
	var serialized = encode(data)
	var deserialized = decode(serialized)
	assert.deepEqual(deserialized, data)
})
// decodeMultiple: returns all values as an array, or streams them through a
// callback when one is provided.
test('decodeMultiple', () => {
	let values = CBOR.decodeMultiple(new Uint8Array([1, 2, 3, 4]))
	assert.deepEqual(values, [1, 2, 3, 4])
	values = []
	CBOR.decodeMultiple(new Uint8Array([1, 2, 3, 4]), value => values.push(value))
	assert.deepEqual(values, [1, 2, 3, 4])
})
// skipFunction: function-valued properties are silently omitted from output.
test('skipFunction', () => {
	var data = {
		a: 325283295382932843n,
		f: () => {},
	}
	let encoder = new Encoder({
		int64AsNumber: true,
		skipFunction: true,
	})
	var serialized = encoder.encode(data)
	var deserialized = encoder.decode(serialized)
	assert.deepEqual(deserialized.a, 325283295382932843)
	assert.equal(Object.hasOwn(deserialized, "f"), false);
})
// Malformed input (these bytes are actually JSON text) must raise a catchable
// error rather than crash the process.
test('bad input', function() {
	let badInput = Buffer.from('7b2273657269616c6e6f223a2265343a30222c226970223a223139322e3136382e312e3335222c226b6579223a226770735f736563726574227d', 'hex');
	assert.throws(function(){ decode(badInput) }) // should throw, not crash
})
// Decodes a map whose key is a byte string (mapsAsObjects: false).
// NOTE(review): no assertion here — this only verifies decoding doesn't
// throw, and logs the result; consider asserting on the decoded Map.
test('buffer key', function() {
	let encoder = new Encoder({ mapsAsObjects: false })
	let test = encoder.decode(Buffer.from('D87982A1446E616D654361626301', 'hex'));
	console.log(test);
});
// encodeAsIterable: sync generators embedded in the data are encoded lazily
// as indefinite-length arrays; concatenating the yielded chunks yields a
// decodable document.
test('encode as iterator', function() {
	let hasIterables = {
		a: 1,
		iterator: (function*() {
			yield 2;
			yield {
				b: (function*() {
					yield 3;
				})(),
			};
		})()
	};
	let encodedIterable = encodeAsIterable(hasIterables);
	let result = [...encodedIterable];
	result = Buffer.concat(result);
	let deserialized = decode(result);
	const expectedResult = {
		a: 1,
		iterator: [2, { b: [3]}]
	};
	assert.deepEqual(deserialized, expectedResult);
});
// With async parts present, the sync iterable can't fully encode them; the
// Blob is passed through as the last yielded part.
if (typeof Blob !== 'undefined')
test('encode as iterator with async/blob parts', function() {
	let blob = new Blob([Buffer.from([4,5])]);
	let hasIterables = {
		a: 1,
		iterator: (async function*() {
			yield 2;
			yield {
				b: (function*() {
					yield 3;
				})(),
			};
		})(),
		blob
	};
	let encodedIterable = encodeAsIterable(hasIterables);
	let result = [...encodedIterable];
	assert.equal(result[result.length - 1].constructor, Blob);
});
// encodeAsAsyncIterable resolves async generators and Blob contents, so the
// concatenated chunks decode to fully materialized data.
if (typeof Blob !== 'undefined')
test('encode as async iterator with async/blob parts', async function() {
	let blob = new Blob([Buffer.from([4, 5])]);
	let hasIterables = {
		a: 1,
		iterator: (async function* () {
			yield 2;
			yield {
				b: (function* () {
					yield 3;
				})(),
			};
		})(),
		blob
	};
	let encodedIterable = encodeAsAsyncIterable(hasIterables);
	let result = [];
	for await (let encodedPart of encodedIterable) {
		result.push(encodedPart)
	}
	let deserialized = decode(Buffer.concat(result));
	const expectedResult = {
		a: 1,
		iterator: [2, { b: [3]}],
		blob: Buffer.from([4,5]),
	};
	assert.deepEqual(deserialized, expectedResult);
});
// Skipped micro-benchmark for encodeAsIterable; prints timing only.
test.skip('encode as iterator performance', async function() {
	function* iterator() {
		for (let i = 0; i < 1000; i++) {
			yield {
				a: 1,
				b: 'hello, world',
				c: true,
				sub: {
					d: 'inside',
					e: 3
				}
			}
		}
	}
	let result;
	let start = performance.now();
	for (let i = 0; i < 1000; i++) {
		let encodedIterable = encodeAsIterable(iterator());
		result = [...encodedIterable];
	}
	let deserialized = decode(Buffer.concat(result));
	console.log(performance.now() - start, result.length);
});

// Typed-array tag (little-endian Uint32Array here) with payload already
// aligned to the element size must decode to the typed array, and typed
// arrays must round-trip through encode.
test('little-endian typed array with aligned data', function() {
	// array[1] { uint32-little-endian-typed-array { bytes <00 00 00 00> } }
	let data = new Uint8Array([ 129, 216, 70, 68, 0, 0, 0, 0 ]);
	assert.deepEqual(decode(data), [new Uint32Array([0])]);

	let value = {x: new Float32Array([1, 2, 3])};
	assert.deepEqual(decode(encode(value)), value);
});
}) // end of the basic tests suite
// Rough throughput comparisons against JSON over the shared sampleData
// fixture; these assert nothing and exist for manual benchmarking.
suite('CBOR performance tests', function(){
	// Baseline: JSON.parse over the stringified sample data.
	// (Removed an unused `structures` local from the original.)
	test('performance JSON.parse', function() {
		var data = sampleData
		this.timeout(10000)
		var serialized = JSON.stringify(data)
		console.log('JSON size', serialized.length)
		for (var i = 0; i < ITERATIONS; i++) {
			var deserialized = JSON.parse(serialized)
		}
	})
	// Baseline: JSON.stringify.
	test('performance JSON.stringify', function() {
		var data = sampleData
		this.timeout(10000)
		for (var i = 0; i < ITERATIONS; i++) {
			var serialized = JSON.stringify(data)
		}
	})
	// CBOR decode using record structures and bundled strings; also logs the
	// serialized sizes with and without the record extension.
	test('performance decode', function() {
		var data = sampleData
		this.timeout(10000)
		let structures = []
		var serialized = encode(data)
		console.log('CBOR size', serialized.length)
		let encoder = new Encoder({ structures, bundleStrings: true })
		var serialized = encoder.encode(data)
		console.log('CBOR w/ record ext size', serialized.length)
		for (var i = 0; i < ITERATIONS; i++) {
			var deserialized = encoder.decode(serialized)
		}
	})
	// CBOR encode reusing a preallocated target buffer via useBuffer().
	test('performance encode', function() {
		var data = sampleData
		this.timeout(10000)
		let structures = []
		let encoder = new Encoder({ structures, bundleStrings: true })
		let buffer = typeof Buffer != 'undefined' ? Buffer.alloc(0x10000) : new Uint8Array(0x10000)

		for (var i = 0; i < ITERATIONS; i++) {
			//serialized = encode(data, { shared: sharedStructure })
			encoder.useBuffer(buffer)
			var serialized = encoder.encode(data)
			//var serializedGzip = deflateSync(serialized)
		}
		//console.log('serialized', serialized.length, global.propertyComparisons)
	})
})
--------------------------------------------------------------------------------
/decode.js:
--------------------------------------------------------------------------------
// TextDecoder is used for fast UTF-8 string decoding; it may be unavailable
// in some environments, so failure to construct it is tolerated and a
// fallback path is used instead.
let decoder
try {
	decoder = new TextDecoder()
} catch(error) {}
// Module-level parser state. decode() is effectively single-threaded and
// non-reentrant; re-entrant calls save and restore this state (see decode()).
let src // the Uint8Array/Buffer currently being decoded
let srcEnd // end offset (exclusive) of the data to read within src
let position = 0 // current read cursor into src
let alreadySet
const EMPTY_ARRAY = []
const LEGACY_RECORD_INLINE_ID = 105
const RECORD_DEFINITIONS_ID = 0xdffe
const RECORD_INLINE_ID = 0xdfff // temporary first-come first-serve tag // proposed tag: 0x7265 // 're'
const BUNDLED_STRINGS_ID = 0xdff9
const PACKED_TABLE_TAG_ID = 51
const PACKED_REFERENCE_TAG_ID = 6
const STOP_CODE = {} // sentinel returned when an indefinite-length "break" (0xff) is read
let maxArraySize = 112810000 // This is the maximum array size in V8. We would potentially detect and set it higher
// for JSC, but this is pretty large and should be sufficient for most use cases
let maxMapSize = 16810000 // JavaScript has a fixed maximum map size of about 16710000, but JS itself enforces this,
// so we don't need to

let maxObjectSize = 16710000; // This is the maximum number of keys in a Map. It takes over a minute to create this
// many keys in an object, so also probably a reasonable choice there.
let strings = EMPTY_ARRAY // pre-extracted strings for the current source (when using native extraction)
let stringPosition = 0 // next index to consume from `strings`
let currentDecoder = {} // the Decoder instance (or defaultOptions) driving the current decode
let currentStructures // record structures in effect for the current decode
let srcString // cached string form of a latin1 slice of src
let srcStringStart = 0
let srcStringEnd = 0
let bundledStrings // state for bundled-strings blocks, when encountered
let referenceMap // id -> target map for structuredClone reference resolution
let currentExtensions = [] // tag id -> extension decode handlers
let currentExtensionRanges = []
let packedValues // packed/shared string table for the current decode
let dataView // cached DataView over src
let restoreMapsAsObject
let defaultOptions = {
	useRecords: false,
	mapsAsObjects: true
}
let sequentialMode = false // true inside decodeMultiple, where trailing data is expected
let inlineObjectReadThreshold = 2;
var BlockedFunction // we use search and replace to change the next call to BlockedFunction to avoid CSP issues for
// no-eval build
try {
	new Function('')
} catch(error) {
	// if eval variants are not supported, do not create inline object readers ever
	inlineObjectReadThreshold = Infinity
}
52 |
53 |
54 |
// Decoder parses CBOR from a Uint8Array/Buffer according to its options.
// Note: decoding works through the module-level state above, so a Decoder
// is not safe for concurrent use; re-entrant decode() calls are handled by
// saving/restoring that state.
export class Decoder {
	constructor(options) {
		if (options) {
			// keyMap implies plain-object output rather than record structures
			if ((options.keyMap || options._keyMap) && !options.useRecords) {
				options.useRecords = false
				options.mapsAsObjects = true
			}
			if (options.useRecords === false && options.mapsAsObjects === undefined)
				options.mapsAsObjects = true
			// getStructures is the legacy name for getShared
			if (options.getStructures)
				options.getShared = options.getStructures
			if (options.getShared && !options.structures)
				(options.structures = []).uninitialized = true // this is what we use to denote an uninitialized structures
			if (options.keyMap) {
				// build the inverse map (encoded key -> original key) once
				this.mapKey = new Map()
				for (let [k,v] of Object.entries(options.keyMap)) this.mapKey.set(v,k)
			}
		}
		Object.assign(this, options)
	}
	/*
	decodeKey(key) {
		return this.keyMap
			? Object.keys(this.keyMap)[Object.values(this.keyMap).indexOf(key)] || key
			: key
	}
	*/
	// Translate an encoded key back to its original name via the keyMap.
	// NOTE(review): `|| key` means a mapped value that is falsy ('' , 0)
	// would fall through to the raw key — presumably keyMap values are
	// always truthy strings; confirm.
	decodeKey(key) {
		return this.keyMap ? this.mapKey.get(key) || key : key
	}

	// Translate an original key to its encoded form (identity when unmapped).
	encodeKey(key) {
		return this.keyMap && this.keyMap.hasOwnProperty(key) ? this.keyMap[key] : key
	}

	// Apply _keyMap to a record, producing a Map with translated keys.
	encodeKeys(rec) {
		if (!this._keyMap) return rec
		let map = new Map()
		for (let [k,v] of Object.entries(rec)) map.set((this._keyMap.hasOwnProperty(k) ? this._keyMap[k] : k), v)
		return map
	}

	// Convert a decoded Map back to a plain object with original key names.
	// Builds and caches the inverse _mapKey on first use.
	decodeKeys(map) {
		if (!this._keyMap || map.constructor.name != 'Map') return map
		if (!this._mapKey) {
			this._mapKey = new Map()
			for (let [k,v] of Object.entries(this._keyMap)) this._mapKey.set(v,k)
		}
		let res = {}
		//map.forEach((v,k) => res[Object.keys(this._keyMap)[Object.values(this._keyMap).indexOf(k)] || k] = v)
		map.forEach((v,k) => res[safeKey(this._mapKey.has(k) ? this._mapKey.get(k) : k)] = v)
		return res
	}

	// decode() plus post-processing of keys when a _keyMap is configured.
	mapDecode(source, end) {

		let res = this.decode(source)
		if (this._keyMap) {
			// Experimental support for optimised keyMap decoding
			switch (res.constructor.name) {
				case 'Array': return res.map(r => this.decodeKeys(r))
				//case 'Map': return this.decodeKeys(res)
			}
		}
		return res
	}

	// Decode a single CBOR value from `source`, reading up to `end` (or the
	// whole buffer). Sets up the module-level cursor/state, then delegates to
	// checkedRead().
	decode(source, end) {
		if (src) {
			// re-entrant execution, save the state and restore it after we do this decode
			return saveState(() => {
				clearSource()
				return this ? this.decode(source, end) : Decoder.prototype.decode.call(defaultOptions, source, end)
			})
		}
		srcEnd = end > -1 ? end : source.length
		position = 0
		stringPosition = 0
		srcStringEnd = 0
		srcString = null
		strings = EMPTY_ARRAY
		bundledStrings = null
		src = source
		// this provides cached access to the data view for a buffer if it is getting reused, which is a recommend
		// technique for getting data from a database where it can be copied into an existing buffer instead of creating
		// new ones
		try {
			dataView = source.dataView || (source.dataView = new DataView(source.buffer, source.byteOffset, source.byteLength))
		} catch(error) {
			// if it doesn't have a buffer, maybe it is the wrong type of object
			src = null
			if (source instanceof Uint8Array)
				throw error
			throw new Error('Source must be a Uint8Array or Buffer but was a ' + ((source && typeof source == 'object') ? source.constructor.name : typeof source))
		}
		if (this instanceof Decoder) {
			currentDecoder = this
			// packed/shared string table; in pack mode, reserve slots for
			// message-private packed values ahead of the shared ones
			packedValues = this.sharedValues &&
				(this.pack ? new Array(this.maxPrivatePackedValues || 16).concat(this.sharedValues) :
				this.sharedValues)
			if (this.structures) {
				currentStructures = this.structures
				return checkedRead()
			} else if (!currentStructures || currentStructures.length > 0) {
				currentStructures = []
			}
		} else {
			currentDecoder = defaultOptions
			if (!currentStructures || currentStructures.length > 0)
				currentStructures = []
			packedValues = null
		}
		return checkedRead()
	}
	// Decode a sequence of concatenated CBOR values. With `forEach`, each
	// value is passed to the callback (return false to stop early); without
	// it, all values are returned as an array. On error, the error is
	// annotated with lastPosition and the values decoded so far.
	decodeMultiple(source, forEach) {
		let values, lastPosition = 0
		try {
			let size = source.length
			sequentialMode = true
			let value = this ? this.decode(source, size) : defaultDecoder.decode(source, size)
			if (forEach) {
				if (forEach(value) === false) {
					return
				}
				while(position < size) {
					lastPosition = position
					if (forEach(checkedRead()) === false) {
						return
					}
				}
			}
			else {
				values = [ value ]
				while(position < size) {
					lastPosition = position
					values.push(checkedRead())
				}
				return values
			}
		} catch(error) {
			error.lastPosition = lastPosition
			error.values = values
			throw error
		} finally {
			sequentialMode = false
			clearSource()
		}
	}
}
// Returns the current read offset into the source buffer (module-level decoder state).
export function getPosition() {
	return position
}
// Calls read() and then validates that decoding consumed the source correctly,
// cleaning up module-level references when the buffer is exhausted. Errors that
// indicate truncated input are flagged with `incomplete = true` so streaming
// callers know to wait for more data rather than fail.
export function checkedRead() {
	try {
		let result = read()
		if (bundledStrings) {
			if (position >= bundledStrings.postBundlePosition) {
				// the value ran into/past the bundle region; the data must be truncated
				let error = new Error('Unexpected bundle position');
				error.incomplete = true;
				throw error
			}
			// bundled strings to skip past
			position = bundledStrings.postBundlePosition;
			bundledStrings = null;
		}

		if (position == srcEnd) {
			// finished reading this source, cleanup references
			currentStructures = null
			src = null
			if (referenceMap)
				referenceMap = null
		} else if (position > srcEnd) {
			// over read
			let error = new Error('Unexpected end of CBOR data')
			error.incomplete = true
			throw error
		} else if (!sequentialMode) {
			throw new Error('Data read, but end of buffer not reached')
		}
		// else more to read, but we are reading sequentially, so don't clear source yet
		return result
	} catch(error) {
		clearSource()
		// RangeError comes from DataView reads past the end of the buffer
		if (error instanceof RangeError || error.message.startsWith('Unexpected end of buffer')) {
			error.incomplete = true
		}
		throw error
	}
}
245 |
/**
 * Core CBOR value reader: decodes one data item from the module-level `src`
 * buffer at `position`, advancing `position` past it. The initial byte is
 * split into a 3-bit major type and a 5-bit token; tokens 0x18-0x1b carry
 * 1/2/4/8-byte extended arguments, and 0x1f marks indefinite-length items.
 */
export function read() {
	let token = src[position++]
	let majorType = token >> 5
	token = token & 0x1f
	if (token > 0x17) {
		switch (token) {
			case 0x18: // 8-bit argument
				token = src[position++]
				break
			case 0x19: // 16-bit argument (float16 for major type 7)
				if (majorType == 7) {
					return getFloat16()
				}
				token = dataView.getUint16(position)
				position += 2
				break
			case 0x1a: // 32-bit argument (float32 for major type 7)
				if (majorType == 7) {
					let value = dataView.getFloat32(position)
					if (currentDecoder.useFloat32 > 2) {
						// this does rounding of numbers that were encoded in 32-bit float to nearest significant decimal digit that could be preserved
						let multiplier = mult10[((src[position] & 0x7f) << 1) | (src[position + 1] >> 7)]
						position += 4
						return ((multiplier * value + (value > 0 ? 0.5 : -0.5)) >> 0) / multiplier
					}
					position += 4
					return value
				}
				token = dataView.getUint32(position)
				position += 4
				if (majorType === 1) return -1 - token; // can't safely use negation operator here
				break
			case 0x1b: // 64-bit argument (float64 for major type 7)
				if (majorType == 7) {
					let value = dataView.getFloat64(position)
					position += 8
					return value
				}
				if (majorType > 1) {
					// lengths must fit in 32 bits; the high word must be zero
					if (dataView.getUint32(position) > 0)
						throw new Error('JavaScript does not support arrays, maps, or strings with length over 4294967295')
					token = dataView.getUint32(position + 4)
				} else if (currentDecoder.int64AsNumber) {
					// may exceed Number.MAX_SAFE_INTEGER; caller opted into lossy numbers
					token = dataView.getUint32(position) * 0x100000000
					token += dataView.getUint32(position + 4)
				} else token = dataView.getBigUint64(position)
				position += 8
				break
			case 0x1f:
				// indefinite length
				switch(majorType) {
					case 2: // byte string
					case 3: // text string
						throw new Error('Indefinite length not supported for byte or text strings')
					case 4: // array
						let array = []
						let value, i = 0
						while ((value = read()) != STOP_CODE) {
							if (i >= maxArraySize) throw new Error(`Array length exceeds ${maxArraySize}`)
							array[i++] = value
						}
						return majorType == 4 ? array : majorType == 3 ? array.join('') : Buffer.concat(array)
					case 5: // map
						let key
						if (currentDecoder.mapsAsObjects) {
							let object = {}
							let i = 0;
							if (currentDecoder.keyMap) {
								while((key = read()) != STOP_CODE) {
									if (i++ >= maxMapSize) throw new Error(`Property count exceeds ${maxMapSize}`)
									object[safeKey(currentDecoder.decodeKey(key))] = read()
								}
							}
							else {
								while ((key = read()) != STOP_CODE) {
									if (i++ >= maxMapSize) throw new Error(`Property count exceeds ${maxMapSize}`)
									object[safeKey(key)] = read()
								}
							}
							return object
						} else {
							if (restoreMapsAsObject) {
								// tag 259 temporarily forced Map decoding; restore the flag
								currentDecoder.mapsAsObjects = true
								restoreMapsAsObject = false
							}
							let map = new Map()
							if (currentDecoder.keyMap) {
								let i = 0;
								while((key = read()) != STOP_CODE) {
									if (i++ >= maxMapSize) {
										throw new Error(`Map size exceeds ${maxMapSize}`);
									}
									map.set(currentDecoder.decodeKey(key), read())
								}
							}
							else {
								let i = 0;
								while ((key = read()) != STOP_CODE) {
									if (i++ >= maxMapSize) {
										throw new Error(`Map size exceeds ${maxMapSize}`);
									}
									map.set(key, read())
								}
							}
							return map
						}
					case 7:
						return STOP_CODE
					default:
						throw new Error('Invalid major type for indefinite length ' + majorType)
				}
			default:
				throw new Error('Unknown token ' + token)
		}
	}
	switch (majorType) {
		case 0: // positive int
			return token
		case 1: // negative int, defined by CBOR as -1 - argument
			// ~ is exact for bigints but truncates numbers to 32 bits; the
			// int64AsNumber path above can produce larger number tokens, so
			// compute those arithmetically (see the 0x1a case's comment on ~)
			return typeof token == 'bigint' ? ~token : -1 - token
		case 2: // buffer
			return readBin(token)
		case 3: // string
			if (srcStringEnd >= position) {
				// inside a previously extracted string; slice directly from it
				return srcString.slice(position - srcStringStart, (position += token) - srcStringStart)
			}
			if (srcStringEnd == 0 && srcEnd < 140 && token < 32) {
				// for small blocks, avoiding the overhead of the extract call is helpful
				let string = token < 16 ? shortStringInJS(token) : longStringInJS(token)
				if (string != null)
					return string
			}
			return readFixedString(token)
		case 4: // array
			if (token >= maxArraySize) throw new Error(`Array length exceeds ${maxArraySize}`)
			let array = new Array(token)
			//if (currentDecoder.keyMap) for (let i = 0; i < token; i++) array[i] = currentDecoder.decodeKey(read())
			//else
			for (let i = 0; i < token; i++) array[i] = read()
			return array
		case 5: // map
			if (token >= maxMapSize) throw new Error(`Map size exceeds ${maxMapSize}`)
			if (currentDecoder.mapsAsObjects) {
				let object = {}
				if (currentDecoder.keyMap) for (let i = 0; i < token; i++) object[safeKey(currentDecoder.decodeKey(read()))] = read()
				else for (let i = 0; i < token; i++) object[safeKey(read())] = read()
				return object
			} else {
				if (restoreMapsAsObject) {
					// tag 259 temporarily forced Map decoding; restore the flag
					currentDecoder.mapsAsObjects = true
					restoreMapsAsObject = false
				}
				let map = new Map()
				if (currentDecoder.keyMap) for (let i = 0; i < token; i++) map.set(currentDecoder.decodeKey(read()),read())
				else for (let i = 0; i < token; i++) map.set(read(), read())
				return map
			}
		case 6: // extension
			if (token >= BUNDLED_STRINGS_ID) {
				let structure = currentStructures[token & 0x1fff] // check record structures first
				// At some point we may provide an option for dynamic tag assignment with a range like token >= 8 && (token < 16 || (token > 0x80 && token < 0xc0) || (token > 0x130 && token < 0x4000))
				if (structure) {
					if (!structure.read) structure.read = createStructureReader(structure)
					return structure.read()
				}
				if (token < 0x10000) {
					if (token == RECORD_INLINE_ID) { // we do a special check for this so that we can keep the
						// currentExtensions as densely stored array (v8 stores arrays densely under about 3000 elements)
						let length = readJustLength()
						let id = read()
						let structure = read()
						recordDefinition(id, structure)
						let object = {}
						if (currentDecoder.keyMap) for (let i = 2; i < length; i++) {
							let key = currentDecoder.decodeKey(structure[i - 2])
							object[safeKey(key)] = read()
						}
						else for (let i = 2; i < length; i++) {
							let key = structure[i - 2]
							object[safeKey(key)] = read()
						}
						return object
					}
					else if (token == RECORD_DEFINITIONS_ID) {
						let length = readJustLength()
						let id = read()
						for (let i = 2; i < length; i++) {
							recordDefinition(id++, read())
						}
						return read()
					} else if (token == BUNDLED_STRINGS_ID) {
						return readBundleExt()
					}
					if (currentDecoder.getShared) {
						// the structure may live in shared state; load and retry
						loadShared()
						structure = currentStructures[token & 0x1fff]
						if (structure) {
							if (!structure.read)
								structure.read = createStructureReader(structure)
							return structure.read()
						}
					}
				}
			}
			let extension = currentExtensions[token]
			if (extension) {
				if (extension.handlesRead)
					return extension(read)
				else
					return extension(read())
			} else {
				// unknown tag: try range-based handlers, else preserve as a Tag
				let input = read()
				for (let i = 0; i < currentExtensionRanges.length; i++) {
					let value = currentExtensionRanges[i](token, input)
					if (value !== undefined)
						return value
				}
				return new Tag(input, token)
			}
		case 7: // fixed value
			switch (token) {
				case 0x14: return false
				case 0x15: return true
				case 0x16: return null
				case 0x17: return; // undefined
				case 0x1f:
				default:
					// simple values double as packed-value references here
					let packedValue = (packedValues || getPackedValues())[token]
					if (packedValue !== undefined)
						return packedValue
					throw new Error('Unknown token ' + token)
			}
		default: // only reachable when token is NaN (read past end of buffer)
			if (isNaN(token)) {
				let error = new Error('Unexpected end of CBOR data')
				error.incomplete = true
				throw error
			}
			throw new Error('Unknown CBOR token ' + token)
	}
}
const validName = /^[a-zA-Z_$][a-zA-Z\d_$]*$/
// Builds the `read` method for a record structure (an array of property keys).
// The returned readObject is invoked with `this` bound to the structure array.
// It starts as a generic interpreted reader and, after inlineObjectReadThreshold
// slow reads, compiles a specialized object-literal reader via new Function.
function createStructureReader(structure) {
	if (!structure) throw new Error('Structure is required in record definition');
	function readObject() {
		// get the array size from the header
		let length = src[position++]
		//let majorType = token >> 5
		length = length & 0x1f
		if (length > 0x17) {
			switch (length) {
				case 0x18:
					length = src[position++]
					break
				case 0x19:
					length = dataView.getUint16(position)
					position += 2
					break
				case 0x1a:
					length = dataView.getUint32(position)
					position += 4
					break
				default:
					throw new Error('Expected array header, but got ' + src[position - 1])
			}
		}
		// This initial function is quick to instantiate, but runs slower. After several iterations pay the cost to build the faster function
		let compiledReader = this.compiledReader // first look to see if we have the fast compiled function
		while(compiledReader) {
			// we have a fast compiled object literal reader
			if (compiledReader.propertyCount === length)
				return compiledReader(read) // with the right length, so we use it
			compiledReader = compiledReader.next // see if there is another reader with the right length
		}
		if (this.slowReads++ >= inlineObjectReadThreshold) { // create a fast compiled reader
			let array = this.length == length ? this : this.slice(0, length)
			// keys that aren't valid identifiers are emitted as ["..."] computed properties
			compiledReader = currentDecoder.keyMap
				? new Function('r', 'return {' + array.map(k => currentDecoder.decodeKey(k)).map(k => validName.test(k) ? safeKey(k) + ':r()' : ('[' + JSON.stringify(k) + ']:r()')).join(',') + '}')
				: new Function('r', 'return {' + array.map(key => validName.test(key) ? safeKey(key) + ':r()' : ('[' + JSON.stringify(key) + ']:r()')).join(',') + '}')
			if (this.compiledReader)
				compiledReader.next = this.compiledReader // if there is an existing one, we store multiple readers as a linked list because it is usually pretty rare to have multiple readers (of different length) for the same structure
			compiledReader.propertyCount = length
			this.compiledReader = compiledReader
			return compiledReader(read)
		}
		// interpreted fallback: read each property value in structure-key order
		let object = {}
		if (currentDecoder.keyMap) for (let i = 0; i < length; i++) object[safeKey(currentDecoder.decodeKey(this[i]))] = read()
		else for (let i = 0; i < length; i++) {
			object[safeKey(this[i])] = read();
		}
		return object
	}
	structure.slowReads = 0
	return readObject
}
541 |
// Sanitizes a decoded value for use as an object property name.
// Blocks prototype pollution by remapping '__proto__', stringifies primitive
// key types, and rejects anything else (objects, symbols, functions) whose
// string conversion could be expensive or attacker-controlled (DoS).
function safeKey(key) {
	switch (typeof key) {
		case 'string':
			return key === '__proto__' ? '__proto_' : key
		case 'number':
		case 'boolean':
		case 'bigint':
			return key.toString()
	}
	if (key == null)
		return key + ''
	throw new Error('Invalid property name type ' + typeof key)
}
550 |
// String readers default to the pure-JS implementation; setExtractor (below)
// swaps in native-accelerated versions when a native extractor is installed.
let readFixedString = readStringJS
let readString8 = readStringJS
let readString16 = readStringJS
let readString32 = readStringJS

// true once a native string extractor has been installed via setExtractor
export let isNativeAccelerationEnabled = false
// Installs a native string extractor (e.g. from cbor-extract). The extractor
// decodes a batch of strings in one native call; the readers created here then
// hand strings out of that batch one at a time via the module-level
// `strings`/`stringPosition` state, falling back to readStringJS when needed.
export function setExtractor(extractStrings) {
	isNativeAccelerationEnabled = true
	readFixedString = readString(1)
	readString8 = readString(2)
	readString16 = readString(3)
	readString32 = readString(5)
	function readString(headerLength) {
		return function readString(length) {
			// first try to consume the next string from the current extracted batch
			let string = strings[stringPosition++]
			if (string == null) {
				if (bundledStrings)
					return readStringJS(length)
				let extraction = extractStrings(position, srcEnd, length, src)
				if (typeof extraction == 'string') {
					// single string extracted; no batch to iterate
					string = extraction
					strings = EMPTY_ARRAY
				} else {
					strings = extraction
					stringPosition = 1
					srcStringEnd = 1 // even if a utf-8 string was decoded, must indicate we are in the midst of extracted strings and can't skip strings
					string = strings[0]
					if (string === undefined)
						throw new Error('Unexpected end of buffer')
				}
			}
			let srcStringLength = string.length
			if (srcStringLength <= length) {
				position += length
				return string
			}
			// extracted string is longer than this read; record it so subsequent
			// reads can slice out of it (see the major-type-3 path in read())
			srcString = string
			srcStringStart = position
			srcStringEnd = position + srcStringLength
			position += length
			return string.slice(0, length) // we know we just want the beginning
		}
	}
}
// Pure-JS text-string decoder. Uses the short-string fast path when possible,
// TextDecoder for longer strings, and an inline UTF-8 -> UTF-16 conversion loop
// otherwise (faster than TextDecoder for small inputs).
function readStringJS(length) {
	let result
	if (length < 16) {
		// assignment in condition: shortStringInJS returns undefined on non-ASCII
		if (result = shortStringInJS(length))
			return result
	}
	if (length > 64 && decoder)
		return decoder.decode(src.subarray(position, position += length))
	const end = position + length
	const units = []
	result = ''
	while (position < end) {
		const byte1 = src[position++]
		if ((byte1 & 0x80) === 0) {
			// 1 byte
			units.push(byte1)
		} else if ((byte1 & 0xe0) === 0xc0) {
			// 2 bytes
			const byte2 = src[position++] & 0x3f
			units.push(((byte1 & 0x1f) << 6) | byte2)
		} else if ((byte1 & 0xf0) === 0xe0) {
			// 3 bytes
			const byte2 = src[position++] & 0x3f
			const byte3 = src[position++] & 0x3f
			units.push(((byte1 & 0x1f) << 12) | (byte2 << 6) | byte3)
		} else if ((byte1 & 0xf8) === 0xf0) {
			// 4 bytes
			const byte2 = src[position++] & 0x3f
			const byte3 = src[position++] & 0x3f
			const byte4 = src[position++] & 0x3f
			let unit = ((byte1 & 0x07) << 0x12) | (byte2 << 0x0c) | (byte3 << 0x06) | byte4
			if (unit > 0xffff) {
				// encode astral code point as a UTF-16 surrogate pair
				unit -= 0x10000
				units.push(((unit >>> 10) & 0x3ff) | 0xd800)
				unit = 0xdc00 | (unit & 0x3ff)
			}
			units.push(unit)
		} else {
			// invalid lead byte; pass through as-is
			units.push(byte1)
		}

		// flush periodically to stay under the engine's argument-count limit
		if (units.length >= 0x1000) {
			result += fromCharCode.apply(String, units)
			units.length = 0
		}
	}

	if (units.length > 0) {
		result += fromCharCode.apply(String, units)
	}

	return result
}
// cached for the hot string-building paths below
let fromCharCode = String.fromCharCode
// ASCII fast path for strings of 16 or more bytes: collects char codes until a
// non-ASCII byte appears, in which case the read position is rewound and
// undefined is returned so the caller falls back to the full UTF-8 decoder.
function longStringInJS(length) {
	const rewindTo = position
	const charCodes = new Array(length)
	for (let index = 0; index < length; index++) {
		const byte = src[position++]
		if (byte & 0x80) {
			position = rewindTo
			return
		}
		charCodes[index] = byte
	}
	return fromCharCode.apply(String, charCodes)
}
// Fast path for decoding short (< 16 byte) ASCII-only strings without any
// decoder object. Bytes are read in hand-unrolled groups of up to four; if any
// byte has the high bit set (non-ASCII), the position is rewound to where this
// function started and undefined is returned so the caller can fall back to
// the full UTF-8 decoder. The unrolling is deliberate for speed on this hot path.
function shortStringInJS(length) {
	if (length < 4) {
		if (length < 2) {
			if (length === 0)
				return ''
			else {
				let a = src[position++]
				// (a & 0x80) is 0 or 0x80, so > 1 is equivalent to a non-ASCII test
				if ((a & 0x80) > 1) {
					position -= 1
					return
				}
				return fromCharCode(a)
			}
		} else {
			let a = src[position++]
			let b = src[position++]
			if ((a & 0x80) > 0 || (b & 0x80) > 0) {
				position -= 2
				return
			}
			if (length < 3)
				return fromCharCode(a, b)
			let c = src[position++]
			if ((c & 0x80) > 0) {
				position -= 3
				return
			}
			return fromCharCode(a, b, c)
		}
	} else {
		// read the first group of four, then top up per remaining length
		let a = src[position++]
		let b = src[position++]
		let c = src[position++]
		let d = src[position++]
		if ((a & 0x80) > 0 || (b & 0x80) > 0 || (c & 0x80) > 0 || (d & 0x80) > 0) {
			position -= 4
			return
		}
		if (length < 6) {
			if (length === 4)
				return fromCharCode(a, b, c, d)
			else {
				let e = src[position++]
				if ((e & 0x80) > 0) {
					position -= 5
					return
				}
				return fromCharCode(a, b, c, d, e)
			}
		} else if (length < 8) {
			let e = src[position++]
			let f = src[position++]
			if ((e & 0x80) > 0 || (f & 0x80) > 0) {
				position -= 6
				return
			}
			if (length < 7)
				return fromCharCode(a, b, c, d, e, f)
			let g = src[position++]
			if ((g & 0x80) > 0) {
				position -= 7
				return
			}
			return fromCharCode(a, b, c, d, e, f, g)
		} else {
			let e = src[position++]
			let f = src[position++]
			let g = src[position++]
			let h = src[position++]
			if ((e & 0x80) > 0 || (f & 0x80) > 0 || (g & 0x80) > 0 || (h & 0x80) > 0) {
				position -= 8
				return
			}
			if (length < 10) {
				if (length === 8)
					return fromCharCode(a, b, c, d, e, f, g, h)
				else {
					let i = src[position++]
					if ((i & 0x80) > 0) {
						position -= 9
						return
					}
					return fromCharCode(a, b, c, d, e, f, g, h, i)
				}
			} else if (length < 12) {
				let i = src[position++]
				let j = src[position++]
				if ((i & 0x80) > 0 || (j & 0x80) > 0) {
					position -= 10
					return
				}
				if (length < 11)
					return fromCharCode(a, b, c, d, e, f, g, h, i, j)
				let k = src[position++]
				if ((k & 0x80) > 0) {
					position -= 11
					return
				}
				return fromCharCode(a, b, c, d, e, f, g, h, i, j, k)
			} else {
				let i = src[position++]
				let j = src[position++]
				let k = src[position++]
				let l = src[position++]
				if ((i & 0x80) > 0 || (j & 0x80) > 0 || (k & 0x80) > 0 || (l & 0x80) > 0) {
					position -= 12
					return
				}
				if (length < 14) {
					if (length === 12)
						return fromCharCode(a, b, c, d, e, f, g, h, i, j, k, l)
					else {
						let m = src[position++]
						if ((m & 0x80) > 0) {
							position -= 13
							return
						}
						return fromCharCode(a, b, c, d, e, f, g, h, i, j, k, l, m)
					}
				} else {
					let m = src[position++]
					let n = src[position++]
					if ((m & 0x80) > 0 || (n & 0x80) > 0) {
						position -= 14
						return
					}
					if (length < 15)
						return fromCharCode(a, b, c, d, e, f, g, h, i, j, k, l, m, n)
					let o = src[position++]
					if ((o & 0x80) > 0) {
						position -= 15
						return
					}
					return fromCharCode(a, b, c, d, e, f, g, h, i, j, k, l, m, n, o)
				}
			}
		}
	}
}
801 |
// Reads `length` raw bytes (CBOR byte string). With copyBuffers enabled the
// bytes are copied into a fresh buffer; otherwise a zero-copy view into the
// source buffer is returned.
function readBin(length) {
	const start = position
	position += length
	if (currentDecoder.copyBuffers)
		// specifically use the copying slice (not the node one)
		return Uint8Array.prototype.slice.call(src, start, position)
	return src.subarray(start, position)
}
// Reads a one-byte extension type followed by `length` payload bytes and
// dispatches to the registered extension handler with the raw payload view.
function readExt(length) {
	const type = src[position++]
	const handler = currentExtensions[type]
	if (!handler)
		throw new Error('Unknown extension type ' + type)
	return handler(src.subarray(position, position += length))
}
// Scratch buffers for widening a half-precision float into a float32 (see getFloat16)
let f32Array = new Float32Array(1)
let u8Array = new Uint8Array(f32Array.buffer, 0, 4)
// Decodes an IEEE 754 half-precision (16-bit) float from the next two source
// bytes by rebuilding it as a float32 in the scratch buffers above.
// NOTE(review): the u8Array index order (3 = most significant) assumes a
// little-endian host — confirm for big-endian targets.
function getFloat16() {
	let byte0 = src[position++]
	let byte1 = src[position++]
	let exponent = (byte0 & 0x7f) >> 2;
	if (exponent === 0x1f) { // specials
		// any mantissa bit set means NaN; otherwise signed infinity
		if (byte1 || (byte0 & 3))
			return NaN;
		return (byte0 & 0x80) ? -Infinity : Infinity;
	}
	if (exponent === 0) { // sub-normals
		// significand with 10 fractional bits and divided by 2^14
		let abs = (((byte0 & 3) << 8) | byte1) / (1 << 24)
		return (byte0 & 0x80) ? -abs : abs
	}

	// normal numbers: repack sign/exponent/mantissa into float32 form
	u8Array[3] = (byte0 & 0x80) | // sign bit
		((exponent >> 1) + 56) // 4 of 5 of the exponent bits, re-offset-ed
	u8Array[2] = ((byte0 & 7) << 5) | // last exponent bit and first two mantissa bits
		(byte1 >> 3) // next 5 bits of mantissa
	u8Array[1] = byte1 << 5; // last three bits of mantissa
	u8Array[0] = 0;
	return f32Array[0];
}
841 |
// Cache of recently decoded short property-key strings, indexed by a hash of
// the key's length and first bytes, to avoid re-decoding repeated map keys
let keyCache = new Array(4096)
// Reads a map key. Short text-string keys (header 0x60-0x77, i.e. up to 23
// bytes) go through keyCache; anything else falls back to a standard read().
function readKey() {
	let length = src[position++]
	if (length >= 0x60 && length < 0x78) {
		// short text string (length encoded in the header byte), potentially use key cache
		length = length - 0x60
		if (srcStringEnd >= position) // if it has been extracted, must use it (and faster anyway)
			return srcString.slice(position - srcStringStart, (position += length) - srcStringStart)
		else if (!(srcStringEnd == 0 && srcEnd < 180))
			return readFixedString(length)
	} else { // not cacheable, go back and do a standard read
		position--
		return read()
	}
	// hash the length plus the first two bytes of the key into a cache slot
	let key = ((length << 5) ^ (length > 1 ? dataView.getUint16(position) : length > 0 ? src[position] : 0)) & 0xfff
	let entry = keyCache[key]
	let checkPosition = position
	let end = position + length - 3
	let chunk
	let i = 0
	if (entry && entry.bytes == length) {
		// compare the cached byte sequence: 4 bytes at a time, then the tail bytes
		while (checkPosition < end) {
			chunk = dataView.getUint32(checkPosition)
			if (chunk != entry[i++]) {
				// mismatch: push checkPosition out of range to abort the comparison
				checkPosition = 0x70000000
				break
			}
			checkPosition += 4
		}
		end += 3
		while (checkPosition < end) {
			chunk = src[checkPosition++]
			if (chunk != entry[i++]) {
				checkPosition = 0x70000000
				break
			}
		}
		if (checkPosition === end) {
			// cache hit: reuse the previously decoded string
			position = checkPosition
			return entry.string
		}
		end -= 3
		checkPosition = position
	}
	// cache miss: record the raw bytes, then decode and cache the string
	entry = []
	keyCache[key] = entry
	entry.bytes = length
	while (checkPosition < end) {
		chunk = dataView.getUint32(checkPosition)
		entry.push(chunk)
		checkPosition += 4
	}
	end += 3
	while (checkPosition < end) {
		chunk = src[checkPosition++]
		entry.push(chunk)
	}
	// for small blocks, avoiding the overhead of the extract call is helpful
	let string = length < 16 ? shortStringInJS(length) : longStringInJS(length)
	if (string != null)
		return entry.string = string
	return entry.string = readFixedString(length)
}
905 |
/**
 * Wrapper for a CBOR tagged item (major type 6) that has no registered
 * extension handler, preserving both the tag number and its content so the
 * value can round-trip through re-encoding.
 */
export class Tag {
	/**
	 * @param value the decoded content of the tagged item
	 * @param tag the numeric CBOR tag
	 */
	constructor(value, tag) {
		this.value = value
		this.tag = tag
	}
}
912 |
// tag 0: standard date/time string (RFC 8949)
currentExtensions[0] = (dateString) => {
	// string date extension
	return new Date(dateString)
}

// tag 1: epoch-based date/time in seconds (possibly fractional)
currentExtensions[1] = (epochSec) => {
	// numeric date extension
	return new Date(Math.round(epochSec * 1000))
}
922 |
// tag 2: unsigned bignum — big-endian byte string folded into a BigInt
currentExtensions[2] = (buffer) => {
	// bigint extension
	let value = BigInt(0)
	for (let i = 0, l = buffer.byteLength; i < l; i++) {
		value = BigInt(buffer[i]) + (value << BigInt(8))
	}
	return value
}

// tag 3: negative bignum, defined as -1 - n
currentExtensions[3] = (buffer) => {
	// negative bigint extension
	return BigInt(-1) - currentExtensions[2](buffer)
}
// tag 4: decimal fraction [exponent, mantissa] -> mantissa * 10^exponent
currentExtensions[4] = (fraction) => {
	// best to reparse to maintain accuracy
	return +(fraction[1] + 'e' + fraction[0])
}

// tag 5: bigfloat [exponent, mantissa] -> mantissa * 2^exponent
currentExtensions[5] = (fraction) => {
	// 2 ** e is exact for integer exponents in range, unlike the previous
	// Math.exp(e * Math.log(2)) form, which accumulated rounding error
	return fraction[1] * 2 ** fraction[0]
}
945 |
// the registration of the record definition extension
// Registers a record structure (array of property keys) under id - 0xe000.
// If it displaces a shared structure, the original is saved in
// restoreStructures so it can be reinstated after this decode completes.
const recordDefinition = (id, structure) => {
	id = id - 0xe000
	let existingStructure = currentStructures[id]
	if (existingStructure && existingStructure.isShared) {
		(currentStructures.restoreStructures || (currentStructures.restoreStructures = []))[id] = existingStructure
	}
	currentStructures[id] = structure

	structure.read = createStructureReader(structure)
}
// legacy inline record tag: data is [id, structureKeys, ...propertyValues]
currentExtensions[LEGACY_RECORD_INLINE_ID] = (data) => {
	let length = data.length
	let structure = data[1]
	recordDefinition(data[0], structure)
	let object = {}
	// values start at index 2, paired positionally with the structure's keys
	for (let i = 2; i < length; i++) {
		let key = structure[i - 2]
		object[safeKey(key)] = data[i]
	}
	return object
}
// tags 14/15: slices of the two active bundled-string buffers (14 -> buffer 0,
// 15 -> buffer 1); outside a bundle they are preserved as opaque tags
currentExtensions[14] = (value) => {
	if (bundledStrings)
		return bundledStrings[0].slice(bundledStrings.position0, bundledStrings.position0 += value)
	return new Tag(value, 14)
}
currentExtensions[15] = (value) => {
	if (bundledStrings)
		return bundledStrings[1].slice(bundledStrings.position1, bundledStrings.position1 += value)
	return new Tag(value, 15)
}
// whitelist of constructors that tag 27 may instantiate
let glbl = { Error, RegExp }
currentExtensions[27] = (data) => { // http://cbor.schmorp.de/generic-object
	// data is [constructorName, ...args]; unknown names fall back to Error
	return (glbl[data[0]] || Error)(data[1], data[2])
}
// tag 51 (http://cbor.schmorp.de/packed): a packed-values table. Must be
// followed by a 4-element array of [values, prefixes, suffixes, rump]; the new
// values are merged into any active packedValues before decoding the rump.
const packedTable = (read) => {
	// 0x84 is the header byte for a 4-element array
	if (src[position++] != 0x84) {
		let error = new Error('Packed values structure must be followed by a 4 element array')
		if (src.length < position)
			error.incomplete = true
		throw error
	}
	let newPackedValues = read() // packed values
	if (!newPackedValues || !newPackedValues.length) {
		let error = new Error('Packed values structure must be followed by a 4 element array')
		error.incomplete = true
		throw error
	}
	// new values shadow the front of any existing table
	packedValues = packedValues ? newPackedValues.concat(packedValues.slice(newPackedValues.length)) : newPackedValues
	packedValues.prefixes = read()
	packedValues.suffixes = read()
	return read() // read the rump
}
packedTable.handlesRead = true
currentExtensions[51] = packedTable
1002 |
// packed reference: an integer index into the packedValues table (after the
// 16 reserved private slots; non-negative ints map to even slots, negative to odd)
currentExtensions[PACKED_REFERENCE_TAG_ID] = (data) => { // packed reference
	if (!packedValues) {
		if (currentDecoder.getShared)
			loadShared()
		else
			return new Tag(data, PACKED_REFERENCE_TAG_ID)
	}
	if (typeof data == 'number')
		return packedValues[16 + (data >= 0 ? 2 * data : (-2 * data - 1))]
	let error = new Error('No support for non-integer packed references yet')
	if (data === undefined)
		error.incomplete = true
	throw error
}
1017 |
1018 | // The following code is an incomplete implementation of http://cbor.schmorp.de/stringref
1019 | // the real thing would need to implemennt more logic to populate the stringRefs table and
1020 | // maintain a stack of stringRef "namespaces".
1021 | //
1022 | // currentExtensions[25] = (id) => {
1023 | // return stringRefs[id]
1024 | // }
1025 | // currentExtensions[256] = (read) => {
1026 | // stringRefs = []
1027 | // try {
1028 | // return read()
1029 | // } finally {
1030 | // stringRefs = null
1031 | // }
1032 | // }
1033 | // currentExtensions[256].handlesRead = true
1034 |
// tag 28: marks a value that may be referenced again (by tag 29) so that
// cyclic object graphs from structured clones can be reconstructed
currentExtensions[28] = (read) => {
	// shareable http://cbor.schmorp.de/value-sharing (for structured clones)
	if (!referenceMap) {
		referenceMap = new Map()
		referenceMap.id = 0
	}
	let id = referenceMap.id++
	let startingPosition = position
	let token = src[position]
	let target
	// TODO: handle Maps, Sets, and other types that can cycle; this is complicated, because you potentially need to read
	// ahead past references to record structure definitions
	// peek at the major type to pick an array or object placeholder
	if ((token >> 5) == 4)
		target = []
	else
		target = {}

	let refEntry = { target } // a placeholder object
	referenceMap.set(id, refEntry)
	let targetProperties = read() // read the next value as the target object to id
	if (refEntry.used) {// there is a cycle, so we have to assign properties to original target
		if (Object.getPrototypeOf(target) !== Object.getPrototypeOf(targetProperties)) {
			// this means that the returned target does not match the targetProperties, so we need rerun the read to
			// have the correctly create instance be assigned as a reference, then we do the copy the properties back to the
			// target
			// reset the position so that the read can be repeated
			position = startingPosition
			// the returned instance is our new target for references
			target = targetProperties
			referenceMap.set(id, { target })
			targetProperties = read()
		}
		return Object.assign(target, targetProperties)
	}
	refEntry.target = targetProperties // the placeholder wasn't used, replace with the deserialized one
	return targetProperties // no cycle, can just use the returned read object
}
currentExtensions[28].handlesRead = true
1073 |
// tag 29: reference to a value previously marked shareable by tag 28
currentExtensions[29] = (id) => {
	// sharedref http://cbor.schmorp.de/value-sharing (for structured clones)
	let refEntry = referenceMap.get(id)
	refEntry.used = true // signals tag 28 that a cycle exists for this entry
	return refEntry.target
}
1080 |
// tag 258: decode the following array as a Set
currentExtensions[258] = (array) => new Set(array); // https://github.com/input-output-hk/cbor-sets-spec/blob/master/CBOR_SETS.md
(currentExtensions[259] = (read) => {
	// https://github.com/shanewholloway/js-cbor-codec/blob/master/docs/CBOR-259-spec
	// for decoding as a standard Map
	if (currentDecoder.mapsAsObjects) {
		// force Map decoding for the next map only; the map reader restores the
		// flag via restoreMapsAsObject
		currentDecoder.mapsAsObjects = false
		restoreMapsAsObject = true
	}
	return read()
}).handlesRead = true
// Joins a packed prefix/suffix with its payload: string concatenation for
// strings, array concatenation for arrays, and a shallow merge (b wins) for
// anything else.
function combine(a, b) {
	if (typeof a === 'string')
		return a + b
	return (a instanceof Array) ? a.concat(b) : Object.assign({}, a, b)
}
// Returns the active packed-values table, loading shared state first if a
// getShared callback is configured; throws when no table is available.
function getPackedValues() {
	if (!packedValues) {
		if (currentDecoder.getShared)
			loadShared()
		else
			throw new Error('No packed values available')
	}
	return packedValues
}
const SHARED_DATA_TAG_ID = 0x53687264 // ascii 'Shrd'
// Range-based handler for packed prefix/suffix tags (http://cbor.schmorp.de/packed)
// plus the shared-data and self-describe tags; returns undefined for tags it
// does not recognize so read() can fall through to a generic Tag.
currentExtensionRanges.push((tag, input) => {
	// prefix ranges of increasing width
	if (tag >= 225 && tag <= 255)
		return combine(getPackedValues().prefixes[tag - 224], input)
	if (tag >= 28704 && tag <= 32767)
		return combine(getPackedValues().prefixes[tag - 28672], input)
	if (tag >= 1879052288 && tag <= 2147483647)
		return combine(getPackedValues().prefixes[tag - 1879048192], input)
	// suffix ranges
	if (tag >= 216 && tag <= 223)
		return combine(input, getPackedValues().suffixes[tag - 216])
	if (tag >= 27647 && tag <= 28671)
		return combine(input, getPackedValues().suffixes[tag - 27639])
	if (tag >= 1811940352 && tag <= 1879048191)
		return combine(input, getPackedValues().suffixes[tag - 1811939328])
	if (tag == SHARED_DATA_TAG_ID) {// we do a special check for this so that we can keep the currentExtensions as densely stored array (v8 stores arrays densely under about 3000 elements)
		return {
			packedValues: packedValues,
			structures: currentStructures.slice(0),
			version: input,
		}
	}
	if (tag == 55799) // self-descriptive CBOR tag, just return input value
		return input
})
1131 |
// detect host endianness; typed-array extension tags come in big/little-endian pairs
const isLittleEndianMachine = new Uint8Array(new Uint16Array([1]).buffer)[0] == 1
// placeholder { name } objects stand in where BigInt64/BigUint64 are unsupported
export const typedArrays = [Uint8Array, Uint8ClampedArray, Uint16Array, Uint32Array,
	typeof BigUint64Array == 'undefined' ? { name:'BigUint64Array' } : BigUint64Array, Int8Array, Int16Array, Int32Array,
	typeof BigInt64Array == 'undefined' ? { name:'BigInt64Array' } : BigInt64Array, Float32Array, Float64Array]
// little-endian tag numbers for the arrays above (big-endian is registered as tag - 4)
const typedArrayTags = [64, 68, 69, 70, 71, 72, 77, 78, 79, 85, 86]
for (let i = 0; i < typedArrays.length; i++) {
	registerTypedArray(typedArrays[i], typedArrayTags[i])
}
function registerTypedArray(TypedArray, tag) {
	// Register decoders for one typed-array tag pair: `tag` is the
	// little-endian variant and `tag - 4` the big-endian variant
	// (tag spacing per RFC 8746 — confirm against the tag registry).
	// e.g. 'Uint16Array' -> 'getUint16', the matching DataView accessor name
	let dvMethod = 'get' + TypedArray.name.slice(0, -5)
	let bytesPerElement;
	if (typeof TypedArray === 'function')
		bytesPerElement = TypedArray.BYTES_PER_ELEMENT;
	else
		TypedArray = null; // placeholder object: constructor unavailable on this platform
	for (let littleEndian = 0; littleEndian < 2; littleEndian++) {
		if (!littleEndian && bytesPerElement == 1)
			continue // single-byte types have no distinct big-endian tag
		let sizeShift = bytesPerElement == 2 ? 1 : bytesPerElement == 4 ? 2 : bytesPerElement == 8 ? 3 : 0
		// Fast path when the stored endianness matches the machine (or elements
		// are single bytes); otherwise decode element-by-element via DataView.
		currentExtensions[littleEndian ? tag : (tag - 4)] = (bytesPerElement == 1 || littleEndian == isLittleEndianMachine) ? (buffer) => {
			if (!TypedArray)
				throw new Error('Could not find typed array for code ' + tag)
			if (!currentDecoder.copyBuffers) {
				// try provide a direct view, but will only work if we are byte-aligned
				if (bytesPerElement === 1 ||
					bytesPerElement === 2 && !(buffer.byteOffset & 1) ||
					bytesPerElement === 4 && !(buffer.byteOffset & 3) ||
					bytesPerElement === 8 && !(buffer.byteOffset & 7))
					return new TypedArray(buffer.buffer, buffer.byteOffset, buffer.byteLength >> sizeShift);
			}
			// we have to slice/copy here to get a new ArrayBuffer, if we are not word/byte aligned
			return new TypedArray(Uint8Array.prototype.slice.call(buffer, 0).buffer)
		} : buffer => {
			if (!TypedArray)
				throw new Error('Could not find typed array for code ' + tag)
			// byte-swapping path: DataView accessors honor the requested endianness
			let dv = new DataView(buffer.buffer, buffer.byteOffset, buffer.byteLength)
			let elements = buffer.length >> sizeShift
			let ta = new TypedArray(elements)
			let method = dv[dvMethod]
			for (let i = 0; i < elements; i++) {
				ta[i] = method.call(dv, i << sizeShift, littleEndian)
			}
			return ta
		}
	}
}
1178 |
function readBundleExt() {
	// Decode the string-bundle extension: strings for a structure are stored
	// up-front in two blocks that later reads index into via bundledStrings
	// (presumably one block per string encoding — confirm against the
	// encoder's bundle writer).
	let length = readJustLength()
	// the first value is the relative offset to the bundle's string data
	let bundlePosition = position + read()
	for (let i = 2; i < length; i++) {
		// skip past bundles that were already read
		let bundleLength = readJustLength() // this will increment position, so must add to position afterwards
		position += bundleLength
	}
	let dataPosition = position
	// jump to the string blocks, read both, then resume at the data
	position = bundlePosition
	bundledStrings = [readStringJS(readJustLength()), readStringJS(readJustLength())]
	bundledStrings.position0 = 0 // consumption offsets into each string block
	bundledStrings.position1 = 0
	bundledStrings.postBundlePosition = position
	position = dataPosition
	return read()
}
1196 |
function readJustLength() {
	// Read a CBOR length header: the low 5 bits of the initial byte, or a
	// 1/2/4-byte extended count for tokens 0x18-0x1a. Advances `position`.
	let token = src[position++] & 0x1f
	if (token <= 0x17)
		return token
	if (token === 0x18)
		return src[position++]
	if (token === 0x19) {
		let length = dataView.getUint16(position)
		position += 2
		return length
	}
	if (token === 0x1a) {
		let length = dataView.getUint32(position)
		position += 4
		return length
	}
	return token // 0x1b-0x1f: returned unchanged, as in the original switch's fall-through
}
1216 |
function loadShared() {
	// Pull shared structures/packed values from the decoder's getShared hook.
	if (!currentDecoder.getShared)
		return
	let sharedData = saveState(() => {
		// save the state in case getShared modifies our buffer
		src = null
		return currentDecoder.getShared()
	}) || {}
	let updatedStructures = sharedData.structures || []
	currentDecoder.sharedVersion = sharedData.version
	packedValues = currentDecoder.sharedValues = sharedData.packedValues
	if (currentStructures === true)
		currentDecoder.structures = currentStructures = updatedStructures
	else
		// replace the leading entries in-place so existing references stay valid
		currentStructures.splice(0, updatedStructures.length, ...updatedStructures)
}
1233 |
function saveState(callback) {
	// Snapshot all module-level parser state, run `callback`, then restore the
	// state, so that user code invoked mid-decode (e.g. getShared) cannot
	// corrupt the in-progress parse. Returns the callback's result.
	let savedSrcEnd = srcEnd
	let savedPosition = position
	let savedStringPosition = stringPosition
	let savedSrcStringStart = srcStringStart
	let savedSrcStringEnd = srcStringEnd
	let savedSrcString = srcString
	let savedStrings = strings
	let savedReferenceMap = referenceMap
	let savedBundledStrings = bundledStrings

	// TODO: We may need to revisit this if we do more external calls to user code (since it could be slow)
	let savedSrc = new Uint8Array(src.slice(0, srcEnd)) // we copy the data in case it changes while external data is processed
	let savedStructures = currentStructures
	let savedDecoder = currentDecoder
	let savedSequentialMode = sequentialMode
	let value = callback()
	// restore every saved variable, in the same grouping as the saves above
	srcEnd = savedSrcEnd
	position = savedPosition
	stringPosition = savedStringPosition
	srcStringStart = savedSrcStringStart
	srcStringEnd = savedSrcStringEnd
	srcString = savedSrcString
	strings = savedStrings
	referenceMap = savedReferenceMap
	bundledStrings = savedBundledStrings
	src = savedSrc
	sequentialMode = savedSequentialMode
	currentStructures = savedStructures
	currentDecoder = savedDecoder
	// src is now the defensive copy, so the DataView must be rebuilt over it
	dataView = new DataView(src.buffer, src.byteOffset, src.byteLength)
	return value
}
export function clearSource() {
	// Drop references to the source buffer and decoding state so they can be
	// garbage-collected.
	src = referenceMap = currentStructures = null
}
1272 |
export function addExtension(extension) {
	// Register a user-supplied decode function for a single CBOR tag.
	let { tag, decode } = extension
	currentExtensions[tag] = decode
}
1276 |
export function setSizeLimits(limits) {
	// Apply whichever limits were provided (truthy), keeping the current
	// value for any that were omitted.
	maxMapSize = limits.maxMapSize || maxMapSize;
	maxArraySize = limits.maxArraySize || maxArraySize;
	maxObjectSize = limits.maxObjectSize || maxObjectSize;
}
1282 |
// Table matching a float32's 8-bit binary exponent to the power-of-ten
// multiplier used for significant-digit rounding (see roundFloat32, which
// indexes this with the full 0-255 exponent). 0.30103 ~= log10(2).
// Note: previously declared as new Array(147) — a stale length, since the
// loop below populates (and roundFloat32 reads) all 256 entries.
export const mult10 = new Array(256)
for (let i = 0; i < 256; i++) {
	mult10[i] = +('1e' + Math.floor(45.15 - i * 0.30103))
}
// Module-level convenience decoder: plain objects/maps, no record structures.
let defaultDecoder = new Decoder({ useRecords: false })
export const { decode, decodeMultiple } = defaultDecoder
// Values for the useFloat32 option. NOTE(review): these presumably mirror the
// encoder's FLOAT32_OPTIONS (value 2 is unassigned here) — confirm against
// the encoder module.
export const FLOAT32_OPTIONS = {
	NEVER: 0,
	ALWAYS: 1,
	DECIMAL_ROUND: 3,
	DECIMAL_FIT: 4
}
export function roundFloat32(float32Number) {
	// Round a number at the decimal precision a 32-bit float can faithfully
	// carry, using the float's binary exponent to pick a power-of-ten
	// multiplier from the mult10 table.
	f32Array[0] = float32Number // write through the shared float32 view
	// reassemble the 8 exponent bits from the two high bytes of the float
	// (byte order here assumes a little-endian machine — TODO confirm)
	let exponentBits = ((u8Array[3] & 0x7f) << 1) | (u8Array[2] >> 7)
	let multiplier = mult10[exponentBits]
	// scale, round half away from zero, truncate to int32, and scale back
	let scaled = multiplier * float32Number + (float32Number > 0 ? 0.5 : -0.5)
	return (scaled >> 0) / multiplier
}
1301 |
--------------------------------------------------------------------------------