├── .evergreen ├── config.yml ├── install-dependencies.sh ├── run-prebuild.sh └── run-tests.sh ├── .github ├── ISSUE_TEMPLATE │ ├── Issue_template.md │ └── config.yml └── pull_request_template.md ├── .gitignore ├── .npmignore ├── HISTORY.md ├── LICENSE ├── Makefile ├── README.md ├── benchmarks └── serialization_benchmark.js ├── binding.gyp ├── lib └── index.js ├── package-lock.json ├── package.json ├── src ├── bson.cc ├── bson.h └── utf8decoder.h └── test ├── binary_parser.js ├── node ├── bson_array_test.js ├── bson_compliance_test.js ├── bson_corpus_tests.js ├── bson_test.js ├── compliance │ ├── corrupt.js │ └── valid.js ├── data │ ├── mongodump.airpair.tags.bson │ ├── test.bson │ └── test_gs_weird_bug.png ├── decimal128_tests.js ├── detect_cyclic_dep_tests.js ├── map_tests.js ├── null_byte_test.js ├── number_test.js ├── object_id_tests.js ├── promote_values_test.js ├── serialize_with_buffer_tests.js ├── specs │ └── bson-corpus │ │ ├── README.md │ │ ├── array.json │ │ ├── binary.json │ │ ├── boolean.json │ │ ├── bsonview │ │ ├── code.json │ │ ├── code_w_scope.json │ │ ├── datetime.json │ │ ├── dbpointer.json │ │ ├── dbref.json │ │ ├── decimal128-1.json │ │ ├── decimal128-2.json │ │ ├── decimal128-3.json │ │ ├── decimal128-4.json │ │ ├── decimal128-5.json │ │ ├── decimal128-6.json │ │ ├── decimal128-7.json │ │ ├── document.json │ │ ├── double.json │ │ ├── int32.json │ │ ├── int64.json │ │ ├── maxkey.json │ │ ├── minkey.json │ │ ├── multi-type-deprecated.json │ │ ├── multi-type.json │ │ ├── null.json │ │ ├── oid.json │ │ ├── regex.json │ │ ├── string.json │ │ ├── symbol.json │ │ ├── timestamp.json │ │ ├── top.json │ │ └── undefined.json ├── test_full_bson.js ├── to_bson_test.js └── tools │ └── utils.js ├── scripts └── test.cmd └── utils.js /.evergreen/config.yml: -------------------------------------------------------------------------------- 1 | # Run previous commits to pinpoint a failure's origin. 2 | stepback: true 3 | 4 | # Mark failures other than test failures with a purple box. 5 | command_type: system 6 | 7 | # Limit maximum test running time. 
8 | exec_timeout_secs: 900 # 15 minutes 9 | 10 | # What to do when evergreen hits the timeout 11 | timeout: 12 | - command: shell.exec 13 | params: 14 | script: | 15 | ls -la 16 | 17 | functions: 18 | fetch source: 19 | - command: git.get_project 20 | params: 21 | directory: src 22 | - command: shell.exec 23 | params: 24 | working_dir: src 25 | script: | 26 | # Get the current unique version of this checkout 27 | if [ "${is_patch}" = "true" ]; then 28 | CURRENT_VERSION=$(git describe)-patch-${version_id} 29 | else 30 | CURRENT_VERSION=latest 31 | fi 32 | export PROJECT_DIRECTORY="$(pwd)" 33 | if [ "Windows_NT" = "$OS" ]; then 34 | export PROJECT_DIRECTORY=$(cygpath -m $PROJECT_DIRECTORY) 35 | fi 36 | 37 | # get the latest version of node for given major version 38 | NODE_VERSION=$(curl -sL nodejs.org/download/release/latest-v${NODE_MAJOR_VERSION}.x/SHASUMS256.txt -o - | head -n 1 | tr -s ' ' | cut -d' ' -f2 | cut -d- -f2 | cut -dv -f2) 39 | echo "LATEST NODE ${NODE_MAJOR_VERSION}.x = $NODE_VERSION" 40 | 41 | cat <<EOT > expansion.yml 42 | CURRENT_VERSION: "$CURRENT_VERSION" 43 | PROJECT_DIRECTORY: "$PROJECT_DIRECTORY" 44 | NODE_VERSION: "$NODE_VERSION" 45 | PREPARE_SHELL: | 46 | set -o errexit 47 | set -o xtrace 48 | export PROJECT_DIRECTORY="$PROJECT_DIRECTORY" 49 | export PROJECT="${project}" 50 | export NODE_VERSION="$NODE_VERSION" 51 | EOT 52 | # See what we've done 53 | cat expansion.yml 54 | - command: expansions.update 55 | params: 56 | file: src/expansion.yml 57 | install dependencies: 58 | - command: shell.exec 59 | type: setup 60 | params: 61 | working_dir: src 62 | script: | 63 | ${PREPARE_SHELL} 64 | echo "NODE_VERSION=${NODE_VERSION}" 65 | NODE_VERSION=${NODE_VERSION} ${PROJECT_DIRECTORY}/.evergreen/install-dependencies.sh 66 | run tests: 67 | - command: shell.exec 68 | type: test 69 | params: 70 | working_dir: src 71 | script: | 72 | ${PREPARE_SHELL} 73 | echo "NODE_VERSION=${NODE_VERSION}" 74 | NODE_VERSION=${NODE_VERSION} ${PROJECT_DIRECTORY}/.evergreen/run-tests.sh 75 | run prebuild: 76 | - command: shell.exec 77 | type: test 78 | params: 79 | working_dir: src 80 | script: > 81 | ${PREPARE_SHELL} 82 | NODE_GITHUB_TOKEN=${github_token} ${PROJECT_DIRECTORY}/.evergreen/run-prebuild.sh 83 | run prebuild and force publish: 84 | - command: shell.exec 85 | type: test 86 | params: 87 | working_dir: src 88 | script: > 89 | ${PREPARE_SHELL} 90 | NODE_FORCE_PUBLISH=1 NODE_GITHUB_TOKEN=${github_token} ${PROJECT_DIRECTORY}/.evergreen/run-prebuild.sh 91 | 92 | tasks: 93 | - name: node-tests-v6 94 | tags: ["tests"] 95 | commands: 96 | - func: fetch source 97 | vars: 98 | NODE_MAJOR_VERSION: 6 99 | - func: install dependencies 100 | - func: run tests 101 | - name: node-tests-v8 102 | tags: ["tests"] 103 | commands: 104 | - func: fetch source 105 | vars: 106 | NODE_MAJOR_VERSION: 8 107 | - func: install dependencies 108 | - func: run tests 109 | - name: node-tests-v10 110 | tags: ["tests"] 111 | commands: 112 | - func: fetch source 113 | vars: 114 | NODE_MAJOR_VERSION: 10 115 | - func: install dependencies 116 | - func: run tests 117 | - name: node-tests-v12 118 | tags: ["tests"] 119 | commands: 120 | - func: fetch source 121 | vars: 122 | NODE_MAJOR_VERSION: 12 123 | - func: install dependencies 124 | - func: run tests 125 | - name: node-tests-v14 126 | tags: ["tests"] 127 | commands: 128 | - func: fetch source 129 | vars: 130 | NODE_MAJOR_VERSION: 14 131 | - func: install dependencies 132 | - func: run tests 133 | - name: run-prebuild 134 | tags: ["prebuild"] 135 | commands: 136 | - func: fetch source
137 | vars: 138 | NODE_MAJOR_VERSION: 14 139 | - func: install dependencies 140 | - func: run prebuild 141 | - name: run-prebuild-force-publish 142 | tags: ["force"] 143 | commands: 144 | - func: fetch source 145 | vars: 146 | NODE_MAJOR_VERSION: 14 147 | - func: install dependencies 148 | - func: run prebuild and force publish 149 | 150 | buildvariants: 151 | - name: linux 152 | display_name: Ubuntu 18.04 153 | run_on: ubuntu1804-test 154 | tasks: [".tests", ".prebuild", ".force"] 155 | - name: mac 156 | display_name: MacOS 10.14 157 | run_on: macos-1014 158 | tasks: [".tests", ".prebuild", ".force"] 159 | - name: windows 160 | display_name: Windows 64 161 | run_on: windows-64-vs2017-test 162 | tasks: [".tests", ".prebuild", ".force"] 163 | - name: ubuntu1604-arm64 164 | display_name: "Ubuntu 16.04 arm64" 165 | run_on: ubuntu1604-arm64-build 166 | expansions: 167 | has_packages: true 168 | packager_distro: ubuntu1604 169 | packager_arch: arm64 170 | tasks: [".tests", ".prebuild", ".force"] 171 | - name: suse12-s390x 172 | display_name: "SLES 12 s390x" 173 | run_on: suse12-zseries-test 174 | expansions: 175 | has_packages: true 176 | packager_distro: suse12 177 | packager_arch: s390x 178 | tasks: [".tests", ".prebuild", ".force"] 179 | -------------------------------------------------------------------------------- /.evergreen/install-dependencies.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # set -o xtrace # Write all commands first to stderr 3 | set -o errexit # Exit the script with error if any of the commands fail 4 | 5 | if [ -z "$NODE_VERSION" ]; then 6 | echo "NODE_VERSION environment variable must be specified" 7 | exit 1 8 | fi 9 | 10 | NODE_ARTIFACTS_PATH="${PROJECT_DIRECTORY}/node-artifacts" 11 | NPM_CACHE_DIR="${NODE_ARTIFACTS_PATH}/npm" 12 | NPM_TMP_DIR="${NODE_ARTIFACTS_PATH}/tmp" 13 | 14 | NVM_WINDOWS_URL="https://github.com/coreybutler/nvm-windows/releases/download/1.1.9/nvm-noinstall.zip" 15 | NVM_URL="https://raw.githubusercontent.com/nvm-sh/nvm/v0.38.0/install.sh" 16 | 17 | # this needs to be explicitly exported for the nvm install below 18 | export NVM_DIR="${NODE_ARTIFACTS_PATH}/nvm" 19 | export XDG_CONFIG_HOME=${NODE_ARTIFACTS_PATH} 20 | 21 | # create node artifacts path if needed 22 | mkdir -p ${NVM_DIR} 23 | mkdir -p ${NPM_CACHE_DIR} 24 | mkdir -p "${NPM_TMP_DIR}" 25 | 26 | # install Node.js 27 | echo "--- Installing Node ${NODE_VERSION} --- " 28 | if [ "$OS" == "Windows_NT" ]; then 29 | export NVM_HOME=`cygpath -w "$NVM_DIR"` 30 | export NVM_SYMLINK=`cygpath -w "$NODE_ARTIFACTS_PATH/bin"` 31 | export PATH=`cygpath $NVM_SYMLINK`:`cygpath $NVM_HOME`:$PATH 32 | 33 | # download and install nvm 34 | curl -L $NVM_WINDOWS_URL -o nvm.zip 35 | unzip -d $NVM_DIR nvm.zip 36 | rm nvm.zip 37 | 38 | chmod 777 $NVM_DIR 39 | chmod -R a+rx $NVM_DIR 40 | 41 | cat <<EOT > $NVM_DIR/settings.txt 42 | root: $NVM_HOME 43 | path: $NVM_SYMLINK 44 | EOT 45 | 46 | nvm install ${NODE_VERSION} 47 | npm config set msvs_version 2017 48 | else 49 | curl -o- $NVM_URL | bash 50 | [ -s "${NVM_DIR}/nvm.sh" ] && \. "${NVM_DIR}/nvm.sh"
"${NVM_DIR}/nvm.sh" 51 | 52 | nvm install --no-progress ${NODE_VERSION} 53 | fi 54 | nvm use ${NODE_VERSION} 55 | 56 | # setup npm cache in a local directory 57 | cat < .npmrc 58 | devdir=${NPM_CACHE_DIR}/.node-gyp 59 | init-module=${NPM_CACHE_DIR}/.npm-init.js 60 | cache=${NPM_CACHE_DIR} 61 | tmp=${NPM_TMP_DIR} 62 | registry=https://registry.npmjs.org 63 | EOT 64 | 65 | # install node dependencies 66 | npm install 67 | -------------------------------------------------------------------------------- /.evergreen/run-prebuild.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | set -o errexit # Exit the script with error if any of the commands fail 3 | 4 | get_version_at_git_rev () { 5 | local REV=$1 6 | local VERSION=$(node -r child_process -e "console.log(JSON.parse(child_process.execSync('git show $REV:./package.json', { encoding: 'utf8' })).version);") 7 | echo $VERSION 8 | } 9 | 10 | run_prebuild() { 11 | if [[ -z $NODE_GITHUB_TOKEN ]];then 12 | echo "No github token set. Cannot run prebuild." 13 | exit 1 14 | else 15 | echo "Github token detected. Running prebuild." 16 | npm run prebuild -- -u $NODE_GITHUB_TOKEN 17 | echo "Prebuild's successfully submitted" 18 | fi 19 | } 20 | 21 | NODE_ARTIFACTS_PATH="${PROJECT_DIRECTORY}/node-artifacts" 22 | 23 | if [ "$OS" == "Windows_NT" ]; then 24 | export NVM_HOME=`cygpath -w "$NODE_ARTIFACTS_PATH/nvm"` 25 | export NVM_SYMLINK=`cygpath -w "$NODE_ARTIFACTS_PATH/bin"` 26 | export PATH=`cygpath $NVM_SYMLINK`:`cygpath $NVM_HOME`:$PATH 27 | else 28 | export PATH="/opt/mongodbtoolchain/v2/bin:$PATH" 29 | export NVM_DIR="${NODE_ARTIFACTS_PATH}/nvm" 30 | [ -s "$NVM_DIR/nvm.sh" ] && \. "$NVM_DIR/nvm.sh" 31 | fi 32 | 33 | 34 | VERSION_AT_HEAD=$(get_version_at_git_rev "HEAD") 35 | VERSION_AT_HEAD_1=$(get_version_at_git_rev "HEAD~1") 36 | 37 | if [[ ! -z $NODE_FORCE_PUBLISH ]]; then 38 | echo '$NODE_FORCE_PUBLISH detected' 39 | echo "Beginning prebuild" 40 | run_prebuild 41 | elif [[ $VERSION_AT_HEAD != $VERSION_AT_HEAD_1 ]]; then 42 | echo "Difference is package version ($VERSION_AT_HEAD_1 -> $VERSION_AT_HEAD)" 43 | echo "Beginning prebuild" 44 | run_prebuild 45 | else 46 | echo "No difference is package version ($VERSION_AT_HEAD_1 -> $VERSION_AT_HEAD)" 47 | echo "Will prebuild without submit" 48 | npm run prebuild 49 | echo "Local prebuild successful." 50 | ls ./prebuilds 51 | fi 52 | -------------------------------------------------------------------------------- /.evergreen/run-tests.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # set -o xtrace # Write all commands first to stderr 3 | set -o errexit # Exit the script with error if any of the commands fail 4 | 5 | if [ -z "$NODE_VERSION" ]; then 6 | echo "NODE_VERSION environment variable must be specified" 7 | exit 1 8 | fi 9 | 10 | NODE_ARTIFACTS_PATH="${PROJECT_DIRECTORY}/node-artifacts" 11 | 12 | if [ "$OS" == "Windows_NT" ]; then 13 | export NVM_HOME=`cygpath -w "$NODE_ARTIFACTS_PATH/nvm"` 14 | export NVM_SYMLINK=`cygpath -w "$NODE_ARTIFACTS_PATH/bin"` 15 | export PATH=`cygpath $NVM_SYMLINK`:`cygpath $NVM_HOME`:$PATH 16 | else 17 | export PATH="/opt/mongodbtoolchain/v2/bin:$PATH" 18 | export NVM_DIR="${NODE_ARTIFACTS_PATH}/nvm" 19 | [ -s "$NVM_DIR/nvm.sh" ] && \. 
"$NVM_DIR/nvm.sh" 20 | fi 21 | 22 | npm test 23 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/Issue_template.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Issue 3 | about: Please use JIRA instead 4 | title: '' 5 | labels: '' 6 | assignees: '' 7 | --- 8 | 9 | 17 | 18 | **BSON-EXT Version:** 2.0.x 19 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/config.yml: -------------------------------------------------------------------------------- 1 | --- 2 | blank_issues_enabled: false 3 | contact_links: 4 | - 5 | about: "Please ask and answer usage questions on our Community Forums." 6 | name: Questions 7 | url: "https://developer.mongodb.com/community/forums/tags/c/drivers-odms/7/node-js" 8 | - 9 | about: "Please submit all issues or feature requests to our JIRA." 10 | name: Issues & Feature Requests 11 | url: "https://jira.mongodb.org/browse/NODE" 12 | - 13 | about: "Please check the FAQ before filing new issues" 14 | name: "MongoDB NodeJS FAQ" 15 | url: "https://docs.mongodb.com/drivers/node/faq" 16 | -------------------------------------------------------------------------------- /.github/pull_request_template.md: -------------------------------------------------------------------------------- 1 | ## Description 2 | 3 | **What changed?** 4 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Logs 2 | logs 3 | *.log 4 | 5 | # Runtime data 6 | pids 7 | *.pid 8 | *.seed 9 | 10 | # Directory for instrumented libs generated by jscoverage/JSCover 11 | lib-cov 12 | 13 | # Coverage directory used by tools like istanbul 14 | coverage 15 | 16 | # Grunt intermediate storage (http://gruntjs.com/creating-plugins#storing-task-files) 17 | .grunt 18 | 19 | # Compiled binary addons (http://nodejs.org/api/addons.html) 20 | build/Release 21 | 22 | # Dependency directory 23 | # Commenting this out is preferred by some people, see 24 | # https://www.npmjs.org/doc/misc/npm-faq.html#should-i-check-my-node_modules-folder-into-git- 25 | node_modules 26 | 27 | # Users Environment Variables 28 | .lock-wscript 29 | 30 | .DS_Store 31 | *.swp 32 | *.seed 33 | .project 34 | .settings 35 | ./data 36 | node_modules/ 37 | 38 | output 39 | build 40 | .bin 41 | npm-debug.log 42 | builderror.log 43 | 44 | bson.sublime-project 45 | bson.sublime-workspace 46 | .vagrant/ 47 | 48 | prebuilds/ 49 | 50 | .vscode/ 51 | -------------------------------------------------------------------------------- /.npmignore: -------------------------------------------------------------------------------- 1 | .travis.yml 2 | appveyor.yml 3 | test.bat 4 | build/ 5 | test/ 6 | .vagrant/ 7 | 8 | # Users Environment Variables 9 | .lock-wscript 10 | 11 | .DS_Store 12 | *.swp 13 | *.seed 14 | .project 15 | .settings 16 | ./data 17 | node_modules/ 18 | 19 | # Logs 20 | logs 21 | *.log 22 | 23 | # Runtime data 24 | pids 25 | *.pid 26 | *.seed 27 | 28 | # Directory for instrumented libs generated by jscoverage/JSCover 29 | lib-cov 30 | 31 | # Coverage directory used by tools like istanbul 32 | coverage 33 | 34 | # Grunt intermediate storage (http://gruntjs.com/creating-plugins#storing-task-files) 35 | .grunt 36 | 37 | # Compiled binary addons (http://nodejs.org/api/addons.html) 38 | build/Release 39 | 
-------------------------------------------------------------------------------- /HISTORY.md: -------------------------------------------------------------------------------- 1 | # Changelog 2 | 3 | All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines. 4 | 5 | ### [4.0.3](https://github.com/mongodb-js/bson-ext/compare/v4.0.2...v4.0.3) (2022-09-15) 6 | 7 | 8 | ### Bug Fixes 9 | 10 | * **NODE-4532:** round trip double values consistently ([#89](https://github.com/mongodb-js/bson-ext/issues/89)) ([8714e7d](https://github.com/mongodb-js/bson-ext/commit/8714e7d849647115395b89ecf56b623398bad5f3)) 11 | 12 | ### [4.0.2](https://github.com/mongodb-js/bson-ext/compare/v4.0.1...v4.0.2) (2021-10-05) 13 | 14 | 15 | ### Bug Fixes 16 | 17 | * **NODE-3619:** serialization of BSON with embedded null bytes in strings ([#79](https://github.com/mongodb-js/bson-ext/issues/79)) ([39c87d2](https://github.com/mongodb-js/bson-ext/commit/39c87d2a288ae3ace3cbd59ade21bcbb05525fba)) 18 | 19 | ### [4.0.1](https://github.com/mongodb-js/bson-ext/compare/v4.0.0...v4.0.1) (2021-09-14) 20 | 21 | ## [4.0.0](https://github.com/mongodb-js/bson-ext/compare/v2.0.5...v4.0.0) (2021-05-25) 22 | 23 | 24 | ## [2.0.5](https://github.com/mongodb-js/bson-ext/compare/v2.0.4...v2.0.5) (2020-07-31) 25 | 26 | 27 | 28 | 29 | ## [2.0.4](https://github.com/mongodb-js/bson-ext/compare/v2.0.3...v2.0.4) (2020-07-31) 30 | 31 | 32 | 33 | 34 | ## [2.0.3](https://github.com/christkv/bson-ext/compare/v2.0.2...v2.03) (2019-04-08) 35 | 36 | 37 | ### Bug Fixes 38 | 39 | * **compat:** node >= v12 compatibility 40 | 41 | 42 | 43 | 44 | ## [2.0.2](https://github.com/christkv/bson-ext/compare/v2.0.0...v2.0.2) (2019-04-08) 45 | 46 | 47 | ### Bug Fixes 48 | 49 | * **checkKeys:** allow through certain keys beginning with $ ([8382dde](https://github.com/christkv/bson-ext/commit/8382dde)) 50 | 51 | 52 | 53 | 54 | # 2.0.0 (2018-03-02) 55 | 56 | 57 | ### Bug Fixes 58 | 59 | * **db-ref:** correctly avoid parsing DBRef for invalid shapes ([fb58633](https://github.com/christkv/bson-ext/commit/fb58633)) 60 | * **db-ref:** support additional fields on DBRef type ([ff13e82](https://github.com/christkv/bson-ext/commit/ff13e82)) 61 | * **object-id:** move length assertion into ReadObjectId ([f3e7812](https://github.com/christkv/bson-ext/commit/f3e7812)) 62 | * **symbol:** upgrade deprecated symbol type to string ([5189f85](https://github.com/christkv/bson-ext/commit/5189f85)) 63 | 64 | 65 | ### Features 66 | 67 | * **mocha:** switch to using mocha for testing the module ([4d4a3a7](https://github.com/christkv/bson-ext/commit/4d4a3a7)) 68 | * **utf8-parsing:** add utf8 string validation ([6f01469](https://github.com/christkv/bson-ext/commit/6f01469)) 69 | 70 | 71 | 72 | 1.0.5 2016-01-20 73 | ---------------- 74 | - Better identify Map instances, avoid throwing on anything containing an entries field, #37. 75 | 76 | 1.0.4 2016-01-11 77 | ---------------- 78 | - #204 remove Buffer.from as it's partially broken in the early 4.x.x series of node releases. 79 | 80 | 1.0.3 2016-01-03 81 | ---------------- 82 | - Fixed toString for ObjectId so it will work with inspect. 83 | 84 | 1.0.2 2016-01-02 85 | ---------------- 86 | - Minor optimizations for ObjectID to use Buffer.from where available. 87 | 88 | 1.0.1 2016-12-06 89 | ---------------- 90 | - Reverted behavior: undefined is serialized as null, since MongoDB 3.4 does not allow for undefined comparisons.
91 | 92 | 1.0.0 2016-12-06 93 | ---------------- 94 | - Introduced new BSON API and documentation. 95 | 96 | 0.1.13 2015-10-05 97 | ----------------- 98 | - Upgrade to nan 2.0.9 (Issue #27, https://github.com/guymguym) 99 | - Removed pre-gyp code as no longer needed. 100 | 101 | 0.1.12 2015-08-06 102 | ----------------- 103 | - Undefined fields serialized as null values in arrays. 104 | 105 | 0.1.11 2015-08-06 106 | ----------------- 107 | - Undefined fields are omitted from serialization. 108 | 109 | 0.1.10 2015-06-17 110 | ----------------- 111 | - No longer print to console.error if the driver did not load correctly. 112 | 113 | 0.1.9 2015-06-17 114 | ---------------- 115 | - Replaced deprecated Node C++ methods with Nan.h based ones. 116 | 117 | 0.1.8 2015-06-12 118 | ---------------- 119 | - Refactored to use a single 16MB buffer for all serialization. 120 | 121 | 0.1.7 2015-05-15 122 | ---------------- 123 | - Attempt node-pre-gyp, otherwise fall back to node-gyp, or just fail. 124 | 125 | 0.1.6 2015-05-07 126 | ---------------- 127 | - Updated to use bundled node-pre-gyp as install method. 128 | 129 | 0.1.5 2015-05-07 130 | ---------------- 131 | - Updated npmignore to remove any non-needed artifacts. 132 | 133 | 0.1.4 2015-05-05 134 | ---------------- 135 | - Updated nan.h dependency to 1.8.x. 136 | 137 | 0.1.3 2015-04-23 138 | ---------------- 139 | - Windows-only prebuilt support (Issue #6, https://github.com/imlucas) 140 | 141 | 0.1.2 2015-04-20 142 | ---------------- 143 | - Removed pre-packaged binaries from build. 144 | 145 | 0.1.1 2015-04-20 146 | ---------------- 147 | - Merged node-pre-gyp support (Issue #1, https://github.com/imlucas) 148 | 149 | 0.1.0 2015-03-26 150 | ---------------- 151 | - First push to npm; cleaned up the project, leaving only the C++ source and test harnesses. 152 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types.
34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. 
You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. 
You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "[]" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright [yyyy] [name of copyright owner] 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 
202 | -------------------------------------------------------------------------------- /Makefile: -------------------------------------------------------------------------------- 1 | NODE = node 2 | NPM = npm 3 | NODEUNIT = node_modules/nodeunit/bin/nodeunit 4 | 5 | all: clean node_gyp 6 | 7 | test: clean node_gyp 8 | npm test 9 | 10 | node_gyp: clean 11 | node-gyp configure build 12 | 13 | clean: 14 | node-gyp clean 15 | 16 | .PHONY: all 17 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # BSON-EXT 2 | 3 | _A BSON parser Node.JS native addon._ 4 | 5 | BSON is short for Binary JSON and is the binary-encoded serialization of JSON-like documents. You can learn more about it in [the specification](http://bsonspec.org). 6 | 7 | While this library is compatible with the mongodb driver version 4+, bson-ext will soon be deprecated and no longer supported. It is strongly recommended 8 | that [js-bson](https://github.com/mongodb/js-bson) be used instead. 9 | 10 | ### MongoDB Node.js Driver Version Compatibility 11 | 12 | Only the following version combinations with the [MongoDB Node.js Driver](https://github.com/mongodb/node-mongodb-native) are considered stable. 13 | 14 | | | `bson-ext@1.x` | `bson-ext@2.x` | `bson-ext@4.x` | 15 | | ------------- | -------------- | -------------- | -------------- | 16 | | `mongodb@6.x` | N/A | N/A | N/A | 17 | | `mongodb@5.x` | N/A | N/A | N/A | 18 | | `mongodb@4.x` | N/A | N/A | ✓ | 19 | | `mongodb@3.x` | ✓ | ✓ | N/A | 20 | 21 | ## Installation 22 | 23 | ```sh 24 | npm install bson-ext 25 | ``` 26 | 27 | ## Usage 28 | 29 | A simple example of how to use BSON in `Node.js`: 30 | 31 | ```js 32 | // Get BSON parser class 33 | const BSON = require('bson-ext') 34 | // Get the Long type 35 | const Long = BSON.Long; 36 | 37 | // Create a document containing a Long 38 | const doc = { long: Long.fromNumber(100) } 39 | 40 | // Serialize the document 41 | const data = BSON.serialize(doc) 42 | console.log('data:', data) 43 | 44 | // Deserialize the resulting Buffer 45 | const docRoundTrip = BSON.deserialize(data) 46 | console.log('docRoundTrip:', docRoundTrip) 47 | ``` 48 | 49 | ## Compiling 50 | 51 | To build a new version, perform the following operations. 52 | 53 | ```sh 54 | npm install 55 | npm run build 56 | ``` 57 | 58 | ## API 59 | 60 | ### BSON types 61 | 62 | For all BSON types documentation, please refer to the documentation for the [MongoDB Node.js driver](http://mongodb.github.io/node-mongodb-native/4.0). 63 | 64 | #### BSON.serialize 65 | 66 | The BSON `serialize` method takes a JavaScript object and an optional options object and returns a Node.js Buffer. 67 | 68 | ```typescript 69 | /** 70 | * The BSON library accepts plain javascript objects. 71 | * It serializes to BSON by iterating the keys. 72 | */ 73 | interface Document { 74 | [key: string]: any; 75 | } 76 | 77 | interface SerializeOptions { 78 | /** the serializer will check if keys are valid. */ 79 | checkKeys?: boolean; 80 | /** serialize the javascript functions **(default:false)**. */ 81 | serializeFunctions?: boolean; 82 | /** serialize will not emit undefined fields **(default:true)** */ 83 | ignoreUndefined?: boolean; 84 | } 85 | 86 | /** 87 | * Serialize a Javascript object. 88 | * 89 | * @param object - the Javascript object to serialize. 90 | * @returns Buffer object containing the serialized object.
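 * @example
 * // A minimal round-trip sketch (illustrative only; assumes the
 * // require('bson-ext') import shown in the Usage section above):
 * const bytes = BSON.serialize({ a: 1 }, { checkKeys: true });
 * const doc = BSON.deserialize(bytes); // => { a: 1 }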
91 | */ 92 | function serialize(object: Document, options?: SerializeOptions): Buffer; 93 | ``` 94 | 95 | #### BSON.serializeWithBufferAndIndex 96 | 97 | The BSON `serializeWithBufferAndIndex` method takes an object, a target buffer instance, and an optional options object, and returns the end serialization index in the final buffer. 98 | 99 | ```typescript 100 | /** 101 | * Serialize a Javascript object using a predefined Buffer and index into the buffer, 102 | * useful when pre-allocating the space for serialization. 103 | * 104 | * @param object - the Javascript object to serialize. 105 | * @param finalBuffer - the Buffer you pre-allocated to store the serialized BSON object. 106 | * @returns the index pointing to the last written byte in the buffer. 107 | */ 108 | function serializeWithBufferAndIndex(object: Document, finalBuffer: Buffer, options?: SerializeOptions): number; 109 | ``` 110 | 111 | #### BSON.calculateObjectSize 112 | 113 | The BSON `calculateObjectSize` method takes a JavaScript object and an optional options object and returns the size of the BSON object. 114 | 115 | ```typescript 116 | interface CalculateObjectSizeOptions { 117 | /** serialize the javascript functions **(default:false)**. */ 118 | serializeFunctions?: boolean; 119 | /** serialize will not emit undefined fields **(default:true)** */ 120 | ignoreUndefined?: boolean; 121 | } 122 | 123 | 124 | /** 125 | * Calculate the bson size for a passed in Javascript object. 126 | * 127 | * @param object - the Javascript object to calculate the BSON byte size for 128 | * @returns size of BSON object in bytes 129 | * @public 130 | */ 131 | function calculateObjectSize(object: Document, options?: CalculateObjectSizeOptions): number; 132 | ``` 133 | 134 | #### BSON.deserialize 135 | 136 | The BSON `deserialize` method takes a Node.js Buffer and an optional options object and returns a deserialized JavaScript object. 137 | 138 | ```typescript 139 | interface DeserializeOptions { 140 | /** evaluate functions in the BSON document scoped to the object deserialized. */ 141 | evalFunctions?: boolean; 142 | /** cache evaluated functions for reuse. */ 143 | cacheFunctions?: boolean; 144 | /** when deserializing a Long will fit it into a Number if it's smaller than 53 bits */ 145 | promoteLongs?: boolean; 146 | /** when deserializing a Binary will return it as a node.js Buffer instance. */ 147 | promoteBuffers?: boolean; 148 | /** when deserializing, promote BSON values to their closest Node.js equivalent types. */ 149 | promoteValues?: boolean; 150 | /** specifies which fields to return as raw, unserialized buffers. */ 151 | fieldsAsRaw?: Document; 152 | /** return BSON regular expressions as BSONRegExp instances. */ 153 | bsonRegExp?: boolean; 154 | /** allows the buffer to be larger than the parsed BSON object */ 155 | allowObjectSmallerThanBufferSize?: boolean; 156 | /** Offset into buffer to begin reading document from */ 157 | index?: number; 158 | } 159 | 160 | /** 161 | * Deserialize data as BSON. 162 | * 163 | * @param buffer - the buffer containing the serialized set of BSON documents. 164 | * @returns the deserialized Javascript Object.
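 * @example
 * // Illustrative only; assumes `bytes` holds the output of BSON.serialize above:
 * const doc = BSON.deserialize(bytes, { promoteValues: true });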
165 | * @public 166 | */ 167 | function deserialize(buffer: Buffer | ArrayBufferView | ArrayBuffer, options?: DeserializeOptions): Document; 168 | ``` 169 | 170 | #### BSON.deserializeStream 171 | 172 | The BSON `deserializeStream` method takes a Node.js Buffer and a `startIndex`, and allows more control over deserialization of a Buffer containing concatenated BSON documents. 173 | 174 | ```typescript 175 | /** 176 | * Deserialize stream data as BSON documents. 177 | * 178 | * @param data - the buffer containing the serialized set of BSON documents. 179 | * @param startIndex - the start index in the data Buffer where the deserialization is to start. 180 | * @param numberOfDocuments - number of documents to deserialize. 181 | * @param documents - the array in which to store the deserialized documents. 182 | * @param docStartIndex - the index in the documents array from where to start inserting documents. 183 | * @param options - additional options used for the deserialization. 184 | * @returns the next index in the buffer after deserializing **x** number of documents. 185 | * @public 186 | */ 187 | function deserializeStream(data: Buffer | ArrayBufferView | ArrayBuffer, startIndex: number, numberOfDocuments: number, documents: Document[], docStartIndex: number, options: DeserializeOptions): number; 188 | ``` 189 | -------------------------------------------------------------------------------- /benchmarks/serialization_benchmark.js: -------------------------------------------------------------------------------- 1 | var BSON = require('../'); 2 | var BSONJS = require('bson'); 3 | 4 | function generateRecord(recnum) { 5 | // Definition of a 'Document' 6 | var topFields = 20; // 20 top level fields 7 | var arrObjSize = 10; // 10 fields in each array object 8 | var arrSize = 20; // Array of 20 elements 9 | var fldpfx = "val" 10 | 11 | //This is a shard friendly _id, a low cardinality, variable prefix then an incrementing value as string 12 | var id = (recnum % 256).toString() + "-" + recnum 13 | var rec = { 14 | _id: id, 15 | arr: [] 16 | } 17 | 18 | for (var tf = 0; tf < topFields; tf++) { 19 | var fieldval 20 | switch (tf % 4) { 21 | case 0: 22 | fieldval = "Lorem ipsum dolor sit amet, consectetur adipiscing elit." //Text 23 | break 24 | case 1: 25 | fieldval = new Date(tf * recnum) //A date 26 | break 27 | case 2: 28 | fieldval = Math.PI * tf // A float 29 | break 30 | case 3: 31 | fieldval = BSON.Long(recnum + tf) // A 64 bit integer 32 | break 33 | } 34 | // fieldval = Math.PI * tf // A float 35 | // fieldval = new Date(tf * recnum) //A date 36 | // fieldval = "Lorem ipsum dolor sit amet, consectetur adipiscing elit." //Text 37 | // fieldval = BSON.Long(recnum + tf) // A 64 bit integer 38 | // fieldval = [BSON.Long(recnum + tf), BSON.Long(recnum + tf), BSON.Long(recnum + tf)] 39 | rec[fldpfx + tf] = fieldval 40 | } 41 | 42 | // populate array of subdocuments 43 | for (var el = 0; el < arrSize; el++) { 44 | var subrec = {} 45 | for (var subRecField = 0; subRecField < arrObjSize; subRecField++) { 46 | var fieldval 47 | switch (subRecField % 4) { 48 | case 0: 49 | fieldval = "Nunc finibus pretium dignissim. Aenean ut nisi finibus" 50 | break 51 | case 1: 52 | fieldval = new Date(tf * recnum * el) 53 | break 54 | case 2: 55 | fieldval = Math.PI * tf * el 56 | break 57 | case 3: 58 | fieldval = BSON.Long(recnum + tf * el) 59 | break 60 | } 61 | // fieldval = Math.PI * tf * el 62 | // fieldval = new Date(tf * recnum * el) 63 | // fieldval = "Nunc finibus pretium dignissim.
Aenean ut nisi finibus" 64 | // fieldval = BSON.Long(recnum + tf * el) 65 | // fieldval = [BSON.Long(recnum + tf * el), BSON.Long(recnum + tf * el), BSON.Long(recnum + tf * el)] 66 | subrec['subval' + subRecField] = fieldval 67 | } 68 | rec['arr'].push(subrec) 69 | } 70 | 71 | return rec 72 | } 73 | 74 | var iterations = 100000; 75 | // var iterations = 10000; 76 | // var iterations = 1; 77 | var doc = generateRecord(0) 78 | var buffer = BSON.serialize(doc); 79 | var start = new Date(); 80 | 81 | // console.log("=====================================================") 82 | // console.log(JSON.stringify(doc, null, 2)) 83 | 84 | // 85 | // Serialize 86 | // 87 | for(var i = 0; i < iterations; i++) { 88 | BSON.serialize(doc); 89 | } 90 | 91 | var end = new Date(); 92 | console.log("======================== Serialization total time MS C++"); 93 | console.log("totalMS = " + (end.getTime() - start.getTime())); 94 | 95 | var start = new Date(); 96 | 97 | for(var i = 0; i < iterations; i++) { 98 | BSONJS.serialize(doc); 99 | } 100 | 101 | var end = new Date(); 102 | console.log("======================== Serialization total time MS JS"); 103 | console.log("totalMS = " + (end.getTime() - start.getTime())); 104 | 105 | // 106 | // Deserialize 107 | // 108 | var start = new Date(); 109 | 110 | for(var i = 0; i < iterations; i++) { 111 | BSON.deserialize(buffer); 112 | } 113 | 114 | var end = new Date(); 115 | console.log("======================== Deserialization total time MS C++"); 116 | console.log("totalMS = " + (end.getTime() - start.getTime())); 117 | 118 | var start = new Date(); 119 | 120 | for(var i = 0; i < iterations; i++) { 121 | BSONJS.deserialize(buffer); 122 | } 123 | 124 | var end = new Date(); 125 | console.log("======================== Deserialization total time MS JS"); 126 | console.log("totalMS = " + (end.getTime() - start.getTime())); 127 | -------------------------------------------------------------------------------- /binding.gyp: -------------------------------------------------------------------------------- 1 | { 2 | 'targets': [ 3 | { 4 | 'win_delay_load_hook': 'true', 5 | 'target_name': 'bson', 6 | 'sources': [ 'src/bson.cc' ], 7 | 'cflags!': [ '-fno-exceptions' ], 8 | 'cflags_cc!': [ '-fno-exceptions' ], 9 | 'include_dirs': [ '=6.9.0" 39 | }, 40 | "scripts": { 41 | "format": "clang-format -i --style=file --glob=\"src/*.{h,cc}\"", 42 | "install": "prebuild-install || node-gyp rebuild", 43 | "rebuild": "prebuild --compile", 44 | "prebuild": "prebuild --strip --verbose --all", 45 | "release": "standard-version -i HISTORY.md", 46 | "pretest": "npm run rebuild", 47 | "test": "mocha ./test/node" 48 | }, 49 | "license": "Apache-2.0" 50 | } 51 | -------------------------------------------------------------------------------- /src/utf8decoder.h: -------------------------------------------------------------------------------- 1 | // See http://bjoern.hoehrmann.de/utf-8/decoder/dfa/ for details. 
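// A hedged usage sketch (assumed calling convention, not part of the original
// header): start in kAccept with a zeroed buffer, feed one byte per call, and
// treat the input as valid UTF-8 only if the final state is kAccept; kReject
// is terminal.
//   Utf8DfaDecoder::State state = Utf8DfaDecoder::State::kAccept;
//   uint32_t codepoint = 0;
//   for (uint8_t byte : bytes) Utf8DfaDecoder::Decode(byte, &state, &codepoint);
//   bool valid = (state == Utf8DfaDecoder::State::kAccept);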
2 | // The remapped transition table is justified at 3 | // https://docs.google.com/spreadsheets/d/1AZcQwuEL93HmNCljJWUwFMGqf7JAQ0puawZaUgP0E14 4 | 5 | #include <stdint.h> 6 | 7 | #ifndef __UTF8_DFA_DECODER_H 8 | #define __UTF8_DFA_DECODER_H 9 | 10 | namespace Utf8DfaDecoder { 11 | 12 | enum State : uint8_t { 13 | kReject = 0, 14 | kAccept = 12, 15 | kTwoByte = 24, 16 | kThreeByte = 36, 17 | kThreeByteLowMid = 48, 18 | kFourByte = 60, 19 | kFourByteLow = 72, 20 | kThreeByteHigh = 84, 21 | kFourByteMidHigh = 96, 22 | }; 23 | 24 | static inline void Decode(uint8_t byte, State *state, uint32_t *buffer) { 25 | // This first table maps each byte to a transition class. 26 | static constexpr uint8_t transitions[] = { 27 | 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // 00-0F 28 | 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // 10-1F 29 | 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // 20-2F 30 | 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // 30-3F 31 | 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // 40-4F 32 | 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // 50-5F 33 | 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // 60-6F 34 | 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // 70-7F 35 | 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, // 80-8F 36 | 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, // 90-9F 37 | 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, // A0-AF 38 | 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, // B0-BF 39 | 9, 9, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, // C0-CF 40 | 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, // D0-DF 41 | 10, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 6, 5, 5, // E0-EF 42 | 11, 7, 7, 7, 8, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, // F0-FF 43 | }; 44 | 45 | // This second table maps a state to a new state when adding a transition. 46 | // 00-7F 47 | // | 80-8F 48 | // | | 90-9F 49 | // | | | A0-BF 50 | // | | | | C2-DF 51 | // | | | | | E1-EC, EE, EF 52 | // | | | | | | ED 53 | // | | | | | | | F1-F3 54 | // | | | | | | | | F4 55 | // | | | | | | | | | C0, C1, F5-FF 56 | // | | | | | | | | | | E0 57 | // | | | | | | | | | | | F0 58 | static constexpr uint8_t states[] = { 59 | 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // REJECT = 0 60 | 12, 0, 0, 0, 24, 36, 48, 60, 72, 0, 84, 96, // ACCEPT = 12 61 | 0, 12, 12, 12, 0, 0, 0, 0, 0, 0, 0, 0, // 2-byte = 24 62 | 0, 24, 24, 24, 0, 0, 0, 0, 0, 0, 0, 0, // 3-byte = 36 63 | 0, 24, 24, 0, 0, 0, 0, 0, 0, 0, 0, 0, // 3-byte low/mid = 48 64 | 0, 36, 36, 36, 0, 0, 0, 0, 0, 0, 0, 0, // 4-byte = 60 65 | 0, 36, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // 4-byte low = 72 66 | 0, 0, 0, 24, 0, 0, 0, 0, 0, 0, 0, 0, // 3-byte high = 84 67 | 0, 0, 36, 36, 0, 0, 0, 0, 0, 0, 0, 0, // 4-byte mid/high = 96 68 | }; 69 | 70 | uint8_t type = transitions[byte]; 71 | *state = static_cast<State>(states[*state + type]); 72 | *buffer = (*buffer << 6) | (byte & (0x7F >> (type >> 1))); 73 | } 74 | 75 | } // namespace Utf8DfaDecoder 76 | 77 | #endif /* __UTF8_DFA_DECODER_H */ 78 | -------------------------------------------------------------------------------- /test/binary_parser.js: -------------------------------------------------------------------------------- 1 | /** 2 | * Binary Parser.
3 | * Jonas Raoni Soares Silva 4 | * http://jsfromhell.com/classes/binary-parser [v1.0] 5 | */ 6 | var chr = String.fromCharCode; 7 | 8 | var maxBits = []; 9 | for (var i = 0; i < 64; i++) { 10 | maxBits[i] = Math.pow(2, i); 11 | } 12 | 13 | function BinaryParser (bigEndian, allowExceptions) { 14 | if(!(this instanceof BinaryParser)) return new BinaryParser(bigEndian, allowExceptions); 15 | 16 | this.bigEndian = bigEndian; 17 | this.allowExceptions = allowExceptions; 18 | }; 19 | 20 | BinaryParser.warn = function warn (msg) { 21 | if (this.allowExceptions) { 22 | throw new Error(msg); 23 | } 24 | 25 | return 1; 26 | }; 27 | 28 | BinaryParser.decodeFloat = function decodeFloat (data, precisionBits, exponentBits) { 29 | var b = new this.Buffer(this.bigEndian, data); 30 | 31 | b.checkBuffer(precisionBits + exponentBits + 1); 32 | 33 | var bias = maxBits[exponentBits - 1] - 1 34 | , signal = b.readBits(precisionBits + exponentBits, 1) 35 | , exponent = b.readBits(precisionBits, exponentBits) 36 | , significand = 0 37 | , divisor = 2 38 | , curByte = b.buffer.length + (-precisionBits >> 3) - 1; 39 | 40 | do { 41 | for (var byteValue = b.buffer[ ++curByte ], startBit = precisionBits % 8 || 8, mask = 1 << startBit; mask >>= 1; ( byteValue & mask ) && ( significand += 1 / divisor ), divisor *= 2 ); 42 | } while (precisionBits -= startBit); 43 | 44 | return exponent == ( bias << 1 ) + 1 ? significand ? NaN : signal ? -Infinity : +Infinity : ( 1 + signal * -2 ) * ( exponent || significand ? !exponent ? Math.pow( 2, -bias + 1 ) * significand : Math.pow( 2, exponent - bias ) * ( 1 + significand ) : 0 ); 45 | }; 46 | 47 | BinaryParser.decodeInt = function decodeInt (data, bits, signed, forceBigEndian) { 48 | var b = new this.Buffer(this.bigEndian || forceBigEndian, data) 49 | , x = b.readBits(0, bits) 50 | , max = maxBits[bits]; //max = Math.pow( 2, bits ); 51 | 52 | return signed && x >= max / 2 53 | ? x - max 54 | : x; 55 | }; 56 | 57 | BinaryParser.encodeFloat = function encodeFloat (data, precisionBits, exponentBits) { 58 | var bias = maxBits[exponentBits - 1] - 1 59 | , minExp = -bias + 1 60 | , maxExp = bias 61 | , minUnnormExp = minExp - precisionBits 62 | , n = parseFloat(data) 63 | , status = isNaN(n) || n == -Infinity || n == +Infinity ? n : 0 64 | , exp = 0 65 | , len = 2 * bias + 1 + precisionBits + 3 66 | , bin = new Array(len) 67 | , signal = (n = status !== 0 ? 0 : n) < 0 68 | , intPart = Math.floor(n = Math.abs(n)) 69 | , floatPart = n - intPart 70 | , lastBit 71 | , rounded 72 | , result 73 | , i 74 | , j; 75 | 76 | for (i = len; i; bin[--i] = 0); 77 | 78 | for (i = bias + 2; intPart && i; bin[--i] = intPart % 2, intPart = Math.floor(intPart / 2)); 79 | 80 | for (i = bias + 1; floatPart > 0 && i; (bin[++i] = ((floatPart *= 2) >= 1) - 0 ) && --floatPart); 81 | 82 | for (i = -1; ++i < len && !bin[i];); 83 | 84 | if (bin[(lastBit = precisionBits - 1 + (i = (exp = bias + 1 - i) >= minExp && exp <= maxExp ? i + 1 : bias + 1 - (exp = minExp - 1))) + 1]) { 85 | if (!(rounded = bin[lastBit])) { 86 | for (j = lastBit + 2; !rounded && j < len; rounded = bin[j++]); 87 | } 88 | 89 | for (j = lastBit + 1; rounded && --j >= 0; (bin[j] = !bin[j] - 0) && (rounded = 0)); 90 | } 91 | 92 | for (i = i - 2 < 0 ? 
-1 : i - 3; ++i < len && !bin[i];); 93 | 94 | if ((exp = bias + 1 - i) >= minExp && exp <= maxExp) { 95 | ++i; 96 | } else if (exp < minExp) { 97 | exp != bias + 1 - len && exp < minUnnormExp && this.warn("encodeFloat::float underflow"); 98 | i = bias + 1 - (exp = minExp - 1); 99 | } 100 | 101 | if (intPart || status !== 0) { 102 | this.warn(intPart ? "encodeFloat::float overflow" : "encodeFloat::" + status); 103 | exp = maxExp + 1; 104 | i = bias + 2; 105 | 106 | if (status == -Infinity) { 107 | signal = 1; 108 | } else if (isNaN(status)) { 109 | bin[i] = 1; 110 | } 111 | } 112 | 113 | for (n = Math.abs(exp + bias), j = exponentBits + 1, result = ""; --j; result = (n % 2) + result, n = n >>= 1); 114 | 115 | for (n = 0, j = 0, i = (result = (signal ? "1" : "0") + result + bin.slice(i, i + precisionBits).join("")).length, r = []; i; j = (j + 1) % 8) { 116 | n += (1 << j) * result.charAt(--i); 117 | if (j == 7) { 118 | r[r.length] = String.fromCharCode(n); 119 | n = 0; 120 | } 121 | } 122 | 123 | r[r.length] = n 124 | ? String.fromCharCode(n) 125 | : ""; 126 | 127 | return (this.bigEndian ? r.reverse() : r).join(""); 128 | }; 129 | 130 | BinaryParser.encodeInt = function encodeInt (data, bits, signed, forceBigEndian) { 131 | var max = maxBits[bits]; 132 | 133 | if (data >= max || data < -(max / 2)) { 134 | this.warn("encodeInt::overflow"); 135 | data = 0; 136 | } 137 | 138 | if (data < 0) { 139 | data += max; 140 | } 141 | 142 | for (var r = []; data; r[r.length] = String.fromCharCode(data % 256), data = Math.floor(data / 256)); 143 | 144 | for (bits = -(-bits >> 3) - r.length; bits--; r[r.length] = "\0"); 145 | 146 | return ((this.bigEndian || forceBigEndian) ? r.reverse() : r).join(""); 147 | }; 148 | 149 | BinaryParser.toSmall = function( data ){ return this.decodeInt( data, 8, true ); }; 150 | BinaryParser.fromSmall = function( data ){ return this.encodeInt( data, 8, true ); }; 151 | BinaryParser.toByte = function( data ){ return this.decodeInt( data, 8, false ); }; 152 | BinaryParser.fromByte = function( data ){ return this.encodeInt( data, 8, false ); }; 153 | BinaryParser.toShort = function( data ){ return this.decodeInt( data, 16, true ); }; 154 | BinaryParser.fromShort = function( data ){ return this.encodeInt( data, 16, true ); }; 155 | BinaryParser.toWord = function( data ){ return this.decodeInt( data, 16, false ); }; 156 | BinaryParser.fromWord = function( data ){ return this.encodeInt( data, 16, false ); }; 157 | BinaryParser.toInt = function( data ){ return this.decodeInt( data, 32, true ); }; 158 | BinaryParser.fromInt = function( data ){ return this.encodeInt( data, 32, true ); }; 159 | BinaryParser.toLong = function( data ){ return this.decodeInt( data, 64, true ); }; 160 | BinaryParser.fromLong = function( data ){ return this.encodeInt( data, 64, true ); }; 161 | BinaryParser.toDWord = function( data ){ return this.decodeInt( data, 32, false ); }; 162 | BinaryParser.fromDWord = function( data ){ return this.encodeInt( data, 32, false ); }; 163 | BinaryParser.toQWord = function( data ){ return this.decodeInt( data, 64, true ); }; 164 | BinaryParser.fromQWord = function( data ){ return this.encodeInt( data, 64, true ); }; 165 | BinaryParser.toFloat = function( data ){ return this.decodeFloat( data, 23, 8 ); }; 166 | BinaryParser.fromFloat = function( data ){ return this.encodeFloat( data, 23, 8 ); }; 167 | BinaryParser.toDouble = function( data ){ return this.decodeFloat( data, 52, 11 ); }; 168 | BinaryParser.fromDouble = function( data ){ return this.encodeFloat( data, 52, 
11 ); }; 169 | 170 | // Factor out the encode so it can be shared by add_header and push_int32 171 | BinaryParser.encode_int32 = function encode_int32 (number, asArray) { 172 | var a, b, c, d, unsigned; 173 | unsigned = (number < 0) ? (number + 0x100000000) : number; 174 | a = Math.floor(unsigned / 0xffffff); 175 | unsigned &= 0xffffff; 176 | b = Math.floor(unsigned / 0xffff); 177 | unsigned &= 0xffff; 178 | c = Math.floor(unsigned / 0xff); 179 | unsigned &= 0xff; 180 | d = Math.floor(unsigned); 181 | return asArray ? [chr(a), chr(b), chr(c), chr(d)] : chr(a) + chr(b) + chr(c) + chr(d); 182 | }; 183 | 184 | BinaryParser.encode_int64 = function encode_int64 (number) { 185 | var a, b, c, d, e, f, g, h, unsigned; 186 | unsigned = (number < 0) ? (number + 0x10000000000000000) : number; 187 | a = Math.floor(unsigned / 0xffffffffffffff); 188 | unsigned &= 0xffffffffffffff; 189 | b = Math.floor(unsigned / 0xffffffffffff); 190 | unsigned &= 0xffffffffffff; 191 | c = Math.floor(unsigned / 0xffffffffff); 192 | unsigned &= 0xffffffffff; 193 | d = Math.floor(unsigned / 0xffffffff); 194 | unsigned &= 0xffffffff; 195 | e = Math.floor(unsigned / 0xffffff); 196 | unsigned &= 0xffffff; 197 | f = Math.floor(unsigned / 0xffff); 198 | unsigned &= 0xffff; 199 | g = Math.floor(unsigned / 0xff); 200 | unsigned &= 0xff; 201 | h = Math.floor(unsigned); 202 | return chr(a) + chr(b) + chr(c) + chr(d) + chr(e) + chr(f) + chr(g) + chr(h); 203 | }; 204 | 205 | /** 206 | * UTF8 methods 207 | */ 208 | 209 | // Take a raw binary string and return a utf8 string 210 | BinaryParser.decode_utf8 = function decode_utf8 (binaryStr) { 211 | var len = binaryStr.length 212 | , decoded = '' 213 | , i = 0 214 | , c = 0 215 | , c1 = 0 216 | , c2 = 0 217 | , c3; 218 | 219 | while (i < len) { 220 | c = binaryStr.charCodeAt(i); 221 | if (c < 128) { 222 | decoded += String.fromCharCode(c); 223 | i++; 224 | } else if ((c > 191) && (c < 224)) { 225 | c2 = binaryStr.charCodeAt(i+1); 226 | decoded += String.fromCharCode(((c & 31) << 6) | (c2 & 63)); 227 | i += 2; 228 | } else { 229 | c2 = binaryStr.charCodeAt(i+1); 230 | c3 = binaryStr.charCodeAt(i+2); 231 | decoded += String.fromCharCode(((c & 15) << 12) | ((c2 & 63) << 6) | (c3 & 63)); 232 | i += 3; 233 | } 234 | } 235 | 236 | return decoded; 237 | }; 238 | 239 | // Encode a cstring 240 | BinaryParser.encode_cstring = function encode_cstring (s) { 241 | return unescape(encodeURIComponent(s)) + BinaryParser.fromByte(0); 242 | }; 243 | 244 | // Take a utf8 string and return a binary string 245 | BinaryParser.encode_utf8 = function encode_utf8 (s) { 246 | var a = "" 247 | , c; 248 | 249 | for (var n = 0, len = s.length; n < len; n++) { 250 | c = s.charCodeAt(n); 251 | 252 | if (c < 128) { 253 | a += String.fromCharCode(c); 254 | } else if ((c > 127) && (c < 2048)) { 255 | a += String.fromCharCode((c>>6) | 192) ; 256 | a += String.fromCharCode((c&63) | 128); 257 | } else { 258 | a += String.fromCharCode((c>>12) | 224); 259 | a += String.fromCharCode(((c>>6) & 63) | 128); 260 | a += String.fromCharCode((c&63) | 128); 261 | } 262 | } 263 | 264 | return a; 265 | }; 266 | 267 | BinaryParser.hprint = function hprint (s) { 268 | var number; 269 | 270 | for (var i = 0, len = s.length; i < len; i++) { 271 | if (s.charCodeAt(i) < 32) { 272 | number = s.charCodeAt(i) <= 15 273 | ? "0" + s.charCodeAt(i).toString(16) 274 | : s.charCodeAt(i).toString(16); 275 | process.stdout.write(number + " ") 276 | } else { 277 | number = s.charCodeAt(i) <= 15 278 | ? 
"0" + s.charCodeAt(i).toString(16) 279 | : s.charCodeAt(i).toString(16); 280 | process.stdout.write(number + " ") 281 | } 282 | } 283 | 284 | process.stdout.write("\n\n"); 285 | }; 286 | 287 | BinaryParser.ilprint = function hprint (s) { 288 | var number; 289 | 290 | for (var i = 0, len = s.length; i < len; i++) { 291 | if (s.charCodeAt(i) < 32) { 292 | number = s.charCodeAt(i) <= 15 293 | ? "0" + s.charCodeAt(i).toString(10) 294 | : s.charCodeAt(i).toString(10); 295 | 296 | require('util').debug(number+' : '); 297 | } else { 298 | number = s.charCodeAt(i) <= 15 299 | ? "0" + s.charCodeAt(i).toString(10) 300 | : s.charCodeAt(i).toString(10); 301 | require('util').debug(number+' : '+ s.charAt(i)); 302 | } 303 | } 304 | }; 305 | 306 | BinaryParser.hlprint = function hprint (s) { 307 | var number; 308 | 309 | for (var i = 0, len = s.length; i < len; i++) { 310 | if (s.charCodeAt(i) < 32) { 311 | number = s.charCodeAt(i) <= 15 312 | ? "0" + s.charCodeAt(i).toString(16) 313 | : s.charCodeAt(i).toString(16); 314 | require('util').debug(number+' : '); 315 | } else { 316 | number = s.charCodeAt(i) <= 15 317 | ? "0" + s.charCodeAt(i).toString(16) 318 | : s.charCodeAt(i).toString(16); 319 | require('util').debug(number+' : '+ s.charAt(i)); 320 | } 321 | } 322 | }; 323 | 324 | /** 325 | * BinaryParser buffer constructor. 326 | */ 327 | function BinaryParserBuffer (bigEndian, buffer) { 328 | this.bigEndian = bigEndian || 0; 329 | this.buffer = []; 330 | this.setBuffer(buffer); 331 | }; 332 | 333 | BinaryParserBuffer.prototype.setBuffer = function setBuffer (data) { 334 | var l, i, b; 335 | 336 | if (data) { 337 | i = l = data.length; 338 | b = this.buffer = new Array(l); 339 | for (; i; b[l - i] = data.charCodeAt(--i)); 340 | this.bigEndian && b.reverse(); 341 | } 342 | }; 343 | 344 | BinaryParserBuffer.prototype.hasNeededBits = function hasNeededBits (neededBits) { 345 | return this.buffer.length >= -(-neededBits >> 3); 346 | }; 347 | 348 | BinaryParserBuffer.prototype.checkBuffer = function checkBuffer (neededBits) { 349 | if (!this.hasNeededBits(neededBits)) { 350 | throw new Error("checkBuffer::missing bytes"); 351 | } 352 | }; 353 | 354 | BinaryParserBuffer.prototype.readBits = function readBits (start, length) { 355 | //shl fix: Henri Torgemane ~1996 (compressed by Jonas Raoni) 356 | 357 | function shl (a, b) { 358 | for (; b--; a = ((a %= 0x7fffffff + 1) & 0x40000000) == 0x40000000 ? a * 2 : (a - 0x40000000) * 2 + 0x7fffffff + 1); 359 | return a; 360 | } 361 | 362 | if (start < 0 || length <= 0) { 363 | return 0; 364 | } 365 | 366 | this.checkBuffer(start + length); 367 | 368 | var offsetLeft 369 | , offsetRight = start % 8 370 | , curByte = this.buffer.length - ( start >> 3 ) - 1 371 | , lastByte = this.buffer.length + ( -( start + length ) >> 3 ) 372 | , diff = curByte - lastByte 373 | , sum = ((this.buffer[ curByte ] >> offsetRight) & ((1 << (diff ? 8 - offsetRight : length)) - 1)) + (diff && (offsetLeft = (start + length) % 8) ? (this.buffer[lastByte++] & ((1 << offsetLeft) - 1)) << (diff-- << 3) - offsetRight : 0); 374 | 375 | for(; diff; sum += shl(this.buffer[lastByte++], (diff-- << 3) - offsetRight)); 376 | 377 | return sum; 378 | }; 379 | 380 | /** 381 | * Expose. 
382 | */ 383 | BinaryParser.Buffer = BinaryParserBuffer; 384 | 385 | exports.BinaryParser = BinaryParser; 386 | -------------------------------------------------------------------------------- /test/node/bson_array_test.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | // var hexStringToBinary = function(string) { 4 | // var numberofValues = string.length / 2; 5 | // var array = ''; 6 | 7 | // for (var i = 0; i < numberofValues; i++) { 8 | // array += String.fromCharCode(parseInt(string[i * 2] + string[i * 2 + 1], 16)); 9 | // } 10 | // return array; 11 | // }; 12 | 13 | // var assertBuffersEqual = function(test, buffer1, buffer2) { 14 | // if (buffer1.length !== buffer2.length) 15 | // test.fail('Buffers do not have the same length', buffer1, buffer2); 16 | 17 | // for (var i = 0; i < buffer1.length; i++) { 18 | // test.equal(buffer1[i], buffer2[i]); 19 | // } 20 | // }; 21 | 22 | /** 23 | * Module for parsing an ISO 8601 formatted string into a Date object. 24 | */ 25 | // var ISODate = function(string) { 26 | // var match; 27 | 28 | // if (typeof string.getTime === 'function') return string; 29 | // else if ( 30 | // (match = string.match( 31 | // /^(\d{4})(-(\d{2})(-(\d{2})(T(\d{2}):(\d{2})(:(\d{2})(\.(\d+))?)?(Z|((\+|-)(\d{2}):(\d{2}))))?)?)?$/ 32 | // )) 33 | // ) { 34 | // var date = new Date(); 35 | // date.setUTCFullYear(Number(match[1])); 36 | // date.setUTCMonth(Number(match[3]) - 1 || 0); 37 | // date.setUTCDate(Number(match[5]) || 0); 38 | // date.setUTCHours(Number(match[7]) || 0); 39 | // date.setUTCMinutes(Number(match[8]) || 0); 40 | // date.setUTCSeconds(Number(match[10]) || 0); 41 | // date.setUTCMilliseconds(Number('.' + match[12]) * 1000 || 0); 42 | 43 | // if (match[13] && match[13] !== 'Z') { 44 | // var h = Number(match[16]) || 0, 45 | // m = Number(match[17]) || 0; 46 | 47 | // h *= 3600000; 48 | // m *= 60000; 49 | 50 | // var offset = h + m; 51 | // if (match[15] === '+') offset = -offset; 52 | 53 | // date = new Date(date.valueOf() + offset); 54 | // } 55 | 56 | // return date; 57 | // } else throw new Error('Invalid ISO 8601 date given.', __filename); 58 | // }; 59 | 60 | var _Uint8Array = null; 61 | 62 | /** 63 | * Retrieve the server information for the current 64 | * instance of the db client 65 | * 66 | * @ignore 67 | */ 68 | exports.setUp = function(callback) { 69 | _Uint8Array = global.Uint8Array; 70 | delete global['Uint8Array']; 71 | callback(); 72 | }; 73 | 74 | /** 75 | * Retrieve the server information for the current 76 | * instance of the db client 77 | * 78 | * @ignore 79 | */ 80 | exports.tearDown = function(callback) { 81 | global['Uint8Array'] = _Uint8Array; 82 | callback(); 83 | }; 84 | 85 | // /** 86 | // * @ignore 87 | // */ 88 | // exports.shouldCorrectlyDeserializeUsingTypedArray = function(test) { 89 | // var motherOfAllDocuments = { 90 | // 'string': '客家话', 91 | // 'array': [1,2,3], 92 | // 'hash': {'a':1, 'b':2}, 93 | // 'date': new Date(), 94 | // 'oid': new ObjectID(), 95 | // 'binary': new Binary(new Buffer("hello")), 96 | // 'int': 42, 97 | // 'float': 33.3333, 98 | // 'regexp': /regexp/, 99 | // 'boolean': true, 100 | // 'long': Long.fromNumber(100), 101 | // 'where': new Code('this.a > i', {i:1}), 102 | // 'dbref': new DBRef('namespace', new ObjectID(), 'integration_tests_'), 103 | // 'minkey': new MinKey(), 104 | // 'maxkey': new MaxKey() 105 | // } 106 | // 107 | // // Let's serialize it 108 | // var data = BSONSE.BSON.serialize(motherOfAllDocuments, true, true, 
false); 109 | // // Build a typed array 110 | // var arr = new Uint8Array(new ArrayBuffer(data.length)); 111 | // // Iterate over all the fields and copy 112 | // for(var i = 0; i < data.length; i++) { 113 | // arr[i] = data[i] 114 | // } 115 | // 116 | // // Deserialize the object 117 | // var object = BSONDE.BSON.deserialize(arr); 118 | // // Asserts 119 | // test.equal(motherOfAllDocuments.string, object.string); 120 | // test.deepEqual(motherOfAllDocuments.array, object.array); 121 | // test.deepEqual(motherOfAllDocuments.date, object.date); 122 | // test.deepEqual(motherOfAllDocuments.oid.toHexString(), object.oid.toHexString()); 123 | // test.deepEqual(motherOfAllDocuments.binary.length(), object.binary.length()); 124 | // // Assert the values of the binary 125 | // for(var i = 0; i < motherOfAllDocuments.binary.length(); i++) { 126 | // test.equal(motherOfAllDocuments.binary.value[i], object.binary[i]); 127 | // } 128 | // test.deepEqual(motherOfAllDocuments.int, object.int); 129 | // test.deepEqual(motherOfAllDocuments.float, object.float); 130 | // test.deepEqual(motherOfAllDocuments.regexp, object.regexp); 131 | // test.deepEqual(motherOfAllDocuments.boolean, object.boolean); 132 | // test.deepEqual(motherOfAllDocuments.long.toNumber(), object.long); 133 | // test.deepEqual(motherOfAllDocuments.where, object.where); 134 | // test.deepEqual(motherOfAllDocuments.dbref.oid.toHexString(), object.dbref.oid.toHexString()); 135 | // test.deepEqual(motherOfAllDocuments.dbref.namespace, object.dbref.namespace); 136 | // test.deepEqual(motherOfAllDocuments.dbref.db, object.dbref.db); 137 | // test.deepEqual(motherOfAllDocuments.minkey, object.minkey); 138 | // test.deepEqual(motherOfAllDocuments.maxkey, object.maxkey); 139 | // test.done(); 140 | // } 141 | 142 | // /** 143 | // * Should make sure that arrays by themselves can be either be properly 144 | // * serialized and deserialized, or that serializing throws an error 145 | // */ 146 | // exports.shouldCorrectlyDeserializeArray = function(test) { 147 | // var testArray = [1,2,3]; 148 | // var data = null; 149 | // 150 | // try { 151 | // data = bson.serialize(testArray, true, false, false); 152 | // test.ok(false); 153 | // } catch(e) { 154 | // } 155 | // 156 | // try { 157 | // data = bson.serialize(testArray, true, false, false); 158 | // test.ok(false); 159 | // } catch(e) { 160 | // test.done(); 161 | // } 162 | // }; 163 | // 164 | // /** 165 | // * @ignore 166 | // */ 167 | // exports.shouldCorrectlySerializeUsingTypedArray = function(test) { 168 | // var motherOfAllDocuments = { 169 | // 'string': 'hello', 170 | // 'array': [1,2,3], 171 | // 'hash': {'a':1, 'b':2}, 172 | // 'date': new Date(), 173 | // 'oid': new ObjectID(), 174 | // 'binary': new Binary(new Buffer("hello")), 175 | // 'int': 42, 176 | // 'float': 33.3333, 177 | // 'regexp': /regexp/, 178 | // 'boolean': true, 179 | // 'long': Long.fromNumber(100), 180 | // 'where': new Code('this.a > i', {i:1}), 181 | // 'dbref': new DBRef('namespace', new ObjectID(), 'integration_tests_'), 182 | // 'minkey': new MinKey(), 183 | // 'maxkey': new MaxKey() 184 | // } 185 | // 186 | // // Let's serialize it 187 | // var data = bson.serialize(motherOfAllDocuments, true, false, false); 188 | // // And deserialize it again 189 | // var object = bson.deserialize(data); 190 | // // Asserts 191 | // test.equal(motherOfAllDocuments.string, object.string); 192 | // test.deepEqual(motherOfAllDocuments.array, object.array); 193 | // test.deepEqual(motherOfAllDocuments.date, object.date); 
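// Note on the assertions below: `long` is compared via toNumber() because
// deserialization promotes Longs that fit in a JS number to plain numbers by
// default, and `oid`/`dbref.oid` are compared via toHexString() so the check
// is against the 12-byte value rather than wrapper-object identity.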
194 | // test.deepEqual(motherOfAllDocuments.oid.toHexString(), object.oid.toHexString()); 195 | // test.deepEqual(motherOfAllDocuments.binary.length(), object.binary.length()); 196 | // // Assert the values of the binary 197 | // for(var i = 0; i < motherOfAllDocuments.binary.length(); i++) { 198 | // test.equal(motherOfAllDocuments.binary.value[i], object.binary[i]); 199 | // } 200 | // test.deepEqual(motherOfAllDocuments.int, object.int); 201 | // test.deepEqual(motherOfAllDocuments.float, object.float); 202 | // test.deepEqual(motherOfAllDocuments.regexp, object.regexp); 203 | // test.deepEqual(motherOfAllDocuments.boolean, object.boolean); 204 | // test.deepEqual(motherOfAllDocuments.long.toNumber(), object.long); 205 | // test.deepEqual(motherOfAllDocuments.where, object.where); 206 | // test.deepEqual(motherOfAllDocuments.dbref.oid.toHexString(), object.dbref.oid.toHexString()); 207 | // test.deepEqual(motherOfAllDocuments.dbref.namespace, object.dbref.namespace); 208 | // test.deepEqual(motherOfAllDocuments.dbref.db, object.dbref.db); 209 | // test.deepEqual(motherOfAllDocuments.minkey, object.minkey); 210 | // test.deepEqual(motherOfAllDocuments.maxkey, object.maxkey); 211 | // test.done(); 212 | // } 213 | -------------------------------------------------------------------------------- /test/node/bson_compliance_test.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | var BSON = require('../..'), 4 | Code = BSON.Code, 5 | Binary = BSON.Binary, 6 | Timestamp = BSON.Timestamp, 7 | Long = BSON.Long, 8 | ObjectID = BSON.ObjectID, 9 | DBRef = BSON.DBRef, 10 | MinKey = BSON.MinKey, 11 | MaxKey = BSON.MaxKey, 12 | expect = require('chai').expect; 13 | 14 | var createBSON = require('../utils'); 15 | 16 | describe('BSON Compliance', function() { 17 | /** 18 | * @ignore 19 | */ 20 | it('Pass all corrupt BSON scenarios ./compliance/corrupt.json', function(done) { 21 | // Read and parse the json file 22 | var scenarios = require(__dirname + '/compliance/corrupt'); 23 | 24 | // Create a new BSON instance 25 | var bson = createBSON(); 26 | 27 | for (var i = 0; i < scenarios.documents.length; i++) { 28 | var doc = scenarios.documents[i]; 29 | if (doc.skip) continue; 30 | 31 | try { 32 | // Create a buffer containing the payload 33 | var buffer = new Buffer(doc.encoded, 'hex'); 34 | // Attempt to deserialize 35 | bson.deserialize(buffer); 36 | expect(false).to.be.ok; 37 | } catch (err) { 38 | expect(true).to.be.ok; 39 | } 40 | } 41 | 42 | done(); 43 | }); 44 | 45 | /** 46 | * @ignore 47 | */ 48 | it('Pass all valid BSON serialization scenarios ./compliance/valid.json', function(done) { 49 | // Read and parse the json file 50 | var scenarios = require(__dirname + '/compliance/valid'); 51 | 52 | // Create a new BSON instance 53 | var bson = createBSON(); 54 | 55 | // Translate extended json to correctly typed doc 56 | var translate = function(doc, object) { 57 | for (var name in doc) { 58 | if ( 59 | typeof doc[name] === 'number' || 60 | typeof doc[name] === 'string' || 61 | typeof doc[name] === 'boolean' 62 | ) { 63 | object[name] = doc[name]; 64 | } else if (Array.isArray(doc[name])) { 65 | object[name] = translate(doc[name], []); 66 | } else if (doc[name]['$numberLong']) { 67 | object[name] = Long.fromString(doc[name]['$numberLong']); 68 | } else if (doc[name]['$undefined']) { 69 | object[name] = null; 70 | } else if (doc[name]['$date']) { 71 | var date = new Date(); 72 | date.setTime(parseInt(doc[name]['$date']['$numberLong'], 10)); 73 
| object[name] = date; 74 | } else if (doc[name]['$regexp']) { 75 | object[name] = new RegExp(doc[name]['$regexp'], doc[name]['$options'] || ''); 76 | } else if (doc[name]['$oid']) { 77 | object[name] = new ObjectID(doc[name]['$oid']); 78 | } else if (doc[name]['$binary']) { 79 | object[name] = new Binary(doc[name]['$binary'], doc[name]['$type'] || 1); 80 | } else if (doc[name]['$timestamp']) { 81 | object[name] = Timestamp.fromBits( 82 | parseInt(doc[name]['$timestamp']['t'], 10), 83 | parseInt(doc[name]['$timestamp']['i']) 84 | ); 85 | } else if (doc[name]['$ref']) { 86 | object[name] = new DBRef(doc[name]['$ref'], doc[name]['$id'], doc[name]['$db']); 87 | } else if (doc[name]['$minKey']) { 88 | object[name] = new MinKey(); 89 | } else if (doc[name]['$maxKey']) { 90 | object[name] = new MaxKey(); 91 | } else if (doc[name]['$code']) { 92 | object[name] = new Code(doc[name]['$code'], doc[name]['$scope'] || {}); 93 | } else if (doc[name] != null && typeof doc[name] === 'object') { 94 | object[name] = translate(doc[name], {}); 95 | } 96 | } 97 | 98 | return object; 99 | }; 100 | 101 | // Iterate over all the results 102 | scenarios.documents.forEach(function(doc) { 103 | if (doc.skip) return; 104 | // Create a buffer containing the payload 105 | var expectedData = new Buffer(doc.encoded, 'hex'); 106 | // Get the expectedDocument 107 | var expectedDocument = translate(doc.document, {}); 108 | // Serialize to buffer 109 | var buffer = bson.serialize(expectedDocument); 110 | // Validate the output 111 | expect(expectedData.toString('hex')).to.equal(buffer.toString('hex')); 112 | // Attempt to deserialize 113 | var object = bson.deserialize(buffer, { promoteLongs: false }); 114 | // // Validate the object 115 | expect(JSON.stringify(expectedDocument)).to.deep.equal(JSON.stringify(object)); 116 | }); 117 | 118 | done(); 119 | }); 120 | }); 121 | -------------------------------------------------------------------------------- /test/node/bson_corpus_tests.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | var BSON = require('../..'), 4 | Decimal128 = BSON.Decimal128, 5 | fs = require('fs'), 6 | expect = require('chai').expect, 7 | path = require('path'), 8 | createBSON = require('../utils'), 9 | bson = createBSON(); 10 | 11 | var deserializeOptions = { 12 | bsonRegExp: true, 13 | promoteLongs: true, 14 | promoteValues: false 15 | }; 16 | 17 | var serializeOptions = { 18 | ignoreUndefined: false 19 | }; 20 | 21 | // tests from the corpus that we need to skip, and explanations why 22 | 23 | var skip = { 24 | 'NaN with payload': 25 | 'passing this would require building a custom type to store the NaN payload data.', 26 | 'flags not alphabetized': 'Un-skip when NODE-3282 is fixed in JS BSON', 27 | }; 28 | 29 | function findScenarios() { 30 | return fs 31 | .readdirSync(path.join(__dirname, 'specs/bson-corpus')) 32 | .filter(x => x.indexOf('json') !== -1) 33 | .map(x => JSON.parse(fs.readFileSync(path.join(__dirname, 'specs/bson-corpus', x), 'utf8'))); 34 | } 35 | 36 | describe('BSON Corpus', function() { 37 | findScenarios().forEach(scenario => { 38 | describe(scenario.description, function() { 39 | if (scenario.valid) { 40 | describe('valid', function() { 41 | scenario.valid.forEach(v => { 42 | if (skip.hasOwnProperty(v.description)) { 43 | it.skip(v.description, () => {}); 44 | return; 45 | } 46 | 47 | it(v.description, function() { 48 | var cB = new Buffer(v.canonical_bson, 'hex'); 49 | if (v.degenerate_bson) var dB = new 
Buffer(v.degenerate_bson, 'hex'); 50 | if (v.converted_bson) var convB = new Buffer(v.converted_bson, 'hex'); 51 | 52 | var roundTripped = bson.serialize( 53 | bson.deserialize(cB, deserializeOptions), 54 | serializeOptions 55 | ); 56 | 57 | if (scenario.deprecated) expect(convB).to.deep.equal(roundTripped); 58 | else expect(cB).to.deep.equal(roundTripped); 59 | 60 | if (dB) { 61 | expect(cB).to.deep.equal( 62 | bson.serialize(bson.deserialize(dB, deserializeOptions), serializeOptions) 63 | ); 64 | } 65 | }); 66 | }); 67 | }); 68 | } 69 | 70 | if (scenario.decodeErrors) { 71 | describe('decodeErrors', function() { 72 | scenario.decodeErrors.forEach(d => { 73 | it(d.description, function() { 74 | var B = new Buffer(d.bson, 'hex'); 75 | expect(() => bson.deserialize(B, deserializeOptions)).to.throw(); 76 | }); 77 | }); 78 | }); 79 | } 80 | 81 | if (scenario.parseErrors) { 82 | describe('parseErrors', function() { 83 | scenario.parseErrors.forEach(p => { 84 | it(p.description, function() { 85 | expect(() => Decimal128.fromString(scenario.string)).to.throw(); 86 | }); 87 | }); 88 | }); 89 | } 90 | }); 91 | }); 92 | }); 93 | -------------------------------------------------------------------------------- /test/node/compliance/corrupt.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | var data = { 4 | description: 'Corrupted BSON', 5 | documents: [ 6 | { 7 | encoded: '0400000000', 8 | error: 'basic' 9 | }, 10 | { 11 | encoded: '0500000001', 12 | error: 'basic' 13 | }, 14 | { 15 | encoded: '05000000', 16 | error: 'basic' 17 | }, 18 | { 19 | encoded: '0700000002610078563412', 20 | error: 'basic' 21 | }, 22 | { 23 | encoded: '090000001061000500', 24 | error: 'basic' 25 | }, 26 | { 27 | encoded: '00000000000000000000', 28 | error: 'basic' 29 | }, 30 | { 31 | encoded: '1300000002666f6f00040000006261720000', 32 | error: 'basic' 33 | }, 34 | { 35 | encoded: '1800000003666f6f000f0000001062617200ffffff7f0000', 36 | error: 'basic' 37 | }, 38 | { 39 | encoded: '1500000003666f6f000c0000000862617200010000', 40 | error: 'basic' 41 | }, 42 | { 43 | encoded: '1c00000003666f6f001200000002626172000500000062617a000000', 44 | error: 'basic', 45 | skip: true 46 | }, 47 | { 48 | encoded: '1000000002610004000000616263ff00', 49 | error: 'string is not null-terminated' 50 | }, 51 | { 52 | encoded: '0c0000000200000000000000', 53 | error: 'bad_string_length' 54 | }, 55 | { 56 | encoded: '120000000200ffffffff666f6f6261720000', 57 | error: 'bad_string_length' 58 | }, 59 | { 60 | encoded: '0c0000000e00000000000000', 61 | error: 'bad_string_length' 62 | }, 63 | { 64 | encoded: '120000000e00ffffffff666f6f6261720000', 65 | error: 'bad_string_length' 66 | }, 67 | { 68 | encoded: '180000000c00fa5bd841d6585d9900', 69 | error: '' 70 | }, 71 | { 72 | encoded: '1e0000000c00ffffffff666f6f626172005259b56afa5bd841d6585d9900', 73 | error: 'bad_string_length' 74 | }, 75 | { 76 | encoded: '0c0000000d00000000000000', 77 | error: 'bad_string_length' 78 | }, 79 | { 80 | encoded: '0c0000000d00ffffffff0000', 81 | error: 'bad_string_length' 82 | }, 83 | { 84 | encoded: '1c0000000f001500000000000000000c000000020001000000000000', 85 | error: 'bad_string_length' 86 | }, 87 | { 88 | encoded: '1c0000000f0015000000ffffffff000c000000020001000000000000', 89 | error: 'bad_string_length' 90 | }, 91 | { 92 | encoded: '1c0000000f001500000001000000000c000000020000000000000000', 93 | error: 'bad_string_length' 94 | }, 95 | { 96 | encoded: '1c0000000f001500000001000000000c0000000200ffffffff000000', 97 
| error: 'bad_string_length' 98 | }, 99 | { 100 | encoded: '0100000000', 101 | error: 102 | "An object size that's too small to even include the object size, but is correctly encoded, along with a correct EOO (and no data)" 103 | }, 104 | { 105 | encoded: '05000000', 106 | error: 'One object, missing the EOO at the end' 107 | }, 108 | { 109 | encoded: '05000000ff', 110 | error: "One object, sized correctly, with a spot for an EOO, but the EOO isn't 0x00" 111 | } 112 | ] 113 | }; 114 | 115 | module.exports = data; 116 | -------------------------------------------------------------------------------- /test/node/compliance/valid.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | var data = { 4 | description: 'Valid bson documents', 5 | documents: [ 6 | { 7 | encoded: '160000000268656c6c6f0006000000776f726c640000', 8 | document: { 9 | hello: 'world' 10 | } 11 | }, 12 | { 13 | encoded: 14 | 'ab0100000268656c6c6f0006000000776f726c640008626f6f6c65616e000110696e743332000200000012696e743634003868d9f60400000001646f75626c65001f85eb51b81e09400562696e6172790020000000044667414141414a6f5a5778736277414741414141643239796247514141413d3d0964617465008805c3fb4d0100001174696d657374616d700001000000010000000b646174615f7265676578006100730007646174615f6f69640011111111111111111111111103646174615f72656600250000000224726566000b000000636f6c6c656374696f6e000224696400020000003100000a646174615f756e646566696e656400ff646174615f6d696e6b6579007f646174615f6d61786b6579000f636f646500210000000d00000066756e6374696f6e28297b7d000c000000106100010000000003656d626564646564002c0000000f636f646500210000000d00000066756e6374696f6e28297b7d000c00000010610001000000000004617272617900300000000f3000210000000d00000066756e6374696f6e28297b7d000c0000001061000100000000103100010000000000', 15 | document: { 16 | hello: 'world', 17 | boolean: true, 18 | int32: 2, 19 | int64: { 20 | $numberLong: '21321312312' 21 | }, 22 | double: 3.14, 23 | binary: { 24 | $binary: 'FgAAAAJoZWxsbwAGAAAAd29ybGQAAA==', 25 | $type: 4 26 | }, 27 | date: { 28 | $date: { $numberLong: '1434447971720' } 29 | }, 30 | timestamp: { 31 | $timestamp: { t: '1', i: '1' } 32 | }, 33 | data_regex: { 34 | $regexp: 'a', 35 | $options: 'g' 36 | }, 37 | data_oid: { 38 | $oid: '111111111111111111111111' 39 | }, 40 | data_ref: { 41 | $ref: 'collection', 42 | $id: '1' 43 | }, 44 | data_undefined: { 45 | $undefined: true 46 | }, 47 | data_minkey: { 48 | $minKey: 1 49 | }, 50 | data_maxkey: { 51 | $maxKey: 1 52 | }, 53 | code: { 54 | $code: 'function(){}', 55 | $scope: { a: 1 } 56 | }, 57 | embedded: { 58 | code: { 59 | $code: 'function(){}', 60 | $scope: { a: 1 } 61 | } 62 | }, 63 | array: [{ $code: 'function(){}', $scope: { a: 1 } }, 1] 64 | } 65 | } 66 | ] 67 | }; 68 | 69 | module.exports = data; 70 | -------------------------------------------------------------------------------- /test/node/data/mongodump.airpair.tags.bson: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mongodb-js/bson-ext/c1284d1a473261be57342cc70b8ab3c235180032/test/node/data/mongodump.airpair.tags.bson -------------------------------------------------------------------------------- /test/node/data/test.bson: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mongodb-js/bson-ext/c1284d1a473261be57342cc70b8ab3c235180032/test/node/data/test.bson -------------------------------------------------------------------------------- 
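The `.bson` fixtures above are mongodump-style files: whole BSON documents laid end to end, each starting with its own little-endian int32 total length (the same framing the corpus `bsonview` script's dump_file routine walks, further below). A minimal sketch of splitting such a dump into per-document buffers -- the helper name is illustrative, not part of this test suite:

    var fs = require('fs');

    function splitBsonDump(path) {
      var data = fs.readFileSync(path);
      var docs = [];
      var offset = 0;
      while (offset < data.length) {
        // The first 4 bytes of each document encode its total size, self-inclusive.
        var size = data.readInt32LE(offset);
        docs.push(data.slice(offset, offset + size));
        offset += size;
      }
      return docs;
    }

Each entry in `docs` can then be handed to `bson.deserialize()` on its own. The same arithmetic shows up in the serializeWithBufferAndIndex tests below: each `{ a, b }` document occupies 23 bytes, and the `+ 1` there steps past the returned index of the last byte written.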
/test/node/data/test_gs_weird_bug.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mongodb-js/bson-ext/c1284d1a473261be57342cc70b8ab3c235180032/test/node/data/test_gs_weird_bug.png -------------------------------------------------------------------------------- /test/node/detect_cyclic_dep_tests.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | var createBSON = require('../utils'), 4 | expect = require('chai').expect; 5 | 6 | describe('Cyclic Dependencies', function() { 7 | /** 8 | * @ignore 9 | */ 10 | it('Should correctly detect cyclic dependency in nested objects', function(done) { 11 | // Force cyclic dependency 12 | var a = { b: {} }; 13 | a.b.c = a; 14 | try { 15 | // Attempt to serialize cyclic dependency 16 | createBSON().serialize(a); 17 | } catch (err) { 18 | expect('cyclic dependency detected').to.equal(err.message); 19 | } 20 | 21 | done(); 22 | }); 23 | 24 | /** 25 | * @ignore 26 | */ 27 | it('Should correctly detect cyclic dependency in deeply nested objects', function(done) { 28 | // Force cyclic dependency 29 | var a = { b: { c: [{ d: {} }] } }; 30 | a.b.c[0].d.a = a; 31 | 32 | try { 33 | // Attempt to serialize cyclic dependency 34 | createBSON().serialize(a); 35 | } catch (err) { 36 | expect('cyclic dependency detected').to.equal(err.message); 37 | } 38 | 39 | done(); 40 | }); 41 | 42 | /** 43 | * @ignore 44 | */ 45 | it('Should correctly detect cyclic dependency in nested array', function(done) { 46 | // Force cyclic dependency 47 | var a = { b: {} }; 48 | a.b.c = [a]; 49 | try { 50 | // Attempt to serialize cyclic dependency 51 | createBSON().serialize(a); 52 | } catch (err) { 53 | expect('cyclic dependency detected').to.equal(err.message); 54 | } 55 | 56 | done(); 57 | }); 58 | }); 59 | -------------------------------------------------------------------------------- /test/node/map_tests.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | var BSON = require('../..'); 4 | var M = BSON.Map; 5 | var createBSON = require('../utils'); 6 | const expect = require('chai').expect; 7 | 8 | describe('Map', function() { 9 | /** 10 | * @ignore 11 | */ 12 | it('should correctly exercise the map', function(done) { 13 | var m = new M([['a', 1], ['b', 2]]); 14 | expect(m.has('a')).to.be.ok; 15 | expect(m.has('b')).to.be.ok; 16 | expect(1).to.equal(m.get('a')); 17 | expect(2).to.equal(m.get('b')); 18 | expect(m.set('a', 3) === m).to.be.ok; 19 | expect(m.has('a')).to.be.ok; 20 | expect(3).to.equal(m.get('a')); 21 | 22 | // Get the values 23 | var iterator = m.values(); 24 | expect(3).to.equal(iterator.next().value); 25 | expect(2).to.equal(iterator.next().value); 26 | expect(true).to.equal(iterator.next().done); 27 | 28 | // Get the entries 29 | iterator = m.entries(); 30 | expect(['a', 3]).to.deep.equal(iterator.next().value); 31 | expect(['b', 2]).to.deep.equal(iterator.next().value); 32 | expect(true).to.deep.equal(iterator.next().done); 33 | 34 | // Get the keys 35 | iterator = m.keys(); 36 | expect('a').to.deep.equal(iterator.next().value); 37 | expect('b').to.deep.equal(iterator.next().value); 38 | expect(true).to.deep.equal(iterator.next().done); 39 | 40 | // Collect values 41 | var values = []; 42 | // Get entries forEach 43 | m.forEach(function(value, key, map) { 44 | expect(value != null).to.be.ok; 45 | expect(key != null).to.be.ok; 46 | expect(map != null).to.be.ok; 47 | expect(m ===
this).to.be.ok; 48 | values.push([key, value]); 49 | }, m); 50 | 51 | expect([['a', 3], ['b', 2]]).to.deep.equal(values); 52 | 53 | // Modify the state 54 | expect(true).to.equal(m.delete('a')); 55 | m.set('c', 5); 56 | m.set('a', 7); 57 | 58 | // Validate order is preserved 59 | // Get the keys 60 | iterator = m.keys(); 61 | expect('b').to.deep.equal(iterator.next().value); 62 | expect('c').to.deep.equal(iterator.next().value); 63 | expect('a').to.deep.equal(iterator.next().value); 64 | expect(true).to.deep.equal(iterator.next().done); 65 | 66 | // Get the entries 67 | iterator = m.entries(); 68 | expect(['b', 2]).to.deep.equal(iterator.next().value); 69 | expect(['c', 5]).to.deep.equal(iterator.next().value); 70 | expect(['a', 7]).to.deep.equal(iterator.next().value); 71 | expect(true).to.deep.equal(iterator.next().done); 72 | 73 | // Get the values 74 | iterator = m.values(); 75 | expect(2).to.equal(iterator.next().value); 76 | expect(5).to.equal(iterator.next().value); 77 | expect(7).to.equal(iterator.next().value); 78 | expect(true).to.equal(iterator.next().done); 79 | done(); 80 | }); 81 | 82 | /** 83 | * @ignore 84 | */ 85 | it('should serialize a map', function(done) { 86 | // Serialize top level map only 87 | var m = new M([['a', 1], ['b', 2]]); 88 | var bson = createBSON(); 89 | // Serialize the map 90 | var data = bson.serialize(m, false, true); 91 | // Deserialize the data 92 | var object = bson.deserialize(data); 93 | expect({ a: 1, b: 2 }).to.deep.equal(object); 94 | 95 | // Serialize nested maps 96 | var m1 = new M([['a', 1], ['b', 2]]); 97 | m = new M([['c', m1]]); 98 | // Serialize the map 99 | data = bson.serialize(m, false, true); 100 | // Deserialize the data 101 | object = bson.deserialize(data); 102 | expect({ c: { a: 1, b: 2 } }).to.deep.equal(object); 103 | done(); 104 | 105 | // Serialize top level map only 106 | m = new M([['1', 1], ['0', 2]]); 107 | bson = createBSON(); 108 | // Serialize the map, validating that the order in the resulting BSON is preserved 109 | data = bson.serialize(m, false, true); 110 | expect('13000000103100010000001030000200000000').to.equal(data.toString('hex')); 111 | }); 112 | 113 | /** 114 | * @ignore 115 | */ 116 | it('should not crash due to object that looks like map', function(done) { 117 | // Serialize top level map only 118 | var m = { entries: 'test' }; 119 | var bson = createBSON(); 120 | // Serialize the map 121 | var data = bson.serialize(m, false, true); 122 | // Deserialize the data 123 | var object = bson.deserialize(data); 124 | expect(m).to.deep.equal(object); 125 | done(); 126 | }); 127 | }); 128 | -------------------------------------------------------------------------------- /test/node/null_byte_test.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | const expect = require('chai').expect; 4 | const BSONLib = require('../../lib/index'); 5 | const createBSON = require('../utils'); 6 | const BSON = Object.assign({}, BSONLib, Object.getPrototypeOf(createBSON())); 7 | 8 | describe('null byte handling in serializing', () => { 9 | it('should throw when null byte in BSON Field name within a root document', () => { 10 | expect(() => BSON.serialize({ 'a\x00b': 1 })).to.throw(/null bytes/); 11 | }); 12 | 13 | it('should throw when null byte in BSON Field name within a sub-document', () => { 14 | expect(() => BSON.serialize({ a: { 'a\x00b': 1 } })).to.throw(/null bytes/); 15 | }); 16 | 17 | it('should throw when null byte in Pattern for a regular expression', () => { 18 
| // eslint-disable-next-line no-control-regex 19 | expect(() => BSON.serialize({ a: new RegExp('a\x00b') })).to.throw(/null bytes/); 20 | expect(() => BSON.serialize({ a: new BSON.BSONRegExp('a\x00b') })).to.throw(/null bytes/); 21 | }); 22 | 23 | it('should throw when null byte in Flags/options for a regular expression', () => { 24 | expect(() => BSON.serialize({ a: new BSON.BSONRegExp('a', 'i\x00m') })).to.throw(/null bytes/); 25 | }); 26 | }) 27 | -------------------------------------------------------------------------------- /test/node/number_test.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | const expect = require('chai').expect; 4 | 5 | const BSON = require('../..'); 6 | 7 | const BSON_DOUBLE_TYPE = 0x01; 8 | const BSON_INT32_TYPE = 0x10; 9 | 10 | const INT32_MAX = 2147483647; 11 | const INT32_MIN = -2147483648; 12 | 13 | const nanPayloadBuffer = Buffer.from('120000000000F87F', 'hex'); 14 | const nanPayloadDV = new DataView( 15 | nanPayloadBuffer.buffer, 16 | nanPayloadBuffer.byteOffset, 17 | nanPayloadBuffer.byteLength 18 | ); 19 | const nanPayloadDouble = nanPayloadDV.getFloat64(0, true); 20 | 21 | describe('serializing javascript numbers', () => { 22 | it('serialize a number that exceeds int32.max as a double', () => { 23 | const document = { a: INT32_MAX + 1 }; 24 | const bytes = BSON.serialize(document); 25 | 26 | expect(bytes[4]).to.equal(BSON_DOUBLE_TYPE); 27 | 28 | const returnedDocument = BSON.deserialize(bytes); 29 | expect(returnedDocument).to.deep.equal(document); 30 | }); 31 | 32 | it('serialize a number that negatively exceeds int32.min as a double', () => { 33 | const document = { a: INT32_MIN - 1 }; 34 | const bytes = BSON.serialize(document); 35 | 36 | expect(bytes[4]).to.equal(BSON_DOUBLE_TYPE); 37 | 38 | const returnedDocument = BSON.deserialize(bytes); 39 | expect(returnedDocument).to.deep.equal(document); 40 | }); 41 | 42 | it('serialize a number that is exactly int32.max as int32', () => { 43 | const document = { a: INT32_MAX }; 44 | const bytes = BSON.serialize(document); 45 | 46 | expect(bytes[4]).to.equal(BSON_INT32_TYPE); 47 | 48 | const returnedDocument = BSON.deserialize(bytes); 49 | expect(returnedDocument).to.deep.equal(document); 50 | }); 51 | 52 | it('serialize a number that is exactly int32.min as int32', () => { 53 | const document = { a: INT32_MIN }; 54 | const bytes = BSON.serialize(document); 55 | 56 | expect(bytes[4]).to.equal(BSON_INT32_TYPE); 57 | 58 | const returnedDocument = BSON.deserialize(bytes); 59 | expect(returnedDocument).to.deep.equal(document); 60 | }); 61 | 62 | it('serialize a number that has a fractional component as double', () => { 63 | const document = { a: 2.3 }; 64 | const bytes = BSON.serialize(document); 65 | 66 | expect(bytes[4]).to.equal(BSON_DOUBLE_TYPE); 67 | 68 | const returnedDocument = BSON.deserialize(bytes); 69 | expect(returnedDocument).to.deep.equal(document); 70 | }); 71 | 72 | it.skip('preserve -0.0 through round trip -- TODO(NODE-4335)', () => { 73 | // TODO(NODE-4335): -0 be serialized as double 74 | const document = { a: -0.0 }; 75 | const bytes = BSON.serialize(document); 76 | 77 | expect(bytes[4]).to.equal(BSON_DOUBLE_TYPE); 78 | 79 | const returnedDocument = BSON.deserialize(bytes); 80 | expect(returnedDocument).to.deep.equal(document); 81 | expect(Object.is(returnedDocument.a, -0.0)).to.be.true; 82 | }); 83 | 84 | it('preserve Double(-0.0) through round trip -- TODO(NODE-4335)', () => { 85 | // TODO(NODE-4335): -0 be serialized as double 
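// Background for this pair of -0.0 tests: JavaScript's === cannot tell -0.0
// from 0, so the serializer's "is this an integral value in int32 range" check
// routes a bare -0.0 into an int32 and the sign bit is lost (the "converts
// -0.0 to an int32" test below pins down that behavior). Wrapping the value in
// BSON.Double forces the double type byte, which is why the sign survives
// this round trip.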
86 | const document = { a: new BSON.Double(-0.0) }; 87 | const bytes = BSON.serialize(document); 88 | 89 | expect(bytes[4]).to.equal(BSON_DOUBLE_TYPE); 90 | 91 | const returnedDocument = BSON.deserialize(bytes, { promoteValues: false }); 92 | expect(returnedDocument).to.deep.equal(document); 93 | expect(Object.is(returnedDocument.a.valueOf(), -0.0)).to.be.true; 94 | }); 95 | 96 | it('converts -0.0 to an int32 -- TODO(NODE-4335)', () => { 97 | // TODO(NODE-4335): -0 be serialized as double 98 | // This test is demonstrating the behavior of -0 being serialized as an int32 something we do NOT want to unintentionally change, but may want to change in the future, which the above ticket serves to track. 99 | const document = { a: -0.0 }; 100 | const bytes = BSON.serialize(document); 101 | 102 | expect(bytes[4]).to.equal(BSON_INT32_TYPE); 103 | 104 | const returnedDocument = BSON.deserialize(bytes); 105 | expect(returnedDocument).to.have.property('a', 0); 106 | expect(Object.is(returnedDocument.a, -0.0)).to.be.false; 107 | }); 108 | 109 | it('preserve NaN through round trip', () => { 110 | const document = { a: NaN }; 111 | const bytes = BSON.serialize(document); 112 | 113 | expect(bytes[4]).to.equal(BSON_DOUBLE_TYPE); 114 | 115 | const returnedDocument = BSON.deserialize(bytes); 116 | expect(returnedDocument).to.deep.equal(document); 117 | }); 118 | 119 | it('preserve NaN with payload through round trip', () => { 120 | const document = { a: nanPayloadDouble }; 121 | const bytes = BSON.serialize(document); 122 | 123 | expect(bytes[4]).to.equal(BSON_DOUBLE_TYPE); 124 | expect(bytes.subarray(7, 15)).to.deep.equal(nanPayloadBuffer); 125 | 126 | const returnedDocument = BSON.deserialize(bytes); 127 | expect(returnedDocument).to.deep.equal(document); 128 | }); 129 | 130 | it('preserve Infinity through round trip', () => { 131 | const document = { a: Infinity }; 132 | const bytes = BSON.serialize(document); 133 | 134 | expect(bytes[4]).to.equal(BSON_DOUBLE_TYPE); 135 | 136 | const returnedDocument = BSON.deserialize(bytes); 137 | expect(returnedDocument).to.deep.equal(document); 138 | }); 139 | 140 | it('preserve -Infinity through round trip', () => { 141 | const document = { a: -Infinity }; 142 | const bytes = BSON.serialize(document); 143 | 144 | expect(bytes[4]).to.equal(BSON_DOUBLE_TYPE); 145 | 146 | const returnedDocument = BSON.deserialize(bytes); 147 | expect(returnedDocument).to.deep.equal(document); 148 | }); 149 | }); 150 | -------------------------------------------------------------------------------- /test/node/object_id_tests.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | var BSON = require('../..'); 4 | var util = require('util'); 5 | var ObjectId = BSON.ObjectID; 6 | const expect = require('chai').expect; 7 | 8 | describe('ObjectId', function() { 9 | /** 10 | * @ignore 11 | */ 12 | it('should correctly handle objectId timestamps', function(done) { 13 | // var test_number = {id: ObjectI()}; 14 | var a = ObjectId.createFromTime(1); 15 | expect(new Buffer([0, 0, 0, 1])).to.deep.equal(a.id.slice(0, 4)); 16 | expect(1000).to.equal(a.getTimestamp().getTime()); 17 | 18 | var b = new ObjectId(); 19 | b.generationTime = 1; 20 | expect(new Buffer([0, 0, 0, 1])).to.deep.equal(b.id.slice(0, 4)); 21 | expect(1).to.equal(b.generationTime); 22 | expect(1000).to.equal(b.getTimestamp().getTime()); 23 | 24 | done(); 25 | }); 26 | 27 | /** 28 | * @ignore 29 | */ 30 | it('should correctly create ObjectId from uppercase hexstring', 
function(done) { 31 | var a = 'AAAAAAAAAAAAAAAAAAAAAAAA'; 32 | var b = new ObjectId(a); 33 | var c = b.equals(a); // => false 34 | expect(true).to.equal(c); 35 | 36 | a = 'aaaaaaaaaaaaaaaaaaaaaaaa'; 37 | b = new ObjectId(a); 38 | c = b.equals(a); // => true 39 | expect(true).to.equal(c); 40 | expect(a).to.equal(b.toString()); 41 | 42 | done(); 43 | }); 44 | 45 | /** 46 | * @ignore 47 | */ 48 | it('should correctly create ObjectId from Buffer', function(done) { 49 | if (!Buffer.from) return done(); 50 | var a = 'AAAAAAAAAAAAAAAAAAAAAAAA'; 51 | var b = new ObjectId(new Buffer(a, 'hex')); 52 | var c = b.equals(a); // => false 53 | expect(true).to.equal(c); 54 | 55 | a = 'aaaaaaaaaaaaaaaaaaaaaaaa'; 56 | b = new ObjectId(new Buffer(a, 'hex')); 57 | c = b.equals(a); // => true 58 | expect(a).to.equal(b.toString()); 59 | expect(true).to.equal(c); 60 | done(); 61 | }); 62 | 63 | /** 64 | * @ignore 65 | */ 66 | it('should correctly allow for node.js inspect to work with ObjectId', function(done) { 67 | var a = 'AAAAAAAAAAAAAAAAAAAAAAAA'; 68 | var b = new ObjectId(a); 69 | util.inspect(b); 70 | 71 | // var c = b.equals(a); // => false 72 | // expect(true).to.equal(c); 73 | // 74 | // var a = 'aaaaaaaaaaaaaaaaaaaaaaaa'; 75 | // var b = new ObjectId(a); 76 | // var c = b.equals(a); // => true 77 | // expect(true).to.equal(c); 78 | // expect(a).to.equal(b.toString()); 79 | 80 | done(); 81 | }); 82 | }); 83 | -------------------------------------------------------------------------------- /test/node/promote_values_test.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | var Buffer = require('buffer').Buffer, 4 | BSON = require('../..'), 5 | Int32 = BSON.Int32, 6 | Double = BSON.Double, 7 | BinaryParser = require('../binary_parser').BinaryParser, 8 | expect = require('chai').expect; 9 | 10 | var createBSON = require('../utils'); 11 | 12 | describe('promote values', function() { 13 | /** 14 | * @ignore 15 | */ 16 | it('Should Correctly Deserialize object with all wrapper types', function(done) { 17 | var bytes = [ 18 | 26, 19 | 1, 20 | 0, 21 | 0, 22 | 7, 23 | 95, 24 | 105, 25 | 100, 26 | 0, 27 | 161, 28 | 190, 29 | 98, 30 | 75, 31 | 118, 32 | 169, 33 | 3, 34 | 0, 35 | 0, 36 | 3, 37 | 0, 38 | 0, 39 | 4, 40 | 97, 41 | 114, 42 | 114, 43 | 97, 44 | 121, 45 | 0, 46 | 26, 47 | 0, 48 | 0, 49 | 0, 50 | 16, 51 | 48, 52 | 0, 53 | 1, 54 | 0, 55 | 0, 56 | 0, 57 | 16, 58 | 49, 59 | 0, 60 | 2, 61 | 0, 62 | 0, 63 | 0, 64 | 16, 65 | 50, 66 | 0, 67 | 3, 68 | 0, 69 | 0, 70 | 0, 71 | 0, 72 | 2, 73 | 115, 74 | 116, 75 | 114, 76 | 105, 77 | 110, 78 | 103, 79 | 0, 80 | 6, 81 | 0, 82 | 0, 83 | 0, 84 | 104, 85 | 101, 86 | 108, 87 | 108, 88 | 111, 89 | 0, 90 | 3, 91 | 104, 92 | 97, 93 | 115, 94 | 104, 95 | 0, 96 | 19, 97 | 0, 98 | 0, 99 | 0, 100 | 16, 101 | 97, 102 | 0, 103 | 1, 104 | 0, 105 | 0, 106 | 0, 107 | 16, 108 | 98, 109 | 0, 110 | 2, 111 | 0, 112 | 0, 113 | 0, 114 | 0, 115 | 9, 116 | 100, 117 | 97, 118 | 116, 119 | 101, 120 | 0, 121 | 161, 122 | 190, 123 | 98, 124 | 75, 125 | 0, 126 | 0, 127 | 0, 128 | 0, 129 | 7, 130 | 111, 131 | 105, 132 | 100, 133 | 0, 134 | 161, 135 | 190, 136 | 98, 137 | 75, 138 | 90, 139 | 217, 140 | 18, 141 | 0, 142 | 0, 143 | 1, 144 | 0, 145 | 0, 146 | 5, 147 | 98, 148 | 105, 149 | 110, 150 | 97, 151 | 114, 152 | 121, 153 | 0, 154 | 7, 155 | 0, 156 | 0, 157 | 0, 158 | 2, 159 | 3, 160 | 0, 161 | 0, 162 | 0, 163 | 49, 164 | 50, 165 | 51, 166 | 16, 167 | 105, 168 | 110, 169 | 116, 170 | 0, 171 | 42, 172 | 0, 173 | 0, 174 | 0, 175 | 1, 176 | 
102, 177 | 108, 178 | 111, 179 | 97, 180 | 116, 181 | 0, 182 | 223, 183 | 224, 184 | 11, 185 | 147, 186 | 169, 187 | 170, 188 | 64, 189 | 64, 190 | 11, 191 | 114, 192 | 101, 193 | 103, 194 | 101, 195 | 120, 196 | 112, 197 | 0, 198 | 102, 199 | 111, 200 | 111, 201 | 98, 202 | 97, 203 | 114, 204 | 0, 205 | 105, 206 | 0, 207 | 8, 208 | 98, 209 | 111, 210 | 111, 211 | 108, 212 | 101, 213 | 97, 214 | 110, 215 | 0, 216 | 1, 217 | 15, 218 | 119, 219 | 104, 220 | 101, 221 | 114, 222 | 101, 223 | 0, 224 | 25, 225 | 0, 226 | 0, 227 | 0, 228 | 12, 229 | 0, 230 | 0, 231 | 0, 232 | 116, 233 | 104, 234 | 105, 235 | 115, 236 | 46, 237 | 120, 238 | 32, 239 | 61, 240 | 61, 241 | 32, 242 | 51, 243 | 0, 244 | 5, 245 | 0, 246 | 0, 247 | 0, 248 | 0, 249 | 3, 250 | 100, 251 | 98, 252 | 114, 253 | 101, 254 | 102, 255 | 0, 256 | 37, 257 | 0, 258 | 0, 259 | 0, 260 | 2, 261 | 36, 262 | 114, 263 | 101, 264 | 102, 265 | 0, 266 | 5, 267 | 0, 268 | 0, 269 | 0, 270 | 116, 271 | 101, 272 | 115, 273 | 116, 274 | 0, 275 | 7, 276 | 36, 277 | 105, 278 | 100, 279 | 0, 280 | 161, 281 | 190, 282 | 98, 283 | 75, 284 | 2, 285 | 180, 286 | 1, 287 | 0, 288 | 0, 289 | 2, 290 | 0, 291 | 0, 292 | 0, 293 | 10, 294 | 110, 295 | 117, 296 | 108, 297 | 108, 298 | 0, 299 | 0 300 | ]; 301 | var serialized_data = ''; 302 | 303 | // Convert to chars 304 | for (var i = 0; i < bytes.length; i++) { 305 | serialized_data = serialized_data + BinaryParser.fromByte(bytes[i]); 306 | } 307 | 308 | var object = createBSON().deserialize(new Buffer(serialized_data, 'binary'), { 309 | promoteValues: false 310 | }); 311 | 312 | // Perform tests 313 | expect('hello').to.equal(object.string); 314 | expect([new Int32(1), new Int32(2), new Int32(3)]).to.deep.equal(object.array); 315 | expect(new Int32(1)).to.deep.equal(object.hash.a); 316 | expect(new Int32(2)).to.deep.equal(object.hash.b); 317 | expect(object.date != null).to.be.ok; 318 | expect(object.oid != null).to.be.ok; 319 | expect(object.binary != null).to.be.ok; 320 | expect(new Int32(42)).to.deep.equal(object.int); 321 | expect(new Double(33.3333)).to.deep.equal(object.float); 322 | expect(object.regexp != null).to.be.ok; 323 | expect(true).to.equal(object.boolean); 324 | expect(object.where != null).to.be.ok; 325 | expect(object.dbref != null).to.be.ok; 326 | expect(object[null] == null).to.be.ok; 327 | done(); 328 | }); 329 | }); 330 | -------------------------------------------------------------------------------- /test/node/serialize_with_buffer_tests.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | var createBSON = require('../utils'); 4 | var expect = require('chai').expect; 5 | 6 | describe('serializeWithBuffer', function() { 7 | /** 8 | * @ignore 9 | */ 10 | it('correctly serialize into buffer using serializeWithBufferAndIndex', function(done) { 11 | var bson = createBSON(); 12 | // Create a buffer 13 | var b = new Buffer(256); 14 | // Serialize from index 0 15 | var r = bson.serializeWithBufferAndIndex({ a: 1 }, b); 16 | expect(11).to.equal(r); 17 | 18 | // Serialize from index r+1 19 | r = bson.serializeWithBufferAndIndex({ a: 1 }, b, { 20 | index: r + 1 21 | }); 22 | expect(23).to.equal(r); 23 | 24 | // Deserialize the buffers 25 | var doc = bson.deserialize(b.slice(0, 12)); 26 | expect({ a: 1 }).to.deep.equal(doc); 27 | doc = bson.deserialize(b.slice(12, 24)); 28 | expect({ a: 1 }).to.deep.equal(doc); 29 | done(); 30 | }); 31 | 32 | it('correctly serialize 3 different docs into buffer using serializeWithBufferAndIndex', 
function( 33 | done 34 | ) { 35 | const MAXSIZE = 1024 * 1024 * 17; 36 | var bson = createBSON(); 37 | let bf = new Buffer(MAXSIZE); 38 | 39 | const data = [ 40 | { 41 | a: 1, 42 | b: new Date('2019-01-01') 43 | }, 44 | { 45 | a: 2, 46 | b: new Date('2019-01-02') 47 | }, 48 | { 49 | a: 3, 50 | b: new Date('2019-01-03') 51 | } 52 | ]; 53 | 54 | let idx = 0; 55 | data.forEach(item => { 56 | idx = 57 | bson.serializeWithBufferAndIndex(item, bf, { 58 | index: idx 59 | }) + 1; 60 | }); 61 | 62 | expect(bson.deserialize(bf.slice(0, 23))).to.deep.equal(data[0]); 63 | expect(bson.deserialize(bf.slice(23, 46))).to.deep.equal(data[1]); 64 | expect(bson.deserialize(bf.slice(46, 69))).to.deep.equal(data[2]); 65 | done(); 66 | }); 67 | }); 68 | -------------------------------------------------------------------------------- /test/node/specs/bson-corpus/README.md: -------------------------------------------------------------------------------- 1 | # BSON Corpus 2 | 3 | This BSON test data corpus consists of a JSON file for each BSON type, plus 4 | a `top.json` file for testing the overall, enclosing document. 5 | 6 | Top level keys include: 7 | 8 | * `description`: human-readable description of what is in the file 9 | * `bson_type`: hex string of the first byte of a BSON element (e.g. "0x01" 10 | for type "double"); this will be the synthetic value "0x00" for `top.json`. 11 | * `test_key`: name of a field in a `valid` test case `extjson` document 12 | that should be checked against the case's `string` field. 13 | * `valid` (optional): an array of valid test cases (see below). 14 | * `decodeErrors` (optional): an array of decode error cases (see below). 15 | * `parseErrors` (optional): an array of type-specific parse error cases (see 16 | below). 17 | 18 | Valid test case keys include: 19 | 20 | * `description`: human-readable test case label. 21 | * `subject`: an (uppercase) big-endian hex representation of a BSON byte 22 | string. Be sure to mangle the case as appropriate in any roundtrip 23 | tests. 24 | * `string`: (optional) a representation of an element in the `extjson` 25 | field that can be checked to verify correct extjson decoding. How to 26 | check is language and bson-type specific. 27 | * `extjson`: a document representing the decoded extended JSON document 28 | equivalent to the subject. 29 | * `decodeOnly` (optional): if true, indicates that the BSON can not 30 | roundtrip; decoding the BSON in 'subject' and re-encoding the result will 31 | not generate identical BSON; otherwise, encode(decode(subject)) should be 32 | the same as the subject. 33 | 34 | Decode error cases provide an invalid BSON document or field that 35 | should result in an error. For each case, keys include: 36 | 37 | * `description`: human-readable test case label. 38 | * `subject`: an (uppercase) big-endian hex representation of an invalid 39 | BSON string that should fail to decode correctly. 40 | 41 | Parse error cases are type-specific and represent some input that can not 42 | be encoded to the `bson_type` under test. For each case, keys include: 43 | 44 | * `description`: human-readable test case label. 45 | * `subject`: a text or numeric representation of an input that can't be 46 | encoded. 47 | 48 | ## Extended JSON extensions 49 | 50 | The extended JSON documentation doesn't include extensions for all BSON 51 | types.
These are supported by `mongoexport`: 52 | 53 | # Javascript 54 | { "$code": "" } 55 | 56 | # Javascript with scope 57 | { "$code": "": "$scope": { "x":1, "y":1 } } 58 | 59 | # Int32 60 | { "$numberInt": "" } 61 | 62 | However, this corpus extends JSON further to include the following: 63 | 64 | # Double (needed for NaN, etc.) 65 | { "$numberDouble": "" } 66 | 67 | # DBpointer (deprecated): is 24 hex chars 68 | { "$dbpointer": "", "$ns":"" } 69 | 70 | # Symbol (deprecated) 71 | { "$symbol": "" } 72 | 73 | ## Visualizing BSON 74 | 75 | The directory includes a Perl script `bsonview`, which will decompose and 76 | highlight elements of a BSON document. It may be used like this: 77 | 78 | echo "0900000010610005000000" | perl bsonview -x 79 | 80 | ## Open Questions 81 | 82 | These issues are still TBD: 83 | 84 | * Can "-0.0" be represented "canonically" in bson? Some languages might 85 | not round-trip it. (Do we need a "lossy_bson" field to capture this?) 86 | 87 | * How should DBPointer round-trip? Should we expect it to be turned into a 88 | DBRef or round-trip faithfully? 89 | 90 | * How should Symbol roundtrip? Should we expect it to be turned into a 91 | string? 92 | 93 | * How should Undefined roundtrip? Should we expect it to be turned into a 94 | null? 95 | 96 | * Should we flag cases where extjson is lossy compared to bson? 97 | -------------------------------------------------------------------------------- /test/node/specs/bson-corpus/array.json: -------------------------------------------------------------------------------- 1 | { 2 | "description": "Array", 3 | "bson_type": "0x04", 4 | "test_key": "a", 5 | "valid": [ 6 | { 7 | "description": "Empty", 8 | "canonical_bson": "0D000000046100050000000000", 9 | "canonical_extjson": "{\"a\" : []}" 10 | }, 11 | { 12 | "description": "Single Element Array", 13 | "canonical_bson": "140000000461000C0000001030000A0000000000", 14 | "canonical_extjson": "{\"a\" : [{\"$numberInt\": \"10\"}]}" 15 | }, 16 | { 17 | "description": "Single Element Array with index set incorrectly", 18 | "degenerate_bson": "130000000461000B00000010000A0000000000", 19 | "canonical_bson": "140000000461000C0000001030000A0000000000", 20 | "canonical_extjson": "{\"a\" : [{\"$numberInt\": \"10\"}]}" 21 | }, 22 | { 23 | "description": "Single Element Array with index set incorrectly", 24 | "degenerate_bson": "150000000461000D000000106162000A0000000000", 25 | "canonical_bson": "140000000461000C0000001030000A0000000000", 26 | "canonical_extjson": "{\"a\" : [{\"$numberInt\": \"10\"}]}" 27 | } 28 | ], 29 | "decodeErrors": [ 30 | { 31 | "description": "Array length too long: eats outer terminator", 32 | "bson": "140000000461000D0000001030000A0000000000" 33 | }, 34 | { 35 | "description": "Array length too short: leaks terminator", 36 | "bson": "140000000461000B0000001030000A0000000000" 37 | }, 38 | { 39 | "description": "Invalid Array: bad string length in field", 40 | "bson": "1A00000004666F6F00100000000230000500000062617A000000" 41 | } 42 | ] 43 | } 44 | -------------------------------------------------------------------------------- /test/node/specs/bson-corpus/binary.json: -------------------------------------------------------------------------------- 1 | { 2 | "description": "Binary type", 3 | "bson_type": "0x05", 4 | "test_key": "x", 5 | "valid": [ 6 | { 7 | "description": "subtype 0x00 (Zero-length)", 8 | "canonical_bson": "0D000000057800000000000000", 9 | "canonical_extjson": "{\"x\" : { \"$binary\" : {\"base64\" : \"\", \"subType\" : \"00\"}}}" 10 | }, 11 | { 12 | 
"description": "subtype 0x00 (Zero-length, keys reversed)", 13 | "canonical_bson": "0D000000057800000000000000", 14 | "canonical_extjson": "{\"x\" : { \"$binary\" : {\"base64\" : \"\", \"subType\" : \"00\"}}}", 15 | "degenerate_extjson": "{\"x\" : { \"$binary\" : {\"subType\" : \"00\", \"base64\" : \"\"}}}" 16 | }, 17 | { 18 | "description": "subtype 0x00", 19 | "canonical_bson": "0F0000000578000200000000FFFF00", 20 | "canonical_extjson": "{\"x\" : { \"$binary\" : {\"base64\" : \"//8=\", \"subType\" : \"00\"}}}" 21 | }, 22 | { 23 | "description": "subtype 0x01", 24 | "canonical_bson": "0F0000000578000200000001FFFF00", 25 | "canonical_extjson": "{\"x\" : { \"$binary\" : {\"base64\" : \"//8=\", \"subType\" : \"01\"}}}" 26 | }, 27 | { 28 | "description": "subtype 0x02", 29 | "canonical_bson": "13000000057800060000000202000000FFFF00", 30 | "canonical_extjson": "{\"x\" : { \"$binary\" : {\"base64\" : \"//8=\", \"subType\" : \"02\"}}}" 31 | }, 32 | { 33 | "description": "subtype 0x03", 34 | "canonical_bson": "1D000000057800100000000373FFD26444B34C6990E8E7D1DFC035D400", 35 | "canonical_extjson": "{\"x\" : { \"$binary\" : {\"base64\" : \"c//SZESzTGmQ6OfR38A11A==\", \"subType\" : \"03\"}}}" 36 | }, 37 | { 38 | "description": "subtype 0x04", 39 | "canonical_bson": "1D000000057800100000000473FFD26444B34C6990E8E7D1DFC035D400", 40 | "canonical_extjson": "{\"x\" : { \"$binary\" : {\"base64\" : \"c//SZESzTGmQ6OfR38A11A==\", \"subType\" : \"04\"}}}" 41 | }, 42 | { 43 | "description": "subtype 0x05", 44 | "canonical_bson": "1D000000057800100000000573FFD26444B34C6990E8E7D1DFC035D400", 45 | "canonical_extjson": "{\"x\" : { \"$binary\" : {\"base64\" : \"c//SZESzTGmQ6OfR38A11A==\", \"subType\" : \"05\"}}}" 46 | }, 47 | { 48 | "description": "subtype 0x80", 49 | "canonical_bson": "0F0000000578000200000080FFFF00", 50 | "canonical_extjson": "{\"x\" : { \"$binary\" : {\"base64\" : \"//8=\", \"subType\" : \"80\"}}}" 51 | }, 52 | { 53 | "description": "$type query operator (conflicts with legacy $binary form with $type field)", 54 | "canonical_bson": "1F000000037800170000000224747970650007000000737472696E67000000", 55 | "canonical_extjson": "{\"x\" : { \"$type\" : \"string\"}}" 56 | }, 57 | { 58 | "description": "$type query operator (conflicts with legacy $binary form with $type field)", 59 | "canonical_bson": "180000000378001000000010247479706500020000000000", 60 | "canonical_extjson": "{\"x\" : { \"$type\" : {\"$numberInt\": \"2\"}}}" 61 | } 62 | ], 63 | "decodeErrors": [ 64 | { 65 | "description": "Length longer than document", 66 | "bson": "1D000000057800FF0000000573FFD26444B34C6990E8E7D1DFC035D400" 67 | }, 68 | { 69 | "description": "Negative length", 70 | "bson": "0D000000057800FFFFFFFF0000" 71 | }, 72 | { 73 | "description": "subtype 0x02 length too long ", 74 | "bson": "13000000057800060000000203000000FFFF00" 75 | }, 76 | { 77 | "description": "subtype 0x02 length too short", 78 | "bson": "13000000057800060000000201000000FFFF00" 79 | }, 80 | { 81 | "description": "subtype 0x02 length negative one", 82 | "bson": "130000000578000600000002FFFFFFFFFFFF00" 83 | } 84 | ] 85 | } 86 | -------------------------------------------------------------------------------- /test/node/specs/bson-corpus/boolean.json: -------------------------------------------------------------------------------- 1 | { 2 | "description": "Boolean", 3 | "bson_type": "0x08", 4 | "test_key": "b", 5 | "valid": [ 6 | { 7 | "description": "True", 8 | "canonical_bson": "090000000862000100", 9 | "canonical_extjson": "{\"b\" : true}" 10 | }, 11 | { 12 
| "description": "False", 13 | "canonical_bson": "090000000862000000", 14 | "canonical_extjson": "{\"b\" : false}" 15 | } 16 | ], 17 | "decodeErrors": [ 18 | { 19 | "description": "Invalid boolean value of 2", 20 | "bson": "090000000862000200" 21 | }, 22 | { 23 | "description": "Invalid boolean value of -1", 24 | "bson": "09000000086200FF00" 25 | } 26 | ] 27 | } 28 | -------------------------------------------------------------------------------- /test/node/specs/bson-corpus/bsonview: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env perl 2 | use v5.10; 3 | use strict; 4 | use warnings; 5 | use utf8; 6 | use open qw/:std :utf8/; 7 | 8 | use Getopt::Long; 9 | use Pod::Usage; 10 | 11 | use if $^O eq 'MSWin32', 'Win32::Console::ANSI'; 12 | use Term::ANSIColor; 13 | 14 | use constant { 15 | NULL => "\x00", 16 | BSON_TYPE => "C", 17 | BSON_ENAME => "Z*", 18 | BSON_TYPE_NAME => "CZ*", 19 | BSON_DOUBLE => "d", 20 | BSON_STRING => "l/A", 21 | BSON_BOOLEAN => "C", 22 | BSON_REGEX => "Z*Z*", 23 | BSON_JSCODE => "", 24 | BSON_INT32 => "l", 25 | BSON_INT64 => "q", 26 | BSON_TIMESTAMP => "q", 27 | BSON_CODE_W_SCOPE => "l", 28 | BSON_REMAINING => 'a*', 29 | BSON_SKIP_4_BYTES => 'x4', 30 | BSON_OBJECTID => 'a12', 31 | BSON_BINARY_TYPE => 'C', 32 | BSON_CSTRING => 'Z*', 33 | BSON_BYTES => 'a*' 34 | }; 35 | 36 | my $BOLD = $^O eq 'MSWin32' ? "bold " : ""; 37 | 38 | # minimum field size 39 | my %FIELD_SIZES = ( 40 | 0x01 => 8, 41 | 0x02 => 5, 42 | 0x03 => 5, 43 | 0x04 => 5, 44 | 0x05 => 5, 45 | 0x06 => 0, 46 | 0x07 => 12, 47 | 0x08 => 1, 48 | 0x09 => 8, 49 | 0x0A => 0, 50 | 0x0B => 2, 51 | 0x0C => 17, 52 | 0x0D => 5, 53 | 0x0E => 5, 54 | 0x0F => 14, 55 | 0x10 => 4, 56 | 0x11 => 8, 57 | 0x12 => 8, 58 | 0x7F => 0, 59 | 0xFF => 0, 60 | ); 61 | 62 | sub main { 63 | my ( $hex, $file, $help ); 64 | GetOptions( 65 | "file=s" => \$file, 66 | "x" => \$hex, 67 | "help|h" => \$help, 68 | ) or die("Error in command line args"); 69 | pod2usage( { -exitval => 2, -verbose => 2, } ) if $help; 70 | 71 | if ( $file ) { 72 | dump_file($file); 73 | } 74 | else { 75 | dump_stdin($hex); 76 | } 77 | } 78 | 79 | sub dump_stdin { 80 | my $hex = shift; 81 | while ( defined( my $bson = ) ) { 82 | chomp $bson; 83 | if ( !length($bson) ) { 84 | print_error("[ no document ]\n"); 85 | next; 86 | } 87 | # in -x mode, treat leading # as a comment 88 | if ( $hex && index( $bson, "#" ) == 0 ) { 89 | say $bson; 90 | next; 91 | } 92 | $bson =~ s[ ][]g if $hex; 93 | $bson = pack( "H*", $bson ) if $hex; 94 | dump_document( \$bson ); 95 | print "\n"; 96 | } 97 | } 98 | 99 | sub dump_file { 100 | my $file = shift; 101 | open my $fh, "<", $file; 102 | binmode($fh); 103 | my $data = do { local $/; <$fh> }; 104 | while ( length $data ) { 105 | my $len = unpack( BSON_INT32, $data ); 106 | my $bson = substr($data,0,$len,''); 107 | dump_document(\$bson); 108 | print "\n"; 109 | } 110 | } 111 | 112 | sub dump_document { 113 | my ( $ref, $is_array ) = @_; 114 | print $is_array ? " [" : " {" if defined $is_array; 115 | dump_header($ref); 116 | 1 while dump_field($ref); 117 | print_error( " " . unpack( "H*", $$ref ) ) if length($$ref); 118 | print $is_array ? 
" ]" : " }" if defined $is_array; 119 | return; 120 | } 121 | 122 | sub dump_header { 123 | my ($ref) = @_; 124 | 125 | my $len = get_length( $ref, 4 ); 126 | return unless defined $len; 127 | 128 | if ( $len < 5 || $len < length($$ref) + 4 ) { 129 | print_length( $len, 'red' ); 130 | } 131 | else { 132 | print_length( $len, 'blue' ); 133 | } 134 | } 135 | 136 | sub dump_field { 137 | my ($ref) = @_; 138 | 139 | # detect end of document 140 | if ( length($$ref) < 2 ) { 141 | if ( length($$ref) == 0 ) { 142 | print_error(" [missing terminator]"); 143 | } 144 | else { 145 | my $end = substr( $$ref, 0, 1, '' ); 146 | print_hex( $end, $end eq NULL ? 'blue' : 'red' ); 147 | } 148 | return; 149 | } 150 | 151 | # unpack type 152 | my $type = unpack( BSON_TYPE, substr( $$ref, 0, 1, '' ) ); 153 | 154 | if ( !exists $FIELD_SIZES{$type} ) { 155 | print_type( $type, 'red' ); 156 | return; 157 | } 158 | 159 | print_type($type); 160 | 161 | # check for key termination 162 | my $key_end = index( $$ref, NULL ); 163 | return if $key_end == -1; 164 | 165 | # unpack key 166 | my $key = unpack( BSON_CSTRING, substr( $$ref, 0, $key_end + 1, '' ) ); 167 | print_key($key); 168 | 169 | # Check if there is enough data to complete field for this type 170 | # This is greedy, so it checks length, not length -1 171 | my $min_size = $FIELD_SIZES{$type}; 172 | return if length($$ref) < $min_size; 173 | 174 | # fields without payload: 0x06, 0x0A, 0x7F, 0xFF 175 | return 1 if $min_size == 0; 176 | 177 | # document or array 178 | if ( $type == 0x03 || $type == 0x04 ) { 179 | my ($len) = unpack( BSON_INT32, $$ref ); 180 | my $doc = substr( $$ref, 0, $len, '' ); 181 | dump_document( \$doc, $type == 0x04 ); 182 | return 1; 183 | } 184 | 185 | # fixed width fields 186 | if ( $type == 0x01 187 | || $type == 0x07 188 | || $type == 0x09 189 | || $type == 0x10 190 | || $type == 0x11 191 | || $type == 0x12 ) 192 | { 193 | my $len = ( $type == 0x10 ? 4 : $type == 0x07 ? 12 : 8 ); 194 | print_hex( substr( $$ref, 0, $len, '' ) ); 195 | return 1; 196 | } 197 | 198 | # boolean 199 | if ( $type == 0x08 ) { 200 | my $bool = substr( $$ref, 0, 1, '' ); 201 | print_hex( $bool, ( $bool eq "\x00" || $bool eq "\x01" ) ? 'green' : 'red' ); 202 | return 1; 203 | } 204 | 205 | # binary field 206 | if ( $type == 0x05 ) { 207 | my $len = get_length( $ref, -1 ); 208 | my $subtype = substr( $$ref, 0, 1, '' ); 209 | 210 | if ( !defined($len) ) { 211 | print_hex($subtype); 212 | return; 213 | } 214 | 215 | my $binary = substr( $$ref, 0, $len, '' ); 216 | 217 | print_length($len); 218 | print_hex($subtype); 219 | 220 | if ( $subtype eq "\x02" ) { 221 | my $bin_len = get_length( \$binary ); 222 | if ( !defined($bin_len) ) { 223 | print_hex( $binary, 'red' ); 224 | return; 225 | } 226 | if ( $bin_len != length($binary) ) { 227 | print_length( $bin_len, 'red' ); 228 | print_hex( $binary, 'red' ); 229 | return; 230 | } 231 | } 232 | 233 | print_hex($binary) if length($binary); 234 | return 1; 235 | } 236 | 237 | # string or symbol or code 238 | if ( $type == 0x02 || $type == 0x0e || $type == 0x0d ) { 239 | my ( $len, $string ) = get_string($ref); 240 | return unless defined $len; 241 | 242 | print_length( $len, 'cyan' ); 243 | print_string($string); 244 | return 1; 245 | 246 | } 247 | 248 | # regex 0x0B 249 | if ( $type == 0x0B ) { 250 | my ( $pattern, $flag ) = unpack( BSON_CSTRING . 
BSON_CSTRING, $$ref ); 251 | substr( $$ref, 0, length($pattern) + length($flag) + 2, '' ); 252 | print_string($pattern); 253 | print_string($flag); 254 | return 1; 255 | } 256 | 257 | # code with scope 0x0F 258 | if ( $type == 0x0F ) { 259 | my $len = get_length( $ref, 4 ); 260 | return unless defined $len; 261 | 262 | # len + string + doc minimum size is 4 + 5 + 5 263 | if ( $len < 14 ) { 264 | print_length( $len, 'red' ); 265 | return; 266 | } 267 | 268 | print_length($len); 269 | 270 | my $cws = substr( $$ref, 0, $len - 4, '' ); 271 | 272 | my ( $strlen, $string ) = get_string( \$cws ); 273 | 274 | if ( !defined $strlen ) { 275 | print_hex( $cws, 'red' ); 276 | return; 277 | } 278 | 279 | print_length($strlen); 280 | print_string($string); 281 | 282 | dump_document( \$cws, 0 ); 283 | 284 | return 1; 285 | } 286 | 287 | # dbpointer 0x0C 288 | if ( $type == 0x0C ) { 289 | my ( $len, $string ) = get_string($ref); 290 | return unless defined $len; 291 | 292 | print_length($len); 293 | print_string($string); 294 | 295 | # Check if there are 12 bytes (plus terminator) or more 296 | return if length($$ref) < 13; 297 | 298 | my $oid = substr( $$ref, 0, 12, '' ); 299 | print_hex($oid); 300 | 301 | return 1; 302 | } 303 | 304 | die "Shouldn't reach here"; 305 | } 306 | 307 | sub get_length { 308 | my ( $ref, $adj ) = @_; 309 | $adj ||= 0; 310 | my $len = unpack( BSON_INT32, substr( $$ref, 0, 4, '' ) ); 311 | return unless defined $len; 312 | 313 | # check if requested length is too long 314 | if ( $len < 0 || $len > length($$ref) + $adj ) { 315 | print_length( $len, 'red' ); 316 | return; 317 | } 318 | 319 | return $len; 320 | } 321 | 322 | sub get_string { 323 | my ($ref) = @_; 324 | 325 | my $len = get_length($ref); 326 | return unless defined $len; 327 | 328 | # len must be at least 1 for trailing 0x00 329 | if ( $len == 0 ) { 330 | print_length( $len, 'red' ); 331 | return; 332 | } 333 | 334 | my $string = substr( $$ref, 0, $len, '' ); 335 | 336 | # check if null terminated 337 | if ( substr( $string, -1, 1 ) ne NULL ) { 338 | print_length($len); 339 | print_hex( $string, 'red' ); 340 | return; 341 | } 342 | 343 | # remove trailing null 344 | chop($string); 345 | 346 | # try to decode to UTF-8 347 | if ( !utf8::decode($string) ) { 348 | print_length($len); 349 | print_hex( $string . "\x00", 'red' ); 350 | return; 351 | } 352 | 353 | return ( $len, $string ); 354 | } 355 | 356 | sub print_error { 357 | my ($text) = @_; 358 | print colored( ["${BOLD}red"], $text ); 359 | } 360 | 361 | sub print_type { 362 | my ( $type, $color ) = @_; 363 | $color ||= 'magenta'; 364 | print colored( ["$BOLD$color"], sprintf( " %02x", $type ) ); 365 | } 366 | 367 | sub print_key { 368 | my ($string) = @_; 369 | print_string( $string, 'yellow' ); 370 | } 371 | 372 | sub print_string { 373 | my ( $string, $color ) = @_; 374 | $color ||= 'green'; 375 | $string =~ s{([^[:graph:]])}{sprintf("\\x%02x",ord($1))}ge; 376 | print colored( ["$BOLD$color"], qq[ "$string"] . " 00" ); 377 | } 378 | 379 | sub print_length { 380 | my ( $len, $color ) = @_; 381 | $color ||= 'cyan'; 382 | print colored( ["$BOLD$color"], " " . unpack( "H*", pack( BSON_INT32, $len ) ) ); 383 | } 384 | 385 | sub print_hex { 386 | my ( $value, $color ) = @_; 387 | $color ||= 'green'; 388 | print colored( ["$BOLD$color"], " " . 
uc( unpack( "H*", $value ) ) ); 389 | } 390 | 391 | main(); 392 | 393 | __END__ 394 | 395 | =head1 NAME 396 | 397 | bsonview - dump a BSON string with color output showing structure 398 | 399 | =head1 SYNOPSIS 400 | 401 | cat file.bson | bsonview 402 | 403 | echo "0500000000" | bsonview -x 404 | 405 | =head1 OPTIONS 406 | 407 | -x input is in hex format (default is 0) 408 | --help, -h show help 409 | 410 | =head1 USAGE 411 | 412 | Reads from C<STDIN> and dumps colored structures to C<STDOUT>. 413 | 414 | =head1 AUTHOR 415 | 416 | =over 4 417 | 418 | =item * 419 | 420 | David Golden 421 | 422 | =back 423 | 424 | =head1 COPYRIGHT AND LICENSE 425 | 426 | This software is Copyright (c) 2016 by MongoDB, Inc. 427 | 428 | This is free software, licensed under: 429 | 430 | The Apache License, Version 2.0, January 2004 431 | 432 | =cut 433 | -------------------------------------------------------------------------------- /test/node/specs/bson-corpus/code.json: -------------------------------------------------------------------------------- 1 | { 2 | "description": "Javascript Code", 3 | "bson_type": "0x0D", 4 | "test_key": "a", 5 | "valid": [ 6 | { 7 | "description": "Empty string", 8 | "canonical_bson": "0D0000000D6100010000000000", 9 | "canonical_extjson": "{\"a\" : {\"$code\" : \"\"}}" 10 | }, 11 | { 12 | "description": "Single character", 13 | "canonical_bson": "0E0000000D610002000000620000", 14 | "canonical_extjson": "{\"a\" : {\"$code\" : \"b\"}}" 15 | }, 16 | { 17 | "description": "Multi-character", 18 | "canonical_bson": "190000000D61000D0000006162616261626162616261620000", 19 | "canonical_extjson": "{\"a\" : {\"$code\" : \"abababababab\"}}" 20 | }, 21 | { 22 | "description": "two-byte UTF-8 (\u00e9)", 23 | "canonical_bson": "190000000261000D000000C3A9C3A9C3A9C3A9C3A9C3A90000", 24 | "canonical_extjson": "{\"a\" : \"\\u00e9\\u00e9\\u00e9\\u00e9\\u00e9\\u00e9\"}" 25 | }, 26 | { 27 | "description": "three-byte UTF-8 (\u2606)", 28 | "canonical_bson": "190000000261000D000000E29886E29886E29886E298860000", 29 | "canonical_extjson": "{\"a\" : \"\\u2606\\u2606\\u2606\\u2606\"}" 30 | }, 31 | { 32 | "description": "Embedded nulls", 33 | "canonical_bson": "190000000261000D0000006162006261620062616261620000", 34 | "canonical_extjson": "{\"a\" : \"ab\\u0000bab\\u0000babab\"}" 35 | } 36 | ], 37 | "decodeErrors": [ 38 | { 39 | "description": "bad code string length: 0 (but no 0x00 either)", 40 | "bson": "0C0000000261000000000000" 41 | }, 42 | { 43 | "description": "bad code string length: -1", 44 | "bson": "0C000000026100FFFFFFFF00" 45 | }, 46 | { 47 | "description": "bad code string length: eats terminator", 48 | "bson": "10000000026100050000006200620000" 49 | }, 50 | { 51 | "description": "bad code string length: longer than rest of document", 52 | "bson": "120000000200FFFFFF00666F6F6261720000" 53 | }, 54 | { 55 | "description": "code string is not null-terminated", 56 | "bson": "1000000002610004000000616263FF00" 57 | }, 58 | { 59 | "description": "empty code string, but extra null", 60 | "bson": "0E00000002610001000000000000" 61 | }, 62 | { 63 | "description": "invalid UTF-8", 64 | "bson": "0E00000002610002000000E90000" 65 | } 66 | ] 67 | } 68 | -------------------------------------------------------------------------------- /test/node/specs/bson-corpus/code_w_scope.json: -------------------------------------------------------------------------------- 1 | { 2 | "description": "Javascript Code with Scope", 3 | "bson_type": "0x0F", 4 | "test_key": "a", 5 | "valid": [ 6 | { 7 | "description": "Empty 
code string, empty scope", 8 | "canonical_bson": "160000000F61000E0000000100000000050000000000", 9 | "canonical_extjson": "{\"a\" : {\"$code\" : \"\", \"$scope\" : {}}}" 10 | }, 11 | { 12 | "description": "Non-empty code string, empty scope", 13 | "canonical_bson": "1A0000000F610012000000050000006162636400050000000000", 14 | "canonical_extjson": "{\"a\" : {\"$code\" : \"abcd\", \"$scope\" : {}}}" 15 | }, 16 | { 17 | "description": "Empty code string, non-empty scope", 18 | "canonical_bson": "1D0000000F61001500000001000000000C000000107800010000000000", 19 | "canonical_extjson": "{\"a\" : {\"$code\" : \"\", \"$scope\" : {\"x\" : {\"$numberInt\": \"1\"}}}}" 20 | }, 21 | { 22 | "description": "Non-empty code string and non-empty scope", 23 | "canonical_bson": "210000000F6100190000000500000061626364000C000000107800010000000000", 24 | "canonical_extjson": "{\"a\" : {\"$code\" : \"abcd\", \"$scope\" : {\"x\" : {\"$numberInt\": \"1\"}}}}" 25 | }, 26 | { 27 | "description": "Unicode and embedded null in code string, empty scope", 28 | "canonical_bson": "1A0000000F61001200000005000000C3A9006400050000000000", 29 | "canonical_extjson": "{\"a\" : {\"$code\" : \"\\u00e9\\u0000d\", \"$scope\" : {}}}" 30 | } 31 | ], 32 | "decodeErrors": [ 33 | { 34 | "description": "field length zero", 35 | "bson": "280000000F6100000000000500000061626364001300000010780001000000107900010000000000" 36 | }, 37 | { 38 | "description": "field length negative", 39 | "bson": "280000000F6100FFFFFFFF0500000061626364001300000010780001000000107900010000000000" 40 | }, 41 | { 42 | "description": "field length too short (less than minimum size)", 43 | "bson": "160000000F61000D0000000100000000050000000000" 44 | }, 45 | { 46 | "description": "field length too short (truncates scope)", 47 | "bson": "280000000F61001F0000000500000061626364001300000010780001000000107900010000000000" 48 | }, 49 | { 50 | "description": "field length too long (clips outer doc)", 51 | "bson": "280000000F6100210000000500000061626364001300000010780001000000107900010000000000" 52 | }, 53 | { 54 | "description": "field length too long (longer than outer doc)", 55 | "bson": "280000000F6100FF0000000500000061626364001300000010780001000000107900010000000000" 56 | }, 57 | { 58 | "description": "bad code string: length too short", 59 | "bson": "280000000F6100200000000400000061626364001300000010780001000000107900010000000000" 60 | }, 61 | { 62 | "description": "bad code string: length too long (clips scope)", 63 | "bson": "280000000F6100200000000600000061626364001300000010780001000000107900010000000000" 64 | }, 65 | { 66 | "description": "bad code string: negative length", 67 | "bson": "280000000F610020000000FFFFFFFF61626364001300000010780001000000107900010000000000" 68 | }, 69 | { 70 | "description": "bad code string: length longer than field", 71 | "bson": "280000000F610020000000FF00000061626364001300000010780001000000107900010000000000" 72 | }, 73 | { 74 | "description": "bad scope doc (field has bad string length)", 75 | "bson": "1C0000000F001500000001000000000C000000020000000000000000" 76 | } 77 | ] 78 | } 79 | -------------------------------------------------------------------------------- /test/node/specs/bson-corpus/datetime.json: -------------------------------------------------------------------------------- 1 | { 2 | "description": "DateTime", 3 | "bson_type": "0x09", 4 | "test_key": "a", 5 | "valid": [ 6 | { 7 | "description": "epoch", 8 | "canonical_bson": "10000000096100000000000000000000", 9 | "relaxed_extjson": "{\"a\" : {\"$date\" : 
\"1970-01-01T00:00:00Z\"}}", 10 | "canonical_extjson": "{\"a\" : {\"$date\" : {\"$numberLong\" : \"0\"}}}" 11 | }, 12 | { 13 | "description": "positive ms", 14 | "canonical_bson": "10000000096100C5D8D6CC3B01000000", 15 | "relaxed_extjson": "{\"a\" : {\"$date\" : \"2012-12-24T12:15:30.501Z\"}}", 16 | "canonical_extjson": "{\"a\" : {\"$date\" : {\"$numberLong\" : \"1356351330501\"}}}" 17 | }, 18 | { 19 | "description": "negative", 20 | "canonical_bson": "10000000096100C33CE7B9BDFFFFFF00", 21 | "relaxed_extjson": "{\"a\" : {\"$date\" : {\"$numberLong\" : \"-284643869501\"}}}", 22 | "canonical_extjson": "{\"a\" : {\"$date\" : {\"$numberLong\" : \"-284643869501\"}}}" 23 | }, 24 | { 25 | "description" : "Y10K", 26 | "canonical_bson" : "1000000009610000DC1FD277E6000000", 27 | "canonical_extjson" : "{\"a\":{\"$date\":{\"$numberLong\":\"253402300800000\"}}}" 28 | } 29 | ], 30 | "decodeErrors": [ 31 | { 32 | "description": "datetime field truncated", 33 | "bson": "0C0000000961001234567800" 34 | } 35 | ] 36 | } 37 | -------------------------------------------------------------------------------- /test/node/specs/bson-corpus/dbpointer.json: -------------------------------------------------------------------------------- 1 | { 2 | "description": "DBPointer type (deprecated)", 3 | "bson_type": "0x0C", 4 | "deprecated": true, 5 | "test_key": "a", 6 | "valid": [ 7 | { 8 | "description": "DBpointer", 9 | "canonical_bson": "1A0000000C610002000000620056E1FC72E0C917E9C471416100", 10 | "canonical_extjson": "{\"a\": {\"$dbPointer\": {\"$ref\": \"b\", \"$id\": {\"$oid\": \"56e1fc72e0c917e9c4714161\"}}}}", 11 | "converted_bson": "2a00000003610022000000022472656600020000006200072469640056e1fc72e0c917e9c47141610000", 12 | "converted_extjson": "{\"a\": {\"$ref\": \"b\", \"$id\": {\"$oid\": \"56e1fc72e0c917e9c4714161\"}}}" 13 | }, 14 | { 15 | "description": "DBpointer with opposite key order", 16 | "canonical_bson": "1A0000000C610002000000620056E1FC72E0C917E9C471416100", 17 | "canonical_extjson": "{\"a\": {\"$dbPointer\": {\"$ref\": \"b\", \"$id\": {\"$oid\": \"56e1fc72e0c917e9c4714161\"}}}}", 18 | "degenerate_extjson": "{\"a\": {\"$dbPointer\": {\"$id\": {\"$oid\": \"56e1fc72e0c917e9c4714161\"}, \"$ref\": \"b\"}}}", 19 | "converted_bson": "2a00000003610022000000022472656600020000006200072469640056e1fc72e0c917e9c47141610000", 20 | "converted_extjson": "{\"a\": {\"$ref\": \"b\", \"$id\": {\"$oid\": \"56e1fc72e0c917e9c4714161\"}}}" 21 | }, 22 | { 23 | "description": "With two-byte UTF-8", 24 | "canonical_bson": "1B0000000C610003000000C3A90056E1FC72E0C917E9C471416100", 25 | "canonical_extjson": "{\"a\": {\"$dbPointer\": {\"$ref\": \"é\", \"$id\": {\"$oid\": \"56e1fc72e0c917e9c4714161\"}}}}", 26 | "converted_bson": "2B0000000361002300000002247265660003000000C3A900072469640056E1FC72E0C917E9C47141610000", 27 | "converted_extjson": "{\"a\": {\"$ref\": \"é\", \"$id\": {\"$oid\": \"56e1fc72e0c917e9c4714161\"}}}" 28 | } 29 | ], 30 | "decodeErrors": [ 31 | { 32 | "description": "String with negative length", 33 | "bson": "1A0000000C6100FFFFFFFF620056E1FC72E0C917E9C471416100" 34 | }, 35 | { 36 | "description": "String with zero length", 37 | "bson": "1A0000000C610000000000620056E1FC72E0C917E9C471416100" 38 | }, 39 | { 40 | "description": "String not null terminated", 41 | "bson": "1A0000000C610002000000626256E1FC72E0C917E9C471416100" 42 | }, 43 | { 44 | "description": "short OID (less than minimum length for field)", 45 | "bson": "160000000C61000300000061620056E1FC72E0C91700" 46 | }, 47 | { 48 | "description": "short OID 
(greater than minimum, but truncated)", 49 | "bson": "1A0000000C61000300000061620056E1FC72E0C917E9C4716100" 50 | }, 51 | { 52 | "description": "String with bad UTF-8", 53 | "bson": "1A0000000C610002000000E90056E1FC72E0C917E9C471416100" 54 | } 55 | ] 56 | } 57 | -------------------------------------------------------------------------------- /test/node/specs/bson-corpus/dbref.json: -------------------------------------------------------------------------------- 1 | { 2 | "description": "DBRef", 3 | "bson_type": "0x03", 4 | "valid": [ 5 | { 6 | "description": "DBRef", 7 | "canonical_bson": "37000000036462726566002b0000000224726566000b000000636f6c6c656374696f6e00072469640058921b3e6e32ab156a22b59e0000", 8 | "canonical_extjson": "{\"dbref\": {\"$ref\": \"collection\", \"$id\": {\"$oid\": \"58921b3e6e32ab156a22b59e\"}}}" 9 | }, 10 | { 11 | "description": "DBRef with database", 12 | "canonical_bson": "4300000003646272656600370000000224726566000b000000636f6c6c656374696f6e00072469640058921b3e6e32ab156a22b59e0224646200030000006462000000", 13 | "canonical_extjson": "{\"dbref\": {\"$ref\": \"collection\", \"$id\": {\"$oid\": \"58921b3e6e32ab156a22b59e\"}, \"$db\": \"db\"}}" 14 | }, 15 | { 16 | "description": "DBRef with database and additional fields", 17 | "canonical_bson": "48000000036462726566003c0000000224726566000b000000636f6c6c656374696f6e0010246964002a00000002246462000300000064620002666f6f0004000000626172000000", 18 | "canonical_extjson": "{\"dbref\": {\"$ref\": \"collection\", \"$id\": {\"$numberInt\": \"42\"}, \"$db\": \"db\", \"foo\": \"bar\"}}" 19 | }, 20 | { 21 | "description": "DBRef with additional fields", 22 | "canonical_bson": "4400000003646272656600380000000224726566000b000000636f6c6c656374696f6e00072469640058921b3e6e32ab156a22b59e02666f6f0004000000626172000000", 23 | "canonical_extjson": "{\"dbref\": {\"$ref\": \"collection\", \"$id\": {\"$oid\": \"58921b3e6e32ab156a22b59e\"}, \"foo\": \"bar\"}}" 24 | }, 25 | { 26 | "description": "Document with key names similar to those of a DBRef", 27 | "canonical_bson": "3e0000000224726566000c0000006e6f742d612d646272656600072469640058921b3e6e32ab156a22b59e022462616e616e6100050000007065656c0000", 28 | "canonical_extjson": "{\"$ref\": \"not-a-dbref\", \"$id\": {\"$oid\": \"58921b3e6e32ab156a22b59e\"}, \"$banana\": \"peel\"}" 29 | } 30 | ] 31 | } 32 | -------------------------------------------------------------------------------- /test/node/specs/bson-corpus/decimal128-4.json: -------------------------------------------------------------------------------- 1 | { 2 | "description": "Decimal128", 3 | "bson_type": "0x13", 4 | "test_key": "d", 5 | "valid": [ 6 | { 7 | "description": "[basx023] conform to rules and exponent will be in permitted range).", 8 | "canonical_bson": "1800000013640001000000000000000000000000003EB000", 9 | "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"-0.1\"}}" 10 | }, 11 | 12 | { 13 | "description": "[basx045] strings without E cannot generate E in result", 14 | "canonical_bson": "1800000013640003000000000000000000000000003A3000", 15 | "degenerate_extjson": "{\"d\" : {\"$numberDecimal\" : \"+0.003\"}}", 16 | "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0.003\"}}" 17 | }, 18 | { 19 | "description": "[basx610] Zeros", 20 | "canonical_bson": "1800000013640000000000000000000000000000003E3000", 21 | "degenerate_extjson": "{\"d\" : {\"$numberDecimal\" : \".0\"}}", 22 | "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0.0\"}}" 23 | }, 24 | { 25 | "description": "[basx612] Zeros", 26 | "canonical_bson": 
"1800000013640000000000000000000000000000003EB000", 27 | "degenerate_extjson": "{\"d\" : {\"$numberDecimal\" : \"-.0\"}}", 28 | "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"-0.0\"}}" 29 | }, 30 | { 31 | "description": "[basx043] strings without E cannot generate E in result", 32 | "canonical_bson": "18000000136400FC040000000000000000000000003C3000", 33 | "degenerate_extjson": "{\"d\" : {\"$numberDecimal\" : \"+12.76\"}}", 34 | "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"12.76\"}}" 35 | }, 36 | { 37 | "description": "[basx055] strings without E cannot generate E in result", 38 | "canonical_bson": "180000001364000500000000000000000000000000303000", 39 | "degenerate_extjson": "{\"d\" : {\"$numberDecimal\" : \"0.00000005\"}}", 40 | "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"5E-8\"}}" 41 | }, 42 | { 43 | "description": "[basx054] strings without E cannot generate E in result", 44 | "canonical_bson": "180000001364000500000000000000000000000000323000", 45 | "degenerate_extjson": "{\"d\" : {\"$numberDecimal\" : \"0.0000005\"}}", 46 | "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"5E-7\"}}" 47 | }, 48 | { 49 | "description": "[basx052] strings without E cannot generate E in result", 50 | "canonical_bson": "180000001364000500000000000000000000000000343000", 51 | "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0.000005\"}}" 52 | }, 53 | { 54 | "description": "[basx051] strings without E cannot generate E in result", 55 | "canonical_bson": "180000001364000500000000000000000000000000363000", 56 | "degenerate_extjson": "{\"d\" : {\"$numberDecimal\" : \"00.00005\"}}", 57 | "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0.00005\"}}" 58 | }, 59 | { 60 | "description": "[basx050] strings without E cannot generate E in result", 61 | "canonical_bson": "180000001364000500000000000000000000000000383000", 62 | "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0.0005\"}}" 63 | }, 64 | { 65 | "description": "[basx047] strings without E cannot generate E in result", 66 | "canonical_bson": "1800000013640005000000000000000000000000003E3000", 67 | "degenerate_extjson": "{\"d\" : {\"$numberDecimal\" : \".5\"}}", 68 | "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0.5\"}}" 69 | }, 70 | { 71 | "description": "[dqbsr431] check rounding modes heeded (Rounded)", 72 | "canonical_bson": "1800000013640099761CC7B548F377DC80A131C836FE2F00", 73 | "degenerate_extjson": 74 | "{\"d\" : {\"$numberDecimal\" : \"1.1111111111111111111111111111123450\"}}", 75 | "canonical_extjson": 76 | "{\"d\" : {\"$numberDecimal\" : \"1.111111111111111111111111111112345\"}}" 77 | }, 78 | { 79 | "description": "OK2", 80 | "canonical_bson": "18000000136400000000000A5BC138938D44C64D31FC2F00", 81 | "degenerate_extjson": 82 | "{\"d\" : {\"$numberDecimal\" : \".100000000000000000000000000000000000000000000000000000000000\"}}", 83 | "canonical_extjson": 84 | "{\"d\" : {\"$numberDecimal\" : \"0.1000000000000000000000000000000000\"}}" 85 | } 86 | ], 87 | "parseErrors": [ 88 | { 89 | "description": "[basx564] Near-specials (Conversion_syntax)", 90 | "string": "Infi" 91 | }, 92 | { 93 | "description": "[basx565] Near-specials (Conversion_syntax)", 94 | "string": "Infin" 95 | }, 96 | { 97 | "description": "[basx566] Near-specials (Conversion_syntax)", 98 | "string": "Infini" 99 | }, 100 | { 101 | "description": "[basx567] Near-specials (Conversion_syntax)", 102 | "string": "Infinit" 103 | }, 104 | { 105 | "description": "[basx568] Near-specials (Conversion_syntax)", 106 | "string": "-Infinit" 107 
| }, 108 | { 109 | "description": 110 | "[basx590] some baddies with dots and Es and dots and specials (Conversion_syntax)", 111 | "string": ".Infinity" 112 | }, 113 | { 114 | "description": "[basx562] Near-specials (Conversion_syntax)", 115 | "string": "NaNq" 116 | }, 117 | { 118 | "description": "[basx563] Near-specials (Conversion_syntax)", 119 | "string": "NaNs" 120 | }, 121 | { 122 | "description": 123 | "[dqbas939] overflow results at different rounding modes (Overflow & Inexact & Rounded)", 124 | "string": "-7e10000" 125 | }, 126 | { 127 | "description": "[dqbsr534] negatives (Rounded & Inexact)", 128 | "string": "-1.11111111111111111111111111111234650" 129 | }, 130 | { 131 | "description": "[dqbsr535] negatives (Rounded & Inexact)", 132 | "string": "-1.11111111111111111111111111111234551" 133 | }, 134 | { 135 | "description": "[dqbsr533] negatives (Rounded & Inexact)", 136 | "string": "-1.11111111111111111111111111111234550" 137 | }, 138 | { 139 | "description": "[dqbsr532] negatives (Rounded & Inexact)", 140 | "string": "-1.11111111111111111111111111111234549" 141 | }, 142 | { 143 | "description": "[dqbsr432] check rounding modes heeded (Rounded & Inexact)", 144 | "string": "1.11111111111111111111111111111234549" 145 | }, 146 | { 147 | "description": "[dqbsr433] check rounding modes heeded (Rounded & Inexact)", 148 | "string": "1.11111111111111111111111111111234550" 149 | }, 150 | { 151 | "description": "[dqbsr435] check rounding modes heeded (Rounded & Inexact)", 152 | "string": "1.11111111111111111111111111111234551" 153 | }, 154 | { 155 | "description": "[dqbsr434] check rounding modes heeded (Rounded & Inexact)", 156 | "string": "1.11111111111111111111111111111234650" 157 | }, 158 | { 159 | "description": 160 | "[dqbas938] overflow results at different rounding modes (Overflow & Inexact & Rounded)", 161 | "string": "7e10000" 162 | }, 163 | { 164 | "description": "Inexact rounding#1", 165 | "string": "100000000000000000000000000000000000000000000000000000000001" 166 | }, 167 | { 168 | "description": "Inexact rounding#2", 169 | "string": "1E-6177" 170 | } 171 | ] 172 | } 173 | -------------------------------------------------------------------------------- /test/node/specs/bson-corpus/decimal128-6.json: -------------------------------------------------------------------------------- 1 | { 2 | "description": "Decimal128", 3 | "bson_type": "0x13", 4 | "test_key": "d", 5 | "parseErrors": [ 6 | { 7 | "description": "Incomplete Exponent", 8 | "string": "1e" 9 | }, 10 | { 11 | "description": "Exponent at the beginning", 12 | "string": "E01" 13 | }, 14 | { 15 | "description": "Just a decimal place", 16 | "string": "." 17 | }, 18 | { 19 | "description": "2 decimal places", 20 | "string": "..3" 21 | }, 22 | { 23 | "description": "2 decimal places", 24 | "string": ".13.3" 25 | }, 26 | { 27 | "description": "2 decimal places", 28 | "string": "1..3" 29 | }, 30 | { 31 | "description": "2 decimal places", 32 | "string": "1.3.4" 33 | }, 34 | { 35 | "description": "2 decimal places", 36 | "string": "1.34." 
37 | }, 38 | { 39 | "description": "Decimal with no digits", 40 | "string": ".e" 41 | }, 42 | { 43 | "description": "2 signs", 44 | "string": "+-32.4" 45 | }, 46 | { 47 | "description": "2 signs", 48 | "string": "-+32.4" 49 | }, 50 | { 51 | "description": "2 negative signs", 52 | "string": "--32.4" 53 | }, 54 | { 55 | "description": "2 negative signs", 56 | "string": "-32.-4" 57 | }, 58 | { 59 | "description": "End in negative sign", 60 | "string": "32.0-" 61 | }, 62 | { 63 | "description": "2 negative signs", 64 | "string": "32.4E--21" 65 | }, 66 | { 67 | "description": "2 negative signs", 68 | "string": "32.4E-2-1" 69 | }, 70 | { 71 | "description": "2 signs", 72 | "string": "32.4E+-21" 73 | }, 74 | { 75 | "description": "Empty string", 76 | "string": "" 77 | }, 78 | { 79 | "description": "leading white space positive number", 80 | "string": " 1" 81 | }, 82 | { 83 | "description": "leading white space negative number", 84 | "string": " -1" 85 | }, 86 | { 87 | "description": "trailing white space", 88 | "string": "1 " 89 | }, 90 | { 91 | "description": "Invalid", 92 | "string": "E" 93 | }, 94 | { 95 | "description": "Invalid", 96 | "string": "invalid" 97 | }, 98 | { 99 | "description": "Invalid", 100 | "string": "i" 101 | }, 102 | { 103 | "description": "Invalid", 104 | "string": "in" 105 | }, 106 | { 107 | "description": "Invalid", 108 | "string": "-in" 109 | }, 110 | { 111 | "description": "Invalid", 112 | "string": "Na" 113 | }, 114 | { 115 | "description": "Invalid", 116 | "string": "-Na" 117 | }, 118 | { 119 | "description": "Invalid", 120 | "string": "1.23abc" 121 | }, 122 | { 123 | "description": "Invalid", 124 | "string": "1.23abcE+02" 125 | }, 126 | { 127 | "description": "Invalid", 128 | "string": "1.23E+0aabs2" 129 | } 130 | ] 131 | } 132 | -------------------------------------------------------------------------------- /test/node/specs/bson-corpus/decimal128-7.json: -------------------------------------------------------------------------------- 1 | { 2 | "description": "Decimal128", 3 | "bson_type": "0x13", 4 | "test_key": "d", 5 | "parseErrors": [ 6 | { 7 | "description": "[basx572] Near-specials (Conversion_syntax)", 8 | "string": "-9Inf" 9 | }, 10 | { 11 | "description": "[basx516] The 'baddies' tests from DiagBigDecimal, plus some new ones (Conversion_syntax)", 12 | "string": "-1-" 13 | }, 14 | { 15 | "description": "[basx533] The 'baddies' tests from DiagBigDecimal, plus some new ones (Conversion_syntax)", 16 | "string": "0000.." 17 | }, 18 | { 19 | "description": "[basx534] The 'baddies' tests from DiagBigDecimal, plus some new ones (Conversion_syntax)", 20 | "string": ".0000." 
21 | }, 22 | { 23 | "description": "[basx535] The 'baddies' tests from DiagBigDecimal, plus some new ones (Conversion_syntax)", 24 | "string": "00..00" 25 | }, 26 | { 27 | "description": "[basx569] Near-specials (Conversion_syntax)", 28 | "string": "0Inf" 29 | }, 30 | { 31 | "description": "[basx571] Near-specials (Conversion_syntax)", 32 | "string": "-0Inf" 33 | }, 34 | { 35 | "description": "[basx575] Near-specials (Conversion_syntax)", 36 | "string": "0sNaN" 37 | }, 38 | { 39 | "description": "[basx503] The 'baddies' tests from DiagBigDecimal, plus some new ones (Conversion_syntax)", 40 | "string": "++1" 41 | }, 42 | { 43 | "description": "[basx504] The 'baddies' tests from DiagBigDecimal, plus some new ones (Conversion_syntax)", 44 | "string": "--1" 45 | }, 46 | { 47 | "description": "[basx505] The 'baddies' tests from DiagBigDecimal, plus some new ones (Conversion_syntax)", 48 | "string": "-+1" 49 | }, 50 | { 51 | "description": "[basx506] The 'baddies' tests from DiagBigDecimal, plus some new ones (Conversion_syntax)", 52 | "string": "+-1" 53 | }, 54 | { 55 | "description": "[basx510] The 'baddies' tests from DiagBigDecimal, plus some new ones (Conversion_syntax)", 56 | "string": " +1" 57 | }, 58 | { 59 | "description": "[basx513] The 'baddies' tests from DiagBigDecimal, plus some new ones (Conversion_syntax)", 60 | "string": " + 1" 61 | }, 62 | { 63 | "description": "[basx514] The 'baddies' tests from DiagBigDecimal, plus some new ones (Conversion_syntax)", 64 | "string": " - 1" 65 | }, 66 | { 67 | "description": "[basx501] The 'baddies' tests from DiagBigDecimal, plus some new ones (Conversion_syntax)", 68 | "string": "." 69 | }, 70 | { 71 | "description": "[basx502] The 'baddies' tests from DiagBigDecimal, plus some new ones (Conversion_syntax)", 72 | "string": ".." 
73 | }, 74 | { 75 | "description": "[basx519] The 'baddies' tests from DiagBigDecimal, plus some new ones (Conversion_syntax)", 76 | "string": "" 77 | }, 78 | { 79 | "description": "[basx525] The 'baddies' tests from DiagBigDecimal, plus some new ones (Conversion_syntax)", 80 | "string": "e100" 81 | }, 82 | { 83 | "description": "[basx549] The 'baddies' tests from DiagBigDecimal, plus some new ones (Conversion_syntax)", 84 | "string": "e+1" 85 | }, 86 | { 87 | "description": "[basx577] some baddies with dots and Es and dots and specials (Conversion_syntax)", 88 | "string": ".e+1" 89 | }, 90 | { 91 | "description": "[basx578] some baddies with dots and Es and dots and specials (Conversion_syntax)", 92 | "string": "+.e+1" 93 | }, 94 | { 95 | "description": "[basx581] some baddies with dots and Es and dots and specials (Conversion_syntax)", 96 | "string": "E+1" 97 | }, 98 | { 99 | "description": "[basx582] some baddies with dots and Es and dots and specials (Conversion_syntax)", 100 | "string": ".E+1" 101 | }, 102 | { 103 | "description": "[basx583] some baddies with dots and Es and dots and specials (Conversion_syntax)", 104 | "string": "+.E+1" 105 | }, 106 | { 107 | "description": "[basx579] some baddies with dots and Es and dots and specials (Conversion_syntax)", 108 | "string": "-.e+" 109 | }, 110 | { 111 | "description": "[basx580] some baddies with dots and Es and dots and specials (Conversion_syntax)", 112 | "string": "-.e" 113 | }, 114 | { 115 | "description": "[basx584] some baddies with dots and Es and dots and specials (Conversion_syntax)", 116 | "string": "-.E+" 117 | }, 118 | { 119 | "description": "[basx585] some baddies with dots and Es and dots and specials (Conversion_syntax)", 120 | "string": "-.E" 121 | }, 122 | { 123 | "description": "[basx589] some baddies with dots and Es and dots and specials (Conversion_syntax)", 124 | "string": "+.Inf" 125 | }, 126 | { 127 | "description": "[basx586] some baddies with dots and Es and dots and specials (Conversion_syntax)", 128 | "string": ".NaN" 129 | }, 130 | { 131 | "description": "[basx587] some baddies with dots and Es and dots and specials (Conversion_syntax)", 132 | "string": "-.NaN" 133 | }, 134 | { 135 | "description": "[basx545] The 'baddies' tests from DiagBigDecimal, plus some new ones (Conversion_syntax)", 136 | "string": "ONE" 137 | }, 138 | { 139 | "description": "[basx561] Near-specials (Conversion_syntax)", 140 | "string": "qNaN" 141 | }, 142 | { 143 | "description": "[basx573] Near-specials (Conversion_syntax)", 144 | "string": "-sNa" 145 | }, 146 | { 147 | "description": "[basx588] some baddies with dots and Es and dots and specials (Conversion_syntax)", 148 | "string": "+.sNaN" 149 | }, 150 | { 151 | "description": "[basx544] The 'baddies' tests from DiagBigDecimal, plus some new ones (Conversion_syntax)", 152 | "string": "ten" 153 | }, 154 | { 155 | "description": "[basx527] The 'baddies' tests from DiagBigDecimal, plus some new ones (Conversion_syntax)", 156 | "string": "u0b65" 157 | }, 158 | { 159 | "description": "[basx526] The 'baddies' tests from DiagBigDecimal, plus some new ones (Conversion_syntax)", 160 | "string": "u0e5a" 161 | }, 162 | { 163 | "description": "[basx515] The 'baddies' tests from DiagBigDecimal, plus some new ones (Conversion_syntax)", 164 | "string": "x" 165 | }, 166 | { 167 | "description": "[basx574] Near-specials (Conversion_syntax)", 168 | "string": "xNaN" 169 | }, 170 | { 171 | "description": "[basx530] The 'baddies' tests from DiagBigDecimal, plus some new ones (Conversion_syntax)", 
172 | "string": ".123.5" 173 | }, 174 | { 175 | "description": "[basx500] The 'baddies' tests from DiagBigDecimal, plus some new ones (Conversion_syntax)", 176 | "string": "1..2" 177 | }, 178 | { 179 | "description": "[basx542] The 'baddies' tests from DiagBigDecimal, plus some new ones (Conversion_syntax)", 180 | "string": "1e1.0" 181 | }, 182 | { 183 | "description": "[basx553] The 'baddies' tests from DiagBigDecimal, plus some new ones (Conversion_syntax)", 184 | "string": "1E+1.2.3" 185 | }, 186 | { 187 | "description": "[basx543] The 'baddies' tests from DiagBigDecimal, plus some new ones (Conversion_syntax)", 188 | "string": "1e123e" 189 | }, 190 | { 191 | "description": "[basx552] The 'baddies' tests from DiagBigDecimal, plus some new ones (Conversion_syntax)", 192 | "string": "1E+1.2" 193 | }, 194 | { 195 | "description": "[basx546] The 'baddies' tests from DiagBigDecimal, plus some new ones (Conversion_syntax)", 196 | "string": "1e.1" 197 | }, 198 | { 199 | "description": "[basx547] The 'baddies' tests from DiagBigDecimal, plus some new ones (Conversion_syntax)", 200 | "string": "1e1." 201 | }, 202 | { 203 | "description": "[basx554] The 'baddies' tests from DiagBigDecimal, plus some new ones (Conversion_syntax)", 204 | "string": "1E++1" 205 | }, 206 | { 207 | "description": "[basx555] The 'baddies' tests from DiagBigDecimal, plus some new ones (Conversion_syntax)", 208 | "string": "1E--1" 209 | }, 210 | { 211 | "description": "[basx556] The 'baddies' tests from DiagBigDecimal, plus some new ones (Conversion_syntax)", 212 | "string": "1E+-1" 213 | }, 214 | { 215 | "description": "[basx557] The 'baddies' tests from DiagBigDecimal, plus some new ones (Conversion_syntax)", 216 | "string": "1E-+1" 217 | }, 218 | { 219 | "description": "[basx558] The 'baddies' tests from DiagBigDecimal, plus some new ones (Conversion_syntax)", 220 | "string": "1E'1" 221 | }, 222 | { 223 | "description": "[basx559] The 'baddies' tests from DiagBigDecimal, plus some new ones (Conversion_syntax)", 224 | "string": "1E\"1" 225 | }, 226 | { 227 | "description": "[basx520] The 'baddies' tests from DiagBigDecimal, plus some new ones (Conversion_syntax)", 228 | "string": "1e-" 229 | }, 230 | { 231 | "description": "[basx560] The 'baddies' tests from DiagBigDecimal, plus some new ones (Conversion_syntax)", 232 | "string": "1E" 233 | }, 234 | { 235 | "description": "[basx548] The 'baddies' tests from DiagBigDecimal, plus some new ones (Conversion_syntax)", 236 | "string": "1ee" 237 | }, 238 | { 239 | "description": "[basx551] The 'baddies' tests from DiagBigDecimal, plus some new ones (Conversion_syntax)", 240 | "string": "1.2.1" 241 | }, 242 | { 243 | "description": "[basx550] The 'baddies' tests from DiagBigDecimal, plus some new ones (Conversion_syntax)", 244 | "string": "1.23.4" 245 | }, 246 | { 247 | "description": "[basx529] The 'baddies' tests from DiagBigDecimal, plus some new ones (Conversion_syntax)", 248 | "string": "1.34.5" 249 | }, 250 | { 251 | "description": "[basx531] The 'baddies' tests from DiagBigDecimal, plus some new ones (Conversion_syntax)", 252 | "string": "01.35." 
253 | }, 254 | { 255 | "description": "[basx532] The 'baddies' tests from DiagBigDecimal, plus some new ones (Conversion_syntax)", 256 | "string": "01.35-" 257 | }, 258 | { 259 | "description": "[basx518] The 'baddies' tests from DiagBigDecimal, plus some new ones (Conversion_syntax)", 260 | "string": "3+" 261 | }, 262 | { 263 | "description": "[basx521] The 'baddies' tests from DiagBigDecimal, plus some new ones (Conversion_syntax)", 264 | "string": "7e99999a" 265 | }, 266 | { 267 | "description": "[basx570] Near-specials (Conversion_syntax)", 268 | "string": "9Inf" 269 | }, 270 | { 271 | "description": "[basx512] The 'baddies' tests from DiagBigDecimal, plus some new ones (Conversion_syntax)", 272 | "string": "12 " 273 | }, 274 | { 275 | "description": "[basx517] The 'baddies' tests from DiagBigDecimal, plus some new ones (Conversion_syntax)", 276 | "string": "12-" 277 | }, 278 | { 279 | "description": "[basx507] The 'baddies' tests from DiagBigDecimal, plus some new ones (Conversion_syntax)", 280 | "string": "12e" 281 | }, 282 | { 283 | "description": "[basx508] The 'baddies' tests from DiagBigDecimal, plus some new ones (Conversion_syntax)", 284 | "string": "12e++" 285 | }, 286 | { 287 | "description": "[basx509] The 'baddies' tests from DiagBigDecimal, plus some new ones (Conversion_syntax)", 288 | "string": "12f4" 289 | }, 290 | { 291 | "description": "[basx536] The 'baddies' tests from DiagBigDecimal, plus some new ones (Conversion_syntax)", 292 | "string": "111e*123" 293 | }, 294 | { 295 | "description": "[basx537] The 'baddies' tests from DiagBigDecimal, plus some new ones (Conversion_syntax)", 296 | "string": "111e123-" 297 | }, 298 | { 299 | "description": "[basx540] The 'baddies' tests from DiagBigDecimal, plus some new ones (Conversion_syntax)", 300 | "string": "111e1*23" 301 | }, 302 | { 303 | "description": "[basx538] The 'baddies' tests from DiagBigDecimal, plus some new ones (Conversion_syntax)", 304 | "string": "111e+12+" 305 | }, 306 | { 307 | "description": "[basx539] The 'baddies' tests from DiagBigDecimal, plus some new ones (Conversion_syntax)", 308 | "string": "111e1-3-" 309 | }, 310 | { 311 | "description": "[basx541] The 'baddies' tests from DiagBigDecimal, plus some new ones (Conversion_syntax)", 312 | "string": "111E1e+3" 313 | }, 314 | { 315 | "description": "[basx528] The 'baddies' tests from DiagBigDecimal, plus some new ones (Conversion_syntax)", 316 | "string": "123,65" 317 | }, 318 | { 319 | "description": "[basx523] The 'baddies' tests from DiagBigDecimal, plus some new ones (Conversion_syntax)", 320 | "string": "7e12356789012x" 321 | }, 322 | { 323 | "description": "[basx522] The 'baddies' tests from DiagBigDecimal, plus some new ones (Conversion_syntax)", 324 | "string": "7e123567890x" 325 | } 326 | ] 327 | } 328 | -------------------------------------------------------------------------------- /test/node/specs/bson-corpus/document.json: -------------------------------------------------------------------------------- 1 | { 2 | "description": "Document type (sub-documents)", 3 | "bson_type": "0x03", 4 | "test_key": "x", 5 | "valid": [ 6 | { 7 | "description": "Empty subdoc", 8 | "canonical_bson": "0D000000037800050000000000", 9 | "canonical_extjson": "{\"x\" : {}}" 10 | }, 11 | { 12 | "description": "Empty-string key subdoc", 13 | "canonical_bson": "150000000378000D00000002000200000062000000", 14 | "canonical_extjson": "{\"x\" : {\"\" : \"b\"}}" 15 | }, 16 | { 17 | "description": "Single-character key subdoc", 18 | "canonical_bson": 
"160000000378000E0000000261000200000062000000", 19 | "canonical_extjson": "{\"x\" : {\"a\" : \"b\"}}" 20 | } 21 | ], 22 | "decodeErrors": [ 23 | { 24 | "description": "Subdocument length too long: eats outer terminator", 25 | "bson": "1800000003666F6F000F0000001062617200FFFFFF7F0000" 26 | }, 27 | { 28 | "description": "Subdocument length too short: leaks terminator", 29 | "bson": "1500000003666F6F000A0000000862617200010000" 30 | }, 31 | { 32 | "description": "Invalid subdocument: bad string length in field", 33 | "bson": "1C00000003666F6F001200000002626172000500000062617A000000" 34 | }, 35 | { 36 | "description": "Null byte in sub-document key", 37 | "bson": "150000000378000D00000010610000010000000000" 38 | } 39 | ] 40 | } 41 | -------------------------------------------------------------------------------- /test/node/specs/bson-corpus/double.json: -------------------------------------------------------------------------------- 1 | { 2 | "description": "Double type", 3 | "bson_type": "0x01", 4 | "test_key": "d", 5 | "valid": [ 6 | { 7 | "description": "+1.0", 8 | "canonical_bson": "10000000016400000000000000F03F00", 9 | "canonical_extjson": "{\"d\" : {\"$numberDouble\": \"1.0\"}}", 10 | "relaxed_extjson": "{\"d\" : 1.0}" 11 | }, 12 | { 13 | "description": "-1.0", 14 | "canonical_bson": "10000000016400000000000000F0BF00", 15 | "canonical_extjson": "{\"d\" : {\"$numberDouble\": \"-1.0\"}}", 16 | "relaxed_extjson": "{\"d\" : -1.0}" 17 | }, 18 | { 19 | "description": "+1.0001220703125", 20 | "canonical_bson": "10000000016400000000008000F03F00", 21 | "canonical_extjson": "{\"d\" : {\"$numberDouble\": \"1.0001220703125\"}}", 22 | "relaxed_extjson": "{\"d\" : 1.0001220703125}" 23 | }, 24 | { 25 | "description": "-1.0001220703125", 26 | "canonical_bson": "10000000016400000000008000F0BF00", 27 | "canonical_extjson": "{\"d\" : {\"$numberDouble\": \"-1.0001220703125\"}}", 28 | "relaxed_extjson": "{\"d\" : -1.0001220703125}" 29 | }, 30 | { 31 | "description": "1.23456789012345677E+18", 32 | "canonical_bson": "1000000001640081E97DF41022B14300", 33 | "canonical_extjson": "{\"d\" : {\"$numberDouble\": \"1.23456789012345677E+18\"}}", 34 | "relaxed_extjson": "{\"d\" : 1.23456789012345677E+18}" 35 | }, 36 | { 37 | "description": "-1.23456789012345677E+18", 38 | "canonical_bson": "1000000001640081E97DF41022B1C300", 39 | "canonical_extjson": "{\"d\" : {\"$numberDouble\": \"-1.23456789012345677E+18\"}}", 40 | "relaxed_extjson": "{\"d\" : -1.23456789012345677E+18}" 41 | }, 42 | { 43 | "description": "0.0", 44 | "canonical_bson": "10000000016400000000000000000000", 45 | "canonical_extjson": "{\"d\" : {\"$numberDouble\": \"0.0\"}}", 46 | "relaxed_extjson": "{\"d\" : 0.0}" 47 | }, 48 | { 49 | "description": "-0.0", 50 | "canonical_bson": "10000000016400000000000000008000", 51 | "canonical_extjson": "{\"d\" : {\"$numberDouble\": \"-0.0\"}}", 52 | "relaxed_extjson": "{\"d\" : -0.0}" 53 | }, 54 | { 55 | "description": "NaN", 56 | "canonical_bson": "10000000016400000000000000F87F00", 57 | "canonical_extjson": "{\"d\": {\"$numberDouble\": \"NaN\"}}", 58 | "relaxed_extjson": "{\"d\": {\"$numberDouble\": \"NaN\"}}", 59 | "lossy": true 60 | }, 61 | { 62 | "description": "NaN with payload", 63 | "canonical_bson": "10000000016400120000000000F87F00", 64 | "canonical_extjson": "{\"d\": {\"$numberDouble\": \"NaN\"}}", 65 | "relaxed_extjson": "{\"d\": {\"$numberDouble\": \"NaN\"}}", 66 | "lossy": true 67 | }, 68 | { 69 | "description": "Inf", 70 | "canonical_bson": "10000000016400000000000000F07F00", 71 | "canonical_extjson": 
"{\"d\": {\"$numberDouble\": \"Infinity\"}}", 72 | "relaxed_extjson": "{\"d\": {\"$numberDouble\": \"Infinity\"}}" 73 | }, 74 | { 75 | "description": "-Inf", 76 | "canonical_bson": "10000000016400000000000000F0FF00", 77 | "canonical_extjson": "{\"d\": {\"$numberDouble\": \"-Infinity\"}}", 78 | "relaxed_extjson": "{\"d\": {\"$numberDouble\": \"-Infinity\"}}" 79 | } 80 | ], 81 | "decodeErrors": [ 82 | { 83 | "description": "double truncated", 84 | "bson": "0B0000000164000000F03F00" 85 | } 86 | ] 87 | } 88 | -------------------------------------------------------------------------------- /test/node/specs/bson-corpus/int32.json: -------------------------------------------------------------------------------- 1 | { 2 | "description": "Int32 type", 3 | "bson_type": "0x10", 4 | "test_key": "i", 5 | "valid": [ 6 | { 7 | "description": "MinValue", 8 | "canonical_bson": "0C0000001069000000008000", 9 | "canonical_extjson": "{\"i\" : {\"$numberInt\": \"-2147483648\"}}", 10 | "relaxed_extjson": "{\"i\" : -2147483648}" 11 | }, 12 | { 13 | "description": "MaxValue", 14 | "canonical_bson": "0C000000106900FFFFFF7F00", 15 | "canonical_extjson": "{\"i\" : {\"$numberInt\": \"2147483647\"}}", 16 | "relaxed_extjson": "{\"i\" : 2147483647}" 17 | }, 18 | { 19 | "description": "-1", 20 | "canonical_bson": "0C000000106900FFFFFFFF00", 21 | "canonical_extjson": "{\"i\" : {\"$numberInt\": \"-1\"}}", 22 | "relaxed_extjson": "{\"i\" : -1}" 23 | }, 24 | { 25 | "description": "0", 26 | "canonical_bson": "0C0000001069000000000000", 27 | "canonical_extjson": "{\"i\" : {\"$numberInt\": \"0\"}}", 28 | "relaxed_extjson": "{\"i\" : 0}" 29 | }, 30 | { 31 | "description": "1", 32 | "canonical_bson": "0C0000001069000100000000", 33 | "canonical_extjson": "{\"i\" : {\"$numberInt\": \"1\"}}", 34 | "relaxed_extjson": "{\"i\" : 1}" 35 | } 36 | ], 37 | "decodeErrors": [ 38 | { 39 | "description": "Bad int32 field length", 40 | "bson": "090000001061000500" 41 | } 42 | ] 43 | } 44 | -------------------------------------------------------------------------------- /test/node/specs/bson-corpus/int64.json: -------------------------------------------------------------------------------- 1 | { 2 | "description": "Int64 type", 3 | "bson_type": "0x12", 4 | "test_key": "a", 5 | "valid": [ 6 | { 7 | "description": "MinValue", 8 | "canonical_bson": "10000000126100000000000000008000", 9 | "canonical_extjson": "{\"a\" : {\"$numberLong\" : \"-9223372036854775808\"}}", 10 | "relaxed_extjson": "{\"a\" : -9223372036854775808}" 11 | }, 12 | { 13 | "description": "MaxValue", 14 | "canonical_bson": "10000000126100FFFFFFFFFFFFFF7F00", 15 | "canonical_extjson": "{\"a\" : {\"$numberLong\" : \"9223372036854775807\"}}", 16 | "relaxed_extjson": "{\"a\" : 9223372036854775807}" 17 | }, 18 | { 19 | "description": "-1", 20 | "canonical_bson": "10000000126100FFFFFFFFFFFFFFFF00", 21 | "canonical_extjson": "{\"a\" : {\"$numberLong\" : \"-1\"}}", 22 | "relaxed_extjson": "{\"a\" : -1}" 23 | }, 24 | { 25 | "description": "0", 26 | "canonical_bson": "10000000126100000000000000000000", 27 | "canonical_extjson": "{\"a\" : {\"$numberLong\" : \"0\"}}", 28 | "relaxed_extjson": "{\"a\" : 0}" 29 | }, 30 | { 31 | "description": "1", 32 | "canonical_bson": "10000000126100010000000000000000", 33 | "canonical_extjson": "{\"a\" : {\"$numberLong\" : \"1\"}}", 34 | "relaxed_extjson": "{\"a\" : 1}" 35 | } 36 | ], 37 | "decodeErrors": [ 38 | { 39 | "description": "int64 field truncated", 40 | "bson": "0C0000001261001234567800" 41 | } 42 | ] 43 | } 44 | 
-------------------------------------------------------------------------------- /test/node/specs/bson-corpus/maxkey.json: -------------------------------------------------------------------------------- 1 | { 2 | "description": "Maxkey type", 3 | "bson_type": "0x7F", 4 | "test_key": "a", 5 | "valid": [ 6 | { 7 | "description": "Maxkey", 8 | "canonical_bson": "080000007F610000", 9 | "canonical_extjson": "{\"a\" : {\"$maxKey\" : 1}}" 10 | } 11 | ] 12 | } 13 | -------------------------------------------------------------------------------- /test/node/specs/bson-corpus/minkey.json: -------------------------------------------------------------------------------- 1 | { 2 | "description": "Minkey type", 3 | "bson_type": "0xFF", 4 | "test_key": "a", 5 | "valid": [ 6 | { 7 | "description": "Minkey", 8 | "canonical_bson": "08000000FF610000", 9 | "canonical_extjson": "{\"a\" : {\"$minKey\" : 1}}" 10 | } 11 | ] 12 | } 13 | -------------------------------------------------------------------------------- /test/node/specs/bson-corpus/multi-type-deprecated.json: -------------------------------------------------------------------------------- 1 | { 2 | "description": "Multiple types within the same document", 3 | "bson_type": "0x00", 4 | "deprecated": true, 5 | "valid": [ 6 | { 7 | "description": "All BSON types", 8 | "canonical_bson": "38020000075F69640057E193D7A9CC81B4027498B50E53796D626F6C000700000073796D626F6C0002537472696E670007000000737472696E670010496E743332002A00000012496E743634002A0000000000000001446F75626C6500000000000000F0BF0542696E617279001000000003A34C38F7C3ABEDC8A37814A992AB8DB60542696E61727955736572446566696E656400050000008001020304050D436F6465000E00000066756E6374696F6E2829207B7D000F436F64655769746853636F7065001B0000000E00000066756E6374696F6E2829207B7D00050000000003537562646F63756D656E74001200000002666F6F0004000000626172000004417272617900280000001030000100000010310002000000103200030000001033000400000010340005000000001154696D657374616D7000010000002A0000000B5265676578007061747465726E0000094461746574696D6545706F6368000000000000000000094461746574696D65506F73697469766500FFFFFF7F00000000094461746574696D654E656761746976650000000080FFFFFFFF085472756500010846616C736500000C4442506F696E746572000B000000636F6C6C656374696F6E0057E193D7A9CC81B4027498B1034442526566003D0000000224726566000B000000636F6C6C656374696F6E00072469640057FD71E96E32AB4225B723FB02246462000900000064617461626173650000FF4D696E6B6579007F4D61786B6579000A4E756C6C0006556E646566696E65640000", 9 | "converted_bson": 
"48020000075f69640057e193d7a9cc81b4027498b50253796d626f6c000700000073796d626f6c0002537472696e670007000000737472696e670010496e743332002a00000012496e743634002a0000000000000001446f75626c6500000000000000f0bf0542696e617279001000000003a34c38f7c3abedc8a37814a992ab8db60542696e61727955736572446566696e656400050000008001020304050d436f6465000e00000066756e6374696f6e2829207b7d000f436f64655769746853636f7065001b0000000e00000066756e6374696f6e2829207b7d00050000000003537562646f63756d656e74001200000002666f6f0004000000626172000004417272617900280000001030000100000010310002000000103200030000001033000400000010340005000000001154696d657374616d7000010000002a0000000b5265676578007061747465726e0000094461746574696d6545706f6368000000000000000000094461746574696d65506f73697469766500ffffff7f00000000094461746574696d654e656761746976650000000080ffffffff085472756500010846616c73650000034442506f696e746572002b0000000224726566000b000000636f6c6c656374696f6e00072469640057e193d7a9cc81b4027498b100034442526566003d0000000224726566000b000000636f6c6c656374696f6e00072469640057fd71e96e32ab4225b723fb02246462000900000064617461626173650000ff4d696e6b6579007f4d61786b6579000a4e756c6c000a556e646566696e65640000", 10 | "canonical_extjson": "{\"_id\": {\"$oid\": \"57e193d7a9cc81b4027498b5\"}, \"Symbol\": {\"$symbol\": \"symbol\"}, \"String\": \"string\", \"Int32\": {\"$numberInt\": \"42\"}, \"Int64\": {\"$numberLong\": \"42\"}, \"Double\": {\"$numberDouble\": \"-1.0\"}, \"Binary\": { \"$binary\" : {\"base64\": \"o0w498Or7cijeBSpkquNtg==\", \"subType\": \"03\"}}, \"BinaryUserDefined\": { \"$binary\" : {\"base64\": \"AQIDBAU=\", \"subType\": \"80\"}}, \"Code\": {\"$code\": \"function() {}\"}, \"CodeWithScope\": {\"$code\": \"function() {}\", \"$scope\": {}}, \"Subdocument\": {\"foo\": \"bar\"}, \"Array\": [{\"$numberInt\": \"1\"}, {\"$numberInt\": \"2\"}, {\"$numberInt\": \"3\"}, {\"$numberInt\": \"4\"}, {\"$numberInt\": \"5\"}], \"Timestamp\": {\"$timestamp\": {\"t\": 42, \"i\": 1}}, \"Regex\": {\"$regularExpression\": {\"pattern\": \"pattern\", \"options\": \"\"}}, \"DatetimeEpoch\": {\"$date\": {\"$numberLong\": \"0\"}}, \"DatetimePositive\": {\"$date\": {\"$numberLong\": \"2147483647\"}}, \"DatetimeNegative\": {\"$date\": {\"$numberLong\": \"-2147483648\"}}, \"True\": true, \"False\": false, \"DBPointer\": {\"$dbPointer\": {\"$ref\": \"collection\", \"$id\": {\"$oid\": \"57e193d7a9cc81b4027498b1\"}}}, \"DBRef\": {\"$ref\": \"collection\", \"$id\": {\"$oid\": \"57fd71e96e32ab4225b723fb\"}, \"$db\": \"database\"}, \"Minkey\": {\"$minKey\": 1}, \"Maxkey\": {\"$maxKey\": 1}, \"Null\": null, \"Undefined\": {\"$undefined\": true}}", 11 | "converted_extjson": "{\"_id\": {\"$oid\": \"57e193d7a9cc81b4027498b5\"}, \"Symbol\": \"symbol\", \"String\": \"string\", \"Int32\": {\"$numberInt\": \"42\"}, \"Int64\": {\"$numberLong\": \"42\"}, \"Double\": {\"$numberDouble\": \"-1.0\"}, \"Binary\": { \"$binary\" : {\"base64\": \"o0w498Or7cijeBSpkquNtg==\", \"subType\": \"03\"}}, \"BinaryUserDefined\": { \"$binary\" : {\"base64\": \"AQIDBAU=\", \"subType\": \"80\"}}, \"Code\": {\"$code\": \"function() {}\"}, \"CodeWithScope\": {\"$code\": \"function() {}\", \"$scope\": {}}, \"Subdocument\": {\"foo\": \"bar\"}, \"Array\": [{\"$numberInt\": \"1\"}, {\"$numberInt\": \"2\"}, {\"$numberInt\": \"3\"}, {\"$numberInt\": \"4\"}, {\"$numberInt\": \"5\"}], \"Timestamp\": {\"$timestamp\": {\"t\": 42, \"i\": 1}}, \"Regex\": {\"$regularExpression\": {\"pattern\": \"pattern\", \"options\": \"\"}}, \"DatetimeEpoch\": {\"$date\": {\"$numberLong\": \"0\"}}, \"DatetimePositive\": 
{\"$date\": {\"$numberLong\": \"2147483647\"}}, \"DatetimeNegative\": {\"$date\": {\"$numberLong\": \"-2147483648\"}}, \"True\": true, \"False\": false, \"DBPointer\": {\"$ref\": \"collection\", \"$id\": {\"$oid\": \"57e193d7a9cc81b4027498b1\"}}, \"DBRef\": {\"$ref\": \"collection\", \"$id\": {\"$oid\": \"57fd71e96e32ab4225b723fb\"}, \"$db\": \"database\"}, \"Minkey\": {\"$minKey\": 1}, \"Maxkey\": {\"$maxKey\": 1}, \"Null\": null, \"Undefined\": null}" 12 | } 13 | ] 14 | } 15 | 16 | -------------------------------------------------------------------------------- /test/node/specs/bson-corpus/multi-type.json: -------------------------------------------------------------------------------- 1 | { 2 | "description": "Multiple types within the same document", 3 | "bson_type": "0x00", 4 | "valid": [ 5 | { 6 | "description": "All BSON types", 7 | "canonical_bson": "F4010000075F69640057E193D7A9CC81B4027498B502537472696E670007000000737472696E670010496E743332002A00000012496E743634002A0000000000000001446F75626C6500000000000000F0BF0542696E617279001000000003A34C38F7C3ABEDC8A37814A992AB8DB60542696E61727955736572446566696E656400050000008001020304050D436F6465000E00000066756E6374696F6E2829207B7D000F436F64655769746853636F7065001B0000000E00000066756E6374696F6E2829207B7D00050000000003537562646F63756D656E74001200000002666F6F0004000000626172000004417272617900280000001030000100000010310002000000103200030000001033000400000010340005000000001154696D657374616D7000010000002A0000000B5265676578007061747465726E0000094461746574696D6545706F6368000000000000000000094461746574696D65506F73697469766500FFFFFF7F00000000094461746574696D654E656761746976650000000080FFFFFFFF085472756500010846616C73650000034442526566003D0000000224726566000B000000636F6C6C656374696F6E00072469640057FD71E96E32AB4225B723FB02246462000900000064617461626173650000FF4D696E6B6579007F4D61786B6579000A4E756C6C0000", 8 | "canonical_extjson": "{\"_id\": {\"$oid\": \"57e193d7a9cc81b4027498b5\"}, \"String\": \"string\", \"Int32\": {\"$numberInt\": \"42\"}, \"Int64\": {\"$numberLong\": \"42\"}, \"Double\": {\"$numberDouble\": \"-1.0\"}, \"Binary\": { \"$binary\" : {\"base64\": \"o0w498Or7cijeBSpkquNtg==\", \"subType\": \"03\"}}, \"BinaryUserDefined\": { \"$binary\" : {\"base64\": \"AQIDBAU=\", \"subType\": \"80\"}}, \"Code\": {\"$code\": \"function() {}\"}, \"CodeWithScope\": {\"$code\": \"function() {}\", \"$scope\": {}}, \"Subdocument\": {\"foo\": \"bar\"}, \"Array\": [{\"$numberInt\": \"1\"}, {\"$numberInt\": \"2\"}, {\"$numberInt\": \"3\"}, {\"$numberInt\": \"4\"}, {\"$numberInt\": \"5\"}], \"Timestamp\": {\"$timestamp\": {\"t\": 42, \"i\": 1}}, \"Regex\": {\"$regularExpression\": {\"pattern\": \"pattern\", \"options\": \"\"}}, \"DatetimeEpoch\": {\"$date\": {\"$numberLong\": \"0\"}}, \"DatetimePositive\": {\"$date\": {\"$numberLong\": \"2147483647\"}}, \"DatetimeNegative\": {\"$date\": {\"$numberLong\": \"-2147483648\"}}, \"True\": true, \"False\": false, \"DBRef\": {\"$ref\": \"collection\", \"$id\": {\"$oid\": \"57fd71e96e32ab4225b723fb\"}, \"$db\": \"database\"}, \"Minkey\": {\"$minKey\": 1}, \"Maxkey\": {\"$maxKey\": 1}, \"Null\": null}" 9 | } 10 | ] 11 | } 12 | -------------------------------------------------------------------------------- /test/node/specs/bson-corpus/null.json: -------------------------------------------------------------------------------- 1 | { 2 | "description": "Null type", 3 | "bson_type": "0x0A", 4 | "test_key": "a", 5 | "valid": [ 6 | { 7 | "description": "Null", 8 | "canonical_bson": "080000000A610000", 9 | "canonical_extjson": 
"{\"a\" : null}" 10 | } 11 | ] 12 | } 13 | -------------------------------------------------------------------------------- /test/node/specs/bson-corpus/oid.json: -------------------------------------------------------------------------------- 1 | { 2 | "description": "ObjectId", 3 | "bson_type": "0x07", 4 | "test_key": "a", 5 | "valid": [ 6 | { 7 | "description": "All zeroes", 8 | "canonical_bson": "1400000007610000000000000000000000000000", 9 | "canonical_extjson": "{\"a\" : {\"$oid\" : \"000000000000000000000000\"}}" 10 | }, 11 | { 12 | "description": "All ones", 13 | "canonical_bson": "14000000076100FFFFFFFFFFFFFFFFFFFFFFFF00", 14 | "canonical_extjson": "{\"a\" : {\"$oid\" : \"ffffffffffffffffffffffff\"}}" 15 | }, 16 | { 17 | "description": "Random", 18 | "canonical_bson": "1400000007610056E1FC72E0C917E9C471416100", 19 | "canonical_extjson": "{\"a\" : {\"$oid\" : \"56e1fc72e0c917e9c4714161\"}}" 20 | } 21 | ], 22 | "decodeErrors": [ 23 | { 24 | "description": "OID truncated", 25 | "bson": "1200000007610056E1FC72E0C917E9C471" 26 | } 27 | ] 28 | } 29 | -------------------------------------------------------------------------------- /test/node/specs/bson-corpus/regex.json: -------------------------------------------------------------------------------- 1 | { 2 | "description": "Regular Expression type", 3 | "bson_type": "0x0B", 4 | "test_key": "a", 5 | "valid": [ 6 | { 7 | "description": "empty regex with no options", 8 | "canonical_bson": "0A0000000B6100000000", 9 | "canonical_extjson": 10 | "{\"a\" : {\"$regularExpression\" : { \"pattern\": \"\", \"options\" : \"\"}}}" 11 | }, 12 | { 13 | "description": "regex without options", 14 | "canonical_bson": "0D0000000B6100616263000000", 15 | "canonical_extjson": 16 | "{\"a\" : {\"$regularExpression\" : { \"pattern\": \"abc\", \"options\" : \"\"}}}" 17 | }, 18 | { 19 | "description": "regex with options", 20 | "canonical_bson": "0F0000000B610061626300696D0000", 21 | "canonical_extjson": 22 | "{\"a\" : {\"$regularExpression\" : { \"pattern\": \"abc\", \"options\" : \"im\"}}}" 23 | }, 24 | { 25 | "description": "regex with options (keys reversed)", 26 | "canonical_bson": "0F0000000B610061626300696D0000", 27 | "canonical_extjson": 28 | "{\"a\" : {\"$regularExpression\" : { \"pattern\": \"abc\", \"options\" : \"im\"}}}", 29 | "degenerate_extjson": 30 | "{\"a\" : {\"$regularExpression\" : {\"options\" : \"im\", \"pattern\": \"abc\"}}}" 31 | }, 32 | { 33 | "description": "regex with slash", 34 | "canonical_bson": "110000000B610061622F636400696D0000", 35 | "canonical_extjson": 36 | "{\"a\" : {\"$regularExpression\" : { \"pattern\": \"ab/cd\", \"options\" : \"im\"}}}" 37 | }, 38 | { 39 | "description": "flags not alphabetized", 40 | "degenerate_bson": "100000000B6100616263006D69780000", 41 | "canonical_bson": "100000000B610061626300696D780000", 42 | "canonical_extjson": 43 | "{\"a\" : {\"$regularExpression\" : { \"pattern\": \"abc\", \"options\" : \"imx\"}}}", 44 | "degenerate_extjson": 45 | "{\"a\" : {\"$regularExpression\" : { \"pattern\": \"abc\", \"options\" : \"mix\"}}}" 46 | }, 47 | { 48 | "description": "Required escapes", 49 | "canonical_bson": "100000000B610061625C226162000000", 50 | "canonical_extjson": 51 | "{\"a\" : {\"$regularExpression\" : { \"pattern\": \"ab\\\\\\\"ab\", \"options\" : \"\"}}}" 52 | }, 53 | { 54 | "description": "Regular expression as value of $regex query operator", 55 | "canonical_bson": "180000000B247265676578007061747465726E0069780000", 56 | "canonical_extjson": 57 | "{\"$regex\" : {\"$regularExpression\" : { 
\"pattern\": \"pattern\", \"options\" : \"ix\"}}}" 58 | }, 59 | { 60 | "description": "Regular expression as value of $regex query operator with $options", 61 | "canonical_bson": 62 | "270000000B247265676578007061747465726E000002246F7074696F6E73000300000069780000", 63 | "canonical_extjson": 64 | "{\"$regex\" : {\"$regularExpression\" : { \"pattern\": \"pattern\", \"options\" : \"\"}}, \"$options\" : \"ix\"}" 65 | } 66 | ], 67 | "decodeErrors": [ 68 | { 69 | "description": "Null byte in pattern string", 70 | "bson": "0F0000000B610061006300696D0000" 71 | }, 72 | { 73 | "description": "Null byte in flags string", 74 | "bson": "100000000B61006162630069006D0000" 75 | } 76 | ] 77 | } 78 | -------------------------------------------------------------------------------- /test/node/specs/bson-corpus/string.json: -------------------------------------------------------------------------------- 1 | { 2 | "description": "String", 3 | "bson_type": "0x02", 4 | "test_key": "a", 5 | "valid": [ 6 | { 7 | "description": "Empty string", 8 | "canonical_bson": "0D000000026100010000000000", 9 | "canonical_extjson": "{\"a\" : \"\"}" 10 | }, 11 | { 12 | "description": "Single character", 13 | "canonical_bson": "0E00000002610002000000620000", 14 | "canonical_extjson": "{\"a\" : \"b\"}" 15 | }, 16 | { 17 | "description": "Multi-character", 18 | "canonical_bson": "190000000261000D0000006162616261626162616261620000", 19 | "canonical_extjson": "{\"a\" : \"abababababab\"}" 20 | }, 21 | { 22 | "description": "two-byte UTF-8 (\u00e9)", 23 | "canonical_bson": "190000000261000D000000C3A9C3A9C3A9C3A9C3A9C3A90000", 24 | "canonical_extjson": "{\"a\" : \"\\u00e9\\u00e9\\u00e9\\u00e9\\u00e9\\u00e9\"}" 25 | }, 26 | { 27 | "description": "three-byte UTF-8 (\u2606)", 28 | "canonical_bson": "190000000261000D000000E29886E29886E29886E298860000", 29 | "canonical_extjson": "{\"a\" : \"\\u2606\\u2606\\u2606\\u2606\"}" 30 | }, 31 | { 32 | "description": "Embedded nulls", 33 | "canonical_bson": "190000000261000D0000006162006261620062616261620000", 34 | "canonical_extjson": "{\"a\" : \"ab\\u0000bab\\u0000babab\"}" 35 | }, 36 | { 37 | "description": "Required escapes", 38 | "canonical_bson" : "320000000261002600000061625C220102030405060708090A0B0C0D0E0F101112131415161718191A1B1C1D1E1F61620000", 39 | "canonical_extjson" : "{\"a\":\"ab\\\\\\\"\\u0001\\u0002\\u0003\\u0004\\u0005\\u0006\\u0007\\b\\t\\n\\u000b\\f\\r\\u000e\\u000f\\u0010\\u0011\\u0012\\u0013\\u0014\\u0015\\u0016\\u0017\\u0018\\u0019\\u001a\\u001b\\u001c\\u001d\\u001e\\u001fab\"}" 40 | } 41 | ], 42 | "decodeErrors": [ 43 | { 44 | "description": "bad string length: 0 (but no 0x00 either)", 45 | "bson": "0C0000000261000000000000" 46 | }, 47 | { 48 | "description": "bad string length: -1", 49 | "bson": "0C000000026100FFFFFFFF00" 50 | }, 51 | { 52 | "description": "bad string length: eats terminator", 53 | "bson": "10000000026100050000006200620000" 54 | }, 55 | { 56 | "description": "bad string length: longer than rest of document", 57 | "bson": "120000000200FFFFFF00666F6F6261720000" 58 | }, 59 | { 60 | "description": "string is not null-terminated", 61 | "bson": "1000000002610004000000616263FF00" 62 | }, 63 | { 64 | "description": "empty string, but extra null", 65 | "bson": "0E00000002610001000000000000" 66 | }, 67 | { 68 | "description": "invalid UTF-8", 69 | "bson": "0E00000002610002000000E90000" 70 | } 71 | ] 72 | } 73 | -------------------------------------------------------------------------------- /test/node/specs/bson-corpus/symbol.json: 
-------------------------------------------------------------------------------- 1 | { 2 | "description": "Symbol", 3 | "bson_type": "0x0E", 4 | "deprecated": true, 5 | "test_key": "a", 6 | "valid": [ 7 | { 8 | "description": "Empty string", 9 | "canonical_bson": "0D0000000E6100010000000000", 10 | "canonical_extjson": "{\"a\": {\"$symbol\": \"\"}}", 11 | "converted_bson": "0D000000026100010000000000", 12 | "converted_extjson": "{\"a\": \"\"}" 13 | }, 14 | { 15 | "description": "Single character", 16 | "canonical_bson": "0E0000000E610002000000620000", 17 | "canonical_extjson": "{\"a\": {\"$symbol\": \"b\"}}", 18 | "converted_bson": "0E00000002610002000000620000", 19 | "converted_extjson": "{\"a\": \"b\"}" 20 | }, 21 | { 22 | "description": "Multi-character", 23 | "canonical_bson": "190000000E61000D0000006162616261626162616261620000", 24 | "canonical_extjson": "{\"a\": {\"$symbol\": \"abababababab\"}}", 25 | "converted_bson": "190000000261000D0000006162616261626162616261620000", 26 | "converted_extjson": "{\"a\": \"abababababab\"}" 27 | }, 28 | { 29 | "description": "two-byte UTF-8 (\u00e9)", 30 | "canonical_bson": "190000000E61000D000000C3A9C3A9C3A9C3A9C3A9C3A90000", 31 | "canonical_extjson": "{\"a\": {\"$symbol\": \"éééééé\"}}", 32 | "converted_bson": "190000000261000D000000C3A9C3A9C3A9C3A9C3A9C3A90000", 33 | "converted_extjson": "{\"a\": \"éééééé\"}" 34 | }, 35 | { 36 | "description": "three-byte UTF-8 (\u2606)", 37 | "canonical_bson": "190000000E61000D000000E29886E29886E29886E298860000", 38 | "canonical_extjson": "{\"a\": {\"$symbol\": \"☆☆☆☆\"}}", 39 | "converted_bson": "190000000261000D000000E29886E29886E29886E298860000", 40 | "converted_extjson": "{\"a\": \"☆☆☆☆\"}" 41 | }, 42 | { 43 | "description": "Embedded nulls", 44 | "canonical_bson": "190000000E61000D0000006162006261620062616261620000", 45 | "canonical_extjson": "{\"a\": {\"$symbol\": \"ab\\u0000bab\\u0000babab\"}}", 46 | "converted_bson": "190000000261000D0000006162006261620062616261620000", 47 | "converted_extjson": "{\"a\": \"ab\\u0000bab\\u0000babab\"}" 48 | } 49 | ], 50 | "decodeErrors": [ 51 | { 52 | "description": "bad symbol length: 0 (but no 0x00 either)", 53 | "bson": "0C0000000261000000000000" 54 | }, 55 | { 56 | "description": "bad symbol length: -1", 57 | "bson": "0C000000026100FFFFFFFF00" 58 | }, 59 | { 60 | "description": "bad symbol length: eats terminator", 61 | "bson": "10000000026100050000006200620000" 62 | }, 63 | { 64 | "description": "bad symbol length: longer than rest of document", 65 | "bson": "120000000200FFFFFF00666F6F6261720000" 66 | }, 67 | { 68 | "description": "symbol is not null-terminated", 69 | "bson": "1000000002610004000000616263FF00" 70 | }, 71 | { 72 | "description": "empty symbol, but extra null", 73 | "bson": "0E00000002610001000000000000" 74 | }, 75 | { 76 | "description": "invalid UTF-8", 77 | "bson": "0E00000002610002000000E90000" 78 | } 79 | ] 80 | } 81 | -------------------------------------------------------------------------------- /test/node/specs/bson-corpus/timestamp.json: -------------------------------------------------------------------------------- 1 | { 2 | "description": "Timestamp type", 3 | "bson_type": "0x11", 4 | "test_key": "a", 5 | "valid": [ 6 | { 7 | "description": "Timestamp: (123456789, 42)", 8 | "canonical_bson": "100000001161002A00000015CD5B0700", 9 | "canonical_extjson": "{\"a\" : {\"$timestamp\" : {\"t\" : 123456789, \"i\" : 42} } }" 10 | }, 11 | { 12 | "description": "Timestamp: (123456789, 42) (keys reversed)", 13 | "canonical_bson": 
"100000001161002A00000015CD5B0700", 14 | "canonical_extjson": "{\"a\" : {\"$timestamp\" : {\"t\" : 123456789, \"i\" : 42} } }", 15 | "degenerate_extjson": "{\"a\" : {\"$timestamp\" : {\"i\" : 42, \"t\" : 123456789} } }" 16 | }, 17 | { 18 | "description": "Timestamp with high-order bit set on both seconds and increment", 19 | "canonical_bson": "10000000116100FFFFFFFFFFFFFFFF00", 20 | "canonical_extjson": "{\"a\" : {\"$timestamp\" : {\"t\" : 4294967295, \"i\" : 4294967295} } }" 21 | } 22 | ], 23 | "decodeErrors": [ 24 | { 25 | "description": "Truncated timestamp field", 26 | "bson": "0f0000001161002A00000015CD5B00" 27 | } 28 | ] 29 | } 30 | -------------------------------------------------------------------------------- /test/node/specs/bson-corpus/top.json: -------------------------------------------------------------------------------- 1 | { 2 | "description": "Top-level document validity", 3 | "bson_type": "0x00", 4 | "valid": [ 5 | { 6 | "description": "Document with keys that start with $", 7 | "canonical_bson": "0F00000010246B6579002A00000000", 8 | "canonical_extjson": "{\"$key\": {\"$numberInt\": \"42\"}}" 9 | } 10 | ], 11 | "decodeErrors": [ 12 | { 13 | "description": 14 | "An object size that's too small to even include the object size, but is a well-formed, empty object", 15 | "bson": "0100000000" 16 | }, 17 | { 18 | "description": 19 | "An object size that's only enough for the object size, but is a well-formed, empty object", 20 | "bson": "0400000000" 21 | }, 22 | { 23 | "description": "One object, with length shorter than size (missing EOO)", 24 | "bson": "05000000" 25 | }, 26 | { 27 | "description": 28 | "One object, sized correctly, with a spot for an EOO, but the EOO is 0x01", 29 | "bson": "0500000001" 30 | }, 31 | { 32 | "description": 33 | "One object, sized correctly, with a spot for an EOO, but the EOO is 0xff", 34 | "bson": "05000000FF" 35 | }, 36 | { 37 | "description": 38 | "One object, sized correctly, with a spot for an EOO, but the EOO is 0x70", 39 | "bson": "0500000070" 40 | }, 41 | { 42 | "description": "Byte count is zero (with non-zero input length)", 43 | "bson": "00000000000000000000" 44 | }, 45 | { 46 | "description": "Stated length exceeds byte count, with truncated document", 47 | "bson": "1200000002666F6F0004000000626172" 48 | }, 49 | { 50 | "description": "Stated length less than byte count, with garbage after envelope", 51 | "bson": "1200000002666F6F00040000006261720000DEADBEEF" 52 | }, 53 | { 54 | "description": "Stated length exceeds byte count, with valid envelope", 55 | "bson": "1300000002666F6F00040000006261720000" 56 | }, 57 | { 58 | "description": "Stated length less than byte count, with valid envelope", 59 | "bson": "1100000002666F6F00040000006261720000" 60 | }, 61 | { 62 | "description": "Invalid BSON type low range", 63 | "bson": "07000000000000" 64 | }, 65 | { 66 | "description": "Invalid BSON type high range", 67 | "bson": "07000000800000" 68 | }, 69 | { 70 | "description": "Document truncated mid-key", 71 | "bson": "1200000002666F" 72 | }, 73 | { 74 | "description": "Null byte in document key", 75 | "bson": "0D000000107800000100000000" 76 | } 77 | ], 78 | "parseErrors": [ 79 | { 80 | "description" : "Bad $regularExpression (extra field)", 81 | "string" : "{\"a\" : {\"$regularExpression\": {\"pattern\": \"abc\", \"options\": \"\", \"unrelated\": true}}}" 82 | }, 83 | { 84 | "description" : "Bad $regularExpression (missing options field)", 85 | "string" : "{\"a\" : {\"$regularExpression\": {\"pattern\": \"abc\"}}}" 86 | }, 87 | { 88 | 
"description": "Bad $regularExpression (pattern is number, not string)", 89 | "string": "{\"x\" : {\"$regularExpression\" : { \"pattern\": 42, \"options\" : \"\"}}}" 90 | }, 91 | { 92 | "description": "Bad $regularExpression (options are number, not string)", 93 | "string": "{\"x\" : {\"$regularExpression\" : { \"pattern\": \"a\", \"options\" : 0}}}" 94 | }, 95 | { 96 | "description" : "Bad $regularExpression (missing pattern field)", 97 | "string" : "{\"a\" : {\"$regularExpression\": {\"options\":\"ix\"}}}" 98 | }, 99 | { 100 | "description": "Bad $oid (number, not string)", 101 | "string": "{\"a\" : {\"$oid\" : 42}}" 102 | }, 103 | { 104 | "description": "Bad $oid (extra field)", 105 | "string": "{\"a\" : {\"$oid\" : \"56e1fc72e0c917e9c4714161\", \"unrelated\": true}}" 106 | }, 107 | { 108 | "description": "Bad $numberInt (number, not string)", 109 | "string": "{\"a\" : {\"$numberInt\" : 42}}" 110 | }, 111 | { 112 | "description": "Bad $numberInt (extra field)", 113 | "string": "{\"a\" : {\"$numberInt\" : \"42\", \"unrelated\": true}}" 114 | }, 115 | { 116 | "description": "Bad $numberLong (number, not string)", 117 | "string": "{\"a\" : {\"$numberLong\" : 42}}" 118 | }, 119 | { 120 | "description": "Bad $numberLong (extra field)", 121 | "string": "{\"a\" : {\"$numberLong\" : \"42\", \"unrelated\": true}}" 122 | }, 123 | { 124 | "description": "Bad $numberDouble (number, not string)", 125 | "string": "{\"a\" : {\"$numberDouble\" : 42}}" 126 | }, 127 | { 128 | "description": "Bad $numberDouble (extra field)", 129 | "string": "{\"a\" : {\"$numberDouble\" : \".1\", \"unrelated\": true}}" 130 | }, 131 | { 132 | "description": "Bad $numberDecimal (number, not string)", 133 | "string": "{\"a\" : {\"$numberDecimal\" : 42}}" 134 | }, 135 | { 136 | "description": "Bad $numberDecimal (extra field)", 137 | "string": "{\"a\" : {\"$numberDecimal\" : \".1\", \"unrelated\": true}}" 138 | }, 139 | { 140 | "description": "Bad $binary (binary is number, not string)", 141 | "string": "{\"x\" : {\"$binary\" : {\"base64\" : 0, \"subType\" : \"00\"}}}" 142 | }, 143 | { 144 | "description": "Bad $binary (type is number, not string)", 145 | "string": "{\"x\" : {\"$binary\" : {\"base64\" : \"\", \"subType\" : 0}}}" 146 | }, 147 | { 148 | "description": "Bad $binary (missing $type)", 149 | "string": "{\"x\" : {\"$binary\" : {\"base64\" : \"//8=\"}}}" 150 | }, 151 | { 152 | "description": "Bad $binary (missing $binary)", 153 | "string": "{\"x\" : {\"$binary\" : {\"subType\" : \"00\"}}}" 154 | }, 155 | { 156 | "description": "Bad $binary (extra field)", 157 | "string": "{\"x\" : {\"$binary\" : {\"base64\" : \"//8=\", \"subType\" : 0, \"unrelated\": true}}}" 158 | }, 159 | { 160 | "description": "Bad $code (type is number, not string)", 161 | "string": "{\"a\" : {\"$code\" : 42}}" 162 | }, 163 | { 164 | "description": "Bad $code (type is number, not string) when $scope is also present", 165 | "string": "{\"a\" : {\"$code\" : 42, \"$scope\" : {}}}" 166 | }, 167 | { 168 | "description": "Bad $code (extra field)", 169 | "string": "{\"a\" : {\"$code\" : \"\", \"unrelated\": true}}" 170 | }, 171 | { 172 | "description": "Bad $code with $scope (scope is number, not doc)", 173 | "string": "{\"x\" : {\"$code\" : \"\", \"$scope\" : 42}}" 174 | }, 175 | { 176 | "description": "Bad $timestamp (type is number, not doc)", 177 | "string": "{\"a\" : {\"$timestamp\" : 42} }" 178 | }, 179 | { 180 | "description": "Bad $timestamp ('t' type is string, not number)", 181 | "string": "{\"a\" : {\"$timestamp\" : {\"t\" : \"123456789\", 
\"i\" : 42} } }" 182 | }, 183 | { 184 | "description": "Bad $timestamp ('i' type is string, not number)", 185 | "string": "{\"a\" : {\"$timestamp\" : {\"t\" : 123456789, \"i\" : \"42\"} } }" 186 | }, 187 | { 188 | "description": "Bad $timestamp (extra field at same level as $timestamp)", 189 | "string": "{\"a\" : {\"$timestamp\" : {\"t\" : \"123456789\", \"i\" : \"42\"}, \"unrelated\": true } }" 190 | }, 191 | { 192 | "description": "Bad $timestamp (extra field at same level as t and i)", 193 | "string": "{\"a\" : {\"$timestamp\" : {\"t\" : \"123456789\", \"i\" : \"42\", \"unrelated\": true} } }" 194 | }, 195 | { 196 | "description": "Bad $timestamp (missing t)", 197 | "string": "{\"a\" : {\"$timestamp\" : {\"i\" : \"42\"} } }" 198 | }, 199 | { 200 | "description": "Bad $timestamp (missing i)", 201 | "string": "{\"a\" : {\"$timestamp\" : {\"t\" : \"123456789\"} } }" 202 | }, 203 | { 204 | "description": "Bad $date (number, not string or hash)", 205 | "string": "{\"a\" : {\"$date\" : 42}}" 206 | }, 207 | { 208 | "description": "Bad $date (extra field)", 209 | "string": "{\"a\" : {\"$date\" : {\"$numberLong\" : \"1356351330501\"}, \"unrelated\": true}}" 210 | }, 211 | { 212 | "description": "Bad $minKey (boolean, not integer)", 213 | "string": "{\"a\" : {\"$minKey\" : true}}" 214 | }, 215 | { 216 | "description": "Bad $minKey (wrong integer)", 217 | "string": "{\"a\" : {\"$minKey\" : 0}}" 218 | }, 219 | { 220 | "description": "Bad $minKey (extra field)", 221 | "string": "{\"a\" : {\"$minKey\" : 1, \"unrelated\": true}}" 222 | }, 223 | { 224 | "description": "Bad $maxKey (boolean, not integer)", 225 | "string": "{\"a\" : {\"$maxKey\" : true}}" 226 | }, 227 | { 228 | "description": "Bad $maxKey (wrong integer)", 229 | "string": "{\"a\" : {\"$maxKey\" : 0}}" 230 | }, 231 | { 232 | "description": "Bad $maxKey (extra field)", 233 | "string": "{\"a\" : {\"$maxKey\" : 1, \"unrelated\": true}}" 234 | }, 235 | { 236 | "description": "Bad DBpointer (extra field)", 237 | "string": "{\"a\": {\"$dbPointer\": {\"a\": {\"$numberInt\": \"1\"}, \"$id\": {\"$oid\": \"56e1fc72e0c917e9c4714161\"}, \"c\": {\"$numberInt\": \"2\"}, \"$ref\": \"b\"}}}" 238 | }, 239 | { 240 | "description" : "Null byte in document key", 241 | "string" : "{\"a\\u0000\": 1 }" 242 | }, 243 | { 244 | "description" : "Null byte in sub-document key", 245 | "string" : "{\"a\" : {\"b\\u0000\": 1 }}" 246 | }, 247 | { 248 | "description": "Null byte in $regularExpression pattern", 249 | "string": "{\"a\" : {\"$regularExpression\" : { \"pattern\": \"b\\u0000\", \"options\" : \"i\"}}}" 250 | }, 251 | { 252 | "description": "Null byte in $regularExpression options", 253 | "string": "{\"a\" : {\"$regularExpression\" : { \"pattern\": \"b\", \"options\" : \"i\\u0000\"}}}" 254 | } 255 | ] 256 | } 257 | -------------------------------------------------------------------------------- /test/node/specs/bson-corpus/undefined.json: -------------------------------------------------------------------------------- 1 | { 2 | "description": "Undefined type (deprecated)", 3 | "bson_type": "0x06", 4 | "deprecated": true, 5 | "test_key": "a", 6 | "valid": [ 7 | { 8 | "description": "Undefined", 9 | "canonical_bson": "0800000006610000", 10 | "canonical_extjson": "{\"a\" : {\"$undefined\" : true}}", 11 | "converted_bson": "080000000A610000", 12 | "converted_extjson": "{\"a\" : null}" 13 | } 14 | ] 15 | } 16 | -------------------------------------------------------------------------------- /test/node/to_bson_test.js: 
-------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | var BSON = require('../..'), 4 | ObjectID = BSON.ObjectID, 5 | createBSON = require('../utils'), 6 | expect = require('chai').expect; 7 | 8 | describe('toBSON', function() { 9 | /** 10 | * @ignore 11 | */ 12 | it('Should correctly handle toBSON function for an object', function(done) { 13 | // Test object 14 | var doc = { 15 | hello: new ObjectID(), 16 | a: 1 17 | }; 18 | 19 | // Add a toBSON method to the object 20 | doc.toBSON = function() { 21 | return { b: 1 }; 22 | }; 23 | 24 | // Serialize the data 25 | var serialized_data = createBSON().serialize(doc, false, true); 26 | var deserialized_doc = createBSON().deserialize(serialized_data); 27 | expect({ b: 1 }).to.deep.equal(deserialized_doc); 28 | 29 | // Serialize a second time to verify the result is repeatable 30 | serialized_data = createBSON().serialize(doc, false, true); 31 | deserialized_doc = createBSON().deserialize(serialized_data); 32 | expect({ b: 1 }).to.deep.equal(deserialized_doc); 33 | done(); 34 | }); 35 | 36 | /** 37 | * @ignore 38 | */ 39 | it('Should correctly handle embedded toBSON function for an object', function(done) { 40 | // Test object 41 | var doc = { 42 | hello: new ObjectID(), 43 | a: 1, 44 | b: { 45 | d: 1 46 | } 47 | }; 48 | 49 | // Add a toBSON method to the embedded object 50 | doc.b.toBSON = function() { 51 | return { e: 1 }; 52 | }; 53 | 54 | // Serialize the data 55 | var serialized_data = createBSON().serialize(doc, false, true); 56 | var deserialized_doc = createBSON().deserialize(serialized_data); 57 | expect({ e: 1 }).to.deep.equal(deserialized_doc.b); 58 | 59 | serialized_data = createBSON().serialize(doc, false, true); 60 | deserialized_doc = createBSON().deserialize(serialized_data); 61 | expect({ e: 1 }).to.deep.equal(deserialized_doc.b); 62 | done(); 63 | }); 64 | 65 | /** 66 | * @ignore 67 | */ 68 | it('Should correctly serialize when an embedded toBSON returns a non-object value', function(done) { 69 | // Test object 70 | var doc = { 71 | hello: new ObjectID(), 72 | a: 1, 73 | b: { 74 | d: 1 75 | } 76 | }; 77 | 78 | // Add a toBSON method to the embedded object 79 | doc.b.toBSON = function() { 80 | return 'hello'; 81 | }; 82 | 83 | // Serialize the data 84 | var serialized_data = createBSON().serialize(doc, false, true); 85 | var deserialized_doc = createBSON().deserialize(serialized_data); 86 | expect('hello').to.deep.equal(deserialized_doc.b); 87 | 88 | // Serialize a second time to verify the result is repeatable 89 | serialized_data = createBSON().serialize(doc, false, true); 90 | deserialized_doc = createBSON().deserialize(serialized_data); 91 | expect('hello').to.deep.equal(deserialized_doc.b); 92 | done(); 93 | }); 94 | 95 | /** 96 | * @ignore 97 | */ 98 | it('Should fail when the top-level toBSON returns a non-object type', function(done) { 99 | // Test object 100 | var doc = { 101 | hello: new ObjectID(), 102 | a: 1, 103 | b: { 104 | d: 1 105 | } 106 | }; 107 | 108 | // Add a toBSON method to the object 109 | doc.toBSON = function() { 110 | return 'hello'; 111 | }; 112 | 113 | var test1 = false; 114 | var test2 = false; 115 | 116 | try { 117 | var serialized_data = createBSON().serialize(doc, false, true); 118 | createBSON().deserialize(serialized_data); 119 | } catch (err) { 120 | test1 = true; 121 | } 122 | 123 | try { 124 | serialized_data = createBSON().serialize(doc, false, true); 125 | createBSON().deserialize(serialized_data); 126 | } catch (err) { 127 | test2 = true; 128 | } 129 | 130 | expect(true).to.equal(test1); 131 | expect(true).to.equal(test2); 132 | done(); 133 | 
}); 134 | }); 135 | -------------------------------------------------------------------------------- /test/node/tools/utils.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | exports.assertArrayEqual = function(array1, array2) { 4 | if (array1.length !== array2.length) return false; 5 | for (var i = 0; i < array1.length; i++) { 6 | if (array1[i] !== array2[i]) return false; 7 | } 8 | 9 | return true; 10 | }; 11 | 12 | // String to arraybuffer 13 | exports.stringToArrayBuffer = function(string) { 14 | var dataBuffer = new Uint8Array(new ArrayBuffer(string.length)); 15 | // Copy each character code into the typed array 16 | for (var i = 0; i < string.length; i++) { 17 | dataBuffer[i] = string.charCodeAt(i); 18 | } 19 | // Return the data buffer 20 | return dataBuffer; 21 | }; 22 | 23 | // String to plain array of character codes 24 | exports.stringToArray = function(string) { 25 | var dataBuffer = new Array(string.length); 26 | // Copy each character code into the array 27 | for (var i = 0; i < string.length; i++) { 28 | dataBuffer[i] = string.charCodeAt(i); 29 | } 30 | // Return the array 31 | return dataBuffer; 32 | }; 33 | 34 | exports.Utf8 = { 35 | // public method for UTF-8 encoding 36 | encode: function(string) { 37 | string = string.replace(/\r\n/g, '\n'); 38 | var utftext = ''; 39 | 40 | for (var n = 0; n < string.length; n++) { 41 | var c = string.charCodeAt(n); 42 | if (c < 128) { 43 | utftext += String.fromCharCode(c); 44 | } else if (c > 127 && c < 2048) { 45 | utftext += String.fromCharCode((c >> 6) | 192); 46 | utftext += String.fromCharCode((c & 63) | 128); 47 | } else { 48 | utftext += String.fromCharCode((c >> 12) | 224); 49 | utftext += String.fromCharCode(((c >> 6) & 63) | 128); 50 | utftext += String.fromCharCode((c & 63) | 128); 51 | } 52 | } 53 | 54 | return utftext; 55 | }, 56 | 57 | // public method for UTF-8 decoding 58 | decode: function(utftext) { 59 | var string = ''; 60 | var i = 0; 61 | var c = 0, 62 | c2 = 0, 63 | c3 = 0; 64 | 65 | while (i < utftext.length) { 66 | c = utftext.charCodeAt(i); 67 | if (c < 128) { 68 | string += String.fromCharCode(c); 69 | i++; 70 | } else if (c > 191 && c < 224) { 71 | c2 = utftext.charCodeAt(i + 1); 72 | string += String.fromCharCode(((c & 31) << 6) | (c2 & 63)); 73 | i += 2; 74 | } else { 75 | c2 = utftext.charCodeAt(i + 1); 76 | c3 = utftext.charCodeAt(i + 2); 77 | string += String.fromCharCode(((c & 15) << 12) | ((c2 & 63) << 6) | (c3 & 63)); 78 | i += 3; 79 | } 80 | } 81 | return string; 82 | } 83 | }; 84 | -------------------------------------------------------------------------------- /test/scripts/test.cmd: -------------------------------------------------------------------------------- 1 | SET GYP_MSVS_VERSION=2013 2 | SET PATH=%PATH%;%APPDATA%\npm 3 | SET PATH=%PATH%;C:\Program Files (x86)\Git\bin 4 | SET PATH=%PATH%;C:\Program Files (x86)\Microsoft Visual Studio 12.0\VC\bin 5 | call node -p "process.versions" 6 | call npm install -g https://github.com/mongodb-js/node-pre-gyp/archive/v0.6.5-appveyor.tar.gz https://github.com/mongodb-js/node-gyp/archive/v1.04-appveyor.tar.gz 7 | call npm install --build-from-source 8 | call npm test || echo ERROR && exit /b 9 | call npm install -g aws-sdk 10 | call node-pre-gyp publish-maybe 11 | -------------------------------------------------------------------------------- /test/utils.js: -------------------------------------------------------------------------------- 1 | var BSON = require('..'); 2 | 3 | module.exports = function createBSON() { 4 | return BSON; 5 | }; 6 | 
--------------------------------------------------------------------------------
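
For reference, the bson-corpus fixtures above are designed to be driven by a generic harness: for each "valid" entry, the canonical_bson hex is decoded, deserialized, re-serialized, and compared byte-for-byte against the original. The following is a minimal, illustrative sketch of that round-trip check — it is not the repository's actual corpus harness; the require paths are assumptions for illustration, and it relies only on the serialize/deserialize API that test/utils.js above exposes.

'use strict';

var BSON = require('../..'); // assumed path: the same module handle test/utils.js returns
var corpus = require('./specs/bson-corpus/null.json'); // any corpus fixture shown above

corpus.valid.forEach(function(testCase) {
  // Decode the canonical BSON bytes from their hex encoding.
  var canonicalBytes = Buffer.from(testCase.canonical_bson, 'hex');

  // Deserialize to a native document, then serialize it back to bytes.
  var doc = BSON.deserialize(canonicalBytes);
  var roundTripped = BSON.serialize(doc);

  // A conforming implementation reproduces the canonical bytes exactly.
  if (!canonicalBytes.equals(roundTripped)) {
    throw new Error('round-trip mismatch: ' + testCase.description);
  }
});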