├── .evergreen ├── check-clippy.sh ├── check-rustdoc.sh ├── check-rustfmt.sh ├── compile-only.sh ├── config.yml ├── install-dependencies.sh ├── install-fuzzer.sh ├── release-danger-do-not-run-manually.sh ├── releases.yml ├── run-fuzzer.sh ├── run-tests.sh └── run-wasm-tests.sh ├── .github ├── CODEOWNERS ├── ISSUE_TEMPLATE │ └── bug_report.md ├── dependabot.yml └── workflows │ ├── close_stale_issues.yml │ ├── issue_assignment.yml │ └── remove_labels.yml ├── .gitignore ├── Cargo.lock ├── Cargo.toml ├── LICENSE ├── README.md ├── clippy.toml ├── etc └── update-spec-tests.sh ├── fuzz ├── .gitignore ├── Cargo.toml ├── fuzz_targets │ ├── decode.rs │ ├── encoding.rs │ ├── iterate.rs │ ├── raw_deserialize.rs │ ├── raw_deserialize_utf8_lossy.rs │ ├── string_handling.rs │ └── type_markers.rs └── generate_corpus.rs ├── rustfmt.toml ├── serde-tests ├── Cargo.lock ├── Cargo.toml ├── json.rs ├── lib.rs ├── rustfmt.toml └── test.rs ├── src ├── base64.rs ├── binary.rs ├── binary │ └── vector.rs ├── bson.rs ├── datetime.rs ├── datetime │ └── builder.rs ├── de.rs ├── de │ ├── raw.rs │ └── serde.rs ├── decimal128.rs ├── document.rs ├── error.rs ├── error │ ├── decimal128.rs │ ├── oid.rs │ ├── uuid.rs │ └── value_access.rs ├── extjson.rs ├── extjson │ ├── json.rs │ └── models.rs ├── lib.rs ├── macros.rs ├── oid.rs ├── raw.rs ├── raw │ ├── array.rs │ ├── array_buf.rs │ ├── bson.rs │ ├── bson_ref.rs │ ├── cstr.rs │ ├── document.rs │ ├── document_buf.rs │ ├── document_buf │ │ └── raw_writer.rs │ ├── iter.rs │ ├── serde.rs │ ├── serde │ │ ├── bson_visitor.rs │ │ └── seeded_visitor.rs │ ├── test.rs │ └── test │ │ ├── append.rs │ │ └── props.rs ├── ser.rs ├── ser │ ├── raw.rs │ ├── raw │ │ ├── document_serializer.rs │ │ └── value_serializer.rs │ └── serde.rs ├── serde_helpers.rs ├── spec.rs ├── tests.rs ├── tests │ ├── binary_subtype.rs │ ├── datetime.rs │ ├── modules.rs │ ├── modules │ │ ├── binary.rs │ │ ├── bson.rs │ │ ├── document.rs │ │ ├── lock.rs │ │ ├── macros.rs │ │ ├── oid.rs │ │ ├── ser.rs │ │ └── serializer_deserializer.rs │ ├── serde.rs │ ├── serde_helpers.rs │ ├── spec.rs │ └── spec │ │ ├── corpus.rs │ │ ├── json │ │ ├── bson-binary-vector │ │ │ ├── README.md │ │ │ ├── float32.json │ │ │ ├── int8.json │ │ │ └── packed_bit.json │ │ └── bson-corpus │ │ │ ├── array.json │ │ │ ├── binary.json │ │ │ ├── boolean.json │ │ │ ├── bsonview │ │ │ ├── code.json │ │ │ ├── code_w_scope.json │ │ │ ├── datetime.json │ │ │ ├── dbpointer.json │ │ │ ├── dbref.json │ │ │ ├── decimal128-1.json │ │ │ ├── decimal128-2.json │ │ │ ├── decimal128-3.json │ │ │ ├── decimal128-4.json │ │ │ ├── decimal128-5.json │ │ │ ├── decimal128-6.json │ │ │ ├── decimal128-7.json │ │ │ ├── document.json │ │ │ ├── double.json │ │ │ ├── int32.json │ │ │ ├── int64.json │ │ │ ├── maxkey.json │ │ │ ├── minkey.json │ │ │ ├── multi-type-deprecated.json │ │ │ ├── multi-type.json │ │ │ ├── null.json │ │ │ ├── oid.json │ │ │ ├── regex.json │ │ │ ├── string.json │ │ │ ├── symbol.json │ │ │ ├── timestamp.json │ │ │ ├── top.json │ │ │ └── undefined.json │ │ └── vector.rs ├── uuid.rs └── uuid │ └── test.rs └── wasm-test ├── .cargo └── config.toml ├── Cargo.lock ├── Cargo.toml └── src ├── lib.rs └── test.rs /.evergreen/check-clippy.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | set -o errexit 4 | 5 | . ~/.cargo/env 6 | 7 | # Pin clippy to the latest version. This should be updated when new versions of Rust are released. 
8 | CLIPPY_VERSION=1.83.0 9 | 10 | rustup install $CLIPPY_VERSION 11 | 12 | cargo +$CLIPPY_VERSION clippy --all-targets --all-features -p bson -- -D warnings 13 | 14 | cd serde-tests 15 | cargo +$CLIPPY_VERSION clippy --all-targets --all-features -p serde-tests -- -D warnings 16 | -------------------------------------------------------------------------------- /.evergreen/check-rustdoc.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | set -o errexit 4 | 5 | . ~/.cargo/env 6 | 7 | cargo +nightly rustdoc -p bson --all-features -- --cfg docsrs -D warnings -------------------------------------------------------------------------------- /.evergreen/check-rustfmt.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | set -o errexit 4 | 5 | . ~/.cargo/env 6 | cargo +nightly fmt -- --check 7 | 8 | cd serde-tests && cargo +nightly fmt -- --check 9 | -------------------------------------------------------------------------------- /.evergreen/compile-only.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | set -o errexit 4 | 5 | . ~/.cargo/env 6 | rustup update $RUST_VERSION 7 | 8 | if [ ! -z "$TARGET" ]; then 9 | if [[ "$TARGET" = "wasm32-wasi" && "$RUST_VERSION" = "nightly" ]]; then 10 | # renamed in newer versions of rustc 11 | TARGET="wasm32-wasip1" 12 | fi 13 | if [[ "$TARGET" = "wasm32-unknown-unknown" ]]; then 14 | export RUSTFLAGS='--cfg getrandom_backend="wasm_js"' 15 | fi 16 | rustup target add $TARGET --toolchain $RUST_VERSION 17 | TARGET="--target=$TARGET" 18 | fi 19 | 20 | # Generate a new lockfile with MSRV-compatible dependencies. 21 | if [ "$MSRV" = "true" ]; then 22 | CARGO_RESOLVER_INCOMPATIBLE_RUST_VERSIONS=fallback cargo +nightly -Zmsrv-policy generate-lockfile 23 | fi 24 | 25 | rustup run $RUST_VERSION cargo build $TARGET 26 | -------------------------------------------------------------------------------- /.evergreen/config.yml: -------------------------------------------------------------------------------- 1 | ######################################## 2 | # Evergreen Template for MongoDB Drivers 3 | ######################################## 4 | 5 | # When a task that used to pass starts to fail, 6 | # go through all versions that may have been skipped to detect 7 | # when the task started failing 8 | stepback: true 9 | 10 | # Mark a failure as a system/bootstrap failure (purple box) rather than a task 11 | # failure by default. 12 | # Actual testing tasks are marked with `type: test` 13 | command_type: system 14 | 15 | # Protect ourselves against a rogue test case, or curl gone wild, that runs forever 16 | # 60 minutes is the longest we'll ever run 17 | exec_timeout_secs: 3600 # 1 hour total for security-focused fuzzing 18 | 19 | # What to do when evergreen hits the timeout (`post:` tasks are run automatically) 20 | timeout: 21 | - command: shell.exec 22 | params: 23 | script: | 24 | echo "Fuzzing timed out. Collecting any available artifacts..." 25 | if [ -d "src/fuzz/artifacts" ]; then 26 | tar czf "${PROJECT_DIRECTORY}/crash-artifacts.tar.gz" src/fuzz/artifacts/ 27 | fi 28 | 29 | functions: 30 | "fetch source": 31 | # Executes git clone and applies the submitted patch, if any 32 | - command: git.get_project 33 | params: 34 | directory: "src" 35 | # Applies the submitted patch, if any 36 | # Deprecated. Should be removed.
But still needed for certain agents (ZAP) 37 | - command: git.apply_patch 38 | # Make an evergreen expansion file with dynamic values 39 | - command: shell.exec 40 | params: 41 | working_dir: "src" 42 | script: | 43 | # Get the current unique version of this checkout 44 | if [ "${is_patch}" = "true" ]; then 45 | CURRENT_VERSION=$(git describe)-patch-${version_id} 46 | else 47 | CURRENT_VERSION=latest 48 | fi 49 | 50 | export PROJECT_DIRECTORY="$(pwd)" 51 | 52 | cat <<EOT > expansion.yml 53 | CURRENT_VERSION: "$CURRENT_VERSION" 54 | PROJECT_DIRECTORY: "$PROJECT_DIRECTORY" 55 | PREPARE_SHELL: | 56 | set -o errexit 57 | set -o xtrace 58 | export PROJECT_DIRECTORY="$PROJECT_DIRECTORY" 59 | 60 | export PROJECT="${project}" 61 | EOT 62 | # See what we've done 63 | cat expansion.yml 64 | 65 | # Load the expansion file to make an evergreen variable with the current unique version 66 | - command: expansions.update 67 | params: 68 | file: src/expansion.yml 69 | 70 | "install dependencies": 71 | command: shell.exec 72 | params: 73 | working_dir: "src" 74 | script: | 75 | ${PREPARE_SHELL} 76 | .evergreen/install-dependencies.sh 77 | 78 | "run tests": 79 | - command: shell.exec 80 | type: test 81 | params: 82 | shell: bash 83 | working_dir: "src" 84 | script: | 85 | ${PREPARE_SHELL} 86 | .evergreen/run-tests.sh 87 | 88 | "compile only": 89 | - command: shell.exec 90 | type: test 91 | params: 92 | shell: bash 93 | working_dir: "src" 94 | script: | 95 | ${PREPARE_SHELL} 96 | RUST_VERSION=${RUST_VERSION} MSRV=${MSRV} TARGET=${TARGET} .evergreen/compile-only.sh 97 | 98 | "check rustfmt": 99 | - command: shell.exec 100 | type: test 101 | params: 102 | shell: bash 103 | working_dir: "src" 104 | script: | 105 | ${PREPARE_SHELL} 106 | .evergreen/check-rustfmt.sh 107 | 108 | "check clippy": 109 | - command: shell.exec 110 | type: test 111 | params: 112 | shell: bash 113 | working_dir: "src" 114 | script: | 115 | ${PREPARE_SHELL} 116 | .evergreen/check-clippy.sh 117 | 118 | "run fuzzer": 119 | - command: shell.exec 120 | type: test 121 | params: 122 | shell: bash 123 | working_dir: "src" 124 | script: | 125 | ${PREPARE_SHELL} 126 | .evergreen/install-fuzzer.sh 127 | .evergreen/run-fuzzer.sh 128 | 129 | "check rustdoc": 130 | - command: shell.exec 131 | type: test 132 | params: 133 | shell: bash 134 | working_dir: "src" 135 | script: | 136 | ${PREPARE_SHELL} 137 | .evergreen/check-rustdoc.sh 138 | 139 | "run wasm tests": 140 | - command: shell.exec 141 | type: test 142 | params: 143 | shell: bash 144 | working_dir: "src" 145 | script: | 146 | ${PREPARE_SHELL} 147 | .evergreen/run-wasm-tests.sh 148 | 149 | "init test-results": 150 | - command: shell.exec 151 | params: 152 | script: | 153 | ${PREPARE_SHELL} 154 | echo '{"results": [{ "status": "FAIL", "test_file": "Build", "log_raw": "No test-results.json found was created" } ]}' > ${PROJECT_DIRECTORY}/test-results.json 155 | 156 | "cleanup": 157 | - command: shell.exec 158 | params: 159 | script: | 160 | # Archive crash artifacts if they exist and contain crashes 161 | if [ -d "src/fuzz/artifacts" ] && [ "$(ls -A src/fuzz/artifacts)" ]; then 162 | echo "Creating artifacts archive..." 163 | tar czf "${PROJECT_DIRECTORY}/crash-artifacts.tar.gz" src/fuzz/artifacts/ 164 | else 165 | echo "No crashes found in artifacts directory. Skipping archive creation."
166 | fi 167 | # Upload crash artifacts if they exist 168 | - command: s3.put 169 | params: 170 | aws_key: ${aws_key} 171 | aws_secret: ${aws_secret} 172 | local_file: ${PROJECT_DIRECTORY}/crash-artifacts.tar.gz 173 | remote_file: ${CURRENT_VERSION}/crash-artifacts.tar.gz 174 | bucket: mciuploads 175 | permissions: public-read 176 | content_type: application/x-gzip 177 | optional: true 178 | 179 | pre: 180 | - func: "fetch source" 181 | - func: "install dependencies" 182 | 183 | post: 184 | - func: "cleanup" 185 | 186 | tasks: 187 | - name: "test" 188 | commands: 189 | - func: "run tests" 190 | 191 | - name: "compile-only" 192 | commands: 193 | - func: "compile only" 194 | 195 | - name: "check-rustfmt" 196 | commands: 197 | - func: "check rustfmt" 198 | 199 | - name: "check-clippy" 200 | commands: 201 | - func: "check clippy" 202 | 203 | - name: "check-rustdoc" 204 | commands: 205 | - func: "check rustdoc" 206 | 207 | - name: "run-fuzzer" 208 | commands: 209 | - func: "run fuzzer" 210 | 211 | - name: "wasm-test" 212 | commands: 213 | - func: "run wasm tests" 214 | 215 | axes: 216 | - id: "extra-rust-versions" 217 | values: 218 | - id: "min" 219 | display_name: "1.81 (minimum supported version)" 220 | variables: 221 | RUST_VERSION: "1.81" 222 | MSRV: "true" 223 | - id: "nightly" 224 | display_name: "nightly" 225 | variables: 226 | RUST_VERSION: "nightly" 227 | - id: "extra-targets" 228 | values: 229 | - id: "current" 230 | display_name: "current target" 231 | - id: "wasi" 232 | display_name: "WASI" 233 | variables: 234 | TARGET: "wasm32-wasi" 235 | - id: "wasm" 236 | display_name: "WASM" 237 | variables: 238 | TARGET: "wasm32-unknown-unknown" 239 | 240 | buildvariants: 241 | - 242 | name: "tests" 243 | display_name: "Tests" 244 | run_on: 245 | - ubuntu2204-small 246 | tasks: 247 | - name: "test" 248 | 249 | - matrix_name: "compile only" 250 | matrix_spec: 251 | extra-rust-versions: "*" 252 | extra-targets: "*" 253 | display_name: "Compile on Rust ${extra-rust-versions} (${extra-targets})" 254 | run_on: 255 | - ubuntu2204-small 256 | tasks: 257 | - name: "compile-only" 258 | 259 | - 260 | name: "lint" 261 | display_name: "Lint" 262 | run_on: 263 | - ubuntu2204-small 264 | tasks: 265 | - name: "check-clippy" 266 | - name: "check-rustfmt" 267 | - name: "check-rustdoc" 268 | 269 | - 270 | name: "fuzz" 271 | display_name: "Raw BSON Fuzzer" 272 | run_on: 273 | - ubuntu2204-small 274 | tasks: 275 | - name: "run-fuzzer" 276 | 277 | - 278 | name: "wasm" 279 | display_name: "WASM" 280 | run_on: 281 | - ubuntu2204-small 282 | tasks: 283 | - name: "wasm-test" 284 | -------------------------------------------------------------------------------- /.evergreen/install-dependencies.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | rm -rf ~/.rustup 4 | curl https://sh.rustup.rs -sSf | sh -s -- -y --no-modify-path 5 | 6 | echo "export CARGO_NET_GIT_FETCH_WITH_CLI=true" >> ~/.cargo/env 7 | . ~/.cargo/env 8 | rustup toolchain install nightly -c rustfmt 9 | -------------------------------------------------------------------------------- /.evergreen/install-fuzzer.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | set -o errexit 4 | 5 | . 
~/.cargo/env 6 | 7 | cargo install cargo-fuzz 8 | -------------------------------------------------------------------------------- /.evergreen/release-danger-do-not-run-manually.sh: -------------------------------------------------------------------------------- 1 | # ☠ ☢ ☠ ☢ ☠ ☢ ☠ ☢ ☠ ☢ ☠ ☢ ☠ ☢ ☠ ☢ ☠ ☢ ☠ ☢ ☠ ☢ ☠ ☢ ☠ ☢ ☠ ☢ 2 | # # Danger! 3 | # 4 | # This script is used to publish a release of the driver to crates.io. 5 | # 6 | # Do not run it manually! 7 | # ☢ ☠ ☢ ☠ ☢ ☠ ☢ ☠ ☢ ☠ ☢ ☠ ☢ ☠ ☢ ☠ ☢ ☠ ☢ ☠ ☢ ☠ ☢ ☠ ☢ ☠ ☢ ☠ 8 | 9 | # Disable tracing 10 | set +x 11 | 12 | set -o errexit 13 | 14 | if [[ -z "$TAG" ]]; then 15 | >&2 echo "\$TAG must be set to the git tag of the release" 16 | exit 1 17 | fi 18 | 19 | if [[ -z "$CRATES_IO_TOKEN" ]]; then 20 | >&2 echo "\$CRATES_IO_TOKEN must be set to the crates.io authentication token" 21 | exit 1 22 | fi 23 | 24 | git fetch origin $TAG 25 | git checkout $TAG 26 | 27 | . ~/.cargo/env 28 | 29 | cargo publish --token $CRATES_IO_TOKEN 30 | -------------------------------------------------------------------------------- /.evergreen/releases.yml: -------------------------------------------------------------------------------- 1 | exec_timeout_secs: 3600 2 | 3 | functions: 4 | "fetch source": 5 | - command: git.get_project 6 | type: system 7 | params: 8 | directory: "src" 9 | 10 | "install dependencies": 11 | command: shell.exec 12 | params: 13 | working_dir: "src" 14 | script: | 15 | ${PREPARE_SHELL} 16 | .evergreen/install-dependencies.sh 17 | 18 | "publish release": 19 | - command: shell.exec 20 | type: test 21 | params: 22 | working_dir: "src" 23 | include_expansions_in_env: 24 | - CRATES_IO_TOKEN 25 | script: | 26 | set +x 27 | 28 | TAG=${GIT_TAG} \ 29 | bash .evergreen/release-danger-do-not-run-manually.sh 30 | 31 | tasks: 32 | - name: "publish-release" 33 | commands: 34 | - func: "fetch source" 35 | - func: "install dependencies" 36 | - func: "publish release" 37 | vars: 38 | GIT_TAG: ${triggered_by_git_tag} 39 | 40 | axes: 41 | - id: "os" 42 | display_name: OS 43 | values: 44 | - id: ubuntu-16.04 45 | display_name: "Ubuntu 16.04" 46 | run_on: ubuntu1604-test 47 | 48 | buildvariants: 49 | - 50 | matrix_name: "release" 51 | matrix_spec: 52 | os: 53 | - ubuntu-16.04 54 | display_name: "Publish driver release" 55 | tasks: 56 | - "publish-release" 57 | -------------------------------------------------------------------------------- /.evergreen/run-fuzzer.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | set -o errexit 4 | 5 | . ~/.cargo/env 6 | 7 | cd fuzz 8 | 9 | # Create directories for crashes and corpus 10 | mkdir -p artifacts 11 | mkdir -p corpus 12 | 13 | # Generate initial corpus if directory is empty 14 | if [ -z "$(ls -A corpus)" ]; then 15 | echo "Generating initial corpus..." 
16 | cargo run --bin generate_corpus 17 | fi 18 | 19 | # Function to run fuzzer and collect crashes 20 | run_fuzzer() { 21 | target=$1 22 | echo "Running fuzzer for $target" 23 | # Run fuzzer and redirect crashes to artifacts directory 24 | RUST_BACKTRACE=1 cargo +nightly fuzz run $target -- \ 25 | -rss_limit_mb=4096 \ 26 | -max_total_time=60 \ 27 | -artifact_prefix=artifacts/ \ 28 | -print_final_stats=1 \ 29 | corpus/ 30 | } 31 | 32 | # Run existing targets 33 | run_fuzzer "decode" 34 | run_fuzzer "raw_deserialize" 35 | run_fuzzer "raw_deserialize_utf8_lossy" 36 | run_fuzzer "iterate" 37 | 38 | # Run new security-focused targets 39 | run_fuzzer "type_markers" 40 | run_fuzzer "string_handling" 41 | run_fuzzer "encoding" 42 | -------------------------------------------------------------------------------- /.evergreen/run-tests.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | set -o errexit 4 | 5 | . ~/.cargo/env 6 | 7 | # Test with default features and excluding doctests (some of which require the 'serde' feature) 8 | RUST_BACKTRACE=1 cargo test --all-targets 9 | # Test with all features and including doctests 10 | RUST_BACKTRACE=1 cargo test --all-features 11 | 12 | cd serde-tests 13 | RUST_BACKTRACE=1 cargo test 14 | -------------------------------------------------------------------------------- /.evergreen/run-wasm-tests.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | set -o errexit 4 | 5 | . ~/.cargo/env 6 | 7 | curl https://rustwasm.github.io/wasm-pack/installer/init.sh -sSf | sh 8 | 9 | # install nvm 10 | curl -o- https://raw.githubusercontent.com/nvm-sh/nvm/v0.40.1/install.sh | bash 11 | . ~/.nvm/nvm.sh 12 | # install node 13 | nvm install --no-progress node 14 | node --version 15 | 16 | cd $(dirname $0)/../wasm-test 17 | wasm-pack test --node -------------------------------------------------------------------------------- /.github/CODEOWNERS: -------------------------------------------------------------------------------- 1 | # Listing code owners is required by DRIVERS-3098 2 | * @mongodb/dbx-rust 3 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/bug_report.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Bug report 3 | about: Create a report to help us improve 4 | title: '' 5 | labels: '' 6 | assignees: '' 7 | 8 | --- 9 | 10 | 14 | 15 | ## Versions/Environment 16 | 1. What version of Rust are you using? 17 | 2. What operating system are you using? 18 | 3. What versions of the driver and its dependencies are you using? (Run 19 | `cargo pkgid mongodb` & `cargo pkgid bson`) 20 | 4. What version of MongoDB are you using? (Check with the MongoDB shell using `db.version()`) 21 | 5. What is your MongoDB topology (standalone, replica set, sharded cluster, serverless)? 22 | 23 | 24 | 25 | ## Describe the bug 26 | A clear and concise description of what the bug is. 27 | 28 | **BE SPECIFIC**: 29 | * What is the _expected_ behavior and what is _actually_ happening? 30 | * Do you have any particular output that demonstrates this problem? 31 | * Do you have any ideas on _why_ this may be happening that could give us a 32 | clue in the right direction? 33 | * Did this issue arise out of nowhere, or after an update (of the driver, 34 | server, and/or Rust)? 35 | * Are there multiple ways of triggering this bug (perhaps more than one 36 | function produces a crash)?
37 | * If you know how to reproduce this bug, please include a code snippet here: 38 | ``` 39 | 40 | ``` 41 | 42 | 43 | **To Reproduce** 44 | Steps to reproduce the behavior: 45 | 1. First, do this. 46 | 2. Then do this. 47 | 3. After doing that, do this. 48 | 4. And then, finally, do this. 49 | 5. Bug occurs. 50 | -------------------------------------------------------------------------------- /.github/dependabot.yml: -------------------------------------------------------------------------------- 1 | version: 2 2 | updates: 3 | - package-ecosystem: cargo 4 | directory: / 5 | schedule: 6 | interval: weekly 7 | # Only bump to the latest version compatible with the dependency's version 8 | # in Cargo.toml. This is the equivalent of running `cargo update`. 9 | versioning-strategy: lockfile-only 10 | # Update all dependencies in a single PR. 11 | groups: 12 | rust-dependencies: 13 | patterns: 14 | - "*" 15 | # Include transitive dependencies. 16 | allow: 17 | - dependency-type: all 18 | -------------------------------------------------------------------------------- /.github/workflows/close_stale_issues.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | name: 'Close stale issues' 4 | on: 5 | schedule: 6 | - cron: '30 1 * * *' 7 | permissions: 8 | issues: write 9 | jobs: 10 | stale: 11 | runs-on: ubuntu-latest 12 | steps: 13 | - uses: actions/stale@v4 14 | with: 15 | stale-issue-message: 'There has not been any recent activity on this ticket, so we are marking it as stale. If we do not hear anything further from you, this issue will be automatically closed in one week.' 16 | days-before-issue-stale: 7 17 | days-before-pr-stale: -1 18 | days-before-close: 7 19 | close-issue-message: 'There has not been any recent activity on this ticket, so we are closing it. Thanks for reaching out and please feel free to file a new issue if you have further questions.' 
20 | only-issue-labels: 'waiting-for-reporter' 21 | -------------------------------------------------------------------------------- /.github/workflows/issue_assignment.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | name: Issue assignment 4 | on: 5 | issues: 6 | types: [opened] 7 | jobs: 8 | auto-assign: 9 | runs-on: ubuntu-latest 10 | steps: 11 | - name: 'Auto-assign issue' 12 | uses: pozil/auto-assign-issue@v1.1.0 13 | with: 14 | assignees: abr-egn,isabelatkinson 15 | numOfAssignee: 1 16 | add-labels: 17 | runs-on: ubuntu-latest 18 | steps: 19 | - name: initial labeling 20 | uses: andymckay/labeler@master 21 | with: 22 | add-labels: "triage" 23 | -------------------------------------------------------------------------------- /.github/workflows/remove_labels.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | name: Remove Labels 4 | on: 5 | issue_comment: 6 | types: [created, edited] 7 | jobs: 8 | remove-labels: 9 | if: ${{ github.actor != 'Tom Selander' && github.actor != 'patrickfreed' 10 | && github.actor != 'abr-egn' && github.actor != 'isabelatkinson'}} 11 | runs-on: ubuntu-latest 12 | steps: 13 | - name: initial labeling 14 | uses: andymckay/labeler@master 15 | with: 16 | remove-labels: "waiting-for-reporter, Stale" 17 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | target 2 | .vscode 3 | .idea 4 | *~ 5 | *.swp 6 | **/.DS_Store 7 | -------------------------------------------------------------------------------- /Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "bson" 3 | version = "3.0.0" 4 | authors = [ 5 | "Y. T. Chung ", 6 | "Kevin Yeh ", 7 | "Saghm Rossi ", 8 | "Patrick Freed ", 9 | "Isabel Atkinson ", 10 | "Abraham Egnor ", 11 | ] 12 | description = "Encoding and decoding support for BSON in Rust" 13 | license = "MIT" 14 | readme = "README.md" 15 | repository = "https://github.com/mongodb/bson-rust" 16 | edition = "2021" 17 | keywords = ["bson", "mongodb", "serde", "serialization", "deserialization"] 18 | categories = ["encoding"] 19 | rust-version = "1.81" 20 | 21 | # By default cargo includes everything git includes. 22 | # cargo diet can help to manage what's not useful. 23 | exclude = [ 24 | "etc/**", 25 | "examples/**", 26 | "fuzz/**", 27 | "serde-tests/**", 28 | "src/tests/**", 29 | "rustfmt.toml", 30 | ".travis.yml", 31 | ".evergreen/**", 32 | ".gitignore" 33 | ] 34 | 35 | [features] 36 | default = ["compat-3-0-0"] 37 | compat-3-0-0 = [] 38 | # if enabled, include API for interfacing with chrono 0.4 39 | chrono-0_4 = ["dep:chrono"] 40 | # enable the large-dates feature for the time crate 41 | large_dates = ["time/large-dates"] 42 | # if enabled, include API for interfacing with uuid 1.x 43 | uuid-1 = [] 44 | # if enabled, include API for interfacing with time 0.3 45 | time-0_3 = [] 46 | serde_path_to_error = ["dep:serde_path_to_error"] 47 | # if enabled, include serde_with interop. 48 | # should be used in conjunction with chrono-0_4 or uuid-1.
49 | serde_with-3 = ["dep:serde_with", "dep:serde"] 50 | serde = ["dep:serde"] 51 | serde_json-1 = ["dep:serde_json"] 52 | 53 | [lib] 54 | name = "bson" 55 | 56 | [dependencies] 57 | ahash = "0.8.0" 58 | chrono = { version = "0.4.15", features = ["std"], default-features = false, optional = true } 59 | rand = "0.9" 60 | serde = { version = "1.0", features = ["derive"], optional = true } 61 | serde_json = { version = "1.0", features = ["preserve_order"], optional = true } 62 | indexmap = "2.1.0" 63 | hex = "0.4.2" 64 | base64 = "0.22.1" 65 | once_cell = "1.5.1" 66 | uuid = { version = "1.1.2", features = ["serde", "v4"] } 67 | serde_bytes = "0.11.5" 68 | serde_with = { version = "3.1.0", optional = true } 69 | time = { version = "0.3.9", features = ["formatting", "parsing", "macros"] } 70 | thiserror = "2" 71 | bitvec = "1.0.1" 72 | serde_path_to_error = { version = "0.1.16", optional = true } 73 | simdutf8 = "0.1.5" 74 | 75 | [target.'cfg(all(target_arch = "wasm32", target_os = "unknown"))'.dependencies] 76 | js-sys = "0.3" 77 | uuid = { version = "1.1.2", features = ["serde", "v4", "js"] } 78 | getrandom = { version = "0.2", features = ["js"] } 79 | getrandom_03 = { package = "getrandom", version = "0.3", features = ["wasm_js"] } 80 | 81 | [dev-dependencies] 82 | assert_matches = "1.2" 83 | criterion = "0.3.0" 84 | pretty_assertions = "0.6.1" 85 | proptest = "1.0.0" 86 | serde_bytes = "0.11" 87 | serde_path_to_error = "0.1.16" 88 | serde_json = "1" 89 | chrono = { version = "0.4", features = ["serde", "clock", "std"], default-features = false } 90 | 91 | [package.metadata.docs.rs] 92 | all-features = true 93 | rustdoc-args = ["--cfg", "docsrs"] 94 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | The MIT License (MIT) 2 | 3 | Copyright (c) 2015 Y. T. CHUNG 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy of 6 | this software and associated documentation files (the "Software"), to deal in 7 | the Software without restriction, including without limitation the rights to 8 | use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of 9 | the Software, and to permit persons to whom the Software is furnished to do so, 10 | subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS 17 | FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR 18 | COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER 19 | IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN 20 | CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 21 | 22 | -------------------------------------------------------------------------------- /clippy.toml: -------------------------------------------------------------------------------- 1 | msrv = "1.81" -------------------------------------------------------------------------------- /etc/update-spec-tests.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | # This script is used to fetch the latest JSON tests for the CRUD spec. 
It puts the tests in the 4 | # directory src/tests/spec/json/<spec>. It should be run from the root of the repository. 5 | 6 | set -o errexit 7 | set -o nounset 8 | 9 | if [ ! -d ".git" ]; then 10 | echo "$0: This script must be run from the root of the repository" >&2 11 | exit 1 12 | fi 13 | 14 | if [ $# -ne 1 ]; then 15 | echo "$0: This script must be passed exactly one argument for which tests to sync" >&2 16 | exit 1 17 | fi 18 | 19 | tmpdir=`perl -MFile::Temp=tempdir -wle 'print tempdir(TMPDIR => 1, CLEANUP => 0)'` 20 | curl -sL https://github.com/mongodb/specifications/archive/master.zip -o "$tmpdir/specs.zip" 21 | unzip -d "$tmpdir" "$tmpdir/specs.zip" > /dev/null 22 | mkdir -p "src/tests/spec/json/$1" 23 | rsync -ah "$tmpdir/specifications-master/source/$1/tests/" "src/tests/spec/json/$1" --delete 24 | rm -rf "$tmpdir" 25 | -------------------------------------------------------------------------------- /fuzz/.gitignore: -------------------------------------------------------------------------------- 1 | 2 | target 3 | corpus 4 | artifacts 5 | Cargo.lock 6 | -------------------------------------------------------------------------------- /fuzz/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "bson-fuzz" 3 | version = "0.0.1" 4 | authors = ["Automatically generated"] 5 | publish = false 6 | edition = "2021" 7 | 8 | [package.metadata] 9 | cargo-fuzz = true 10 | 11 | [dependencies.bson] 12 | path = ".." 13 | features = ["serde"] 14 | 15 | [dependencies.libfuzzer-sys] 16 | version = "0.4.0" 17 | 18 | [dependencies.serde] 19 | version = "1.0" 20 | 21 | [dependencies.serde_json] 22 | version = "1.0" 23 | 24 | [workspace] 25 | members = ["."] 26 | 27 | [[bin]] 28 | name = "decode" 29 | path = "fuzz_targets/decode.rs" 30 | 31 | [[bin]] 32 | name = "iterate" 33 | path = "fuzz_targets/iterate.rs" 34 | 35 | [[bin]] 36 | name = "raw_deserialize" 37 | path = "fuzz_targets/raw_deserialize.rs" 38 | 39 | [[bin]] 40 | name = "raw_deserialize_utf8_lossy" 41 | path = "fuzz_targets/raw_deserialize_utf8_lossy.rs" 42 | 43 | [[bin]] 44 | name = "type_markers" 45 | path = "fuzz_targets/type_markers.rs" 46 | 47 | [[bin]] 48 | name = "string_handling" 49 | path = "fuzz_targets/string_handling.rs" 50 | 51 | [[bin]] 52 | name = "encoding" 53 | path = "fuzz_targets/encoding.rs" 54 | 55 | [[bin]] 56 | name = "generate_corpus" 57 | path = "generate_corpus.rs" 58 | -------------------------------------------------------------------------------- /fuzz/fuzz_targets/decode.rs: -------------------------------------------------------------------------------- 1 | #![no_main] 2 | #[macro_use] 3 | extern crate libfuzzer_sys; 4 | extern crate bson; 5 | 6 | use bson::Document; 7 | use std::io::Cursor; 8 | 9 | fuzz_target!(|buf: &[u8]| { 10 | if let Ok(doc) = Document::decode_from_reader(&mut Cursor::new(&buf[..])) { 11 | let mut vec = Vec::with_capacity(buf.len()); 12 | let _ = doc.encode_to_writer(&mut vec); 13 | } 14 | }); 15 | -------------------------------------------------------------------------------- /fuzz/fuzz_targets/encoding.rs: -------------------------------------------------------------------------------- 1 | #![no_main] 2 | use bson::{ 3 | raw::{RawDocument, RawDocumentBuf}, 4 | Bson, 5 | Document, 6 | }; 7 | use libfuzzer_sys::fuzz_target; 8 | 9 | fn compare_docs(doc1: &Document, doc2: &Document) -> bool { 10 | if doc1.len() != doc2.len() { 11 | return false; 12 | } 13 | for (key, value) in doc1 { 14 | if let Some(val2) = doc2.get(key) { 15 | if
!compare_values(value, val2) { 16 | return false; 17 | } 18 | } else { 19 | return false; 20 | } 21 | } 22 | true 23 | } 24 | 25 | fn compare_values(val1: &Bson, val2: &Bson) -> bool { 26 | match (val1, val2) { 27 | (Bson::Double(d1), Bson::Double(d2)) => (d1.is_nan() && d2.is_nan()) || d1 == d2, 28 | (Bson::Document(doc1), Bson::Document(doc2)) => compare_docs(doc1, doc2), 29 | (Bson::Array(arr1), Bson::Array(arr2)) => { 30 | if arr1.len() != arr2.len() { 31 | return false; 32 | } 33 | for (subval1, subval2) in std::iter::zip(arr1, arr2) { 34 | if !compare_values(subval1, subval2) { 35 | return false; 36 | } 37 | } 38 | true 39 | } 40 | (Bson::JavaScriptCodeWithScope(jsc1), Bson::JavaScriptCodeWithScope(jsc2)) => { 41 | jsc1.code == jsc2.code && compare_docs(&jsc1.scope, &jsc2.scope) 42 | } 43 | (v1, v2) => v1 == v2, 44 | } 45 | } 46 | 47 | fuzz_target!(|input: &[u8]| { 48 | if let Ok(rawdoc) = RawDocument::decode_from_bytes(&input) { 49 | if let Ok(doc) = Document::try_from(rawdoc) { 50 | let out = RawDocumentBuf::try_from(&doc).unwrap(); 51 | let out_bytes = out.as_bytes(); 52 | if input != out_bytes { 53 | let reencoded = RawDocument::decode_from_bytes(&out_bytes).unwrap(); 54 | let reencoded_doc = Document::try_from(reencoded).unwrap(); 55 | // Ensure that the re-encoded document is the same as the original document, the 56 | // bytes can differ while still resulting in the same Document. 57 | if !compare_docs(&doc, &reencoded_doc) { 58 | panic!( 59 | "Reencoded document is not the same as the original document: {:?} != {:?}", 60 | doc, reencoded_doc 61 | ); 62 | } 63 | } 64 | } 65 | } 66 | }); 67 | -------------------------------------------------------------------------------- /fuzz/fuzz_targets/iterate.rs: -------------------------------------------------------------------------------- 1 | #![no_main] 2 | #[macro_use] 3 | extern crate libfuzzer_sys; 4 | extern crate bson; 5 | use bson::RawDocument; 6 | 7 | fuzz_target!(|buf: &[u8]| { 8 | if let Ok(doc) = RawDocument::decode_from_bytes(buf) { 9 | for _ in doc {} 10 | } 11 | }); 12 | -------------------------------------------------------------------------------- /fuzz/fuzz_targets/raw_deserialize.rs: -------------------------------------------------------------------------------- 1 | #![no_main] 2 | #[macro_use] 3 | extern crate libfuzzer_sys; 4 | extern crate bson; 5 | use bson::Document; 6 | 7 | fuzz_target!(|buf: &[u8]| { 8 | if let Ok(doc) = bson::deserialize_from_slice::<Document>(buf) { 9 | let _ = bson::serialize_to_vec(&doc); 10 | } 11 | }); 12 | -------------------------------------------------------------------------------- /fuzz/fuzz_targets/raw_deserialize_utf8_lossy.rs: -------------------------------------------------------------------------------- 1 | #![no_main] 2 | #[macro_use] 3 | extern crate libfuzzer_sys; 4 | extern crate bson; 5 | use bson::{serde_helpers::Utf8LossyDeserialization, Document}; 6 | 7 | fuzz_target!(|buf: &[u8]| { 8 | if let Ok(doc) = bson::deserialize_from_slice::<Utf8LossyDeserialization<Document>>(buf) { 9 | let _ = bson::serialize_to_vec(&doc.0); 10 | } 11 | }); 12 | -------------------------------------------------------------------------------- /fuzz/fuzz_targets/string_handling.rs: -------------------------------------------------------------------------------- 1 | #![no_main] 2 | #[macro_use] 3 | extern crate libfuzzer_sys; 4 | extern crate bson; 5 | use bson::{RawBsonRef, RawDocument}; 6 | use std::convert::TryInto; 7 | 8 | fuzz_target!(|buf: &[u8]| { 9 | if let Ok(doc) = RawDocument::decode_from_bytes(buf) { 10 | for elem in
doc.iter_elements().flatten() { 11 | // Convert to RawBsonRef and check string-related types 12 | if let Ok(bson) = elem.try_into() { 13 | match bson { 14 | RawBsonRef::String(s) => { 15 | let _ = s.len(); 16 | let _ = s.chars().count(); 17 | } 18 | _ => {} 19 | } 20 | } 21 | } 22 | } 23 | }); 24 | -------------------------------------------------------------------------------- /fuzz/fuzz_targets/type_markers.rs: -------------------------------------------------------------------------------- 1 | #![no_main] 2 | #[macro_use] 3 | extern crate libfuzzer_sys; 4 | extern crate bson; 5 | use bson::{RawBsonRef, RawDocument}; 6 | use std::convert::TryInto; 7 | 8 | fuzz_target!(|buf: &[u8]| { 9 | if let Ok(doc) = RawDocument::decode_from_bytes(buf) { 10 | for elem in doc.iter_elements().flatten() { 11 | let _: Result<RawBsonRef, _> = elem.try_into(); 12 | } 13 | } 14 | }); 15 | -------------------------------------------------------------------------------- /fuzz/generate_corpus.rs: -------------------------------------------------------------------------------- 1 | use bson::{cstr, doc, Bson, Decimal128}; 2 | use std::{ 3 | fs, 4 | io::{Error, ErrorKind}, 5 | path::Path, 6 | str::FromStr, 7 | }; 8 | 9 | fn main() -> std::io::Result<()> { 10 | let corpus_dir = Path::new("fuzz/corpus"); 11 | fs::create_dir_all(corpus_dir)?; 12 | 13 | // Generate edge cases for each fuzz target 14 | generate_length_edge_cases(corpus_dir)?; 15 | generate_type_marker_cases(corpus_dir)?; 16 | generate_string_edge_cases(corpus_dir)?; 17 | generate_serialization_cases(corpus_dir)?; 18 | Ok(()) 19 | } 20 | 21 | fn generate_length_edge_cases(dir: &Path) -> std::io::Result<()> { 22 | let target_dir = dir.join("malformed_length"); 23 | fs::create_dir_all(&target_dir)?; 24 | 25 | // Invalid length 26 | fs::write(target_dir.join("invalid_len"), vec![4, 5])?; 27 | 28 | // Minimal valid document 29 | let min_doc = doc! {}; 30 | fs::write( 31 | target_dir.join("min_doc"), 32 | min_doc 33 | .encode_to_vec() 34 | .map_err(|e| Error::new(ErrorKind::Other, e.to_string()))?, 35 | )?; 36 | 37 | // Document with length near i32::MAX 38 | let large_doc = doc! { "a": "b".repeat(i32::MAX as usize / 2) }; 39 | fs::write( 40 | target_dir.join("large_doc"), 41 | large_doc 42 | .encode_to_vec() 43 | .map_err(|e| Error::new(ErrorKind::Other, e.to_string()))?, 44 | )?; 45 | 46 | Ok(()) 47 | } 48 | 49 | fn generate_type_marker_cases(dir: &Path) -> std::io::Result<()> { 50 | let target_dir = dir.join("type_markers"); 51 | fs::create_dir_all(&target_dir)?; 52 | 53 | // Document with all BSON types 54 | let all_types = doc! { 55 | "double": 1.0f64, 56 | "double_nan": f64::NAN, 57 | "double_infinity": f64::INFINITY, 58 | "double_neg_infinity": f64::NEG_INFINITY, 59 | "string": "test", 60 | "document": doc!
{}, 61 | "array": vec![1, 2, 3], 62 | "binary": Bson::Binary(bson::Binary { subtype: bson::spec::BinarySubtype::Generic, bytes: vec![1, 2, 3] }), 63 | "object_id": bson::oid::ObjectId::new(), 64 | "bool": true, 65 | "date": bson::DateTime::now(), 66 | "null": Bson::Null, 67 | "regex": Bson::RegularExpression(bson::Regex { pattern: cstr!("pattern").into(), options: cstr!("i").into() }), 68 | "int32": 123i32, 69 | "timestamp": bson::Timestamp { time: 12345, increment: 1 }, 70 | "int64": 123i64, 71 | "decimal128_nan": Decimal128::from_str("NaN").unwrap(), 72 | "decimal128_infinity": Decimal128::from_str("Infinity").unwrap(), 73 | "decimal128_neg_infinity": Decimal128::from_str("-Infinity").unwrap(), 74 | "min_key": Bson::MinKey, 75 | "max_key": Bson::MaxKey, 76 | "undefined": Bson::Undefined 77 | }; 78 | fs::write( 79 | target_dir.join("all_types"), 80 | all_types 81 | .encode_to_vec() 82 | .map_err(|e| Error::new(ErrorKind::Other, e.to_string()))?, 83 | )?; 84 | 85 | Ok(()) 86 | } 87 | 88 | fn generate_string_edge_cases(dir: &Path) -> std::io::Result<()> { 89 | let target_dir = dir.join("string_handling"); 90 | fs::create_dir_all(&target_dir)?; 91 | 92 | // UTF-8 edge cases 93 | let utf8_cases = doc! { 94 | "empty": "", 95 | "null_bytes": "hello\0world", 96 | "unicode": "🦀💻🔒", 97 | "high_surrogate": "\u{10000}", 98 | "invalid_continuation": Bson::Binary(bson::Binary { 99 | subtype: bson::spec::BinarySubtype::Generic, 100 | bytes: vec![0x80u8, 0x80u8, 0x80u8] 101 | }), 102 | "overlong": Bson::Binary(bson::Binary { 103 | subtype: bson::spec::BinarySubtype::Generic, 104 | bytes: vec![0xC0u8, 0x80u8] 105 | }) 106 | }; 107 | fs::write( 108 | target_dir.join("utf8_cases"), 109 | utf8_cases 110 | .encode_to_vec() 111 | .map_err(|e| Error::new(ErrorKind::Other, e.to_string()))?, 112 | )?; 113 | 114 | Ok(()) 115 | } 116 | 117 | fn generate_serialization_cases(dir: &Path) -> std::io::Result<()> { 118 | let target_dir = dir.join("serialization"); 119 | fs::create_dir_all(&target_dir)?; 120 | 121 | // Deeply nested document 122 | let mut nested_doc = doc! {}; 123 | let mut current = &mut nested_doc; 124 | for i in 0..100 { 125 | let next_doc = doc! {}; 126 | current.insert(i.to_string(), next_doc); 127 | current = current 128 | .get_mut(&i.to_string()) 129 | .unwrap() 130 | .as_document_mut() 131 | .unwrap(); 132 | } 133 | fs::write( 134 | target_dir.join("nested_doc"), 135 | nested_doc 136 | .encode_to_vec() 137 | .map_err(|e| Error::new(ErrorKind::Other, e.to_string()))?, 138 | )?; 139 | 140 | // Document with large binary data 141 | let large_binary = doc! 
{ 142 | "binary": Bson::Binary(bson::Binary { 143 | subtype: bson::spec::BinarySubtype::Generic, 144 | bytes: vec![0xFF; 1024 * 1024] // 1MB of data 145 | }) 146 | }; 147 | fs::write( 148 | target_dir.join("large_binary"), 149 | large_binary 150 | .encode_to_vec() 151 | .map_err(|e| Error::new(ErrorKind::Other, e.to_string()))?, 152 | )?; 153 | 154 | Ok(()) 155 | } 156 | -------------------------------------------------------------------------------- /rustfmt.toml: -------------------------------------------------------------------------------- 1 | combine_control_expr = false 2 | comment_width = 100 3 | condense_wildcard_suffixes = true 4 | format_strings = true 5 | use_try_shorthand = true 6 | wrap_comments = true 7 | imports_layout = "HorizontalVertical" 8 | imports_granularity = "Crate" 9 | ignore = ["src/lib.rs"] -------------------------------------------------------------------------------- /serde-tests/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "serde-tests" 3 | version = "0.1.0" 4 | authors = ["Kevin Yeh "] 5 | edition = "2018" 6 | 7 | [features] 8 | default = [] 9 | 10 | [dependencies] 11 | bson = { path = "..", features = ["uuid-1", "chrono-0_4", "serde", "serde_with-3", "serde_json-1"] } 12 | serde = { version = "1.0", features = ["derive"] } 13 | pretty_assertions = "0.6.1" 14 | hex = "0.4.2" 15 | serde_with = "3" 16 | chrono = "0.4" 17 | uuid = "1" 18 | 19 | [dev-dependencies] 20 | serde_json = "1" 21 | rmp-serde = "0.15" 22 | base64 = "0.13.0" 23 | 24 | [lib] 25 | name = "serde_tests" 26 | path = "lib.rs" 27 | 28 | [[test]] 29 | name = "serde" 30 | path = "test.rs" 31 | -------------------------------------------------------------------------------- /serde-tests/json.rs: -------------------------------------------------------------------------------- 1 | use pretty_assertions::assert_eq; 2 | use serde_json::json; 3 | 4 | use super::AllTypes; 5 | 6 | use bson::{cstr, doc, Bson, JavaScriptCodeWithScope, RawArrayBuf, RawBson, RawDocumentBuf}; 7 | 8 | use serde::{Deserialize, Serialize}; 9 | 10 | #[test] 11 | fn all_types_json() { 12 | let (mut v, _) = AllTypes::fixtures(); 13 | 14 | let code = match v.code { 15 | Bson::JavaScriptCode(ref c) => c.clone(), 16 | c => panic!("expected code, found {:?}", c), 17 | }; 18 | 19 | let code_w_scope = JavaScriptCodeWithScope { 20 | code: "hello world".to_string(), 21 | scope: doc! 
{ "x": 1 }, 22 | }; 23 | let scope_json = serde_json::json!({ "x": 1 }); 24 | v.code_w_scope = code_w_scope.clone(); 25 | 26 | let json = serde_json::json!({ 27 | "x": 1, 28 | "y": 2, 29 | "s": "oke", 30 | "array": vec![ 31 | serde_json::json!(true), 32 | serde_json::json!("oke".to_string()), 33 | serde_json::json!({ "12": 24 }), 34 | ], 35 | "bson": 1234.5, 36 | "oid": { "$oid": v.oid.to_hex() }, 37 | "null": serde_json::Value::Null, 38 | "subdoc": { "k": true, "b": { "hello": "world" } }, 39 | "b": true, 40 | "d": 12.5, 41 | "binary": v.binary.bytes, 42 | "binary_old": { "$binary": { "base64": base64::encode(&v.binary_old.bytes), "subType": "02" } }, 43 | "binary_other": { "$binary": { "base64": base64::encode(&v.binary_old.bytes), "subType": "81" } }, 44 | "date": { "$date": { "$numberLong": v.date.timestamp_millis().to_string() } }, 45 | "regex": { "$regularExpression": { "pattern": v.regex.pattern, "options": v.regex.options } }, 46 | "ts": { "$timestamp": { "t": 123, "i": 456 } }, 47 | "i": { "a": v.i.a, "b": v.i.b }, 48 | "undefined": { "$undefined": true }, 49 | "code": { "$code": code }, 50 | "code_w_scope": { "$code": code_w_scope.code, "$scope": scope_json }, 51 | "decimal": { "$numberDecimal": v.decimal.to_string() }, 52 | "symbol": { "$symbol": "ok" }, 53 | "min_key": { "$minKey": 1 }, 54 | "max_key": { "$maxKey": 1 }, 55 | }); 56 | 57 | assert_eq!(serde_json::to_value(&v).unwrap(), json); 58 | } 59 | 60 | #[test] 61 | fn owned_raw_bson() { 62 | #[derive(Serialize, Deserialize, Debug, PartialEq)] 63 | struct Foo { 64 | doc_buf: RawDocumentBuf, 65 | array_buf: RawArrayBuf, 66 | bson_array: RawBson, 67 | bson_doc: RawBson, 68 | bson_integer: RawBson, 69 | bson_string: RawBson, 70 | bson_bool: RawBson, 71 | bson_null: RawBson, 72 | bson_float: RawBson, 73 | } 74 | 75 | let json = json!({ 76 | "doc_buf": { 77 | "a": "key", 78 | "number": 12, 79 | "bool": false, 80 | "nu": null 81 | }, 82 | "array_buf": [ 83 | json!(1), 84 | json!("string"), 85 | ], 86 | "bson_array": [ 87 | json!(1), 88 | json!("string"), 89 | ], 90 | "bson_doc": { 91 | "first": true, 92 | "second": "string", 93 | }, 94 | "bson_integer": 12, 95 | "bson_string": "String", 96 | "bson_bool": true, 97 | "bson_null": null, 98 | "bson_float": 123.4 99 | }); 100 | 101 | let mut doc_buf = RawDocumentBuf::new(); 102 | doc_buf.append(cstr!("a"), "key"); 103 | doc_buf.append(cstr!("number"), 12); 104 | doc_buf.append(cstr!("bool"), false); 105 | doc_buf.append(cstr!("nu"), RawBson::Null); 106 | 107 | let mut array_buf = RawArrayBuf::new(); 108 | array_buf.push(1); 109 | array_buf.push("string"); 110 | 111 | let mut bson_doc = RawDocumentBuf::new(); 112 | bson_doc.append(cstr!("first"), true); 113 | bson_doc.append(cstr!("second"), "string"); 114 | 115 | let expected = Foo { 116 | doc_buf, 117 | array_buf: array_buf.clone(), 118 | bson_array: RawBson::Array(array_buf), 119 | bson_doc: RawBson::Document(bson_doc), 120 | bson_integer: RawBson::Int32(12), 121 | bson_string: RawBson::String("String".to_string()), 122 | bson_bool: RawBson::Boolean(true), 123 | bson_null: RawBson::Null, 124 | bson_float: RawBson::Double(123.4), 125 | }; 126 | 127 | let f: Foo = serde_json::from_value(json.clone()).unwrap(); 128 | assert_eq!(f, expected); 129 | 130 | let round_trip = serde_json::to_value(&f).unwrap(); 131 | assert_eq!(round_trip, json); 132 | } 133 | -------------------------------------------------------------------------------- /serde-tests/lib.rs: -------------------------------------------------------------------------------- 1 
| // intentionally blank 2 | -------------------------------------------------------------------------------- /serde-tests/rustfmt.toml: -------------------------------------------------------------------------------- 1 | combine_control_expr = false 2 | comment_width = 100 3 | condense_wildcard_suffixes = true 4 | format_strings = true 5 | normalize_comments = true 6 | use_try_shorthand = true 7 | wrap_comments = true 8 | imports_layout = "HorizontalVertical" 9 | imports_granularity = "Crate" 10 | -------------------------------------------------------------------------------- /src/base64.rs: -------------------------------------------------------------------------------- 1 | use base64::{engine::general_purpose::STANDARD, DecodeError, Engine}; 2 | 3 | pub fn decode<T: AsRef<[u8]>>(input: T) -> Result<Vec<u8>, DecodeError> { 4 | STANDARD.decode(input) 5 | } 6 | 7 | pub fn encode<T: AsRef<[u8]>>(input: T) -> String { 8 | STANDARD.encode(input) 9 | } 10 | -------------------------------------------------------------------------------- /src/binary.rs: -------------------------------------------------------------------------------- 1 | //! Module containing functionality related to BSON binary values. 2 | mod vector; 3 | 4 | use std::fmt::{self, Display}; 5 | 6 | use crate::{ 7 | base64, 8 | error::{Error, Result}, 9 | spec::BinarySubtype, 10 | RawBinaryRef, 11 | }; 12 | 13 | pub use vector::{PackedBitVector, Vector}; 14 | 15 | /// Represents a BSON binary value. 16 | #[derive(Debug, Clone, Eq, PartialEq, Hash)] 17 | pub struct Binary { 18 | /// The subtype of the bytes. 19 | pub subtype: BinarySubtype, 20 | 21 | /// The binary bytes. 22 | pub bytes: Vec<u8>, 23 | } 24 | 25 | impl Display for Binary { 26 | fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { 27 | write!( 28 | fmt, 29 | "Binary({:#x}, {})", 30 | u8::from(self.subtype), 31 | base64::encode(&self.bytes) 32 | ) 33 | } 34 | } 35 | 36 | impl Binary { 37 | /// Creates a [`Binary`] from a base64 string and optional [`BinarySubtype`]. If the 38 | /// `subtype` argument is [`None`], the [`Binary`] constructed will default to 39 | /// [`BinarySubtype::Generic`].
40 | /// 41 | /// ```rust 42 | /// # use bson::{Binary, error::Result}; 43 | /// # fn example() -> Result<()> { 44 | /// let input = base64::encode("hello"); 45 | /// let binary = Binary::from_base64(input, None)?; 46 | /// println!("{:?}", binary); 47 | /// // binary: Binary { subtype: Generic, bytes: [104, 101, 108, 108, 111] } 48 | /// # Ok(()) 49 | /// # } 50 | /// ``` 51 | pub fn from_base64( 52 | input: impl AsRef<str>, 53 | subtype: impl Into<Option<BinarySubtype>>, 54 | ) -> Result<Self> { 55 | let bytes = base64::decode(input.as_ref()).map_err(Error::binary)?; 56 | let subtype = match subtype.into() { 57 | Some(s) => s, 58 | None => BinarySubtype::Generic, 59 | }; 60 | Ok(Binary { subtype, bytes }) 61 | } 62 | 63 | #[cfg(feature = "serde")] 64 | pub(crate) fn from_extended_doc(doc: &crate::Document) -> Option<Self> { 65 | use std::convert::TryFrom; 66 | 67 | let binary_doc = doc.get_document("$binary").ok()?; 68 | 69 | if let Ok(bytes) = binary_doc.get_str("base64") { 70 | let bytes = base64::decode(bytes).ok()?; 71 | let subtype = binary_doc.get_str("subType").ok()?; 72 | let subtype = hex::decode(subtype).ok()?; 73 | if subtype.len() == 1 { 74 | Some(Self { 75 | bytes, 76 | subtype: subtype[0].into(), 77 | }) 78 | } else { 79 | None 80 | } 81 | } else { 82 | // in non-human-readable mode, RawBinary will serialize as 83 | // { "$binary": { "bytes": <bytes>, "subType": <i32> } }; 84 | let binary = binary_doc.get_binary_generic("bytes").ok()?; 85 | let subtype = binary_doc.get_i32("subType").ok()?; 86 | 87 | Some(Self { 88 | bytes: binary.clone(), 89 | subtype: u8::try_from(subtype).ok()?.into(), 90 | }) 91 | } 92 | } 93 | 94 | /// Borrow the contents as a [`RawBinaryRef`]. 95 | pub fn as_raw_binary(&self) -> RawBinaryRef<'_> { 96 | RawBinaryRef { 97 | bytes: self.bytes.as_slice(), 98 | subtype: self.subtype, 99 | } 100 | } 101 | } 102 | -------------------------------------------------------------------------------- /src/datetime/builder.rs: -------------------------------------------------------------------------------- 1 | use std::convert::TryFrom; 2 | 3 | use time::Date; 4 | 5 | use crate::{ 6 | datetime::DateTime, 7 | error::{Error, Result}, 8 | }; 9 | 10 | /// Builder for constructing a BSON [`DateTime`] 11 | pub struct DateTimeBuilder<Y = NoYear, M = NoMonth, D = NoDay> { 12 | pub(crate) year: Y, 13 | pub(crate) month: M, 14 | pub(crate) day: D, 15 | 16 | pub(crate) hour: Option<u8>, 17 | pub(crate) minute: Option<u8>, 18 | pub(crate) second: Option<u8>, 19 | pub(crate) millisecond: Option<u16>, 20 | } 21 | 22 | impl Default for DateTimeBuilder { 23 | fn default() -> Self { 24 | Self { 25 | year: NoYear, 26 | month: NoMonth, 27 | day: NoDay, 28 | hour: None, 29 | minute: None, 30 | second: None, 31 | millisecond: None, 32 | } 33 | } 34 | } 35 | 36 | pub struct Year(i32); 37 | pub struct NoYear; 38 | 39 | pub struct Month(u8); 40 | pub struct NoMonth; 41 | 42 | pub struct Day(u8); 43 | pub struct NoDay; 44 | 45 | impl<M, D> DateTimeBuilder<NoYear, M, D> { 46 | /// Sets the year for the builder instance. Years between ±9999 inclusive are valid. 47 | /// If the specified value is out of range, calling the `build()` method will return 48 | /// an error. 49 | /// 50 | /// Note: This is a required method. You will not be able to call `build()` before calling 51 | /// this method.
52 | pub fn year(self, y: i32) -> DateTimeBuilder<Year, M, D> { 53 | let Self { 54 | year: _, 55 | month, 56 | day, 57 | hour, 58 | minute, 59 | second, 60 | millisecond, 61 | } = self; 62 | DateTimeBuilder { 63 | year: Year(y), 64 | month, 65 | day, 66 | hour, 67 | minute, 68 | second, 69 | millisecond, 70 | } 71 | } 72 | } 73 | 74 | impl<D> DateTimeBuilder<Year, NoMonth, D> { 75 | /// Sets the month for the builder instance. Maps months as 1-January to 12-December. 76 | /// If the specified value is out of range, calling the `build()` method will return 77 | /// an error. 78 | /// 79 | /// Note: This is a required method. You will not be able to call `build()` before calling 80 | /// this method. 81 | pub fn month(self, m: u8) -> DateTimeBuilder<Year, Month, D> { 82 | let Self { 83 | year, 84 | month: _, 85 | day, 86 | hour, 87 | minute, 88 | second, 89 | millisecond, 90 | } = self; 91 | DateTimeBuilder { 92 | year, 93 | month: Month(m), 94 | day, 95 | hour, 96 | minute, 97 | second, 98 | millisecond, 99 | } 100 | } 101 | } 102 | 103 | impl DateTimeBuilder<Year, Month, NoDay> { 104 | /// Sets the day for the builder instance. Values in the range `1..=31` are valid. 105 | /// If the specified value does not exist for the provided month/year or is out of range, 106 | /// calling the `build()` method will return an error. 107 | /// 108 | /// Note: This is a required method. You will not be able to call `build()` before calling 109 | /// this method. 110 | pub fn day(self, d: u8) -> DateTimeBuilder<Year, Month, Day> { 111 | let Self { 112 | year, 113 | month, 114 | day: _, 115 | hour, 116 | minute, 117 | second, 118 | millisecond, 119 | } = self; 120 | DateTimeBuilder { 121 | year, 122 | month, 123 | day: Day(d), 124 | hour, 125 | minute, 126 | second, 127 | millisecond, 128 | } 129 | } 130 | } 131 | 132 | impl DateTimeBuilder<Year, Month, Day> { 133 | /// Sets the hour (24-hour format) for the builder instance. Values must be in the range 134 | /// `0..=23`. If the specified value is out of range, calling the `build()` method will 135 | /// return an error. 136 | /// 137 | /// Note: This is an optional method. The hour will default to 0 if not explicitly set. 138 | pub fn hour(mut self, hour: u8) -> DateTimeBuilder<Year, Month, Day> { 139 | self.hour = Some(hour); 140 | self 141 | } 142 | 143 | /// Sets the minute for the builder instance. Values must be in the range `0..=59`. 144 | /// If the specified value is out of range, calling the `build()` method will return an error. 145 | /// 146 | /// Note: This is an optional method. The minute will default to 0 if not explicitly set. 147 | pub fn minute(mut self, minute: u8) -> DateTimeBuilder<Year, Month, Day> { 148 | self.minute = Some(minute); 149 | self 150 | } 151 | 152 | /// Sets the second for the builder instance. Values must be in the range `0..=59`. 153 | /// If the specified value is out of range, calling the `build()` method will return an error. 154 | /// 155 | /// Note: This is an optional method. The second will default to 0 if not explicitly set. 156 | pub fn second(mut self, second: u8) -> DateTimeBuilder<Year, Month, Day> { 157 | self.second = Some(second); 158 | self 159 | } 160 | 161 | /// Sets the millisecond for the builder instance. Values must be in the range `0..=999`. 162 | /// If the specified value is out of range, calling the `build()` method will return an error. 163 | /// 164 | /// Note: This is an optional method. The millisecond will default to 0 if not explicitly set.
165 | pub fn millisecond(mut self, millisecond: u16) -> DateTimeBuilder<Year, Month, Day> { 166 | self.millisecond = Some(millisecond); 167 | self 168 | } 169 | } 170 | 171 | impl DateTimeBuilder<Year, Month, Day> { 172 | /// Convert a builder with a specified year, month, day, and optionally, an hour, minute, second 173 | /// and millisecond to a [`DateTime`]. 174 | /// 175 | /// Note: You cannot call `build()` before setting at least the year, month and day. 176 | pub fn build(self) -> Result<DateTime> { 177 | let month = time::Month::try_from(self.month.0).map_err(Error::datetime)?; 178 | let dt = Date::from_calendar_date(self.year.0, month, self.day.0) 179 | .map_err(Error::datetime)? 180 | .with_hms_milli( 181 | self.hour.unwrap_or(0), 182 | self.minute.unwrap_or(0), 183 | self.second.unwrap_or(0), 184 | self.millisecond.unwrap_or(0), 185 | ) 186 | .map_err(Error::datetime)?; 187 | Ok(DateTime::from_time_private(dt.assume_utc())) 188 | } 189 | } 190 | -------------------------------------------------------------------------------- /src/de.rs: -------------------------------------------------------------------------------- 1 | // The MIT License (MIT) 2 | 3 | // Copyright (c) 2015 Y. T. Chung 4 | 5 | // Permission is hereby granted, free of charge, to any person obtaining a copy of 6 | // this software and associated documentation files (the "Software"), to deal in 7 | // the Software without restriction, including without limitation the rights to 8 | // use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of 9 | // the Software, and to permit persons to whom the Software is furnished to do so, 10 | // subject to the following conditions: 11 | 12 | // The above copyright notice and this permission notice shall be included in all 13 | // copies or substantial portions of the Software. 14 | 15 | // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS 17 | // FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR 18 | // COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER 19 | // IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN 20 | // CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 21 | 22 | //! Deserializer 23 | 24 | mod raw; 25 | mod serde; 26 | 27 | pub use self::serde::Deserializer; 28 | 29 | use std::io::Read; 30 | 31 | use crate::{ 32 | bson::{Bson, Document}, 33 | error::{Error, Result}, 34 | raw::reader_to_vec, 35 | spec::BinarySubtype, 36 | }; 37 | 38 | #[rustfmt::skip] 39 | use ::serde::{de::DeserializeOwned, Deserialize}; 40 | 41 | pub(crate) use self::serde::{convert_unsigned_to_signed_raw, BsonVisitor}; 42 | 43 | #[cfg(test)] 44 | pub(crate) use self::raw::Deserializer as RawDeserializer; 45 | 46 | /// Hint provided to the deserializer via `deserialize_newtype_struct` as to the type of thing 47 | /// being deserialized. 48 | #[derive(Debug, Clone, Copy)] 49 | enum DeserializerHint { 50 | /// No hint provided, deserialize normally. 51 | None, 52 | 53 | /// The type being deserialized expects the BSON to contain a binary value with the provided 54 | /// subtype. This is currently used to deserialize [`bson::Uuid`] values. 55 | BinarySubtype(BinarySubtype), 56 | 57 | /// The type being deserialized is raw BSON, meaning no allocations should occur as part of 58 | /// deserializing and everything should be visited via borrowing or [`Copy`] if possible.
59 | RawBson, 60 | } 61 | 62 | /// Deserialize a `T` from the provided [`Bson`] value. 63 | /// 64 | /// The [`Deserializer`] used by this function presents itself as human readable, whereas the 65 | /// one used in [`deserialize_from_slice`] does not. This means that this function may deserialize 66 | /// differently than [`deserialize_from_slice`] for types that change their deserialization logic 67 | /// depending on whether the format is human readable or not. 68 | pub fn deserialize_from_bson(bson: Bson) -> Result 69 | where 70 | T: DeserializeOwned, 71 | { 72 | let de = Deserializer::new(bson); 73 | #[cfg(feature = "serde_path_to_error")] 74 | { 75 | serde_path_to_error::deserialize(de).map_err(Error::with_path) 76 | } 77 | #[cfg(not(feature = "serde_path_to_error"))] 78 | { 79 | Deserialize::deserialize(de) 80 | } 81 | } 82 | 83 | /// Deserialize a `T` from the provided [`Document`]. 84 | /// 85 | /// The [`Deserializer`] used by this function presents itself as human readable, whereas the 86 | /// one used in [`deserialize_from_slice`] does not. This means that this function may deserialize 87 | /// differently than [`deserialize_from_slice`] for types that change their deserialization logic 88 | /// depending on whether the format is human readable or not. 89 | pub fn deserialize_from_document(doc: Document) -> Result 90 | where 91 | T: DeserializeOwned, 92 | { 93 | deserialize_from_bson(Bson::Document(doc)) 94 | } 95 | 96 | /// Deserialize an instance of type `T` from an I/O stream of BSON. 97 | pub fn deserialize_from_reader(reader: R) -> Result 98 | where 99 | T: DeserializeOwned, 100 | R: Read, 101 | { 102 | let bytes = reader_to_vec(reader)?; 103 | deserialize_from_slice(bytes.as_slice()) 104 | } 105 | 106 | /// Deserialize an instance of type `T` from a slice of BSON bytes. 107 | pub fn deserialize_from_slice<'de, T>(bytes: &'de [u8]) -> Result 108 | where 109 | T: Deserialize<'de>, 110 | { 111 | deserialize_from_raw(raw::Deserializer::new(bytes)?) 112 | } 113 | 114 | pub(crate) fn deserialize_from_raw<'de, T: Deserialize<'de>>( 115 | deserializer: raw::Deserializer<'de>, 116 | ) -> Result { 117 | #[cfg(feature = "serde_path_to_error")] 118 | { 119 | serde_path_to_error::deserialize(deserializer).map_err(Error::with_path) 120 | } 121 | #[cfg(not(feature = "serde_path_to_error"))] 122 | { 123 | T::deserialize(deserializer) 124 | } 125 | } 126 | -------------------------------------------------------------------------------- /src/error.rs: -------------------------------------------------------------------------------- 1 | //! Contains the error-related types for the `bson` crate. 2 | 3 | mod decimal128; 4 | mod oid; 5 | mod uuid; 6 | mod value_access; 7 | 8 | use thiserror::Error; 9 | 10 | pub use decimal128::Decimal128ErrorKind; 11 | pub use oid::ObjectIdErrorKind; 12 | pub use uuid::UuidErrorKind; 13 | pub use value_access::ValueAccessErrorKind; 14 | 15 | pub type Result = std::result::Result; 16 | 17 | /// An error that can occur in the `bson` crate. 18 | #[derive(Clone, Debug, Error)] 19 | #[non_exhaustive] 20 | pub struct Error { 21 | /// The kind of error that occurred. 22 | pub kind: ErrorKind, 23 | 24 | /// An optional message describing the error. 25 | pub message: Option, 26 | 27 | /// The document key associated with the error, if any. 28 | pub key: Option, 29 | 30 | /// The array index associated with the error, if any. 31 | pub index: Option, 32 | 33 | /// The path to a deserialization error, if any. 
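/// For example, with this feature enabled, a failure to deserialize a nested field
/// records where in the input the failure occurred. A sketch (the struct names here
/// are illustrative only):
///
/// ```ignore
/// #[derive(serde::Deserialize)]
/// struct Outer {
///     inner: Inner,
/// }
///
/// #[derive(serde::Deserialize)]
/// struct Inner {
///     count: i32,
/// }
///
/// let err = bson::deserialize_from_document::<Outer>(
///     bson::doc! { "inner": { "count": "not a number" } },
/// )
/// .unwrap_err();
/// // The recorded path displays as "inner.count".
/// ```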
34 | #[cfg(feature = "serde_path_to_error")] 35 | pub path: Option, 36 | } 37 | 38 | impl std::fmt::Display for Error { 39 | fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { 40 | write!(f, "BSON error")?; 41 | 42 | if let Some(key) = self.key.as_deref() { 43 | write!(f, " at key \"{key}\"")?; 44 | } else if let Some(index) = self.index { 45 | write!(f, " at array index {index}")?; 46 | } 47 | 48 | write!(f, ". Kind: {}", self.kind)?; 49 | if let Some(ref message) = self.message { 50 | write!(f, ". Message: {}", message)?; 51 | } 52 | #[cfg(feature = "serde_path_to_error")] 53 | if let Some(ref path) = self.path { 54 | write!(f, ". Path: {}", path)?; 55 | } 56 | 57 | write!(f, ".") 58 | } 59 | } 60 | 61 | /// The types of errors that can occur in the `bson` crate. 62 | #[derive(Clone, Debug, Error)] 63 | #[non_exhaustive] 64 | pub enum ErrorKind { 65 | /// An error related to the [`Binary`](crate::Binary) type occurred. 66 | #[error("A Binary-related error occurred")] 67 | #[non_exhaustive] 68 | Binary {}, 69 | 70 | /// An error related to the [`DateTime`](crate::DateTime) type occurred. 71 | #[error("A DateTime-related error occurred")] 72 | #[non_exhaustive] 73 | DateTime {}, 74 | 75 | /// An error related to the [`Decimal128`](crate::Decimal128) type occurred. 76 | #[error("A Decimal128-related error occurred: {kind}")] 77 | #[non_exhaustive] 78 | Decimal128 { 79 | /// The kind of error that occurred. 80 | kind: Decimal128ErrorKind, 81 | }, 82 | 83 | /// A general error occurred during deserialization. This variant is constructed in the 84 | /// [`serde::de::Error`] implementation for the [`Error`](struct@Error) type. 85 | #[cfg(feature = "serde")] 86 | #[error("A deserialization-related error occurred")] 87 | #[non_exhaustive] 88 | Deserialization {}, 89 | 90 | /// The end of the BSON input was reached too soon. 91 | #[error("End of stream")] 92 | #[non_exhaustive] 93 | EndOfStream {}, 94 | 95 | /// Malformed BSON bytes were encountered. 96 | #[error("Malformed BSON bytes")] 97 | #[non_exhaustive] 98 | MalformedBytes {}, 99 | 100 | /// An error related to the [`ObjectId`](crate::oid::ObjectId) type occurred. 101 | #[error("An ObjectId-related error occurred: {kind}")] 102 | #[non_exhaustive] 103 | ObjectId { 104 | /// The kind of error that occurred. 105 | kind: ObjectIdErrorKind, 106 | }, 107 | 108 | /// A general error occurred during serialization. This variant is constructed in the 109 | /// [`serde::ser::Error`] implementation for the [`Error`](struct@Error) type. 110 | #[cfg(feature = "serde")] 111 | #[error("A serialization error occurred")] 112 | #[non_exhaustive] 113 | Serialization {}, 114 | 115 | /// An unsigned integer could not fit into a BSON integer type. 116 | #[error("Unsigned integer {n} cannot fit into BSON")] 117 | #[non_exhaustive] 118 | TooLargeUnsignedInteger { 119 | /// The too-large unsigned integer. 120 | n: u64, 121 | }, 122 | 123 | /// Invalid UTF-8 bytes were encountered. 124 | #[error("Invalid UTF-8")] 125 | #[non_exhaustive] 126 | Utf8Encoding {}, 127 | 128 | /// An error related to the [`Uuid`](crate::uuid::Uuid) type occurred. 129 | #[error("A UUID-related error occurred: {kind}")] 130 | #[non_exhaustive] 131 | Uuid { 132 | /// The kind of error that occurred. 133 | kind: UuidErrorKind, 134 | }, 135 | 136 | /// An error occurred when attempting to access a value in a document. 
137 | #[error("An error occurred when attempting to access a document value: {kind}")] 138 | #[non_exhaustive] 139 | ValueAccess { 140 | /// The kind of error that occurred. 141 | kind: ValueAccessErrorKind, 142 | }, 143 | 144 | /// An IO error occurred. 145 | #[error("An IO error occurred")] 146 | #[non_exhaustive] 147 | Io {}, 148 | } 149 | 150 | impl From for Error { 151 | fn from(kind: ErrorKind) -> Self { 152 | Self { 153 | kind, 154 | key: None, 155 | index: None, 156 | message: None, 157 | #[cfg(feature = "serde_path_to_error")] 158 | path: None, 159 | } 160 | } 161 | } 162 | 163 | impl From for Error { 164 | fn from(value: std::io::Error) -> Self { 165 | Error::from(ErrorKind::Io {}).with_message(value) 166 | } 167 | } 168 | 169 | #[cfg(feature = "serde")] 170 | impl serde::de::Error for Error { 171 | fn custom(message: T) -> Self 172 | where 173 | T: std::fmt::Display, 174 | { 175 | Self::deserialization(message) 176 | } 177 | } 178 | 179 | #[cfg(feature = "serde")] 180 | impl serde::ser::Error for Error { 181 | fn custom(message: T) -> Self 182 | where 183 | T: std::fmt::Display, 184 | { 185 | Self::serialization(message) 186 | } 187 | } 188 | 189 | impl Error { 190 | pub(crate) fn with_key(mut self, key: impl Into) -> Self { 191 | self.key = Some(key.into()); 192 | self 193 | } 194 | 195 | pub(crate) fn with_index(mut self, index: usize) -> Self { 196 | self.index = Some(index); 197 | self 198 | } 199 | 200 | pub(crate) fn with_message(mut self, message: impl ToString) -> Self { 201 | self.message = Some(message.to_string()); 202 | self 203 | } 204 | 205 | #[cfg(feature = "serde_path_to_error")] 206 | pub(crate) fn with_path(error: serde_path_to_error::Error) -> Self { 207 | let path = error.path().clone(); 208 | let mut error = error.into_inner(); 209 | error.path = Some(path); 210 | error 211 | } 212 | 213 | pub(crate) fn binary(message: impl ToString) -> Self { 214 | Self::from(ErrorKind::Binary {}).with_message(message) 215 | } 216 | 217 | pub(crate) fn datetime(message: impl ToString) -> Self { 218 | Self::from(ErrorKind::DateTime {}).with_message(message) 219 | } 220 | 221 | #[cfg(feature = "serde")] 222 | pub(crate) fn serialization(message: impl ToString) -> Self { 223 | Self::from(ErrorKind::Serialization {}).with_message(message) 224 | } 225 | 226 | #[cfg(feature = "serde")] 227 | pub(crate) fn invalid_key_type(key: impl AsRef) -> Self { 228 | Self::serialization(format!("invalid document key type: {}", key.as_ref())) 229 | } 230 | 231 | #[cfg(feature = "serde")] 232 | pub(crate) fn deserialization(message: impl ToString) -> Self { 233 | Self::from(ErrorKind::Deserialization {}).with_message(message) 234 | } 235 | 236 | #[cfg(feature = "serde")] 237 | pub(crate) fn end_of_stream() -> Self { 238 | ErrorKind::EndOfStream {}.into() 239 | } 240 | 241 | pub(crate) fn malformed_bytes(message: impl ToString) -> Self { 242 | Self::from(ErrorKind::MalformedBytes {}).with_message(message) 243 | } 244 | 245 | #[cfg(all(test, feature = "serde"))] 246 | pub(crate) fn is_malformed_bytes(&self) -> bool { 247 | matches!(self.kind, ErrorKind::MalformedBytes { .. 
},) 248 | } 249 | 250 | #[cfg(feature = "serde")] 251 | pub(crate) fn too_large_integer(n: u64) -> Self { 252 | Self::from(ErrorKind::TooLargeUnsignedInteger { n }) 253 | } 254 | } 255 | -------------------------------------------------------------------------------- /src/error/decimal128.rs: -------------------------------------------------------------------------------- 1 | use thiserror::Error as ThisError; 2 | 3 | use crate::error::{Error, ErrorKind}; 4 | 5 | /// The kinds of errors that can occur when working with the [`Decimal128`](crate::Decimal128) type. 6 | #[derive(Clone, Debug, ThisError)] 7 | #[non_exhaustive] 8 | pub enum Decimal128ErrorKind { 9 | /// Empty exponent. 10 | #[error("empty exponent")] 11 | #[non_exhaustive] 12 | EmptyExponent {}, 13 | 14 | /// Invalid exponent. 15 | #[error("invalid exponent")] 16 | #[non_exhaustive] 17 | InvalidExponent {}, 18 | 19 | /// Invalid coefficient. 20 | #[error("invalid coefficient")] 21 | #[non_exhaustive] 22 | InvalidCoefficient {}, 23 | 24 | /// Overflow. 25 | #[error("overflow")] 26 | #[non_exhaustive] 27 | Overflow {}, 28 | 29 | /// Underflow. 30 | #[error("underflow")] 31 | #[non_exhaustive] 32 | Underflow {}, 33 | 34 | /// Inexact rounding. 35 | #[error("inexact rounding")] 36 | #[non_exhaustive] 37 | InexactRounding {}, 38 | 39 | /// Unparseable. 40 | #[error("unparseable")] 41 | #[non_exhaustive] 42 | Unparseable {}, 43 | } 44 | 45 | impl Error { 46 | pub(crate) fn decimal128(kind: Decimal128ErrorKind) -> Self { 47 | ErrorKind::Decimal128 { kind }.into() 48 | } 49 | 50 | #[cfg(test)] 51 | pub(crate) fn is_decimal128_unparseable(&self) -> bool { 52 | matches!( 53 | self.kind, 54 | ErrorKind::Decimal128 { 55 | kind: Decimal128ErrorKind::Unparseable {}, 56 | } 57 | ) 58 | } 59 | } 60 | -------------------------------------------------------------------------------- /src/error/oid.rs: -------------------------------------------------------------------------------- 1 | use hex::FromHexError; 2 | use thiserror::Error as ThisError; 3 | 4 | use crate::error::{Error, ErrorKind}; 5 | 6 | /// The kinds of errors that can occur when working with the [`ObjectId`](crate::oid::ObjectId) 7 | /// type. 8 | #[derive(Clone, Debug, ThisError)] 9 | #[non_exhaustive] 10 | pub enum ObjectIdErrorKind { 11 | /// An invalid character was found in the provided hex string. Valid characters are: `0...9`, 12 | /// `a...f`, or `A...F`. 13 | #[error("invalid character '{c}' encountered at index {index}")] 14 | #[non_exhaustive] 15 | InvalidHexStringCharacter { 16 | /// The invalid character. 17 | c: char, 18 | 19 | /// The index at which the invalid character was encountered. 20 | index: usize, 21 | }, 22 | 23 | /// An `ObjectId` with an invalid length was encountered. 24 | #[error("invalid hex string length {length}")] 25 | #[non_exhaustive] 26 | InvalidHexStringLength { 27 | /// The length of the invalid hex string. 28 | length: usize, 29 | }, 30 | } 31 | 32 | impl Error { 33 | // This method is not a From implementation so that it is not part of the public API. 
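// For reference, these kinds typically surface through `ObjectId::parse_str`, which
// routes hex-decoding failures through the conversion below. A minimal sketch,
// assuming a 24-character input containing a non-hex digit:
//
//     let err = crate::oid::ObjectId::parse_str("z23456789012123456789012").unwrap_err();
//     assert!(matches!(err.kind, crate::error::ErrorKind::ObjectId { .. }));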
34 | pub(crate) fn from_hex_error(error: FromHexError, length: usize) -> Self { 35 | let kind = match error { 36 | FromHexError::InvalidHexCharacter { c, index } => { 37 | ObjectIdErrorKind::InvalidHexStringCharacter { c, index } 38 | } 39 | FromHexError::InvalidStringLength | FromHexError::OddLength => { 40 | ObjectIdErrorKind::InvalidHexStringLength { length } 41 | } 42 | }; 43 | ErrorKind::ObjectId { kind }.into() 44 | } 45 | 46 | pub(crate) fn oid_invalid_length(length: usize) -> Self { 47 | ErrorKind::ObjectId { 48 | kind: ObjectIdErrorKind::InvalidHexStringLength { length }, 49 | } 50 | .into() 51 | } 52 | } 53 | -------------------------------------------------------------------------------- /src/error/uuid.rs: -------------------------------------------------------------------------------- 1 | use thiserror::Error as ThisError; 2 | 3 | use crate::{ 4 | error::{Error, ErrorKind}, 5 | spec::BinarySubtype, 6 | UuidRepresentation, 7 | }; 8 | 9 | /// The kinds of errors that can occur when working with the [`Uuid`](crate::uuid::Uuid) type. 10 | #[derive(Clone, Debug, ThisError)] 11 | #[non_exhaustive] 12 | pub enum UuidErrorKind { 13 | /// An invalid string was used to construct a UUID. 14 | #[error("invalid UUID string")] 15 | #[non_exhaustive] 16 | InvalidString {}, 17 | 18 | /// The requested `UuidRepresentation` does not match the binary subtype of a `Binary` 19 | /// value. 20 | #[error( 21 | "expected binary subtype {expected_binary_subtype:?} for representation \ 22 | {requested_representation:?}, got {actual_binary_subtype:?}" 23 | )] 24 | #[non_exhaustive] 25 | RepresentationMismatch { 26 | /// The subtype that was expected given the requested representation. 27 | expected_binary_subtype: BinarySubtype, 28 | 29 | /// The actual subtype of the binary value. 30 | actual_binary_subtype: BinarySubtype, 31 | 32 | /// The requested representation. 33 | requested_representation: UuidRepresentation, 34 | }, 35 | 36 | /// An invalid length of bytes was used to construct a UUID value. 37 | #[error("expected length of 16 bytes, got {length}")] 38 | #[non_exhaustive] 39 | InvalidLength { 40 | /// The actual length of the data. 41 | length: usize, 42 | }, 43 | } 44 | 45 | impl Error { 46 | pub(crate) fn invalid_uuid_string(message: impl ToString) -> Self { 47 | Self::from(ErrorKind::Uuid { 48 | kind: UuidErrorKind::InvalidString {}, 49 | }) 50 | .with_message(message) 51 | } 52 | 53 | pub(crate) fn uuid_representation_mismatch( 54 | requested_representation: UuidRepresentation, 55 | actual_binary_subtype: BinarySubtype, 56 | expected_binary_subtype: BinarySubtype, 57 | ) -> Self { 58 | ErrorKind::Uuid { 59 | kind: UuidErrorKind::RepresentationMismatch { 60 | expected_binary_subtype, 61 | actual_binary_subtype, 62 | requested_representation, 63 | }, 64 | } 65 | .into() 66 | } 67 | 68 | pub(crate) fn invalid_uuid_length(length: usize) -> Self { 69 | ErrorKind::Uuid { 70 | kind: UuidErrorKind::InvalidLength { length }, 71 | } 72 | .into() 73 | } 74 | } 75 | -------------------------------------------------------------------------------- /src/error/value_access.rs: -------------------------------------------------------------------------------- 1 | use thiserror::Error as ThisError; 2 | 3 | use crate::{ 4 | error::{Error, ErrorKind}, 5 | spec::ElementType, 6 | }; 7 | 8 | /// The types of errors that can occur when attempting to access a value in a document. 
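/// For example, the typed getters are expected to report these kinds roughly as
/// follows (a sketch; `get_str` stands in for any of the typed accessors):
///
/// ```ignore
/// let doc = bson::doc! { "x": 42_i32 };
/// assert!(doc.get_str("missing").is_err()); // NotPresent
/// assert!(doc.get_str("x").is_err());       // UnexpectedType
/// ```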
9 | #[derive(Clone, Debug, ThisError)] 10 | #[non_exhaustive] 11 | pub enum ValueAccessErrorKind { 12 | /// No value for the specified key was present in the document. 13 | #[error("the key was not present in the document")] 14 | #[non_exhaustive] 15 | NotPresent {}, 16 | 17 | /// The type of the value in the document did not match the requested type. 18 | #[error("expected type {expected:?}, got type {actual:?}")] 19 | #[non_exhaustive] 20 | UnexpectedType { 21 | /// The actual type of the value. 22 | actual: ElementType, 23 | 24 | /// The expected type of the value. 25 | expected: ElementType, 26 | }, 27 | 28 | /// An error occurred when attempting to parse the document's BSON bytes. 29 | #[error("invalid BSON bytes")] 30 | #[non_exhaustive] 31 | InvalidBson {}, 32 | } 33 | 34 | impl Error { 35 | pub(crate) fn value_access_not_present() -> Self { 36 | ErrorKind::ValueAccess { 37 | kind: ValueAccessErrorKind::NotPresent {}, 38 | } 39 | .into() 40 | } 41 | 42 | pub(crate) fn value_access_unexpected_type(actual: ElementType, expected: ElementType) -> Self { 43 | ErrorKind::ValueAccess { 44 | kind: ValueAccessErrorKind::UnexpectedType { actual, expected }, 45 | } 46 | .into() 47 | } 48 | 49 | pub(crate) fn value_access_invalid_bson(message: String) -> Self { 50 | Self::from(ErrorKind::ValueAccess { 51 | kind: ValueAccessErrorKind::InvalidBson {}, 52 | }) 53 | .with_message(message) 54 | } 55 | 56 | #[cfg(test)] 57 | pub(crate) fn is_value_access_not_present(&self) -> bool { 58 | matches!( 59 | self.kind, 60 | ErrorKind::ValueAccess { 61 | kind: ValueAccessErrorKind::NotPresent {}, 62 | .. 63 | } 64 | ) 65 | } 66 | 67 | #[cfg(test)] 68 | pub(crate) fn is_value_access_unexpected_type(&self) -> bool { 69 | matches!( 70 | self.kind, 71 | ErrorKind::ValueAccess { 72 | kind: ValueAccessErrorKind::UnexpectedType { .. }, 73 | .. 74 | } 75 | ) 76 | } 77 | } 78 | -------------------------------------------------------------------------------- /src/extjson.rs: -------------------------------------------------------------------------------- 1 | #[cfg(feature = "serde_json-1")] 2 | pub(crate) mod json; 3 | pub(crate) mod models; 4 | -------------------------------------------------------------------------------- /src/raw/array_buf.rs: -------------------------------------------------------------------------------- 1 | use std::{ 2 | borrow::{Borrow, Cow}, 3 | fmt::Debug, 4 | }; 5 | 6 | use crate::{RawArray, RawBsonRef, RawDocumentBuf}; 7 | 8 | use super::{document_buf::BindRawBsonRef, RawArrayIter}; 9 | 10 | /// An owned BSON array value (akin to [`std::path::PathBuf`]), backed by a buffer of raw BSON 11 | /// bytes. This type can be used to construct owned array values, which can be used to append to 12 | /// [`RawDocumentBuf`] or as a field in a [`Deserialize`](serde::Deserialize) struct. 13 | /// 14 | /// Iterating over a [`RawArrayBuf`] yields either an error or a [`RawBson`](crate::raw::RawBson) 15 | /// value that borrows from the original document without making any additional allocations. 
16 | /// ``` 17 | /// # use bson::error::Error; 18 | /// use bson::raw::RawArrayBuf; 19 | /// 20 | /// let mut array = RawArrayBuf::new(); 21 | /// array.push("a string"); 22 | /// array.push(12_i32); 23 | /// 24 | /// let mut iter = array.into_iter(); 25 | /// 26 | /// let value = iter.next().unwrap()?; 27 | /// assert_eq!(value.as_str(), Some("a string")); 28 | /// 29 | /// let value = iter.next().unwrap()?; 30 | /// assert_eq!(value.as_i32(), Some(12)); 31 | /// 32 | /// assert!(iter.next().is_none()); 33 | /// # Ok::<(), Error>(()) 34 | /// ``` 35 | /// 36 | /// This type implements [`Deref`](std::ops::Deref) to [`RawArray`], meaning that all methods on 37 | /// [`RawArray`] are available on [`RawArrayBuf`] values as well. This includes [`RawArray::get`] or 38 | /// any of the type-specific getters, such as [`RawArray::get_object_id`] or [`RawArray::get_str`]. 39 | /// Note that accessing elements is an O(N) operation, as it requires iterating through the document 40 | /// from the beginning to find the requested key. 41 | #[derive(Clone, PartialEq)] 42 | pub struct RawArrayBuf { 43 | inner: RawDocumentBuf, 44 | len: usize, 45 | } 46 | 47 | impl RawArrayBuf { 48 | /// Construct a new, empty [`RawArrayBuf`]. 49 | pub fn new() -> RawArrayBuf { 50 | Self { 51 | inner: RawDocumentBuf::new(), 52 | len: 0, 53 | } 54 | } 55 | 56 | /// Construct a new [`RawArrayBuf`] from the provided [`Vec`] of bytes. 57 | /// 58 | /// This involves a traversal of the array to count the values. 59 | pub(crate) fn from_raw_document_buf(doc: RawDocumentBuf) -> Self { 60 | let len = doc.iter().count(); 61 | Self { inner: doc, len } 62 | } 63 | 64 | /// Append a value to the end of the array. 65 | /// 66 | /// ``` 67 | /// # use bson::error::Error; 68 | /// use bson::raw::{cstr, RawArrayBuf, RawDocumentBuf}; 69 | /// 70 | /// let mut array = RawArrayBuf::new(); 71 | /// array.push("a string"); 72 | /// array.push(12_i32); 73 | /// 74 | /// let mut doc = RawDocumentBuf::new(); 75 | /// doc.append(cstr!("a key"), "a value"); 76 | /// array.push(doc.clone()); 77 | /// 78 | /// let mut iter = array.into_iter(); 79 | /// 80 | /// let value = iter.next().unwrap()?; 81 | /// assert_eq!(value.as_str(), Some("a string")); 82 | /// 83 | /// let value = iter.next().unwrap()?; 84 | /// assert_eq!(value.as_i32(), Some(12)); 85 | /// 86 | /// let value = iter.next().unwrap()?; 87 | /// assert_eq!(value.as_document(), Some(doc.as_ref())); 88 | /// 89 | /// assert!(iter.next().is_none()); 90 | /// # Ok::<(), Error>(()) 91 | /// ``` 92 | pub fn push(&mut self, value: impl BindRawBsonRef) { 93 | self.inner.append( 94 | super::CString::from_string_unchecked(self.len.to_string()), 95 | value, 96 | ); 97 | self.len += 1; 98 | } 99 | } 100 | 101 | impl FromIterator for RawArrayBuf { 102 | fn from_iter>(iter: T) -> Self { 103 | let mut array_buf = RawArrayBuf::new(); 104 | for item in iter { 105 | array_buf.push(item); 106 | } 107 | array_buf 108 | } 109 | } 110 | 111 | impl Debug for RawArrayBuf { 112 | fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { 113 | f.debug_struct("RawArrayBuf") 114 | .field("data", &hex::encode(self.as_bytes())) 115 | .field("len", &self.len) 116 | .finish() 117 | } 118 | } 119 | 120 | impl std::ops::Deref for RawArrayBuf { 121 | type Target = RawArray; 122 | 123 | fn deref(&self) -> &Self::Target { 124 | RawArray::from_doc(&self.inner) 125 | } 126 | } 127 | 128 | impl AsRef for RawArrayBuf { 129 | fn as_ref(&self) -> &RawArray { 130 | RawArray::from_doc(&self.inner) 131 | } 132 | } 133 | 
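// A minimal usage sketch tying the impls above together: values are collected via
// `FromIterator` and read back through the `Deref`-provided `RawArray` accessors
// (the `as_i32` call mirrors the doc examples above; `get` is assumed here to yield
// an optional `RawBsonRef`):
//
//     let array: RawArrayBuf = [1_i32, 2, 3].into_iter().collect();
//     let second = array.get(1)?.and_then(|value| value.as_i32());
//     assert_eq!(second, Some(2));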
134 | impl Borrow for RawArrayBuf { 135 | fn borrow(&self) -> &RawArray { 136 | self.as_ref() 137 | } 138 | } 139 | 140 | impl<'a> IntoIterator for &'a RawArrayBuf { 141 | type IntoIter = RawArrayIter<'a>; 142 | type Item = super::Result>; 143 | 144 | fn into_iter(self) -> RawArrayIter<'a> { 145 | self.as_ref().into_iter() 146 | } 147 | } 148 | 149 | impl From for Cow<'_, RawArray> { 150 | fn from(rd: RawArrayBuf) -> Self { 151 | Cow::Owned(rd) 152 | } 153 | } 154 | 155 | impl<'a> From<&'a RawArrayBuf> for Cow<'a, RawArray> { 156 | fn from(rd: &'a RawArrayBuf) -> Self { 157 | Cow::Borrowed(rd.as_ref()) 158 | } 159 | } 160 | 161 | #[cfg(feature = "serde")] 162 | impl<'de> serde::Deserialize<'de> for RawArrayBuf { 163 | fn deserialize(deserializer: D) -> std::result::Result 164 | where 165 | D: serde::Deserializer<'de>, 166 | { 167 | Ok(super::serde::OwnedOrBorrowedRawArray::deserialize(deserializer)?.into_owned()) 168 | } 169 | } 170 | 171 | #[cfg(feature = "serde")] 172 | impl serde::Serialize for RawArrayBuf { 173 | fn serialize(&self, serializer: S) -> Result 174 | where 175 | S: serde::Serializer, 176 | { 177 | self.as_ref().serialize(serializer) 178 | } 179 | } 180 | 181 | impl Default for RawArrayBuf { 182 | fn default() -> Self { 183 | Self::new() 184 | } 185 | } 186 | 187 | impl TryFrom<&crate::Array> for RawArrayBuf { 188 | type Error = crate::error::Error; 189 | 190 | fn try_from(value: &crate::Array) -> Result { 191 | Self::try_from(value.clone()) 192 | } 193 | } 194 | 195 | impl TryFrom for RawArrayBuf { 196 | type Error = crate::error::Error; 197 | 198 | fn try_from(value: crate::Array) -> Result { 199 | let mut tmp = RawArrayBuf::new(); 200 | for val in value { 201 | let raw: super::RawBson = val.try_into()?; 202 | tmp.push(raw); 203 | } 204 | Ok(tmp) 205 | } 206 | } 207 | -------------------------------------------------------------------------------- /src/raw/document_buf/raw_writer.rs: -------------------------------------------------------------------------------- 1 | use crate::{raw::CStr, RawBsonRef}; 2 | 3 | pub(super) struct RawWriter<'a> { 4 | data: &'a mut Vec, 5 | } 6 | 7 | impl<'a> RawWriter<'a> { 8 | pub(super) fn new(data: &'a mut Vec) -> Self { 9 | Self { data } 10 | } 11 | 12 | pub(super) fn append(&mut self, key: &CStr, value: RawBsonRef) { 13 | let original_len = self.data.len(); 14 | self.data[original_len - 1] = value.element_type() as u8; 15 | 16 | key.append_to(self.data); 17 | value.append_to(self.data); 18 | 19 | // append trailing null byte 20 | self.data.push(0); 21 | // update length 22 | let new_len = (self.data.len() as i32).to_le_bytes(); 23 | self.data[0..4].copy_from_slice(&new_len); 24 | } 25 | } 26 | -------------------------------------------------------------------------------- /src/raw/serde.rs: -------------------------------------------------------------------------------- 1 | pub(crate) mod bson_visitor; 2 | pub(crate) mod seeded_visitor; 3 | 4 | use std::{borrow::Cow, convert::TryFrom, fmt::Debug}; 5 | 6 | use serde::{de::Error as SerdeError, Deserialize}; 7 | 8 | use crate::{ 9 | raw::{RAW_ARRAY_NEWTYPE, RAW_DOCUMENT_NEWTYPE}, 10 | spec::BinarySubtype, 11 | RawArray, 12 | RawArrayBuf, 13 | RawBsonRef, 14 | RawDocument, 15 | RawDocumentBuf, 16 | }; 17 | 18 | use super::{bson::RawBson, RAW_BSON_NEWTYPE}; 19 | use bson_visitor::*; 20 | use seeded_visitor::*; 21 | 22 | /// Wrapper around a `Cow` to enable borrowed deserialization. 23 | /// The default [`Deserialize`] impl for [`Cow`] always uses the owned version. 
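/// A minimal sketch of the intended effect (the struct here is illustrative only):
///
/// ```ignore
/// #[derive(serde::Deserialize)]
/// struct Borrowing<'a> {
///     // Without `#[serde(borrow)]`, this field would always deserialize as `Cow::Owned`.
///     #[serde(borrow)]
///     name: std::borrow::Cow<'a, str>,
/// }
/// ```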
24 | #[derive(Debug, Deserialize)] 25 | pub(crate) struct CowStr<'a>(#[serde(borrow)] Cow<'a, str>); 26 | 27 | /// A raw BSON value that may either be borrowed or owned. 28 | /// 29 | /// This is used to consolidate the [`Serialize`] and [`Deserialize`] implementations for 30 | /// [`RawBson`] and [`OwnedRawBson`]. 31 | pub(crate) enum OwnedOrBorrowedRawBson<'a> { 32 | Owned(RawBson), 33 | Borrowed(RawBsonRef<'a>), 34 | } 35 | 36 | impl<'a> OwnedOrBorrowedRawBson<'a> { 37 | pub(crate) fn as_ref<'b>(&'b self) -> RawBsonRef<'b> 38 | where 39 | 'a: 'b, 40 | { 41 | match self { 42 | Self::Borrowed(r) => *r, 43 | Self::Owned(bson) => bson.as_raw_bson_ref(), 44 | } 45 | } 46 | } 47 | 48 | impl<'a, 'de: 'a> Deserialize<'de> for OwnedOrBorrowedRawBson<'a> { 49 | fn deserialize(deserializer: D) -> Result 50 | where 51 | D: serde::Deserializer<'de>, 52 | { 53 | deserializer.deserialize_newtype_struct(RAW_BSON_NEWTYPE, OwnedOrBorrowedRawBsonVisitor) 54 | } 55 | } 56 | 57 | impl Debug for OwnedOrBorrowedRawBson<'_> { 58 | fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { 59 | match self { 60 | Self::Owned(o) => o.fmt(f), 61 | Self::Borrowed(b) => b.fmt(f), 62 | } 63 | } 64 | } 65 | 66 | impl<'a> From> for OwnedOrBorrowedRawBson<'a> { 67 | fn from(b: RawBsonRef<'a>) -> Self { 68 | OwnedOrBorrowedRawBson::Borrowed(b) 69 | } 70 | } 71 | 72 | impl From for OwnedOrBorrowedRawBson<'_> { 73 | fn from(b: RawBson) -> Self { 74 | OwnedOrBorrowedRawBson::Owned(b) 75 | } 76 | } 77 | 78 | /// Wrapper type that can deserialize either an owned or a borrowed raw BSON document. 79 | #[derive(Debug)] 80 | pub(crate) enum OwnedOrBorrowedRawDocument<'a> { 81 | Owned(RawDocumentBuf), 82 | Borrowed(&'a RawDocument), 83 | } 84 | 85 | impl OwnedOrBorrowedRawDocument<'_> { 86 | pub(crate) fn into_owned(self) -> RawDocumentBuf { 87 | match self { 88 | Self::Owned(o) => o, 89 | Self::Borrowed(b) => b.to_owned(), 90 | } 91 | } 92 | } 93 | 94 | impl From for OwnedOrBorrowedRawDocument<'_> { 95 | fn from(doc: RawDocumentBuf) -> Self { 96 | Self::Owned(doc) 97 | } 98 | } 99 | 100 | impl<'a> From<&'a RawDocument> for OwnedOrBorrowedRawDocument<'a> { 101 | fn from(doc: &'a RawDocument) -> Self { 102 | Self::Borrowed(doc) 103 | } 104 | } 105 | 106 | impl<'a, 'de: 'a> TryFrom> for OwnedOrBorrowedRawDocument<'a> { 107 | type Error = crate::raw::Error; 108 | 109 | fn try_from(buffer: CowByteBuffer<'de>) -> Result { 110 | let doc = match buffer.0 { 111 | Some(Cow::Borrowed(borrowed)) => RawDocument::decode_from_bytes(borrowed)?.into(), 112 | Some(Cow::Owned(owned)) => RawDocumentBuf::decode_from_bytes(owned)?.into(), 113 | None => RawDocumentBuf::new().into(), 114 | }; 115 | Ok(doc) 116 | } 117 | } 118 | 119 | impl<'a, 'de: 'a> Deserialize<'de> for OwnedOrBorrowedRawDocument<'a> { 120 | fn deserialize(deserializer: D) -> Result 121 | where 122 | D: serde::Deserializer<'de>, 123 | { 124 | match deserializer 125 | .deserialize_newtype_struct(RAW_DOCUMENT_NEWTYPE, OwnedOrBorrowedRawBsonVisitor)? 126 | { 127 | OwnedOrBorrowedRawBson::Borrowed(RawBsonRef::Document(d)) => Ok(Self::Borrowed(d)), 128 | OwnedOrBorrowedRawBson::Owned(RawBson::Document(d)) => Ok(Self::Owned(d)), 129 | 130 | // For non-BSON formats, RawDocument gets serialized as bytes, so we need to deserialize 131 | // from them here too. For BSON, the deserializer will return an error if it 132 | // sees the RAW_DOCUMENT_NEWTYPE but the next type isn't a document. 
133 | OwnedOrBorrowedRawBson::Borrowed(RawBsonRef::Binary(b)) 134 | if b.subtype == BinarySubtype::Generic => 135 | { 136 | Ok(Self::Borrowed( 137 | RawDocument::decode_from_bytes(b.bytes).map_err(SerdeError::custom)?, 138 | )) 139 | } 140 | OwnedOrBorrowedRawBson::Owned(RawBson::Binary(b)) 141 | if b.subtype == BinarySubtype::Generic => 142 | { 143 | Ok(Self::Owned( 144 | RawDocumentBuf::decode_from_bytes(b.bytes).map_err(SerdeError::custom)?, 145 | )) 146 | } 147 | 148 | o => Err(SerdeError::custom(format!( 149 | "expected raw document, instead got {:?}", 150 | o 151 | ))), 152 | } 153 | } 154 | } 155 | 156 | /// Wrapper type that can deserialize either an owned or a borrowed raw BSON array. 157 | #[derive(Debug)] 158 | pub(crate) enum OwnedOrBorrowedRawArray<'a> { 159 | Owned(RawArrayBuf), 160 | Borrowed(&'a RawArray), 161 | } 162 | 163 | impl OwnedOrBorrowedRawArray<'_> { 164 | pub(crate) fn into_owned(self) -> RawArrayBuf { 165 | match self { 166 | Self::Owned(o) => o, 167 | Self::Borrowed(b) => b.to_owned(), 168 | } 169 | } 170 | } 171 | 172 | impl<'a, 'de: 'a> Deserialize<'de> for OwnedOrBorrowedRawArray<'a> { 173 | fn deserialize(deserializer: D) -> Result 174 | where 175 | D: serde::Deserializer<'de>, 176 | { 177 | match deserializer 178 | .deserialize_newtype_struct(RAW_ARRAY_NEWTYPE, OwnedOrBorrowedRawBsonVisitor)? 179 | { 180 | OwnedOrBorrowedRawBson::Borrowed(RawBsonRef::Array(d)) => Ok(Self::Borrowed(d)), 181 | OwnedOrBorrowedRawBson::Owned(RawBson::Array(d)) => Ok(Self::Owned(d)), 182 | 183 | // For non-BSON formats, RawArray gets serialized as bytes, so we need to deserialize 184 | // from them here too. For BSON, the deserializer will return an error if it 185 | // sees the RAW_DOCUMENT_NEWTYPE but the next type isn't a document. 186 | OwnedOrBorrowedRawBson::Borrowed(RawBsonRef::Binary(b)) 187 | if b.subtype == BinarySubtype::Generic => 188 | { 189 | let doc = RawDocument::decode_from_bytes(b.bytes).map_err(SerdeError::custom)?; 190 | Ok(Self::Borrowed(RawArray::from_doc(doc))) 191 | } 192 | OwnedOrBorrowedRawBson::Owned(RawBson::Binary(b)) 193 | if b.subtype == BinarySubtype::Generic => 194 | { 195 | let doc = RawDocumentBuf::decode_from_bytes(b.bytes).map_err(SerdeError::custom)?; 196 | Ok(Self::Owned(RawArrayBuf::from_raw_document_buf(doc))) 197 | } 198 | 199 | o => Err(SerdeError::custom(format!( 200 | "expected raw array, instead got {:?}", 201 | o 202 | ))), 203 | } 204 | } 205 | } 206 | -------------------------------------------------------------------------------- /src/raw/test/props.rs: -------------------------------------------------------------------------------- 1 | use crate::{spec::BinarySubtype, Binary, Bson, Document, JavaScriptCodeWithScope, Regex}; 2 | 3 | use proptest::prelude::*; 4 | 5 | fn arbitrary_binary_subtype() -> impl Strategy { 6 | prop_oneof![ 7 | Just(BinarySubtype::Generic), 8 | Just(BinarySubtype::Function), 9 | Just(BinarySubtype::BinaryOld), 10 | Just(BinarySubtype::UuidOld), 11 | Just(BinarySubtype::Uuid), 12 | Just(BinarySubtype::Md5), 13 | ] 14 | } 15 | 16 | pub(crate) fn arbitrary_bson() -> impl Strategy { 17 | let leaf = prop_oneof![ 18 | Just(Bson::Null), 19 | any::().prop_map(Bson::String), 20 | any::().prop_map(Bson::Boolean), 21 | any::().prop_map(Bson::Double), 22 | any::().prop_map(Bson::Int32), 23 | any::().prop_map(Bson::Int64), 24 | any::<(String, String)>().prop_map(|(pattern, options)| { 25 | Bson::RegularExpression(Regex::from_strings(pattern, options).unwrap()) 26 | }), 27 | any::<[u8; 12]>().prop_map(|bytes| 
Bson::ObjectId(crate::oid::ObjectId::from_bytes(bytes))), 28 | (arbitrary_binary_subtype(), any::>()).prop_map(|(subtype, bytes)| { 29 | let bytes = if let BinarySubtype::BinaryOld = subtype { 30 | // BinarySubtype::BinaryOld expects a four byte prefix, which the bson::Bson type 31 | // leaves up to the caller. 32 | 33 | let mut newbytes = Vec::with_capacity(bytes.len() + 4); 34 | newbytes.extend_from_slice(&(bytes.len() as i32).to_le_bytes()); 35 | newbytes.extend_from_slice(&bytes); 36 | newbytes 37 | } else { 38 | bytes 39 | }; 40 | Bson::Binary(Binary { subtype, bytes }) 41 | }), 42 | any::().prop_map(Bson::JavaScriptCode), 43 | ]; 44 | 45 | leaf.prop_recursive(4, 256, 10, |inner| { 46 | prop_oneof![ 47 | prop::collection::hash_map("[^\0]*", inner.clone(), 0..12) 48 | .prop_map(|map| Bson::Document(map.into_iter().collect())), 49 | prop::collection::vec(inner.clone(), 0..12).prop_map(Bson::Array), 50 | ( 51 | prop::collection::hash_map("[^\0]*", inner, 0..12) 52 | .prop_map(|map| map.into_iter().collect::()), 53 | any::() 54 | ) 55 | .prop_map(|(scope, code)| Bson::JavaScriptCodeWithScope( 56 | JavaScriptCodeWithScope { code, scope } 57 | )), 58 | ] 59 | }) 60 | } 61 | -------------------------------------------------------------------------------- /src/ser.rs: -------------------------------------------------------------------------------- 1 | // The MIT License (MIT) 2 | 3 | // Copyright (c) 2015 Y. T. Chung 4 | 5 | // Permission is hereby granted, free of charge, to any person obtaining a copy of 6 | // this software and associated documentation files (the "Software"), to deal in 7 | // the Software without restriction, including without limitation the rights to 8 | // use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of 9 | // the Software, and to permit persons to whom the Software is furnished to do so, 10 | // subject to the following conditions: 11 | 12 | // The above copyright notice and this permission notice shall be included in all 13 | // copies or substantial portions of the Software. 14 | 15 | // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS 17 | // FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR 18 | // COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER 19 | // IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN 20 | // CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 21 | 22 | //! Serializer 23 | 24 | mod raw; 25 | mod serde; 26 | 27 | pub use self::serde::Serializer; 28 | 29 | #[rustfmt::skip] 30 | use ::serde::{ser::Error as SerdeError, Serialize}; 31 | 32 | use crate::{ 33 | bson::{Bson, Document}, 34 | error::{Error, Result}, 35 | ser::serde::SerializerOptions, 36 | RawDocumentBuf, 37 | }; 38 | 39 | /// Encode a `T` Serializable into a [`Bson`] value. 40 | /// 41 | /// The [`Serializer`] used by this function presents itself as human readable, whereas the 42 | /// one used in [`serialize_to_vec`] does not. This means that this function will produce different 43 | /// BSON than [`serialize_to_vec`] for types that change their serialization output depending on 44 | /// whether the format is human readable or not. 
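/// For example, a simple struct serializes to a [`Bson`] document value (a sketch
/// following the same pattern as the `serialize_to_raw_document_buf` example later
/// in this module):
///
/// ```
/// use serde::Serialize;
///
/// #[derive(Serialize)]
/// struct Point {
///     x: i32,
///     y: i32,
/// }
///
/// let point = Point { x: 1, y: 2 };
/// let value = bson::serialize_to_bson(&point)?;
/// assert_eq!(value, bson::bson!({ "x": 1, "y": 2 }));
/// # Ok::<(), Box<dyn std::error::Error>>(())
/// ```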
45 | pub fn serialize_to_bson(value: &T) -> Result 46 | where 47 | T: Serialize + ?Sized, 48 | { 49 | let ser = Serializer::new(); 50 | #[cfg(feature = "serde_path_to_error")] 51 | { 52 | serde_path_to_error::serialize(value, ser).map_err(Error::with_path) 53 | } 54 | #[cfg(not(feature = "serde_path_to_error"))] 55 | value.serialize(ser) 56 | } 57 | 58 | /// Internal-only method to serialize data to BSON with the given options. 59 | pub(crate) fn to_bson_with_options(value: &T, options: SerializerOptions) -> Result 60 | where 61 | T: Serialize + ?Sized, 62 | { 63 | let ser = Serializer::new_with_options(options); 64 | value.serialize(ser) 65 | } 66 | 67 | /// Serialize a `T` Serializable into a BSON [`Document`]. 68 | /// 69 | /// The [`Serializer`] used by this function presents itself as human readable, whereas the 70 | /// one used in [`serialize_to_vec`] does not. This means that this function will produce different 71 | /// BSON than [`serialize_to_vec`] for types that change their serialization output depending on 72 | /// whether the format is human readable or not. 73 | pub fn serialize_to_document(value: &T) -> Result 74 | where 75 | T: Serialize + ?Sized, 76 | { 77 | match serialize_to_bson(value)? { 78 | Bson::Document(doc) => Ok(doc), 79 | bson => Err(Error::serialization(format!( 80 | "expected to serialize document, got type {:?} instead", 81 | bson.element_type() 82 | ))), 83 | } 84 | } 85 | 86 | /// Serialize the given `T` as a BSON byte vector. 87 | #[inline] 88 | pub fn serialize_to_vec(value: &T) -> Result> 89 | where 90 | T: Serialize, 91 | { 92 | let mut serializer = raw::Serializer::new(); 93 | #[cfg(feature = "serde_path_to_error")] 94 | { 95 | serde_path_to_error::serialize(value, &mut serializer).map_err(Error::with_path)?; 96 | } 97 | #[cfg(not(feature = "serde_path_to_error"))] 98 | { 99 | value.serialize(&mut serializer)?; 100 | } 101 | Ok(serializer.into_vec()) 102 | } 103 | 104 | /// Serialize the given `T` as a [`RawDocumentBuf`]. 105 | /// 106 | /// ```rust 107 | /// use serde::Serialize; 108 | /// use bson::rawdoc; 109 | /// 110 | /// #[derive(Serialize)] 111 | /// struct Cat { 112 | /// name: String, 113 | /// age: i32 114 | /// } 115 | /// 116 | /// let cat = Cat { name: "Garfield".to_string(), age: 43 }; 117 | /// let doc = bson::serialize_to_raw_document_buf(&cat)?; 118 | /// assert_eq!(doc, rawdoc! 
{ "name": "Garfield", "age": 43 }); 119 | /// # Ok::<(), Box>(()) 120 | /// ``` 121 | #[inline] 122 | pub fn serialize_to_raw_document_buf(value: &T) -> Result 123 | where 124 | T: Serialize, 125 | { 126 | RawDocumentBuf::decode_from_bytes(serialize_to_vec(value)?).map_err(Error::custom) 127 | } 128 | -------------------------------------------------------------------------------- /src/tests.rs: -------------------------------------------------------------------------------- 1 | mod binary_subtype; 2 | mod datetime; 3 | mod modules; 4 | #[cfg(feature = "serde")] 5 | mod serde; 6 | #[cfg(feature = "serde")] 7 | mod serde_helpers; 8 | #[cfg(feature = "serde")] 9 | mod spec; 10 | 11 | use modules::TestLock; 12 | use once_cell::sync::Lazy; 13 | 14 | pub(crate) static LOCK: Lazy = Lazy::new(TestLock::new); 15 | -------------------------------------------------------------------------------- /src/tests/binary_subtype.rs: -------------------------------------------------------------------------------- 1 | use crate::{spec::BinarySubtype, tests::LOCK}; 2 | 3 | #[test] 4 | fn from_u8() { 5 | let _guard = LOCK.run_concurrently(); 6 | // Check the endpoints of the defined, reserved, and user-defined subtype ranges. 7 | assert_eq!(BinarySubtype::from(0x00), BinarySubtype::Generic); 8 | assert_eq!(BinarySubtype::from(0x06), BinarySubtype::Encrypted); 9 | assert_eq!(BinarySubtype::from(0x07), BinarySubtype::Column); 10 | assert_eq!(BinarySubtype::from(0x08), BinarySubtype::Sensitive); 11 | assert_eq!(BinarySubtype::from(0x7F), BinarySubtype::Reserved(0x7F)); 12 | assert_eq!(BinarySubtype::from(0x80), BinarySubtype::UserDefined(0x80)); 13 | assert_eq!(BinarySubtype::from(0xFF), BinarySubtype::UserDefined(0xFF)); 14 | } 15 | -------------------------------------------------------------------------------- /src/tests/datetime.rs: -------------------------------------------------------------------------------- 1 | use std::time::Duration; 2 | 3 | use crate::tests::LOCK; 4 | 5 | #[test] 6 | fn rfc3339_to_datetime() { 7 | let _guard = LOCK.run_concurrently(); 8 | 9 | let rfc = "2020-06-09T10:58:07.095Z"; 10 | let date = 11 | time::OffsetDateTime::parse(rfc, &time::format_description::well_known::Rfc3339).unwrap(); 12 | let parsed = crate::DateTime::parse_rfc3339_str(rfc).unwrap(); 13 | assert_eq!(parsed, crate::DateTime::from_time_0_3(date)); 14 | assert_eq!(crate::DateTime::try_to_rfc3339_string(parsed).unwrap(), rfc); 15 | } 16 | 17 | #[test] 18 | fn invalid_rfc3339_to_datetime() { 19 | let _guard = LOCK.run_concurrently(); 20 | 21 | let a = "2020-06-09T10:58:07-095Z"; 22 | let b = "2020-06-09T10:58:07.095"; 23 | let c = "2020-06-09T10:62:07.095Z"; 24 | assert!(crate::DateTime::parse_rfc3339_str(a).is_err()); 25 | assert!(crate::DateTime::parse_rfc3339_str(b).is_err()); 26 | assert!(crate::DateTime::parse_rfc3339_str(c).is_err()); 27 | } 28 | 29 | #[test] 30 | fn datetime_to_rfc3339() { 31 | assert_eq!( 32 | crate::DateTime::from_millis(0) 33 | .try_to_rfc3339_string() 34 | .unwrap(), 35 | "1970-01-01T00:00:00Z" 36 | ); 37 | } 38 | 39 | #[test] 40 | #[cfg(feature = "large_dates")] 41 | fn invalid_datetime_to_rfc3339() { 42 | assert!(crate::DateTime::MAX.try_to_rfc3339_string().is_err()); 43 | } 44 | 45 | #[test] 46 | fn duration_since() { 47 | let _guard = LOCK.run_concurrently(); 48 | 49 | let date1 = crate::DateTime::from_millis(100); 50 | let date2 = crate::DateTime::from_millis(1000); 51 | 52 | assert_eq!( 53 | date2.checked_duration_since(date1), 54 | Some(Duration::from_millis(900)) 55 | ); 56 | 
assert_eq!( 57 | date2.saturating_duration_since(date1), 58 | Duration::from_millis(900) 59 | ); 60 | assert!(date1.checked_duration_since(date2).is_none()); 61 | assert_eq!(date1.saturating_duration_since(date2), Duration::ZERO); 62 | } 63 | -------------------------------------------------------------------------------- /src/tests/modules.rs: -------------------------------------------------------------------------------- 1 | mod binary; 2 | mod bson; 3 | mod document; 4 | mod lock; 5 | mod macros; 6 | mod oid; 7 | #[cfg(feature = "serde")] 8 | mod ser; 9 | #[cfg(feature = "serde")] 10 | mod serializer_deserializer; 11 | 12 | pub use self::lock::TestLock; 13 | -------------------------------------------------------------------------------- /src/tests/modules/binary.rs: -------------------------------------------------------------------------------- 1 | use crate::{base64, spec::BinarySubtype, tests::LOCK, Binary}; 2 | 3 | #[test] 4 | fn binary_from_base64() { 5 | let _guard = LOCK.run_concurrently(); 6 | 7 | let input = base64::encode("hello"); 8 | let produced = Binary::from_base64(input, None).unwrap(); 9 | let expected = Binary { 10 | bytes: "hello".as_bytes().to_vec(), 11 | subtype: BinarySubtype::Generic, 12 | }; 13 | assert_eq!(produced, expected); 14 | 15 | let produced = Binary::from_base64("", BinarySubtype::Uuid).unwrap(); 16 | let expected = Binary { 17 | bytes: "".as_bytes().to_vec(), 18 | subtype: BinarySubtype::Uuid, 19 | }; 20 | assert_eq!(produced, expected); 21 | } 22 | -------------------------------------------------------------------------------- /src/tests/modules/lock.rs: -------------------------------------------------------------------------------- 1 | use std::sync::{RwLock, RwLockReadGuard, RwLockWriteGuard}; 2 | 3 | #[derive(Default)] 4 | pub struct TestLock { 5 | inner: RwLock<()>, 6 | } 7 | 8 | impl TestLock { 9 | pub fn new() -> Self { 10 | Default::default() 11 | } 12 | 13 | pub fn run_concurrently(&self) -> RwLockReadGuard<'_, ()> { 14 | self.inner.read().unwrap() 15 | } 16 | 17 | pub fn run_exclusively(&self) -> RwLockWriteGuard<'_, ()> { 18 | self.inner.write().unwrap() 19 | } 20 | } 21 | -------------------------------------------------------------------------------- /src/tests/modules/oid.rs: -------------------------------------------------------------------------------- 1 | use crate::{oid::ObjectId, tests::LOCK}; 2 | 3 | #[test] 4 | fn string_oid() { 5 | let _guard = LOCK.run_concurrently(); 6 | let s = "123456789012123456789012"; 7 | let oid_res = ObjectId::parse_str(s); 8 | assert!(oid_res.is_ok()); 9 | let actual_s = hex::encode(oid_res.unwrap().bytes()); 10 | assert_eq!(s.to_owned(), actual_s); 11 | } 12 | 13 | #[test] 14 | fn byte_string_oid() { 15 | let _guard = LOCK.run_concurrently(); 16 | let s = "541b1a00e8a23afa832b218e"; 17 | let oid_res = ObjectId::parse_str(s); 18 | assert!(oid_res.is_ok()); 19 | let oid = oid_res.unwrap(); 20 | let bytes: [u8; 12] = [ 21 | 0x54u8, 0x1Bu8, 0x1Au8, 0x00u8, 0xE8u8, 0xA2u8, 0x3Au8, 0xFAu8, 0x83u8, 0x2Bu8, 0x21u8, 22 | 0x8Eu8, 23 | ]; 24 | 25 | assert_eq!(bytes, oid.bytes()); 26 | assert_eq!(s, oid.to_string()); 27 | } 28 | 29 | #[test] 30 | #[allow(clippy::eq_op)] 31 | fn oid_equals() { 32 | let _guard = LOCK.run_concurrently(); 33 | let oid = ObjectId::new(); 34 | assert_eq!(oid, oid); 35 | } 36 | 37 | #[test] 38 | fn oid_not_equals() { 39 | let _guard = LOCK.run_concurrently(); 40 | assert!(ObjectId::new() != ObjectId::new()); 41 | } 42 | 43 | // check that the last byte in objectIDs is increasing 44 | 
#[test] 45 | fn counter_increasing() { 46 | let _guard = LOCK.run_concurrently(); 47 | let oid1_bytes = ObjectId::new().bytes(); 48 | let oid2_bytes = ObjectId::new().bytes(); 49 | assert!(oid1_bytes[11] < oid2_bytes[11]); 50 | } 51 | 52 | #[test] 53 | fn fromstr_oid() { 54 | let _guard = LOCK.run_concurrently(); 55 | let s = "123456789012123456789012"; 56 | let oid_res = s.parse::(); 57 | assert!(oid_res.is_ok(), "oid parse failed"); 58 | let actual_s = hex::encode(oid_res.unwrap().bytes()); 59 | assert_eq!(s, &actual_s, "parsed and expected oids differ"); 60 | } 61 | 62 | #[test] 63 | fn oid_from_parts() { 64 | let _guard = LOCK.run_concurrently(); 65 | let seconds_since_epoch = 123; 66 | let process_id = [4, 5, 6, 7, 8]; 67 | let counter = [9, 10, 11]; 68 | let oid = ObjectId::from_parts(seconds_since_epoch, process_id, counter); 69 | assert_eq!( 70 | oid.timestamp().timestamp_millis(), 71 | i64::from(seconds_since_epoch) * 1000 72 | ); 73 | assert_eq!(&oid.bytes()[4..9], &process_id); 74 | assert_eq!(&oid.bytes()[9..], &counter); 75 | } 76 | -------------------------------------------------------------------------------- /src/tests/modules/ser.rs: -------------------------------------------------------------------------------- 1 | use std::collections::BTreeMap; 2 | 3 | use assert_matches::assert_matches; 4 | 5 | use crate::{ 6 | deserialize_from_bson, 7 | error::ErrorKind, 8 | oid::ObjectId, 9 | serialize_to_bson, 10 | serialize_to_vec, 11 | tests::LOCK, 12 | Bson, 13 | Document, 14 | }; 15 | 16 | #[test] 17 | #[allow(clippy::float_cmp)] 18 | fn floating_point() { 19 | let _guard = LOCK.run_concurrently(); 20 | let obj = Bson::Double(240.5); 21 | let f: f64 = deserialize_from_bson(obj.clone()).unwrap(); 22 | assert_eq!(f, 240.5); 23 | 24 | let deser: Bson = serialize_to_bson(&f).unwrap(); 25 | assert_eq!(obj, deser); 26 | } 27 | 28 | #[test] 29 | fn string() { 30 | let _guard = LOCK.run_concurrently(); 31 | let obj = Bson::String("avocado".to_owned()); 32 | let s: String = deserialize_from_bson(obj.clone()).unwrap(); 33 | assert_eq!(s, "avocado"); 34 | 35 | let deser: Bson = serialize_to_bson(&s).unwrap(); 36 | assert_eq!(obj, deser); 37 | } 38 | 39 | #[test] 40 | fn arr() { 41 | let _guard = LOCK.run_concurrently(); 42 | let obj = Bson::Array(vec![ 43 | Bson::Int32(0), 44 | Bson::Int32(1), 45 | Bson::Int32(2), 46 | Bson::Int32(3), 47 | ]); 48 | let arr: Vec = deserialize_from_bson(obj.clone()).unwrap(); 49 | assert_eq!(arr, vec![0i32, 1i32, 2i32, 3i32]); 50 | 51 | let deser: Bson = serialize_to_bson(&arr).unwrap(); 52 | assert_eq!(deser, obj); 53 | } 54 | 55 | #[test] 56 | fn boolean() { 57 | let _guard = LOCK.run_concurrently(); 58 | let obj = Bson::Boolean(true); 59 | let b: bool = deserialize_from_bson(obj.clone()).unwrap(); 60 | assert!(b); 61 | 62 | let deser: Bson = serialize_to_bson(&b).unwrap(); 63 | assert_eq!(deser, obj); 64 | } 65 | 66 | #[test] 67 | fn int32() { 68 | let _guard = LOCK.run_concurrently(); 69 | let obj = Bson::Int32(101); 70 | let i: i32 = deserialize_from_bson(obj.clone()).unwrap(); 71 | 72 | assert_eq!(i, 101); 73 | 74 | let deser: Bson = serialize_to_bson(&i).unwrap(); 75 | assert_eq!(deser, obj); 76 | } 77 | 78 | #[test] 79 | fn uint8_u2i() { 80 | let _guard = LOCK.run_concurrently(); 81 | let obj: Bson = serialize_to_bson(&u8::MIN).unwrap(); 82 | let deser: u8 = deserialize_from_bson(obj).unwrap(); 83 | assert_eq!(deser, u8::MIN); 84 | 85 | let obj_max: Bson = serialize_to_bson(&u8::MAX).unwrap(); 86 | let deser_max: u8 = 
deserialize_from_bson(obj_max).unwrap(); 87 | assert_eq!(deser_max, u8::MAX); 88 | } 89 | 90 | #[test] 91 | fn uint16_u2i() { 92 | let _guard = LOCK.run_concurrently(); 93 | let obj: Bson = serialize_to_bson(&u16::MIN).unwrap(); 94 | let deser: u16 = deserialize_from_bson(obj).unwrap(); 95 | assert_eq!(deser, u16::MIN); 96 | 97 | let obj_max: Bson = serialize_to_bson(&u16::MAX).unwrap(); 98 | let deser_max: u16 = deserialize_from_bson(obj_max).unwrap(); 99 | assert_eq!(deser_max, u16::MAX); 100 | } 101 | 102 | #[test] 103 | fn uint32_u2i() { 104 | let _guard = LOCK.run_concurrently(); 105 | let obj_min: Bson = serialize_to_bson(&u32::MIN).unwrap(); 106 | let deser_min: u32 = deserialize_from_bson(obj_min).unwrap(); 107 | assert_eq!(deser_min, u32::MIN); 108 | 109 | let obj_max: Bson = serialize_to_bson(&u32::MAX).unwrap(); 110 | let deser_max: u32 = deserialize_from_bson(obj_max).unwrap(); 111 | assert_eq!(deser_max, u32::MAX); 112 | } 113 | 114 | #[test] 115 | fn uint64_u2i() { 116 | let _guard = LOCK.run_concurrently(); 117 | let obj_min: Bson = serialize_to_bson(&u64::MIN).unwrap(); 118 | let deser_min: u64 = deserialize_from_bson(obj_min).unwrap(); 119 | assert_eq!(deser_min, u64::MIN); 120 | 121 | let error = serialize_to_bson(&u64::MAX).unwrap_err(); 122 | assert_matches!( 123 | error.kind, 124 | ErrorKind::TooLargeUnsignedInteger { n: u64::MAX } 125 | ); 126 | } 127 | 128 | #[test] 129 | fn int64() { 130 | let _guard = LOCK.run_concurrently(); 131 | let obj = Bson::Int64(101); 132 | let i: i64 = deserialize_from_bson(obj.clone()).unwrap(); 133 | assert_eq!(i, 101); 134 | 135 | let deser: Bson = serialize_to_bson(&i).unwrap(); 136 | assert_eq!(deser, obj); 137 | } 138 | 139 | #[test] 140 | fn oid() { 141 | let _guard = LOCK.run_concurrently(); 142 | let oid = ObjectId::new(); 143 | let obj = Bson::ObjectId(oid); 144 | let s: BTreeMap = deserialize_from_bson(obj.clone()).unwrap(); 145 | 146 | let mut expected = BTreeMap::new(); 147 | expected.insert("$oid".to_owned(), oid.to_string()); 148 | assert_eq!(s, expected); 149 | 150 | let deser: Bson = serialize_to_bson(&s).unwrap(); 151 | assert_eq!(deser, obj); 152 | } 153 | 154 | #[test] 155 | fn cstring_null_bytes_error() { 156 | let _guard = LOCK.run_concurrently(); 157 | 158 | let doc = doc! { "\0": "a" }; 159 | verify_doc(doc); 160 | 161 | let doc = doc! 
{ "a": { "\0": "b" } }; 162 | verify_doc(doc); 163 | 164 | fn verify_doc(doc: Document) { 165 | let result = doc.encode_to_vec(); 166 | assert!(result.is_err(), "unexpected success"); 167 | let err = result.unwrap_err(); 168 | assert!(err.is_malformed_bytes(), "unexpected error: {:?}", err); 169 | let result = serialize_to_vec(&doc); 170 | assert!(result.is_err(), "unexpected success"); 171 | assert!(result.unwrap_err().is_malformed_bytes()); 172 | } 173 | } 174 | -------------------------------------------------------------------------------- /src/tests/serde_helpers.rs: -------------------------------------------------------------------------------- 1 | use core::str; 2 | 3 | use serde::{de::Visitor, Deserialize, Serialize}; 4 | 5 | use crate::{ 6 | deserialize_from_slice, 7 | serde_helpers::{HumanReadable, Utf8LossyDeserialization}, 8 | }; 9 | 10 | #[test] 11 | fn human_readable_wrapper() { 12 | #[derive(PartialEq, Eq, Debug)] 13 | struct Detector { 14 | serialized_as: bool, 15 | deserialized_as: bool, 16 | } 17 | impl Detector { 18 | fn new() -> Self { 19 | Detector { 20 | serialized_as: false, 21 | deserialized_as: false, 22 | } 23 | } 24 | } 25 | impl Serialize for Detector { 26 | fn serialize(&self, serializer: S) -> Result 27 | where 28 | S: serde::Serializer, 29 | { 30 | let s = if serializer.is_human_readable() { 31 | "human readable" 32 | } else { 33 | "not human readable" 34 | }; 35 | serializer.serialize_str(s) 36 | } 37 | } 38 | impl<'de> Deserialize<'de> for Detector { 39 | fn deserialize(deserializer: D) -> Result 40 | where 41 | D: serde::Deserializer<'de>, 42 | { 43 | struct V; 44 | impl Visitor<'_> for V { 45 | type Value = bool; 46 | 47 | fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result { 48 | formatter.write_str("Detector") 49 | } 50 | 51 | fn visit_str(self, v: &str) -> Result 52 | where 53 | E: serde::de::Error, 54 | { 55 | match v { 56 | "human readable" => Ok(true), 57 | "not human readable" => Ok(false), 58 | _ => Err(E::custom(format!("invalid detector string {:?}", v))), 59 | } 60 | } 61 | } 62 | let deserialized_as = deserializer.is_human_readable(); 63 | let serialized_as = deserializer.deserialize_str(V)?; 64 | Ok(Detector { 65 | serialized_as, 66 | deserialized_as, 67 | }) 68 | } 69 | } 70 | #[derive(PartialEq, Eq, Debug, Serialize, Deserialize)] 71 | struct Data { 72 | first: HumanReadable, 73 | outer: Detector, 74 | wrapped: HumanReadable, 75 | inner: HumanReadable, 76 | } 77 | #[derive(PartialEq, Eq, Debug, Serialize, Deserialize)] 78 | struct SubData { 79 | value: Detector, 80 | } 81 | 82 | let data = Data { 83 | first: HumanReadable(Detector::new()), 84 | outer: Detector::new(), 85 | wrapped: HumanReadable(Detector::new()), 86 | inner: HumanReadable(SubData { 87 | value: Detector::new(), 88 | }), 89 | }; 90 | // use the raw serializer, which is non-human-readable 91 | let data_doc = crate::serialize_to_raw_document_buf(&data).unwrap(); 92 | let expected_data_doc = rawdoc! 
{ 93 | "first": "human readable", 94 | "outer": "not human readable", 95 | "wrapped": "human readable", 96 | "inner": { 97 | "value": "human readable", 98 | } 99 | }; 100 | assert_eq!(data_doc, expected_data_doc); 101 | 102 | let tripped: Data = crate::deserialize_from_slice(expected_data_doc.as_bytes()).unwrap(); 103 | let expected = Data { 104 | first: HumanReadable(Detector { 105 | serialized_as: true, 106 | deserialized_as: true, 107 | }), 108 | outer: Detector { 109 | serialized_as: false, 110 | deserialized_as: false, 111 | }, 112 | wrapped: HumanReadable(Detector { 113 | serialized_as: true, 114 | deserialized_as: true, 115 | }), 116 | inner: HumanReadable(SubData { 117 | value: Detector { 118 | serialized_as: true, 119 | deserialized_as: true, 120 | }, 121 | }), 122 | }; 123 | assert_eq!(&tripped, &expected); 124 | } 125 | 126 | #[test] 127 | #[allow(dead_code)] // suppress warning for unread fields 128 | fn utf8_lossy_wrapper() { 129 | let invalid_bytes = b"\x80\xae".to_vec(); 130 | let invalid_string = unsafe { String::from_utf8_unchecked(invalid_bytes) }; 131 | 132 | let both_strings_invalid_bytes = 133 | rawdoc! { "s1": invalid_string.clone(), "s2": invalid_string.clone() }.into_bytes(); 134 | let first_string_invalid_bytes = 135 | rawdoc! { "s1": invalid_string.clone(), "s2": ":)" }.into_bytes(); 136 | 137 | let expected_replacement = "��".to_string(); 138 | 139 | #[derive(Debug, Deserialize)] 140 | struct NoUtf8Lossy { 141 | s1: String, 142 | s2: String, 143 | } 144 | 145 | deserialize_from_slice::(&both_strings_invalid_bytes).unwrap_err(); 146 | 147 | let s = deserialize_from_slice::>( 148 | &both_strings_invalid_bytes, 149 | ) 150 | .unwrap() 151 | .0; 152 | assert_eq!(s.s1, expected_replacement); 153 | assert_eq!(s.s2, expected_replacement); 154 | 155 | #[derive(Debug, Deserialize)] 156 | struct FirstStringUtf8Lossy { 157 | s1: Utf8LossyDeserialization, 158 | s2: String, 159 | } 160 | 161 | let s = deserialize_from_slice::(&first_string_invalid_bytes).unwrap(); 162 | assert_eq!(s.s1.0, expected_replacement); 163 | assert_eq!(&s.s2, ":)"); 164 | 165 | deserialize_from_slice::(&both_strings_invalid_bytes).unwrap_err(); 166 | 167 | let s = deserialize_from_slice::>( 168 | &both_strings_invalid_bytes, 169 | ) 170 | .unwrap() 171 | .0; 172 | assert_eq!(s.s1.0, expected_replacement); 173 | assert_eq!(s.s2, expected_replacement); 174 | } 175 | -------------------------------------------------------------------------------- /src/tests/spec.rs: -------------------------------------------------------------------------------- 1 | mod corpus; 2 | mod vector; 3 | 4 | use std::{ 5 | any::type_name, 6 | ffi::OsStr, 7 | fs::{self, File}, 8 | path::PathBuf, 9 | }; 10 | 11 | use crate::Bson; 12 | use serde::de::DeserializeOwned; 13 | 14 | pub(crate) fn run_spec_test(spec: &[&str], run_test_file: F) 15 | where 16 | F: Fn(T), 17 | T: DeserializeOwned, 18 | { 19 | let base_path: PathBuf = [env!("CARGO_MANIFEST_DIR"), "src", "tests", "spec", "json"] 20 | .iter() 21 | .chain(spec.iter()) 22 | .collect(); 23 | 24 | for entry in fs::read_dir(&base_path) 25 | .unwrap_or_else(|e| panic!("Failed to read directory at {:?}: {}", base_path, e)) 26 | { 27 | let path = entry.unwrap().path(); 28 | if path.extension() != Some(OsStr::new("json")) { 29 | continue; 30 | } 31 | 32 | let file = File::open(&path) 33 | .unwrap_or_else(|e| panic!("Failed to open file at {:?}: {}", path, e)); 34 | 35 | let mut json_deserializer = serde_json::Deserializer::from_reader(file); 36 | let test_bson: Bson = 
serde_path_to_error::deserialize(&mut json_deserializer) 37 | .unwrap_or_else(|e| { 38 | panic!( 39 | "Failed to deserialize test JSON to BSON in {:?}: {}", 40 | path, e 41 | ) 42 | }); 43 | let bson_deserializer = crate::Deserializer::new(test_bson); 44 | let test: T = serde_path_to_error::deserialize(bson_deserializer).unwrap_or_else(|e| { 45 | panic!( 46 | "Failed to deserialize test BSON to {} in {:?}: {}", 47 | type_name::<T>(), 48 | path, 49 | e 50 | ) 51 | }); 52 | 53 | run_test_file(test) 54 | } 55 | } 56 | -------------------------------------------------------------------------------- /src/tests/spec/json/bson-binary-vector/README.md: -------------------------------------------------------------------------------- 1 | # Testing Binary subtype 9: Vector 2 | 3 | The JSON files in this directory tree are platform-independent tests that drivers can use to prove their conformance to 4 | the specification. 5 | 6 | These tests focus on the roundtrip of the list of numbers as input/output, along with their data type and byte padding. 7 | 8 | Additional tests exist in `bson_corpus/tests/binary.json` but do not sufficiently test the end-to-end process of Vector 9 | to BSON. For this reason, drivers must create a bespoke test runner for the vector subtype (a minimal sketch of such a runner follows the data files below). 10 | 11 | ## Format 12 | 13 | The test data corpus consists of a JSON file for each data type (dtype). Each file contains a number of test cases, 14 | under the top-level key "tests". Each test case pertains to a single vector. The keys provide the specification of the 15 | vector. Valid cases also include the Canonical BSON format of a document {test_key: binary}. The "test_key" is common, 16 | and specified at the top level. 17 | 18 | #### Top level keys 19 | 20 | Each JSON file contains three top-level keys. 21 | 22 | - `description`: human-readable description of what is in the file 23 | - `test_key`: name used for key when encoding/decoding a BSON document containing the single BSON Binary for the test 24 | case. Applies to *every* case. 25 | - `tests`: array of test case objects, each of which has the following keys. Valid cases will also contain additional 26 | binary and json encoding values. 27 | 28 | #### Keys of individual test cases 29 | 30 | - `description`: string describing the test. 31 | - `valid`: boolean indicating if the vector, dtype, and padding should be considered a valid input. 32 | - `vector`: (required if valid is true) list of numbers 33 | - `dtype_hex`: string defining the data type in hex (e.g. "0x10", "0x27") 34 | - `dtype_alias`: (optional) string defining the data dtype, perhaps as Enum. 35 | - `padding`: (optional) integer for byte padding. Defaults to 0. 36 | - `canonical_bson`: (required if valid is true) an (uppercase) big-endian hex representation of a BSON byte string. 37 | 38 | ## Required tests 39 | 40 | #### To prove correct in a valid case (`valid: true`), one MUST 41 | 42 | - encode a document from the numeric values, dtype, and padding, along with the "test_key", and assert this matches the 43 | canonical_bson string. 44 | - decode the canonical_bson into its binary form, and then assert that the numeric values, dtype, and padding all match 45 | those provided in the JSON. 46 | 47 | Note: For floating point number types, exact numerical matches may not be possible.
Drivers that natively support the 48 | floating-point type being tested (e.g., when testing float32 vector values in a driver that natively supports float32), 49 | MUST assert that the input float array is the same after encoding and decoding. 50 | 51 | #### To prove correct in an invalid case (`valid:false`), one MUST 52 | 53 | - if the vector field is present, raise an exception when attempting to encode a document from the numeric values, 54 | dtype, and padding. 55 | - if the canonical_bson field is present, raise an exception when attempting to deserialize it into the corresponding 56 | numeric values, as the field contains corrupted data. 57 | 58 | ## FAQ 59 | 60 | - What MongoDB Server version does this apply to? 61 | - Files in the "specifications" repository have no version scheme. They are not tied to a MongoDB server version. 62 | -------------------------------------------------------------------------------- /src/tests/spec/json/bson-binary-vector/float32.json: -------------------------------------------------------------------------------- 1 | { 2 | "description": "Tests of Binary subtype 9, Vectors, with dtype FLOAT32", 3 | "test_key": "vector", 4 | "tests": [ 5 | { 6 | "description": "Simple Vector FLOAT32", 7 | "valid": true, 8 | "vector": [127.0, 7.0], 9 | "dtype_hex": "0x27", 10 | "dtype_alias": "FLOAT32", 11 | "padding": 0, 12 | "canonical_bson": "1C00000005766563746F72000A0000000927000000FE420000E04000" 13 | }, 14 | { 15 | "description": "Vector with decimals and negative value FLOAT32", 16 | "valid": true, 17 | "vector": [127.7, -7.7], 18 | "dtype_hex": "0x27", 19 | "dtype_alias": "FLOAT32", 20 | "padding": 0, 21 | "canonical_bson": "1C00000005766563746F72000A0000000927006666FF426666F6C000" 22 | }, 23 | { 24 | "description": "Empty Vector FLOAT32", 25 | "valid": true, 26 | "vector": [], 27 | "dtype_hex": "0x27", 28 | "dtype_alias": "FLOAT32", 29 | "padding": 0, 30 | "canonical_bson": "1400000005766563746F72000200000009270000" 31 | }, 32 | { 33 | "description": "Infinity Vector FLOAT32", 34 | "valid": true, 35 | "vector": [{"$numberDouble": "-Infinity"}, 0.0, {"$numberDouble": "Infinity"} ], 36 | "dtype_hex": "0x27", 37 | "dtype_alias": "FLOAT32", 38 | "padding": 0, 39 | "canonical_bson": "2000000005766563746F72000E000000092700000080FF000000000000807F00" 40 | }, 41 | { 42 | "description": "FLOAT32 with padding", 43 | "valid": false, 44 | "vector": [127.0, 7.0], 45 | "dtype_hex": "0x27", 46 | "dtype_alias": "FLOAT32", 47 | "padding": 3, 48 | "canonical_bson": "1C00000005766563746F72000A0000000927030000FE420000E04000" 49 | }, 50 | { 51 | "description": "Insufficient vector data with 3 bytes FLOAT32", 52 | "valid": false, 53 | "dtype_hex": "0x27", 54 | "dtype_alias": "FLOAT32", 55 | "canonical_bson": "1700000005766563746F7200050000000927002A2A2A00" 56 | }, 57 | { 58 | "description": "Insufficient vector data with 5 bytes FLOAT32", 59 | "valid": false, 60 | "dtype_hex": "0x27", 61 | "dtype_alias": "FLOAT32", 62 | "canonical_bson": "1900000005766563746F7200070000000927002A2A2A2A2A00" 63 | } 64 | ] 65 | } 66 | -------------------------------------------------------------------------------- /src/tests/spec/json/bson-binary-vector/int8.json: -------------------------------------------------------------------------------- 1 | { 2 | "description": "Tests of Binary subtype 9, Vectors, with dtype INT8", 3 | "test_key": "vector", 4 | "tests": [ 5 | { 6 | "description": "Simple Vector INT8", 7 | "valid": true, 8 | "vector": [127, 7], 9 | "dtype_hex": "0x03", 10 | "dtype_alias": 
"INT8", 11 | "padding": 0, 12 | "canonical_bson": "1600000005766563746F7200040000000903007F0700" 13 | }, 14 | { 15 | "description": "Empty Vector INT8", 16 | "valid": true, 17 | "vector": [], 18 | "dtype_hex": "0x03", 19 | "dtype_alias": "INT8", 20 | "padding": 0, 21 | "canonical_bson": "1400000005766563746F72000200000009030000" 22 | }, 23 | { 24 | "description": "Overflow Vector INT8", 25 | "valid": false, 26 | "vector": [128], 27 | "dtype_hex": "0x03", 28 | "dtype_alias": "INT8", 29 | "padding": 0 30 | }, 31 | { 32 | "description": "Underflow Vector INT8", 33 | "valid": false, 34 | "vector": [-129], 35 | "dtype_hex": "0x03", 36 | "dtype_alias": "INT8", 37 | "padding": 0 38 | }, 39 | { 40 | "description": "INT8 with padding", 41 | "valid": false, 42 | "vector": [127, 7], 43 | "dtype_hex": "0x03", 44 | "dtype_alias": "INT8", 45 | "padding": 3, 46 | "canonical_bson": "1600000005766563746F7200040000000903037F0700" 47 | }, 48 | { 49 | "description": "INT8 with float inputs", 50 | "valid": false, 51 | "vector": [127.77, 7.77], 52 | "dtype_hex": "0x03", 53 | "dtype_alias": "INT8", 54 | "padding": 0 55 | } 56 | ] 57 | } 58 | -------------------------------------------------------------------------------- /src/tests/spec/json/bson-binary-vector/packed_bit.json: -------------------------------------------------------------------------------- 1 | { 2 | "description": "Tests of Binary subtype 9, Vectors, with dtype PACKED_BIT", 3 | "test_key": "vector", 4 | "tests": [ 5 | { 6 | "description": "Padding specified with no vector data PACKED_BIT", 7 | "valid": false, 8 | "vector": [], 9 | "dtype_hex": "0x10", 10 | "dtype_alias": "PACKED_BIT", 11 | "padding": 1, 12 | "canonical_bson": "1400000005766563746F72000200000009100100" 13 | }, 14 | { 15 | "description": "Simple Vector PACKED_BIT", 16 | "valid": true, 17 | "vector": [127, 7], 18 | "dtype_hex": "0x10", 19 | "dtype_alias": "PACKED_BIT", 20 | "padding": 0, 21 | "canonical_bson": "1600000005766563746F7200040000000910007F0700" 22 | }, 23 | { 24 | "description": "Empty Vector PACKED_BIT", 25 | "valid": true, 26 | "vector": [], 27 | "dtype_hex": "0x10", 28 | "dtype_alias": "PACKED_BIT", 29 | "padding": 0, 30 | "canonical_bson": "1400000005766563746F72000200000009100000" 31 | }, 32 | { 33 | "description": "PACKED_BIT with padding", 34 | "valid": true, 35 | "vector": [127, 7], 36 | "dtype_hex": "0x10", 37 | "dtype_alias": "PACKED_BIT", 38 | "padding": 3, 39 | "canonical_bson": "1600000005766563746F7200040000000910037F0700" 40 | }, 41 | { 42 | "description": "Overflow Vector PACKED_BIT", 43 | "valid": false, 44 | "vector": [256], 45 | "dtype_hex": "0x10", 46 | "dtype_alias": "PACKED_BIT", 47 | "padding": 0 48 | }, 49 | { 50 | "description": "Underflow Vector PACKED_BIT", 51 | "valid": false, 52 | "vector": [-1], 53 | "dtype_hex": "0x10", 54 | "dtype_alias": "PACKED_BIT", 55 | "padding": 0 56 | }, 57 | { 58 | "description": "Vector with float values PACKED_BIT", 59 | "valid": false, 60 | "vector": [127.5], 61 | "dtype_hex": "0x10", 62 | "dtype_alias": "PACKED_BIT", 63 | "padding": 0 64 | }, 65 | { 66 | "description": "Exceeding maximum padding PACKED_BIT", 67 | "valid": false, 68 | "vector": [1], 69 | "dtype_hex": "0x10", 70 | "dtype_alias": "PACKED_BIT", 71 | "padding": 8, 72 | "canonical_bson": "1500000005766563746F7200030000000910080100" 73 | }, 74 | { 75 | "description": "Negative padding PACKED_BIT", 76 | "valid": false, 77 | "vector": [1], 78 | "dtype_hex": "0x10", 79 | "dtype_alias": "PACKED_BIT", 80 | "padding": -1 81 | } 82 | ] 83 | } 84 | 
-------------------------------------------------------------------------------- /src/tests/spec/json/bson-corpus/array.json: -------------------------------------------------------------------------------- 1 | { 2 | "description": "Array", 3 | "bson_type": "0x04", 4 | "test_key": "a", 5 | "valid": [ 6 | { 7 | "description": "Empty", 8 | "canonical_bson": "0D000000046100050000000000", 9 | "canonical_extjson": "{\"a\" : []}" 10 | }, 11 | { 12 | "description": "Single Element Array", 13 | "canonical_bson": "140000000461000C0000001030000A0000000000", 14 | "canonical_extjson": "{\"a\" : [{\"$numberInt\": \"10\"}]}" 15 | }, 16 | { 17 | "description": "Single Element Array with index set incorrectly to empty string", 18 | "degenerate_bson": "130000000461000B00000010000A0000000000", 19 | "canonical_bson": "140000000461000C0000001030000A0000000000", 20 | "canonical_extjson": "{\"a\" : [{\"$numberInt\": \"10\"}]}" 21 | }, 22 | { 23 | "description": "Single Element Array with index set incorrectly to ab", 24 | "degenerate_bson": "150000000461000D000000106162000A0000000000", 25 | "canonical_bson": "140000000461000C0000001030000A0000000000", 26 | "canonical_extjson": "{\"a\" : [{\"$numberInt\": \"10\"}]}" 27 | }, 28 | { 29 | "description": "Multi Element Array with duplicate indexes", 30 | "degenerate_bson": "1b000000046100130000001030000a000000103000140000000000", 31 | "canonical_bson": "1b000000046100130000001030000a000000103100140000000000", 32 | "canonical_extjson": "{\"a\" : [{\"$numberInt\": \"10\"}, {\"$numberInt\": \"20\"}]}" 33 | } 34 | ], 35 | "decodeErrors": [ 36 | { 37 | "description": "Array length too long: eats outer terminator", 38 | "bson": "140000000461000D0000001030000A0000000000" 39 | }, 40 | { 41 | "description": "Array length too short: leaks terminator", 42 | "bson": "140000000461000B0000001030000A0000000000" 43 | }, 44 | { 45 | "description": "Invalid Array: bad string length in field", 46 | "bson": "1A00000004666F6F00100000000230000500000062617A000000" 47 | } 48 | ] 49 | } 50 | -------------------------------------------------------------------------------- /src/tests/spec/json/bson-corpus/binary.json: -------------------------------------------------------------------------------- 1 | { 2 | "description": "Binary type", 3 | "bson_type": "0x05", 4 | "test_key": "x", 5 | "valid": [ 6 | { 7 | "description": "subtype 0x00 (Zero-length)", 8 | "canonical_bson": "0D000000057800000000000000", 9 | "canonical_extjson": "{\"x\" : { \"$binary\" : {\"base64\" : \"\", \"subType\" : \"00\"}}}" 10 | }, 11 | { 12 | "description": "subtype 0x00 (Zero-length, keys reversed)", 13 | "canonical_bson": "0D000000057800000000000000", 14 | "canonical_extjson": "{\"x\" : { \"$binary\" : {\"base64\" : \"\", \"subType\" : \"00\"}}}", 15 | "degenerate_extjson": "{\"x\" : { \"$binary\" : {\"subType\" : \"00\", \"base64\" : \"\"}}}" 16 | }, 17 | { 18 | "description": "subtype 0x00", 19 | "canonical_bson": "0F0000000578000200000000FFFF00", 20 | "canonical_extjson": "{\"x\" : { \"$binary\" : {\"base64\" : \"//8=\", \"subType\" : \"00\"}}}" 21 | }, 22 | { 23 | "description": "subtype 0x01", 24 | "canonical_bson": "0F0000000578000200000001FFFF00", 25 | "canonical_extjson": "{\"x\" : { \"$binary\" : {\"base64\" : \"//8=\", \"subType\" : \"01\"}}}" 26 | }, 27 | { 28 | "description": "subtype 0x02", 29 | "canonical_bson": "13000000057800060000000202000000FFFF00", 30 | "canonical_extjson": "{\"x\" : { \"$binary\" : {\"base64\" : \"//8=\", \"subType\" : \"02\"}}}" 31 | }, 32 | { 33 | "description": "subtype 0x03", 
34 | "canonical_bson": "1D000000057800100000000373FFD26444B34C6990E8E7D1DFC035D400", 35 | "canonical_extjson": "{\"x\" : { \"$binary\" : {\"base64\" : \"c//SZESzTGmQ6OfR38A11A==\", \"subType\" : \"03\"}}}" 36 | }, 37 | { 38 | "description": "subtype 0x04", 39 | "canonical_bson": "1D000000057800100000000473FFD26444B34C6990E8E7D1DFC035D400", 40 | "canonical_extjson": "{\"x\" : { \"$binary\" : {\"base64\" : \"c//SZESzTGmQ6OfR38A11A==\", \"subType\" : \"04\"}}}" 41 | }, 42 | { 43 | "description": "subtype 0x04 UUID", 44 | "canonical_bson": "1D000000057800100000000473FFD26444B34C6990E8E7D1DFC035D400", 45 | "canonical_extjson": "{\"x\" : { \"$binary\" : {\"base64\" : \"c//SZESzTGmQ6OfR38A11A==\", \"subType\" : \"04\"}}}", 46 | "degenerate_extjson": "{\"x\" : { \"$uuid\" : \"73ffd264-44b3-4c69-90e8-e7d1dfc035d4\"}}" 47 | }, 48 | { 49 | "description": "subtype 0x05", 50 | "canonical_bson": "1D000000057800100000000573FFD26444B34C6990E8E7D1DFC035D400", 51 | "canonical_extjson": "{\"x\" : { \"$binary\" : {\"base64\" : \"c//SZESzTGmQ6OfR38A11A==\", \"subType\" : \"05\"}}}" 52 | }, 53 | { 54 | "description": "subtype 0x07", 55 | "canonical_bson": "1D000000057800100000000773FFD26444B34C6990E8E7D1DFC035D400", 56 | "canonical_extjson": "{\"x\" : { \"$binary\" : {\"base64\" : \"c//SZESzTGmQ6OfR38A11A==\", \"subType\" : \"07\"}}}" 57 | }, 58 | { 59 | "description": "subtype 0x08", 60 | "canonical_bson": "1D000000057800100000000873FFD26444B34C6990E8E7D1DFC035D400", 61 | "canonical_extjson": "{\"x\" : { \"$binary\" : {\"base64\" : \"c//SZESzTGmQ6OfR38A11A==\", \"subType\" : \"08\"}}}" 62 | }, 63 | { 64 | "description": "subtype 0x80", 65 | "canonical_bson": "0F0000000578000200000080FFFF00", 66 | "canonical_extjson": "{\"x\" : { \"$binary\" : {\"base64\" : \"//8=\", \"subType\" : \"80\"}}}" 67 | }, 68 | { 69 | "description": "$type query operator (conflicts with legacy $binary form with $type field)", 70 | "canonical_bson": "1F000000037800170000000224747970650007000000737472696E67000000", 71 | "canonical_extjson": "{\"x\" : { \"$type\" : \"string\"}}" 72 | }, 73 | { 74 | "description": "$type query operator (conflicts with legacy $binary form with $type field)", 75 | "canonical_bson": "180000000378001000000010247479706500020000000000", 76 | "canonical_extjson": "{\"x\" : { \"$type\" : {\"$numberInt\": \"2\"}}}" 77 | } 78 | ], 79 | "decodeErrors": [ 80 | { 81 | "description": "Length longer than document", 82 | "bson": "1D000000057800FF0000000573FFD26444B34C6990E8E7D1DFC035D400" 83 | }, 84 | { 85 | "description": "Negative length", 86 | "bson": "0D000000057800FFFFFFFF0000" 87 | }, 88 | { 89 | "description": "subtype 0x02 length too long ", 90 | "bson": "13000000057800060000000203000000FFFF00" 91 | }, 92 | { 93 | "description": "subtype 0x02 length too short", 94 | "bson": "13000000057800060000000201000000FFFF00" 95 | }, 96 | { 97 | "description": "subtype 0x02 length negative one", 98 | "bson": "130000000578000600000002FFFFFFFFFFFF00" 99 | } 100 | ], 101 | "parseErrors": [ 102 | { 103 | "description": "$uuid wrong type", 104 | "string": "{\"x\" : { \"$uuid\" : { \"data\" : \"73ffd264-44b3-4c69-90e8-e7d1dfc035d4\"}}}" 105 | }, 106 | { 107 | "description": "$uuid invalid value--too short", 108 | "string": "{\"x\" : { \"$uuid\" : \"73ffd264-44b3-90e8-e7d1dfc035d4\"}}" 109 | }, 110 | { 111 | "description": "$uuid invalid value--too long", 112 | "string": "{\"x\" : { \"$uuid\" : \"73ffd264-44b3-4c69-90e8-e7d1dfc035d4-789e4\"}}" 113 | }, 114 | { 115 | "description": "$uuid invalid value--misplaced hyphens", 116 | 
"string": "{\"x\" : { \"$uuid\" : \"73ff-d26444b-34c6-990e8e-7d1dfc035d4\"}}" 117 | }, 118 | { 119 | "description": "$uuid invalid value--too many hyphens", 120 | "string": "{\"x\" : { \"$uuid\" : \"----d264-44b3-4--9-90e8-e7d1dfc0----\"}}" 121 | } 122 | ] 123 | } 124 | -------------------------------------------------------------------------------- /src/tests/spec/json/bson-corpus/boolean.json: -------------------------------------------------------------------------------- 1 | { 2 | "description": "Boolean", 3 | "bson_type": "0x08", 4 | "test_key": "b", 5 | "valid": [ 6 | { 7 | "description": "True", 8 | "canonical_bson": "090000000862000100", 9 | "canonical_extjson": "{\"b\" : true}" 10 | }, 11 | { 12 | "description": "False", 13 | "canonical_bson": "090000000862000000", 14 | "canonical_extjson": "{\"b\" : false}" 15 | } 16 | ], 17 | "decodeErrors": [ 18 | { 19 | "description": "Invalid boolean value of 2", 20 | "bson": "090000000862000200" 21 | }, 22 | { 23 | "description": "Invalid boolean value of -1", 24 | "bson": "09000000086200FF00" 25 | } 26 | ] 27 | } 28 | -------------------------------------------------------------------------------- /src/tests/spec/json/bson-corpus/code.json: -------------------------------------------------------------------------------- 1 | { 2 | "description": "Javascript Code", 3 | "bson_type": "0x0D", 4 | "test_key": "a", 5 | "valid": [ 6 | { 7 | "description": "Empty string", 8 | "canonical_bson": "0D0000000D6100010000000000", 9 | "canonical_extjson": "{\"a\" : {\"$code\" : \"\"}}" 10 | }, 11 | { 12 | "description": "Single character", 13 | "canonical_bson": "0E0000000D610002000000620000", 14 | "canonical_extjson": "{\"a\" : {\"$code\" : \"b\"}}" 15 | }, 16 | { 17 | "description": "Multi-character", 18 | "canonical_bson": "190000000D61000D0000006162616261626162616261620000", 19 | "canonical_extjson": "{\"a\" : {\"$code\" : \"abababababab\"}}" 20 | }, 21 | { 22 | "description": "two-byte UTF-8 (\u00e9)", 23 | "canonical_bson": "190000000D61000D000000C3A9C3A9C3A9C3A9C3A9C3A90000", 24 | "canonical_extjson": "{\"a\" : {\"$code\" : \"\\u00e9\\u00e9\\u00e9\\u00e9\\u00e9\\u00e9\"}}" 25 | }, 26 | { 27 | "description": "three-byte UTF-8 (\u2606)", 28 | "canonical_bson": "190000000D61000D000000E29886E29886E29886E298860000", 29 | "canonical_extjson": "{\"a\" : {\"$code\" : \"\\u2606\\u2606\\u2606\\u2606\"}}" 30 | }, 31 | { 32 | "description": "Embedded nulls", 33 | "canonical_bson": "190000000D61000D0000006162006261620062616261620000", 34 | "canonical_extjson": "{\"a\" : {\"$code\" : \"ab\\u0000bab\\u0000babab\"}}" 35 | } 36 | ], 37 | "decodeErrors": [ 38 | { 39 | "description": "bad code string length: 0 (but no 0x00 either)", 40 | "bson": "0C0000000D61000000000000" 41 | }, 42 | { 43 | "description": "bad code string length: -1", 44 | "bson": "0C0000000D6100FFFFFFFF00" 45 | }, 46 | { 47 | "description": "bad code string length: eats terminator", 48 | "bson": "100000000D6100050000006200620000" 49 | }, 50 | { 51 | "description": "bad code string length: longer than rest of document", 52 | "bson": "120000000D00FFFFFF00666F6F6261720000" 53 | }, 54 | { 55 | "description": "code string is not null-terminated", 56 | "bson": "100000000D610004000000616263FF00" 57 | }, 58 | { 59 | "description": "empty code string, but extra null", 60 | "bson": "0E0000000D610001000000000000" 61 | }, 62 | { 63 | "description": "invalid UTF-8", 64 | "bson": "0E0000000D610002000000E90000" 65 | } 66 | ] 67 | } 68 | 
-------------------------------------------------------------------------------- /src/tests/spec/json/bson-corpus/code_w_scope.json: -------------------------------------------------------------------------------- 1 | { 2 | "description": "Javascript Code with Scope", 3 | "bson_type": "0x0F", 4 | "test_key": "a", 5 | "valid": [ 6 | { 7 | "description": "Empty code string, empty scope", 8 | "canonical_bson": "160000000F61000E0000000100000000050000000000", 9 | "canonical_extjson": "{\"a\" : {\"$code\" : \"\", \"$scope\" : {}}}" 10 | }, 11 | { 12 | "description": "Non-empty code string, empty scope", 13 | "canonical_bson": "1A0000000F610012000000050000006162636400050000000000", 14 | "canonical_extjson": "{\"a\" : {\"$code\" : \"abcd\", \"$scope\" : {}}}" 15 | }, 16 | { 17 | "description": "Empty code string, non-empty scope", 18 | "canonical_bson": "1D0000000F61001500000001000000000C000000107800010000000000", 19 | "canonical_extjson": "{\"a\" : {\"$code\" : \"\", \"$scope\" : {\"x\" : {\"$numberInt\": \"1\"}}}}" 20 | }, 21 | { 22 | "description": "Non-empty code string and non-empty scope", 23 | "canonical_bson": "210000000F6100190000000500000061626364000C000000107800010000000000", 24 | "canonical_extjson": "{\"a\" : {\"$code\" : \"abcd\", \"$scope\" : {\"x\" : {\"$numberInt\": \"1\"}}}}" 25 | }, 26 | { 27 | "description": "Unicode and embedded null in code string, empty scope", 28 | "canonical_bson": "1A0000000F61001200000005000000C3A9006400050000000000", 29 | "canonical_extjson": "{\"a\" : {\"$code\" : \"\\u00e9\\u0000d\", \"$scope\" : {}}}" 30 | } 31 | ], 32 | "decodeErrors": [ 33 | { 34 | "description": "field length zero", 35 | "bson": "280000000F6100000000000500000061626364001300000010780001000000107900010000000000" 36 | }, 37 | { 38 | "description": "field length negative", 39 | "bson": "280000000F6100FFFFFFFF0500000061626364001300000010780001000000107900010000000000" 40 | }, 41 | { 42 | "description": "field length too short (less than minimum size)", 43 | "bson": "160000000F61000D0000000100000000050000000000" 44 | }, 45 | { 46 | "description": "field length too short (truncates scope)", 47 | "bson": "280000000F61001F0000000500000061626364001300000010780001000000107900010000000000" 48 | }, 49 | { 50 | "description": "field length too long (clips outer doc)", 51 | "bson": "280000000F6100210000000500000061626364001300000010780001000000107900010000000000" 52 | }, 53 | { 54 | "description": "field length too long (longer than outer doc)", 55 | "bson": "280000000F6100FF0000000500000061626364001300000010780001000000107900010000000000" 56 | }, 57 | { 58 | "description": "bad code string: length too short", 59 | "bson": "280000000F6100200000000400000061626364001300000010780001000000107900010000000000" 60 | }, 61 | { 62 | "description": "bad code string: length too long (clips scope)", 63 | "bson": "280000000F6100200000000600000061626364001300000010780001000000107900010000000000" 64 | }, 65 | { 66 | "description": "bad code string: negative length", 67 | "bson": "280000000F610020000000FFFFFFFF61626364001300000010780001000000107900010000000000" 68 | }, 69 | { 70 | "description": "bad code string: length longer than field", 71 | "bson": "280000000F610020000000FF00000061626364001300000010780001000000107900010000000000" 72 | }, 73 | { 74 | "description": "bad scope doc (field has bad string length)", 75 | "bson": "1C0000000F001500000001000000000C000000020000000000000000" 76 | } 77 | ] 78 | } 79 | -------------------------------------------------------------------------------- 
/src/tests/spec/json/bson-corpus/datetime.json: -------------------------------------------------------------------------------- 1 | { 2 | "description": "DateTime", 3 | "bson_type": "0x09", 4 | "test_key": "a", 5 | "valid": [ 6 | { 7 | "description": "epoch", 8 | "canonical_bson": "10000000096100000000000000000000", 9 | "relaxed_extjson": "{\"a\" : {\"$date\" : \"1970-01-01T00:00:00Z\"}}", 10 | "canonical_extjson": "{\"a\" : {\"$date\" : {\"$numberLong\" : \"0\"}}}" 11 | }, 12 | { 13 | "description": "positive ms", 14 | "canonical_bson": "10000000096100C5D8D6CC3B01000000", 15 | "relaxed_extjson": "{\"a\" : {\"$date\" : \"2012-12-24T12:15:30.501Z\"}}", 16 | "canonical_extjson": "{\"a\" : {\"$date\" : {\"$numberLong\" : \"1356351330501\"}}}" 17 | }, 18 | { 19 | "description": "negative", 20 | "canonical_bson": "10000000096100C33CE7B9BDFFFFFF00", 21 | "relaxed_extjson": "{\"a\" : {\"$date\" : {\"$numberLong\" : \"-284643869501\"}}}", 22 | "canonical_extjson": "{\"a\" : {\"$date\" : {\"$numberLong\" : \"-284643869501\"}}}" 23 | }, 24 | { 25 | "description" : "Y10K", 26 | "canonical_bson" : "1000000009610000DC1FD277E6000000", 27 | "canonical_extjson" : "{\"a\":{\"$date\":{\"$numberLong\":\"253402300800000\"}}}" 28 | }, 29 | { 30 | "description": "leading zero ms", 31 | "canonical_bson": "10000000096100D1D6D6CC3B01000000", 32 | "relaxed_extjson": "{\"a\" : {\"$date\" : \"2012-12-24T12:15:30.001Z\"}}", 33 | "canonical_extjson": "{\"a\" : {\"$date\" : {\"$numberLong\" : \"1356351330001\"}}}" 34 | } 35 | ], 36 | "decodeErrors": [ 37 | { 38 | "description": "datetime field truncated", 39 | "bson": "0C0000000961001234567800" 40 | } 41 | ] 42 | } 43 | -------------------------------------------------------------------------------- /src/tests/spec/json/bson-corpus/dbpointer.json: -------------------------------------------------------------------------------- 1 | { 2 | "description": "DBPointer type (deprecated)", 3 | "bson_type": "0x0C", 4 | "deprecated": true, 5 | "test_key": "a", 6 | "valid": [ 7 | { 8 | "description": "DBpointer", 9 | "canonical_bson": "1A0000000C610002000000620056E1FC72E0C917E9C471416100", 10 | "canonical_extjson": "{\"a\": {\"$dbPointer\": {\"$ref\": \"b\", \"$id\": {\"$oid\": \"56e1fc72e0c917e9c4714161\"}}}}", 11 | "converted_bson": "2a00000003610022000000022472656600020000006200072469640056e1fc72e0c917e9c47141610000", 12 | "converted_extjson": "{\"a\": {\"$ref\": \"b\", \"$id\": {\"$oid\": \"56e1fc72e0c917e9c4714161\"}}}" 13 | }, 14 | { 15 | "description": "DBpointer with opposite key order", 16 | "canonical_bson": "1A0000000C610002000000620056E1FC72E0C917E9C471416100", 17 | "canonical_extjson": "{\"a\": {\"$dbPointer\": {\"$ref\": \"b\", \"$id\": {\"$oid\": \"56e1fc72e0c917e9c4714161\"}}}}", 18 | "degenerate_extjson": "{\"a\": {\"$dbPointer\": {\"$id\": {\"$oid\": \"56e1fc72e0c917e9c4714161\"}, \"$ref\": \"b\"}}}", 19 | "converted_bson": "2a00000003610022000000022472656600020000006200072469640056e1fc72e0c917e9c47141610000", 20 | "converted_extjson": "{\"a\": {\"$ref\": \"b\", \"$id\": {\"$oid\": \"56e1fc72e0c917e9c4714161\"}}}" 21 | }, 22 | { 23 | "description": "With two-byte UTF-8", 24 | "canonical_bson": "1B0000000C610003000000C3A90056E1FC72E0C917E9C471416100", 25 | "canonical_extjson": "{\"a\": {\"$dbPointer\": {\"$ref\": \"é\", \"$id\": {\"$oid\": \"56e1fc72e0c917e9c4714161\"}}}}", 26 | "converted_bson": "2B0000000361002300000002247265660003000000C3A900072469640056E1FC72E0C917E9C47141610000", 27 | "converted_extjson": "{\"a\": {\"$ref\": \"é\", \"$id\": {\"$oid\": 
\"56e1fc72e0c917e9c4714161\"}}}" 28 | } 29 | ], 30 | "decodeErrors": [ 31 | { 32 | "description": "String with negative length", 33 | "bson": "1A0000000C6100FFFFFFFF620056E1FC72E0C917E9C471416100" 34 | }, 35 | { 36 | "description": "String with zero length", 37 | "bson": "1A0000000C610000000000620056E1FC72E0C917E9C471416100" 38 | }, 39 | { 40 | "description": "String not null terminated", 41 | "bson": "1A0000000C610002000000626256E1FC72E0C917E9C471416100" 42 | }, 43 | { 44 | "description": "short OID (less than minimum length for field)", 45 | "bson": "160000000C61000300000061620056E1FC72E0C91700" 46 | }, 47 | { 48 | "description": "short OID (greater than minimum, but truncated)", 49 | "bson": "1A0000000C61000300000061620056E1FC72E0C917E9C4716100" 50 | }, 51 | { 52 | "description": "String with bad UTF-8", 53 | "bson": "1A0000000C610002000000E90056E1FC72E0C917E9C471416100" 54 | } 55 | ] 56 | } 57 | -------------------------------------------------------------------------------- /src/tests/spec/json/bson-corpus/dbref.json: -------------------------------------------------------------------------------- 1 | { 2 | "description": "Document type (DBRef sub-documents)", 3 | "bson_type": "0x03", 4 | "valid": [ 5 | { 6 | "description": "DBRef", 7 | "canonical_bson": "37000000036462726566002b0000000224726566000b000000636f6c6c656374696f6e00072469640058921b3e6e32ab156a22b59e0000", 8 | "canonical_extjson": "{\"dbref\": {\"$ref\": \"collection\", \"$id\": {\"$oid\": \"58921b3e6e32ab156a22b59e\"}}}" 9 | }, 10 | { 11 | "description": "DBRef with database", 12 | "canonical_bson": "4300000003646272656600370000000224726566000b000000636f6c6c656374696f6e00072469640058921b3e6e32ab156a22b59e0224646200030000006462000000", 13 | "canonical_extjson": "{\"dbref\": {\"$ref\": \"collection\", \"$id\": {\"$oid\": \"58921b3e6e32ab156a22b59e\"}, \"$db\": \"db\"}}" 14 | }, 15 | { 16 | "description": "DBRef with database and additional fields", 17 | "canonical_bson": "48000000036462726566003c0000000224726566000b000000636f6c6c656374696f6e0010246964002a00000002246462000300000064620002666f6f0004000000626172000000", 18 | "canonical_extjson": "{\"dbref\": {\"$ref\": \"collection\", \"$id\": {\"$numberInt\": \"42\"}, \"$db\": \"db\", \"foo\": \"bar\"}}" 19 | }, 20 | { 21 | "description": "DBRef with additional fields", 22 | "canonical_bson": "4400000003646272656600380000000224726566000b000000636f6c6c656374696f6e00072469640058921b3e6e32ab156a22b59e02666f6f0004000000626172000000", 23 | "canonical_extjson": "{\"dbref\": {\"$ref\": \"collection\", \"$id\": {\"$oid\": \"58921b3e6e32ab156a22b59e\"}, \"foo\": \"bar\"}}" 24 | }, 25 | { 26 | "description": "Document with key names similar to those of a DBRef", 27 | "canonical_bson": "3e0000000224726566000c0000006e6f742d612d646272656600072469640058921b3e6e32ab156a22b59e022462616e616e6100050000007065656c0000", 28 | "canonical_extjson": "{\"$ref\": \"not-a-dbref\", \"$id\": {\"$oid\": \"58921b3e6e32ab156a22b59e\"}, \"$banana\": \"peel\"}" 29 | }, 30 | { 31 | "description": "DBRef with additional dollar-prefixed and dotted fields", 32 | "canonical_bson": "48000000036462726566003c0000000224726566000b000000636f6c6c656374696f6e00072469640058921b3e6e32ab156a22b59e10612e62000100000010246300010000000000", 33 | "canonical_extjson": "{\"dbref\": {\"$ref\": \"collection\", \"$id\": {\"$oid\": \"58921b3e6e32ab156a22b59e\"}, \"a.b\": {\"$numberInt\": \"1\"}, \"$c\": {\"$numberInt\": \"1\"}}}" 34 | }, 35 | { 36 | "description": "Sub-document resembles DBRef but $id is missing", 37 | "canonical_bson": 
"26000000036462726566001a0000000224726566000b000000636f6c6c656374696f6e000000", 38 | "canonical_extjson": "{\"dbref\": {\"$ref\": \"collection\"}}" 39 | }, 40 | { 41 | "description": "Sub-document resembles DBRef but $ref is not a string", 42 | "canonical_bson": "2c000000036462726566002000000010247265660001000000072469640058921b3e6e32ab156a22b59e0000", 43 | "canonical_extjson": "{\"dbref\": {\"$ref\": {\"$numberInt\": \"1\"}, \"$id\": {\"$oid\": \"58921b3e6e32ab156a22b59e\"}}}" 44 | }, 45 | { 46 | "description": "Sub-document resembles DBRef but $db is not a string", 47 | "canonical_bson": "4000000003646272656600340000000224726566000b000000636f6c6c656374696f6e00072469640058921b3e6e32ab156a22b59e1024646200010000000000", 48 | "canonical_extjson": "{\"dbref\": {\"$ref\": \"collection\", \"$id\": {\"$oid\": \"58921b3e6e32ab156a22b59e\"}, \"$db\": {\"$numberInt\": \"1\"}}}" 49 | } 50 | ] 51 | } 52 | -------------------------------------------------------------------------------- /src/tests/spec/json/bson-corpus/decimal128-4.json: -------------------------------------------------------------------------------- 1 | { 2 | "description": "Decimal128", 3 | "bson_type": "0x13", 4 | "test_key": "d", 5 | "valid": [ 6 | { 7 | "description": "[basx023] conform to rules and exponent will be in permitted range).", 8 | "canonical_bson": "1800000013640001000000000000000000000000003EB000", 9 | "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"-0.1\"}}" 10 | }, 11 | 12 | { 13 | "description": "[basx045] strings without E cannot generate E in result", 14 | "canonical_bson": "1800000013640003000000000000000000000000003A3000", 15 | "degenerate_extjson": "{\"d\" : {\"$numberDecimal\" : \"+0.003\"}}", 16 | "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0.003\"}}" 17 | }, 18 | { 19 | "description": "[basx610] Zeros", 20 | "canonical_bson": "1800000013640000000000000000000000000000003E3000", 21 | "degenerate_extjson": "{\"d\" : {\"$numberDecimal\" : \".0\"}}", 22 | "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0.0\"}}" 23 | }, 24 | { 25 | "description": "[basx612] Zeros", 26 | "canonical_bson": "1800000013640000000000000000000000000000003EB000", 27 | "degenerate_extjson": "{\"d\" : {\"$numberDecimal\" : \"-.0\"}}", 28 | "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"-0.0\"}}" 29 | }, 30 | { 31 | "description": "[basx043] strings without E cannot generate E in result", 32 | "canonical_bson": "18000000136400FC040000000000000000000000003C3000", 33 | "degenerate_extjson": "{\"d\" : {\"$numberDecimal\" : \"+12.76\"}}", 34 | "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"12.76\"}}" 35 | }, 36 | { 37 | "description": "[basx055] strings without E cannot generate E in result", 38 | "canonical_bson": "180000001364000500000000000000000000000000303000", 39 | "degenerate_extjson": "{\"d\" : {\"$numberDecimal\" : \"0.00000005\"}}", 40 | "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"5E-8\"}}" 41 | }, 42 | { 43 | "description": "[basx054] strings without E cannot generate E in result", 44 | "canonical_bson": "180000001364000500000000000000000000000000323000", 45 | "degenerate_extjson": "{\"d\" : {\"$numberDecimal\" : \"0.0000005\"}}", 46 | "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"5E-7\"}}" 47 | }, 48 | { 49 | "description": "[basx052] strings without E cannot generate E in result", 50 | "canonical_bson": "180000001364000500000000000000000000000000343000", 51 | "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0.000005\"}}" 52 | }, 53 | { 54 | "description": "[basx051] 
strings without E cannot generate E in result", 55 | "canonical_bson": "180000001364000500000000000000000000000000363000", 56 | "degenerate_extjson": "{\"d\" : {\"$numberDecimal\" : \"00.00005\"}}", 57 | "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0.00005\"}}" 58 | }, 59 | { 60 | "description": "[basx050] strings without E cannot generate E in result", 61 | "canonical_bson": "180000001364000500000000000000000000000000383000", 62 | "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0.0005\"}}" 63 | }, 64 | { 65 | "description": "[basx047] strings without E cannot generate E in result", 66 | "canonical_bson": "1800000013640005000000000000000000000000003E3000", 67 | "degenerate_extjson": "{\"d\" : {\"$numberDecimal\" : \".5\"}}", 68 | "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0.5\"}}" 69 | }, 70 | { 71 | "description": "[dqbsr431] check rounding modes heeded (Rounded)", 72 | "canonical_bson": "1800000013640099761CC7B548F377DC80A131C836FE2F00", 73 | "degenerate_extjson": "{\"d\" : {\"$numberDecimal\" : \"1.1111111111111111111111111111123450\"}}", 74 | "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1.111111111111111111111111111112345\"}}" 75 | }, 76 | { 77 | "description": "OK2", 78 | "canonical_bson": "18000000136400000000000A5BC138938D44C64D31FC2F00", 79 | "degenerate_extjson": "{\"d\" : {\"$numberDecimal\" : \".100000000000000000000000000000000000000000000000000000000000\"}}", 80 | "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0.1000000000000000000000000000000000\"}}" 81 | } 82 | ], 83 | "parseErrors": [ 84 | { 85 | "description": "[basx564] Near-specials (Conversion_syntax)", 86 | "string": "Infi" 87 | }, 88 | { 89 | "description": "[basx565] Near-specials (Conversion_syntax)", 90 | "string": "Infin" 91 | }, 92 | { 93 | "description": "[basx566] Near-specials (Conversion_syntax)", 94 | "string": "Infini" 95 | }, 96 | { 97 | "description": "[basx567] Near-specials (Conversion_syntax)", 98 | "string": "Infinit" 99 | }, 100 | { 101 | "description": "[basx568] Near-specials (Conversion_syntax)", 102 | "string": "-Infinit" 103 | }, 104 | { 105 | "description": "[basx590] some baddies with dots and Es and dots and specials (Conversion_syntax)", 106 | "string": ".Infinity" 107 | }, 108 | { 109 | "description": "[basx562] Near-specials (Conversion_syntax)", 110 | "string": "NaNq" 111 | }, 112 | { 113 | "description": "[basx563] Near-specials (Conversion_syntax)", 114 | "string": "NaNs" 115 | }, 116 | { 117 | "description": "[dqbas939] overflow results at different rounding modes (Overflow & Inexact & Rounded)", 118 | "string": "-7e10000" 119 | }, 120 | { 121 | "description": "[dqbsr534] negatives (Rounded & Inexact)", 122 | "string": "-1.11111111111111111111111111111234650" 123 | }, 124 | { 125 | "description": "[dqbsr535] negatives (Rounded & Inexact)", 126 | "string": "-1.11111111111111111111111111111234551" 127 | }, 128 | { 129 | "description": "[dqbsr533] negatives (Rounded & Inexact)", 130 | "string": "-1.11111111111111111111111111111234550" 131 | }, 132 | { 133 | "description": "[dqbsr532] negatives (Rounded & Inexact)", 134 | "string": "-1.11111111111111111111111111111234549" 135 | }, 136 | { 137 | "description": "[dqbsr432] check rounding modes heeded (Rounded & Inexact)", 138 | "string": "1.11111111111111111111111111111234549" 139 | }, 140 | { 141 | "description": "[dqbsr433] check rounding modes heeded (Rounded & Inexact)", 142 | "string": "1.11111111111111111111111111111234550" 143 | }, 144 | { 145 | "description": "[dqbsr435] check rounding 
modes heeded (Rounded & Inexact)", 146 | "string": "1.11111111111111111111111111111234551" 147 | }, 148 | { 149 | "description": "[dqbsr434] check rounding modes heeded (Rounded & Inexact)", 150 | "string": "1.11111111111111111111111111111234650" 151 | }, 152 | { 153 | "description": "[dqbas938] overflow results at different rounding modes (Overflow & Inexact & Rounded)", 154 | "string": "7e10000" 155 | }, 156 | { 157 | "description": "Inexact rounding#1", 158 | "string": "100000000000000000000000000000000000000000000000000000000001" 159 | }, 160 | { 161 | "description": "Inexact rounding#2", 162 | "string": "1E-6177" 163 | } 164 | ] 165 | } 166 | -------------------------------------------------------------------------------- /src/tests/spec/json/bson-corpus/decimal128-6.json: -------------------------------------------------------------------------------- 1 | { 2 | "description": "Decimal128", 3 | "bson_type": "0x13", 4 | "test_key": "d", 5 | "parseErrors": [ 6 | { 7 | "description": "Incomplete Exponent", 8 | "string": "1e" 9 | }, 10 | { 11 | "description": "Exponent at the beginning", 12 | "string": "E01" 13 | }, 14 | { 15 | "description": "Just a decimal place", 16 | "string": "." 17 | }, 18 | { 19 | "description": "2 decimal places", 20 | "string": "..3" 21 | }, 22 | { 23 | "description": "2 decimal places", 24 | "string": ".13.3" 25 | }, 26 | { 27 | "description": "2 decimal places", 28 | "string": "1..3" 29 | }, 30 | { 31 | "description": "2 decimal places", 32 | "string": "1.3.4" 33 | }, 34 | { 35 | "description": "2 decimal places", 36 | "string": "1.34." 37 | }, 38 | { 39 | "description": "Decimal with no digits", 40 | "string": ".e" 41 | }, 42 | { 43 | "description": "2 signs", 44 | "string": "+-32.4" 45 | }, 46 | { 47 | "description": "2 signs", 48 | "string": "-+32.4" 49 | }, 50 | { 51 | "description": "2 negative signs", 52 | "string": "--32.4" 53 | }, 54 | { 55 | "description": "2 negative signs", 56 | "string": "-32.-4" 57 | }, 58 | { 59 | "description": "End in negative sign", 60 | "string": "32.0-" 61 | }, 62 | { 63 | "description": "2 negative signs", 64 | "string": "32.4E--21" 65 | }, 66 | { 67 | "description": "2 negative signs", 68 | "string": "32.4E-2-1" 69 | }, 70 | { 71 | "description": "2 signs", 72 | "string": "32.4E+-21" 73 | }, 74 | { 75 | "description": "Empty string", 76 | "string": "" 77 | }, 78 | { 79 | "description": "leading white space positive number", 80 | "string": " 1" 81 | }, 82 | { 83 | "description": "leading white space negative number", 84 | "string": " -1" 85 | }, 86 | { 87 | "description": "trailing white space", 88 | "string": "1 " 89 | }, 90 | { 91 | "description": "Invalid", 92 | "string": "E" 93 | }, 94 | { 95 | "description": "Invalid", 96 | "string": "invalid" 97 | }, 98 | { 99 | "description": "Invalid", 100 | "string": "i" 101 | }, 102 | { 103 | "description": "Invalid", 104 | "string": "in" 105 | }, 106 | { 107 | "description": "Invalid", 108 | "string": "-in" 109 | }, 110 | { 111 | "description": "Invalid", 112 | "string": "Na" 113 | }, 114 | { 115 | "description": "Invalid", 116 | "string": "-Na" 117 | }, 118 | { 119 | "description": "Invalid", 120 | "string": "1.23abc" 121 | }, 122 | { 123 | "description": "Invalid", 124 | "string": "1.23abcE+02" 125 | }, 126 | { 127 | "description": "Invalid", 128 | "string": "1.23E+0aabs2" 129 | } 130 | ] 131 | } 132 | -------------------------------------------------------------------------------- /src/tests/spec/json/bson-corpus/document.json: 
-------------------------------------------------------------------------------- 1 | { 2 | "description": "Document type (sub-documents)", 3 | "bson_type": "0x03", 4 | "test_key": "x", 5 | "valid": [ 6 | { 7 | "description": "Empty subdoc", 8 | "canonical_bson": "0D000000037800050000000000", 9 | "canonical_extjson": "{\"x\" : {}}" 10 | }, 11 | { 12 | "description": "Empty-string key subdoc", 13 | "canonical_bson": "150000000378000D00000002000200000062000000", 14 | "canonical_extjson": "{\"x\" : {\"\" : \"b\"}}" 15 | }, 16 | { 17 | "description": "Single-character key subdoc", 18 | "canonical_bson": "160000000378000E0000000261000200000062000000", 19 | "canonical_extjson": "{\"x\" : {\"a\" : \"b\"}}" 20 | }, 21 | { 22 | "description": "Dollar-prefixed key in sub-document", 23 | "canonical_bson": "170000000378000F000000022461000200000062000000", 24 | "canonical_extjson": "{\"x\" : {\"$a\" : \"b\"}}" 25 | }, 26 | { 27 | "description": "Dollar as key in sub-document", 28 | "canonical_bson": "160000000378000E0000000224000200000061000000", 29 | "canonical_extjson": "{\"x\" : {\"$\" : \"a\"}}" 30 | }, 31 | { 32 | "description": "Dotted key in sub-document", 33 | "canonical_bson": "180000000378001000000002612E62000200000063000000", 34 | "canonical_extjson": "{\"x\" : {\"a.b\" : \"c\"}}" 35 | }, 36 | { 37 | "description": "Dot as key in sub-document", 38 | "canonical_bson": "160000000378000E000000022E000200000061000000", 39 | "canonical_extjson": "{\"x\" : {\".\" : \"a\"}}" 40 | } 41 | ], 42 | "decodeErrors": [ 43 | { 44 | "description": "Subdocument length too long: eats outer terminator", 45 | "bson": "1800000003666F6F000F0000001062617200FFFFFF7F0000" 46 | }, 47 | { 48 | "description": "Subdocument length too short: leaks terminator", 49 | "bson": "1500000003666F6F000A0000000862617200010000" 50 | }, 51 | { 52 | "description": "Invalid subdocument: bad string length in field", 53 | "bson": "1C00000003666F6F001200000002626172000500000062617A000000" 54 | }, 55 | { 56 | "description": "Null byte in sub-document key", 57 | "bson": "150000000378000D00000010610000010000000000" 58 | } 59 | ] 60 | } 61 | -------------------------------------------------------------------------------- /src/tests/spec/json/bson-corpus/double.json: -------------------------------------------------------------------------------- 1 | { 2 | "description": "Double type", 3 | "bson_type": "0x01", 4 | "test_key": "d", 5 | "valid": [ 6 | { 7 | "description": "+1.0", 8 | "canonical_bson": "10000000016400000000000000F03F00", 9 | "canonical_extjson": "{\"d\" : {\"$numberDouble\": \"1.0\"}}", 10 | "relaxed_extjson": "{\"d\" : 1.0}" 11 | }, 12 | { 13 | "description": "-1.0", 14 | "canonical_bson": "10000000016400000000000000F0BF00", 15 | "canonical_extjson": "{\"d\" : {\"$numberDouble\": \"-1.0\"}}", 16 | "relaxed_extjson": "{\"d\" : -1.0}" 17 | }, 18 | { 19 | "description": "+1.0001220703125", 20 | "canonical_bson": "10000000016400000000008000F03F00", 21 | "canonical_extjson": "{\"d\" : {\"$numberDouble\": \"1.0001220703125\"}}", 22 | "relaxed_extjson": "{\"d\" : 1.0001220703125}" 23 | }, 24 | { 25 | "description": "-1.0001220703125", 26 | "canonical_bson": "10000000016400000000008000F0BF00", 27 | "canonical_extjson": "{\"d\" : {\"$numberDouble\": \"-1.0001220703125\"}}", 28 | "relaxed_extjson": "{\"d\" : -1.0001220703125}" 29 | }, 30 | { 31 | "description": "1.2345678921232E+18", 32 | "canonical_bson": "100000000164002a1bf5f41022b14300", 33 | "canonical_extjson": "{\"d\" : {\"$numberDouble\": \"1.2345678921232E+18\"}}", 34 | 
"relaxed_extjson": "{\"d\" : 1.2345678921232E+18}" 35 | }, 36 | { 37 | "description": "-1.2345678921232E+18", 38 | "canonical_bson": "100000000164002a1bf5f41022b1c300", 39 | "canonical_extjson": "{\"d\" : {\"$numberDouble\": \"-1.2345678921232E+18\"}}", 40 | "relaxed_extjson": "{\"d\" : -1.2345678921232E+18}" 41 | }, 42 | { 43 | "description": "0.0", 44 | "canonical_bson": "10000000016400000000000000000000", 45 | "canonical_extjson": "{\"d\" : {\"$numberDouble\": \"0.0\"}}", 46 | "relaxed_extjson": "{\"d\" : 0.0}" 47 | }, 48 | { 49 | "description": "-0.0", 50 | "canonical_bson": "10000000016400000000000000008000", 51 | "canonical_extjson": "{\"d\" : {\"$numberDouble\": \"-0.0\"}}", 52 | "relaxed_extjson": "{\"d\" : -0.0}" 53 | }, 54 | { 55 | "description": "NaN", 56 | "canonical_bson": "10000000016400000000000000F87F00", 57 | "canonical_extjson": "{\"d\": {\"$numberDouble\": \"NaN\"}}", 58 | "relaxed_extjson": "{\"d\": {\"$numberDouble\": \"NaN\"}}", 59 | "lossy": true 60 | }, 61 | { 62 | "description": "NaN with payload", 63 | "canonical_bson": "10000000016400120000000000F87F00", 64 | "canonical_extjson": "{\"d\": {\"$numberDouble\": \"NaN\"}}", 65 | "relaxed_extjson": "{\"d\": {\"$numberDouble\": \"NaN\"}}", 66 | "lossy": true 67 | }, 68 | { 69 | "description": "Inf", 70 | "canonical_bson": "10000000016400000000000000F07F00", 71 | "canonical_extjson": "{\"d\": {\"$numberDouble\": \"Infinity\"}}", 72 | "relaxed_extjson": "{\"d\": {\"$numberDouble\": \"Infinity\"}}" 73 | }, 74 | { 75 | "description": "-Inf", 76 | "canonical_bson": "10000000016400000000000000F0FF00", 77 | "canonical_extjson": "{\"d\": {\"$numberDouble\": \"-Infinity\"}}", 78 | "relaxed_extjson": "{\"d\": {\"$numberDouble\": \"-Infinity\"}}" 79 | } 80 | ], 81 | "decodeErrors": [ 82 | { 83 | "description": "double truncated", 84 | "bson": "0B0000000164000000F03F00" 85 | } 86 | ] 87 | } 88 | -------------------------------------------------------------------------------- /src/tests/spec/json/bson-corpus/int32.json: -------------------------------------------------------------------------------- 1 | { 2 | "description": "Int32 type", 3 | "bson_type": "0x10", 4 | "test_key": "i", 5 | "valid": [ 6 | { 7 | "description": "MinValue", 8 | "canonical_bson": "0C0000001069000000008000", 9 | "canonical_extjson": "{\"i\" : {\"$numberInt\": \"-2147483648\"}}", 10 | "relaxed_extjson": "{\"i\" : -2147483648}" 11 | }, 12 | { 13 | "description": "MaxValue", 14 | "canonical_bson": "0C000000106900FFFFFF7F00", 15 | "canonical_extjson": "{\"i\" : {\"$numberInt\": \"2147483647\"}}", 16 | "relaxed_extjson": "{\"i\" : 2147483647}" 17 | }, 18 | { 19 | "description": "-1", 20 | "canonical_bson": "0C000000106900FFFFFFFF00", 21 | "canonical_extjson": "{\"i\" : {\"$numberInt\": \"-1\"}}", 22 | "relaxed_extjson": "{\"i\" : -1}" 23 | }, 24 | { 25 | "description": "0", 26 | "canonical_bson": "0C0000001069000000000000", 27 | "canonical_extjson": "{\"i\" : {\"$numberInt\": \"0\"}}", 28 | "relaxed_extjson": "{\"i\" : 0}" 29 | }, 30 | { 31 | "description": "1", 32 | "canonical_bson": "0C0000001069000100000000", 33 | "canonical_extjson": "{\"i\" : {\"$numberInt\": \"1\"}}", 34 | "relaxed_extjson": "{\"i\" : 1}" 35 | } 36 | ], 37 | "decodeErrors": [ 38 | { 39 | "description": "Bad int32 field length", 40 | "bson": "090000001061000500" 41 | } 42 | ] 43 | } 44 | -------------------------------------------------------------------------------- /src/tests/spec/json/bson-corpus/int64.json: -------------------------------------------------------------------------------- 
1 | { 2 | "description": "Int64 type", 3 | "bson_type": "0x12", 4 | "test_key": "a", 5 | "valid": [ 6 | { 7 | "description": "MinValue", 8 | "canonical_bson": "10000000126100000000000000008000", 9 | "canonical_extjson": "{\"a\" : {\"$numberLong\" : \"-9223372036854775808\"}}", 10 | "relaxed_extjson": "{\"a\" : -9223372036854775808}" 11 | }, 12 | { 13 | "description": "MaxValue", 14 | "canonical_bson": "10000000126100FFFFFFFFFFFFFF7F00", 15 | "canonical_extjson": "{\"a\" : {\"$numberLong\" : \"9223372036854775807\"}}", 16 | "relaxed_extjson": "{\"a\" : 9223372036854775807}" 17 | }, 18 | { 19 | "description": "-1", 20 | "canonical_bson": "10000000126100FFFFFFFFFFFFFFFF00", 21 | "canonical_extjson": "{\"a\" : {\"$numberLong\" : \"-1\"}}", 22 | "relaxed_extjson": "{\"a\" : -1}" 23 | }, 24 | { 25 | "description": "0", 26 | "canonical_bson": "10000000126100000000000000000000", 27 | "canonical_extjson": "{\"a\" : {\"$numberLong\" : \"0\"}}", 28 | "relaxed_extjson": "{\"a\" : 0}" 29 | }, 30 | { 31 | "description": "1", 32 | "canonical_bson": "10000000126100010000000000000000", 33 | "canonical_extjson": "{\"a\" : {\"$numberLong\" : \"1\"}}", 34 | "relaxed_extjson": "{\"a\" : 1}" 35 | } 36 | ], 37 | "decodeErrors": [ 38 | { 39 | "description": "int64 field truncated", 40 | "bson": "0C0000001261001234567800" 41 | } 42 | ] 43 | } 44 | -------------------------------------------------------------------------------- /src/tests/spec/json/bson-corpus/maxkey.json: -------------------------------------------------------------------------------- 1 | { 2 | "description": "Maxkey type", 3 | "bson_type": "0x7F", 4 | "test_key": "a", 5 | "valid": [ 6 | { 7 | "description": "Maxkey", 8 | "canonical_bson": "080000007F610000", 9 | "canonical_extjson": "{\"a\" : {\"$maxKey\" : 1}}" 10 | } 11 | ] 12 | } 13 | -------------------------------------------------------------------------------- /src/tests/spec/json/bson-corpus/minkey.json: -------------------------------------------------------------------------------- 1 | { 2 | "description": "Minkey type", 3 | "bson_type": "0xFF", 4 | "test_key": "a", 5 | "valid": [ 6 | { 7 | "description": "Minkey", 8 | "canonical_bson": "08000000FF610000", 9 | "canonical_extjson": "{\"a\" : {\"$minKey\" : 1}}" 10 | } 11 | ] 12 | } 13 | -------------------------------------------------------------------------------- /src/tests/spec/json/bson-corpus/multi-type-deprecated.json: -------------------------------------------------------------------------------- 1 | { 2 | "description": "Multiple types within the same document", 3 | "bson_type": "0x00", 4 | "deprecated": true, 5 | "valid": [ 6 | { 7 | "description": "All BSON types", 8 | "canonical_bson": 
"38020000075F69640057E193D7A9CC81B4027498B50E53796D626F6C000700000073796D626F6C0002537472696E670007000000737472696E670010496E743332002A00000012496E743634002A0000000000000001446F75626C6500000000000000F0BF0542696E617279001000000003A34C38F7C3ABEDC8A37814A992AB8DB60542696E61727955736572446566696E656400050000008001020304050D436F6465000E00000066756E6374696F6E2829207B7D000F436F64655769746853636F7065001B0000000E00000066756E6374696F6E2829207B7D00050000000003537562646F63756D656E74001200000002666F6F0004000000626172000004417272617900280000001030000100000010310002000000103200030000001033000400000010340005000000001154696D657374616D7000010000002A0000000B5265676578007061747465726E0000094461746574696D6545706F6368000000000000000000094461746574696D65506F73697469766500FFFFFF7F00000000094461746574696D654E656761746976650000000080FFFFFFFF085472756500010846616C736500000C4442506F696E746572000B000000636F6C6C656374696F6E0057E193D7A9CC81B4027498B1034442526566003D0000000224726566000B000000636F6C6C656374696F6E00072469640057FD71E96E32AB4225B723FB02246462000900000064617461626173650000FF4D696E6B6579007F4D61786B6579000A4E756C6C0006556E646566696E65640000", 9 | "converted_bson": "48020000075f69640057e193d7a9cc81b4027498b50253796d626f6c000700000073796d626f6c0002537472696e670007000000737472696e670010496e743332002a00000012496e743634002a0000000000000001446f75626c6500000000000000f0bf0542696e617279001000000003a34c38f7c3abedc8a37814a992ab8db60542696e61727955736572446566696e656400050000008001020304050d436f6465000e00000066756e6374696f6e2829207b7d000f436f64655769746853636f7065001b0000000e00000066756e6374696f6e2829207b7d00050000000003537562646f63756d656e74001200000002666f6f0004000000626172000004417272617900280000001030000100000010310002000000103200030000001033000400000010340005000000001154696d657374616d7000010000002a0000000b5265676578007061747465726e0000094461746574696d6545706f6368000000000000000000094461746574696d65506f73697469766500ffffff7f00000000094461746574696d654e656761746976650000000080ffffffff085472756500010846616c73650000034442506f696e746572002b0000000224726566000b000000636f6c6c656374696f6e00072469640057e193d7a9cc81b4027498b100034442526566003d0000000224726566000b000000636f6c6c656374696f6e00072469640057fd71e96e32ab4225b723fb02246462000900000064617461626173650000ff4d696e6b6579007f4d61786b6579000a4e756c6c000a556e646566696e65640000", 10 | "canonical_extjson": "{\"_id\": {\"$oid\": \"57e193d7a9cc81b4027498b5\"}, \"Symbol\": {\"$symbol\": \"symbol\"}, \"String\": \"string\", \"Int32\": {\"$numberInt\": \"42\"}, \"Int64\": {\"$numberLong\": \"42\"}, \"Double\": {\"$numberDouble\": \"-1.0\"}, \"Binary\": { \"$binary\" : {\"base64\": \"o0w498Or7cijeBSpkquNtg==\", \"subType\": \"03\"}}, \"BinaryUserDefined\": { \"$binary\" : {\"base64\": \"AQIDBAU=\", \"subType\": \"80\"}}, \"Code\": {\"$code\": \"function() {}\"}, \"CodeWithScope\": {\"$code\": \"function() {}\", \"$scope\": {}}, \"Subdocument\": {\"foo\": \"bar\"}, \"Array\": [{\"$numberInt\": \"1\"}, {\"$numberInt\": \"2\"}, {\"$numberInt\": \"3\"}, {\"$numberInt\": \"4\"}, {\"$numberInt\": \"5\"}], \"Timestamp\": {\"$timestamp\": {\"t\": 42, \"i\": 1}}, \"Regex\": {\"$regularExpression\": {\"pattern\": \"pattern\", \"options\": \"\"}}, \"DatetimeEpoch\": {\"$date\": {\"$numberLong\": \"0\"}}, \"DatetimePositive\": {\"$date\": {\"$numberLong\": \"2147483647\"}}, \"DatetimeNegative\": {\"$date\": {\"$numberLong\": \"-2147483648\"}}, \"True\": true, \"False\": false, \"DBPointer\": {\"$dbPointer\": {\"$ref\": \"collection\", \"$id\": {\"$oid\": \"57e193d7a9cc81b4027498b1\"}}}, 
\"DBRef\": {\"$ref\": \"collection\", \"$id\": {\"$oid\": \"57fd71e96e32ab4225b723fb\"}, \"$db\": \"database\"}, \"Minkey\": {\"$minKey\": 1}, \"Maxkey\": {\"$maxKey\": 1}, \"Null\": null, \"Undefined\": {\"$undefined\": true}}", 11 | "converted_extjson": "{\"_id\": {\"$oid\": \"57e193d7a9cc81b4027498b5\"}, \"Symbol\": \"symbol\", \"String\": \"string\", \"Int32\": {\"$numberInt\": \"42\"}, \"Int64\": {\"$numberLong\": \"42\"}, \"Double\": {\"$numberDouble\": \"-1.0\"}, \"Binary\": { \"$binary\" : {\"base64\": \"o0w498Or7cijeBSpkquNtg==\", \"subType\": \"03\"}}, \"BinaryUserDefined\": { \"$binary\" : {\"base64\": \"AQIDBAU=\", \"subType\": \"80\"}}, \"Code\": {\"$code\": \"function() {}\"}, \"CodeWithScope\": {\"$code\": \"function() {}\", \"$scope\": {}}, \"Subdocument\": {\"foo\": \"bar\"}, \"Array\": [{\"$numberInt\": \"1\"}, {\"$numberInt\": \"2\"}, {\"$numberInt\": \"3\"}, {\"$numberInt\": \"4\"}, {\"$numberInt\": \"5\"}], \"Timestamp\": {\"$timestamp\": {\"t\": 42, \"i\": 1}}, \"Regex\": {\"$regularExpression\": {\"pattern\": \"pattern\", \"options\": \"\"}}, \"DatetimeEpoch\": {\"$date\": {\"$numberLong\": \"0\"}}, \"DatetimePositive\": {\"$date\": {\"$numberLong\": \"2147483647\"}}, \"DatetimeNegative\": {\"$date\": {\"$numberLong\": \"-2147483648\"}}, \"True\": true, \"False\": false, \"DBPointer\": {\"$ref\": \"collection\", \"$id\": {\"$oid\": \"57e193d7a9cc81b4027498b1\"}}, \"DBRef\": {\"$ref\": \"collection\", \"$id\": {\"$oid\": \"57fd71e96e32ab4225b723fb\"}, \"$db\": \"database\"}, \"Minkey\": {\"$minKey\": 1}, \"Maxkey\": {\"$maxKey\": 1}, \"Null\": null, \"Undefined\": null}" 12 | } 13 | ] 14 | } 15 | 16 | -------------------------------------------------------------------------------- /src/tests/spec/json/bson-corpus/multi-type.json: -------------------------------------------------------------------------------- 1 | { 2 | "description": "Multiple types within the same document", 3 | "bson_type": "0x00", 4 | "valid": [ 5 | { 6 | "description": "All BSON types", 7 | "canonical_bson": "F4010000075F69640057E193D7A9CC81B4027498B502537472696E670007000000737472696E670010496E743332002A00000012496E743634002A0000000000000001446F75626C6500000000000000F0BF0542696E617279001000000003A34C38F7C3ABEDC8A37814A992AB8DB60542696E61727955736572446566696E656400050000008001020304050D436F6465000E00000066756E6374696F6E2829207B7D000F436F64655769746853636F7065001B0000000E00000066756E6374696F6E2829207B7D00050000000003537562646F63756D656E74001200000002666F6F0004000000626172000004417272617900280000001030000100000010310002000000103200030000001033000400000010340005000000001154696D657374616D7000010000002A0000000B5265676578007061747465726E0000094461746574696D6545706F6368000000000000000000094461746574696D65506F73697469766500FFFFFF7F00000000094461746574696D654E656761746976650000000080FFFFFFFF085472756500010846616C73650000034442526566003D0000000224726566000B000000636F6C6C656374696F6E00072469640057FD71E96E32AB4225B723FB02246462000900000064617461626173650000FF4D696E6B6579007F4D61786B6579000A4E756C6C0000", 8 | "canonical_extjson": "{\"_id\": {\"$oid\": \"57e193d7a9cc81b4027498b5\"}, \"String\": \"string\", \"Int32\": {\"$numberInt\": \"42\"}, \"Int64\": {\"$numberLong\": \"42\"}, \"Double\": {\"$numberDouble\": \"-1.0\"}, \"Binary\": { \"$binary\" : {\"base64\": \"o0w498Or7cijeBSpkquNtg==\", \"subType\": \"03\"}}, \"BinaryUserDefined\": { \"$binary\" : {\"base64\": \"AQIDBAU=\", \"subType\": \"80\"}}, \"Code\": {\"$code\": \"function() {}\"}, \"CodeWithScope\": {\"$code\": \"function() {}\", \"$scope\": {}}, 
\"Subdocument\": {\"foo\": \"bar\"}, \"Array\": [{\"$numberInt\": \"1\"}, {\"$numberInt\": \"2\"}, {\"$numberInt\": \"3\"}, {\"$numberInt\": \"4\"}, {\"$numberInt\": \"5\"}], \"Timestamp\": {\"$timestamp\": {\"t\": 42, \"i\": 1}}, \"Regex\": {\"$regularExpression\": {\"pattern\": \"pattern\", \"options\": \"\"}}, \"DatetimeEpoch\": {\"$date\": {\"$numberLong\": \"0\"}}, \"DatetimePositive\": {\"$date\": {\"$numberLong\": \"2147483647\"}}, \"DatetimeNegative\": {\"$date\": {\"$numberLong\": \"-2147483648\"}}, \"True\": true, \"False\": false, \"DBRef\": {\"$ref\": \"collection\", \"$id\": {\"$oid\": \"57fd71e96e32ab4225b723fb\"}, \"$db\": \"database\"}, \"Minkey\": {\"$minKey\": 1}, \"Maxkey\": {\"$maxKey\": 1}, \"Null\": null}" 9 | } 10 | ] 11 | } 12 | -------------------------------------------------------------------------------- /src/tests/spec/json/bson-corpus/null.json: -------------------------------------------------------------------------------- 1 | { 2 | "description": "Null type", 3 | "bson_type": "0x0A", 4 | "test_key": "a", 5 | "valid": [ 6 | { 7 | "description": "Null", 8 | "canonical_bson": "080000000A610000", 9 | "canonical_extjson": "{\"a\" : null}" 10 | } 11 | ] 12 | } 13 | -------------------------------------------------------------------------------- /src/tests/spec/json/bson-corpus/oid.json: -------------------------------------------------------------------------------- 1 | { 2 | "description": "ObjectId", 3 | "bson_type": "0x07", 4 | "test_key": "a", 5 | "valid": [ 6 | { 7 | "description": "All zeroes", 8 | "canonical_bson": "1400000007610000000000000000000000000000", 9 | "canonical_extjson": "{\"a\" : {\"$oid\" : \"000000000000000000000000\"}}" 10 | }, 11 | { 12 | "description": "All ones", 13 | "canonical_bson": "14000000076100FFFFFFFFFFFFFFFFFFFFFFFF00", 14 | "canonical_extjson": "{\"a\" : {\"$oid\" : \"ffffffffffffffffffffffff\"}}" 15 | }, 16 | { 17 | "description": "Random", 18 | "canonical_bson": "1400000007610056E1FC72E0C917E9C471416100", 19 | "canonical_extjson": "{\"a\" : {\"$oid\" : \"56e1fc72e0c917e9c4714161\"}}" 20 | } 21 | ], 22 | "decodeErrors": [ 23 | { 24 | "description": "OID truncated", 25 | "bson": "1200000007610056E1FC72E0C917E9C471" 26 | } 27 | ] 28 | } 29 | -------------------------------------------------------------------------------- /src/tests/spec/json/bson-corpus/regex.json: -------------------------------------------------------------------------------- 1 | { 2 | "description": "Regular Expression type", 3 | "bson_type": "0x0B", 4 | "test_key": "a", 5 | "valid": [ 6 | { 7 | "description": "empty regex with no options", 8 | "canonical_bson": "0A0000000B6100000000", 9 | "canonical_extjson": "{\"a\" : {\"$regularExpression\" : { \"pattern\": \"\", \"options\" : \"\"}}}" 10 | }, 11 | { 12 | "description": "regex without options", 13 | "canonical_bson": "0D0000000B6100616263000000", 14 | "canonical_extjson": "{\"a\" : {\"$regularExpression\" : { \"pattern\": \"abc\", \"options\" : \"\"}}}" 15 | }, 16 | { 17 | "description": "regex with options", 18 | "canonical_bson": "0F0000000B610061626300696D0000", 19 | "canonical_extjson": "{\"a\" : {\"$regularExpression\" : { \"pattern\": \"abc\", \"options\" : \"im\"}}}" 20 | }, 21 | { 22 | "description": "regex with options (keys reversed)", 23 | "canonical_bson": "0F0000000B610061626300696D0000", 24 | "canonical_extjson": "{\"a\" : {\"$regularExpression\" : { \"pattern\": \"abc\", \"options\" : \"im\"}}}", 25 | "degenerate_extjson": "{\"a\" : {\"$regularExpression\" : {\"options\" : \"im\", 
\"pattern\": \"abc\"}}}" 26 | }, 27 | { 28 | "description": "regex with slash", 29 | "canonical_bson": "110000000B610061622F636400696D0000", 30 | "canonical_extjson": "{\"a\" : {\"$regularExpression\" : { \"pattern\": \"ab/cd\", \"options\" : \"im\"}}}" 31 | }, 32 | { 33 | "description": "flags not alphabetized", 34 | "degenerate_bson": "100000000B6100616263006D69780000", 35 | "canonical_bson": "100000000B610061626300696D780000", 36 | "canonical_extjson": "{\"a\" : {\"$regularExpression\" : { \"pattern\": \"abc\", \"options\" : \"imx\"}}}", 37 | "degenerate_extjson": "{\"a\" : {\"$regularExpression\" : { \"pattern\": \"abc\", \"options\" : \"mix\"}}}" 38 | }, 39 | { 40 | "description" : "Required escapes", 41 | "canonical_bson" : "100000000B610061625C226162000000", 42 | "canonical_extjson": "{\"a\" : {\"$regularExpression\" : { \"pattern\": \"ab\\\\\\\"ab\", \"options\" : \"\"}}}" 43 | }, 44 | { 45 | "description" : "Regular expression as value of $regex query operator", 46 | "canonical_bson" : "180000000B247265676578007061747465726E0069780000", 47 | "canonical_extjson": "{\"$regex\" : {\"$regularExpression\" : { \"pattern\": \"pattern\", \"options\" : \"ix\"}}}" 48 | }, 49 | { 50 | "description" : "Regular expression as value of $regex query operator with $options", 51 | "canonical_bson" : "270000000B247265676578007061747465726E000002246F7074696F6E73000300000069780000", 52 | "canonical_extjson": "{\"$regex\" : {\"$regularExpression\" : { \"pattern\": \"pattern\", \"options\" : \"\"}}, \"$options\" : \"ix\"}" 53 | } 54 | ], 55 | "decodeErrors": [ 56 | { 57 | "description": "Null byte in pattern string", 58 | "bson": "0F0000000B610061006300696D0000" 59 | }, 60 | { 61 | "description": "Null byte in flags string", 62 | "bson": "100000000B61006162630069006D0000" 63 | } 64 | ] 65 | } 66 | -------------------------------------------------------------------------------- /src/tests/spec/json/bson-corpus/string.json: -------------------------------------------------------------------------------- 1 | { 2 | "description": "String", 3 | "bson_type": "0x02", 4 | "test_key": "a", 5 | "valid": [ 6 | { 7 | "description": "Empty string", 8 | "canonical_bson": "0D000000026100010000000000", 9 | "canonical_extjson": "{\"a\" : \"\"}" 10 | }, 11 | { 12 | "description": "Single character", 13 | "canonical_bson": "0E00000002610002000000620000", 14 | "canonical_extjson": "{\"a\" : \"b\"}" 15 | }, 16 | { 17 | "description": "Multi-character", 18 | "canonical_bson": "190000000261000D0000006162616261626162616261620000", 19 | "canonical_extjson": "{\"a\" : \"abababababab\"}" 20 | }, 21 | { 22 | "description": "two-byte UTF-8 (\u00e9)", 23 | "canonical_bson": "190000000261000D000000C3A9C3A9C3A9C3A9C3A9C3A90000", 24 | "canonical_extjson": "{\"a\" : \"\\u00e9\\u00e9\\u00e9\\u00e9\\u00e9\\u00e9\"}" 25 | }, 26 | { 27 | "description": "three-byte UTF-8 (\u2606)", 28 | "canonical_bson": "190000000261000D000000E29886E29886E29886E298860000", 29 | "canonical_extjson": "{\"a\" : \"\\u2606\\u2606\\u2606\\u2606\"}" 30 | }, 31 | { 32 | "description": "Embedded nulls", 33 | "canonical_bson": "190000000261000D0000006162006261620062616261620000", 34 | "canonical_extjson": "{\"a\" : \"ab\\u0000bab\\u0000babab\"}" 35 | }, 36 | { 37 | "description": "Required escapes", 38 | "canonical_bson" : "320000000261002600000061625C220102030405060708090A0B0C0D0E0F101112131415161718191A1B1C1D1E1F61620000", 39 | "canonical_extjson" : 
"{\"a\":\"ab\\\\\\\"\\u0001\\u0002\\u0003\\u0004\\u0005\\u0006\\u0007\\b\\t\\n\\u000b\\f\\r\\u000e\\u000f\\u0010\\u0011\\u0012\\u0013\\u0014\\u0015\\u0016\\u0017\\u0018\\u0019\\u001a\\u001b\\u001c\\u001d\\u001e\\u001fab\"}" 40 | } 41 | ], 42 | "decodeErrors": [ 43 | { 44 | "description": "bad string length: 0 (but no 0x00 either)", 45 | "bson": "0C0000000261000000000000" 46 | }, 47 | { 48 | "description": "bad string length: -1", 49 | "bson": "0C000000026100FFFFFFFF00" 50 | }, 51 | { 52 | "description": "bad string length: eats terminator", 53 | "bson": "10000000026100050000006200620000" 54 | }, 55 | { 56 | "description": "bad string length: longer than rest of document", 57 | "bson": "120000000200FFFFFF00666F6F6261720000" 58 | }, 59 | { 60 | "description": "string is not null-terminated", 61 | "bson": "1000000002610004000000616263FF00" 62 | }, 63 | { 64 | "description": "empty string, but extra null", 65 | "bson": "0E00000002610001000000000000" 66 | }, 67 | { 68 | "description": "invalid UTF-8", 69 | "bson": "0E00000002610002000000E90000" 70 | } 71 | ] 72 | } 73 | -------------------------------------------------------------------------------- /src/tests/spec/json/bson-corpus/symbol.json: -------------------------------------------------------------------------------- 1 | { 2 | "description": "Symbol", 3 | "bson_type": "0x0E", 4 | "deprecated": true, 5 | "test_key": "a", 6 | "valid": [ 7 | { 8 | "description": "Empty string", 9 | "canonical_bson": "0D0000000E6100010000000000", 10 | "canonical_extjson": "{\"a\": {\"$symbol\": \"\"}}", 11 | "converted_bson": "0D000000026100010000000000", 12 | "converted_extjson": "{\"a\": \"\"}" 13 | }, 14 | { 15 | "description": "Single character", 16 | "canonical_bson": "0E0000000E610002000000620000", 17 | "canonical_extjson": "{\"a\": {\"$symbol\": \"b\"}}", 18 | "converted_bson": "0E00000002610002000000620000", 19 | "converted_extjson": "{\"a\": \"b\"}" 20 | }, 21 | { 22 | "description": "Multi-character", 23 | "canonical_bson": "190000000E61000D0000006162616261626162616261620000", 24 | "canonical_extjson": "{\"a\": {\"$symbol\": \"abababababab\"}}", 25 | "converted_bson": "190000000261000D0000006162616261626162616261620000", 26 | "converted_extjson": "{\"a\": \"abababababab\"}" 27 | }, 28 | { 29 | "description": "two-byte UTF-8 (\u00e9)", 30 | "canonical_bson": "190000000E61000D000000C3A9C3A9C3A9C3A9C3A9C3A90000", 31 | "canonical_extjson": "{\"a\": {\"$symbol\": \"éééééé\"}}", 32 | "converted_bson": "190000000261000D000000C3A9C3A9C3A9C3A9C3A9C3A90000", 33 | "converted_extjson": "{\"a\": \"éééééé\"}" 34 | }, 35 | { 36 | "description": "three-byte UTF-8 (\u2606)", 37 | "canonical_bson": "190000000E61000D000000E29886E29886E29886E298860000", 38 | "canonical_extjson": "{\"a\": {\"$symbol\": \"☆☆☆☆\"}}", 39 | "converted_bson": "190000000261000D000000E29886E29886E29886E298860000", 40 | "converted_extjson": "{\"a\": \"☆☆☆☆\"}" 41 | }, 42 | { 43 | "description": "Embedded nulls", 44 | "canonical_bson": "190000000E61000D0000006162006261620062616261620000", 45 | "canonical_extjson": "{\"a\": {\"$symbol\": \"ab\\u0000bab\\u0000babab\"}}", 46 | "converted_bson": "190000000261000D0000006162006261620062616261620000", 47 | "converted_extjson": "{\"a\": \"ab\\u0000bab\\u0000babab\"}" 48 | } 49 | ], 50 | "decodeErrors": [ 51 | { 52 | "description": "bad symbol length: 0 (but no 0x00 either)", 53 | "bson": "0C0000000E61000000000000" 54 | }, 55 | { 56 | "description": "bad symbol length: -1", 57 | "bson": "0C0000000E6100FFFFFFFF00" 58 | }, 59 | { 60 | "description": "bad 
symbol length: eats terminator", 61 | "bson": "100000000E6100050000006200620000" 62 | }, 63 | { 64 | "description": "bad symbol length: longer than rest of document", 65 | "bson": "120000000E00FFFFFF00666F6F6261720000" 66 | }, 67 | { 68 | "description": "symbol is not null-terminated", 69 | "bson": "100000000E610004000000616263FF00" 70 | }, 71 | { 72 | "description": "empty symbol, but extra null", 73 | "bson": "0E0000000E610001000000000000" 74 | }, 75 | { 76 | "description": "invalid UTF-8", 77 | "bson": "0E0000000E610002000000E90000" 78 | } 79 | ] 80 | } 81 | -------------------------------------------------------------------------------- /src/tests/spec/json/bson-corpus/timestamp.json: -------------------------------------------------------------------------------- 1 | { 2 | "description": "Timestamp type", 3 | "bson_type": "0x11", 4 | "test_key": "a", 5 | "valid": [ 6 | { 7 | "description": "Timestamp: (123456789, 42)", 8 | "canonical_bson": "100000001161002A00000015CD5B0700", 9 | "canonical_extjson": "{\"a\" : {\"$timestamp\" : {\"t\" : 123456789, \"i\" : 42} } }" 10 | }, 11 | { 12 | "description": "Timestamp: (123456789, 42) (keys reversed)", 13 | "canonical_bson": "100000001161002A00000015CD5B0700", 14 | "canonical_extjson": "{\"a\" : {\"$timestamp\" : {\"t\" : 123456789, \"i\" : 42} } }", 15 | "degenerate_extjson": "{\"a\" : {\"$timestamp\" : {\"i\" : 42, \"t\" : 123456789} } }" 16 | }, 17 | { 18 | "description": "Timestamp with high-order bit set on both seconds and increment", 19 | "canonical_bson": "10000000116100FFFFFFFFFFFFFFFF00", 20 | "canonical_extjson": "{\"a\" : {\"$timestamp\" : {\"t\" : 4294967295, \"i\" : 4294967295} } }" 21 | }, 22 | { 23 | "description": "Timestamp with high-order bit set on both seconds and increment (not UINT32_MAX)", 24 | "canonical_bson": "1000000011610000286BEE00286BEE00", 25 | "canonical_extjson": "{\"a\" : {\"$timestamp\" : {\"t\" : 4000000000, \"i\" : 4000000000} } }" 26 | } 27 | ], 28 | "decodeErrors": [ 29 | { 30 | "description": "Truncated timestamp field", 31 | "bson": "0f0000001161002A00000015CD5B00" 32 | } 33 | ] 34 | } 35 | -------------------------------------------------------------------------------- /src/tests/spec/json/bson-corpus/undefined.json: -------------------------------------------------------------------------------- 1 | { 2 | "description": "Undefined type (deprecated)", 3 | "bson_type": "0x06", 4 | "deprecated": true, 5 | "test_key": "a", 6 | "valid": [ 7 | { 8 | "description": "Undefined", 9 | "canonical_bson": "0800000006610000", 10 | "canonical_extjson": "{\"a\" : {\"$undefined\" : true}}", 11 | "converted_bson": "080000000A610000", 12 | "converted_extjson": "{\"a\" : null}" 13 | } 14 | ] 15 | } 16 | -------------------------------------------------------------------------------- /src/tests/spec/vector.rs: -------------------------------------------------------------------------------- 1 | use std::convert::TryFrom; 2 | 3 | use serde::{Deserialize, Deserializer, Serialize}; 4 | 5 | use crate::{ 6 | binary::{Binary, PackedBitVector, Vector}, 7 | deserialize_from_document, 8 | deserialize_from_slice, 9 | serialize_to_document, 10 | serialize_to_raw_document_buf, 11 | spec::BinarySubtype, 12 | Bson, 13 | Document, 14 | RawDocumentBuf, 15 | }; 16 | 17 | use super::run_spec_test; 18 | 19 | const INT8: u8 = 0x03; 20 | const FLOAT32: u8 = 0x27; 21 | const PACKED_BIT: u8 = 0x10; 22 | 23 | #[derive(Deserialize)] 24 | struct TestFile { 25 | description: String, 26 | test_key: String, 27 | tests: Vec<Test>, 28 | } 29 | 30 | 
#[derive(Deserialize)] 31 | struct Test { 32 | description: String, 33 | valid: bool, 34 | vector: Option<Vec<Number>>, 35 | #[serde( 36 | rename = "dtype_hex", 37 | deserialize_with = "deserialize_u8_from_hex_string" 38 | )] 39 | d_type: u8, 40 | padding: Option<i16>, 41 | canonical_bson: Option<String>, 42 | } 43 | 44 | fn deserialize_u8_from_hex_string<'de, D>(deserializer: D) -> Result<u8, D::Error> 45 | where 46 | D: Deserializer<'de>, 47 | { 48 | let s = String::deserialize(deserializer)?; 49 | u8::from_str_radix(s.trim_start_matches("0x"), 16).map_err(serde::de::Error::custom) 50 | } 51 | 52 | #[derive(Deserialize)] 53 | #[serde(untagged)] 54 | enum Number { 55 | Int(i16), 56 | Float(f32), 57 | } 58 | 59 | // Some of the invalid cases (e.g. mixed number types, padding for non-packed-bit vectors) are 60 | // impossible to construct, so we return an error from this method. 61 | fn vector_from_numbers( 62 | numbers: Vec<Number>, 63 | d_type: u8, 64 | padding: Option<i16>, 65 | ) -> Result<Vector, String> { 66 | let padding = u8::try_from(padding.unwrap_or(0)).map_err(|e| e.to_string())?; 67 | if padding != 0 && d_type != PACKED_BIT { 68 | return Err(format!("got nonzero padding for data type {}", d_type)); 69 | } 70 | match d_type { 71 | INT8 => { 72 | let vector = numbers 73 | .into_iter() 74 | .map(|n| match n { 75 | Number::Int(n) => i8::try_from(n).map_err(|e| e.to_string()), 76 | Number::Float(n) => Err(format!("expected i8, got float {}", n)), 77 | }) 78 | .collect::<Result<Vec<i8>, String>>()?; 79 | Ok(Vector::Int8(vector)) 80 | } 81 | FLOAT32 => { 82 | let vector = numbers 83 | .into_iter() 84 | .map(|n| match n { 85 | Number::Int(n) => Err(format!("expected f32, got int {}", n)), 86 | Number::Float(n) => Ok(n), 87 | }) 88 | .collect::<Result<Vec<f32>, String>>()?; 89 | Ok(Vector::Float32(vector)) 90 | } 91 | PACKED_BIT => { 92 | let vector = numbers 93 | .into_iter() 94 | .map(|n| match n { 95 | Number::Int(n) => u8::try_from(n).map_err(|e| e.to_string()), 96 | Number::Float(n) => Err(format!("expected u8, got float {}", n)), 97 | }) 98 | .collect::<Result<Vec<u8>, String>>()?; 99 | Ok(Vector::PackedBit( 100 | PackedBitVector::new(vector, padding).map_err(|e| e.to_string())?, 101 | )) 102 | } 103 | other => Err(format!("invalid data type: {}", other)), 104 | } 105 | } 106 | 107 | // Only return the binary if it represents a valid vector; otherwise, return an error. 
108 | fn binary_from_bytes(bson: &str, test_key: &str, description: &str) -> Result<Binary, String> { 109 | let bytes = hex::decode(bson).expect(description); 110 | let mut test_document = Document::decode_from_reader(bytes.as_slice()).expect(description); 111 | let bson = test_document.remove(test_key).expect(description); 112 | let binary = match bson { 113 | Bson::Binary(binary) => binary, 114 | other => panic!("{}: expected binary, got {}", description, other), 115 | }; 116 | if let Err(error) = Vector::try_from(&binary) { 117 | Err(error.to_string()) 118 | } else { 119 | Ok(binary) 120 | } 121 | } 122 | 123 | fn run_test_file(test_file: TestFile) { 124 | for test in test_file.tests { 125 | let description = format!("{} ({})", test.description, test_file.description); 126 | 127 | let test_vector = match test.vector { 128 | Some(vector) => match vector_from_numbers(vector, test.d_type, test.padding) { 129 | Ok(vector) => { 130 | assert!(test.valid, "{}", description); 131 | Some(vector) 132 | } 133 | Err(error) => { 134 | assert!(!test.valid, "{}: {}", description, error); 135 | None 136 | } 137 | }, 138 | None => None, 139 | }; 140 | 141 | let test_binary = match test.canonical_bson { 142 | Some(bson) => match binary_from_bytes(&bson, &test_file.test_key, &description) { 143 | Ok(vector) => { 144 | assert!(test.valid, "{}", description); 145 | Some(vector) 146 | } 147 | Err(error) => { 148 | assert!(!test.valid, "{}: {}", description, error); 149 | None 150 | } 151 | }, 152 | None => None, 153 | }; 154 | 155 | let (Some(test_vector), Some(test_binary)) = (test_vector, test_binary) else { 156 | return; 157 | }; 158 | 159 | let test_document = doc! { "vector": &test_binary }; 160 | 161 | // TryFrom<&Binary> for Vector 162 | let parsed_vector = Vector::try_from(&test_binary).expect(&description); 163 | assert_eq!(parsed_vector, test_vector); 164 | 165 | // From<&Vector> for Binary 166 | let binary = Binary::from(&test_vector); 167 | assert_eq!(binary.subtype, BinarySubtype::Vector); 168 | assert_eq!(binary, test_binary); 169 | 170 | // From<&Vector> for Bson 171 | let document = doc! { "vector": &test_vector }; 172 | assert_eq!(document, test_document); 173 | let document = doc! { "vector": test_vector.clone() }; 174 | assert_eq!(document, test_document); 175 | 176 | // From<&Vector> for RawBson 177 | let raw_document = rawdoc! 
{ "vector": &test_vector }; 178 | let test_raw_document = RawDocumentBuf::from_document(&test_document).expect(&description); 179 | assert_eq!(raw_document, test_raw_document); 180 | 181 | #[derive(Debug, Deserialize, PartialEq, Serialize)] 182 | struct Data { 183 | vector: Vector, 184 | } 185 | let data = Data { 186 | vector: test_vector, 187 | }; 188 | 189 | // Serialize for Vector (Document) 190 | let serialized_document = serialize_to_document(&data).expect(&description); 191 | assert_eq!(serialized_document, test_document); 192 | 193 | // Deserialize for Vector (Document) 194 | let deserialized_data: Data = 195 | deserialize_from_document(serialized_document).expect(&description); 196 | assert_eq!(deserialized_data, data); 197 | 198 | // Serialize for Vector (RawDocumentBuf) 199 | let serialized_raw_document = serialize_to_raw_document_buf(&data).expect(&description); 200 | assert_eq!(serialized_raw_document, test_raw_document); 201 | 202 | // Deserialize for Vector (RawDocumentBuf) 203 | let deserialized_data: Data = 204 | deserialize_from_slice(serialized_raw_document.as_bytes()).expect(&description); 205 | assert_eq!(deserialized_data, data); 206 | } 207 | } 208 | 209 | #[test] 210 | fn run_vector_tests() { 211 | run_spec_test(&["bson-binary-vector"], run_test_file); 212 | } 213 | -------------------------------------------------------------------------------- /src/uuid/test.rs: -------------------------------------------------------------------------------- 1 | use crate::{ 2 | spec::BinarySubtype, 3 | uuid::{Uuid, UuidRepresentation}, 4 | Binary, 5 | Bson, 6 | }; 7 | 8 | #[cfg(feature = "serde")] 9 | #[derive(Debug, serde::Serialize, serde::Deserialize, PartialEq)] 10 | struct U { 11 | uuid: Uuid, 12 | } 13 | 14 | #[test] 15 | fn into_bson() { 16 | let uuid = Uuid::new(); 17 | 18 | let bson: Bson = uuid.into(); 19 | let binary = Binary { 20 | bytes: uuid.bytes().to_vec(), 21 | subtype: BinarySubtype::Uuid, 22 | }; 23 | 24 | assert_eq!(bson, Bson::Binary(binary)); 25 | } 26 | 27 | #[cfg(feature = "serde")] 28 | #[test] 29 | fn raw_serialization() { 30 | let u = U { uuid: Uuid::new() }; 31 | let bytes = crate::serialize_to_vec(&u).unwrap(); 32 | 33 | let doc: crate::Document = crate::deserialize_from_slice(bytes.as_slice()).unwrap(); 34 | assert_eq!(doc, doc! { "uuid": u.uuid }); 35 | 36 | let u_roundtrip: U = crate::deserialize_from_slice(bytes.as_slice()).unwrap(); 37 | assert_eq!(u_roundtrip, u); 38 | } 39 | 40 | #[cfg(feature = "serde")] 41 | #[test] 42 | fn bson_serialization() { 43 | let u = U { uuid: Uuid::new() }; 44 | let correct = doc! { 45 | "uuid": Binary { 46 | bytes: u.uuid.bytes().to_vec(), 47 | subtype: BinarySubtype::Uuid 48 | } 49 | }; 50 | 51 | assert_eq!(doc! { "uuid": u.uuid }, correct); 52 | 53 | let doc = crate::serialize_to_document(&u).unwrap(); 54 | assert_eq!(doc, correct); 55 | 56 | let u_roundtrip: U = crate::deserialize_from_document(doc).unwrap(); 57 | assert_eq!(u_roundtrip, u); 58 | } 59 | 60 | #[cfg(feature = "serde")] 61 | #[test] 62 | fn json() { 63 | let u = U { uuid: Uuid::new() }; 64 | 65 | let json = serde_json::to_value(&u).unwrap(); 66 | assert_eq!(json, serde_json::json!({ "uuid": u.uuid.to_string() })); 67 | 68 | let u_roundtrip_json: U = serde_json::from_value(json).unwrap(); 69 | assert_eq!(u_roundtrip_json, u); 70 | } 71 | 72 | #[cfg(feature = "serde")] 73 | #[test] 74 | fn wrong_subtype() { 75 | let generic = doc! 
{ 76 | "uuid": Binary { 77 | bytes: Uuid::new().bytes().to_vec(), 78 | subtype: BinarySubtype::Generic 79 | } 80 | }; 81 | crate::deserialize_from_document::<U>(generic.clone()).unwrap_err(); 82 | let generic_bytes = crate::serialize_to_vec(&generic).unwrap(); 83 | crate::deserialize_from_slice::<U>(&generic_bytes).unwrap_err(); 84 | 85 | let old = doc! { 86 | "uuid": Binary { 87 | bytes: Uuid::new().bytes().to_vec(), 88 | subtype: BinarySubtype::UuidOld 89 | } 90 | }; 91 | crate::deserialize_from_document::<U>(old.clone()).unwrap_err(); 92 | let old_bytes = crate::serialize_to_vec(&old).unwrap(); 93 | crate::deserialize_from_slice::<U>(&old_bytes).unwrap_err(); 94 | 95 | let other = doc! { 96 | "uuid": Binary { 97 | bytes: Uuid::new().bytes().to_vec(), 98 | subtype: BinarySubtype::UserDefined(100) 99 | } 100 | }; 101 | crate::deserialize_from_document::<U>(other.clone()).unwrap_err(); 102 | let other_bytes = crate::serialize_to_vec(&other).unwrap(); 103 | crate::deserialize_from_slice::<U>(&other_bytes).unwrap_err(); 104 | } 105 | 106 | #[test] 107 | fn test_binary_constructors() { 108 | let uuid = crate::Uuid::parse_str("00112233445566778899AABBCCDDEEFF").unwrap(); 109 | let bin = Binary::from_uuid(uuid); 110 | assert_eq!(bin.bytes, uuid.bytes()); 111 | assert_eq!(bin.subtype, BinarySubtype::Uuid); 112 | 113 | let bin = Binary::from_uuid_with_representation(uuid, UuidRepresentation::Standard); 114 | assert_eq!(bin.bytes, uuid.bytes()); 115 | assert_eq!(bin.subtype, BinarySubtype::Uuid); 116 | 117 | let bin = Binary::from_uuid_with_representation(uuid, UuidRepresentation::JavaLegacy); 118 | assert_eq!( 119 | bin.bytes, 120 | Uuid::parse_str("7766554433221100FFEEDDCCBBAA9988") 121 | .unwrap() 122 | .bytes() 123 | ); 124 | assert_eq!(bin.subtype, BinarySubtype::UuidOld); 125 | 126 | let bin = Binary::from_uuid_with_representation(uuid, UuidRepresentation::CSharpLegacy); 127 | assert_eq!( 128 | bin.bytes, 129 | Uuid::parse_str("33221100554477668899AABBCCDDEEFF") 130 | .unwrap() 131 | .bytes() 132 | ); 133 | assert_eq!(bin.subtype, BinarySubtype::UuidOld); 134 | 135 | // Same byte ordering as standard representation 136 | let bin = Binary::from_uuid_with_representation(uuid, UuidRepresentation::PythonLegacy); 137 | assert_eq!( 138 | bin.bytes, 139 | Uuid::parse_str("00112233445566778899AABBCCDDEEFF") 140 | .unwrap() 141 | .bytes() 142 | ); 143 | assert_eq!(bin.subtype, BinarySubtype::UuidOld); 144 | } 145 | 146 | #[test] 147 | fn test_binary_to_uuid_standard_rep() { 148 | let uuid = crate::Uuid::parse_str("00112233445566778899AABBCCDDEEFF").unwrap(); 149 | let bin = Binary::from_uuid(uuid); 150 | 151 | assert_eq!(bin.to_uuid().unwrap(), uuid); 152 | assert_eq!( 153 | bin.to_uuid_with_representation(UuidRepresentation::Standard) 154 | .unwrap(), 155 | uuid 156 | ); 157 | 158 | assert!(bin 159 | .to_uuid_with_representation(UuidRepresentation::CSharpLegacy) 160 | .is_err()); 161 | assert!(bin 162 | .to_uuid_with_representation(UuidRepresentation::PythonLegacy) 163 | .is_err()); 164 | assert!(bin 165 | .to_uuid_with_representation(UuidRepresentation::PythonLegacy) 166 | .is_err()); 167 | } 168 | 169 | #[test] 170 | fn test_binary_to_uuid_explicitly_standard_rep() { 171 | let uuid = crate::Uuid::parse_str("00112233445566778899AABBCCDDEEFF").unwrap(); 172 | let bin = Binary::from_uuid_with_representation(uuid, UuidRepresentation::Standard); 173 | 174 | assert_eq!(bin.to_uuid().unwrap(), uuid); 175 | assert_eq!( 176 | bin.to_uuid_with_representation(UuidRepresentation::Standard) 177 | .unwrap(), 178 | uuid 179 | ); 
180 | 181 | assert!(bin 182 | .to_uuid_with_representation(UuidRepresentation::CSharpLegacy) 183 | .is_err()); 184 | assert!(bin 185 | .to_uuid_with_representation(UuidRepresentation::PythonLegacy) 186 | .is_err()); 187 | assert!(bin 188 | .to_uuid_with_representation(UuidRepresentation::PythonLegacy) 189 | .is_err()); 190 | } 191 | 192 | #[test] 193 | fn test_binary_to_uuid_java_rep() { 194 | let uuid = crate::Uuid::parse_str("00112233445566778899AABBCCDDEEFF").unwrap(); 195 | let bin = Binary::from_uuid_with_representation(uuid, UuidRepresentation::JavaLegacy); 196 | 197 | assert!(bin.to_uuid().is_err()); 198 | assert!(bin 199 | .to_uuid_with_representation(UuidRepresentation::Standard) 200 | .is_err()); 201 | 202 | assert_eq!( 203 | bin.to_uuid_with_representation(UuidRepresentation::JavaLegacy) 204 | .unwrap(), 205 | uuid 206 | ); 207 | } 208 | 209 | #[test] 210 | fn test_binary_to_uuid_csharp_legacy_rep() { 211 | let uuid = crate::Uuid::parse_str("00112233445566778899AABBCCDDEEFF").unwrap(); 212 | let bin = Binary::from_uuid_with_representation(uuid, UuidRepresentation::CSharpLegacy); 213 | 214 | assert!(bin.to_uuid().is_err()); 215 | assert!(bin 216 | .to_uuid_with_representation(UuidRepresentation::Standard) 217 | .is_err()); 218 | 219 | assert_eq!( 220 | bin.to_uuid_with_representation(UuidRepresentation::CSharpLegacy) 221 | .unwrap(), 222 | uuid 223 | ); 224 | } 225 | 226 | #[test] 227 | fn test_binary_to_uuid_python_legacy_rep() { 228 | let uuid = crate::Uuid::parse_str("00112233445566778899AABBCCDDEEFF").unwrap(); 229 | let bin = Binary::from_uuid_with_representation(uuid, UuidRepresentation::PythonLegacy); 230 | 231 | assert!(bin.to_uuid().is_err()); 232 | assert!(bin 233 | .to_uuid_with_representation(UuidRepresentation::Standard) 234 | .is_err()); 235 | 236 | assert_eq!( 237 | bin.to_uuid_with_representation(UuidRepresentation::PythonLegacy) 238 | .unwrap(), 239 | uuid 240 | ); 241 | } 242 | 243 | #[cfg(feature = "uuid-1")] 244 | #[test] 245 | fn interop_1() { 246 | let uuid = crate::Uuid::new(); 247 | let uuid_uuid = uuid.to_uuid_1(); 248 | assert_eq!(uuid.to_string(), uuid_uuid.to_string()); 249 | assert_eq!(&uuid.bytes(), uuid_uuid.as_bytes()); 250 | 251 | let back: crate::Uuid = uuid_uuid.into(); 252 | assert_eq!(back, uuid); 253 | 254 | let d_bson = doc! { "uuid": uuid }; 255 | let d_uuid = doc! { "uuid": uuid_uuid }; 256 | assert_eq!(d_bson, d_uuid); 257 | } 258 | 259 | #[cfg(feature = "serde")] 260 | #[test] 261 | fn deserialize_uuid_from_string() { 262 | #[derive(serde::Deserialize)] 263 | struct UuidWrapper { 264 | uuid: Uuid, 265 | } 266 | 267 | let uuid = Uuid::new(); 268 | 269 | let doc = doc! { "uuid": uuid.to_string() }; 270 | let wrapper: UuidWrapper = 271 | crate::deserialize_from_document(doc).expect("failed to deserialize document"); 272 | assert_eq!(wrapper.uuid, uuid); 273 | 274 | let raw_doc = rawdoc! 
{ "uuid": uuid.to_string() }; 275 | let wrapper: UuidWrapper = crate::deserialize_from_slice(raw_doc.as_bytes()) 276 | .expect("failed to deserialize raw document"); 277 | assert_eq!(wrapper.uuid, uuid); 278 | } 279 | -------------------------------------------------------------------------------- /wasm-test/.cargo/config.toml: -------------------------------------------------------------------------------- 1 | [build] 2 | rustflags = ["--cfg", "getrandom_backend=\"wasm_js\""] -------------------------------------------------------------------------------- /wasm-test/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "bson-wasm-test" 3 | version = "0.1.0" 4 | authors = ["Abraham Egnor "] 5 | edition = "2018" 6 | 7 | # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html 8 | 9 | [lib] 10 | crate-type = ["cdylib", "rlib"] 11 | 12 | [dependencies] 13 | bson = { path = ".." } 14 | 15 | [dev-dependencies] 16 | wasm-bindgen-test = "0.3.0" -------------------------------------------------------------------------------- /wasm-test/src/lib.rs: -------------------------------------------------------------------------------- 1 | #[cfg(test)] 2 | mod test; -------------------------------------------------------------------------------- /wasm-test/src/test.rs: -------------------------------------------------------------------------------- 1 | use wasm_bindgen_test::wasm_bindgen_test; 2 | 3 | #[wasm_bindgen_test] 4 | fn objectid_new() { 5 | let _ = bson::oid::ObjectId::new(); 6 | } --------------------------------------------------------------------------------