├── .github ├── FUNDING.yml └── workflows │ └── ci.yml ├── .gitignore ├── CHANGELOG.md ├── Cargo.toml ├── LICENSE-APACHE ├── LICENSE-MIT ├── README-zh_CN.md ├── README.md ├── benches └── bench.rs ├── ci ├── miri.sh ├── sanitizer.sh └── test-stable.sh ├── examples └── foo.rs └── src ├── lib.rs ├── proofs.rs ├── proofs └── tests.rs ├── smt.rs ├── smt └── tests.rs └── tree_hasher.rs /.github/FUNDING.yml: -------------------------------------------------------------------------------- 1 | github: al8n 2 | patreon: al8n 3 | ko_fi: al8n9434 4 | -------------------------------------------------------------------------------- /.github/workflows/ci.yml: -------------------------------------------------------------------------------- 1 | name: CI 2 | 3 | on: 4 | push: 5 | # Ignore bors branches, since they are covered by `clippy_bors.yml` 6 | branches: 7 | - main 8 | # Don't run Clippy tests, when only textfiles were modified 9 | paths-ignore: 10 | - "README" 11 | - "COPYRIGHT" 12 | - "LICENSE-*" 13 | - "**.md" 14 | - "**.txt" 15 | pull_request: 16 | # Don't run Clippy tests, when only textfiles were modified 17 | paths-ignore: 18 | - "README" 19 | - "COPYRIGHT" 20 | - "LICENSE-*" 21 | - "**.md" 22 | - "**.txt" 23 | 24 | env: 25 | CARGO_TERM_COLOR: always 26 | RUSTFLAGS: -Dwarnings 27 | RUST_BACKTRACE: 1 28 | nightly: nightly-2022-07-13 29 | stable: stable 30 | 31 | defaults: 32 | run: 33 | shell: bash 34 | 35 | jobs: 36 | # Check formatting 37 | rustfmt: 38 | name: rustfmt 39 | runs-on: ubuntu-latest 40 | steps: 41 | - uses: actions/checkout@v3 42 | - name: Install Rust 43 | run: rustup update stable && rustup default stable 44 | - name: Check formatting 45 | run: cargo fmt --all -- --check 46 | 47 | # Apply clippy lints 48 | clippy: 49 | name: clippy 50 | runs-on: ubuntu-latest 51 | steps: 52 | - uses: actions/checkout@v3 53 | - name: Apply clippy lints 54 | run: cargo clippy --all-features 55 | 56 | # This represents the minimum Rust version supported by 57 | # Bytes. Updating this should be done in a dedicated PR. 58 | # 59 | # Tests are not run as tests may require newer versions of 60 | # rust. 61 | minrust: 62 | name: minrust 63 | runs-on: ubuntu-latest 64 | steps: 65 | - uses: actions/checkout@v3 66 | - name: Install Rust 67 | run: rustup update 1.62.0 && rustup default 1.62.0 68 | - name: Check 69 | run: . ci/test-stable.sh check 70 | 71 | # Stable 72 | stable: 73 | name: stable 74 | strategy: 75 | matrix: 76 | os: 77 | - ubuntu-latest 78 | - macos-latest 79 | - windows-latest 80 | runs-on: ${{ matrix.os }} 81 | steps: 82 | - uses: actions/checkout@v3 83 | - name: Install Rust 84 | # --no-self-update is necessary because the windows environment cannot self-update rustup.exe. 85 | run: rustup update stable --no-self-update && rustup default stable 86 | - name: Test 87 | run: . ci/test-stable.sh test 88 | 89 | # Nightly 90 | nightly: 91 | name: nightly 92 | runs-on: ubuntu-latest 93 | steps: 94 | - uses: actions/checkout@v3 95 | - name: Install Rust 96 | run: rustup update $nightly && rustup default $nightly 97 | - name: Test 98 | run: . 
ci/test-stable.sh test 99 | 100 | # Run tests on some extra platforms 101 | cross: 102 | name: cross 103 | strategy: 104 | matrix: 105 | target: 106 | - aarch64-unknown-linux-gnu 107 | - aarch64-linux-android 108 | - aarch64-unknown-linux-musl 109 | - i686-linux-android 110 | - x86_64-linux-android 111 | - i686-pc-windows-gnu 112 | - x86_64-pc-windows-gnu 113 | - i686-unknown-linux-gnu 114 | - powerpc64-unknown-linux-gnu 115 | - mips64-unknown-linux-gnuabi64 116 | - riscv64gc-unknown-linux-gnu 117 | - wasm32-unknown-unknown 118 | - wasm32-unknown-emscripten 119 | runs-on: ubuntu-latest 120 | steps: 121 | - uses: actions/checkout@v3 122 | - name: Install Rust 123 | run: rustup update stable && rustup default stable 124 | - name: cross build --target ${{ matrix.target }} 125 | run: | 126 | cargo install cross 127 | cross build --target ${{ matrix.target }} 128 | if: matrix.target != 'wasm32-unknown-unknown' 129 | # WASM support 130 | - name: cargo build --target ${{ matrix.target }} 131 | run: | 132 | rustup target add ${{ matrix.target }} 133 | cargo build --target ${{ matrix.target }} 134 | if: matrix.target == 'wasm32-unknown-unknown' 135 | 136 | # Sanitizers 137 | sanitizer: 138 | name: sanitizer 139 | runs-on: ubuntu-latest 140 | steps: 141 | - uses: actions/checkout@v3 142 | - name: Install Rust 143 | run: rustup update $nightly && rustup default $nightly 144 | - name: Install rust-src 145 | run: rustup component add rust-src 146 | - name: ASAN / LSAN 147 | run: . ci/sanitizer.sh 148 | 149 | # # valgrind 150 | # valgrind: 151 | # name: valgrind 152 | # runs-on: ubuntu-latest 153 | # steps: 154 | # - uses: actions/checkout@v3 155 | # - name: Install Rust ${{ env.stable }} 156 | # uses: actions-rs/toolchain@v1 157 | # with: 158 | # toolchain: ${{ env.stable }} 159 | # override: true 160 | # - uses: Swatinem/rust-cache@v1 161 | 162 | # - name: Install Valgrind 163 | # run: | 164 | # sudo apt-get update -y 165 | # sudo apt-get install -y valgrind 166 | # # Compile tests 167 | # - name: cargo build test-fixed 168 | # run: cargo build --bin test-fixed 169 | # working-directory: integration 170 | 171 | # # Run with valgrind 172 | # - name: Run valgrind test-fixed 173 | # run: valgrind --error-exitcode=1 --leak-check=full --show-leak-kinds=all ./target/debug/test-fixed 174 | # working-directory: integration 175 | 176 | # # Compile tests 177 | # - name: cargo build test-growable 178 | # run: cargo build --bin test-growable 179 | # working-directory: integration 180 | 181 | # # Run with valgrind 182 | # - name: Run valgrind test-growable 183 | # run: valgrind --error-exitcode=1 --leak-check=full --show-leak-kinds=all ./target/debug/test-growable 184 | # working-directory: integration 185 | 186 | # miri: 187 | # name: miri 188 | # runs-on: ubuntu-latest 189 | # steps: 190 | # - uses: actions/checkout@v3 191 | # - name: Miri 192 | # run: ci/miri.sh 193 | 194 | tarpaulin: 195 | name: cargo tarpaulin 196 | runs-on: ubuntu-latest 197 | needs: 198 | - rustfmt 199 | - clippy 200 | - stable 201 | - nightly 202 | - sanitizer 203 | # - miri 204 | # - valgrind 205 | steps: 206 | - uses: actions/checkout@v3 207 | - name: Install latest nightly 208 | uses: actions-rs/toolchain@v1 209 | with: 210 | toolchain: nightly 211 | override: true 212 | components: rustfmt, clippy 213 | - uses: Swatinem/rust-cache@v1 214 | - uses: actions/cache@v2 215 | with: 216 | path: | 217 | ~/.cargo/registry 218 | ~/.cargo/git 219 | target 220 | key: ${{ runner.os }}-cargo-${{ hashFiles('**/Cargo.lock') }} 221 | 222 | - uses: 
actions-rs/tarpaulin@v0.1 223 | - name: Run cargo tarpaulin 224 | run: cargo tarpaulin --all-features --run-types Tests,Doctests --out Xml 225 | 226 | - name: Upload to codecov.io 227 | uses: codecov/codecov-action@v1.0.2 228 | with: 229 | token: ${{ secrets.CODECOV_TOKEN }} 230 | fail_ci_if_error: file -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | **.idea/ 2 | 3 | # Generated by Cargo 4 | # will have compiled files and executables 5 | **target/ 6 | 7 | # Remove Cargo.lock from gitignore if creating an executable, leave it for libraries 8 | # More information here https://doc.rust-lang.org/cargo/guide/cargo-toml-vs-cargo-lock.html 9 | **Cargo.lock 10 | 11 | # These are backup files generated by rustfmt 12 | **/*.rs.bk 13 | 14 | 15 | # Added by cargo 16 | 17 | /target 18 | Cargo.lock 19 | -------------------------------------------------------------------------------- /CHANGELOG.md: -------------------------------------------------------------------------------- 1 | # UNRELEASED 2 | 3 | # 0.1.0 (Aug 7th, 2022) 4 | 5 | FEATURES 6 | 7 | - Finish porting [https://github.com/celestiaorg/smt](https://github.com/celestiaorg/smt) 8 | 9 | -------------------------------------------------------------------------------- /Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "lsmtree" 3 | version = "0.1.1" 4 | edition = "2021" 5 | repository = "https://github.com/al8n/lsmtree" 6 | description = "Implements a Sparse Merkle tree for a key-value store. The tree implements the same optimisations specified in the libra whitepaper, to reduce the number of hash operations required per tree operation to O(k) where k is the number of non-empty elements in the tree." 7 | license = "MIT/Apache-2.0" 8 | keywords = ["merkletree", "merkle", "merkle-tree", "sparse_merkle_tree", "smt"] 9 | categories = ["cryptography", "data-structures", "no-std"] 10 | 11 | [[bench]] 12 | path = "benches/bench.rs" 13 | name = "bench" 14 | harness = false 15 | 16 | [profile.bench] 17 | opt-level = 3 18 | debug = false 19 | codegen-units = 1 20 | lto = 'thin' 21 | incremental = false 22 | debug-assertions = false 23 | overflow-checks = false 24 | rpath = false 25 | 26 | [features] 27 | default = ["std"] 28 | std = ["bytes/default"] 29 | 30 | [dependencies] 31 | bytes = { version = "1.2", default-features = false } 32 | digest = "0.10" 33 | 34 | [target.'cfg(target_arch = "aarch64")'.dev-dependencies] 35 | sha2 = { version = "0.10", features = ["asm-aarch64"]} 36 | 37 | [dev-dependencies] 38 | criterion = "0.3" 39 | tempfile = "3" 40 | parking_lot = "0.12" 41 | rand = "0.8" 42 | sha2 = "0.10" 43 | hashbrown = "0.12" 44 | 45 | [package.metadata.docs.rs] 46 | all-features = true 47 | rustdoc-args = ["--cfg", "docsrs"] 48 | -------------------------------------------------------------------------------- /LICENSE-APACHE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 
11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. 
Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 
134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "[]" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright [yyyy] [name of copyright owner] 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 
193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 202 | -------------------------------------------------------------------------------- /LICENSE-MIT: -------------------------------------------------------------------------------- 1 | Copyright (c) 2015 The Rust Project Developers 2 | 3 | Permission is hereby granted, free of charge, to any 4 | person obtaining a copy of this software and associated 5 | documentation files (the "Software"), to deal in the 6 | Software without restriction, including without 7 | limitation the rights to use, copy, modify, merge, 8 | publish, distribute, sublicense, and/or sell copies of 9 | the Software, and to permit persons to whom the Software 10 | is furnished to do so, subject to the following 11 | conditions: 12 | 13 | The above copyright notice and this permission notice 14 | shall be included in all copies or substantial portions 15 | of the Software. 16 | 17 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF 18 | ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED 19 | TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A 20 | PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT 21 | SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY 22 | CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION 23 | OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR 24 | IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER 25 | DEALINGS IN THE SOFTWARE. 26 | -------------------------------------------------------------------------------- /README-zh_CN.md: -------------------------------------------------------------------------------- 1 |
2 | # LSMTree
3 |
4 |
5 | 6 | A Rust library that implements a Sparse Merkle tree for a key-value map. The tree implements the same optimisations specified in the [Libra whitepaper][libra whitepaper], to reduce the number of hash operations required per tree operation to O(k) where k is the number of non-empty elements in the tree. 7 | 8 | [github][Github-url] 9 | [Build][CI-url] 10 | [codecov][codecov-url] 11 | 12 | [docs.rs][doc-url] 13 | [crates.io][crates-url] 14 | [rustc][rustc-url] 15 | 16 | [license-apache][license-apache-url] 17 | [license-mit][license-mit-url] 18 | 19 | [English][en-url] | 简体中文 20 | 21 |
22 | 23 | ## Installation 24 | ```toml 25 | [dependencies] 26 | lsmtree = "0.0.6" 27 | ``` 28 | 29 | #### License 30 | 31 | `lsmtree` is under the terms of both the MIT license and the 32 | Apache License (Version 2.0). 33 | 34 | See [LICENSE-APACHE](LICENSE-APACHE), [LICENSE-MIT](LICENSE-MIT) for details. 35 | 36 | Copyright (c) 2022 Al Liu. 37 | 38 | [Github-url]: https://github.com/al8n/lsmtree/ 39 | [CI-url]: https://github.com/al8n/lsmtree/actions/workflows/ci.yml 40 | [doc-url]: https://docs.rs/lsmtree 41 | [crates-url]: https://crates.io/crates/lsmtree 42 | [codecov-url]: https://app.codecov.io/gh/al8n/lsmtree/ 43 | [license-url]: https://opensource.org/licenses/Apache-2.0 44 | [rustc-url]: https://github.com/rust-lang/rust/blob/master/RELEASES.md 45 | [license-apache-url]: https://opensource.org/licenses/Apache-2.0 46 | [license-mit-url]: https://opensource.org/licenses/MIT 47 | [zh-cn-url]: https://github.com/al8n/lsmtree/tree/main/README-zh_CN.md 48 | [libra whitepaper]: https://diem-developers-components.netlify.app/papers/the-diem-blockchain/2020-05-26.pdf 49 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 |
2 | # LSMTree
3 |
4 |
5 | 6 | A Rust library that implements a Sparse Merkle tree for a key-value store. The tree implements the same optimisations specified in the [Libra whitepaper][libra whitepaper], to reduce the number of hash operations required per tree operation to O(k) where k is the number of non-empty elements in the tree. 7 | 8 | [github][Github-url] 9 | [Build][CI-url] 10 | [codecov][codecov-url] 11 | 12 | [docs.rs][doc-url] 13 | [crates.io][crates-url] 14 | [rustc][rustc-url] 15 | 16 | [license-apache][license-apache-url] 17 | [license-mit][license-mit-url] 18 | 19 | English | [简体中文][zh-cn-url] 20 | 21 |
22 | 23 | 24 | ## Features 25 | 26 | - `no_std` supports, but needs `alloc`. 27 | - Reduce the number of hash operations required per tree operation to O(k) where k is the number of non-empty elements in the tree. 28 | - Internal implementation uses shallow copy, which powered by [`bytes::Bytes`](https://crates.io/crates/bytes). 29 | - Performance almost depends on the cryptographic crate, e.g. `sha2`. 30 | - Adaptable with [RustCrypto's crates](https://github.com/RustCrypto). All cryptographic structs which implement [`digest::Digest`](https://docs.rs/digest/latest/digest/trait.Digest.html) trait are adaptable with this crate. 31 | - Easily compactable with any other cryptographic crates. When you want to use a cryptographic crate which does not implement [`digest::Digest`](https://docs.rs/digest/latest/digest/trait.Digest.html) trait, you actually do not need to fully implement [`digest::Digest`](https://docs.rs/digest/latest/digest/trait.Digest.html) trait. 32 | 33 | e.g. only need to implement 5 methods (`new`, `update`, `digest`, `output_size`, `finalize`, actually only 3 methods) and just leave other methods `unreachable!()`. 34 | 35 | ```rust 36 | pub struct DummyHasher { 37 | data: Vec, 38 | } 39 | 40 | impl digest::OutputSizeUser for DummyHasher { 41 | type OutputSize = digest::typenum::U32; 42 | } 43 | 44 | impl digest::Digest for DummyHasher { 45 | fn new() -> Self { 46 | // your implementation here 47 | } 48 | 49 | fn finalize(mut self) -> digest::Output { 50 | // your implementation here 51 | } 52 | 53 | fn update(&mut self, data: impl AsRef<[u8]>) { 54 | // your implementation here 55 | } 56 | 57 | fn output_size() -> usize { 58 | ::output_size() 59 | } 60 | 61 | fn digest(data: impl AsRef<[u8]>) -> digest::Output { 62 | let mut h = Self::new(); 63 | h.update(data); 64 | h.finalize() 65 | } 66 | 67 | fn new_with_prefix(_data: impl AsRef<[u8]>) -> Self { 68 | unreachable!() 69 | } 70 | 71 | fn chain_update(self, _data: impl AsRef<[u8]>) -> Self { 72 | unreachable!() 73 | } 74 | 75 | fn finalize_into(self, _out: &mut digest::Output) { 76 | unreachable!() 77 | } 78 | 79 | fn finalize_reset(&mut self) -> digest::Output { 80 | unreachable!() 81 | } 82 | 83 | fn finalize_into_reset(&mut self, _out: &mut digest::Output) { 84 | unreachable!() 85 | } 86 | 87 | fn reset(&mut self) { 88 | unreachable!() 89 | } 90 | } 91 | ``` 92 | 93 | ## Installation 94 | ```toml 95 | [dependencies] 96 | lsmtree = "0.1" 97 | ``` 98 | 99 | ## Example 100 | ```rust 101 | use lsmtree::{bytes::Bytes, BadProof, KVStore, SparseMerkleTree}; 102 | use sha2::Sha256; 103 | use std::collections::HashMap; 104 | 105 | #[derive(Debug)] 106 | pub enum Error { 107 | NotFound, 108 | BadProof(BadProof), 109 | } 110 | 111 | impl From for Error { 112 | fn from(e: BadProof) -> Self { 113 | Error::BadProof(e) 114 | } 115 | } 116 | 117 | impl core::fmt::Display for Error { 118 | fn fmt(&self, f: &mut core::fmt::Formatter) -> core::fmt::Result { 119 | write!(f, "Error") 120 | } 121 | } 122 | 123 | impl std::error::Error for Error {} 124 | 125 | #[derive(Debug, Clone, Default)] 126 | pub struct SimpleStore { 127 | data: HashMap, 128 | } 129 | 130 | impl SimpleStore { 131 | pub fn new() -> Self { 132 | Self { 133 | data: HashMap::new(), 134 | } 135 | } 136 | } 137 | 138 | impl KVStore for SimpleStore { 139 | type Error = Error; 140 | type Hasher = Sha256; 141 | 142 | fn get(&self, key: &[u8]) -> Result, Self::Error> { 143 | Ok(self.data.get(key).map(core::clone::Clone::clone)) 144 | } 145 | 146 | fn set(&mut self, key: Bytes, 
value: Bytes) -> Result<(), Self::Error> { 147 | self.data.insert(key, value); 148 | Ok(()) 149 | } 150 | 151 | fn remove(&mut self, key: &[u8]) -> Result { 152 | self.data.remove(key).ok_or(Error::NotFound) 153 | } 154 | 155 | fn contains(&self, key: &[u8]) -> Result { 156 | Ok(self.data.contains_key(key)) 157 | } 158 | } 159 | 160 | fn main() { 161 | let mut smt = SparseMerkleTree::::new(); 162 | 163 | // insert 164 | smt.update(b"key1", Bytes::from("val1")).unwrap(); 165 | 166 | // get 167 | assert_eq!(smt.get(b"key1").unwrap(), Some(Bytes::from("val1"))); 168 | 169 | // prove 170 | let proof = smt.prove(b"key1").unwrap(); 171 | assert!(proof.verify(smt.root_ref(), b"key1", b"val1")); 172 | } 173 | ``` 174 | 175 | ## Acknowledge 176 | - Thanks celestiaorg's developers for providing amazing Go version [smt](https://github.com/celestiaorg/smt) implementation. 177 | 178 | 179 | #### License 180 | 181 | `lsmtree` is under the terms of both the MIT license and the 182 | Apache License (Version 2.0). 183 | 184 | See [LICENSE-APACHE](LICENSE-APACHE), [LICENSE-MIT](LICENSE-MIT) for details. 185 | 186 | Copyright (c) 2022 Al Liu. 187 | 188 | [Github-url]: https://github.com/al8n/lsmtree/ 189 | [CI-url]: https://github.com/al8n/lsmtree/actions/workflows/ci.yml 190 | [doc-url]: https://docs.rs/lsmtree 191 | [crates-url]: https://crates.io/crates/lsmtree 192 | [codecov-url]: https://app.codecov.io/gh/al8n/lsmtree/ 193 | [license-url]: https://opensource.org/licenses/Apache-2.0 194 | [rustc-url]: https://github.com/rust-lang/rust/blob/master/RELEASES.md 195 | [license-apache-url]: https://opensource.org/licenses/Apache-2.0 196 | [license-mit-url]: https://opensource.org/licenses/MIT 197 | [zh-cn-url]: https://github.com/al8n/lsmtree/tree/main/README-zh_CN.md 198 | [libra whitepaper]: https://diem-developers-components.netlify.app/papers/the-diem-blockchain/2020-05-26.pdf 199 | -------------------------------------------------------------------------------- /benches/bench.rs: -------------------------------------------------------------------------------- 1 | use bytes::Bytes; 2 | use criterion::*; 3 | use hashbrown::HashMap; 4 | use lsmtree::{BadProof, KVStore, SparseMerkleTree}; 5 | 6 | #[derive(Debug)] 7 | pub enum Error { 8 | NotFound, 9 | BadProof(BadProof), 10 | } 11 | 12 | impl From for Error { 13 | fn from(e: BadProof) -> Self { 14 | Error::BadProof(e) 15 | } 16 | } 17 | 18 | impl core::fmt::Display for Error { 19 | fn fmt(&self, f: &mut core::fmt::Formatter) -> core::fmt::Result { 20 | write!(f, "Error") 21 | } 22 | } 23 | 24 | impl std::error::Error for Error {} 25 | 26 | #[derive(Debug, Clone, Default)] 27 | pub struct SimpleStore { 28 | data: HashMap, 29 | } 30 | 31 | impl SimpleStore { 32 | pub fn new() -> Self { 33 | Self { 34 | data: HashMap::new(), 35 | } 36 | } 37 | } 38 | 39 | impl KVStore for SimpleStore { 40 | type Error = Error; 41 | type Hasher = sha2::Sha256; 42 | 43 | fn get(&self, key: &[u8]) -> Result, Self::Error> { 44 | Ok(self.data.get(key).map(core::clone::Clone::clone)) 45 | } 46 | 47 | fn set(&mut self, key: Bytes, value: Bytes) -> Result<(), Self::Error> { 48 | self.data.insert(key, value); 49 | Ok(()) 50 | } 51 | 52 | fn remove(&mut self, key: &[u8]) -> Result { 53 | self.data.remove(key).ok_or(Error::NotFound) 54 | } 55 | 56 | fn contains(&self, key: &[u8]) -> Result { 57 | Ok(self.data.contains_key(key)) 58 | } 59 | } 60 | 61 | fn bench_update(c: &mut Criterion) { 62 | let (smn, smv) = (SimpleStore::new(), SimpleStore::new()); 63 | let mut smt = 
SparseMerkleTree::::new_with_stores(smn, smv); 64 | let mut count = 0; 65 | c.bench_function("smt update", |b| { 66 | b.iter_batched( 67 | || { 68 | let c = count; 69 | count += 1; 70 | let s = Bytes::from(c.to_string()); 71 | (s.clone(), s) 72 | }, 73 | |s| { 74 | let _ = smt.update(&s.0, s.1); 75 | }, 76 | BatchSize::NumIterations(150_000), 77 | ) 78 | }); 79 | } 80 | 81 | fn bench_remove(c: &mut Criterion) { 82 | let (smn, smv) = (SimpleStore::new(), SimpleStore::new()); 83 | let mut smt = SparseMerkleTree::::new_with_stores(smn, smv); 84 | 85 | for i in 0..100_000 { 86 | let s = Bytes::from(i.to_string()); 87 | let _ = smt.update(&s, s.clone()); 88 | } 89 | let mut count = 0; 90 | c.bench_function("smt remove", |b| { 91 | b.iter_batched( 92 | || { 93 | let c = count; 94 | count += 1; 95 | c 96 | }, 97 | |s| { 98 | let s = s.to_string(); 99 | let _ = smt.remove(s.as_bytes()); 100 | }, 101 | BatchSize::NumIterations(150_000), 102 | ) 103 | }); 104 | } 105 | 106 | criterion_group! { 107 | benches, 108 | bench_update, 109 | bench_remove, 110 | } 111 | 112 | criterion_main!(benches); 113 | -------------------------------------------------------------------------------- /ci/miri.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | set -e 3 | 4 | rustup toolchain install nightly --component miri 5 | rustup override set nightly 6 | cargo miri setup 7 | 8 | export MIRIFLAGS="-Zmiri-strict-provenance -Zmiri-disable-isolation" 9 | 10 | cargo miri test --no-default-features --target x86_64-unknown-linux-gnu -------------------------------------------------------------------------------- /ci/sanitizer.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | set -ex 4 | 5 | export ASAN_OPTIONS="detect_odr_violation=0 detect_leaks=0" 6 | 7 | # Run address sanitizer 8 | RUSTFLAGS="-Z sanitizer=address" \ 9 | cargo test --lib --all-features --target x86_64-unknown-linux-gnu 10 | 11 | # Run leak sanitizer 12 | RUSTFLAGS="-Z sanitizer=leak" \ 13 | cargo test --lib --all-features --target x86_64-unknown-linux-gnu 14 | 15 | 16 | 17 | -------------------------------------------------------------------------------- /ci/test-stable.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | set -ex 4 | 5 | cmd="${1:-test}" 6 | 7 | # Run with all features 8 | cargo "${cmd}" --all-features 9 | 10 | cargo doc --no-deps --all-features 11 | 12 | if [[ "${RUST_VERSION}" == "nightly"* ]]; then 13 | # Check benchmarks 14 | cargo check --benches 15 | 16 | # Check minimal versions 17 | cargo clean 18 | cargo update -Zminimal-versions 19 | cargo check --all-features 20 | fi 21 | -------------------------------------------------------------------------------- /examples/foo.rs: -------------------------------------------------------------------------------- 1 | use lsmtree::{bytes::Bytes, BadProof, KVStore, SparseMerkleTree}; 2 | use sha2::Sha256; 3 | use std::collections::HashMap; 4 | 5 | #[derive(Debug)] 6 | pub enum Error { 7 | NotFound, 8 | BadProof(BadProof), 9 | } 10 | 11 | impl From for Error { 12 | fn from(e: BadProof) -> Self { 13 | Error::BadProof(e) 14 | } 15 | } 16 | 17 | impl core::fmt::Display for Error { 18 | fn fmt(&self, f: &mut core::fmt::Formatter) -> core::fmt::Result { 19 | write!(f, "Error") 20 | } 21 | } 22 | 23 | impl std::error::Error for Error {} 24 | 25 | #[derive(Debug, Clone, Default)] 26 | pub struct SimpleStore { 27 | data: HashMap, 28 | } 
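// Note: the tree keeps two instances of the store, one for tree nodes and one
// for values; `SparseMerkleTree::new()` builds both from `Default`.
//
// The helper below is a small sketch, not part of the original example. It
// assumes the `prove_compact` and `SparseCompactMerkleProof::verify` APIs that
// this crate's own test suite exercises, and shows a compact-proof round trip:
// placeholder side nodes are replaced by a bit mask, shrinking the proof, and
// `verify` decompacts the proof internally before checking it.
#[allow(dead_code)]
fn compact_proof_roundtrip(
    smt: &mut SparseMerkleTree<SimpleStore>,
    key: &[u8],
    value: &[u8],
) -> bool {
    // Prove the key, compacting the proof, then verify against the current root.
    let compact = smt
        .prove_compact(key)
        .expect("proving should not fail for an in-memory store");
    compact.verify(smt.root_ref(), key, value)
}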
29 | 30 | impl SimpleStore { 31 | pub fn new() -> Self { 32 | Self { 33 | data: HashMap::new(), 34 | } 35 | } 36 | } 37 | 38 | impl KVStore for SimpleStore { 39 | type Error = Error; 40 | type Hasher = Sha256; 41 | 42 | fn get(&self, key: &[u8]) -> Result, Self::Error> { 43 | Ok(self.data.get(key).map(core::clone::Clone::clone)) 44 | } 45 | 46 | fn set(&mut self, key: Bytes, value: Bytes) -> Result<(), Self::Error> { 47 | self.data.insert(key, value); 48 | Ok(()) 49 | } 50 | 51 | fn remove(&mut self, key: &[u8]) -> Result { 52 | self.data.remove(key).ok_or(Error::NotFound) 53 | } 54 | 55 | fn contains(&self, key: &[u8]) -> Result { 56 | Ok(self.data.contains_key(key)) 57 | } 58 | } 59 | 60 | fn main() { 61 | let mut smt = SparseMerkleTree::::new(); 62 | 63 | // insert 64 | smt.update(b"key1", Bytes::from("val1")).unwrap(); 65 | 66 | // get 67 | assert_eq!(smt.get(b"key1").unwrap(), Some(Bytes::from("val1"))); 68 | 69 | // prove 70 | let proof = smt.prove(b"key1").unwrap(); 71 | assert!(proof.verify(smt.root_ref(), b"key1", b"val1")); 72 | } 73 | -------------------------------------------------------------------------------- /src/lib.rs: -------------------------------------------------------------------------------- 1 | //! A Rust library that implements a Sparse Merkle tree for a key-value map. The tree implements the same optimisations specified in the [Libra whitepaper][libra whitepaper], to reduce the number of hash operations required per tree operation to O(k) where k is the number of non-empty elements in the tree. 2 | //! 3 | //! [libra whitepaper]: https://diem-developers-components.netlify.app/papers/the-diem-blockchain/2020-05-26.pdf 4 | #![cfg_attr(not(feature = "std"), no_std)] 5 | // #![cfg_attr(feature = "nightly", feature(generic_const_exprs))] 6 | #![cfg_attr(docsrs, feature(doc_cfg))] 7 | #![cfg_attr(docsrs, allow(unused_attributes))] 8 | #![deny(missing_docs)] 9 | #![allow(clippy::declare_interior_mutable_const)] 10 | #![allow(clippy::borrow_interior_mutable_const)] 11 | extern crate alloc; 12 | mod smt; 13 | #[cfg(test)] 14 | pub use self::smt::tests::{new_sparse_merkle_tree, Error, SimpleStore}; 15 | pub use self::smt::SparseMerkleTree; 16 | 17 | mod proofs; 18 | mod tree_hasher; 19 | 20 | pub use bytes; 21 | use bytes::Bytes; 22 | pub use digest; 23 | pub use proofs::*; 24 | 25 | /// Key-Value store 26 | pub trait KVStore { 27 | /// The hasher to use for the underlying tree. 28 | type Hasher: digest::Digest; 29 | 30 | /// The Error type 31 | #[cfg(not(feature = "std"))] 32 | type Error: core::fmt::Debug + core::fmt::Display + From; 33 | 34 | /// The Error type 35 | #[cfg(feature = "std")] 36 | type Error: std::error::Error + From; 37 | 38 | /// Gets the value for a key. If not exists, returns `Ok(None)`. 39 | fn get(&self, key: &[u8]) -> Result, Self::Error>; 40 | /// Updates the value for a key. 41 | fn set(&mut self, key: Bytes, value: Bytes) -> Result<(), Self::Error>; 42 | /// Remove value by key. 43 | fn remove(&mut self, key: &[u8]) -> Result; 44 | /// Returns if key exists in the store. 
45 | fn contains(&self, key: &[u8]) -> Result; 46 | } 47 | 48 | /// Gets the bit at an offset from the most significant bit 49 | #[inline] 50 | fn get_bit_at_from_msb(data: &[u8], position: usize) -> usize { 51 | if (data[position / 8] as usize) & (1 << (8 - 1 - (position % 8))) > 0 { 52 | return 1; 53 | } 54 | 0 55 | } 56 | 57 | /// Sets the bit at an offset from the most significant bit 58 | #[inline] 59 | fn set_bit_at_from_msb(data: &mut [u8], position: usize) { 60 | let mut n = data[position / 8] as usize; 61 | n |= 1 << (8 - 1 - (position % 8)); 62 | data[position / 8] = n as u8; 63 | } 64 | 65 | #[inline] 66 | fn count_set_bits(data: &[u8]) -> usize { 67 | let mut count = 0; 68 | for i in 0..data.len() * 8 { 69 | if get_bit_at_from_msb(data, i) == 1 { 70 | count += 1; 71 | } 72 | } 73 | count 74 | } 75 | 76 | #[inline] 77 | fn count_common_prefix(a: &[u8], b: &[u8]) -> usize { 78 | let mut cnt = 0; 79 | for i in 0..a.len() * 8 { 80 | if get_bit_at_from_msb(a, i) == get_bit_at_from_msb(b, i) { 81 | cnt += 1; 82 | continue; 83 | } 84 | break; 85 | } 86 | cnt 87 | } 88 | -------------------------------------------------------------------------------- /src/proofs.rs: -------------------------------------------------------------------------------- 1 | #[cfg(test)] 2 | mod tests; 3 | 4 | use super::{ 5 | count_set_bits, get_bit_at_from_msb, set_bit_at_from_msb, 6 | smt::{DEFAULT_VALUE, RIGHT}, 7 | tree_hasher::{TreeHasher, LEAF_PREFIX}, 8 | }; 9 | use alloc::{vec, vec::Vec}; 10 | use bytes::Bytes; 11 | use core::marker::PhantomData; 12 | use digest::Digest; 13 | 14 | /// Returned when an invalid Merkle proof is supplied. 15 | pub struct BadProof; 16 | 17 | impl core::fmt::Debug for BadProof { 18 | fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { 19 | write!(f, "bad proof") 20 | } 21 | } 22 | 23 | impl core::fmt::Display for BadProof { 24 | fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { 25 | write!(f, "bad proof") 26 | } 27 | } 28 | 29 | #[cfg(feature = "std")] 30 | impl std::error::Error for BadProof {} 31 | 32 | /// SparseMerkleProof is a Merkle proof for an element in a SparseMerkleTree. 33 | #[derive(Debug, Clone)] 34 | pub struct SparseMerkleProof { 35 | /// An array of the sibling nodes leading up to the leaf of the proof. 36 | pub(crate) side_nodes: Vec, 37 | 38 | /// The data of the unrelated leaf at the position 39 | /// of the key being proven, in the case of a non-membership proof. For 40 | /// membership proofs, is nil. 41 | pub(crate) non_membership_leaf_data: Option, 42 | 43 | /// the data of the sibling node to the leaf being proven, 44 | /// required for updatable proofs. For unupdatable proofs, is nil. 45 | pub(crate) sibling_data: Option, 46 | pub(crate) _marker: PhantomData, 47 | } 48 | 49 | impl SparseMerkleProof { 50 | /// Creates a new SparseMerkleProof. 
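///
/// `side_nodes` are the sibling hashes on the path from the leaf up to the
/// root; `non_membership_leaf_data` is `None` for membership proofs; and
/// `sibling_data` is only required for updatable proofs and may be `None`
/// otherwise.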
51 | pub fn new( 52 | side_nodes: Vec, 53 | non_membership_leaf_data: Option, 54 | sibling_data: Option, 55 | ) -> Self { 56 | Self { 57 | side_nodes, 58 | non_membership_leaf_data, 59 | sibling_data, 60 | _marker: PhantomData, 61 | } 62 | } 63 | 64 | /// Get the sibling data for this proof 65 | #[inline] 66 | pub fn sibling_data(&self) -> Option<&Bytes> { 67 | self.sibling_data.as_ref() 68 | } 69 | 70 | /// get the non-membership leaf data for this proof 71 | #[inline] 72 | pub fn non_membership_leaf_data(&self) -> Option<&Bytes> { 73 | self.non_membership_leaf_data.as_ref() 74 | } 75 | 76 | /// get the side nodes for this proof 77 | #[inline] 78 | pub fn side_nodes(&self) -> &[Bytes] { 79 | &self.side_nodes 80 | } 81 | } 82 | 83 | impl SparseMerkleProof { 84 | /// Verifies a Merkle proof 85 | pub fn verify( 86 | &self, 87 | root: impl AsRef<[u8]>, 88 | key: impl AsRef<[u8]>, 89 | value: impl AsRef<[u8]>, 90 | ) -> bool { 91 | self.verify_proof(root, key, value) 92 | } 93 | 94 | /// Compacts a proof, to reduce its size. 95 | pub fn compact(&self) -> Result, BadProof> { 96 | let mut th = TreeHasher::::new(vec![0; TreeHasher::::path_size()].into()); 97 | 98 | if !self.sanity_check(&mut th) { 99 | return Err(BadProof); 100 | } 101 | 102 | let mut bit_mask = vec![0u8; ((self.side_nodes.len() as f64) / 8.0).ceil() as usize]; 103 | 104 | let compacted_side_nodes = self 105 | .side_nodes 106 | .iter() 107 | .enumerate() 108 | .filter_map(|(idx, node)| { 109 | let node = node.slice(..TreeHasher::::path_size()); 110 | if node.eq(th.placeholder_ref()) { 111 | set_bit_at_from_msb(bit_mask.as_mut_slice(), idx); 112 | None 113 | } else { 114 | Some(node) 115 | } 116 | }) 117 | .collect::>(); 118 | 119 | Ok(SparseCompactMerkleProof { 120 | side_nodes: compacted_side_nodes, 121 | non_membership_leaf_data: self.non_membership_leaf_data.clone(), 122 | bitmask: bit_mask.into(), 123 | num_side_nodes: self.side_nodes.len(), 124 | sibling_data: self.sibling_data.clone(), 125 | _marker: PhantomData, 126 | }) 127 | } 128 | 129 | /// Compacts a proof, to reduce its size. 
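///
/// Side nodes equal to the placeholder hash are dropped and recorded in a bit
/// mask instead, so only non-placeholder hashes are carried in the compact
/// proof. This variant consumes the proof; `compact` borrows it and clones the
/// retained data instead.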
130 | pub fn compact_into(self) -> Result, BadProof> { 131 | let mut th = TreeHasher::::new(vec![0; TreeHasher::::path_size()].into()); 132 | 133 | if !self.sanity_check(&mut th) { 134 | return Err(BadProof); 135 | } 136 | 137 | let num_side_nodes = self.side_nodes.len(); 138 | let SparseMerkleProof { 139 | side_nodes, 140 | non_membership_leaf_data, 141 | sibling_data, 142 | _marker: _, 143 | } = self; 144 | let mut bit_mask = vec![0u8; ((num_side_nodes as f64) / 8.0).ceil() as usize]; 145 | 146 | let compacted_side_nodes = side_nodes 147 | .into_iter() 148 | .enumerate() 149 | .filter_map(|(idx, node)| { 150 | let node = node.slice(..TreeHasher::::path_size()); 151 | if node.eq(th.placeholder_ref()) { 152 | set_bit_at_from_msb(bit_mask.as_mut_slice(), idx); 153 | None 154 | } else { 155 | Some(node) 156 | } 157 | }) 158 | .collect::>(); 159 | 160 | Ok(SparseCompactMerkleProof { 161 | side_nodes: compacted_side_nodes, 162 | non_membership_leaf_data, 163 | bitmask: bit_mask.into(), 164 | num_side_nodes, 165 | sibling_data, 166 | _marker: PhantomData, 167 | }) 168 | } 169 | 170 | #[inline] 171 | fn verify_proof( 172 | &self, 173 | root: impl AsRef<[u8]>, 174 | key: impl AsRef<[u8]>, 175 | value: impl AsRef<[u8]>, 176 | ) -> bool { 177 | let mut th = TreeHasher::::new(vec![0; TreeHasher::::path_size()].into()); 178 | let path = th.path(key); 179 | 180 | if !self.sanity_check(&mut th) { 181 | return false; 182 | } 183 | 184 | let mut current_hash; 185 | // Determine what the leaf hash should be. 186 | if value.as_ref().eq(&DEFAULT_VALUE) { 187 | // Non-membership proof. 188 | match &self.non_membership_leaf_data { 189 | Some(data) => { 190 | let (actual_path, value_hash) = TreeHasher::::parse_leaf(data); 191 | if actual_path.eq(path.as_slice()) { 192 | // This is not an unrelated leaf; non-membership proof failed. 193 | return false; 194 | } 195 | 196 | current_hash = th.digest_leaf_hash(actual_path, value_hash); 197 | } 198 | None => { 199 | current_hash = th.placeholder(); 200 | } 201 | } 202 | } else { 203 | let value_hash = th.digest(value); 204 | 205 | current_hash = th.digest_leaf_hash(path, value_hash); 206 | } 207 | let num = self.side_nodes.len(); 208 | // Recompute root. 209 | self.side_nodes.iter().enumerate().for_each(|(idx, path)| { 210 | let node = path.slice(..TreeHasher::::path_size()); 211 | if get_bit_at_from_msb(path, num - 1 - idx) == RIGHT { 212 | (current_hash, _) = th.digest_node(node, ¤t_hash); 213 | } else { 214 | (current_hash, _) = th.digest_node(¤t_hash, node); 215 | } 216 | }); 217 | 218 | current_hash.eq(root.as_ref()) 219 | } 220 | 221 | pub(crate) fn verify_proof_with_updates( 222 | &self, 223 | root: impl AsRef<[u8]>, 224 | key: impl AsRef<[u8]>, 225 | value: impl AsRef<[u8]>, 226 | ) -> (bool, Vec<(Bytes, Bytes)>) 227 | where 228 | H: Digest, 229 | { 230 | let mut th = TreeHasher::::new(vec![0; TreeHasher::::path_size()].into()); 231 | let path = th.path(key); 232 | if !self.sanity_check(&mut th) { 233 | return (false, vec![]); 234 | } 235 | 236 | let mut updates = Vec::with_capacity(self.side_nodes.len() + 1); 237 | let mut current_hash; 238 | // Determine what the leaf hash should be. 239 | if value.as_ref().eq(&DEFAULT_VALUE) { 240 | // Non-membership proof. 241 | match &self.non_membership_leaf_data { 242 | Some(data) => { 243 | let (actual_path, value_hash) = TreeHasher::::parse_leaf(data); 244 | if actual_path.eq(path.as_slice()) { 245 | // This is not an unrelated leaf; non-membership proof failed. 
246 | return (false, vec![]); 247 | } 248 | 249 | let (hash, data) = th.digest_leaf(actual_path, value_hash); 250 | current_hash = hash; 251 | updates.push((current_hash.clone(), data)); 252 | } 253 | None => { 254 | current_hash = th.placeholder(); 255 | } 256 | } 257 | } else { 258 | let value_hash = th.digest(value); 259 | 260 | let (hash, data) = th.digest_leaf(path.as_ref(), value_hash); 261 | current_hash = hash; 262 | updates.push((current_hash.clone(), data)); 263 | } 264 | 265 | // Recompute root. 266 | let num = self.side_nodes.len(); 267 | self.side_nodes 268 | .iter() 269 | .enumerate() 270 | .for_each(|(idx, side_node)| { 271 | let node = side_node.slice(..TreeHasher::::path_size()); 272 | if get_bit_at_from_msb(path.as_ref(), num - 1 - idx) == RIGHT { 273 | let (hash, data) = th.digest_node(node, ¤t_hash); 274 | current_hash = hash; 275 | updates.push((current_hash.clone(), data)); 276 | } else { 277 | let (hash, data) = th.digest_node(¤t_hash, node); 278 | current_hash = hash; 279 | updates.push((current_hash.clone(), data)); 280 | } 281 | }); 282 | (current_hash.eq(root.as_ref()), updates) 283 | } 284 | 285 | fn sanity_check(&self, th: &mut TreeHasher) -> bool { 286 | // Do a basic sanity check on the proof, so that a malicious proof cannot 287 | // cause the verifier to fatally exit (e.g. due to an index out-of-range 288 | // error) or cause a CPU DoS attack. 289 | 290 | // Check that the number of supplied sidenodes does not exceed the maximum possible. 291 | if self.side_nodes.len() > TreeHasher::::path_size() * 8 || 292 | // Check that leaf data for non-membership proofs is the correct size. 293 | self.check_non_membership_proofs_size(th) 294 | { 295 | return false; 296 | } 297 | 298 | // Check that all supplied sidenodes are the correct size. 299 | for side_node in &self.side_nodes { 300 | if side_node.len() != ::output_size() { 301 | return false; 302 | } 303 | } 304 | 305 | if self.side_nodes.is_empty() { 306 | return true; 307 | } 308 | 309 | // Check that the sibling data hashes to the first side node if not nil 310 | match &self.sibling_data { 311 | Some(sibling_data) => { 312 | let sibling_hash = th.digest(sibling_data); 313 | self.side_nodes[0].eq(sibling_hash.as_slice()) 314 | } 315 | None => true, 316 | } 317 | } 318 | 319 | #[inline] 320 | fn check_non_membership_proofs_size(&self, _th: &TreeHasher) -> bool { 321 | if let Some(non_membership_proofs) = &self.non_membership_leaf_data { 322 | non_membership_proofs.len() 323 | != LEAF_PREFIX.len() 324 | + TreeHasher::::path_size() 325 | + ::output_size() 326 | } else { 327 | false 328 | } 329 | } 330 | } 331 | 332 | /// SparseCompactMerkleProof is a compact Merkle proof for an element in a SparseMerkleTree. 333 | #[derive(Debug, Clone)] 334 | pub struct SparseCompactMerkleProof { 335 | /// An array of the sibling nodes leading up to the leaf of the proof. 336 | side_nodes: Vec, 337 | 338 | /// The data of the unrelated leaf at the position 339 | /// of the key being proven, in the case of a non-membership proof. For 340 | /// membership proofs, is nil. 341 | non_membership_leaf_data: Option, 342 | 343 | /// BitMask, in the case of a compact proof, is a bit mask of the sidenodes 344 | /// of the proof where an on-bit indicates that the sidenode at the bit's 345 | /// index is a placeholder. This is only set if the proof is compact. 346 | bitmask: Bytes, 347 | 348 | /// In the case of a compact proof, indicates the number of 349 | /// sidenodes in the proof when decompacted. 
This is only set if the proof is compact. 350 | num_side_nodes: usize, 351 | 352 | // the data of the sibling node to the leaf being proven, 353 | /// required for updatable proofs. For unupdatable proofs, is nil. 354 | sibling_data: Option, 355 | 356 | _marker: PhantomData, 357 | } 358 | 359 | impl SparseCompactMerkleProof { 360 | /// Creates a new SparseCompactMerkleProof. 361 | pub fn new( 362 | side_nodes: Vec, 363 | non_membership_leaf_data: Option, 364 | bitmask: Bytes, 365 | num_side_nodes: usize, 366 | sibling_data: Option, 367 | ) -> Self { 368 | Self { 369 | side_nodes, 370 | non_membership_leaf_data, 371 | bitmask, 372 | num_side_nodes, 373 | sibling_data, 374 | _marker: PhantomData, 375 | } 376 | } 377 | 378 | /// Get the sibility of for this proof 379 | #[inline] 380 | pub fn sibling_data(&self) -> Option<&Bytes> { 381 | self.sibling_data.as_ref() 382 | } 383 | 384 | /// Get the non-membership leaf data for this proof 385 | #[inline] 386 | pub fn non_membership_leaf_data(&self) -> Option<&Bytes> { 387 | self.non_membership_leaf_data.as_ref() 388 | } 389 | 390 | /// Get the original number of side nodes 391 | #[inline] 392 | pub fn original_side_nodes_len(&self) -> usize { 393 | self.num_side_nodes 394 | } 395 | 396 | /// Get the side nodes for this compacted proof 397 | #[inline] 398 | pub fn side_nodes(&self) -> &[Bytes] { 399 | &self.side_nodes 400 | } 401 | } 402 | 403 | impl SparseCompactMerkleProof { 404 | fn sanity_check(&self, _th: &mut TreeHasher) -> bool { 405 | // Do a basic sanity check on the proof on the fields of the proof specific to 406 | // the compact proof only. 407 | // 408 | // When the proof is de-compacted and verified, the sanity check for the 409 | // de-compacted proof should be executed. 410 | 411 | // Compact proofs: check that NumSideNodes is within the right range. 412 | if self.num_side_nodes > TreeHasher::::path_size() * 8 || 413 | // Compact proofs: check that the length of the bit mask is as expected 414 | // according to NumSideNodes. 415 | self.bitmask.len() != ((self.num_side_nodes as f64 ) / 8f64).ceil() as usize || 416 | // Compact proofs: check that the correct number of sidenodes have been 417 | // supplied according to the bit mask. 
418 | (self.num_side_nodes > 0 && self.side_nodes.len() != self.num_side_nodes - count_set_bits(&self.bitmask)) 419 | { 420 | return false; 421 | } 422 | 423 | true 424 | } 425 | 426 | /// Verifies a Merkle proof 427 | pub fn verify( 428 | &self, 429 | root: impl AsRef<[u8]>, 430 | key: impl AsRef<[u8]>, 431 | value: impl AsRef<[u8]>, 432 | ) -> bool { 433 | self.decompact() 434 | .map(|proof| proof.verify(root, key, value)) 435 | .unwrap_or(false) 436 | } 437 | 438 | /// Decompacts a proof, so that it can be used for verify 439 | pub fn decompact(&self) -> Result, BadProof> { 440 | let mut th = TreeHasher::::new(vec![0; TreeHasher::::path_size()].into()); 441 | 442 | if !self.sanity_check(&mut th) { 443 | return Err(BadProof); 444 | } 445 | 446 | let mut position = 0; 447 | let nodes = (0..self.num_side_nodes) 448 | .map(|idx| { 449 | if get_bit_at_from_msb(&self.bitmask, idx) == 1 { 450 | th.placeholder() 451 | } else { 452 | position += 1; 453 | self.side_nodes[position - 1].clone() 454 | } 455 | }) 456 | .collect::>(); 457 | 458 | Ok(SparseMerkleProof { 459 | side_nodes: nodes, 460 | non_membership_leaf_data: self.non_membership_leaf_data.clone(), 461 | sibling_data: self.sibling_data.clone(), 462 | _marker: PhantomData, 463 | }) 464 | } 465 | 466 | /// Decompacts a proof, so that it can be used for verify 467 | pub fn decompact_into(self) -> Result, BadProof> { 468 | let mut th = TreeHasher::::new(vec![0; TreeHasher::::path_size()].into()); 469 | 470 | if !self.sanity_check(&mut th) { 471 | return Err(BadProof); 472 | } 473 | 474 | let mut position = 0; 475 | let SparseCompactMerkleProof { 476 | side_nodes, 477 | non_membership_leaf_data, 478 | sibling_data, 479 | bitmask, 480 | num_side_nodes, 481 | _marker, 482 | } = self; 483 | 484 | let nodes = (0..num_side_nodes) 485 | .map(|idx| { 486 | if get_bit_at_from_msb(&bitmask, idx) == 1 { 487 | th.placeholder() 488 | } else { 489 | position += 1; 490 | side_nodes[position - 1].clone() 491 | } 492 | }) 493 | .collect::>(); 494 | 495 | Ok(SparseMerkleProof { 496 | side_nodes: nodes, 497 | non_membership_leaf_data, 498 | sibling_data, 499 | _marker, 500 | }) 501 | } 502 | } 503 | -------------------------------------------------------------------------------- /src/proofs/tests.rs: -------------------------------------------------------------------------------- 1 | use super::*; 2 | use bytes::Bytes; 3 | use rand::RngCore; 4 | 5 | use crate::{ 6 | new_sparse_merkle_tree, smt::DEFAULT_VALUE, tree_hasher::TreeHasher, SparseCompactMerkleProof, 7 | SparseMerkleProof, 8 | }; 9 | 10 | // Test base case Merkle proof operations. 11 | #[test] 12 | fn test_proofs_basic() { 13 | let mut smt = new_sparse_merkle_tree(); 14 | 15 | // Generate and verify a proof on an empty key. 16 | let proof = smt.prove(b"testKey3").unwrap(); 17 | check_compact_equivalence(&proof); 18 | 19 | assert!(proof.verify( 20 | vec![0; ::output_size()], 21 | b"testKey3", 22 | DEFAULT_VALUE 23 | )); 24 | 25 | assert!(!proof.verify(vec![], b"testKey3", b"badValue")); 26 | 27 | // Add a key, generate and verify a Merkle proof. 28 | smt.update(b"testKey", Bytes::from("testValue")).unwrap(); 29 | let root = smt.root(); 30 | let proof = smt.prove(b"testKey").unwrap(); 31 | check_compact_equivalence(&proof); 32 | 33 | assert!(proof.verify(root.clone(), b"testKey", b"testValue")); 34 | 35 | assert!(!proof.verify(root, b"testKey", b"badValue")); 36 | 37 | // Add a key, generate and verify both Merkle proofs. 
38 | smt.update(b"testKey2", Bytes::from("testValue")).unwrap(); 39 | let root = smt.root(); 40 | let proof = smt.prove(b"testKey2").unwrap(); 41 | check_compact_equivalence(&proof); 42 | assert!(proof.verify(root.clone(), b"testKey2", b"testValue")); 43 | assert!(!proof.verify(root.clone(), b"testKey2", b"badValue")); 44 | 45 | assert!(!randomise_proof(&proof).verify(root.clone(), b"testKey2", b"testValue")); 46 | 47 | // Try proving a default value for a non-default leaf. 48 | let th = TreeHasher::::default(); 49 | let (_, leaf_data) = th.digest_leaf(th.path(b"testKey2"), th.digest(b"testValue")); 50 | let proof = SparseMerkleProof::::new(proof.side_nodes, Some(leaf_data), None); 51 | 52 | assert!(!proof.verify(root.clone(), b"testKey2", DEFAULT_VALUE)); 53 | 54 | // Generate and verify a proof on an empty key. 55 | let proof = smt.prove(b"testKey3").unwrap(); 56 | check_compact_equivalence(&proof); 57 | assert!(proof.verify(root.clone(), b"testKey3", DEFAULT_VALUE)); 58 | assert!(!proof.verify(root.clone(), b"testKey3", b"badValue")); 59 | assert!(!randomise_proof(&proof).verify(root, b"testKey3", DEFAULT_VALUE)); 60 | } 61 | 62 | // Test sanity check cases for non-compact proofs. 63 | #[test] 64 | fn test_proofs_sanity_check() { 65 | let mut smt = new_sparse_merkle_tree(); 66 | 67 | smt.update(b"testKey1", Bytes::from("testValue1")).unwrap(); 68 | smt.update(b"testKey2", Bytes::from("testValue2")).unwrap(); 69 | smt.update(b"testKey3", Bytes::from("testValue3")).unwrap(); 70 | 71 | smt.update(b"testKey4", Bytes::from("testValue4")).unwrap(); 72 | let root = smt.root(); 73 | let mut th = TreeHasher::::default(); 74 | 75 | // Case: invalid number of sidenodes. 76 | let mut proof = smt.prove(b"testKey1").unwrap(); 77 | let side_nodes = (0..TreeHasher::::path_size() * 8 + 1) 78 | .map(|_| proof.side_nodes[0].clone()) 79 | .collect(); 80 | 81 | proof.side_nodes = side_nodes; 82 | assert!(!proof.sanity_check(&mut th)); 83 | assert!(!proof.verify(root.clone(), b"testKey1", b"testValue1")); 84 | assert!(proof.compact().is_err()); 85 | 86 | // Case: incorrect size for NonMembershipLeafData. 87 | let mut proof = smt.prove(b"testKey1").unwrap(); 88 | proof.non_membership_leaf_data = Some(Bytes::from(vec![0; 1])); 89 | assert!(!proof.sanity_check(&mut th)); 90 | assert!(!proof.verify(root.clone(), b"testKey1", b"testValue1")); 91 | assert!(proof.compact().is_err()); 92 | 93 | // Case: unexpected sidenode size. 94 | let mut proof = smt.prove(b"testKey1").unwrap(); 95 | proof.side_nodes[0] = Bytes::from(vec![0; 1]); 96 | assert!(!proof.sanity_check(&mut th)); 97 | assert!(!proof.verify(root.clone(), b"testKey1", b"testValue1")); 98 | assert!(proof.compact().is_err()); 99 | 100 | // Case: incorrect non-nil sibling data 101 | let mut proof = smt.prove(b"testKey1").unwrap(); 102 | proof.sibling_data = Some( 103 | th.digest(proof.sibling_data.unwrap_or_default()) 104 | .as_slice() 105 | .to_vec() 106 | .into(), 107 | ); 108 | assert!(!proof.sanity_check(&mut th)); 109 | assert!(!proof.verify(root, b"testKey1", b"testValue1")); 110 | assert!(proof.compact().is_err()); 111 | } 112 | 113 | // Test sanity check cases for compact proofs. 
114 | #[test] 115 | fn test_compact_proofs_sanity_check() { 116 | let mut smt = new_sparse_merkle_tree(); 117 | 118 | smt.update(b"testKey1", Bytes::from("testValue1")).unwrap(); 119 | smt.update(b"testKey2", Bytes::from("testValue2")).unwrap(); 120 | smt.update(b"testKey3", Bytes::from("testValue3")).unwrap(); 121 | 122 | smt.update(b"testKey4", Bytes::from("testValue4")).unwrap(); 123 | let root = smt.root(); 124 | 125 | // Case (compact proofs): NumSideNodes out of range. 126 | let mut proof = smt.prove_compact(b"testKey1").unwrap(); 127 | proof.num_side_nodes = 0; 128 | let mut th = TreeHasher::::default(); 129 | assert!(!proof.sanity_check(&mut th)); 130 | 131 | proof.num_side_nodes = TreeHasher::::path_size() * 8 + 1; 132 | assert!(!proof.sanity_check(&mut th)); 133 | 134 | assert!(!proof.verify(root.clone(), b"testKey1", b"testValue1")); 135 | 136 | // Case (compact proofs): unexpected bit mask length. 137 | let mut proof = smt.prove_compact(b"testKey1").unwrap(); 138 | proof.num_side_nodes = 10; 139 | assert!(!proof.verify(root.clone(), b"testKey1", b"testValue1")); 140 | 141 | // Case (compact proofs): unexpected number of sidenodes for number of side nodes. 142 | let mut proof = smt.prove_compact(b"testKey1").unwrap(); 143 | proof.side_nodes.extend(proof.side_nodes.clone()); 144 | assert!(!proof.sanity_check(&mut th)); 145 | assert!(!proof.verify(root, b"testKey1", b"testValue1")); 146 | } 147 | 148 | fn check_compact_equivalence(proof: &SparseMerkleProof) { 149 | let compact = proof.compact().unwrap(); 150 | let decompact = SparseCompactMerkleProof::::decompact(&compact).unwrap(); 151 | 152 | proof.side_nodes.iter().enumerate().for_each(|(idx, node)| { 153 | assert_eq!(node, &decompact.side_nodes[idx]); 154 | }); 155 | 156 | assert_eq!( 157 | proof.non_membership_leaf_data, 158 | decompact.non_membership_leaf_data 159 | ); 160 | } 161 | 162 | fn randomise_proof(proof: &SparseMerkleProof) -> SparseMerkleProof { 163 | let mut rng = rand::thread_rng(); 164 | let nodes = (0..proof.side_nodes.len()) 165 | .map(|i| { 166 | let mut node = vec![0; proof.side_nodes[i].len()]; 167 | rng.fill_bytes(node.as_mut_slice()); 168 | Bytes::from(node) 169 | }) 170 | .collect::>(); 171 | 172 | SparseMerkleProof::new(nodes, proof.non_membership_leaf_data.clone(), None) 173 | } 174 | -------------------------------------------------------------------------------- /src/smt.rs: -------------------------------------------------------------------------------- 1 | use crate::BadProof; 2 | 3 | use super::{ 4 | count_common_prefix, get_bit_at_from_msb, tree_hasher::TreeHasher, KVStore, 5 | SparseCompactMerkleProof, SparseMerkleProof, 6 | }; 7 | use alloc::boxed::Box; 8 | use alloc::{vec, vec::Vec}; 9 | use bytes::Bytes; 10 | use core::ops::Deref; 11 | #[cfg(test)] 12 | pub mod tests; 13 | 14 | pub(crate) const RIGHT: usize = 1; 15 | pub(crate) const DEFAULT_VALUE: Bytes = Bytes::new(); 16 | 17 | /// Sparse Merkle tree. 
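///
/// Backed by two `KVStore`s of the same type: `nodes` maps node hashes to their
/// encoded data, `values` maps hashed key paths to leaf values, and `root` holds
/// the current root hash.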
18 | pub struct SparseMerkleTree { 19 | th: TreeHasher, 20 | nodes: S, 21 | values: S, 22 | root: Bytes, 23 | } 24 | 25 | impl core::fmt::Debug for SparseMerkleTree { 26 | fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { 27 | f.debug_struct(core::any::type_name::()) 28 | .field("nodes", &self.nodes) 29 | .field("values", &self.values) 30 | .field("root", &self.root().as_ref()) 31 | .field("tree_hasher", &self.th) 32 | .finish() 33 | } 34 | } 35 | 36 | impl Default for SparseMerkleTree { 37 | fn default() -> Self { 38 | let th = TreeHasher::new(vec![0; TreeHasher::::path_size()].into()); 39 | let root = th.placeholder(); 40 | Self { 41 | th, 42 | nodes: S::default(), 43 | values: S::default(), 44 | root, 45 | } 46 | } 47 | } 48 | 49 | impl Clone for SparseMerkleTree { 50 | fn clone(&self) -> Self { 51 | Self { 52 | th: self.th.clone(), 53 | nodes: self.nodes.clone(), 54 | values: self.values.clone(), 55 | root: self.root.clone(), 56 | } 57 | } 58 | } 59 | 60 | impl SparseMerkleTree { 61 | /// Create a new sparse merkle tree 62 | pub fn new() -> Self { 63 | Self::default() 64 | } 65 | } 66 | 67 | impl SparseMerkleTree { 68 | /// Create a sparse merkle tree based on the given stores 69 | #[inline] 70 | pub fn new_with_stores(nodes_store: S, values_store: S) -> Self { 71 | let th = TreeHasher::new(vec![0; TreeHasher::::path_size()].into()); 72 | let root = th.placeholder(); 73 | Self { 74 | th, 75 | nodes: nodes_store, 76 | values: values_store, 77 | root, 78 | } 79 | } 80 | 81 | /// Imports a Sparse Merkle tree from non-empty `KVStore`. 82 | #[inline] 83 | pub fn import(nodes_store: S, values_store: S, root: impl Into) -> Self { 84 | Self { 85 | th: TreeHasher::new(vec![0; TreeHasher::::path_size()].into()), 86 | nodes: nodes_store, 87 | values: values_store, 88 | root: root.into(), 89 | } 90 | } 91 | 92 | /// Returns the root of the sparse merkle tree 93 | #[inline] 94 | pub fn root(&self) -> Bytes { 95 | self.root.clone() 96 | } 97 | 98 | /// Returns the root reference of the sparse merkle tree 99 | #[inline] 100 | pub fn root_ref(&self) -> &[u8] { 101 | &self.root 102 | } 103 | 104 | /// Set new root for the tree 105 | #[inline] 106 | pub fn set_root(&mut self, root: impl Into) { 107 | self.root = root.into(); 108 | } 109 | 110 | #[inline] 111 | fn depth(&self) -> usize { 112 | TreeHasher::::path_size() * 8 113 | } 114 | 115 | /// Gets the value of a key from the tree. 116 | pub fn get(&self, key: &[u8]) -> Result, ::Error> { 117 | if self.root.as_ref().eq(self.th.placeholder_ref()) { 118 | return Ok(None); 119 | } 120 | 121 | let path = self.th.path(key); 122 | match self.values.get(path.as_ref()) { 123 | Ok(value) => Ok(value), 124 | Err(e) => Err(e), 125 | } 126 | } 127 | 128 | /// Returns true if the value at the given key is non-default, false 129 | /// otherwise. 130 | pub fn contains(&self, key: &[u8]) -> Result::Error> { 131 | if self.root.as_ref().eq(self.th.placeholder_ref()) { 132 | return Ok(false); 133 | } 134 | let path = self.th.path(key); 135 | self.values.contains(path.as_ref()) 136 | } 137 | 138 | /// Removes a value from tree. 139 | pub fn remove(&mut self, key: &[u8]) -> Result<(), ::Error> { 140 | self.update(key, DEFAULT_VALUE) 141 | } 142 | 143 | /// Removes a value from tree at a specific root. It returns the new root of the tree. 
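/// Equivalent to calling `update_for_root` with the default (empty) value for the key.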
144 | pub fn remove_for_root( 145 | &mut self, 146 | key: &[u8], 147 | root: Bytes, 148 | ) -> Result::Error> { 149 | self.update_for_root(key, DEFAULT_VALUE, root) 150 | } 151 | 152 | fn remove_with_side_nodes( 153 | &mut self, 154 | path: &[u8], 155 | side_nodes: Vec, 156 | path_nodes: Vec, 157 | old_leaf_data: Option, 158 | ) -> Result, ::Error> { 159 | if path_nodes[0].eq(self.th.placeholder_ref()) { 160 | // This key is already empty as it is a placeholder; return an None. 161 | return Ok(None); 162 | } 163 | 164 | let (actual_path, _) = TreeHasher::::parse_leaf(old_leaf_data.as_ref().unwrap()); 165 | if path.ne(actual_path) { 166 | // This key is already empty as a different key was found its place; return an error. 167 | return Ok(None); 168 | } 169 | 170 | // All nodes above the deleted leaf are now orphaned 171 | for node in path_nodes { 172 | self.nodes.remove(node.as_ref())?; 173 | } 174 | 175 | let side_nodes_num = side_nodes.len(); 176 | let mut current_data = Bytes::new(); 177 | let mut current_hash = Bytes::new(); 178 | let mut non_placeholder_reached = false; 179 | for (idx, side_node) in side_nodes.into_iter().enumerate() { 180 | if current_data.is_empty() { 181 | let side_node_value = self.nodes.get(side_node.as_ref())?; 182 | if TreeHasher::::is_leaf(&side_node_value) { 183 | // This is the leaf sibling that needs to be bubbled up the tree. 184 | current_hash = side_node.clone(); 185 | current_data = side_node.clone(); 186 | continue; 187 | } else { 188 | // This is the node sibling that needs to be left in its place. 189 | current_data = self.th.placeholder(); 190 | non_placeholder_reached = true; 191 | } 192 | } 193 | 194 | if !non_placeholder_reached && side_node.eq(self.th.placeholder_ref()) { 195 | // We found another placeholder sibling node, keep going up the 196 | // tree until we find the first sibling that is not a placeholder. 197 | continue; 198 | } else if !non_placeholder_reached { 199 | // We found the first sibling node that is not a placeholder, it is 200 | // time to insert our leaf sibling node here. 201 | non_placeholder_reached = true; 202 | } 203 | 204 | if get_bit_at_from_msb(path, side_nodes_num - idx - 1) == RIGHT { 205 | (current_hash, current_data) = self.th.digest_node(side_node, ¤t_data); 206 | } else { 207 | (current_hash, current_data) = self.th.digest_node(¤t_data, side_node); 208 | } 209 | 210 | self.nodes.set(current_hash.clone(), current_data.clone())?; 211 | 212 | current_data = current_hash.clone(); 213 | } 214 | 215 | if current_hash.is_empty() { 216 | // The tree is empty; return placeholder value as root. 217 | current_hash = self.th.placeholder(); 218 | } 219 | Ok(Some(current_hash)) 220 | } 221 | 222 | /// Sets a new value for a key in the tree. 223 | pub fn update(&mut self, key: &[u8], value: Bytes) -> Result<(), ::Error> { 224 | let new_root = self.update_for_root(key, value, self.root())?; 225 | self.set_root(new_root); 226 | Ok(()) 227 | } 228 | 229 | /// Sets a new value for a key in the tree at a specific root, and returns the new root. 
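/// Passing the default (empty) value removes the key instead; see `remove_for_root`.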
230 | pub fn update_for_root( 231 | &mut self, 232 | key: &[u8], 233 | value: Bytes, 234 | root: Bytes, 235 | ) -> Result::Error> { 236 | let path = { 237 | let path = self.th.path(key); 238 | let len = path.len(); 239 | let ptr = Box::into_raw(Box::new(path)) as *mut u8; 240 | Bytes::from(unsafe { Vec::from_raw_parts(ptr, len, len) }) 241 | }; 242 | 243 | let UpdateResult { 244 | side_nodes, 245 | path_nodes, 246 | sibling_data: _, 247 | current_data: old_leaf_data, 248 | } = self.side_nodes_for_root(&path, root.clone(), false)?; 249 | 250 | if value.eq(&DEFAULT_VALUE) { 251 | // Delete operation. 252 | let new_root = 253 | self.remove_with_side_nodes(&path, side_nodes, path_nodes, old_leaf_data)?; 254 | match new_root { 255 | Some(new_root) => { 256 | self.values.remove(&path)?; 257 | Ok(new_root) 258 | } 259 | // This key is already empty; return the old root. 260 | None => Ok(root), 261 | } 262 | } else { 263 | // Insert operation. 264 | self.update_with_side_notes(path, value, side_nodes, path_nodes, old_leaf_data) 265 | } 266 | } 267 | 268 | fn update_with_side_notes( 269 | &mut self, 270 | path: Bytes, 271 | value: Bytes, 272 | side_nodes: Vec, 273 | path_nodes: Vec, 274 | old_leaf_data: Option, 275 | ) -> Result::Error> { 276 | let depth = self.depth(); 277 | let value_hash = self.th.digest(&value); 278 | let (mut current_hash, mut current_data) = self.th.digest_leaf(&path, &value_hash); 279 | self.nodes.set(current_hash.clone(), current_data.clone())?; 280 | current_data = current_hash.clone(); 281 | 282 | // If the leaf node that sibling nodes lead to has a different actual path 283 | // than the leaf node being updated, we need to create an intermediate node 284 | // with this leaf node and the new leaf node as children. 285 | // 286 | // First, get the number of bits that the paths of the two leaf nodes share 287 | // in common as a prefix. 288 | let (common_prefix_count, old_value_hash) = if path_nodes[0].eq(self.th.placeholder_ref()) { 289 | (depth, None) 290 | } else { 291 | let (actual_path, value_hash) = 292 | TreeHasher::::parse_leaf(old_leaf_data.as_ref().unwrap()); 293 | (count_common_prefix(&path, actual_path), Some(value_hash)) 294 | }; 295 | 296 | if common_prefix_count != depth { 297 | if get_bit_at_from_msb(&path, common_prefix_count) == RIGHT { 298 | (current_hash, current_data) = self.th.digest_node(&path_nodes[0], ¤t_data); 299 | } else { 300 | (current_hash, current_data) = self.th.digest_node(¤t_data, &path_nodes[0]); 301 | } 302 | 303 | self.nodes.set(current_hash.clone(), current_data.clone())?; 304 | current_data = current_hash.clone(); 305 | } else if let Some(old_value_hash) = old_value_hash { 306 | // Short-circuit if the same value is being set 307 | if value_hash.deref().eq(old_value_hash) { 308 | return Ok(self.root()); 309 | } 310 | 311 | // If an old leaf exists, remove it 312 | self.nodes.remove(&path_nodes[0])?; 313 | self.values.remove(&path)?; 314 | } 315 | 316 | // All remaining path nodes are orphaned 317 | for node in path_nodes.into_iter().skip(1) { 318 | self.nodes.remove(&node)?; 319 | } 320 | 321 | // The offset from the bottom of the tree to the start of the side nodes. 
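// Levels below this offset have no recorded side node: when the new leaf shares
// a longer path prefix with the old leaf than the side nodes cover, those
// intermediate levels are hashed with placeholder siblings; otherwise they are
// skipped.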
322 | // Note: i-offsetOfSideNodes is the index into sideNodes[] 323 | let offset_of_side_nodes = depth - side_nodes.len(); 324 | 325 | for i in 0..self.depth() { 326 | match i.checked_sub(offset_of_side_nodes) { 327 | Some(val) => { 328 | if get_bit_at_from_msb(&path, depth - i - 1) == RIGHT { 329 | (current_hash, current_data) = 330 | self.th.digest_node(&side_nodes[val], ¤t_data); 331 | } else { 332 | (current_hash, current_data) = 333 | self.th.digest_node(¤t_data, &side_nodes[val]); 334 | } 335 | 336 | self.nodes.set(current_hash.clone(), current_data.clone())?; 337 | current_data = current_hash.clone(); 338 | } 339 | None => { 340 | if common_prefix_count != depth && common_prefix_count > depth - i - 1 { 341 | // If there are no sidenodes at this height, but the number of 342 | // bits that the paths of the two leaf nodes share in common is 343 | // greater than this depth, then we need to build up the tree 344 | // to this depth with placeholder values at siblings. 345 | if get_bit_at_from_msb(&path, depth - i - 1) == RIGHT { 346 | (current_hash, current_data) = self.th.digest_right_node(¤t_data); 347 | } else { 348 | (current_hash, current_data) = self.th.digest_left_node(¤t_data); 349 | } 350 | 351 | self.nodes.set(current_hash.clone(), current_data.clone())?; 352 | current_data = current_hash.clone(); 353 | } else { 354 | continue; 355 | } 356 | } 357 | }; 358 | } 359 | 360 | self.values.set(path, value).map(|_| current_hash) 361 | } 362 | 363 | /// Gets the value of a key from the tree by descending it. 364 | /// Use if a key was _not_ previously added with AddBranch, otherwise use Get. 365 | /// Errors if the key cannot be reached by descending. 366 | pub fn get_descend(&self, key: impl AsRef<[u8]>) -> Result, S::Error> { 367 | if self.root.eq(self.th.placeholder_ref()) { 368 | // The tree is empty 369 | return Ok(None); 370 | } 371 | 372 | let path = self.th.path(key); 373 | let depth = self.depth(); 374 | 375 | // avoid call shallow clone on root 376 | let current_data = self.nodes.get(&self.root)?; 377 | if TreeHasher::<::Hasher>::is_leaf(¤t_data) { 378 | // We've reached the end. Is this the actual leaf? 379 | let (actual_path, _) = 380 | TreeHasher::<::Hasher>::parse_leaf(current_data.as_ref().unwrap()); 381 | if path.as_ref().ne(actual_path) { 382 | // Nope. Therefore the key is actually empty. 383 | return Ok(None); 384 | } 385 | 386 | // Otherwise, yes. Return the value. 387 | return self.values.get(path.as_ref()); 388 | } 389 | 390 | let (left, right) = TreeHasher::<::Hasher>::parse_node(¤t_data); 391 | 392 | let mut current_hash = if get_bit_at_from_msb(path.as_ref(), 0) == RIGHT { 393 | right 394 | } else { 395 | left 396 | }; 397 | 398 | if current_hash.eq(self.th.placeholder_ref()) { 399 | // We've hit a placeholder value; this is the end. 400 | return Ok(None); 401 | } 402 | 403 | for i in 1..depth { 404 | let current_data = self.nodes.get(¤t_hash)?; 405 | if TreeHasher::<::Hasher>::is_leaf(¤t_data) { 406 | // We've reached the end. Is this the actual leaf? 407 | let (actual_path, _) = TreeHasher::<::Hasher>::parse_leaf( 408 | current_data.as_ref().unwrap(), 409 | ); 410 | if path.as_ref().ne(actual_path) { 411 | // Nope. Therefore the key is actually empty. 412 | return Ok(None); 413 | } 414 | 415 | // Otherwise, yes. Return the value. 
416 | return self.values.get(path.as_ref()); 417 | } 418 | 419 | let (left, right) = TreeHasher::<::Hasher>::parse_node(¤t_data); 420 | if get_bit_at_from_msb(path.as_ref(), i) == RIGHT { 421 | current_hash = right; 422 | } else { 423 | current_hash = left; 424 | } 425 | 426 | if current_hash.eq(self.th.placeholder_ref()) { 427 | // We've hit a placeholder value; this is the end. 428 | return Ok(None); 429 | } 430 | } 431 | 432 | // The following lines of code should only be reached if the path is 256 433 | // nodes high, which should be very unlikely if the underlying hash function 434 | // is collision-resistant. 435 | self.values.get(path.as_ref()) 436 | } 437 | 438 | /// Returns true if the value at the given key is non-default, false 439 | /// otherwise. 440 | /// Use if a key was _not_ previously added with AddBranch, otherwise use Has. 441 | /// Errors if the key cannot be reached by descending. 442 | pub fn has_descend(&self, key: impl AsRef<[u8]>) -> Result { 443 | self.get_descend(key).map(|v| v.is_some()) 444 | } 445 | 446 | /// Adds a branch to the tree. 447 | /// These branches are generated by `prove_for_root`. 448 | /// If the proof is invalid, a ErrBadProof is returned. 449 | /// 450 | /// If the leaf may be updated (e.g. during a state transition fraud proof), 451 | /// an updatable proof should be used. See SparseMerkleTree.ProveUpdatable. 452 | pub fn add_branch( 453 | &mut self, 454 | proof: SparseMerkleProof, 455 | key: impl AsRef<[u8]>, 456 | val: impl Into + AsRef<[u8]>, 457 | ) -> Result<(), S::Error> { 458 | let val_ref = val.as_ref(); 459 | let (result, updates) = proof.verify_proof_with_updates(&self.root, key.as_ref(), val_ref); 460 | if !result { 461 | return Err(BadProof.into()); 462 | } 463 | 464 | if val.as_ref().ne(DEFAULT_VALUE.as_ref()) { 465 | // Membership proof. 466 | self.values.set(self.th.path_into(key), val.into())?; 467 | } 468 | 469 | let SparseMerkleProof { 470 | side_nodes, 471 | non_membership_leaf_data: _, 472 | sibling_data, 473 | _marker, 474 | } = proof; 475 | 476 | // Update nodes along branch 477 | for (hash, data) in updates { 478 | self.nodes.set(hash, data)?; 479 | } 480 | 481 | // Update sibling node 482 | if let Some(sibling) = sibling_data { 483 | if !side_nodes.is_empty() { 484 | self.nodes 485 | .set(side_nodes.into_iter().take(1).next().unwrap(), sibling)?; 486 | } 487 | } 488 | 489 | Ok(()) 490 | } 491 | 492 | /// Generates a Merkle proof for a key against the current root. 493 | /// 494 | /// This proof can be used for read-only applications, but should not be used if 495 | /// the leaf may be updated (e.g. in a state transition fraud proof). For 496 | /// updatable proofs, see `prove_updatable`. 497 | pub fn prove(&self, key: impl AsRef<[u8]>) -> Result, S::Error> { 498 | self.prove_for_root(key, self.root()) 499 | } 500 | 501 | /// ProveForRoot generates a Merkle proof for a key, against a specific node. 502 | /// This is primarily useful for generating Merkle proofs for subtrees. 503 | /// 504 | /// This proof can be used for read-only applications, but should not be used if 505 | /// the leaf may be updated (e.g. in a state transition fraud proof). For 506 | /// updatable proofs, see `prove_updatable_for_root`. 507 | pub fn prove_for_root( 508 | &self, 509 | key: impl AsRef<[u8]>, 510 | root: Bytes, 511 | ) -> Result, S::Error> { 512 | self.do_prove_for_root(key, root, false) 513 | } 514 | 515 | /// Generates an updatable Merkle proof for a key against the current root. 
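/// Unlike `prove`, the returned proof also includes sibling data, which
/// `add_branch` persists so the imported branch can later be updated.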
516 | pub fn prove_updatable( 517 | &self, 518 | key: impl AsRef<[u8]>, 519 | ) -> Result, S::Error> { 520 | self.prove_updatable_for_root(key, self.root()) 521 | } 522 | 523 | /// Generates an updatable Merkle proof for a key, against a specific node. 524 | /// This is primarily useful for generating Merkle proofs for subtrees. 525 | pub fn prove_updatable_for_root( 526 | &self, 527 | key: impl AsRef<[u8]>, 528 | root: Bytes, 529 | ) -> Result, S::Error> { 530 | self.do_prove_for_root(key, root, true) 531 | } 532 | 533 | /// Generates a compacted Merkle proof for a key against the current root. 534 | pub fn prove_compact( 535 | &self, 536 | key: impl AsRef<[u8]>, 537 | ) -> Result, S::Error> { 538 | self.prove_compact_for_root(key, self.root()) 539 | } 540 | 541 | /// Generates a compacted Merkle proof for a key, at a specific root. 542 | pub fn prove_compact_for_root( 543 | &self, 544 | key: impl AsRef<[u8]>, 545 | root: Bytes, 546 | ) -> Result, S::Error> { 547 | let proof = self.do_prove_for_root(key, root, false)?; 548 | proof.compact_into().map_err(Into::into) 549 | } 550 | 551 | #[inline] 552 | fn do_prove_for_root( 553 | &self, 554 | key: impl AsRef<[u8]>, 555 | root: Bytes, 556 | is_updatable: bool, 557 | ) -> Result, S::Error> { 558 | let path = self.th.path(key); 559 | let UpdateResult { 560 | side_nodes, 561 | path_nodes, 562 | sibling_data, 563 | current_data: leaf_data, 564 | } = self.side_nodes_for_root(path.as_ref(), root, is_updatable)?; 565 | 566 | let non_empty_side_nodes = side_nodes 567 | .into_iter() 568 | .filter(|n| !n.is_empty()) 569 | .collect::>(); 570 | 571 | // Deal with non-membership proofs. If the leaf hash is the placeholder 572 | // value, we do not need to add anything else to the proof. 573 | let non_membership_leaf_data = leaf_data.and_then(|leaf_data| { 574 | if path_nodes[0].ne(self.th.placeholder_ref()) { 575 | let (actual_path, _) = TreeHasher::<::Hasher>::parse_leaf(&leaf_data); 576 | if actual_path.ne(path.as_ref()) { 577 | // This is a non-membership proof that involves showing a different leaf. 578 | // Add the leaf data to the proof. 579 | return Some(leaf_data); 580 | } 581 | } 582 | None 583 | }); 584 | 585 | Ok(SparseMerkleProof::new( 586 | non_empty_side_nodes, 587 | non_membership_leaf_data, 588 | sibling_data, 589 | )) 590 | } 591 | 592 | /// Get all the sibling nodes (sidenodes) for a given path from a given root. 593 | /// Returns an array of sibling nodes, the leaf hash found at that path, the 594 | /// leaf data, and the sibling data. 595 | /// 596 | /// If the leaf is a placeholder, the leaf data is nil. 597 | fn side_nodes_for_root( 598 | &self, 599 | path: &[u8], 600 | root: Bytes, 601 | get_sibling_data: bool, 602 | ) -> Result::Error> { 603 | // Side nodes for the path. Nodes are inserted in reverse order, then the 604 | // slice is reversed at the end. 605 | let mut side_nodes = Vec::with_capacity(self.depth()); 606 | let mut path_nodes = Vec::with_capacity(self.depth() + 1); 607 | path_nodes.push(root.clone()); 608 | 609 | if root.eq(self.th.placeholder_ref()) { 610 | return Ok(UpdateResult { 611 | side_nodes, 612 | path_nodes, 613 | sibling_data: None, 614 | current_data: None, 615 | }); 616 | } 617 | 618 | let mut current_data = self.nodes.get(&root)?; 619 | if TreeHasher::::is_leaf(¤t_data) { 620 | // If the root is a leaf, there are also no sidenodes to return. 
621 | return Ok(UpdateResult { 622 | side_nodes, 623 | path_nodes, 624 | sibling_data: None, 625 | current_data, 626 | }); 627 | } 628 | 629 | for i in 0..self.depth() { 630 | let (left_node, right_node) = TreeHasher::::parse_node(¤t_data); 631 | 632 | // Get sidenode depending on whether the path bit is on or off. 633 | let (side_node, node_hash) = if get_bit_at_from_msb(path, i) == RIGHT { 634 | (left_node, right_node) 635 | } else { 636 | (right_node, left_node) 637 | }; 638 | 639 | if node_hash.eq(self.th.placeholder_ref()) { 640 | // If the node is a placeholder, we've reached the end. 641 | if get_sibling_data { 642 | let sibling_data = self.nodes.get(&side_node)?; 643 | 644 | side_nodes.push(side_node); 645 | path_nodes.push(node_hash); 646 | side_nodes.reverse(); 647 | path_nodes.reverse(); 648 | return Ok(UpdateResult { 649 | side_nodes, 650 | path_nodes, 651 | sibling_data, 652 | current_data: None, 653 | }); 654 | } 655 | 656 | side_nodes.push(side_node); 657 | path_nodes.push(node_hash); 658 | side_nodes.reverse(); 659 | path_nodes.reverse(); 660 | 661 | return Ok(UpdateResult { 662 | side_nodes, 663 | path_nodes, 664 | sibling_data: None, 665 | current_data: None, 666 | }); 667 | } 668 | 669 | current_data = self.nodes.get(&node_hash)?; 670 | if TreeHasher::::is_leaf(¤t_data) { 671 | // If the node is a leaf, we've reached the end. 672 | if get_sibling_data { 673 | let sibling_data = self.nodes.get(&side_node)?; 674 | 675 | side_nodes.push(side_node); 676 | path_nodes.push(node_hash); 677 | side_nodes.reverse(); 678 | path_nodes.reverse(); 679 | return Ok(UpdateResult { 680 | side_nodes, 681 | path_nodes, 682 | sibling_data, 683 | current_data, 684 | }); 685 | } 686 | 687 | side_nodes.push(side_node); 688 | path_nodes.push(node_hash); 689 | side_nodes.reverse(); 690 | path_nodes.reverse(); 691 | return Ok(UpdateResult { 692 | side_nodes, 693 | path_nodes, 694 | sibling_data: None, 695 | current_data, 696 | }); 697 | } 698 | 699 | side_nodes.push(side_node); 700 | path_nodes.push(node_hash); 701 | } 702 | 703 | side_nodes.reverse(); 704 | path_nodes.reverse(); 705 | Ok(UpdateResult { 706 | side_nodes, 707 | path_nodes, 708 | sibling_data: None, 709 | current_data, 710 | }) 711 | } 712 | } 713 | 714 | struct UpdateResult { 715 | side_nodes: Vec, 716 | path_nodes: Vec, 717 | sibling_data: Option, 718 | current_data: Option, 719 | } 720 | -------------------------------------------------------------------------------- /src/smt/tests.rs: -------------------------------------------------------------------------------- 1 | use digest::generic_array::GenericArray; 2 | use hashbrown::HashMap; 3 | 4 | use crate::proofs::BadProof; 5 | 6 | use super::*; 7 | 8 | #[derive(Debug)] 9 | pub enum Error { 10 | NotFound, 11 | BadProof(BadProof), 12 | } 13 | 14 | impl From for Error { 15 | fn from(e: BadProof) -> Self { 16 | Error::BadProof(e) 17 | } 18 | } 19 | 20 | impl core::fmt::Display for Error { 21 | fn fmt(&self, f: &mut core::fmt::Formatter) -> core::fmt::Result { 22 | write!(f, "Error") 23 | } 24 | } 25 | 26 | #[cfg(feature = "std")] 27 | impl std::error::Error for Error {} 28 | 29 | #[derive(Debug, Clone, Default)] 30 | pub struct SimpleStore { 31 | data: HashMap, 32 | } 33 | 34 | impl SimpleStore { 35 | pub fn new() -> Self { 36 | Self { 37 | data: HashMap::new(), 38 | } 39 | } 40 | } 41 | 42 | impl KVStore for SimpleStore { 43 | type Error = Error; 44 | type Hasher = sha2::Sha256; 45 | 46 | fn get(&self, key: &[u8]) -> Result, Self::Error> { 47 | 
Ok(self.data.get(key).map(core::clone::Clone::clone)) 48 | } 49 | 50 | fn set(&mut self, key: Bytes, value: Bytes) -> Result<(), Self::Error> { 51 | self.data.insert(key, value); 52 | Ok(()) 53 | } 54 | 55 | fn remove(&mut self, key: &[u8]) -> Result { 56 | self.data.remove(key).ok_or(Error::NotFound) 57 | } 58 | 59 | fn contains(&self, key: &[u8]) -> Result { 60 | Ok(self.data.contains_key(key)) 61 | } 62 | } 63 | 64 | #[derive(Debug, Clone, Default)] 65 | pub struct DummyStore { 66 | data: HashMap, 67 | } 68 | 69 | impl DummyStore { 70 | pub fn new() -> Self { 71 | Self { 72 | data: HashMap::new(), 73 | } 74 | } 75 | } 76 | 77 | impl KVStore for DummyStore { 78 | type Error = Error; 79 | type Hasher = DummyHasher; 80 | 81 | fn get(&self, key: &[u8]) -> Result, Self::Error> { 82 | Ok(self.data.get(key).map(core::clone::Clone::clone)) 83 | } 84 | 85 | fn set(&mut self, key: Bytes, value: Bytes) -> Result<(), Self::Error> { 86 | self.data.insert(key, value); 87 | Ok(()) 88 | } 89 | 90 | fn remove(&mut self, key: &[u8]) -> Result { 91 | self.data.remove(key).ok_or(Error::NotFound) 92 | } 93 | 94 | fn contains(&self, key: &[u8]) -> Result { 95 | Ok(self.data.contains_key(key)) 96 | } 97 | } 98 | 99 | pub fn new_sparse_merkle_tree() -> SparseMerkleTree { 100 | let (smn, smv) = (SimpleStore::new(), SimpleStore::new()); 101 | SparseMerkleTree::::new_with_stores(smn, smv) 102 | } 103 | 104 | pub struct DummyHasher { 105 | base_hasher: D, 106 | data: Vec, 107 | } 108 | 109 | impl DummyHasher { 110 | fn new() -> Self { 111 | Self { 112 | base_hasher: D::new(), 113 | data: Vec::new(), 114 | } 115 | } 116 | } 117 | 118 | impl digest::OutputSizeUser for DummyHasher { 119 | type OutputSize = D::OutputSize; 120 | } 121 | 122 | impl digest::Digest for DummyHasher { 123 | fn new() -> Self { 124 | Self { 125 | base_hasher: D::new(), 126 | data: Vec::new(), 127 | } 128 | } 129 | 130 | fn new_with_prefix(_data: impl AsRef<[u8]>) -> Self { 131 | todo!() 132 | } 133 | 134 | fn update(&mut self, data: impl AsRef<[u8]>) { 135 | self.data.extend_from_slice(data.as_ref()); 136 | } 137 | 138 | fn chain_update(self, _data: impl AsRef<[u8]>) -> Self { 139 | todo!() 140 | } 141 | 142 | fn finalize(mut self) -> digest::Output { 143 | let mut prefix = vec![]; 144 | let mut preimage = self.data.clone(); 145 | prefix.extend(preimage.iter()); 146 | preimage = prefix; 147 | 148 | if preimage.len() >= 4 149 | && preimage[..4].eq(&[0, 0, 0, 0]) 150 | && preimage.len() == ::output_size() + 4 151 | { 152 | let digest = preimage[4..].to_vec(); 153 | GenericArray::from_iter(digest) 154 | } else { 155 | self.base_hasher.update(preimage); 156 | self.base_hasher.finalize() 157 | } 158 | } 159 | 160 | fn finalize_into(self, _out: &mut digest::Output) { 161 | todo!() 162 | } 163 | 164 | fn finalize_reset(&mut self) -> digest::Output 165 | where 166 | Self: digest::FixedOutputReset, 167 | { 168 | todo!() 169 | } 170 | 171 | fn finalize_into_reset(&mut self, _out: &mut digest::Output) 172 | where 173 | Self: digest::FixedOutputReset, 174 | { 175 | todo!() 176 | } 177 | 178 | fn reset(&mut self) 179 | where 180 | Self: digest::Reset, 181 | { 182 | todo!() 183 | } 184 | 185 | fn output_size() -> usize { 186 | ::output_size() 187 | } 188 | 189 | fn digest(data: impl AsRef<[u8]>) -> digest::Output { 190 | let mut x = Self::new(); 191 | x.update(data); 192 | x.finalize() 193 | } 194 | } 195 | 196 | #[test] 197 | fn test_smt_update_basic() { 198 | let mut smt = new_sparse_merkle_tree(); 199 | 200 | // Test getting an empty key. 
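// A freshly created tree has the placeholder root, so lookups return `None`
// and `contains` is false.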
201 | assert!(smt.get(&Bytes::from("testKey")).unwrap().is_none()); 202 | 203 | assert!(!smt.contains(&Bytes::from("testKey")).unwrap()); 204 | 205 | // Test updating the empty key. 206 | smt.update(&Bytes::from("testKey"), Bytes::from("testValue")) 207 | .unwrap(); 208 | 209 | assert_eq!( 210 | smt.get(&Bytes::from("testKey")).unwrap(), 211 | Some(Bytes::from("testValue")) 212 | ); 213 | assert!(smt.contains(&Bytes::from("testKey")).unwrap()); 214 | 215 | // Test updating the non-empty key. 216 | smt.update(&Bytes::from("testKey"), Bytes::from("testValue2")) 217 | .unwrap(); 218 | assert_eq!( 219 | smt.get(&Bytes::from("testKey")).unwrap(), 220 | Some(Bytes::from("testValue2")) 221 | ); 222 | 223 | // Test updating a second empty key where the path for both keys share the 224 | // first 2 bits (when using SHA256). 225 | smt.update(b"foo", Bytes::from("testValue")).unwrap(); 226 | assert_eq!(smt.get(b"foo").unwrap(), Some(Bytes::from("testValue"))); 227 | 228 | // Test updating a third empty key. 229 | smt.update(b"testKey2", Bytes::from("testValue")).unwrap(); 230 | assert_eq!( 231 | smt.get(b"testKey2").unwrap(), 232 | Some(Bytes::from("testValue")) 233 | ); 234 | assert_eq!( 235 | smt.get(b"testKey").unwrap(), 236 | Some(Bytes::from("testValue2")) 237 | ); 238 | 239 | // Test that a tree can be imported from a KVStore. 240 | let smt2 = 241 | SparseMerkleTree::::import(smt.nodes.clone(), smt.values.clone(), smt.root()); 242 | assert_eq!( 243 | smt2.get(b"testKey").unwrap(), 244 | Some(Bytes::from("testValue2")) 245 | ); 246 | } 247 | 248 | #[test] 249 | fn test_smt_remove_basic() { 250 | let mut smt = new_sparse_merkle_tree(); 251 | 252 | // Testing inserting, deleting a key, and inserting it again. 253 | smt.update(b"testKey", Bytes::from("testValue")).unwrap(); 254 | 255 | let root1 = smt.root(); 256 | smt.update(b"testKey", Bytes::new()).unwrap(); 257 | assert!(smt.get(b"testKey").unwrap().is_none()); 258 | assert!(!smt.contains(b"testKey").unwrap()); 259 | 260 | smt.update(b"testKey", Bytes::from("testValue")).unwrap(); 261 | assert_eq!(smt.get(b"testKey").unwrap(), Some(Bytes::from("testValue"))); 262 | assert_eq!(smt.root(), root1); 263 | 264 | // Test inserting and deleting a second key. 265 | smt.update(b"testKey2", Bytes::from("testValue")).unwrap(); 266 | smt.remove(b"testKey2").unwrap(); 267 | assert!(smt.get(b"testKey2").unwrap().is_none()); 268 | assert_eq!(smt.get(b"testKey").unwrap(), Some(Bytes::from("testValue"))); 269 | assert_eq!(root1, smt.root()); 270 | 271 | // Test inserting and deleting a different second key, when the the first 2 272 | // bits of the path for the two keys in the tree are the same (when using SHA256). 
273 | 274 | smt.update(b"foo", Bytes::from("testValue")).unwrap(); 275 | assert_eq!(smt.get(b"foo").unwrap(), Some(Bytes::from("testValue"))); 276 | smt.remove(b"foo").unwrap(); 277 | assert!(smt.get(b"foo").unwrap().is_none()); 278 | assert_eq!(smt.get(b"testKey").unwrap(), Some(Bytes::from("testValue"))); 279 | assert_eq!(root1, smt.root()); 280 | 281 | // Testing inserting, deleting a key, and inserting it again, using Delete 282 | smt.update(b"testKey", Bytes::from("testValue")).unwrap(); 283 | let root1 = smt.root(); 284 | smt.remove(b"testKey").unwrap(); 285 | assert!(smt.get(b"testKey").unwrap().is_none()); 286 | assert!(!smt.contains(b"testKey").unwrap()); 287 | smt.update(b"testKey", Bytes::from("testValue")).unwrap(); 288 | assert_eq!(smt.get(b"testKey").unwrap(), Some(Bytes::from("testValue"))); 289 | assert_eq!(smt.root(), root1); 290 | } 291 | 292 | #[test] 293 | fn test_sparse_merkle_tree_known() { 294 | let (smn, smv) = (DummyStore::new(), DummyStore::new()); 295 | let mut smt = SparseMerkleTree::::new_with_stores(smn, smv); 296 | 297 | const SIZE: usize = 36; 298 | 299 | let key1: Bytes = vec![0; SIZE].into(); 300 | 301 | let key2: Bytes = { 302 | let mut key2 = vec![0; SIZE]; 303 | key2[4] = 0b0100_0000; 304 | key2.into() 305 | }; 306 | 307 | let key3 = { 308 | let mut key3 = vec![0; SIZE]; 309 | key3[4] = 0b1000_0000; 310 | Bytes::from(key3) 311 | }; 312 | let key4 = { 313 | let mut key4 = vec![0; SIZE]; 314 | key4[4] = 0b1100_0000; 315 | Bytes::from(key4) 316 | }; 317 | let key5 = { 318 | let mut key5 = vec![0; SIZE]; 319 | key5[4] = 0b1101_0000; 320 | Bytes::from(key5) 321 | }; 322 | 323 | smt.update(&key1, Bytes::from("testValue1")).unwrap(); 324 | smt.update(&key2, Bytes::from("testValue2")).unwrap(); 325 | smt.update(&key3, Bytes::from("testValue3")).unwrap(); 326 | smt.update(&key4, Bytes::from("testValue4")).unwrap(); 327 | smt.update(&key5, Bytes::from("testValue5")).unwrap(); 328 | 329 | assert_eq!(smt.get(&key1).unwrap(), Some(Bytes::from("testValue1"))); 330 | assert_eq!(smt.get(&key2).unwrap(), Some(Bytes::from("testValue2"))); 331 | assert_eq!(smt.get(&key3).unwrap(), Some(Bytes::from("testValue3"))); 332 | assert_eq!(smt.get(&key4).unwrap(), Some(Bytes::from("testValue4"))); 333 | assert_eq!(smt.get(&key5).unwrap(), Some(Bytes::from("testValue5"))); 334 | 335 | let proof1 = smt.prove(&key1).unwrap(); 336 | let proof2 = smt.prove(&key2).unwrap(); 337 | let proof3 = smt.prove(&key3).unwrap(); 338 | let proof4 = smt.prove(&key4).unwrap(); 339 | let proof5 = smt.prove(&key5).unwrap(); 340 | 341 | let mut dsmst = 342 | SparseMerkleTree::::import(DummyStore::new(), DummyStore::new(), smt.root()); 343 | dsmst 344 | .add_branch(proof1, &key1, Bytes::from("testValue1")) 345 | .unwrap(); 346 | dsmst 347 | .add_branch(proof2, &key2, Bytes::from("testValue2")) 348 | .unwrap(); 349 | dsmst 350 | .add_branch(proof3, &key3, Bytes::from("testValue3")) 351 | .unwrap(); 352 | dsmst 353 | .add_branch(proof4, &key4, Bytes::from("testValue4")) 354 | .unwrap(); 355 | dsmst 356 | .add_branch(proof5, &key5, Bytes::from("testValue5")) 357 | .unwrap(); 358 | } 359 | 360 | #[test] 361 | fn test_sparse_merkle_tree_max_height_case() { 362 | const SIZE: usize = 36; 363 | 364 | let (smn, smv) = (DummyStore::new(), DummyStore::new()); 365 | let mut smt = SparseMerkleTree::::new_with_stores(smn, smv); 366 | 367 | // Make two neighboring keys. 
368 | // 369 | // The dummy hash function expects keys to prefixed with four bytes of 0, 370 | // which will cause it to return the preimage itself as the digest, without 371 | // the first four bytes. 372 | let key1 = Bytes::from(vec![0; SIZE]); 373 | let key2: Bytes = { 374 | let mut key2 = vec![0; SIZE]; 375 | // We make key2's least significant bit different than key1's 376 | key2[SIZE - 1] = 1; 377 | key2.into() 378 | }; 379 | 380 | smt.update(&key1, Bytes::from("testValue1")).unwrap(); 381 | smt.update(&key2, Bytes::from("testValue2")).unwrap(); 382 | 383 | assert_eq!(smt.get(&key1).unwrap(), Some(Bytes::from("testValue1"))); 384 | assert_eq!(smt.get(&key2).unwrap(), Some(Bytes::from("testValue2"))); 385 | 386 | let proof1 = smt.prove(&key1).unwrap(); 387 | assert_eq!(proof1.side_nodes().len(), 256); 388 | } 389 | 390 | #[test] 391 | fn test_deep_sparse_merkle_sub_tree_basic() { 392 | let mut smt = new_sparse_merkle_tree(); 393 | 394 | smt.update(b"testKey1", Bytes::from("testValue1")).unwrap(); 395 | smt.update(b"testKey2", Bytes::from("testValue2")).unwrap(); 396 | smt.update(b"testKey3", Bytes::from("testValue3")).unwrap(); 397 | smt.update(b"testKey4", Bytes::from("testValue4")).unwrap(); 398 | smt.update(b"testKey6", Bytes::from("testValue6")).unwrap(); 399 | 400 | let original_root = smt.root(); 401 | 402 | let proof1 = smt.prove_updatable(b"testKey1").unwrap(); 403 | let proof2 = smt.prove_updatable(b"testKey2").unwrap(); 404 | let proof5 = smt.prove_updatable(b"testKey5").unwrap(); 405 | 406 | let mut dsmst = SparseMerkleTree::import(SimpleStore::new(), SimpleStore::new(), smt.root()); 407 | dsmst 408 | .add_branch(proof1, b"testKey1", Bytes::from("testValue1")) 409 | .unwrap(); 410 | 411 | dsmst 412 | .add_branch(proof2, b"testKey2", Bytes::from("testValue2")) 413 | .unwrap(); 414 | dsmst 415 | .add_branch(proof5, b"testKey5", DEFAULT_VALUE) 416 | .unwrap(); 417 | 418 | let val = dsmst.get(b"testKey1").unwrap().unwrap(); 419 | assert_eq!(val, Bytes::from("testValue1")); 420 | 421 | let val = dsmst.get_descend(b"testKey1").unwrap().unwrap(); 422 | assert_eq!(val, Bytes::from("testValue1")); 423 | 424 | let val = dsmst.get(b"testKey2").unwrap().unwrap(); 425 | assert_eq!(val, Bytes::from("testValue2")); 426 | 427 | let val = dsmst.get_descend(b"testKey2").unwrap().unwrap(); 428 | assert_eq!(val, Bytes::from("testValue2")); 429 | 430 | let val = dsmst.get(b"testKey5").unwrap(); 431 | assert!(val.is_none()); 432 | 433 | let val = dsmst.get_descend(b"testKey5").unwrap(); 434 | assert!(val.is_none()); 435 | 436 | assert!(dsmst.get_descend(b"testKey6").unwrap().is_none()); 437 | 438 | dsmst 439 | .update(b"testKey1", Bytes::from("testValue3")) 440 | .unwrap(); 441 | dsmst.update(b"testKey2", Bytes::new()).unwrap(); 442 | dsmst 443 | .update(b"testKey5", Bytes::from("testValue5")) 444 | .unwrap(); 445 | 446 | let val = dsmst.get(b"testKey1").unwrap().unwrap(); 447 | assert_eq!(val, Bytes::from("testValue3")); 448 | 449 | let val = dsmst.get(b"testKey2").unwrap(); 450 | assert!(val.is_none()); 451 | 452 | let val = dsmst.get(b"testKey5").unwrap().unwrap(); 453 | assert_eq!(val, Bytes::from("testValue5")); 454 | 455 | smt.update(b"testKey1", Bytes::from("testValue3")).unwrap(); 456 | smt.update(b"testKey2", DEFAULT_VALUE).unwrap(); 457 | smt.update(b"testKey5", Bytes::from("testValue5")).unwrap(); 458 | assert_eq!(smt.root(), dsmst.root()); 459 | assert_ne!(smt.root(), original_root); 460 | } 461 | 462 | #[test] 463 | fn test_deep_sparse_merkle_sub_tree_bad_input() { 464 | let mut smt 
= new_sparse_merkle_tree(); 465 | 466 | smt.update(b"testKey1", Bytes::from("testValue1")).unwrap(); 467 | smt.update(b"testKey2", Bytes::from("testValue2")).unwrap(); 468 | smt.update(b"testKey3", Bytes::from("testValue3")).unwrap(); 469 | smt.update(b"testKey4", Bytes::from("testValue4")).unwrap(); 470 | 471 | let mut bad_proof = smt.prove(b"testKey1").unwrap(); 472 | let mut vec = vec![0; bad_proof.side_nodes[0].len()]; 473 | vec[1..].copy_from_slice(bad_proof.side_nodes[0][1..].as_ref()); 474 | bad_proof.side_nodes[0] = vec.into(); 475 | 476 | let mut dsmst = SparseMerkleTree::import(SimpleStore::new(), SimpleStore::new(), smt.root()); 477 | dsmst 478 | .add_branch(bad_proof, b"testKey1", Bytes::from("testValue1")) 479 | .unwrap_err(); 480 | } 481 | 482 | // TODO: implement this test 483 | #[test] 484 | fn test_orphan_removal() {} 485 | 486 | // // Test all tree operations in bulk. 487 | // #[test] 488 | // fn test_sparse_merkle_tree() { 489 | // for i in 0..5 { 490 | // eprintln!("{}: {} {} {} {}", i, 200, 100, 100, 50); 491 | // bulk_operations(200, 100, 100, 50); 492 | // } 493 | 494 | // for i in 0..5 { 495 | // eprintln!("{}: {} {} {} {}", i, 200, 100, 100, 500); 496 | // bulk_operations(200, 100, 100, 500); 497 | // } 498 | // } 499 | 500 | // fn bulk_operations(operations: usize, insert: usize, update: usize, remove: usize) { 501 | // let mut smt = new_sparse_merkle_tree(); 502 | // let max = insert + update + remove; 503 | 504 | // let mut kv = hashbrown::HashMap::new(); 505 | // let mut rng = rand::thread_rng(); 506 | // for _ in 0..operations { 507 | // let n = rng.gen_range(0..max); 508 | // if n < insert { 509 | // // insert 510 | // let key_len = 16 + rng.gen_range(0..32); 511 | // let mut key = vec![0; key_len]; 512 | // rng.fill_bytes(key.as_mut_slice()); 513 | 514 | // let val_len = 1 + rng.gen_range(0..64); 515 | // let mut val = vec![0; val_len]; 516 | // rng.fill_bytes(val.as_mut_slice()); 517 | // let val = Bytes::from(val); 518 | // smt.update(key.as_slice(), val.clone()).unwrap(); 519 | 520 | // kv.insert(Bytes::from(key), val); 521 | // } else if n > insert && n < insert + update { 522 | // // update 523 | // let keys = kv.keys().cloned().collect::>(); 524 | // if keys.is_empty() { 525 | // continue; 526 | // } 527 | 528 | // let key = keys[rng.gen_range(0..keys.len())].clone(); 529 | // let val_len = 1 + rng.gen_range(0..64); 530 | // let mut val = vec![0; val_len]; 531 | // rng.fill_bytes(val.as_mut_slice()); 532 | 533 | // let val = Bytes::from(val); 534 | 535 | // smt.update(&key, val.clone()).unwrap(); 536 | // kv.insert(key, val); 537 | // } else { 538 | // // delete 539 | // let keys = kv.keys().cloned().collect::>(); 540 | // if keys.is_empty() { 541 | // continue; 542 | // } 543 | // let key = keys[rng.gen_range(0..keys.len())].clone(); 544 | // smt.update(&key, DEFAULT_VALUE).unwrap(); 545 | // kv.insert(key, DEFAULT_VALUE); 546 | // } 547 | 548 | // bulk_check_all(&smt, &kv); 549 | // } 550 | // } 551 | 552 | // fn bulk_check_all(smt: &SparseMerkleTree, kv: &hashbrown::HashMap) { 553 | // for (k, v) in kv { 554 | // assert!(smt.get(k).unwrap().unwrap_or(DEFAULT_VALUE).eq(v)); 555 | 556 | // // Generate and verify a Merkle proof for this key. 
557 | // let proof = smt.prove(k).unwrap(); 558 | // assert!(proof.verify(smt.root(), k, v)); 559 | // let compact = smt.prove_compact(k).unwrap(); 560 | // assert!(compact.verify(smt.root(), k, v)); 561 | 562 | // if v.eq(&DEFAULT_VALUE) { 563 | // continue; 564 | // } 565 | 566 | // // Check that the key is at the correct height in the tree. 567 | // let mut largest_common_prefix = 0; 568 | // for (k2, v2) in kv { 569 | // if v2.eq(&DEFAULT_VALUE) { 570 | // continue; 571 | // } 572 | 573 | // let common_prefix = 574 | // count_common_prefix(smt.th.path(k).as_ref(), smt.th.path(k2).as_ref()); 575 | // if common_prefix == smt.depth() && common_prefix > largest_common_prefix { 576 | // largest_common_prefix = common_prefix; 577 | // } 578 | // } 579 | 580 | // let UpdateResult { 581 | // side_nodes, 582 | // path_nodes: _, 583 | // sibling_data: _, 584 | // current_data: _, 585 | // } = smt 586 | // .side_nodes_for_root(smt.th.path(k).as_ref(), smt.root(), false) 587 | // .unwrap(); 588 | 589 | // let mut num_side_nodes = 0; 590 | // for node in side_nodes { 591 | // if !node.is_empty() { 592 | // num_side_nodes += 1; 593 | // } 594 | // } 595 | 596 | // if num_side_nodes != largest_common_prefix + 1 597 | // && (num_side_nodes != 0 && largest_common_prefix != 0) 598 | // { 599 | // panic!("leaf is at unexpected height"); 600 | // } 601 | // } 602 | // } 603 | -------------------------------------------------------------------------------- /src/tree_hasher.rs: -------------------------------------------------------------------------------- 1 | use alloc::boxed::Box; 2 | use alloc::{vec, vec::Vec}; 3 | use bytes::Bytes; 4 | use digest::{generic_array::GenericArray, Digest, OutputSizeUser}; 5 | 6 | pub(crate) const LEAF_PREFIX: [u8; 1] = [0]; 7 | const NODE_PREFIX: [u8; 1] = [1]; 8 | 9 | pub(crate) struct TreeHasher { 10 | zero_value: Bytes, 11 | _marker: core::marker::PhantomData, 12 | } 13 | 14 | impl Clone for TreeHasher { 15 | fn clone(&self) -> Self { 16 | Self { 17 | zero_value: self.zero_value.clone(), 18 | _marker: core::marker::PhantomData, 19 | } 20 | } 21 | } 22 | 23 | impl core::fmt::Debug for TreeHasher { 24 | fn fmt(&self, f: &mut core::fmt::Formatter) -> core::fmt::Result { 25 | f.debug_struct(core::any::type_name::()) 26 | .field("zero_value", &self.zero_value) 27 | .finish() 28 | } 29 | } 30 | 31 | impl Default for TreeHasher { 32 | fn default() -> Self { 33 | Self { 34 | zero_value: vec![0; ::output_size()].into(), 35 | _marker: core::marker::PhantomData, 36 | } 37 | } 38 | } 39 | 40 | impl TreeHasher { 41 | pub(crate) fn new(zero_value: Bytes) -> Self { 42 | Self { 43 | _marker: Default::default(), 44 | zero_value, 45 | } 46 | } 47 | 48 | pub(crate) fn digest( 49 | &self, 50 | data: impl AsRef<[u8]>, 51 | ) -> GenericArray::OutputSize> { 52 | ::digest(data) 53 | } 54 | 55 | pub(crate) fn digest_leaf_hash( 56 | &self, 57 | path: impl AsRef<[u8]>, 58 | leaf_data: impl AsRef<[u8]>, 59 | ) -> Bytes { 60 | let path = path.as_ref(); 61 | let leaf_data = leaf_data.as_ref(); 62 | let mut value = Vec::with_capacity(1 + path.len() + leaf_data.len()); 63 | value.push(LEAF_PREFIX[0]); 64 | value.extend_from_slice(path); 65 | value.extend_from_slice(leaf_data); 66 | let ptr = Box::into_raw(Box::new(::digest(&value))) as *mut u8; 67 | let size = ::output_size(); 68 | Bytes::from(unsafe { Vec::from_raw_parts(ptr, size, size) }) 69 | } 70 | 71 | pub(crate) fn digest_leaf( 72 | &self, 73 | path: impl AsRef<[u8]>, 74 | leaf_data: impl AsRef<[u8]>, 75 | ) -> (Bytes, Bytes) { 76 | let path = 
path.as_ref(); 77 | let leaf_data = leaf_data.as_ref(); 78 | let mut value = Vec::with_capacity(1 + path.len() + leaf_data.len()); 79 | value.push(LEAF_PREFIX[0]); 80 | value.extend_from_slice(path); 81 | value.extend_from_slice(leaf_data); 82 | let ptr = Box::into_raw(Box::new(::digest(&value))) as *mut u8; 83 | let size = ::output_size(); 84 | let sum = Bytes::from(unsafe { Vec::from_raw_parts(ptr, size, size) }); 85 | (sum, value.into()) 86 | } 87 | 88 | pub(crate) fn digest_node( 89 | &self, 90 | left_data: impl AsRef<[u8]>, 91 | right_data: impl AsRef<[u8]>, 92 | ) -> (Bytes, Bytes) { 93 | let left_data = left_data.as_ref(); 94 | let right_data = right_data.as_ref(); 95 | self.digest_node_helper(left_data, right_data) 96 | } 97 | 98 | #[inline] 99 | fn digest_node_helper(&self, left_data: &[u8], right_data: &[u8]) -> (Bytes, Bytes) { 100 | let mut value = Vec::with_capacity(1 + left_data.len() + right_data.len()); 101 | value.push(NODE_PREFIX[0]); 102 | value.extend_from_slice(left_data); 103 | value.extend_from_slice(right_data); 104 | let ptr = Box::into_raw(Box::new(::digest(&value))) as *mut u8; 105 | let size = ::output_size(); 106 | let sum = Bytes::from(unsafe { Vec::from_raw_parts(ptr, size, size) }); 107 | (sum, value.into()) 108 | } 109 | 110 | pub(crate) fn digest_left_node(&self, left_data: impl AsRef<[u8]>) -> (Bytes, Bytes) { 111 | let left_data = left_data.as_ref(); 112 | let right_data = self.placeholder_ref(); 113 | self.digest_node_helper(left_data, right_data) 114 | } 115 | 116 | pub(crate) fn digest_right_node(&self, right_data: impl AsRef<[u8]>) -> (Bytes, Bytes) { 117 | let left_data = self.placeholder_ref(); 118 | let right_data = right_data.as_ref(); 119 | self.digest_node_helper(left_data, right_data) 120 | } 121 | 122 | pub(crate) fn parse_leaf(data: &[u8]) -> (&[u8], &[u8]) { 123 | let leaf_prefix_len = LEAF_PREFIX.len(); 124 | let path_size = Self::path_size(); 125 | ( 126 | &data[leaf_prefix_len..path_size + leaf_prefix_len], 127 | &data[leaf_prefix_len + path_size..], 128 | ) 129 | } 130 | 131 | pub(crate) fn parse_node(data: &Option) -> (Bytes, Bytes) { 132 | match data { 133 | Some(data) => { 134 | let node_prefix_len = NODE_PREFIX.len(); 135 | let left_size = Self::path_size(); 136 | let right_size = Self::path_size(); 137 | ( 138 | data.slice(node_prefix_len..left_size + node_prefix_len), 139 | data.slice( 140 | node_prefix_len + left_size..node_prefix_len + left_size + right_size, 141 | ), 142 | ) 143 | } 144 | None => (Bytes::new(), Bytes::new()), 145 | } 146 | } 147 | 148 | pub(crate) fn is_leaf(data: &Option>) -> bool { 149 | match data { 150 | Some(data) => { 151 | let data = data.as_ref(); 152 | let leaf_prefix_len = LEAF_PREFIX.len(); 153 | data[..leaf_prefix_len].eq(&LEAF_PREFIX) 154 | } 155 | None => false, 156 | } 157 | } 158 | 159 | pub(crate) fn path( 160 | &self, 161 | key: impl AsRef<[u8]>, 162 | ) -> GenericArray::OutputSize> { 163 | ::digest(key) 164 | } 165 | 166 | pub(crate) fn path_into(&self, key: impl AsRef<[u8]>) -> Bytes { 167 | let ptr = Box::into_raw(Box::new(::digest(key))) as *mut u8; 168 | let size = ::output_size(); 169 | Bytes::from(unsafe { Vec::from_raw_parts(ptr, size, size) }) 170 | } 171 | 172 | pub(crate) fn path_size() -> usize { 173 | ::output_size() 174 | } 175 | 176 | pub(crate) fn placeholder(&self) -> Bytes { 177 | self.zero_value.clone() 178 | } 179 | 180 | pub(crate) fn placeholder_ref(&self) -> &[u8] { 181 | &self.zero_value 182 | } 183 | } 184 | 
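// Note: the one-byte LEAF_PREFIX (0) and NODE_PREFIX (1) above domain-separate
// leaf hashes from internal-node hashes, so a leaf encoding cannot collide with
// a node encoding under the same hash (see `is_leaf` and `parse_node`).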
--------------------------------------------------------------------------------
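A minimal usage sketch, not part of the repository: it assumes a user-supplied `KVStore` implementation named `MyStore` with `Hasher = sha2::Sha256` (for example, a HashMap-backed store like the `SimpleStore` in src/smt/tests.rs), plus imports of `SparseMerkleTree` from this crate and `bytes::Bytes`. It exercises only the public API shown in src/smt.rs and src/proofs.rs.

fn demo(nodes: MyStore, values: MyStore) {
    // Build an empty tree over the two stores; the root starts as the placeholder hash.
    let mut smt = SparseMerkleTree::<MyStore>::new_with_stores(nodes, values);

    // Every write changes the root, so capture it after updating.
    smt.update(b"alice", Bytes::from("100")).unwrap();
    let root = smt.root();

    // Membership proof against the captured root.
    let proof = smt.prove(b"alice").unwrap();
    assert!(proof.verify(root.clone(), b"alice", b"100"));

    // Compact proofs replace placeholder side nodes with a bitmask for a smaller encoding.
    let compact = smt.prove_compact(b"alice").unwrap();
    assert!(compact.verify(root, b"alice", b"100"));
}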