├── .DS_Store ├── .gitignore ├── .travis.yml ├── Cargo.toml ├── LICENSE ├── README.md ├── benches ├── insert_benchmark.rs └── trie.rs └── src ├── db.rs ├── errors.rs ├── lib.rs ├── nibbles.rs ├── node.rs ├── tests └── mod.rs └── trie.rs /.DS_Store: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/citahub/cita_trie/9a8659f9f40feb3b89868f3964cdfb250f23a1c4/.DS_Store -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Compiled files 2 | *.o 3 | *.so 4 | *.rlib 5 | *.dll 6 | *.pyc 7 | 8 | # Executables 9 | *.exe 10 | 11 | # intermedia file directory 12 | target/ 13 | */target/ 14 | 15 | # Cargo lock in subs 16 | **/Cargo.lock 17 | 18 | 19 | # editor specific 20 | # vim stuff 21 | *.swp 22 | # emacs stuff 23 | *~ 24 | # vscode 25 | .vscode 26 | # jetbrains ide stuff 27 | .idea 28 | *.iml 29 | # mac stuff 30 | .DS_Store 31 | 32 | # gdb files 33 | .gdb_history 34 | 35 | # ctags 36 | TAGS 37 | 38 | # sphinx 39 | docs/build 40 | 41 | # vagrant file 42 | .vagrant 43 | 44 | # node 45 | **/node_modules 46 | yarn-error.log 47 | 48 | # docs 49 | docs/site 50 | 51 | # localtime for macos users 52 | localtime 53 | 54 | # test data 55 | **/test-rocksdb 56 | 57 | # log files 58 | logs/ 59 | 60 | # private key 61 | *_privkey 62 | 63 | # db file 64 | *.db 65 | -------------------------------------------------------------------------------- /.travis.yml: -------------------------------------------------------------------------------- 1 | language: rust 2 | rust: 3 | - stable 4 | script: 5 | - RUSTFLAGS='-F warnings' cargo build --verbose --all 6 | - RUSTFLAGS='-F warnings' cargo test --verbose --all 7 | stages: 8 | - Check 9 | - Test 10 | jobs: 11 | include: 12 | - stage: Check 13 | name: Format 14 | script: 15 | - rustup component add rustfmt 16 | - cargo fmt --all -- --check 17 | - stage: 
Check 18 | name: Clippy 19 | script: 20 | - rustup component add clippy 21 | - RUSTFLAGS='-F warnings' cargo clippy --all --all-targets --all-features 22 | - stage: Test 23 | name: UnitTest 24 | script: 25 | - cargo test -- --nocapture 26 | -------------------------------------------------------------------------------- /Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "cita_trie" 3 | version = "5.0.1" 4 | authors = ["yejiayu "] 5 | description = "Modified Patricia Tree (aka Trie)." 6 | license = "Apache-2.0" 7 | edition = "2021" 8 | readme = "README.md" 9 | keywords = ["patricia", "mpt", "evm", "trie", "ethereum"] 10 | repository = "https://github.com/cryptape/cita-trie" 11 | homepage = "https://github.com/cryptape/cita-trie" 12 | documentation = "https://docs.rs/cita_trie" 13 | 14 | [dependencies] 15 | parking_lot = "0.12" 16 | rlp = "0.5" 17 | hasher = { version = "0.1", features = ["hash-keccak"] } 18 | 19 | [dev-dependencies] 20 | rand = "0.7" 21 | hex = "0.4" 22 | criterion = "0.5" 23 | ethereum-types = "0.14" 24 | uuid = { version = "1.1", features = ["serde", "v4"] } 25 | 26 | [[bench]] 27 | name = "trie" 28 | harness = false 29 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 
14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 
47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. 
Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. 
You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 
122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. 
In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "[]" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. 
We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright [yyyy] [name of copyright owner] 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 202 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | ## CITA-Trie 2 | 3 | [![Latest Version](https://img.shields.io/crates/v/cita_trie.svg)](https://crates.io/crates/cita_trie) 4 | [![](https://travis-ci.org/cryptape/cita-trie.svg?branch=master)](https://travis-ci.org/cryptape/cita-trie) 5 | [![](https://img.shields.io/hexpm/l/plug.svg)](https://github.com/cryptape/cita-trie/blob/master/LICENSE) 6 | 7 | Rust implementation of the Modified Patricia Tree (aka Trie), 8 | 9 | The implementation is strongly inspired by [go-ethereum trie](https://github.com/ethereum/go-ethereum/tree/master/trie) 10 | 11 | ## Features 12 | 13 | - Implementation of the Modified Patricia Tree 14 | - Custom hash algorithm (Keccak is provided by default) 15 | - Custom storage interface 16 | 17 | ## Example 18 | 19 | ```rust 20 | use std::sync::Arc; 21 | 22 | use hasher::{Hasher, HasherKeccak}; // https://crates.io/crates/hasher 23 | 24 | use cita_trie::MemoryDB; 25 | use cita_trie::{PatriciaTrie, Trie}; 26 | 27 | fn main() { 28 
| let memdb = Arc::new(MemoryDB::new(true)); 29 | let hasher = Arc::new(HasherKeccak::new()); 30 | 31 | let key = "test-key".as_bytes(); 32 | let value = "test-value".as_bytes(); 33 | 34 | let root = { 35 | let mut trie = PatriciaTrie::new(Arc::clone(&memdb), Arc::clone(&hasher)); 36 | trie.insert(key.to_vec(), value.to_vec()).unwrap(); 37 | 38 | let v = trie.get(key).unwrap(); 39 | assert_eq!(Some(value.to_vec()), v); 40 | trie.root().unwrap() 41 | }; 42 | 43 | let mut trie = PatriciaTrie::from(Arc::clone(&memdb), Arc::clone(&hasher), &root).unwrap(); 44 | 45 | let exists = trie.contains(key).unwrap(); 46 | assert_eq!(exists, true); 47 | let removed = trie.remove(key).unwrap(); 48 | assert_eq!(removed, true); 49 | let new_root = trie.root().unwrap(); 50 | println!("new root = {:?}", new_root); 51 | 52 | } 53 | 54 | ``` 55 | 56 | ## Benchmark 57 | 58 | ```sh 59 | cargo bench 60 | 61 | Gnuplot not found, disabling plotting 62 | insert one time: [1.6564 us 1.7287 us 1.7955 us] 63 | change: [-2.2715% +1.5151% +5.1789%] (p = 0.42 > 0.05) 64 | No change in performance detected. 65 | 66 | insert 1k time: [1.1620 ms 1.1763 ms 1.1942 ms] 67 | change: [-2.3339% +0.7190% +3.7809%] (p = 0.65 > 0.05) 68 | No change in performance detected. 69 | Found 16 outliers among 100 measurements (16.00%) 70 | 9 (9.00%) high mild 71 | 7 (7.00%) high severe 72 | 73 | insert 10k time: [13.491 ms 13.677 ms 13.891 ms] 74 | change: [-5.3670% -1.2847% +2.8328%] (p = 0.54 > 0.05) 75 | No change in performance detected. 76 | Found 10 outliers among 100 measurements (10.00%) 77 | 9 (9.00%) high mild 78 | 1 (1.00%) high severe 79 | 80 | get based 10k time: [1.0707 us 1.0965 us 1.1270 us] 81 | change: [-10.331% -6.5107% -2.6793%] (p = 0.00 < 0.05) 82 | Performance has improved. 
83 | Found 11 outliers among 100 measurements (11.00%) 84 | 11 (11.00%) high mild 85 | 86 | remove 1k time: [538.54 us 545.18 us 553.96 us] 87 | change: [-7.3508% -0.7110% +7.0860%] (p = 0.86 > 0.05) 88 | No change in performance detected. 89 | Found 12 outliers among 100 measurements (12.00%) 90 | 5 (5.00%) high mild 91 | 7 (7.00%) high severe 92 | 93 | remove 10k time: [5.7277 ms 5.7780 ms 5.8367 ms] 94 | change: [-18.778% -5.4831% +10.503%] (p = 0.51 > 0.05) 95 | No change in performance detected. 96 | Found 11 outliers among 100 measurements (11.00%) 97 | 1 (1.00%) high mild 98 | 10 (10.00%) high severe 99 | ``` 100 | 101 | ### Custom hash algorithm 102 | See: https://crates.io/crates/hasher 103 | 104 | ### Custom storage 105 | 106 | [Refer](https://github.com/cryptape/cita-trie/blob/master/src/db.rs) 107 | -------------------------------------------------------------------------------- /benches/insert_benchmark.rs: -------------------------------------------------------------------------------- 1 | use std::sync::Arc; 2 | 3 | use criterion::{criterion_group, criterion_main, Criterion}; 4 | 5 | use hasher::HasherKeccak; 6 | use uuid::Uuid; 7 | 8 | use cita_trie::MemoryDB; 9 | use cita_trie::{PatriciaTrie, Trie}; 10 | 11 | fn insert_worse_case_benchmark(c: &mut Criterion) { 12 | c.bench_function("cita-trie insert one", |b| { 13 | let mut trie = PatriciaTrie::new( 14 | Arc::new(MemoryDB::new(false)), 15 | Arc::new(HasherKeccak::new()), 16 | ); 17 | 18 | b.iter(|| { 19 | let key = Uuid::new_v4().as_bytes().to_vec(); 20 | let value = Uuid::new_v4().as_bytes().to_vec(); 21 | trie.insert(key, value).unwrap() 22 | }) 23 | }); 24 | 25 | c.bench_function("cita-trie insert 1k", |b| { 26 | let mut trie = PatriciaTrie::new( 27 | Arc::new(MemoryDB::new(false)), 28 | Arc::new(HasherKeccak::new()), 29 | ); 30 | 31 | let (keys, values) = random_data(1000); 32 | b.iter(|| { 33 | for i in 0..keys.len() { 34 | trie.insert(keys[i].clone(), values[i].clone()).unwrap() 35 | } 36 | 
}); 37 | }); 38 | 39 | c.bench_function("cita-trie insert 10k", |b| { 40 | let mut trie = PatriciaTrie::new( 41 | Arc::new(MemoryDB::new(false)), 42 | Arc::new(HasherKeccak::new()), 43 | ); 44 | 45 | let (keys, values) = random_data(10000); 46 | b.iter(|| { 47 | for i in 0..keys.len() { 48 | trie.insert(keys[i].clone(), values[i].clone()).unwrap() 49 | } 50 | }); 51 | }); 52 | } 53 | 54 | fn random_data(n: usize) -> (Vec>, Vec>) { 55 | let mut keys = Vec::with_capacity(n); 56 | let mut values = Vec::with_capacity(n); 57 | for _ in 0..n { 58 | let key = Uuid::new_v4().as_bytes().to_vec(); 59 | let value = Uuid::new_v4().as_bytes().to_vec(); 60 | keys.push(key); 61 | values.push(value); 62 | } 63 | 64 | (keys, values) 65 | } 66 | 67 | criterion_group!(benches, insert_worse_case_benchmark); 68 | criterion_main!(benches); 69 | -------------------------------------------------------------------------------- /benches/trie.rs: -------------------------------------------------------------------------------- 1 | use std::sync::Arc; 2 | 3 | use criterion::{criterion_group, criterion_main, Criterion}; 4 | 5 | use hasher::HasherKeccak; 6 | use uuid::Uuid; 7 | 8 | use cita_trie::MemoryDB; 9 | use cita_trie::{PatriciaTrie, Trie}; 10 | 11 | fn insert_worse_case_benchmark(c: &mut Criterion) { 12 | c.bench_function("insert one", |b| { 13 | let mut trie = PatriciaTrie::new( 14 | Arc::new(MemoryDB::new(false)), 15 | Arc::new(HasherKeccak::new()), 16 | ); 17 | 18 | b.iter(|| { 19 | let key = Uuid::new_v4().as_bytes().to_vec(); 20 | let value = Uuid::new_v4().as_bytes().to_vec(); 21 | trie.insert(key, value).unwrap() 22 | }) 23 | }); 24 | 25 | c.bench_function("insert 1k", |b| { 26 | let mut trie = PatriciaTrie::new( 27 | Arc::new(MemoryDB::new(false)), 28 | Arc::new(HasherKeccak::new()), 29 | ); 30 | 31 | let (keys, values) = random_data(1000); 32 | b.iter(|| { 33 | for i in 0..keys.len() { 34 | trie.insert(keys[i].clone(), values[i].clone()).unwrap() 35 | } 36 | }); 37 | }); 38 | 39 
| c.bench_function("insert 10k", |b| { 40 | let mut trie = PatriciaTrie::new( 41 | Arc::new(MemoryDB::new(false)), 42 | Arc::new(HasherKeccak::new()), 43 | ); 44 | 45 | let (keys, values) = random_data(10000); 46 | b.iter(|| { 47 | for i in 0..keys.len() { 48 | trie.insert(keys[i].clone(), values[i].clone()).unwrap() 49 | } 50 | }); 51 | }); 52 | 53 | c.bench_function("get based 10k", |b| { 54 | let mut trie = PatriciaTrie::new( 55 | Arc::new(MemoryDB::new(false)), 56 | Arc::new(HasherKeccak::new()), 57 | ); 58 | 59 | let (keys, values) = random_data(10000); 60 | for i in 0..keys.len() { 61 | trie.insert(keys[i].clone(), values[i].clone()).unwrap() 62 | } 63 | 64 | b.iter(|| { 65 | let key = trie.get(&keys[7777]).unwrap(); 66 | assert_ne!(key, None); 67 | }); 68 | }); 69 | 70 | c.bench_function("remove 1k", |b| { 71 | let mut trie = PatriciaTrie::new( 72 | Arc::new(MemoryDB::new(false)), 73 | Arc::new(HasherKeccak::new()), 74 | ); 75 | 76 | let (keys, values) = random_data(1000); 77 | for i in 0..keys.len() { 78 | trie.insert(keys[i].clone(), values[i].clone()).unwrap() 79 | } 80 | 81 | b.iter(|| { 82 | for key in keys.iter() { 83 | trie.remove(key).unwrap(); 84 | } 85 | }); 86 | }); 87 | 88 | c.bench_function("remove 10k", |b| { 89 | let mut trie = PatriciaTrie::new( 90 | Arc::new(MemoryDB::new(false)), 91 | Arc::new(HasherKeccak::new()), 92 | ); 93 | 94 | let (keys, values) = random_data(10000); 95 | for i in 0..keys.len() { 96 | trie.insert(keys[i].clone(), values[i].clone()).unwrap() 97 | } 98 | 99 | b.iter(|| { 100 | for key in keys.iter() { 101 | trie.remove(key).unwrap(); 102 | } 103 | }); 104 | }); 105 | } 106 | 107 | fn random_data(n: usize) -> (Vec>, Vec>) { 108 | let mut keys = Vec::with_capacity(n); 109 | let mut values = Vec::with_capacity(n); 110 | for _ in 0..n { 111 | let key = Uuid::new_v4().as_bytes().to_vec(); 112 | let value = Uuid::new_v4().as_bytes().to_vec(); 113 | keys.push(key); 114 | values.push(value); 115 | } 116 | 117 | (keys, values) 
118 | } 119 | 120 | criterion_group!(benches, insert_worse_case_benchmark); 121 | criterion_main!(benches); 122 | -------------------------------------------------------------------------------- /src/db.rs: -------------------------------------------------------------------------------- 1 | use std::collections::HashMap; 2 | use std::io::Error; 3 | use std::sync::Arc; 4 | 5 | use parking_lot::RwLock; 6 | 7 | /// "DB" defines the "trait" of trie and database interaction. 8 | /// You should first write the data to the cache and write the data 9 | /// to the database in bulk after the end of a set of operations. 10 | pub trait DB { 11 | fn get(&self, key: &[u8]) -> Result>, Error>; 12 | 13 | fn contains(&self, key: &[u8]) -> Result; 14 | 15 | /// Insert data into the cache. 16 | fn insert(&self, key: Vec, value: Vec) -> Result<(), Error>; 17 | 18 | /// Insert data into the cache. 19 | fn remove(&self, key: &[u8]) -> Result<(), Error>; 20 | 21 | /// Insert a batch of data into the cache. 22 | fn insert_batch(&self, keys: Vec>, values: Vec>) -> Result<(), Error> { 23 | for i in 0..keys.len() { 24 | let key = keys[i].clone(); 25 | let value = values[i].clone(); 26 | self.insert(key, value)?; 27 | } 28 | Ok(()) 29 | } 30 | 31 | /// Remove a batch of data into the cache. 32 | fn remove_batch(&self, keys: &[Vec]) -> Result<(), Error> { 33 | for key in keys { 34 | self.remove(key)?; 35 | } 36 | Ok(()) 37 | } 38 | 39 | /// Flush data to the DB from the cache. 40 | fn flush(&self) -> Result<(), Error>; 41 | 42 | #[cfg(test)] 43 | fn len(&self) -> Result; 44 | #[cfg(test)] 45 | fn is_empty(&self) -> Result; 46 | } 47 | 48 | // cross DB is raw DB 49 | pub trait CDB: DB + Sync + Send {} 50 | 51 | #[derive(Default, Clone, Debug)] 52 | pub struct MemoryDB { 53 | // If "light" is true, the data is deleted from the database at the time of submission. 
54 | light: bool, 55 | storage: Arc, Vec>>>, 56 | } 57 | 58 | impl MemoryDB { 59 | pub fn new(light: bool) -> Self { 60 | MemoryDB { 61 | light, 62 | storage: Arc::new(RwLock::new(HashMap::new())), 63 | } 64 | } 65 | } 66 | 67 | impl DB for MemoryDB { 68 | fn get(&self, key: &[u8]) -> Result>, Error> { 69 | if let Some(value) = self.storage.read().get(key) { 70 | Ok(Some(value.clone())) 71 | } else { 72 | Ok(None) 73 | } 74 | } 75 | 76 | fn insert(&self, key: Vec, value: Vec) -> Result<(), Error> { 77 | self.storage.write().insert(key, value); 78 | Ok(()) 79 | } 80 | 81 | fn contains(&self, key: &[u8]) -> Result { 82 | Ok(self.storage.read().contains_key(key)) 83 | } 84 | 85 | fn remove(&self, key: &[u8]) -> Result<(), Error> { 86 | if self.light { 87 | self.storage.write().remove(key); 88 | } 89 | Ok(()) 90 | } 91 | 92 | fn flush(&self) -> Result<(), Error> { 93 | Ok(()) 94 | } 95 | 96 | #[cfg(test)] 97 | fn len(&self) -> Result { 98 | Ok(self.storage.try_read().unwrap().len()) 99 | } 100 | #[cfg(test)] 101 | fn is_empty(&self) -> Result { 102 | Ok(self.storage.try_read().unwrap().is_empty()) 103 | } 104 | } 105 | 106 | impl CDB for MemoryDB {} 107 | 108 | #[cfg(test)] 109 | mod tests { 110 | use super::*; 111 | 112 | #[test] 113 | fn test_memdb_get() { 114 | let memdb = MemoryDB::new(true); 115 | memdb 116 | .insert(b"test-key".to_vec(), b"test-value".to_vec()) 117 | .unwrap(); 118 | let v = memdb.get(b"test-key").unwrap().unwrap(); 119 | 120 | assert_eq!(v, b"test-value") 121 | } 122 | 123 | #[test] 124 | fn test_memdb_contains() { 125 | let memdb = MemoryDB::new(true); 126 | memdb.insert(b"test".to_vec(), b"test".to_vec()).unwrap(); 127 | 128 | let contains = memdb.contains(b"test").unwrap(); 129 | assert!(contains) 130 | } 131 | 132 | #[test] 133 | fn test_memdb_remove() { 134 | let memdb = MemoryDB::new(true); 135 | memdb.insert(b"test".to_vec(), b"test".to_vec()).unwrap(); 136 | 137 | memdb.remove(b"test").unwrap(); 138 | let contains = 
memdb.contains(b"test").unwrap(); 139 | assert!(!contains) 140 | } 141 | } 142 | -------------------------------------------------------------------------------- /src/errors.rs: -------------------------------------------------------------------------------- 1 | use std::error::Error; 2 | use std::fmt; 3 | 4 | use rlp::DecoderError; 5 | 6 | #[derive(Debug)] 7 | pub enum TrieError { 8 | DB(String), 9 | Decoder(DecoderError), 10 | InvalidData, 11 | InvalidStateRoot, 12 | InvalidProof, 13 | } 14 | 15 | impl Error for TrieError {} 16 | 17 | impl fmt::Display for TrieError { 18 | fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { 19 | let printable = match *self { 20 | TrieError::DB(ref err) => format!("trie error: {:?}", err), 21 | TrieError::Decoder(ref err) => format!("trie error: {:?}", err), 22 | TrieError::InvalidData => "trie error: invali data".to_owned(), 23 | TrieError::InvalidStateRoot => "trie error: invali state root".to_owned(), 24 | TrieError::InvalidProof => "trie error: invali proof".to_owned(), 25 | }; 26 | write!(f, "{}", printable) 27 | } 28 | } 29 | 30 | impl From for TrieError { 31 | fn from(error: DecoderError) -> Self { 32 | TrieError::Decoder(error) 33 | } 34 | } 35 | 36 | #[derive(Debug)] 37 | pub enum MemDBError {} 38 | 39 | impl Error for MemDBError {} 40 | 41 | impl fmt::Display for MemDBError { 42 | fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { 43 | write!(f, "error") 44 | } 45 | } 46 | -------------------------------------------------------------------------------- /src/lib.rs: -------------------------------------------------------------------------------- 1 | #![allow(clippy::needless_doctest_main)] 2 | //! ## Usage 3 | //! 4 | //! ```rust 5 | //! use std::sync::Arc; 6 | //! 7 | //! use hasher::{Hasher, HasherKeccak}; // https://crates.io/crates/hasher 8 | //! 9 | //! use cita_trie::MemoryDB; 10 | //! use cita_trie::{PatriciaTrie, Trie}; 11 | 12 | //! fn main() { 13 | //! let memdb = Arc::new(MemoryDB::new(true)); 14 | //! 
let hasher = Arc::new(HasherKeccak::new()); 15 | //! 16 | //! let key = "test-key".as_bytes(); 17 | //! let value = "test-value".as_bytes(); 18 | //! 19 | //! let root = { 20 | //! let mut trie = PatriciaTrie::new(Arc::clone(&memdb), Arc::clone(&hasher)); 21 | //! trie.insert(key.to_vec(), value.to_vec()).unwrap(); 22 | //! 23 | //! let v = trie.get(key).unwrap(); 24 | //! assert_eq!(Some(value.to_vec()), v); 25 | //! trie.root().unwrap() 26 | //! }; 27 | //! 28 | //! let mut trie = PatriciaTrie::from(Arc::clone(&memdb), Arc::clone(&hasher), &root).unwrap(); 29 | //! let exists = trie.contains(key).unwrap(); 30 | //! assert_eq!(exists, true); 31 | //! let removed = trie.remove(key).unwrap(); 32 | //! assert_eq!(removed, true); 33 | //! let new_root = trie.root().unwrap(); 34 | //! println!("new root = {:?}", new_root); 35 | //! 36 | //! } 37 | //! ``` 38 | 39 | mod nibbles; 40 | mod node; 41 | mod tests; 42 | 43 | mod db; 44 | mod errors; 45 | mod trie; 46 | 47 | pub use db::{MemoryDB, CDB, DB}; 48 | pub use errors::{MemDBError, TrieError}; 49 | pub use hasher::Hasher; 50 | pub use trie::{PatriciaTrie, Trie}; 51 | pub use verify::verify_proof; 52 | 53 | mod verify { 54 | use std::sync::Arc; 55 | 56 | use hasher::Hasher; 57 | 58 | use crate::{trie::TrieResult, MemoryDB, PatriciaTrie, Trie, TrieError, DB}; 59 | 60 | pub fn verify_proof( 61 | root_hash: &[u8], 62 | key: &[u8], 63 | proof: Vec>, 64 | hasher: H, 65 | ) -> TrieResult>> { 66 | let memdb = Arc::new(MemoryDB::new(true)); 67 | for node_encoded in proof.into_iter() { 68 | let hash = hasher.digest(&node_encoded); 69 | 70 | if root_hash.eq(&hash) || node_encoded.len() >= H::LENGTH { 71 | memdb.insert(hash, node_encoded).unwrap(); 72 | } 73 | } 74 | 75 | PatriciaTrie::from(memdb, Arc::new(hasher), root_hash) 76 | .or(Err(TrieError::InvalidProof))? 
77 | .get(key) 78 | .or(Err(TrieError::InvalidProof)) 79 | } 80 | } 81 | -------------------------------------------------------------------------------- /src/nibbles.rs: -------------------------------------------------------------------------------- 1 | use std::cmp::min; 2 | 3 | #[derive(Debug, Clone, Eq, PartialEq)] 4 | pub struct Nibbles { 5 | hex_data: Vec, 6 | } 7 | 8 | impl Nibbles { 9 | pub fn from_hex(hex: Vec) -> Self { 10 | Nibbles { hex_data: hex } 11 | } 12 | 13 | pub fn from_raw(raw: Vec, is_leaf: bool) -> Self { 14 | let mut hex_data = vec![]; 15 | for item in raw.into_iter() { 16 | hex_data.push(item / 16); 17 | hex_data.push(item % 16); 18 | } 19 | if is_leaf { 20 | hex_data.push(16); 21 | } 22 | Nibbles { hex_data } 23 | } 24 | 25 | pub fn from_compact(compact: Vec) -> Self { 26 | let mut hex = vec![]; 27 | let flag = compact[0]; 28 | 29 | let mut is_leaf = false; 30 | match flag >> 4 { 31 | 0x0 => {} 32 | 0x1 => hex.push(flag % 16), 33 | 0x2 => is_leaf = true, 34 | 0x3 => { 35 | is_leaf = true; 36 | hex.push(flag % 16); 37 | } 38 | _ => panic!("invalid data"), 39 | } 40 | 41 | for item in &compact[1..] { 42 | hex.push(item / 16); 43 | hex.push(item % 16); 44 | } 45 | if is_leaf { 46 | hex.push(16); 47 | } 48 | 49 | Nibbles { hex_data: hex } 50 | } 51 | 52 | pub fn is_leaf(&self) -> bool { 53 | self.hex_data[self.hex_data.len() - 1] == 16 54 | } 55 | 56 | pub fn encode_compact(&self) -> Vec { 57 | let mut compact = vec![]; 58 | let is_leaf = self.is_leaf(); 59 | let mut hex = if is_leaf { 60 | &self.hex_data[0..self.hex_data.len() - 1] 61 | } else { 62 | &self.hex_data[0..] 
63 | }; 64 | // node type path length | prefix hexchar 65 | // -------------------------------------------------- 66 | // extension even | 0000 0x0 67 | // extension odd | 0001 0x1 68 | // leaf even | 0010 0x2 69 | // leaf odd | 0011 0x3 70 | let v = if hex.len() % 2 == 1 { 71 | let v = 0x10 + hex[0]; 72 | hex = &hex[1..]; 73 | v 74 | } else { 75 | 0x00 76 | }; 77 | 78 | compact.push(v + if is_leaf { 0x20 } else { 0x00 }); 79 | for i in 0..(hex.len() / 2) { 80 | compact.push((hex[i * 2] * 16) + (hex[i * 2 + 1])); 81 | } 82 | 83 | compact 84 | } 85 | 86 | pub fn encode_raw(&self) -> (Vec, bool) { 87 | let mut raw = vec![]; 88 | let is_leaf = self.is_leaf(); 89 | let hex = if is_leaf { 90 | &self.hex_data[0..self.hex_data.len() - 1] 91 | } else { 92 | &self.hex_data[0..] 93 | }; 94 | 95 | for i in 0..(hex.len() / 2) { 96 | raw.push((hex[i * 2] * 16) + (hex[i * 2 + 1])); 97 | } 98 | 99 | (raw, is_leaf) 100 | } 101 | 102 | pub fn len(&self) -> usize { 103 | self.hex_data.len() 104 | } 105 | 106 | pub fn is_empty(&self) -> bool { 107 | self.len() == 0 108 | } 109 | 110 | pub fn at(&self, i: usize) -> usize { 111 | self.hex_data[i] as usize 112 | } 113 | 114 | pub fn common_prefix(&self, other_partial: &Nibbles) -> usize { 115 | let s = min(self.len(), other_partial.len()); 116 | let mut i = 0usize; 117 | while i < s { 118 | if self.at(i) != other_partial.at(i) { 119 | break; 120 | } 121 | i += 1; 122 | } 123 | i 124 | } 125 | 126 | pub fn offset(&self, index: usize) -> Nibbles { 127 | self.slice(index, self.hex_data.len()) 128 | } 129 | 130 | pub fn slice(&self, start: usize, end: usize) -> Nibbles { 131 | Nibbles::from_hex(self.hex_data[start..end].to_vec()) 132 | } 133 | 134 | pub fn get_data(&self) -> &[u8] { 135 | &self.hex_data 136 | } 137 | 138 | pub fn join(&self, b: &Nibbles) -> Nibbles { 139 | let mut hex_data = vec![]; 140 | hex_data.extend_from_slice(self.get_data()); 141 | hex_data.extend_from_slice(b.get_data()); 142 | Nibbles::from_hex(hex_data) 143 | } 
144 | 145 | pub fn extend(&mut self, b: &Nibbles) { 146 | self.hex_data.extend_from_slice(b.get_data()); 147 | } 148 | 149 | pub fn truncate(&mut self, len: usize) { 150 | self.hex_data.truncate(len) 151 | } 152 | 153 | pub fn pop(&mut self) -> Option { 154 | self.hex_data.pop() 155 | } 156 | 157 | pub fn push(&mut self, e: u8) { 158 | self.hex_data.push(e) 159 | } 160 | } 161 | 162 | #[cfg(test)] 163 | mod tests { 164 | use super::*; 165 | 166 | #[test] 167 | fn test_nibble() { 168 | let n = Nibbles::from_raw(b"key1".to_vec(), true); 169 | let compact = n.encode_compact(); 170 | let n2 = Nibbles::from_compact(compact); 171 | let (raw, is_leaf) = n2.encode_raw(); 172 | assert!(is_leaf); 173 | assert_eq!(raw, b"key1"); 174 | } 175 | } 176 | -------------------------------------------------------------------------------- /src/node.rs: -------------------------------------------------------------------------------- 1 | use std::cell::RefCell; 2 | use std::rc::Rc; 3 | 4 | use crate::nibbles::Nibbles; 5 | 6 | #[derive(Debug, Clone)] 7 | pub enum Node { 8 | Empty, 9 | Leaf(Rc>), 10 | Extension(Rc>), 11 | Branch(Rc>), 12 | Hash(Rc>), 13 | } 14 | 15 | impl Node { 16 | pub fn from_leaf(key: Nibbles, value: Vec) -> Self { 17 | let leaf = Rc::new(RefCell::new(LeafNode { key, value })); 18 | Node::Leaf(leaf) 19 | } 20 | 21 | pub fn from_branch(children: [Node; 16], value: Option>) -> Self { 22 | let branch = Rc::new(RefCell::new(BranchNode { children, value })); 23 | Node::Branch(branch) 24 | } 25 | 26 | pub fn from_extension(prefix: Nibbles, node: Node) -> Self { 27 | let ext = Rc::new(RefCell::new(ExtensionNode { prefix, node })); 28 | Node::Extension(ext) 29 | } 30 | 31 | pub fn from_hash(hash: Vec) -> Self { 32 | let hash_node = Rc::new(RefCell::new(HashNode { hash })); 33 | Node::Hash(hash_node) 34 | } 35 | } 36 | 37 | #[derive(Debug)] 38 | pub struct LeafNode { 39 | pub key: Nibbles, 40 | pub value: Vec, 41 | } 42 | 43 | #[derive(Debug)] 44 | pub struct BranchNode { 45 | 
pub children: [Node; 16], 46 | pub value: Option>, 47 | } 48 | 49 | impl BranchNode { 50 | pub fn insert(&mut self, i: usize, n: Node) { 51 | if i == 16 { 52 | match n { 53 | Node::Leaf(leaf) => { 54 | self.value = Some(leaf.borrow().value.clone()); 55 | } 56 | _ => panic!("The n must be leaf node"), 57 | } 58 | } else { 59 | self.children[i] = n 60 | } 61 | } 62 | } 63 | 64 | #[derive(Debug)] 65 | pub struct ExtensionNode { 66 | pub prefix: Nibbles, 67 | pub node: Node, 68 | } 69 | 70 | #[derive(Debug)] 71 | pub struct HashNode { 72 | pub hash: Vec, 73 | } 74 | 75 | pub fn empty_children() -> [Node; 16] { 76 | [ 77 | Node::Empty, 78 | Node::Empty, 79 | Node::Empty, 80 | Node::Empty, 81 | Node::Empty, 82 | Node::Empty, 83 | Node::Empty, 84 | Node::Empty, 85 | Node::Empty, 86 | Node::Empty, 87 | Node::Empty, 88 | Node::Empty, 89 | Node::Empty, 90 | Node::Empty, 91 | Node::Empty, 92 | Node::Empty, 93 | ] 94 | } 95 | -------------------------------------------------------------------------------- /src/tests/mod.rs: -------------------------------------------------------------------------------- 1 | #[cfg(test)] 2 | mod trie_tests { 3 | use hex::FromHex; 4 | use rand::Rng; 5 | use std::sync::Arc; 6 | 7 | use hasher::HasherKeccak; 8 | 9 | use crate::db::MemoryDB; 10 | use crate::trie::{PatriciaTrie, Trie}; 11 | use crate::verify_proof; 12 | 13 | fn assert_root(data: Vec<(&[u8], &[u8])>, hash: &str) { 14 | let memdb = Arc::new(MemoryDB::new(true)); 15 | let mut trie = PatriciaTrie::new(Arc::clone(&memdb), Arc::new(HasherKeccak::new())); 16 | for (k, v) in data.into_iter() { 17 | trie.insert(k.to_vec(), v.to_vec()).unwrap(); 18 | } 19 | let r = trie.root().unwrap(); 20 | let rs = format!("0x{}", hex::encode(r.clone())); 21 | assert_eq!(rs.as_str(), hash); 22 | let mut trie = 23 | PatriciaTrie::from(Arc::clone(&memdb), Arc::new(HasherKeccak::new()), &r).unwrap(); 24 | let r2 = trie.root().unwrap(); 25 | let rs2 = format!("0x{}", hex::encode(r2)); 26 | 
assert_eq!(rs2.as_str(), hash); 27 | } 28 | 29 | #[test] 30 | fn test_root() { 31 | // See: https://github.com/ethereum/tests/blob/develop/TrieTests 32 | // Copy from trietest.json and trieanyorder.json 33 | assert_root( 34 | vec![(b"A", b"aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa")], 35 | "0xd23786fb4a010da3ce639d66d5e904a11dbc02746d1ce25029e53290cabf28ab", 36 | ); 37 | assert_root( 38 | vec![ 39 | (b"doe", b"reindeer"), 40 | (b"dog", b"puppy"), 41 | (b"dogglesworth", b"cat"), 42 | ], 43 | "0x8aad789dff2f538bca5d8ea56e8abe10f4c7ba3a5dea95fea4cd6e7c3a1168d3", 44 | ); 45 | assert_root( 46 | vec![ 47 | (b"do", b"verb"), 48 | (b"horse", b"stallion"), 49 | (b"doge", b"coin"), 50 | (b"dog", b"puppy"), 51 | ], 52 | "0x5991bb8c6514148a29db676a14ac506cd2cd5775ace63c30a4fe457715e9ac84", 53 | ); 54 | assert_root( 55 | vec![(b"foo", b"bar"), (b"food", b"bass")], 56 | "0x17beaa1648bafa633cda809c90c04af50fc8aed3cb40d16efbddee6fdf63c4c3", 57 | ); 58 | 59 | assert_root( 60 | vec![(b"be", b"e"), (b"dog", b"puppy"), (b"bed", b"d")], 61 | "0x3f67c7a47520f79faa29255d2d3c084a7a6df0453116ed7232ff10277a8be68b", 62 | ); 63 | assert_root( 64 | vec![(b"test", b"test"), (b"te", b"testy")], 65 | "0x8452568af70d8d140f58d941338542f645fcca50094b20f3c3d8c3df49337928", 66 | ); 67 | assert_root( 68 | vec![ 69 | ( 70 | Vec::from_hex("0045").unwrap().as_slice(), 71 | Vec::from_hex("0123456789").unwrap().as_slice(), 72 | ), 73 | ( 74 | Vec::from_hex("4500").unwrap().as_slice(), 75 | Vec::from_hex("9876543210").unwrap().as_slice(), 76 | ), 77 | ], 78 | "0x285505fcabe84badc8aa310e2aae17eddc7d120aabec8a476902c8184b3a3503", 79 | ); 80 | assert_root( 81 | vec![ 82 | (b"do", b"verb"), 83 | (b"ether", b"wookiedoo"), 84 | (b"horse", b"stallion"), 85 | (b"shaman", b"horse"), 86 | (b"doge", b"coin"), 87 | (b"ether", b""), 88 | (b"dog", b"puppy"), 89 | (b"shaman", b""), 90 | ], 91 | "0x5991bb8c6514148a29db676a14ac506cd2cd5775ace63c30a4fe457715e9ac84", 92 | ); 93 | assert_root( 94 | vec![ 95 | (b"do", 
b"verb"), 96 | (b"ether", b"wookiedoo"), 97 | (b"horse", b"stallion"), 98 | (b"shaman", b"horse"), 99 | (b"doge", b"coin"), 100 | (b"ether", b""), 101 | (b"dog", b"puppy"), 102 | (b"shaman", b""), 103 | ], 104 | "0x5991bb8c6514148a29db676a14ac506cd2cd5775ace63c30a4fe457715e9ac84", 105 | ); 106 | assert_root( 107 | vec![ 108 | ( 109 | Vec::from_hex("04110d816c380812a427968ece99b1c963dfbce6") 110 | .unwrap() 111 | .as_slice(), 112 | b"something", 113 | ), 114 | ( 115 | Vec::from_hex("095e7baea6a6c7c4c2dfeb977efac326af552d87") 116 | .unwrap() 117 | .as_slice(), 118 | b"something", 119 | ), 120 | ( 121 | Vec::from_hex("0a517d755cebbf66312b30fff713666a9cb917e0") 122 | .unwrap() 123 | .as_slice(), 124 | b"something", 125 | ), 126 | ( 127 | Vec::from_hex("24dd378f51adc67a50e339e8031fe9bd4aafab36") 128 | .unwrap() 129 | .as_slice(), 130 | b"something", 131 | ), 132 | ( 133 | Vec::from_hex("293f982d000532a7861ab122bdc4bbfd26bf9030") 134 | .unwrap() 135 | .as_slice(), 136 | b"something", 137 | ), 138 | ( 139 | Vec::from_hex("2cf5732f017b0cf1b1f13a1478e10239716bf6b5") 140 | .unwrap() 141 | .as_slice(), 142 | b"something", 143 | ), 144 | ( 145 | Vec::from_hex("31c640b92c21a1f1465c91070b4b3b4d6854195f") 146 | .unwrap() 147 | .as_slice(), 148 | b"something", 149 | ), 150 | ( 151 | Vec::from_hex("37f998764813b136ddf5a754f34063fd03065e36") 152 | .unwrap() 153 | .as_slice(), 154 | b"something", 155 | ), 156 | ( 157 | Vec::from_hex("37fa399a749c121f8a15ce77e3d9f9bec8020d7a") 158 | .unwrap() 159 | .as_slice(), 160 | b"something", 161 | ), 162 | ( 163 | Vec::from_hex("4f36659fa632310b6ec438dea4085b522a2dd077") 164 | .unwrap() 165 | .as_slice(), 166 | b"something", 167 | ), 168 | ( 169 | Vec::from_hex("62c01474f089b07dae603491675dc5b5748f7049") 170 | .unwrap() 171 | .as_slice(), 172 | b"something", 173 | ), 174 | ( 175 | Vec::from_hex("729af7294be595a0efd7d891c9e51f89c07950c7") 176 | .unwrap() 177 | .as_slice(), 178 | b"something", 179 | ), 180 | ( 181 | 
Vec::from_hex("83e3e5a16d3b696a0314b30b2534804dd5e11197") 182 | .unwrap() 183 | .as_slice(), 184 | b"something", 185 | ), 186 | ( 187 | Vec::from_hex("8703df2417e0d7c59d063caa9583cb10a4d20532") 188 | .unwrap() 189 | .as_slice(), 190 | b"something", 191 | ), 192 | ( 193 | Vec::from_hex("8dffcd74e5b5923512916c6a64b502689cfa65e1") 194 | .unwrap() 195 | .as_slice(), 196 | b"something", 197 | ), 198 | ( 199 | Vec::from_hex("95a4d7cccb5204733874fa87285a176fe1e9e240") 200 | .unwrap() 201 | .as_slice(), 202 | b"something", 203 | ), 204 | ( 205 | Vec::from_hex("99b2fcba8120bedd048fe79f5262a6690ed38c39") 206 | .unwrap() 207 | .as_slice(), 208 | b"something", 209 | ), 210 | ( 211 | Vec::from_hex("a4202b8b8afd5354e3e40a219bdc17f6001bf2cf") 212 | .unwrap() 213 | .as_slice(), 214 | b"something", 215 | ), 216 | ( 217 | Vec::from_hex("a94f5374fce5edbc8e2a8697c15331677e6ebf0b") 218 | .unwrap() 219 | .as_slice(), 220 | b"something", 221 | ), 222 | ( 223 | Vec::from_hex("a9647f4a0a14042d91dc33c0328030a7157c93ae") 224 | .unwrap() 225 | .as_slice(), 226 | b"something", 227 | ), 228 | ( 229 | Vec::from_hex("aa6cffe5185732689c18f37a7f86170cb7304c2a") 230 | .unwrap() 231 | .as_slice(), 232 | b"something", 233 | ), 234 | ( 235 | Vec::from_hex("aae4a2e3c51c04606dcb3723456e58f3ed214f45") 236 | .unwrap() 237 | .as_slice(), 238 | b"something", 239 | ), 240 | ( 241 | Vec::from_hex("c37a43e940dfb5baf581a0b82b351d48305fc885") 242 | .unwrap() 243 | .as_slice(), 244 | b"something", 245 | ), 246 | ( 247 | Vec::from_hex("d2571607e241ecf590ed94b12d87c94babe36db6") 248 | .unwrap() 249 | .as_slice(), 250 | b"something", 251 | ), 252 | ( 253 | Vec::from_hex("f735071cbee190d76b704ce68384fc21e389fbe7") 254 | .unwrap() 255 | .as_slice(), 256 | b"something", 257 | ), 258 | ( 259 | Vec::from_hex("04110d816c380812a427968ece99b1c963dfbce6") 260 | .unwrap() 261 | .as_slice(), 262 | b"", 263 | ), 264 | ( 265 | Vec::from_hex("095e7baea6a6c7c4c2dfeb977efac326af552d87") 266 | .unwrap() 267 | .as_slice(), 268 | b"", 
269 | ), 270 | ( 271 | Vec::from_hex("0a517d755cebbf66312b30fff713666a9cb917e0") 272 | .unwrap() 273 | .as_slice(), 274 | b"", 275 | ), 276 | ( 277 | Vec::from_hex("24dd378f51adc67a50e339e8031fe9bd4aafab36") 278 | .unwrap() 279 | .as_slice(), 280 | b"", 281 | ), 282 | ( 283 | Vec::from_hex("293f982d000532a7861ab122bdc4bbfd26bf9030") 284 | .unwrap() 285 | .as_slice(), 286 | b"", 287 | ), 288 | ( 289 | Vec::from_hex("2cf5732f017b0cf1b1f13a1478e10239716bf6b5") 290 | .unwrap() 291 | .as_slice(), 292 | b"", 293 | ), 294 | ( 295 | Vec::from_hex("31c640b92c21a1f1465c91070b4b3b4d6854195f") 296 | .unwrap() 297 | .as_slice(), 298 | b"", 299 | ), 300 | ( 301 | Vec::from_hex("37f998764813b136ddf5a754f34063fd03065e36") 302 | .unwrap() 303 | .as_slice(), 304 | b"", 305 | ), 306 | ( 307 | Vec::from_hex("37fa399a749c121f8a15ce77e3d9f9bec8020d7a") 308 | .unwrap() 309 | .as_slice(), 310 | b"", 311 | ), 312 | ( 313 | Vec::from_hex("4f36659fa632310b6ec438dea4085b522a2dd077") 314 | .unwrap() 315 | .as_slice(), 316 | b"", 317 | ), 318 | ( 319 | Vec::from_hex("62c01474f089b07dae603491675dc5b5748f7049") 320 | .unwrap() 321 | .as_slice(), 322 | b"", 323 | ), 324 | ( 325 | Vec::from_hex("729af7294be595a0efd7d891c9e51f89c07950c7") 326 | .unwrap() 327 | .as_slice(), 328 | b"", 329 | ), 330 | ( 331 | Vec::from_hex("83e3e5a16d3b696a0314b30b2534804dd5e11197") 332 | .unwrap() 333 | .as_slice(), 334 | b"", 335 | ), 336 | ( 337 | Vec::from_hex("8703df2417e0d7c59d063caa9583cb10a4d20532") 338 | .unwrap() 339 | .as_slice(), 340 | b"", 341 | ), 342 | ( 343 | Vec::from_hex("8dffcd74e5b5923512916c6a64b502689cfa65e1") 344 | .unwrap() 345 | .as_slice(), 346 | b"", 347 | ), 348 | ( 349 | Vec::from_hex("95a4d7cccb5204733874fa87285a176fe1e9e240") 350 | .unwrap() 351 | .as_slice(), 352 | b"", 353 | ), 354 | ( 355 | Vec::from_hex("99b2fcba8120bedd048fe79f5262a6690ed38c39") 356 | .unwrap() 357 | .as_slice(), 358 | b"", 359 | ), 360 | ( 361 | Vec::from_hex("a4202b8b8afd5354e3e40a219bdc17f6001bf2cf") 362 | 
.unwrap() 363 | .as_slice(), 364 | b"", 365 | ), 366 | ( 367 | Vec::from_hex("a94f5374fce5edbc8e2a8697c15331677e6ebf0b") 368 | .unwrap() 369 | .as_slice(), 370 | b"", 371 | ), 372 | ( 373 | Vec::from_hex("a9647f4a0a14042d91dc33c0328030a7157c93ae") 374 | .unwrap() 375 | .as_slice(), 376 | b"", 377 | ), 378 | ( 379 | Vec::from_hex("aa6cffe5185732689c18f37a7f86170cb7304c2a") 380 | .unwrap() 381 | .as_slice(), 382 | b"", 383 | ), 384 | ( 385 | Vec::from_hex("aae4a2e3c51c04606dcb3723456e58f3ed214f45") 386 | .unwrap() 387 | .as_slice(), 388 | b"", 389 | ), 390 | ( 391 | Vec::from_hex("c37a43e940dfb5baf581a0b82b351d48305fc885") 392 | .unwrap() 393 | .as_slice(), 394 | b"", 395 | ), 396 | ( 397 | Vec::from_hex("d2571607e241ecf590ed94b12d87c94babe36db6") 398 | .unwrap() 399 | .as_slice(), 400 | b"", 401 | ), 402 | ( 403 | Vec::from_hex("f735071cbee190d76b704ce68384fc21e389fbe7") 404 | .unwrap() 405 | .as_slice(), 406 | b"", 407 | ), 408 | ], 409 | "0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421", 410 | ); 411 | assert_root( 412 | vec![ 413 | ( 414 | Vec::from_hex( 415 | "0000000000000000000000000000000000000000000000000000000000000045", 416 | ) 417 | .unwrap() 418 | .as_slice(), 419 | Vec::from_hex("22b224a1420a802ab51d326e29fa98e34c4f24ea") 420 | .unwrap() 421 | .as_slice(), 422 | ), 423 | ( 424 | Vec::from_hex( 425 | "0000000000000000000000000000000000000000000000000000000000000046", 426 | ) 427 | .unwrap() 428 | .as_slice(), 429 | Vec::from_hex( 430 | "67706c2076330000000000000000000000000000000000000000000000000000", 431 | ) 432 | .unwrap() 433 | .as_slice(), 434 | ), 435 | ( 436 | Vec::from_hex( 437 | "0000000000000000000000000000000000000000000000000000001234567890", 438 | ) 439 | .unwrap() 440 | .as_slice(), 441 | Vec::from_hex("697c7b8c961b56f675d570498424ac8de1a918f6") 442 | .unwrap() 443 | .as_slice(), 444 | ), 445 | ( 446 | Vec::from_hex( 447 | "000000000000000000000000697c7b8c961b56f675d570498424ac8de1a918f6", 448 | ) 449 | .unwrap() 450 | 
.as_slice(), 451 | Vec::from_hex("1234567890").unwrap().as_slice(), 452 | ), 453 | ( 454 | Vec::from_hex( 455 | "0000000000000000000000007ef9e639e2733cb34e4dfc576d4b23f72db776b2", 456 | ) 457 | .unwrap() 458 | .as_slice(), 459 | Vec::from_hex( 460 | "4655474156000000000000000000000000000000000000000000000000000000", 461 | ) 462 | .unwrap() 463 | .as_slice(), 464 | ), 465 | ( 466 | Vec::from_hex( 467 | "000000000000000000000000ec4f34c97e43fbb2816cfd95e388353c7181dab1", 468 | ) 469 | .unwrap() 470 | .as_slice(), 471 | Vec::from_hex( 472 | "4e616d6552656700000000000000000000000000000000000000000000000000", 473 | ) 474 | .unwrap() 475 | .as_slice(), 476 | ), 477 | ( 478 | Vec::from_hex( 479 | "4655474156000000000000000000000000000000000000000000000000000000", 480 | ) 481 | .unwrap() 482 | .as_slice(), 483 | Vec::from_hex("7ef9e639e2733cb34e4dfc576d4b23f72db776b2") 484 | .unwrap() 485 | .as_slice(), 486 | ), 487 | ( 488 | Vec::from_hex( 489 | "4e616d6552656700000000000000000000000000000000000000000000000000", 490 | ) 491 | .unwrap() 492 | .as_slice(), 493 | Vec::from_hex("ec4f34c97e43fbb2816cfd95e388353c7181dab1") 494 | .unwrap() 495 | .as_slice(), 496 | ), 497 | ( 498 | Vec::from_hex( 499 | "0000000000000000000000000000000000000000000000000000001234567890", 500 | ) 501 | .unwrap() 502 | .as_slice(), 503 | Vec::from_hex("").unwrap().as_slice(), 504 | ), 505 | ( 506 | Vec::from_hex( 507 | "000000000000000000000000697c7b8c961b56f675d570498424ac8de1a918f6", 508 | ) 509 | .unwrap() 510 | .as_slice(), 511 | Vec::from_hex( 512 | "6f6f6f6820736f2067726561742c207265616c6c6c793f000000000000000000", 513 | ) 514 | .unwrap() 515 | .as_slice(), 516 | ), 517 | ( 518 | Vec::from_hex( 519 | "6f6f6f6820736f2067726561742c207265616c6c6c793f000000000000000000", 520 | ) 521 | .unwrap() 522 | .as_slice(), 523 | Vec::from_hex("697c7b8c961b56f675d570498424ac8de1a918f6") 524 | .unwrap() 525 | .as_slice(), 526 | ), 527 | ], 528 | 
"0x9f6221ebb8efe7cff60a716ecb886e67dd042014be444669f0159d8e68b42100", 529 | ); 530 | assert_root( 531 | vec![ 532 | (b"key1aa", b"0123456789012345678901234567890123456789xxx"), 533 | ( 534 | b"key1", 535 | b"0123456789012345678901234567890123456789Very_Long", 536 | ), 537 | (b"key2bb", b"aval3"), 538 | (b"key2", b"short"), 539 | (b"key3cc", b"aval3"), 540 | (b"key3", b"1234567890123456789012345678901"), 541 | ], 542 | "0xcb65032e2f76c48b82b5c24b3db8f670ce73982869d38cd39a624f23d62a9e89", 543 | ); 544 | assert_root( 545 | vec![(b"abc", b"123"), (b"abcd", b"abcd"), (b"abc", b"abc")], 546 | "0x7a320748f780ad9ad5b0837302075ce0eeba6c26e3d8562c67ccc0f1b273298a", 547 | ); 548 | } 549 | 550 | // proof test ref: 551 | // - https://github.com/ethereum/go-ethereum/blob/master/trie/proof_test.go 552 | // - https://github.com/ethereum/py-trie/blob/master/tests/test_proof.py 553 | #[test] 554 | fn test_proof_basic() { 555 | let memdb = Arc::new(MemoryDB::new(true)); 556 | let mut trie = PatriciaTrie::new(Arc::clone(&memdb), Arc::new(HasherKeccak::new())); 557 | trie.insert(b"doe".to_vec(), b"reindeer".to_vec()).unwrap(); 558 | trie.insert(b"dog".to_vec(), b"puppy".to_vec()).unwrap(); 559 | trie.insert(b"dogglesworth".to_vec(), b"cat".to_vec()) 560 | .unwrap(); 561 | let root = trie.root().unwrap(); 562 | let r = format!("0x{}", hex::encode(trie.root().unwrap())); 563 | assert_eq!( 564 | r.as_str(), 565 | "0x8aad789dff2f538bca5d8ea56e8abe10f4c7ba3a5dea95fea4cd6e7c3a1168d3" 566 | ); 567 | 568 | // proof of key exists 569 | let proof = trie.get_proof(b"doe").unwrap(); 570 | let expected = vec![ 571 | "e5831646f6a0db6ae1fda66890f6693f36560d36b4dca68b4d838f17016b151efe1d4c95c453", 572 | "f83b8080808080ca20887265696e6465657280a037efd11993cb04a54048c25320e9f29c50a432d28afdf01598b2978ce1ca3068808080808080808080", 573 | ]; 574 | assert_eq!( 575 | proof 576 | .clone() 577 | .into_iter() 578 | .map(hex::encode) 579 | .collect::>(), 580 | expected 581 | ); 582 | let value = 
trie.verify_proof(&root, b"doe", proof.clone()).unwrap(); 583 | assert_eq!(value, Some(b"reindeer".to_vec())); 584 | assert_eq!( 585 | verify_proof(&root, b"doe", proof, HasherKeccak::new()).unwrap(), 586 | Some(b"reindeer".to_vec()) 587 | ); 588 | 589 | // proof of key not exist 590 | let proof = trie.get_proof(b"dogg").unwrap(); 591 | let expected = vec![ 592 | "e5831646f6a0db6ae1fda66890f6693f36560d36b4dca68b4d838f17016b151efe1d4c95c453", 593 | "f83b8080808080ca20887265696e6465657280a037efd11993cb04a54048c25320e9f29c50a432d28afdf01598b2978ce1ca3068808080808080808080", 594 | "e4808080808080ce89376c6573776f72746883636174808080808080808080857075707079", 595 | ]; 596 | assert_eq!( 597 | proof 598 | .clone() 599 | .into_iter() 600 | .map(hex::encode) 601 | .collect::>(), 602 | expected 603 | ); 604 | let value = trie.verify_proof(&root, b"dogg", proof.clone()).unwrap(); 605 | assert_eq!(value, None); 606 | assert_eq!( 607 | verify_proof(&root, b"dogg", proof, HasherKeccak::new()).unwrap(), 608 | None 609 | ); 610 | 611 | // empty proof 612 | let proof = vec![]; 613 | let value = trie.verify_proof(&root, b"doe", proof.clone()); 614 | assert!(verify_proof(&root, b"doe", proof, HasherKeccak::new()).is_err()); 615 | assert!(value.is_err()); 616 | 617 | // bad proof 618 | let proof = vec![b"aaa".to_vec(), b"ccc".to_vec()]; 619 | let value = trie.verify_proof(&root, b"doe", proof.clone()); 620 | assert!(value.is_err()); 621 | assert!(verify_proof(&root, b"doe", proof, HasherKeccak::new()).is_err()); 622 | } 623 | 624 | #[test] 625 | fn test_proof_random() { 626 | let memdb = Arc::new(MemoryDB::new(true)); 627 | let mut trie = PatriciaTrie::new(Arc::clone(&memdb), Arc::new(HasherKeccak::new())); 628 | let mut rng = rand::thread_rng(); 629 | let mut keys = vec![]; 630 | for _ in 0..100 { 631 | let random_bytes: Vec = (0..rng.gen_range(2, 30)) 632 | .map(|_| rand::random::()) 633 | .collect(); 634 | trie.insert(random_bytes.to_vec(), random_bytes.clone()) 635 | .unwrap(); 636 
| keys.push(random_bytes.clone()); 637 | } 638 | for k in keys.clone().into_iter() { 639 | trie.insert(k.clone(), k.clone()).unwrap(); 640 | } 641 | let root = trie.root().unwrap(); 642 | for k in keys.into_iter() { 643 | let proof = trie.get_proof(&k).unwrap(); 644 | let value = trie 645 | .verify_proof(&root, &k, proof.clone()) 646 | .unwrap() 647 | .unwrap(); 648 | assert_eq!(value, k); 649 | assert_eq!( 650 | verify_proof(&root, &k, proof, HasherKeccak::new()) 651 | .unwrap() 652 | .unwrap(), 653 | k 654 | ); 655 | } 656 | } 657 | 658 | #[test] 659 | fn test_proof_empty_trie() { 660 | let memdb = Arc::new(MemoryDB::new(true)); 661 | let mut trie = PatriciaTrie::new(Arc::clone(&memdb), Arc::new(HasherKeccak::new())); 662 | trie.root().unwrap(); 663 | let proof = trie.get_proof(b"not-exist").unwrap(); 664 | assert_eq!(proof.len(), 0); 665 | } 666 | 667 | #[test] 668 | fn test_proof_one_element() { 669 | let memdb = Arc::new(MemoryDB::new(true)); 670 | let mut trie = PatriciaTrie::new(Arc::clone(&memdb), Arc::new(HasherKeccak::new())); 671 | trie.insert(b"k".to_vec(), b"v".to_vec()).unwrap(); 672 | let root = trie.root().unwrap(); 673 | let proof = trie.get_proof(b"k").unwrap(); 674 | assert_eq!(proof.len(), 1); 675 | let value = trie.verify_proof(&root, b"k", proof.clone()).unwrap(); 676 | assert_eq!(value, Some(b"v".to_vec())); 677 | 678 | // remove key does not affect the verify process 679 | trie.remove(b"k").unwrap(); 680 | let _root = trie.root().unwrap(); 681 | let value = trie.verify_proof(&root, b"k", proof.clone()).unwrap(); 682 | assert_eq!(value, Some(b"v".to_vec())); 683 | assert_eq!( 684 | verify_proof(&root, b"k", proof, HasherKeccak::new()).unwrap(), 685 | Some(b"v".to_vec()) 686 | ); 687 | } 688 | } 689 | -------------------------------------------------------------------------------- /src/trie.rs: -------------------------------------------------------------------------------- 1 | use std::cell::RefCell; 2 | use std::collections::{HashMap, 
HashSet}; 3 | use std::rc::Rc; 4 | use std::sync::Arc; 5 | 6 | use hasher::Hasher; 7 | use rlp::{Prototype, Rlp, RlpStream}; 8 | 9 | use crate::db::{MemoryDB, DB}; 10 | use crate::errors::TrieError; 11 | use crate::nibbles::Nibbles; 12 | use crate::node::{empty_children, BranchNode, Node}; 13 | 14 | pub type TrieResult = Result; 15 | 16 | pub trait Trie { 17 | /// Returns the value for key stored in the trie. 18 | fn get(&self, key: &[u8]) -> TrieResult>>; 19 | 20 | /// Checks that the key is present in the trie 21 | fn contains(&self, key: &[u8]) -> TrieResult; 22 | 23 | /// Inserts value into trie and modifies it if it exists 24 | fn insert(&mut self, key: Vec, value: Vec) -> TrieResult<()>; 25 | 26 | /// Removes any existing value for key from the trie. 27 | fn remove(&mut self, key: &[u8]) -> TrieResult; 28 | 29 | /// Saves all the nodes in the db, clears the cache data, recalculates the root. 30 | /// Returns the root hash of the trie. 31 | fn root(&mut self) -> TrieResult>; 32 | 33 | /// Prove constructs a merkle proof for key. The result contains all encoded nodes 34 | /// on the path to the value at key. The value itself is also included in the last 35 | /// node and can be retrieved by verifying the proof. 36 | /// 37 | /// If the trie does not contain a value for key, the returned proof contains all 38 | /// nodes of the longest existing prefix of the key (at least the root node), ending 39 | /// with the node that proves the absence of the key. 
40 | fn get_proof(&self, key: &[u8]) -> TrieResult>>; 41 | 42 | /// return value if key exists, None if key not exist, Error if proof is wrong 43 | fn verify_proof( 44 | &self, 45 | root_hash: &[u8], 46 | key: &[u8], 47 | proof: Vec>, 48 | ) -> TrieResult>>; 49 | } 50 | 51 | #[derive(Debug)] 52 | pub struct PatriciaTrie 53 | where 54 | D: DB, 55 | H: Hasher, 56 | { 57 | root: Node, 58 | root_hash: Vec, 59 | 60 | db: Arc, 61 | hasher: Arc, 62 | backup_db: Option>, 63 | 64 | cache: RefCell, Vec>>, 65 | passing_keys: RefCell>>, 66 | gen_keys: RefCell>>, 67 | } 68 | 69 | #[derive(Clone, Debug)] 70 | enum TraceStatus { 71 | Start, 72 | Doing, 73 | Child(u8), 74 | End, 75 | } 76 | 77 | #[derive(Clone, Debug)] 78 | struct TraceNode { 79 | node: Node, 80 | status: TraceStatus, 81 | } 82 | 83 | impl TraceNode { 84 | fn advance(&mut self) { 85 | self.status = match &self.status { 86 | TraceStatus::Start => TraceStatus::Doing, 87 | TraceStatus::Doing => match self.node { 88 | Node::Branch(_) => TraceStatus::Child(0), 89 | _ => TraceStatus::End, 90 | }, 91 | TraceStatus::Child(i) if *i < 15 => TraceStatus::Child(i + 1), 92 | _ => TraceStatus::End, 93 | } 94 | } 95 | } 96 | 97 | impl From for TraceNode { 98 | fn from(node: Node) -> TraceNode { 99 | TraceNode { 100 | node, 101 | status: TraceStatus::Start, 102 | } 103 | } 104 | } 105 | 106 | pub struct TrieIterator<'a, D, H> 107 | where 108 | D: DB, 109 | H: Hasher, 110 | { 111 | trie: &'a PatriciaTrie, 112 | nibble: Nibbles, 113 | nodes: Vec, 114 | } 115 | 116 | impl<'a, D, H> Iterator for TrieIterator<'a, D, H> 117 | where 118 | D: DB, 119 | H: Hasher, 120 | { 121 | type Item = (Vec, Vec); 122 | 123 | fn next(&mut self) -> Option { 124 | loop { 125 | let mut now = self.nodes.last().cloned(); 126 | if let Some(ref mut now) = now { 127 | self.nodes.last_mut().unwrap().advance(); 128 | 129 | match (now.status.clone(), &now.node) { 130 | (TraceStatus::End, node) => { 131 | match *node { 132 | Node::Leaf(ref leaf) => { 133 | let 
cur_len = self.nibble.len(); 134 | self.nibble.truncate(cur_len - leaf.borrow().key.len()); 135 | } 136 | 137 | Node::Extension(ref ext) => { 138 | let cur_len = self.nibble.len(); 139 | self.nibble.truncate(cur_len - ext.borrow().prefix.len()); 140 | } 141 | 142 | Node::Branch(_) => { 143 | self.nibble.pop(); 144 | } 145 | _ => {} 146 | } 147 | self.nodes.pop(); 148 | } 149 | 150 | (TraceStatus::Doing, Node::Extension(ref ext)) => { 151 | self.nibble.extend(&ext.borrow().prefix); 152 | self.nodes.push((ext.borrow().node.clone()).into()); 153 | } 154 | 155 | (TraceStatus::Doing, Node::Leaf(ref leaf)) => { 156 | self.nibble.extend(&leaf.borrow().key); 157 | return Some((self.nibble.encode_raw().0, leaf.borrow().value.clone())); 158 | } 159 | 160 | (TraceStatus::Doing, Node::Branch(ref branch)) => { 161 | let value = branch.borrow().value.clone(); 162 | if let Some(data) = value { 163 | return Some((self.nibble.encode_raw().0, data)); 164 | } else { 165 | continue; 166 | } 167 | } 168 | 169 | (TraceStatus::Doing, Node::Hash(ref hash_node)) => { 170 | if let Ok(n) = self.trie.recover_from_db(&hash_node.borrow().hash.clone()) { 171 | self.nodes.pop(); 172 | self.nodes.push(n.into()); 173 | } else { 174 | //error!(); 175 | return None; 176 | } 177 | } 178 | 179 | (TraceStatus::Child(i), Node::Branch(ref branch)) => { 180 | if i == 0 { 181 | self.nibble.push(0); 182 | } else { 183 | self.nibble.pop(); 184 | self.nibble.push(i); 185 | } 186 | self.nodes 187 | .push((branch.borrow().children[i as usize].clone()).into()); 188 | } 189 | 190 | (_, Node::Empty) => { 191 | self.nodes.pop(); 192 | } 193 | _ => {} 194 | } 195 | } else { 196 | return None; 197 | } 198 | } 199 | } 200 | } 201 | 202 | impl PatriciaTrie 203 | where 204 | D: DB, 205 | H: Hasher, 206 | { 207 | pub fn iter(&self) -> TrieIterator { 208 | let nodes = vec![self.root.clone().into()]; 209 | TrieIterator { 210 | trie: self, 211 | nibble: Nibbles::from_raw(vec![], false), 212 | nodes, 213 | } 214 | } 215 | pub 
fn new(db: Arc, hasher: Arc) -> Self { 216 | Self { 217 | root: Node::Empty, 218 | root_hash: hasher.digest(rlp::NULL_RLP.as_ref()), 219 | 220 | cache: RefCell::new(HashMap::new()), 221 | passing_keys: RefCell::new(HashSet::new()), 222 | gen_keys: RefCell::new(HashSet::new()), 223 | 224 | db, 225 | hasher, 226 | backup_db: None, 227 | } 228 | } 229 | 230 | pub fn from(db: Arc, hasher: Arc, root: &[u8]) -> TrieResult { 231 | match db.get(root).map_err(|e| TrieError::DB(e.to_string()))? { 232 | Some(data) => { 233 | let mut trie = Self { 234 | root: Node::Empty, 235 | root_hash: root.to_vec(), 236 | 237 | cache: RefCell::new(HashMap::new()), 238 | passing_keys: RefCell::new(HashSet::new()), 239 | gen_keys: RefCell::new(HashSet::new()), 240 | 241 | db, 242 | hasher, 243 | backup_db: None, 244 | }; 245 | 246 | trie.root = trie.decode_node(&data)?; 247 | Ok(trie) 248 | } 249 | None => Err(TrieError::InvalidStateRoot), 250 | } 251 | } 252 | 253 | // extract specified height statedb in full node mode 254 | pub fn extract_backup( 255 | db: Arc, 256 | backup_db: Option>, 257 | hasher: Arc, 258 | root_hash: &[u8], 259 | ) -> TrieResult<(Self, Vec>)> { 260 | let mut pt = Self { 261 | root: Node::Empty, 262 | root_hash: hasher.digest(rlp::NULL_RLP.as_ref()), 263 | 264 | cache: RefCell::new(HashMap::new()), 265 | passing_keys: RefCell::new(HashSet::new()), 266 | gen_keys: RefCell::new(HashSet::new()), 267 | 268 | db, 269 | hasher, 270 | backup_db, 271 | }; 272 | 273 | let root = pt.recover_from_db(root_hash)?; 274 | pt.root = root.clone(); 275 | pt.root_hash = root_hash.to_vec(); 276 | 277 | let mut addr_list = vec![]; 278 | pt.iter().for_each(|(k, _v)| addr_list.push(k)); 279 | let encoded = pt.cache_node(root)?; 280 | pt.cache 281 | .borrow_mut() 282 | .insert(pt.hasher.digest(&encoded), encoded); 283 | 284 | let mut keys = Vec::with_capacity(pt.cache.borrow().len()); 285 | let mut values = Vec::with_capacity(pt.cache.borrow().len()); 286 | for (k, v) in 
pt.cache.borrow_mut().drain() { 287 | keys.push(k.to_vec()); 288 | values.push(v); 289 | } 290 | 291 | // store data in backup db 292 | pt.backup_db 293 | .clone() 294 | .unwrap() 295 | .insert_batch(keys, values) 296 | .map_err(|e| TrieError::DB(e.to_string()))?; 297 | pt.backup_db 298 | .clone() 299 | .unwrap() 300 | .flush() 301 | .map_err(|e| TrieError::DB(e.to_string()))?; 302 | Ok((pt, addr_list)) 303 | } 304 | } 305 | 306 | impl Trie for PatriciaTrie 307 | where 308 | D: DB, 309 | H: Hasher, 310 | { 311 | /// Returns the value for key stored in the trie. 312 | fn get(&self, key: &[u8]) -> TrieResult>> { 313 | self.get_at(self.root.clone(), &Nibbles::from_raw(key.to_vec(), true)) 314 | } 315 | 316 | /// Checks that the key is present in the trie 317 | fn contains(&self, key: &[u8]) -> TrieResult { 318 | Ok(self 319 | .get_at(self.root.clone(), &Nibbles::from_raw(key.to_vec(), true))? 320 | .map_or(false, |_| true)) 321 | } 322 | 323 | /// Inserts value into trie and modifies it if it exists 324 | fn insert(&mut self, key: Vec, value: Vec) -> TrieResult<()> { 325 | if value.is_empty() { 326 | self.remove(&key)?; 327 | return Ok(()); 328 | } 329 | let root = self.root.clone(); 330 | self.root = self.insert_at(root, Nibbles::from_raw(key, true), value.to_vec())?; 331 | Ok(()) 332 | } 333 | 334 | /// Removes any existing value for key from the trie. 335 | fn remove(&mut self, key: &[u8]) -> TrieResult { 336 | let (n, removed) = 337 | self.delete_at(self.root.clone(), &Nibbles::from_raw(key.to_vec(), true))?; 338 | self.root = n; 339 | Ok(removed) 340 | } 341 | 342 | /// Saves all the nodes in the db, clears the cache data, recalculates the root. 343 | /// Returns the root hash of the trie. 344 | fn root(&mut self) -> TrieResult> { 345 | self.commit() 346 | } 347 | 348 | /// Prove constructs a merkle proof for key. The result contains all encoded nodes 349 | /// on the path to the value at key. 
The value itself is also included in the last 350 | /// node and can be retrieved by verifying the proof. 351 | /// 352 | /// If the trie does not contain a value for key, the returned proof contains all 353 | /// nodes of the longest existing prefix of the key (at least the root node), ending 354 | /// with the node that proves the absence of the key. 355 | fn get_proof(&self, key: &[u8]) -> TrieResult>> { 356 | let mut path = 357 | self.get_path_at(self.root.clone(), &Nibbles::from_raw(key.to_vec(), true))?; 358 | match self.root { 359 | Node::Empty => {} 360 | _ => path.push(self.root.clone()), 361 | } 362 | Ok(path.into_iter().rev().map(|n| self.encode_raw(n)).collect()) 363 | } 364 | 365 | /// return value if key exists, None if key not exist, Error if proof is wrong 366 | fn verify_proof( 367 | &self, 368 | root_hash: &[u8], 369 | key: &[u8], 370 | proof: Vec>, 371 | ) -> TrieResult>> { 372 | let memdb = Arc::new(MemoryDB::new(true)); 373 | for node_encoded in proof.into_iter() { 374 | let hash = self.hasher.digest(&node_encoded); 375 | 376 | if root_hash.eq(&hash) || node_encoded.len() >= H::LENGTH { 377 | memdb.insert(hash, node_encoded).unwrap(); 378 | } 379 | } 380 | let trie = PatriciaTrie::from(memdb, Arc::clone(&self.hasher), root_hash) 381 | .or(Err(TrieError::InvalidProof))?; 382 | trie.get(key).or(Err(TrieError::InvalidProof)) 383 | } 384 | } 385 | 386 | impl PatriciaTrie 387 | where 388 | D: DB, 389 | H: Hasher, 390 | { 391 | fn get_at(&self, n: Node, partial: &Nibbles) -> TrieResult>> { 392 | match n { 393 | Node::Empty => Ok(None), 394 | Node::Leaf(leaf) => { 395 | let borrow_leaf = leaf.borrow(); 396 | 397 | if &borrow_leaf.key == partial { 398 | Ok(Some(borrow_leaf.value.clone())) 399 | } else { 400 | Ok(None) 401 | } 402 | } 403 | Node::Branch(branch) => { 404 | let borrow_branch = branch.borrow(); 405 | 406 | if partial.is_empty() || partial.at(0) == 16 { 407 | Ok(borrow_branch.value.clone()) 408 | } else { 409 | let index = partial.at(0); 
self.get_at(borrow_branch.children[index].clone(), &partial.offset(1))
                }
            }
            Node::Extension(extension) => {
                let extension = extension.borrow();

                let prefix = &extension.prefix;
                let match_len = partial.common_prefix(prefix);
                if match_len == prefix.len() {
                    self.get_at(extension.node.clone(), &partial.offset(match_len))
                } else {
                    // Key diverges inside the extension prefix: not present.
                    Ok(None)
                }
            }
            Node::Hash(hash_node) => {
                let borrow_hash_node = hash_node.borrow();
                let n = self.recover_from_db(&borrow_hash_node.hash)?;
                self.get_at(n, partial)
            }
        }
    }

    /// Recursively inserts `value` at `partial` below node `n`; returns the
    /// (possibly new) root of the rewritten subtree.
    fn insert_at(&self, n: Node, partial: Nibbles, value: Vec<u8>) -> TrieResult<Node> {
        match n {
            Node::Empty => Ok(Node::from_leaf(partial, value)),
            Node::Leaf(leaf) => {
                let mut borrow_leaf = leaf.borrow_mut();

                let old_partial = &borrow_leaf.key;
                let match_index = partial.common_prefix(old_partial);
                if match_index == old_partial.len() {
                    // replace leaf value
                    borrow_leaf.value = value;
                    return Ok(Node::Leaf(leaf.clone()));
                }

                // The keys diverge: split into a branch holding both leaves.
                let mut branch = BranchNode {
                    children: empty_children(),
                    value: None,
                };

                let n = Node::from_leaf(
                    old_partial.offset(match_index + 1),
                    borrow_leaf.value.clone(),
                );
                branch.insert(old_partial.at(match_index), n);

                let n = Node::from_leaf(partial.offset(match_index + 1), value);
                branch.insert(partial.at(match_index), n);

                if match_index == 0 {
                    return Ok(Node::Branch(Rc::new(RefCell::new(branch))));
                }

                // if include a common prefix
                Ok(Node::from_extension(
                    partial.slice(0, match_index),
                    Node::Branch(Rc::new(RefCell::new(branch))),
                ))
            }
            Node::Branch(branch) => {
                let mut borrow_branch = branch.borrow_mut();

                // 0x10 is the terminator nibble: the value lives on this branch.
                if partial.at(0) == 0x10 {
                    borrow_branch.value = Some(value);
                    return Ok(Node::Branch(branch.clone()));
                }

                let child = borrow_branch.children[partial.at(0)].clone();
                let new_child = self.insert_at(child, partial.offset(1), value)?;
                borrow_branch.children[partial.at(0)] = new_child;
                Ok(Node::Branch(branch.clone()))
            }
            Node::Extension(ext) => {
                let mut borrow_ext = ext.borrow_mut();

                let prefix = &borrow_ext.prefix;
                let sub_node = borrow_ext.node.clone();
                let match_index = partial.common_prefix(prefix);

                if match_index == 0 {
                    // No shared prefix: demote the extension into a branch and retry.
                    let mut branch = BranchNode {
                        children: empty_children(),
                        value: None,
                    };
                    branch.insert(
                        prefix.at(0),
                        if prefix.len() == 1 {
                            sub_node
                        } else {
                            Node::from_extension(prefix.offset(1), sub_node)
                        },
                    );
                    let node = Node::Branch(Rc::new(RefCell::new(branch)));

                    return self.insert_at(node, partial, value);
                }

                if match_index == prefix.len() {
                    // Whole prefix matched: descend past the extension.
                    let new_node = self.insert_at(sub_node, partial.offset(match_index), value)?;
                    return Ok(Node::from_extension(prefix.clone(), new_node));
                }

                // Partial overlap: shorten this extension and push the rest down.
                let new_ext = Node::from_extension(prefix.offset(match_index), sub_node);
                let new_node = self.insert_at(new_ext, partial.offset(match_index), value)?;
                borrow_ext.prefix = prefix.slice(0, match_index);
                borrow_ext.node = new_node;
                Ok(Node::Extension(ext.clone()))
            }
            Node::Hash(hash_node) => {
                let borrow_hash_node = hash_node.borrow();

                // The on-disk node will be rewritten; remember its key for pruning.
                self.passing_keys
                    .borrow_mut()
                    .insert(borrow_hash_node.hash.to_vec());
                let n = self.recover_from_db(&borrow_hash_node.hash)?;
                self.insert_at(n, partial, value)
            }
        }
    }

    /// Recursively deletes the value at `partial`; returns the rewritten
    /// subtree and whether anything was actually removed.
    fn delete_at(&self, n: Node, partial: &Nibbles) -> TrieResult<(Node, bool)> {
        let (new_n, deleted) = match n {
            Node::Empty => Ok((Node::Empty, false)),
            Node::Leaf(leaf) => {
                let borrow_leaf = leaf.borrow();
536 | 537 | if &borrow_leaf.key == partial { 538 | return Ok((Node::Empty, true)); 539 | } 540 | Ok((Node::Leaf(leaf.clone()), false)) 541 | } 542 | Node::Branch(branch) => { 543 | let mut borrow_branch = branch.borrow_mut(); 544 | 545 | if partial.at(0) == 0x10 { 546 | borrow_branch.value = None; 547 | return Ok((Node::Branch(branch.clone()), true)); 548 | } 549 | 550 | let index = partial.at(0); 551 | let node = borrow_branch.children[index].clone(); 552 | 553 | let (new_n, deleted) = self.delete_at(node, &partial.offset(1))?; 554 | if deleted { 555 | borrow_branch.children[index] = new_n; 556 | } 557 | 558 | Ok((Node::Branch(branch.clone()), deleted)) 559 | } 560 | Node::Extension(ext) => { 561 | let mut borrow_ext = ext.borrow_mut(); 562 | 563 | let prefix = &borrow_ext.prefix; 564 | let match_len = partial.common_prefix(prefix); 565 | 566 | if match_len == prefix.len() { 567 | let (new_n, deleted) = 568 | self.delete_at(borrow_ext.node.clone(), &partial.offset(match_len))?; 569 | 570 | if deleted { 571 | borrow_ext.node = new_n; 572 | } 573 | 574 | Ok((Node::Extension(ext.clone()), deleted)) 575 | } else { 576 | Ok((Node::Extension(ext.clone()), false)) 577 | } 578 | } 579 | Node::Hash(hash_node) => { 580 | let hash = hash_node.borrow().hash.clone(); 581 | self.passing_keys.borrow_mut().insert(hash.clone()); 582 | 583 | let n = self.recover_from_db(&hash)?; 584 | self.delete_at(n, partial) 585 | } 586 | }?; 587 | 588 | if deleted { 589 | Ok((self.degenerate(new_n)?, deleted)) 590 | } else { 591 | Ok((new_n, deleted)) 592 | } 593 | } 594 | 595 | fn degenerate(&self, n: Node) -> TrieResult { 596 | match n { 597 | Node::Branch(branch) => { 598 | let borrow_branch = branch.borrow(); 599 | 600 | let mut used_indexs = vec![]; 601 | for (index, node) in borrow_branch.children.iter().enumerate() { 602 | match node { 603 | Node::Empty => continue, 604 | _ => used_indexs.push(index), 605 | } 606 | } 607 | 608 | // if only a value node, transmute to leaf. 
609 | if used_indexs.is_empty() && borrow_branch.value.is_some() { 610 | let key = Nibbles::from_raw([].to_vec(), true); 611 | let value = borrow_branch.value.clone().unwrap(); 612 | Ok(Node::from_leaf(key, value)) 613 | // if only one node. make an extension. 614 | } else if used_indexs.len() == 1 && borrow_branch.value.is_none() { 615 | let used_index = used_indexs[0]; 616 | let n = borrow_branch.children[used_index].clone(); 617 | 618 | let new_node = 619 | Node::from_extension(Nibbles::from_hex(vec![used_index as u8]), n); 620 | self.degenerate(new_node) 621 | } else { 622 | Ok(Node::Branch(branch.clone())) 623 | } 624 | } 625 | Node::Extension(ext) => { 626 | let borrow_ext = ext.borrow(); 627 | 628 | let prefix = &borrow_ext.prefix; 629 | match borrow_ext.node.clone() { 630 | Node::Extension(sub_ext) => { 631 | let borrow_sub_ext = sub_ext.borrow(); 632 | 633 | let new_prefix = prefix.join(&borrow_sub_ext.prefix); 634 | let new_n = Node::from_extension(new_prefix, borrow_sub_ext.node.clone()); 635 | self.degenerate(new_n) 636 | } 637 | Node::Leaf(leaf) => { 638 | let borrow_leaf = leaf.borrow(); 639 | 640 | let new_prefix = prefix.join(&borrow_leaf.key); 641 | Ok(Node::from_leaf(new_prefix, borrow_leaf.value.clone())) 642 | } 643 | // try again after recovering node from the db. 644 | Node::Hash(hash_node) => { 645 | let hash = hash_node.borrow().hash.clone(); 646 | self.passing_keys.borrow_mut().insert(hash.clone()); 647 | 648 | let new_node = self.recover_from_db(&hash)?; 649 | 650 | let n = Node::from_extension(borrow_ext.prefix.clone(), new_node); 651 | self.degenerate(n) 652 | } 653 | _ => Ok(Node::Extension(ext.clone())), 654 | } 655 | } 656 | _ => Ok(n), 657 | } 658 | } 659 | 660 | // Get nodes path along the key, only the nodes whose encode length is greater than 661 | // hash length are added. 662 | // For embedded nodes whose data are already contained in their parent node, we don't need to 663 | // add them in the path. 
664 | // In the code below, we only add the nodes get by `get_node_from_hash`, because they contains 665 | // all data stored in db, including nodes whose encoded data is less than hash length. 666 | fn get_path_at(&self, n: Node, partial: &Nibbles) -> TrieResult> { 667 | match n { 668 | Node::Empty | Node::Leaf(_) => Ok(vec![]), 669 | Node::Branch(branch) => { 670 | let borrow_branch = branch.borrow(); 671 | 672 | if partial.is_empty() || partial.at(0) == 16 { 673 | Ok(vec![]) 674 | } else { 675 | let node = borrow_branch.children[partial.at(0)].clone(); 676 | self.get_path_at(node, &partial.offset(1)) 677 | } 678 | } 679 | Node::Extension(ext) => { 680 | let borrow_ext = ext.borrow(); 681 | 682 | let prefix = &borrow_ext.prefix; 683 | let match_len = partial.common_prefix(prefix); 684 | 685 | if match_len == prefix.len() { 686 | self.get_path_at(borrow_ext.node.clone(), &partial.offset(match_len)) 687 | } else { 688 | Ok(vec![]) 689 | } 690 | } 691 | Node::Hash(hash_node) => { 692 | let n = self.recover_from_db(&hash_node.borrow().hash.clone())?; 693 | let mut rest = self.get_path_at(n.clone(), partial)?; 694 | rest.push(n); 695 | Ok(rest) 696 | } 697 | } 698 | } 699 | 700 | fn commit(&mut self) -> TrieResult> { 701 | let encoded = self.encode_node(self.root.clone()); 702 | let root_hash = if encoded.len() < H::LENGTH { 703 | let hash = self.hasher.digest(&encoded); 704 | self.cache.borrow_mut().insert(hash.clone(), encoded); 705 | hash 706 | } else { 707 | encoded 708 | }; 709 | 710 | let mut keys = Vec::with_capacity(self.cache.borrow().len()); 711 | let mut values = Vec::with_capacity(self.cache.borrow().len()); 712 | for (k, v) in self.cache.borrow_mut().drain() { 713 | keys.push(k.to_vec()); 714 | values.push(v); 715 | } 716 | 717 | self.db 718 | .insert_batch(keys, values) 719 | .map_err(|e| TrieError::DB(e.to_string()))?; 720 | 721 | let removed_keys: Vec> = self 722 | .passing_keys 723 | .borrow() 724 | .iter() 725 | .filter(|h| 
!self.gen_keys.borrow().contains(*h)) 726 | .map(|h| h.to_vec()) 727 | .collect(); 728 | 729 | self.db 730 | .remove_batch(&removed_keys) 731 | .map_err(|e| TrieError::DB(e.to_string()))?; 732 | 733 | self.root_hash = root_hash.to_vec(); 734 | self.gen_keys.borrow_mut().clear(); 735 | self.passing_keys.borrow_mut().clear(); 736 | self.root = self.recover_from_db(&root_hash)?; 737 | Ok(root_hash) 738 | } 739 | 740 | fn encode_node(&self, n: Node) -> Vec { 741 | // Returns the hash value directly to avoid double counting. 742 | if let Node::Hash(hash_node) = n { 743 | return hash_node.borrow().hash.clone(); 744 | } 745 | 746 | let data = self.encode_raw(n.clone()); 747 | // Nodes smaller than 32 bytes are stored inside their parent, 748 | // Nodes equal to 32 bytes are returned directly 749 | if data.len() < H::LENGTH { 750 | data 751 | } else { 752 | let hash = self.hasher.digest(&data); 753 | self.cache.borrow_mut().insert(hash.clone(), data); 754 | 755 | self.gen_keys.borrow_mut().insert(hash.clone()); 756 | hash 757 | } 758 | } 759 | 760 | fn encode_raw(&self, n: Node) -> Vec { 761 | match n { 762 | Node::Empty => rlp::NULL_RLP.to_vec(), 763 | Node::Leaf(leaf) => { 764 | let borrow_leaf = leaf.borrow(); 765 | 766 | let mut stream = RlpStream::new_list(2); 767 | stream.append(&borrow_leaf.key.encode_compact()); 768 | stream.append(&borrow_leaf.value); 769 | stream.out().to_vec() 770 | } 771 | Node::Branch(branch) => { 772 | let borrow_branch = branch.borrow(); 773 | 774 | let mut stream = RlpStream::new_list(17); 775 | for i in 0..16 { 776 | let n = borrow_branch.children[i].clone(); 777 | let data = self.encode_node(n); 778 | if data.len() == H::LENGTH { 779 | stream.append(&data); 780 | } else { 781 | stream.append_raw(&data, 1); 782 | } 783 | } 784 | 785 | match &borrow_branch.value { 786 | Some(v) => stream.append(v), 787 | None => stream.append_empty_data(), 788 | }; 789 | stream.out().to_vec() 790 | } 791 | Node::Extension(ext) => { 792 | let borrow_ext = 
ext.borrow(); 793 | 794 | let mut stream = RlpStream::new_list(2); 795 | stream.append(&borrow_ext.prefix.encode_compact()); 796 | let data = self.encode_node(borrow_ext.node.clone()); 797 | if data.len() == H::LENGTH { 798 | stream.append(&data); 799 | } else { 800 | stream.append_raw(&data, 1); 801 | } 802 | stream.out().to_vec() 803 | } 804 | Node::Hash(_hash) => unreachable!(), 805 | } 806 | } 807 | 808 | #[allow(clippy::only_used_in_recursion)] 809 | fn decode_node(&self, data: &[u8]) -> TrieResult { 810 | let r = Rlp::new(data); 811 | 812 | match r.prototype()? { 813 | Prototype::Data(0) => Ok(Node::Empty), 814 | Prototype::List(2) => { 815 | let key = r.at(0)?.data()?; 816 | let key = Nibbles::from_compact(key.to_vec()); 817 | 818 | if key.is_leaf() { 819 | Ok(Node::from_leaf(key, r.at(1)?.data()?.to_vec())) 820 | } else { 821 | let n = self.decode_node(r.at(1)?.as_raw())?; 822 | 823 | Ok(Node::from_extension(key, n)) 824 | } 825 | } 826 | Prototype::List(17) => { 827 | let mut nodes = empty_children(); 828 | #[allow(clippy::needless_range_loop)] 829 | for i in 0..nodes.len() { 830 | let rlp_data = r.at(i)?; 831 | let n = self.decode_node(rlp_data.as_raw())?; 832 | nodes[i] = n; 833 | } 834 | 835 | // The last element is a value node. 836 | let value_rlp = r.at(16)?; 837 | let value = if value_rlp.is_empty() { 838 | None 839 | } else { 840 | Some(value_rlp.data()?.to_vec()) 841 | }; 842 | 843 | Ok(Node::from_branch(nodes, value)) 844 | } 845 | _ => { 846 | if r.is_data() && r.size() == H::LENGTH { 847 | Ok(Node::from_hash(r.data()?.to_vec())) 848 | } else { 849 | Err(TrieError::InvalidData) 850 | } 851 | } 852 | } 853 | } 854 | 855 | fn recover_from_db(&self, key: &[u8]) -> TrieResult { 856 | match self.db.get(key).map_err(|e| TrieError::DB(e.to_string()))? 
{ 857 | Some(value) => Ok(self.decode_node(&value)?), 858 | None => Ok(Node::Empty), 859 | } 860 | } 861 | 862 | fn cache_node(&self, n: Node) -> TrieResult> { 863 | match n { 864 | Node::Empty => Ok(rlp::NULL_RLP.to_vec()), 865 | Node::Leaf(leaf) => { 866 | let borrow_leaf = leaf.borrow(); 867 | 868 | let mut stream = RlpStream::new_list(2); 869 | stream.append(&borrow_leaf.key.encode_compact()); 870 | stream.append(&borrow_leaf.value); 871 | Ok(stream.out().to_vec()) 872 | } 873 | Node::Branch(branch) => { 874 | let borrow_branch = branch.borrow(); 875 | 876 | let mut stream = RlpStream::new_list(17); 877 | for i in 0..16 { 878 | let n = borrow_branch.children[i].clone(); 879 | let data = self.cache_node(n)?; 880 | if data.len() == H::LENGTH { 881 | stream.append(&data); 882 | } else { 883 | stream.append_raw(&data, 1); 884 | } 885 | } 886 | 887 | match &borrow_branch.value { 888 | Some(v) => stream.append(v), 889 | None => stream.append_empty_data(), 890 | }; 891 | Ok(stream.out().to_vec()) 892 | } 893 | Node::Extension(ext) => { 894 | let borrow_ext = ext.borrow(); 895 | 896 | let mut stream = RlpStream::new_list(2); 897 | stream.append(&borrow_ext.prefix.encode_compact()); 898 | let data = self.cache_node(borrow_ext.node.clone())?; 899 | if data.len() == H::LENGTH { 900 | stream.append(&data); 901 | } else { 902 | stream.append_raw(&data, 1); 903 | } 904 | Ok(stream.out().to_vec()) 905 | } 906 | Node::Hash(hash_node) => { 907 | let hash = hash_node.borrow().hash.clone(); 908 | let next_node = self.recover_from_db(&hash)?; 909 | let data = self.cache_node(next_node)?; 910 | self.cache.borrow_mut().insert(hash.clone(), data); 911 | Ok(hash) 912 | } 913 | } 914 | } 915 | } 916 | 917 | #[cfg(test)] 918 | mod tests { 919 | use rand::distributions::Alphanumeric; 920 | use rand::seq::SliceRandom; 921 | use rand::{thread_rng, Rng}; 922 | use std::collections::{HashMap, HashSet}; 923 | use std::sync::Arc; 924 | 925 | use hasher::{Hasher, HasherKeccak}; 926 | 927 | use 
super::{PatriciaTrie, Trie};
    use crate::db::{MemoryDB, DB};

    #[test]
    fn test_trie_insert() {
        let memdb = Arc::new(MemoryDB::new(true));
        let mut trie = PatriciaTrie::new(memdb, Arc::new(HasherKeccak::new()));
        trie.insert(b"test".to_vec(), b"test".to_vec()).unwrap();
    }

    #[test]
    fn test_trie_get() {
        let memdb = Arc::new(MemoryDB::new(true));
        let mut trie = PatriciaTrie::new(memdb, Arc::new(HasherKeccak::new()));
        trie.insert(b"test".to_vec(), b"test".to_vec()).unwrap();
        let v = trie.get(b"test").unwrap();

        assert_eq!(Some(b"test".to_vec()), v)
    }

    #[test]
    fn test_trie_random_insert() {
        let memdb = Arc::new(MemoryDB::new(true));
        let mut trie = PatriciaTrie::new(memdb, Arc::new(HasherKeccak::new()));

        // Insert 1000 random keys and verify each is immediately readable.
        for _ in 0..1000 {
            let rand_str: String = thread_rng().sample_iter(&Alphanumeric).take(30).collect();
            let val = rand_str.as_bytes();
            trie.insert(val.to_vec(), val.to_vec()).unwrap();

            let v = trie.get(val).unwrap();
            assert_eq!(v.map(|v| v.to_vec()), Some(val.to_vec()));
        }
    }

    #[test]
    fn test_trie_contains() {
        let memdb = Arc::new(MemoryDB::new(true));
        let mut trie = PatriciaTrie::new(memdb, Arc::new(HasherKeccak::new()));
        trie.insert(b"test".to_vec(), b"test".to_vec()).unwrap();
        assert!(trie.contains(b"test").unwrap());
        assert!(!trie.contains(b"test2").unwrap());
    }

    #[test]
    fn test_trie_remove() {
        let memdb = Arc::new(MemoryDB::new(true));
        let mut trie = PatriciaTrie::new(memdb, Arc::new(HasherKeccak::new()));
        trie.insert(b"test".to_vec(), b"test".to_vec()).unwrap();
        let removed = trie.remove(b"test").unwrap();
        assert!(removed)
    }

    #[test]
    fn test_trie_random_remove() {
        let memdb = Arc::new(MemoryDB::new(true));
        let mut trie = PatriciaTrie::new(memdb, Arc::new(HasherKeccak::new()));

        for _ in 0..1000 {
            let rand_str: String = thread_rng().sample_iter(&Alphanumeric).take(30).collect();
            let val = rand_str.as_bytes();
            trie.insert(val.to_vec(), val.to_vec()).unwrap();

            let removed = trie.remove(val).unwrap();
            assert!(removed);
        }
    }

    #[test]
    fn test_trie_from_root() {
        let memdb = Arc::new(MemoryDB::new(true));
        let root = {
            let mut trie = PatriciaTrie::new(Arc::clone(&memdb), Arc::new(HasherKeccak::new()));
            trie.insert(b"test".to_vec(), b"test".to_vec()).unwrap();
            trie.insert(b"test1".to_vec(), b"test".to_vec()).unwrap();
            trie.insert(b"test2".to_vec(), b"test".to_vec()).unwrap();
            trie.insert(b"test23".to_vec(), b"test".to_vec()).unwrap();
            trie.insert(b"test33".to_vec(), b"test".to_vec()).unwrap();
            trie.insert(b"test44".to_vec(), b"test".to_vec()).unwrap();
            trie.root().unwrap()
        };

        // Reopen from the committed root and verify reads and root stability.
        let mut trie =
            PatriciaTrie::from(Arc::clone(&memdb), Arc::new(HasherKeccak::new()), &root).unwrap();
        let v1 = trie.get(b"test33").unwrap();
        assert_eq!(Some(b"test".to_vec()), v1);
        let v2 = trie.get(b"test44").unwrap();
        assert_eq!(Some(b"test".to_vec()), v2);
        let root2 = trie.root().unwrap();
        assert_eq!(hex::encode(root), hex::encode(root2));
    }

    #[test]
    fn test_trie_from_root_and_insert() {
        let memdb = Arc::new(MemoryDB::new(true));
        let root = {
            let mut trie = PatriciaTrie::new(Arc::clone(&memdb), Arc::new(HasherKeccak::new()));
            trie.insert(b"test".to_vec(), b"test".to_vec()).unwrap();
            trie.insert(b"test1".to_vec(), b"test".to_vec()).unwrap();
            trie.insert(b"test2".to_vec(), b"test".to_vec()).unwrap();
            trie.insert(b"test23".to_vec(), b"test".to_vec()).unwrap();
            trie.insert(b"test33".to_vec(), b"test".to_vec()).unwrap();
            trie.insert(b"test44".to_vec(), b"test".to_vec()).unwrap();
            trie.commit().unwrap()
        };

        let mut trie =
            PatriciaTrie::from(Arc::clone(&memdb), Arc::new(HasherKeccak::new()), &root).unwrap();
        trie.insert(b"test55".to_vec(), b"test55".to_vec()).unwrap();
        trie.commit().unwrap();
        let v = trie.get(b"test55").unwrap();
        assert_eq!(Some(b"test55".to_vec()), v);
    }

    #[test]
    fn test_trie_from_root_and_delete() {
        let memdb = Arc::new(MemoryDB::new(true));
        let root = {
            let mut trie = PatriciaTrie::new(Arc::clone(&memdb), Arc::new(HasherKeccak::new()));
            trie.insert(b"test".to_vec(), b"test".to_vec()).unwrap();
            trie.insert(b"test1".to_vec(), b"test".to_vec()).unwrap();
            trie.insert(b"test2".to_vec(), b"test".to_vec()).unwrap();
            trie.insert(b"test23".to_vec(), b"test".to_vec()).unwrap();
            trie.insert(b"test33".to_vec(), b"test".to_vec()).unwrap();
            trie.insert(b"test44".to_vec(), b"test".to_vec()).unwrap();
            trie.commit().unwrap()
        };

        let mut trie =
            PatriciaTrie::from(Arc::clone(&memdb), Arc::new(HasherKeccak::new()), &root).unwrap();
        let removed = trie.remove(b"test44").unwrap();
        assert!(removed);
        let removed = trie.remove(b"test33").unwrap();
        assert!(removed);
        let removed = trie.remove(b"test23").unwrap();
        assert!(removed);
    }

    #[test]
    fn test_multiple_trie_roots() {
        let k0 = ethereum_types::H256::from_low_u64_le(0);
        let k1 = ethereum_types::H256::from_low_u64_le(1);
        let v = ethereum_types::H256::from_low_u64_le(0x1234);

        let root1 = {
            let memdb = Arc::new(MemoryDB::new(true));
            let mut trie = PatriciaTrie::new(memdb, Arc::new(HasherKeccak::new()));
            trie.insert(k0.as_bytes().to_vec(), v.as_bytes().to_vec())
                .unwrap();
            trie.root().unwrap()
        };

        let root2 = {
            let memdb = Arc::new(MemoryDB::new(true));
            let mut trie = PatriciaTrie::new(memdb, Arc::new(HasherKeccak::new()));
            trie.insert(k0.as_bytes().to_vec(), v.as_bytes().to_vec())
                .unwrap();
            trie.insert(k1.as_bytes().to_vec(), v.as_bytes().to_vec())
                .unwrap();
            trie.root().unwrap();
            trie.remove(k1.as_ref()).unwrap();
            trie.root().unwrap()
        };

        let root3 = {
            let memdb = Arc::new(MemoryDB::new(true));
            let mut trie1 = PatriciaTrie::new(Arc::clone(&memdb), Arc::new(HasherKeccak::new()));
            trie1
                .insert(k0.as_bytes().to_vec(), v.as_bytes().to_vec())
                .unwrap();
            trie1
                .insert(k1.as_bytes().to_vec(), v.as_bytes().to_vec())
                .unwrap();
            trie1.root().unwrap();
            let root = trie1.root().unwrap();
            let mut trie2 =
                PatriciaTrie::from(Arc::clone(&memdb), Arc::new(HasherKeccak::new()), &root)
                    .unwrap();
            trie2.remove(k1.as_bytes()).unwrap();
            trie2.root().unwrap()
        };

        // Insert-then-delete must converge to the same root as never inserting.
        assert_eq!(root1, root2);
        assert_eq!(root2, root3);
    }

    #[test]
    fn test_delete_stale_keys_with_random_insert_and_delete() {
        let memdb = Arc::new(MemoryDB::new(true));
        let mut trie = PatriciaTrie::new(memdb, Arc::new(HasherKeccak::new()));

        let mut rng = rand::thread_rng();
        let mut keys = vec![];
        for _ in 0..100 {
            let random_bytes: Vec<u8> = (0..rng.gen_range(2, 30))
                .map(|_| rand::random::<u8>())
                .collect();
            trie.insert(random_bytes.clone(), random_bytes.clone())
                .unwrap();
            keys.push(random_bytes.clone());
        }
        trie.commit().unwrap();
        let slice = &mut keys;
        slice.shuffle(&mut rng);

        for key in slice.iter() {
            trie.remove(key).unwrap();
        }
        trie.commit().unwrap();

        // After everything is deleted only the empty-trie node must remain.
        let empty_node_key = HasherKeccak::new().digest(&rlp::NULL_RLP);
        let value = trie.db.get(empty_node_key.as_ref()).unwrap().unwrap();
        assert_eq!(value, &rlp::NULL_RLP)
    }

    #[test]
    fn insert_full_branch() {
        let memdb = Arc::new(MemoryDB::new(true));
        let mut trie = PatriciaTrie::new(memdb, Arc::new(HasherKeccak::new()));

        trie.insert(b"test".to_vec(), b"test".to_vec()).unwrap();
        trie.insert(b"test1".to_vec(), b"test".to_vec()).unwrap();
        trie.insert(b"test2".to_vec(), b"test".to_vec()).unwrap();
        trie.insert(b"test23".to_vec(), b"test".to_vec()).unwrap();
        trie.insert(b"test33".to_vec(), b"test".to_vec()).unwrap();
        trie.insert(b"test44".to_vec(), b"test".to_vec()).unwrap();
        trie.root().unwrap();

        let v = trie.get(b"test").unwrap();
        assert_eq!(Some(b"test".to_vec()), v);
    }

    #[test]
    fn iterator_trie() {
        let memdb = Arc::new(MemoryDB::new(true));
        let root1;
        let mut kv = HashMap::new();
        kv.insert(b"test".to_vec(), b"test".to_vec());
        kv.insert(b"test1".to_vec(), b"test1".to_vec());
        kv.insert(b"test11".to_vec(), b"test2".to_vec());
        kv.insert(b"test14".to_vec(), b"test3".to_vec());
        kv.insert(b"test16".to_vec(), b"test4".to_vec());
        kv.insert(b"test18".to_vec(), b"test5".to_vec());
        kv.insert(b"test2".to_vec(), b"test6".to_vec());
        kv.insert(b"test23".to_vec(), b"test7".to_vec());
        kv.insert(b"test9".to_vec(), b"test8".to_vec());
        {
            let mut trie = PatriciaTrie::new(memdb.clone(), Arc::new(HasherKeccak::new()));
            let mut kv = kv.clone();
            kv.iter().for_each(|(k, v)| {
                trie.insert(k.clone(), v.clone()).unwrap();
            });
            root1 = trie.root().unwrap();

            // The iterator must yield exactly the inserted pairs.
            trie.iter()
                .for_each(|(k, v)| assert_eq!(kv.remove(&k).unwrap(), v));
            assert!(kv.is_empty());
        }

        {
            let mut trie = PatriciaTrie::new(memdb.clone(), Arc::new(HasherKeccak::new()));
            let mut kv2 = HashMap::new();
            kv2.insert(b"test".to_vec(), b"test11".to_vec());
            kv2.insert(b"test1".to_vec(), b"test12".to_vec());
            kv2.insert(b"test14".to_vec(), b"test13".to_vec());
            kv2.insert(b"test22".to_vec(), b"test14".to_vec());
            kv2.insert(b"test9".to_vec(), b"test15".to_vec());
            kv2.insert(b"test16".to_vec(), b"test16".to_vec());
            kv2.insert(b"test2".to_vec(), b"test17".to_vec());
            kv2.iter().for_each(|(k, v)| {
                trie.insert(k.clone(), v.clone()).unwrap();
            });

            trie.root().unwrap();

            let mut kv_delete = HashSet::new();
            kv_delete.insert(b"test".to_vec());
            kv_delete.insert(b"test1".to_vec());
            kv_delete.insert(b"test14".to_vec());

            kv_delete.iter().for_each(|k| {
                trie.remove(k).unwrap();
            });

            kv2.retain(|k, _| !kv_delete.contains(k));

            trie.root().unwrap();
            trie.iter()
                .for_each(|(k, v)| assert_eq!(kv2.remove(&k).unwrap(), v));
            assert!(kv2.is_empty());
        }

        // Iterating a trie reopened from root1 still yields the original pairs.
        let trie = PatriciaTrie::from(memdb, Arc::new(HasherKeccak::new()), &root1).unwrap();
        trie.iter()
            .for_each(|(k, v)| assert_eq!(kv.remove(&k).unwrap(), v));
        assert!(kv.is_empty());
    }
}