├── .gitignore ├── CITATION.bib ├── Cargo.toml ├── LICENSE ├── README.md ├── vchain-exonum ├── Cargo.toml ├── build.rs └── src │ ├── api.rs │ ├── bin │ ├── vchain-node.rs │ ├── vchain-send-tx.rs │ └── vchain-server.rs │ ├── lib.rs │ ├── proto │ ├── mod.rs │ └── service.proto │ ├── schema.rs │ ├── service.rs │ ├── tests.rs │ └── transactions.rs ├── vchain-simchain ├── Cargo.toml └── src │ ├── bin │ ├── simchain-build.rs │ └── simchain-server.rs │ └── lib.rs └── vchain ├── Cargo.toml ├── benches ├── fixed_base_pow.rs └── points_mul_sum.rs └── src ├── acc ├── digest_set.rs ├── mod.rs ├── serde_impl.rs └── utils.rs ├── chain ├── build.rs ├── historical_query.rs ├── index.rs ├── mod.rs ├── object.rs ├── query.rs ├── query_result.rs ├── tests.rs └── utils.rs ├── digest.rs ├── lib.rs └── set.rs /.gitignore: -------------------------------------------------------------------------------- 1 | **/target 2 | **/*.rs.bk 3 | Cargo.lock 4 | -------------------------------------------------------------------------------- /CITATION.bib: -------------------------------------------------------------------------------- 1 | @inproceedings{SIGMOD19:vchain, 2 | author = {Xu, Cheng and Zhang, Ce and Xu, Jianliang}, 3 | title = {{vChain}: Enabling Verifiable Boolean Range Queries over Blockchain Databases}, 4 | booktitle = {Proceedings of the 2019 ACM SIGMOD International Conference on Management of Data}, 5 | year = {2019}, 6 | month = jun, 7 | address = {Amsterdam, Netherlands}, 8 | pages = {141--158}, 9 | isbn = {978-1-4503-5643-5}, 10 | doi = {10.1145/3299869.3300083} 11 | } 12 | 13 | @inproceedings{SIGMOD20:vchain-demo, 14 | author = {Wang, Haixin and Xu, Cheng and Zhang, Ce and Xu, Jianliang}, 15 | title = {{vChain}: A Blockchain System Ensuring Query Integrity}, 16 | booktitle = {Proceedings of the 2020 ACM SIGMOD International Conference on Management of Data}, 17 | year = {2020}, 18 | month = jun, 19 | address = {Portland, OR, USA}, 20 | pages = {2693--2696}, 21 | isbn = 
{978-1-4503-6735-6}, 22 | doi = {10.1145/3318464.3384682} 23 | } 24 | -------------------------------------------------------------------------------- /Cargo.toml: -------------------------------------------------------------------------------- 1 | [workspace] 2 | members = [ 3 | "vchain", 4 | "vchain-exonum", 5 | "vchain-simchain", 6 | ] 7 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 
29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 
61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. 
You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 
122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. 
In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "[]" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. 
We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright [yyyy] [name of copyright owner] 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 202 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # vChain Demo 2 | 3 | **WARNING**: This is an academic proof-of-concept prototype, and in particular has not received careful code review. This implementation is NOT ready for production use. 
4 | 5 | If you find the code here useful, please consider citing the following papers: 6 | 7 | ```bibtex 8 | @inproceedings{SIGMOD19:vchain, 9 | author = {Xu, Cheng and Zhang, Ce and Xu, Jianliang}, 10 | title = {{vChain}: Enabling Verifiable Boolean Range Queries over Blockchain Databases}, 11 | booktitle = {Proceedings of the 2019 ACM SIGMOD International Conference on Management of Data}, 12 | year = {2019}, 13 | month = jun, 14 | address = {Amsterdam, Netherlands}, 15 | pages = {141--158}, 16 | isbn = {978-1-4503-5643-5}, 17 | doi = {10.1145/3299869.3300083} 18 | } 19 | 20 | @inproceedings{SIGMOD20:vchain-demo, 21 | author = {Wang, Haixin and Xu, Cheng and Zhang, Ce and Xu, Jianliang}, 22 | title = {{vChain}: A Blockchain System Ensuring Query Integrity}, 23 | booktitle = {Proceedings of the 2020 ACM SIGMOD International Conference on Management of Data}, 24 | year = {2020}, 25 | month = jun, 26 | address = {Portland, OR, USA}, 27 | pages = {2693--2696}, 28 | isbn = {978-1-4503-6735-6}, 29 | doi = {10.1145/3318464.3384682} 30 | } 31 | ``` 32 | 33 | ## Build 34 | 35 | * Install Rust from <https://rustup.rs/>. 36 | * Run `cargo test` for unit tests. 37 | * Run `cargo build --release` to build the binaries, which will be located in the `target/release/` folder. 38 | 39 | ## SimChain 40 | 41 | ### Create Blockchain DB 42 | 43 | #### Input Format 44 | 45 | The input is a text file with each line representing an object. 46 | 47 | ``` 48 | obj := block_id [ v_data ] { w_data } 49 | v_data := v_1, v_2, ... 50 | w_data := w_1, w_2, ... 51 | ``` 52 | 53 | For example: 54 | 55 | ``` 56 | 1 [1,2] {a,b,c} 57 | 1 [1,5] {a} 58 | 2 [3,4] {a,e} 59 | ``` 60 | 61 | ### Build DB 62 | 63 | Run `simchain-build` to build the database. You need to specify the bit length for each dimension of the v data.
For example: 64 | 65 | ```sh 66 | ./target/release/simchain-build --bit-len 16,16 --skip-list-max-level 10 -i /path/to/data.txt -o /path/to/output_database 67 | ``` 68 | 69 | Run `simchain-build --help` for more info. 70 | 71 | ### Start the Server 72 | 73 | Run `simchain-server` after the database is built. For example: 74 | 75 | ```sh 76 | ./target/release/simchain-server -b 127.0.0.1:8000 --db /path/to/database 77 | ``` 78 | 79 | Run `simchain-server --help` for more info. 80 | 81 | ### Server REST API 82 | 83 | #### Inspect 84 | 85 | Use the following API endpoints to inspect the blockchain. The returned response is a JSON object. Refer to the source code for their definitions. 86 | 87 | ``` 88 | GET /get/param 89 | GET /get/blk_header/{id} 90 | GET /get/blk_data/{id} 91 | GET /get/intraindex/{id} 92 | GET /get/skiplist/{id} 93 | GET /get/index/{id} 94 | GET /get/obj/{id} 95 | ``` 96 | 97 | #### Query 98 | 99 | The API endpoint is: 100 | 101 | ``` 102 | POST /query 103 | ``` 104 | 105 | Encode the query parameter as a JSON object. The following example specifies the range as [(1, *, 2), (3, *, 4)] for 3-dimensional objects, and the bool expression as "A" AND ("B" OR "C"). 106 | 107 | ```json 108 | { 109 | "start_block": 1, 110 | "end_block": 10, 111 | "range": [[1, null, 2], [3, null, 4]], 112 | "bool": [["a"], ["b", "c"]] 113 | } 114 | ``` 115 | 116 | The response is a JSON object like: 117 | 118 | ```json 119 | { 120 | "result": ..., 121 | "vo": ..., 122 | "query_time_in_ms": ..., 123 | "vo_size": ..., // in bytes 124 | "stats": ..., 125 | ... 126 | } 127 | ``` 128 | 129 | Refer to the source code for their definitions. 130 | 131 | #### Verify 132 | 133 | Pass the query response directly to the following endpoint for verification. 134 | 135 | ``` 136 | POST /verify 137 | ``` 138 | 139 | The response is a JSON object like: 140 | 141 | ```json 142 | { 143 | "pass": true, 144 | "detail": ..., // detail reason for failure 145 | "verify_time_in_ms": ...
146 | } 147 | ``` 148 | 149 | ## Real Chain 150 | 151 | ### Start the Node 152 | 153 | Run `vchain-node` to start up a single node blockchain network. For example: 154 | 155 | ```sh 156 | ./vchain-node -- --bit-len 16,16 --skip-list-max-level 5 --db /path/to/database 157 | ``` 158 | 159 | Run `vchain-node --help` for more info. 160 | 161 | ### Send TX 162 | 163 | Run `vchain-send-tx` to send TX to the node. The data input format is the same as that in the SimChain. 164 | 165 | ```sh 166 | ./vchain-send-tx -- -i /path/to/data.txt 167 | ``` 168 | 169 | Run `vchain-send-tx --help` for more info. 170 | 171 | ### Start the Server 172 | 173 | Run `vchain-server` to start a server. The REST APIs are the same as those in the SimChain. 174 | 175 | ```sh 176 | ./vchain-server -b 127.0.0.1:8000 177 | ``` 178 | 179 | Run `vchain-server --help` for more info. 180 | -------------------------------------------------------------------------------- /vchain-exonum/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "vchain-exonum" 3 | version = "0.1.0" 4 | authors = ["Cheng XU "] 5 | edition = "2018" 6 | publish = false 7 | build = "build.rs" 8 | 9 | [dependencies] 10 | actix-cors = "0.5" 11 | actix-rt = "1.1" 12 | actix-web = "3.3" 13 | anyhow = "1.0" 14 | async-trait = "0.1" 15 | bincode = "1.3" 16 | env_logger = "0.8" 17 | exonum = "0.13.0-rc.2" 18 | exonum-crypto = "0.13.0-rc.2" 19 | exonum-derive = "0.13.0-rc.2" 20 | exonum-merkledb = "0.13.0-rc.2" 21 | exonum-proto = "0.13.0-rc.2" 22 | failure = "0.1" 23 | futures = "0.3" 24 | lazy_static = "1.4" 25 | log = "0.4" 26 | lru = "0.6" 27 | os_info = "=1.0.1" # Fix `version_regex` does not live long enough 28 | protobuf = "2.19" 29 | reqwest = { version = "0.11", features = ["json"] } 30 | serde = { version = "1.0", features = ["derive"] } 31 | serde_derive = "1.0" 32 | serde_json = "1.0" 33 | structopt = "0.3" 34 | vchain = { path = "../vchain" } 35 | 36 | 
[dev-dependencies] 37 | exonum-testkit = "0.13.0-rc.2" 38 | 39 | [build-dependencies] 40 | exonum-build = "0.13.0-rc.2" 41 | -------------------------------------------------------------------------------- /vchain-exonum/build.rs: -------------------------------------------------------------------------------- 1 | use exonum_build::ProtobufGenerator; 2 | 3 | fn main() { 4 | ProtobufGenerator::with_mod_name("protobuf_mod.rs") 5 | .with_input_dir("src/proto") 6 | .with_crypto() 7 | .generate(); 8 | } 9 | -------------------------------------------------------------------------------- /vchain-exonum/src/api.rs: -------------------------------------------------------------------------------- 1 | use crate::schema::VChainSchema; 2 | use exonum::runtime::rust::api::{self, ServiceApiBuilder, ServiceApiState}; 3 | use serde_json::json; 4 | use vchain::{acc, historical_query, IdType, OverallResult, ReadInterface}; 5 | 6 | #[derive(Debug, Clone, Copy)] 7 | pub struct VChainApi; 8 | 9 | #[derive(Debug, Serialize, Deserialize, Clone, Copy)] 10 | pub struct QueryInput { 11 | pub id: IdType, 12 | } 13 | 14 | fn handle_err(e: anyhow::Error) -> api::Error { 15 | api::Error::InternalError(failure::format_err!("{:?}", e)) 16 | } 17 | 18 | impl VChainApi { 19 | pub fn get_param(self, state: &ServiceApiState<'_>) -> api::Result { 20 | let schema = VChainSchema::new(state.service_data()); 21 | schema.get_parameter().map_err(handle_err) 22 | } 23 | 24 | pub fn get_object( 25 | self, 26 | state: &ServiceApiState<'_>, 27 | query: QueryInput, 28 | ) -> api::Result { 29 | let schema = VChainSchema::new(state.service_data()); 30 | schema.read_object(query.id).map_err(handle_err) 31 | } 32 | 33 | pub fn get_block_header( 34 | self, 35 | state: &ServiceApiState<'_>, 36 | query: QueryInput, 37 | ) -> api::Result { 38 | let schema = VChainSchema::new(state.service_data()); 39 | schema.read_block_header(query.id).map_err(handle_err) 40 | } 41 | 42 | pub fn get_block_data( 43 | self, 44 | state: 
&ServiceApiState<'_>, 45 | query: QueryInput, 46 | ) -> api::Result { 47 | let schema = VChainSchema::new(state.service_data()); 48 | schema.read_block_data(query.id).map_err(handle_err) 49 | } 50 | 51 | pub fn get_intra_index_node( 52 | self, 53 | state: &ServiceApiState<'_>, 54 | query: QueryInput, 55 | ) -> api::Result { 56 | let schema = VChainSchema::new(state.service_data()); 57 | schema.read_intra_index_node(query.id).map_err(handle_err) 58 | } 59 | 60 | pub fn get_skip_list_node( 61 | self, 62 | state: &ServiceApiState<'_>, 63 | query: QueryInput, 64 | ) -> api::Result { 65 | let schema = VChainSchema::new(state.service_data()); 66 | schema.read_skip_list_node(query.id).map_err(handle_err) 67 | } 68 | 69 | pub fn get_index_node( 70 | self, 71 | state: &ServiceApiState<'_>, 72 | query: QueryInput, 73 | ) -> api::Result { 74 | match self.get_intra_index_node(state, query) { 75 | Ok(data) => Ok(json!(data)), 76 | _ => { 77 | let data = self.get_skip_list_node(state, query).map_err(|_| { 78 | api::Error::NotFound(format!("no index node for id: {}", query.id)) 79 | })?; 80 | Ok(json!({ "SkipListNode": data })) 81 | } 82 | } 83 | } 84 | 85 | pub fn query( 86 | self, 87 | state: &ServiceApiState<'_>, 88 | query: vchain::Query, 89 | ) -> api::Result { 90 | let schema = VChainSchema::new(state.service_data()); 91 | let param = schema 92 | .get_parameter() 93 | .map_err(|e| api::Error::NotFound(format!("{:?}", e)))?; 94 | match param.acc_type { 95 | acc::Type::ACC1 => { 96 | let res: OverallResult = 97 | historical_query(&query, &schema).map_err(handle_err)?; 98 | Ok(json!(res)) 99 | } 100 | acc::Type::ACC2 => { 101 | let res: OverallResult = 102 | historical_query(&query, &schema).map_err(handle_err)?; 103 | Ok(json!(res)) 104 | } 105 | } 106 | } 107 | 108 | pub fn wire(self, builder: &mut ServiceApiBuilder) { 109 | builder 110 | .public_scope() 111 | .endpoint( 112 | "get/param", 113 | move |state: &ServiceApiState<'_>, _query: ()| self.get_param(state), 114 | ) 
115 | .endpoint( 116 | "get/obj", 117 | move |state: &ServiceApiState<'_>, query: QueryInput| self.get_object(state, query), 118 | ) 119 | .endpoint( 120 | "get/blk_header", 121 | move |state: &ServiceApiState<'_>, query: QueryInput| { 122 | self.get_block_header(state, query) 123 | }, 124 | ) 125 | .endpoint( 126 | "get/blk_data", 127 | move |state: &ServiceApiState<'_>, query: QueryInput| { 128 | self.get_block_data(state, query) 129 | }, 130 | ) 131 | .endpoint( 132 | "get/intraindex", 133 | move |state: &ServiceApiState<'_>, query: QueryInput| { 134 | self.get_intra_index_node(state, query) 135 | }, 136 | ) 137 | .endpoint( 138 | "get/skiplist", 139 | move |state: &ServiceApiState<'_>, query: QueryInput| { 140 | self.get_skip_list_node(state, query) 141 | }, 142 | ) 143 | .endpoint( 144 | "get/index", 145 | move |state: &ServiceApiState<'_>, query: QueryInput| { 146 | self.get_index_node(state, query) 147 | }, 148 | ) 149 | .endpoint_mut( 150 | "query", 151 | move |state: &ServiceApiState<'_>, query: vchain::Query| self.query(state, query), 152 | ); 153 | } 154 | } 155 | -------------------------------------------------------------------------------- /vchain-exonum/src/bin/vchain-node.rs: -------------------------------------------------------------------------------- 1 | #[macro_use] 2 | extern crate log; 3 | 4 | use anyhow::{bail, Error, Result}; 5 | use exonum::{ 6 | api::backends::actix::AllowOrigin, 7 | blockchain::{config::GenesisConfigBuilder, ConsensusConfig, ValidatorKeys}, 8 | crypto::{self, PublicKey, SecretKey}, 9 | keys::Keys, 10 | node::{Node, NodeApiConfig, NodeConfig}, 11 | runtime::{rust::ServiceFactory, RuntimeInstance}, 12 | }; 13 | use exonum_merkledb::{DbOptions, RocksDB}; 14 | use serde::{Deserialize, Serialize}; 15 | use std::fs; 16 | use std::path::{Path, PathBuf}; 17 | use structopt::StructOpt; 18 | use vchain::acc; 19 | use vchain_exonum::{service::VChainService, transactions::InitParam}; 20 | 21 | #[derive(Debug, Clone, Serialize, 
Deserialize)] 22 | struct NodeKeys { 23 | consensus_key: (PublicKey, SecretKey), 24 | service_key: (PublicKey, SecretKey), 25 | } 26 | 27 | impl NodeKeys { 28 | fn new() -> Self { 29 | Self { 30 | consensus_key: crypto::gen_keypair(), 31 | service_key: crypto::gen_keypair(), 32 | } 33 | } 34 | 35 | fn load_from_file(path: &Path) -> Result { 36 | let data = fs::read_to_string(path)?; 37 | serde_json::from_str::(&data).map_err(Error::msg) 38 | } 39 | 40 | fn save_to_file(&self, path: &Path) -> Result<()> { 41 | let data = serde_json::to_string_pretty(self)?; 42 | fs::write(path, data)?; 43 | Ok(()) 44 | } 45 | } 46 | 47 | fn node_config(api_address: String, peer_address: String, keys: NodeKeys) -> Result { 48 | info!("api address: {}", &api_address); 49 | info!("peer address: {}", &peer_address); 50 | 51 | let (consensus_public_key, consensus_secret_key) = keys.consensus_key; 52 | let (service_public_key, service_secret_key) = keys.service_key; 53 | 54 | let consensus = ConsensusConfig { 55 | validator_keys: vec![ValidatorKeys { 56 | consensus_key: consensus_public_key, 57 | service_key: service_public_key, 58 | }], 59 | ..ConsensusConfig::default() 60 | }; 61 | 62 | let api_cfg = NodeApiConfig { 63 | public_api_address: Some(api_address.parse()?), 64 | public_allow_origin: Some(AllowOrigin::Any), 65 | ..Default::default() 66 | }; 67 | 68 | Ok(NodeConfig { 69 | listen_address: peer_address.parse()?, 70 | consensus, 71 | external_address: peer_address.to_owned(), 72 | network: Default::default(), 73 | connect_list: Default::default(), 74 | api: api_cfg, 75 | mempool: Default::default(), 76 | services_configs: Default::default(), 77 | database: Default::default(), 78 | thread_pool_size: Default::default(), 79 | master_key_path: Default::default(), 80 | keys: Keys::from_keys( 81 | consensus_public_key, 82 | consensus_secret_key, 83 | service_public_key, 84 | service_secret_key, 85 | ), 86 | }) 87 | } 88 | 89 | fn parse_acc(input: &str) -> Result { 90 | let input = 
input.to_ascii_lowercase(); 91 | if input == "acc1" { 92 | Ok(acc::Type::ACC1) 93 | } else if input == "acc2" { 94 | Ok(acc::Type::ACC2) 95 | } else { 96 | bail!("invalid acc type, please specify as acc1 or acc2."); 97 | } 98 | } 99 | 100 | #[allow(clippy::box_vec)] 101 | fn parse_v_bit_len(input: &str) -> Result>> { 102 | let x = input 103 | .split(',') 104 | .map(|s| s.trim().parse::().map_err(anyhow::Error::msg)) 105 | .collect::>>()?; 106 | Ok(Box::new(x)) 107 | } 108 | 109 | #[derive(StructOpt, Debug)] 110 | #[structopt(name = "vchain-node")] 111 | struct Opts { 112 | /// db path, should be a directory 113 | #[structopt(short = "-i", long, parse(from_os_str))] 114 | db: PathBuf, 115 | 116 | /// discard old database 117 | #[structopt(short = "-n", long)] 118 | create_new: bool, 119 | 120 | /// API Address 121 | #[structopt(long, default_value = "127.0.0.1:5000")] 122 | api_address: String, 123 | 124 | /// Peer Address 125 | #[structopt(long, default_value = "127.0.0.1:2000")] 126 | peer_address: String, 127 | 128 | /// acc type to be used 129 | #[structopt(long, default_value = "acc2", parse(try_from_str = parse_acc))] 130 | acc: acc::Type, 131 | 132 | /// bit len for each dimension of the v data (e.g. 16,8) 133 | #[structopt(long, parse(try_from_str = parse_v_bit_len))] 134 | #[allow(clippy::box_vec)] 135 | bit_len: Box>, 136 | 137 | /// don't build intra index 138 | #[structopt(short = "-f", long)] 139 | no_intra_index: bool, 140 | 141 | /// max skip list level, 0 means no skip list. 
142 | #[structopt(long, default_value = "0")] 143 | skip_list_max_level: u32, 144 | } 145 | 146 | fn main() -> Result<()> { 147 | env_logger::init_from_env( 148 | env_logger::Env::default().filter_or("RUST_LOG", "vchain=info,vchain_exonum=info"), 149 | ); 150 | 151 | let opts = Opts::from_args(); 152 | 153 | let param = InitParam { 154 | v_bit_len: opts.bit_len.to_vec(), 155 | is_acc2: opts.acc == acc::Type::ACC2, 156 | intra_index: !opts.no_intra_index, 157 | skip_list_max_level: opts.skip_list_max_level, 158 | }; 159 | info!("param: {:?}", param); 160 | 161 | info!("db path: {:?}", opts.db); 162 | if opts.create_new && opts.db.exists() { 163 | fs::remove_dir_all(&opts.db)?; 164 | } 165 | fs::create_dir_all(&opts.db)?; 166 | 167 | let key = match NodeKeys::load_from_file(&opts.db.join("keys.json")) { 168 | Ok(key) => { 169 | info!("found old key"); 170 | key 171 | } 172 | _ => { 173 | warn!("create new key"); 174 | let key = NodeKeys::new(); 175 | key.save_to_file(&opts.db.join("keys.json"))?; 176 | key 177 | } 178 | }; 179 | let db = RocksDB::open(opts.db, &DbOptions::default()).map_err(anyhow::Error::msg)?; 180 | 181 | let external_runtimes: Vec = vec![]; 182 | let service = VChainService; 183 | let artifact_id = service.artifact_id(); 184 | let services = vec![service.into()]; 185 | let node_config = node_config(opts.api_address, opts.peer_address, key)?; 186 | let genesis_config = GenesisConfigBuilder::with_consensus_config(node_config.consensus.clone()) 187 | .with_artifact(artifact_id.clone()) 188 | .with_instance( 189 | artifact_id 190 | .into_default_instance(1, "vchain") 191 | .with_constructor(param), 192 | ) 193 | .build(); 194 | 195 | let node = Node::new( 196 | db, 197 | external_runtimes, 198 | services, 199 | node_config, 200 | genesis_config, 201 | None, 202 | ); 203 | info!("Starting a single node..."); 204 | info!("Blockchain is ready for transactions!"); 205 | node.run().map_err(anyhow::Error::msg) 206 | } 207 | 
-------------------------------------------------------------------------------- /vchain-exonum/src/bin/vchain-send-tx.rs: -------------------------------------------------------------------------------- 1 | #[macro_use] 2 | extern crate log; 3 | 4 | use anyhow::Result; 5 | use exonum::{crypto, runtime::rust::Transaction}; 6 | use serde::{Deserialize, Serialize}; 7 | use serde_json::json; 8 | use std::collections::BTreeMap; 9 | use std::path::PathBuf; 10 | use std::thread::sleep; 11 | use std::time::Duration; 12 | use structopt::StructOpt; 13 | use vchain::{load_raw_obj_from_file, IdType}; 14 | use vchain_exonum::transactions::{RawObject, TxAddObjs}; 15 | 16 | #[derive(StructOpt, Debug)] 17 | #[structopt(name = "vchain-send-tx")] 18 | struct Opts { 19 | /// input data path 20 | #[structopt(short, long, parse(from_os_str))] 21 | input: PathBuf, 22 | 23 | /// api address 24 | #[structopt(short, long, default_value = "http://127.0.0.1:5000")] 25 | api_address: String, 26 | } 27 | 28 | #[derive(Debug, Clone, Serialize, Deserialize)] 29 | struct TxResponse { 30 | tx_hash: String, 31 | } 32 | 33 | #[actix_rt::main] 34 | async fn main() -> Result<()> { 35 | env_logger::init_from_env(env_logger::Env::default().filter_or("RUST_LOG", "info")); 36 | let opts = Opts::from_args(); 37 | let tx_url = format!("{}/api/explorer/v1/transactions", opts.api_address); 38 | 39 | info!("read data from {:?}", opts.input); 40 | warn!("blk id from data file will be ignored"); 41 | 42 | let raw_objs = load_raw_obj_from_file(&opts.input)?; 43 | let mut txs: BTreeMap = BTreeMap::new(); 44 | for (&id, objs) in raw_objs.iter() { 45 | let tx_objs: Vec<_> = objs.iter().map(|o| RawObject::create(o)).collect(); 46 | txs.insert(id, TxAddObjs { objs: tx_objs }); 47 | } 48 | 49 | let keypair = crypto::gen_keypair(); 50 | let client = reqwest::Client::new(); 51 | for (_, tx) in txs.into_iter() { 52 | let tx_message = tx.sign(1, keypair.0, &keypair.1).into_raw(); 53 | let res = client 54 | .post(&tx_url) 
55 | .json(&json!({ "tx_body": tx_message })) 56 | .send() 57 | .await?; 58 | debug!("response: {:?}", &res); 59 | let tx_res = res.json::().await?; 60 | info!("tx_hash={:?}", tx_res.tx_hash); 61 | 62 | loop { 63 | let res2 = client 64 | .get(&tx_url) 65 | .query(&[("hash", tx_res.tx_hash.clone())]) 66 | .send() 67 | .await?; 68 | debug!("response: {:?}", &res2); 69 | let tx_info = res2.json::().await?; 70 | if tx_info.get("type").unwrap() == &json!("committed") { 71 | break; 72 | } 73 | sleep(Duration::from_millis(100)); 74 | } 75 | } 76 | 77 | Ok(()) 78 | } 79 | -------------------------------------------------------------------------------- /vchain-exonum/src/bin/vchain-server.rs: -------------------------------------------------------------------------------- 1 | #[macro_use] 2 | extern crate lazy_static; 3 | 4 | use actix_cors::Cors; 5 | use actix_web::{web, App, HttpResponse, HttpServer, Responder}; 6 | use futures::{lock::Mutex, StreamExt}; 7 | use lru::LruCache; 8 | use serde::Serialize; 9 | use std::fmt; 10 | use structopt::StructOpt; 11 | use vchain::acc; 12 | use vchain::chain::*; 13 | 14 | static mut API_ADDRESS: Option = None; 15 | static mut PARAM: Option = None; 16 | 17 | lazy_static! 
{ 18 | static ref BLK_HEAD_CACHE: Mutex> = 19 | Mutex::new(LruCache::new(1000)); 20 | } 21 | 22 | fn get_api_address() -> &'static str { 23 | unsafe { API_ADDRESS.as_ref().unwrap() } 24 | } 25 | 26 | fn get_param() -> &'static Parameter { 27 | unsafe { PARAM.as_ref().unwrap() } 28 | } 29 | 30 | #[derive(Debug)] 31 | struct MyErr(anyhow::Error); 32 | 33 | impl fmt::Display for MyErr { 34 | fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { 35 | write!(f, "error: {}", self.0.to_string()) 36 | } 37 | } 38 | 39 | fn handle_err(e: E) -> MyErr { 40 | MyErr(anyhow::Error::msg(e)) 41 | } 42 | 43 | impl actix_web::error::ResponseError for MyErr {} 44 | 45 | async fn web_get_param() -> impl Responder { 46 | HttpResponse::Ok().json(get_param()) 47 | } 48 | 49 | macro_rules! impl_get_info { 50 | ($name: ident, $url: expr) => { 51 | async fn $name(req: web::Path<(IdType,)>) -> impl Responder { 52 | let id = req.into_inner().0; 53 | HttpResponse::TemporaryRedirect() 54 | .header( 55 | "Location", 56 | format!("{}/get/{}?id={}", get_api_address(), $url, id), 57 | ) 58 | .finish() 59 | } 60 | }; 61 | } 62 | 63 | impl_get_info!(web_get_blk_header, "blk_header"); 64 | impl_get_info!(web_get_blk_data, "blk_data"); 65 | impl_get_info!(web_get_intra_index_node, "intraindex"); 66 | impl_get_info!(web_get_index_node, "index"); 67 | impl_get_info!(web_get_skip_list_node, "skiplist"); 68 | impl_get_info!(web_get_object, "obj"); 69 | 70 | async fn web_query() -> impl Responder { 71 | HttpResponse::TemporaryRedirect() 72 | .header("Location", format!("{}/query", get_api_address())) 73 | .finish() 74 | } 75 | 76 | #[derive(Serialize)] 77 | struct VerifyResponse { 78 | pass: bool, 79 | detail: VerifyResult, 80 | verify_time_in_ms: u64, 81 | } 82 | 83 | #[derive(Debug, Clone)] 84 | struct LightChain { 85 | param: Parameter, 86 | blk_header_api: String, 87 | } 88 | 89 | impl LightChain { 90 | fn new(param: Parameter, api_address: &str) -> Self { 91 | Self { 92 | param, 93 | 
blk_header_api: format!("{}/get/blk_header", api_address), 94 | } 95 | } 96 | 97 | async fn get_block_header(&self, id: IdType) -> anyhow::Result { 98 | let client = reqwest::Client::new(); 99 | client 100 | .get(&self.blk_header_api) 101 | .query(&[("id", id)]) 102 | .send() 103 | .await? 104 | .json::() 105 | .await 106 | .map_err(anyhow::Error::msg) 107 | } 108 | } 109 | 110 | #[async_trait::async_trait] 111 | impl LightNodeInterface for LightChain { 112 | async fn lightnode_get_parameter(&self) -> anyhow::Result { 113 | Ok(self.param.clone()) 114 | } 115 | 116 | async fn lightnode_read_block_header(&self, id: IdType) -> anyhow::Result { 117 | if let Some(header) = BLK_HEAD_CACHE.lock().await.get(&id).cloned() { 118 | return Ok(header); 119 | } 120 | let header = self.get_block_header(id).await?; 121 | BLK_HEAD_CACHE.lock().await.put(id, header.clone()); 122 | Ok(header) 123 | } 124 | } 125 | 126 | async fn web_verify(mut body: web::Payload) -> actix_web::Result { 127 | let mut bytes = web::BytesMut::new(); 128 | while let Some(item) = body.next().await { 129 | bytes.extend_from_slice(&item?); 130 | } 131 | 132 | let lightnode = LightChain::new(get_param().clone(), get_api_address()); 133 | let (verify_result, time) = match lightnode.param.acc_type { 134 | acc::Type::ACC1 => { 135 | let res: OverallResult = 136 | serde_json::from_slice(&bytes).map_err(handle_err)?; 137 | res.verify(&lightnode).await 138 | } 139 | acc::Type::ACC2 => { 140 | let res: OverallResult = 141 | serde_json::from_slice(&bytes).map_err(handle_err)?; 142 | res.verify(&lightnode).await 143 | } 144 | } 145 | .map_err(handle_err)?; 146 | let response = VerifyResponse { 147 | pass: verify_result.is_ok(), 148 | detail: verify_result, 149 | verify_time_in_ms: time.as_millis() as u64, 150 | }; 151 | Ok(HttpResponse::Ok().json(response)) 152 | } 153 | 154 | #[derive(StructOpt, Debug)] 155 | #[structopt(name = "vchain-server")] 156 | struct Opts { 157 | /// api address 158 | #[structopt(short, long, 
default_value = "http://127.0.0.1:5000")] 159 | api_address: String, 160 | 161 | /// server binding address 162 | #[structopt(short, long, default_value = "127.0.0.1:8000")] 163 | binding: String, 164 | } 165 | 166 | #[actix_rt::main] 167 | async fn main() -> actix_web::Result<()> { 168 | env_logger::init_from_env(env_logger::Env::default().filter_or("RUST_LOG", "info")); 169 | let opts = Opts::from_args(); 170 | let api_address = format!("{}/api/services/vchain", opts.api_address); 171 | let param = reqwest::get(&format!("{}/get/param", api_address)) 172 | .await 173 | .map_err(handle_err)? 174 | .json::() 175 | .await 176 | .map_err(handle_err)?; 177 | unsafe { 178 | API_ADDRESS = Some(api_address); 179 | PARAM = Some(param); 180 | } 181 | 182 | HttpServer::new(|| { 183 | App::new() 184 | .wrap( 185 | Cors::default() 186 | .send_wildcard() 187 | .allowed_methods(vec!["GET", "POST"]), 188 | ) 189 | .route("/get/param", web::get().to(web_get_param)) 190 | .route("/get/blk_header/{id}", web::get().to(web_get_blk_header)) 191 | .route("/get/blk_data/{id}", web::get().to(web_get_blk_data)) 192 | .route( 193 | "/get/intraindex/{id}", 194 | web::get().to(web_get_intra_index_node), 195 | ) 196 | .route("/get/skiplist/{id}", web::get().to(web_get_skip_list_node)) 197 | .route("/get/index/{id}", web::get().to(web_get_index_node)) 198 | .route("/get/obj/{id}", web::get().to(web_get_object)) 199 | .route("/query", web::post().to(web_query)) 200 | .route("/verify", web::post().to(web_verify)) 201 | }) 202 | .bind(opts.binding)? 
203 | .run() 204 | .await?; 205 | 206 | Ok(()) 207 | } 208 | -------------------------------------------------------------------------------- /vchain-exonum/src/lib.rs: -------------------------------------------------------------------------------- 1 | #[macro_use] 2 | extern crate serde_derive; // Required for Protobuf 3 | #[macro_use] 4 | extern crate exonum_derive; 5 | #[macro_use] 6 | extern crate log; 7 | 8 | pub mod api; 9 | pub mod proto; 10 | pub mod schema; 11 | pub mod service; 12 | pub mod transactions; 13 | 14 | pub mod errors { 15 | #[derive(Debug, IntoExecutionError)] 16 | pub enum Error { 17 | Unknown = 1, 18 | } 19 | } 20 | 21 | #[cfg(test)] 22 | mod tests; 23 | -------------------------------------------------------------------------------- /vchain-exonum/src/proto/mod.rs: -------------------------------------------------------------------------------- 1 | pub use self::service::{ 2 | BlockData, BlockHeader, InitParam, IntraIndexNode, Object, Parameter, RawObject, SkipListNode, 3 | TxAddObjs, 4 | }; 5 | 6 | include!(concat!(env!("OUT_DIR"), "/protobuf_mod.rs")); 7 | -------------------------------------------------------------------------------- /vchain-exonum/src/proto/service.proto: -------------------------------------------------------------------------------- 1 | syntax = "proto3"; 2 | 3 | package vchain; 4 | 5 | message Parameter 6 | { 7 | bytes data = 1; 8 | } 9 | 10 | message Object 11 | { 12 | bytes data = 1; 13 | } 14 | 15 | message BlockHeader 16 | { 17 | bytes data = 1; 18 | } 19 | 20 | message BlockData 21 | { 22 | bytes data = 1; 23 | } 24 | 25 | message IntraIndexNode 26 | { 27 | bytes data = 1; 28 | } 29 | 30 | message SkipListNode 31 | { 32 | bytes data = 1; 33 | } 34 | 35 | message RawObject 36 | { 37 | repeated uint32 v_data = 1; 38 | repeated string w_data = 2; 39 | } 40 | 41 | message TxAddObjs 42 | { 43 | repeated RawObject objs = 1; 44 | } 45 | 46 | message InitParam 47 | { 48 | repeated uint32 v_bit_len = 1; 49 | bool 
is_acc2 = 2; 50 | bool intra_index = 3; 51 | uint32 skip_list_max_level = 4; 52 | } 53 | -------------------------------------------------------------------------------- /vchain-exonum/src/schema.rs: -------------------------------------------------------------------------------- 1 | use crate::transactions::RawObject; 2 | use anyhow::{Context, Error, Result}; 3 | use exonum::crypto::Hash; 4 | use exonum_derive::{BinaryValue, FromAccess, ObjectHash}; 5 | use exonum_merkledb::{ 6 | access::{Access, RawAccessMut}, 7 | Entry, ListIndex, MapIndex, ObjectHash as _, ProofMapIndex, 8 | }; 9 | use exonum_proto::ProtobufConvert; 10 | use vchain::IdType; 11 | 12 | use super::proto; 13 | 14 | macro_rules! impl_schema_from_proto { 15 | ($type:ident) => { 16 | #[derive(Clone, Debug, Serialize, Deserialize, ProtobufConvert, BinaryValue, ObjectHash)] 17 | #[protobuf_convert(source = "proto::Parameter")] 18 | pub struct $type { 19 | pub data: Vec, 20 | } 21 | 22 | impl $type { 23 | pub fn create(input: &vchain::$type) -> Result { 24 | Ok(Self { 25 | data: bincode::serialize(input)?, 26 | }) 27 | } 28 | 29 | pub fn to_vchain_type(&self) -> Result { 30 | bincode::deserialize::(&self.data).map_err(Error::msg) 31 | } 32 | } 33 | }; 34 | } 35 | 36 | impl_schema_from_proto!(Parameter); 37 | impl_schema_from_proto!(Object); 38 | impl_schema_from_proto!(BlockHeader); 39 | impl_schema_from_proto!(BlockData); 40 | impl_schema_from_proto!(IntraIndexNode); 41 | impl_schema_from_proto!(SkipListNode); 42 | 43 | #[derive(Debug, FromAccess)] 44 | pub(crate) struct VChainSchema { 45 | pub param: Entry, 46 | pub objects: MapIndex, 47 | pub block_headers: ProofMapIndex, 48 | pub block_data: MapIndex, 49 | pub intra_index_nodes: MapIndex, 50 | pub skip_list_nodes: MapIndex, 51 | pub objs_in_this_round: ListIndex, 52 | } 53 | 54 | impl VChainSchema { 55 | pub fn state_hash(&self) -> Vec { 56 | vec![self.block_headers.object_hash()] 57 | } 58 | } 59 | 60 | impl vchain::ReadInterface for VChainSchema { 
61 | fn get_parameter(&self) -> Result { 62 | self.param 63 | .get() 64 | .context("failed to get parameter")? 65 | .to_vchain_type() 66 | } 67 | fn read_block_header(&self, id: IdType) -> Result { 68 | self.block_headers 69 | .get(&id) 70 | .context("failed to read block header")? 71 | .to_vchain_type() 72 | } 73 | fn read_block_data(&self, id: IdType) -> Result { 74 | self.block_data 75 | .get(&id) 76 | .context("failed to read block data")? 77 | .to_vchain_type() 78 | } 79 | fn read_intra_index_node(&self, id: IdType) -> Result { 80 | self.intra_index_nodes 81 | .get(&id) 82 | .context("failed to read intra index node")? 83 | .to_vchain_type() 84 | } 85 | fn read_skip_list_node(&self, id: IdType) -> Result { 86 | self.skip_list_nodes 87 | .get(&id) 88 | .context("failed to read skip list node")? 89 | .to_vchain_type() 90 | } 91 | fn read_object(&self, id: IdType) -> Result { 92 | self.objects 93 | .get(&id) 94 | .context("failed to read object")? 95 | .to_vchain_type() 96 | } 97 | } 98 | 99 | impl vchain::WriteInterface for VChainSchema 100 | where 101 | T::Base: RawAccessMut, 102 | { 103 | fn set_parameter(&mut self, param: vchain::Parameter) -> Result<()> { 104 | self.param.set(Parameter::create(¶m)?); 105 | Ok(()) 106 | } 107 | fn write_block_header(&mut self, header: vchain::BlockHeader) -> Result<()> { 108 | let id = header.block_id; 109 | self.block_headers.put(&id, BlockHeader::create(&header)?); 110 | Ok(()) 111 | } 112 | fn write_block_data(&mut self, data: vchain::BlockData) -> Result<()> { 113 | let id = data.block_id; 114 | self.block_data.put(&id, BlockData::create(&data)?); 115 | Ok(()) 116 | } 117 | fn write_intra_index_node(&mut self, node: vchain::IntraIndexNode) -> Result<()> { 118 | let id = node.id(); 119 | self.intra_index_nodes 120 | .put(&id, IntraIndexNode::create(&node)?); 121 | Ok(()) 122 | } 123 | fn write_skip_list_node(&mut self, node: vchain::SkipListNode) -> Result<()> { 124 | let id = node.id; 125 | self.skip_list_nodes.put(&id, 
SkipListNode::create(&node)?); 126 | Ok(()) 127 | } 128 | fn write_object(&mut self, obj: vchain::Object) -> Result<()> { 129 | let id = obj.id; 130 | self.objects.put(&id, Object::create(&obj)?); 131 | Ok(()) 132 | } 133 | } 134 | -------------------------------------------------------------------------------- /vchain-exonum/src/service.rs: -------------------------------------------------------------------------------- 1 | use crate::{ 2 | api::VChainApi, 3 | errors::Error, 4 | schema::VChainSchema, 5 | transactions::{InitParam, TxAddObjs}, 6 | }; 7 | use exonum::{ 8 | crypto::Hash, 9 | runtime::{ 10 | rust::{api::ServiceApiBuilder, CallContext, Service}, 11 | BlockchainData, DispatcherError, ExecutionError, 12 | }, 13 | }; 14 | use exonum_merkledb::{BinaryValue, Snapshot}; 15 | use vchain::{Digest, Digestible, IdType, ReadInterface, WriteInterface}; 16 | 17 | #[exonum_interface] 18 | pub trait VChainInterface { 19 | fn add_objs(&self, ctx: CallContext<'_>, arg: TxAddObjs) -> Result<(), Error>; 20 | } 21 | 22 | #[derive(Debug, ServiceFactory, ServiceDispatcher)] 23 | #[service_dispatcher(implements("VChainInterface"))] 24 | #[service_factory(proto_sources = "crate::proto")] 25 | pub struct VChainService; 26 | 27 | impl VChainInterface for VChainService { 28 | fn add_objs(&self, ctx: CallContext<'_>, arg: TxAddObjs) -> Result<(), Error> { 29 | let core = ctx.data().for_core(); 30 | let block_id = core.height().0; 31 | warn!( 32 | "receive tx at blk #{} with {} objects", 33 | block_id, 34 | arg.objs.len() 35 | ); 36 | let mut schema = VChainSchema::new(ctx.service_data()); 37 | schema.objs_in_this_round.extend(arg.objs.iter().cloned()); 38 | Ok(()) 39 | } 40 | } 41 | 42 | impl Service for VChainService { 43 | fn initialize(&self, ctx: CallContext<'_>, params: Vec) -> Result<(), ExecutionError> { 44 | let param = InitParam::from_bytes(params.into()) 45 | .map_err(DispatcherError::malformed_arguments)? 
46 | .into_vchain_type(); 47 | let mut schema = VChainSchema::new(ctx.service_data()); 48 | schema.set_parameter(param).expect("failed to set param"); 49 | Ok(()) 50 | } 51 | 52 | fn state_hash(&self, data: BlockchainData<&dyn Snapshot>) -> Vec { 53 | VChainSchema::new(data.for_executing_service()).state_hash() 54 | } 55 | 56 | fn wire_api(&self, builder: &mut ServiceApiBuilder) { 57 | VChainApi.wire(builder); 58 | } 59 | 60 | fn before_commit(&self, ctx: CallContext<'_>) { 61 | let core = ctx.data().for_core(); 62 | let block_id = core.height().0 as IdType; 63 | let mut schema = VChainSchema::new(ctx.service_data()); 64 | let objs: Vec<_> = schema 65 | .objs_in_this_round 66 | .into_iter() 67 | .map(|o| o.into_vchain_type(block_id)) 68 | .collect(); 69 | schema.objs_in_this_round.clear(); 70 | let prev_block_id = block_id - 1; 71 | info!("commit blk #{} with {} objects", block_id, objs.len()); 72 | let prev_hash = match schema.read_block_header(prev_block_id) { 73 | Ok(header) => header.to_digest(), 74 | _ => Digest::default(), 75 | }; 76 | if let Err(e) = vchain::build_block(block_id, prev_hash, objs.iter(), &mut schema) { 77 | panic!("err when building new block: {:?}", e); 78 | } 79 | } 80 | } 81 | -------------------------------------------------------------------------------- /vchain-exonum/src/tests.rs: -------------------------------------------------------------------------------- 1 | use crate::{ 2 | service::VChainService, 3 | transactions::{InitParam, RawObject, TxAddObjs}, 4 | }; 5 | 6 | use exonum::{ 7 | api::node::public::explorer::TransactionQuery, 8 | crypto::{self, Hash, SecretKey}, 9 | messages::{AnyTx, Verified}, 10 | runtime::rust::Transaction, 11 | }; 12 | use exonum_merkledb::ObjectHash; 13 | use exonum_testkit::{ApiKind, TestKit, TestKitApi}; 14 | use serde_json::json; 15 | use vchain::acc; 16 | 17 | const INSTANCE_ID: u32 = 1; 18 | const INSTANCE_NAME: &str = "vchain"; 19 | 20 | struct VChainApi { 21 | pub inner: TestKitApi, 22 | } 23 | 24 
| impl VChainApi { 25 | fn add_objs(&self, input: TxAddObjs) -> (Verified, SecretKey) { 26 | let (pubkey, key) = crypto::gen_keypair(); 27 | let tx = input.sign(INSTANCE_ID, pubkey, &key); 28 | let tx_info: serde_json::Value = self 29 | .inner 30 | .public(ApiKind::Explorer) 31 | .query(&json!({ "tx_body": tx })) 32 | .post("v1/transactions") 33 | .unwrap(); 34 | assert_eq!(tx_info, json!({ "tx_hash": tx.object_hash() })); 35 | (tx, key) 36 | } 37 | 38 | fn get_param(&self) -> vchain::Parameter { 39 | self.inner 40 | .public(ApiKind::Service(INSTANCE_NAME)) 41 | .get("get/param") 42 | .unwrap() 43 | } 44 | 45 | fn assert_tx_status(&self, tx_hash: Hash, expected_status: &serde_json::Value) { 46 | let info: serde_json::Value = self 47 | .inner 48 | .public(ApiKind::Explorer) 49 | .query(&TransactionQuery::new(tx_hash)) 50 | .get("v1/transactions") 51 | .unwrap(); 52 | 53 | if let serde_json::Value::Object(mut info) = info { 54 | let tx_status = info.remove("status").unwrap(); 55 | assert_eq!(tx_status, *expected_status); 56 | } else { 57 | panic!("Invalid transaction info format, object expected"); 58 | } 59 | } 60 | } 61 | 62 | fn create_testkit(param: InitParam) -> (TestKit, VChainApi) { 63 | let mut testkit = TestKit::for_rust_service(VChainService, INSTANCE_NAME, INSTANCE_ID, param); 64 | let api = VChainApi { 65 | inner: testkit.api(), 66 | }; 67 | (testkit, api) 68 | } 69 | 70 | #[test] 71 | fn test_initialize() { 72 | let (_, api) = create_testkit(InitParam { 73 | v_bit_len: vec![16], 74 | is_acc2: true, 75 | intra_index: true, 76 | skip_list_max_level: 2, 77 | }); 78 | let param = api.get_param(); 79 | assert_eq!(param.v_bit_len, vec![16]); 80 | assert_eq!(param.acc_type, acc::Type::ACC2); 81 | assert_eq!(param.use_sk, false); 82 | assert_eq!(param.intra_index, true); 83 | assert_eq!(param.skip_list_max_level, 2); 84 | } 85 | 86 | #[test] 87 | fn test_add_objs() { 88 | let (mut testkit, api) = create_testkit(InitParam { 89 | v_bit_len: vec![16], 90 | is_acc2: 
true, 91 | intra_index: true, 92 | skip_list_max_level: 2, 93 | }); 94 | let tx_input = TxAddObjs { 95 | objs: vec![ 96 | RawObject { 97 | v_data: vec![1], 98 | w_data: vec!["a".to_owned()], 99 | }, 100 | RawObject { 101 | v_data: vec![2], 102 | w_data: vec!["b".to_owned()], 103 | }, 104 | ], 105 | }; 106 | 107 | let (tx1, _) = api.add_objs(tx_input.clone()); 108 | let (tx2, _) = api.add_objs(tx_input); 109 | testkit.create_block(); 110 | api.assert_tx_status(tx1.object_hash(), &json!({ "type": "success" })); 111 | api.assert_tx_status(tx2.object_hash(), &json!({ "type": "success" })); 112 | } 113 | -------------------------------------------------------------------------------- /vchain-exonum/src/transactions.rs: -------------------------------------------------------------------------------- 1 | use exonum_derive::{BinaryValue, ObjectHash}; 2 | use exonum_proto::ProtobufConvert; 3 | use std::collections::HashSet; 4 | use std::iter::FromIterator; 5 | use vchain::IdType; 6 | 7 | use super::proto; 8 | 9 | #[derive(Clone, Debug, Serialize, Deserialize, ProtobufConvert, BinaryValue, ObjectHash)] 10 | #[protobuf_convert(source = "proto::RawObject")] 11 | pub struct RawObject { 12 | pub v_data: Vec, 13 | pub w_data: Vec, 14 | } 15 | 16 | impl RawObject { 17 | pub fn create(input: &vchain::RawObject) -> Self { 18 | Self { 19 | v_data: input.v_data.clone(), 20 | w_data: Vec::from_iter(input.w_data.iter().cloned()), 21 | } 22 | } 23 | 24 | pub fn into_vchain_type(self, block_id: IdType) -> vchain::RawObject { 25 | vchain::RawObject { 26 | block_id, 27 | v_data: self.v_data, 28 | w_data: HashSet::from_iter(self.w_data.into_iter()), 29 | } 30 | } 31 | } 32 | 33 | #[derive(Clone, Debug, Serialize, Deserialize, ProtobufConvert, BinaryValue, ObjectHash)] 34 | #[protobuf_convert(source = "proto::TxAddObjs")] 35 | pub struct TxAddObjs { 36 | pub objs: Vec, 37 | } 38 | 39 | #[derive(Serialize, Deserialize, Clone, Debug, ProtobufConvert, BinaryValue, ObjectHash)] 40 | 
#[protobuf_convert(source = "proto::InitParam")] 41 | pub struct InitParam { 42 | pub v_bit_len: Vec, 43 | pub is_acc2: bool, 44 | pub intra_index: bool, 45 | pub skip_list_max_level: u32, 46 | } 47 | 48 | impl InitParam { 49 | pub fn into_vchain_type(self) -> vchain::Parameter { 50 | vchain::Parameter { 51 | v_bit_len: self.v_bit_len.iter().map(|x| *x as u8).collect(), 52 | acc_type: if self.is_acc2 { 53 | vchain::acc::Type::ACC2 54 | } else { 55 | vchain::acc::Type::ACC1 56 | }, 57 | use_sk: false, 58 | intra_index: self.intra_index, 59 | skip_list_max_level: self.skip_list_max_level as vchain::SkipLstLvlType, 60 | } 61 | } 62 | } 63 | -------------------------------------------------------------------------------- /vchain-simchain/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "vchain-simchain" 3 | version = "0.1.0" 4 | authors = ["Cheng XU "] 5 | edition = "2018" 6 | publish = false 7 | 8 | [dependencies] 9 | actix-cors = "0.5" 10 | actix-rt = "1.1" 11 | actix-web = "3.3" 12 | anyhow = "1.0" 13 | async-trait = "0.1" 14 | bincode = "1.3" 15 | env_logger = "0.8" 16 | futures = "0.3" 17 | log = "0.4" 18 | rocksdb = "0.15" 19 | serde = { version = "1.0", features = ["derive"] } 20 | serde_json = "1.0" 21 | structopt = "0.3" 22 | vchain = { path = "../vchain" } 23 | -------------------------------------------------------------------------------- /vchain-simchain/src/bin/simchain-build.rs: -------------------------------------------------------------------------------- 1 | #[macro_use] 2 | extern crate log; 3 | 4 | use anyhow::{bail, Result}; 5 | use std::path::{Path, PathBuf}; 6 | use structopt::StructOpt; 7 | use vchain::acc; 8 | use vchain::chain::*; 9 | use vchain::{Digest, Digestible}; 10 | use vchain_simchain::SimChain; 11 | 12 | fn parse_acc(input: &str) -> Result { 13 | let input = input.to_ascii_lowercase(); 14 | if input == "acc1" { 15 | Ok(acc::Type::ACC1) 16 | } else if input == "acc2" 
{ 17 | Ok(acc::Type::ACC2) 18 | } else { 19 | bail!("invalid acc type, please specify as acc1 or acc2."); 20 | } 21 | } 22 | 23 | #[allow(clippy::box_vec)] 24 | fn parse_v_bit_len(input: &str) -> Result>> { 25 | let x = input 26 | .split(',') 27 | .map(|s| s.trim().parse::().map_err(anyhow::Error::msg)) 28 | .collect::>>()?; 29 | Ok(Box::new(x)) 30 | } 31 | 32 | #[derive(StructOpt, Debug)] 33 | #[structopt(name = "simchain-build")] 34 | struct Opts { 35 | /// input data path 36 | #[structopt(short, long, parse(from_os_str))] 37 | input: PathBuf, 38 | 39 | /// output db path, should be a directory 40 | #[structopt(short, long, parse(from_os_str))] 41 | output: PathBuf, 42 | 43 | /// acc type to be used 44 | #[structopt(long, default_value = "acc2", parse(try_from_str = parse_acc))] 45 | acc: acc::Type, 46 | 47 | /// bit len for each dimension of the v data (e.g. 16,8) 48 | #[structopt(long, parse(try_from_str = parse_v_bit_len))] 49 | #[allow(clippy::box_vec)] 50 | bit_len: Box>, 51 | 52 | /// use sk to build chain 53 | #[structopt(short = "-s", long)] 54 | use_sk: bool, 55 | 56 | /// don't build intra index 57 | #[structopt(short = "-f", long)] 58 | no_intra_index: bool, 59 | 60 | /// max skip list level, 0 means no skip list. 
61 | #[structopt(long, default_value = "0")] 62 | skip_list_max_level: SkipLstLvlType, 63 | } 64 | 65 | fn build_chain(data_path: &Path, out_path: &Path, param: &Parameter) -> Result<()> { 66 | info!("build chain using data from {:?}", data_path); 67 | info!("out path: {:?}", out_path); 68 | info!("param: {:?}", param); 69 | 70 | let raw_objs = load_raw_obj_from_file(data_path)?; 71 | let mut chain = SimChain::create(out_path, param.clone())?; 72 | chain.set_parameter(param.clone())?; 73 | 74 | let mut prev_hash = Digest::default(); 75 | for (id, objs) in raw_objs.iter() { 76 | if id % 1000 == 0 { 77 | info!("build blk #{}", id); 78 | } 79 | let header = build_block(*id, prev_hash, objs.iter(), &mut chain)?; 80 | prev_hash = header.to_digest(); 81 | } 82 | 83 | // overwrite use_sk 84 | if param.use_sk { 85 | let mut new_param = param.clone(); 86 | new_param.use_sk = false; 87 | chain.set_parameter(new_param)?; 88 | } 89 | Ok(()) 90 | } 91 | 92 | fn main() -> Result<()> { 93 | env_logger::init_from_env(env_logger::Env::default().filter_or("RUST_LOG", "info")); 94 | 95 | let opts = Opts::from_args(); 96 | let param = Parameter { 97 | v_bit_len: opts.bit_len.to_vec(), 98 | acc_type: opts.acc, 99 | use_sk: opts.use_sk, 100 | intra_index: !opts.no_intra_index, 101 | skip_list_max_level: opts.skip_list_max_level, 102 | }; 103 | 104 | build_chain(&opts.input, &opts.output, ¶m)?; 105 | 106 | Ok(()) 107 | } 108 | -------------------------------------------------------------------------------- /vchain-simchain/src/bin/simchain-server.rs: -------------------------------------------------------------------------------- 1 | #[macro_use] 2 | extern crate log; 3 | 4 | use actix_cors::Cors; 5 | use actix_web::{web, App, HttpResponse, HttpServer, Responder}; 6 | use futures::StreamExt; 7 | use serde::Serialize; 8 | use serde_json::json; 9 | use std::fmt; 10 | use std::path::PathBuf; 11 | use structopt::StructOpt; 12 | use vchain::acc; 13 | use vchain::chain::*; 14 | use 
vchain_simchain::SimChain; 15 | 16 | static mut CHAIN: Option = None; 17 | 18 | fn get_chain() -> &'static SimChain { 19 | unsafe { CHAIN.as_ref().unwrap() } 20 | } 21 | 22 | #[derive(Debug)] 23 | struct MyErr(anyhow::Error); 24 | 25 | impl fmt::Display for MyErr { 26 | fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { 27 | write!(f, "error: {}", self.0.to_string()) 28 | } 29 | } 30 | 31 | fn handle_err(e: E) -> MyErr { 32 | MyErr(anyhow::Error::msg(e)) 33 | } 34 | 35 | impl actix_web::error::ResponseError for MyErr {} 36 | 37 | macro_rules! impl_get_info { 38 | ($name: ident, $func: ident) => { 39 | async fn $name(req: web::Path<(IdType,)>) -> actix_web::Result { 40 | let id = req.into_inner().0; 41 | info!("call {} with {}", stringify!($func), id); 42 | let data = get_chain().$func(id).map_err(handle_err)?; 43 | Ok(HttpResponse::Ok().json(data)) 44 | } 45 | }; 46 | } 47 | 48 | impl_get_info!(web_get_blk_header, read_block_header); 49 | impl_get_info!(web_get_blk_data, read_block_data); 50 | impl_get_info!(web_get_intra_index_node, read_intra_index_node); 51 | impl_get_info!(web_get_skip_list_node, read_skip_list_node); 52 | impl_get_info!(web_get_object, read_object); 53 | 54 | async fn web_get_index_node(req: web::Path<(IdType,)>) -> actix_web::Result { 55 | let id = req.into_inner().0; 56 | info!("call read_index_node with {}", id); 57 | match get_chain().read_intra_index_node(id) { 58 | Ok(data) => Ok(HttpResponse::Ok().json(data)), 59 | _ => { 60 | let data = get_chain().read_skip_list_node(id).map_err(handle_err)?; 61 | Ok(HttpResponse::Ok().json(json!({ "SkipListNode": data }))) 62 | } 63 | } 64 | } 65 | 66 | async fn web_get_param() -> actix_web::Result { 67 | info!("call get_parameter"); 68 | let data = get_chain().get_parameter().map_err(handle_err)?; 69 | Ok(HttpResponse::Ok().json(data)) 70 | } 71 | 72 | async fn web_query(query: web::Json) -> actix_web::Result { 73 | let param = get_chain().get_parameter().map_err(handle_err)?; 74 | match 
param.acc_type { 75 | acc::Type::ACC1 => { 76 | let res: OverallResult = 77 | historical_query(&query, get_chain()).map_err(handle_err)?; 78 | Ok(HttpResponse::Ok().json(res)) 79 | } 80 | acc::Type::ACC2 => { 81 | let res: OverallResult = 82 | historical_query(&query, get_chain()).map_err(handle_err)?; 83 | Ok(HttpResponse::Ok().json(res)) 84 | } 85 | } 86 | } 87 | 88 | #[derive(Serialize)] 89 | struct VerifyResponse { 90 | pass: bool, 91 | detail: VerifyResult, 92 | verify_time_in_ms: u64, 93 | } 94 | 95 | async fn web_verify(mut body: web::Payload) -> actix_web::Result { 96 | let mut bytes = web::BytesMut::new(); 97 | while let Some(item) = body.next().await { 98 | bytes.extend_from_slice(&item?); 99 | } 100 | 101 | let param = get_chain() 102 | .lightnode_get_parameter() 103 | .await 104 | .map_err(handle_err)?; 105 | let (verify_result, time) = match param.acc_type { 106 | acc::Type::ACC1 => { 107 | let res: OverallResult = 108 | serde_json::from_slice(&bytes).map_err(handle_err)?; 109 | res.verify(get_chain()).await 110 | } 111 | acc::Type::ACC2 => { 112 | let res: OverallResult = 113 | serde_json::from_slice(&bytes).map_err(handle_err)?; 114 | res.verify(get_chain()).await 115 | } 116 | } 117 | .map_err(handle_err)?; 118 | let response = VerifyResponse { 119 | pass: verify_result.is_ok(), 120 | detail: verify_result, 121 | verify_time_in_ms: time.as_millis() as u64, 122 | }; 123 | Ok(HttpResponse::Ok().json(response)) 124 | } 125 | 126 | #[derive(StructOpt, Debug)] 127 | #[structopt(name = "simchain-server")] 128 | struct Opts { 129 | /// input db path 130 | #[structopt(short = "-i", long, parse(from_os_str))] 131 | db: PathBuf, 132 | 133 | /// server binding address 134 | #[structopt(short, long, default_value = "127.0.0.1:8000")] 135 | binding: String, 136 | } 137 | 138 | #[actix_rt::main] 139 | async fn main() -> actix_web::Result<()> { 140 | env_logger::init_from_env(env_logger::Env::default().filter_or("RUST_LOG", "info")); 141 | let opts = 
Opts::from_args(); 142 | let chain = SimChain::open(&opts.db).map_err(handle_err)?; 143 | unsafe { 144 | CHAIN = Some(chain); 145 | } 146 | 147 | HttpServer::new(|| { 148 | App::new() 149 | .wrap( 150 | Cors::default() 151 | .send_wildcard() 152 | .allowed_methods(vec!["GET", "POST"]), 153 | ) 154 | .route("/get/param", web::get().to(web_get_param)) 155 | .route("/get/blk_header/{id}", web::get().to(web_get_blk_header)) 156 | .route("/get/blk_data/{id}", web::get().to(web_get_blk_data)) 157 | .route( 158 | "/get/intraindex/{id}", 159 | web::get().to(web_get_intra_index_node), 160 | ) 161 | .route("/get/skiplist/{id}", web::get().to(web_get_skip_list_node)) 162 | .route("/get/index/{id}", web::get().to(web_get_index_node)) 163 | .route("/get/obj/{id}", web::get().to(web_get_object)) 164 | .route("/query", web::post().to(web_query)) 165 | .route("/verify", web::post().to(web_verify)) 166 | }) 167 | .bind(opts.binding)? 168 | .run() 169 | .await?; 170 | 171 | Ok(()) 172 | } 173 | -------------------------------------------------------------------------------- /vchain-simchain/src/lib.rs: -------------------------------------------------------------------------------- 1 | #[macro_use] 2 | extern crate log; 3 | 4 | use anyhow::{Context, Result}; 5 | use rocksdb::{self, DB}; 6 | use std::fs; 7 | use std::path::{Path, PathBuf}; 8 | use vchain::*; 9 | 10 | pub struct SimChain { 11 | root_path: PathBuf, 12 | param: Parameter, 13 | block_header_db: DB, 14 | block_data_db: DB, 15 | intra_index_db: DB, 16 | skip_list_db: DB, 17 | obj_db: DB, 18 | } 19 | 20 | impl SimChain { 21 | pub fn create(path: &Path, param: Parameter) -> Result { 22 | info!("create db at {:?}", path); 23 | fs::create_dir_all(path).context(format!("failed to create dir {:?}", path))?; 24 | fs::write( 25 | path.join("param.json"), 26 | serde_json::to_string_pretty(¶m)?, 27 | )?; 28 | let mut opts = rocksdb::Options::default(); 29 | opts.create_if_missing(true); 30 | Ok(Self { 31 | root_path: 
path.to_owned(), 32 | param, 33 | block_header_db: DB::open(&opts, path.join("blk_header.db"))?, 34 | block_data_db: DB::open(&opts, path.join("blk_data.db"))?, 35 | intra_index_db: DB::open(&opts, path.join("intra_index.db"))?, 36 | skip_list_db: DB::open(&opts, path.join("skiplist.db"))?, 37 | obj_db: DB::open(&opts, path.join("obj.db"))?, 38 | }) 39 | } 40 | 41 | pub fn open(path: &Path) -> Result { 42 | info!("open db at {:?}", path); 43 | Ok(Self { 44 | root_path: path.to_owned(), 45 | param: serde_json::from_str::(&fs::read_to_string( 46 | path.join("param.json"), 47 | )?)?, 48 | block_header_db: DB::open_default(path.join("blk_header.db"))?, 49 | block_data_db: DB::open_default(path.join("blk_data.db"))?, 50 | intra_index_db: DB::open_default(path.join("intra_index.db"))?, 51 | skip_list_db: DB::open_default(path.join("skiplist.db"))?, 52 | obj_db: DB::open_default(path.join("obj.db"))?, 53 | }) 54 | } 55 | } 56 | 57 | #[async_trait::async_trait] 58 | impl LightNodeInterface for SimChain { 59 | async fn lightnode_get_parameter(&self) -> Result { 60 | self.get_parameter() 61 | } 62 | async fn lightnode_read_block_header(&self, id: IdType) -> Result { 63 | self.read_block_header(id) 64 | } 65 | } 66 | 67 | impl ReadInterface for SimChain { 68 | fn get_parameter(&self) -> Result { 69 | Ok(self.param.clone()) 70 | } 71 | fn read_block_header(&self, id: IdType) -> Result { 72 | let data = self 73 | .block_header_db 74 | .get(id.to_le_bytes())? 75 | .context("failed to read block header")?; 76 | Ok(bincode::deserialize::(&data[..])?) 77 | } 78 | fn read_block_data(&self, id: IdType) -> Result { 79 | let data = self 80 | .block_data_db 81 | .get(id.to_le_bytes())? 82 | .context("failed to read block data")?; 83 | Ok(bincode::deserialize::(&data[..])?) 84 | } 85 | fn read_intra_index_node(&self, id: IdType) -> Result { 86 | let data = self 87 | .intra_index_db 88 | .get(id.to_le_bytes())? 
89 | .context("failed to read index node")?; 90 | Ok(bincode::deserialize::(&data[..])?) 91 | } 92 | fn read_skip_list_node(&self, id: IdType) -> Result { 93 | let data = self 94 | .skip_list_db 95 | .get(id.to_le_bytes())? 96 | .context("failed to read skip list")?; 97 | Ok(bincode::deserialize::(&data[..])?) 98 | } 99 | fn read_object(&self, id: IdType) -> Result { 100 | let data = self 101 | .obj_db 102 | .get(id.to_le_bytes())? 103 | .context("failed to read object")?; 104 | Ok(bincode::deserialize::(&data[..])?) 105 | } 106 | } 107 | 108 | impl WriteInterface for SimChain { 109 | fn set_parameter(&mut self, param: Parameter) -> Result<()> { 110 | self.param = param; 111 | let data = serde_json::to_string_pretty(&self.param)?; 112 | fs::write(self.root_path.join("param.json"), data)?; 113 | Ok(()) 114 | } 115 | fn write_block_header(&mut self, header: BlockHeader) -> Result<()> { 116 | let bytes = bincode::serialize(&header)?; 117 | self.block_header_db 118 | .put(header.block_id.to_le_bytes(), bytes)?; 119 | Ok(()) 120 | } 121 | fn write_block_data(&mut self, data: BlockData) -> Result<()> { 122 | let bytes = bincode::serialize(&data)?; 123 | self.block_data_db.put(data.block_id.to_le_bytes(), bytes)?; 124 | Ok(()) 125 | } 126 | fn write_intra_index_node(&mut self, node: IntraIndexNode) -> Result<()> { 127 | let bytes = bincode::serialize(&node)?; 128 | self.intra_index_db.put(node.id().to_le_bytes(), bytes)?; 129 | Ok(()) 130 | } 131 | fn write_skip_list_node(&mut self, node: SkipListNode) -> Result<()> { 132 | let bytes = bincode::serialize(&node)?; 133 | self.skip_list_db.put(node.id.to_le_bytes(), bytes)?; 134 | Ok(()) 135 | } 136 | fn write_object(&mut self, obj: Object) -> Result<()> { 137 | let bytes = bincode::serialize(&obj)?; 138 | self.obj_db.put(obj.id.to_le_bytes(), bytes)?; 139 | Ok(()) 140 | } 141 | } 142 | -------------------------------------------------------------------------------- /vchain/Cargo.toml: 
-------------------------------------------------------------------------------- 1 | [package] 2 | name = "vchain" 3 | version = "0.1.0" 4 | authors = ["Cheng XU "] 5 | edition = "2018" 6 | publish = false 7 | 8 | [[bench]] 9 | name = "fixed_base_pow" 10 | harness = false 11 | 12 | [[bench]] 13 | name = "points_mul_sum" 14 | harness = false 15 | 16 | [dependencies] 17 | anyhow = "1.0" 18 | ark-bls12-381 = "0.2" 19 | ark-ec = { version = "0.2", features = ["parallel"] } 20 | ark-ff = { version = "0.2", features = ["asm", "parallel"] } 21 | ark-poly = { version = "0.2", features = ["parallel"] } 22 | async-trait = "0.1" 23 | bincode = "1.3" 24 | blake2b_simd = "0.5" 25 | futures = "0.3" 26 | hex = "0.4" 27 | howlong = "0.1" 28 | itertools = "0.10" 29 | lazy_static = "1.4" 30 | log = "0.4" 31 | rayon = "1.5" 32 | serde = { version = "1.0", features = ["derive"] } 33 | smallvec = { version = "1.6", features = ["serde"] } 34 | 35 | [dev-dependencies] 36 | actix-rt = "1.1" 37 | criterion = "0.3" 38 | env_logger = "0.8" 39 | rand = "0.7" 40 | serde_json = "1.0" 41 | -------------------------------------------------------------------------------- /vchain/benches/fixed_base_pow.rs: -------------------------------------------------------------------------------- 1 | use ark_ec::ProjectiveCurve; 2 | use ark_ff::{Field, PrimeField}; 3 | use core::ops::MulAssign; 4 | use core::str::FromStr; 5 | use criterion::{black_box, criterion_group, criterion_main, Criterion}; 6 | use vchain::acc::utils::*; 7 | use vchain::acc::{Fr, G1Projective as G1, G2Projective as G2}; 8 | 9 | pub fn bench_pow_g1(c: &mut Criterion) { 10 | let mut group = c.benchmark_group("pow_g1"); 11 | let num = Fr::from_str("1050806240378915932164293810269605748").unwrap(); 12 | let g1p = FixedBaseCurvePow::build(&G1::prime_subgroup_generator()); 13 | group.bench_function("normal", |b| { 14 | b.iter(|| { 15 | let mut ans = G1::prime_subgroup_generator(); 16 | ans.mul_assign(black_box(num)); 17 | }) 18 | }); 19 | 
group.bench_function("optimized", |b| b.iter(|| g1p.apply(black_box(&num)))); 20 | group.finish(); 21 | } 22 | 23 | pub fn bench_pow_g2(c: &mut Criterion) { 24 | let mut group = c.benchmark_group("pow_g2"); 25 | let num = Fr::from_str("1050806240378915932164293810269605748").unwrap(); 26 | let g2p = FixedBaseCurvePow::build(&G2::prime_subgroup_generator()); 27 | group.bench_function("nomral", |b| { 28 | b.iter(|| { 29 | let mut ans = G2::prime_subgroup_generator(); 30 | ans.mul_assign(black_box(num)); 31 | }) 32 | }); 33 | group.bench_function("optimized", |b| b.iter(|| g2p.apply(black_box(&num)))); 34 | group.finish(); 35 | } 36 | 37 | pub fn bench_pow_fr(c: &mut Criterion) { 38 | let mut group = c.benchmark_group("pow_fr"); 39 | let base = Fr::from_str("186375271183577333671420248211302045980").unwrap(); 40 | let num = Fr::from_str("1050806240378915932164293810269605748").unwrap(); 41 | let frp = FixedBaseScalarPow::build(&base); 42 | group.bench_function("nomral", |b| { 43 | b.iter(|| base.pow(black_box(num.into_repr()))) 44 | }); 45 | group.bench_function("optimized", |b| b.iter(|| frp.apply(black_box(&num)))); 46 | group.finish(); 47 | } 48 | 49 | criterion_group!(benches, bench_pow_g1, bench_pow_g2, bench_pow_fr); 50 | criterion_main!(benches); 51 | -------------------------------------------------------------------------------- /vchain/benches/points_mul_sum.rs: -------------------------------------------------------------------------------- 1 | use ark_ec::{msm::VariableBaseMSM, AffineCurve, ProjectiveCurve}; 2 | use ark_ff::{PrimeField, UniformRand, Zero}; 3 | use criterion::{black_box, criterion_group, criterion_main, Criterion}; 4 | use rand::SeedableRng; 5 | use rayon::prelude::*; 6 | use vchain::acc::utils::*; 7 | use vchain::acc::{Fr, G1Projective as G1}; 8 | 9 | fn naive( 10 | bases: &[G], 11 | scalars: &[::BigInt], 12 | ) -> G::Projective { 13 | let mut acc = G::Projective::zero(); 14 | 15 | for (base, scalar) in bases.iter().zip(scalars.iter()) { 
16 | acc += &base.mul(*scalar); 17 | } 18 | acc 19 | } 20 | 21 | fn fixed_base_pow( 22 | bases: &[FixedBaseCurvePow], 23 | scalars: &[::BigInt], 24 | ) -> G { 25 | let mut acc = G::zero(); 26 | 27 | for (base, scalar) in bases.iter().zip(scalars.iter()) { 28 | acc += &base.apply( 29 | &::from_repr(*scalar) 30 | .expect("failed to convert to prime field"), 31 | ); 32 | } 33 | acc 34 | } 35 | 36 | pub fn bench_points_mul_sum(c: &mut Criterion) { 37 | const SAMPLES: usize = 1 << 10; 38 | let mut rng = rand::rngs::StdRng::seed_from_u64(123_456_789u64); 39 | 40 | let v = (0..SAMPLES) 41 | .map(|_| Fr::rand(&mut rng).into_repr()) 42 | .collect::>(); 43 | let g = (0..SAMPLES) 44 | .map(|_| G1::rand(&mut rng).into_affine()) 45 | .collect::>(); 46 | let mut gp: Vec> = Vec::with_capacity(g.len()); 47 | (0..g.len()) 48 | .into_par_iter() 49 | .map(|i| FixedBaseCurvePow::build(&g[i].into_projective())) 50 | .collect_into_vec(&mut gp); 51 | 52 | let mut group = c.benchmark_group("points_mul_sum"); 53 | group.sample_size(10); 54 | group.bench_function("naive", |b| { 55 | b.iter(|| black_box(naive(g.as_slice(), v.as_slice()))) 56 | }); 57 | group.bench_function("multi_scalar_mul", |b| { 58 | b.iter(|| { 59 | black_box(VariableBaseMSM::multi_scalar_mul( 60 | g.as_slice(), 61 | v.as_slice(), 62 | )) 63 | }) 64 | }); 65 | group.bench_function("fixed_base_pow", |b| { 66 | b.iter(|| black_box(fixed_base_pow(gp.as_slice(), v.as_slice()))) 67 | }); 68 | group.finish(); 69 | } 70 | 71 | criterion_group!(benches, bench_points_mul_sum); 72 | criterion_main!(benches); 73 | -------------------------------------------------------------------------------- /vchain/src/acc/digest_set.rs: -------------------------------------------------------------------------------- 1 | use crate::acc::utils::digest_to_prime_field; 2 | use crate::set::{MultiSet, SetElement}; 3 | use ark_ff::PrimeField; 4 | use ark_poly::{univariate::DensePolynomial, UVPolynomial}; 5 | use core::ops::Deref; 6 | use rayon::{self, 
prelude::*}; 7 | use std::borrow::Cow; 8 | 9 | #[derive(Debug, Clone, Default)] 10 | pub struct DigestSet { 11 | pub(crate) inner: Vec<(F, u32)>, 12 | } 13 | 14 | impl DigestSet { 15 | pub fn new(input: &MultiSet) -> Self { 16 | let mut inner: Vec<(F, u32)> = Vec::with_capacity(input.len()); 17 | (0..input.len()) 18 | .into_par_iter() 19 | .map(|i| { 20 | let (k, v) = input.iter().nth(i).unwrap(); 21 | let d = k.to_digest(); 22 | (digest_to_prime_field(&d), *v) 23 | }) 24 | .collect_into_vec(&mut inner); 25 | Self { inner } 26 | } 27 | 28 | pub fn expand_to_poly(&self) -> DensePolynomial { 29 | let mut inputs = Vec::new(); 30 | for (k, v) in &self.inner { 31 | for _ in 0..*v { 32 | inputs.push(DensePolynomial::from_coefficients_vec(vec![*k, F::one()])); 33 | } 34 | } 35 | 36 | fn expand<'a, F: PrimeField>( 37 | polys: &'a [DensePolynomial], 38 | ) -> Cow<'a, DensePolynomial> { 39 | if polys.is_empty() { 40 | return Cow::Owned(DensePolynomial::from_coefficients_vec(vec![F::one()])); 41 | } else if polys.len() == 1 { 42 | return Cow::Borrowed(&polys[0]); 43 | } 44 | let mid = polys.len() / 2; 45 | let (left, right) = rayon::join(|| expand(&polys[..mid]), || expand(&polys[mid..])); 46 | Cow::Owned(left.as_ref() * right.as_ref()) 47 | } 48 | 49 | expand(&inputs).into_owned() 50 | } 51 | } 52 | 53 | impl Deref for DigestSet { 54 | type Target = Vec<(F, u32)>; 55 | 56 | fn deref(&self) -> &Self::Target { 57 | &self.inner 58 | } 59 | } 60 | 61 | #[cfg(test)] 62 | mod tests { 63 | use super::*; 64 | use ark_bls12_381::Fr; 65 | 66 | #[test] 67 | fn test_digest_to_poly() { 68 | let set = DigestSet { 69 | inner: vec![ 70 | (Fr::from(1u32), 2), 71 | (Fr::from(2u32), 1), 72 | (Fr::from(3u32), 1), 73 | ], 74 | }; 75 | let expect = DensePolynomial::from_coefficients_vec(vec![ 76 | Fr::from(6u32), 77 | Fr::from(17u32), 78 | Fr::from(17u32), 79 | Fr::from(7u32), 80 | Fr::from(1u32), 81 | ]); 82 | assert_eq!(set.expand_to_poly(), expect); 83 | } 84 | } 85 | 
-------------------------------------------------------------------------------- /vchain/src/acc/mod.rs: -------------------------------------------------------------------------------- 1 | pub mod digest_set; 2 | pub mod serde_impl; 3 | pub mod utils; 4 | 5 | pub use ark_bls12_381::{ 6 | Bls12_381 as Curve, Fq12, Fr, G1Affine, G1Projective, G2Affine, G2Projective, 7 | }; 8 | pub type DigestSet = digest_set::DigestSet; 9 | 10 | use crate::digest::{Digest, Digestible}; 11 | use crate::set::{MultiSet, SetElement}; 12 | use anyhow::{self, bail, ensure, Context}; 13 | use ark_ec::{msm::VariableBaseMSM, AffineCurve, PairingEngine, ProjectiveCurve}; 14 | use ark_ff::{Field, One, PrimeField, ToBytes, Zero}; 15 | use ark_poly::{univariate::DensePolynomial, Polynomial}; 16 | use core::any::Any; 17 | use core::str::FromStr; 18 | use rayon::prelude::*; 19 | use serde::{Deserialize, Serialize}; 20 | use utils::{xgcd, FixedBaseCurvePow, FixedBaseScalarPow}; 21 | 22 | #[cfg(test)] 23 | const GS_VEC_LEN: usize = 0; 24 | #[cfg(not(test))] 25 | const GS_VEC_LEN: usize = 5000; 26 | 27 | lazy_static! 
{ 28 | // 250 bits 29 | static ref PUB_Q: Fr = Fr::from_str("480721077433357505777975950918924200361380912084288598463024400624539293706").unwrap(); 30 | // 128 bits 31 | static ref PRI_S: Fr = Fr::from_str("259535143263514268207918833918737523409").unwrap(); 32 | static ref G1_POWER: FixedBaseCurvePow = 33 | FixedBaseCurvePow::build(&G1Projective::prime_subgroup_generator()); 34 | static ref G2_POWER: FixedBaseCurvePow = 35 | FixedBaseCurvePow::build(&G2Projective::prime_subgroup_generator()); 36 | static ref PRI_S_POWER: FixedBaseScalarPow = FixedBaseScalarPow::build(&PRI_S); 37 | static ref G1_S_VEC: Vec = { 38 | info!("Initialize G1_S_VEC..."); 39 | let timer = howlong::ProcessCPUTimer::new(); 40 | let mut res: Vec = Vec::with_capacity(GS_VEC_LEN); 41 | (0..GS_VEC_LEN) 42 | .into_par_iter() 43 | .map(|i| get_g1s(Fr::from(i as u64))) 44 | .collect_into_vec(&mut res); 45 | info!("Done in {}.", timer.elapsed()); 46 | res 47 | }; 48 | static ref G2_S_VEC: Vec = { 49 | info!("Initialize G2_S_VEC..."); 50 | let timer = howlong::ProcessCPUTimer::new(); 51 | let mut res: Vec = Vec::with_capacity(GS_VEC_LEN); 52 | (0..GS_VEC_LEN) 53 | .into_par_iter() 54 | .map(|i| get_g2s(Fr::from(i as u64))) 55 | .collect_into_vec(&mut res); 56 | info!("Done in {}.", timer.elapsed()); 57 | res 58 | }; 59 | static ref E_G_G: Fq12 = Curve::pairing( 60 | G1Affine::prime_subgroup_generator(), 61 | G2Affine::prime_subgroup_generator() 62 | ); 63 | } 64 | 65 | fn get_g1s(coeff: Fr) -> G1Affine { 66 | let si = PRI_S_POWER.apply(&coeff); 67 | G1_POWER.apply(&si).into_affine() 68 | } 69 | 70 | fn get_g2s(coeff: Fr) -> G2Affine { 71 | let si = PRI_S_POWER.apply(&coeff); 72 | G2_POWER.apply(&si).into_affine() 73 | } 74 | 75 | #[derive(Debug, Copy, Clone, Eq, PartialEq, Hash, Serialize, Deserialize)] 76 | pub enum Type { 77 | ACC1, 78 | ACC2, 79 | } 80 | 81 | pub trait Accumulator { 82 | const TYPE: Type; 83 | type Proof; 84 | 85 | fn cal_acc_g1_sk(set: &MultiSet) -> G1Affine { 86 | 
Self::cal_acc_g1_sk_d(&DigestSet::new(set)) 87 | } 88 | fn cal_acc_g1(set: &MultiSet) -> G1Affine { 89 | Self::cal_acc_g1_d(&DigestSet::new(set)) 90 | } 91 | fn cal_acc_g2_sk(set: &MultiSet) -> G2Affine { 92 | Self::cal_acc_g2_sk_d(&DigestSet::new(set)) 93 | } 94 | fn cal_acc_g2(set: &MultiSet) -> G2Affine { 95 | Self::cal_acc_g2_d(&DigestSet::new(set)) 96 | } 97 | fn cal_acc_g1_sk_d(set: &DigestSet) -> G1Affine; 98 | fn cal_acc_g1_d(set: &DigestSet) -> G1Affine; 99 | fn cal_acc_g2_sk_d(set: &DigestSet) -> G2Affine; 100 | fn cal_acc_g2_d(set: &DigestSet) -> G2Affine; 101 | fn gen_proof(set1: &DigestSet, set2: &DigestSet) -> anyhow::Result; 102 | } 103 | 104 | pub trait AccumulatorProof: Eq + PartialEq { 105 | const TYPE: Type; 106 | 107 | fn gen_proof(set1: &DigestSet, set2: &DigestSet) -> anyhow::Result 108 | where 109 | Self: core::marker::Sized; 110 | 111 | fn combine_proof(&mut self, other: &Self) -> anyhow::Result<()>; 112 | 113 | fn as_any(&self) -> &dyn Any; 114 | } 115 | 116 | pub struct Acc1; 117 | 118 | impl Acc1 { 119 | fn poly_to_g1(poly: DensePolynomial) -> G1Affine { 120 | let mut idxes: Vec = Vec::with_capacity(poly.degree() + 1); 121 | for (i, coeff) in poly.coeffs.iter().enumerate() { 122 | if coeff.is_zero() { 123 | continue; 124 | } 125 | idxes.push(i); 126 | } 127 | 128 | let mut bases: Vec = Vec::with_capacity(idxes.len()); 129 | let mut scalars: Vec<::BigInt> = Vec::with_capacity(idxes.len()); 130 | (0..idxes.len()) 131 | .into_par_iter() 132 | .map(|i| { 133 | G1_S_VEC.get(i).copied().unwrap_or_else(|| { 134 | trace!("access g1 pub key at {}", i); 135 | get_g1s(Fr::from(i as u64)) 136 | }) 137 | }) 138 | .collect_into_vec(&mut bases); 139 | (0..idxes.len()) 140 | .into_par_iter() 141 | .map(|i| poly.coeffs[i].into_repr()) 142 | .collect_into_vec(&mut scalars); 143 | 144 | VariableBaseMSM::multi_scalar_mul(&bases[..], &scalars[..]).into_affine() 145 | } 146 | 147 | fn poly_to_g2(poly: DensePolynomial) -> G2Affine { 148 | let mut idxes: Vec = 
Vec::with_capacity(poly.degree() + 1); 149 | for (i, coeff) in poly.coeffs.iter().enumerate() { 150 | if coeff.is_zero() { 151 | continue; 152 | } 153 | idxes.push(i); 154 | } 155 | 156 | let mut bases: Vec = Vec::with_capacity(idxes.len()); 157 | let mut scalars: Vec<::BigInt> = Vec::with_capacity(idxes.len()); 158 | (0..idxes.len()) 159 | .into_par_iter() 160 | .map(|i| { 161 | G2_S_VEC.get(i).copied().unwrap_or_else(|| { 162 | trace!("access g2 pub key at {}", i); 163 | get_g2s(Fr::from(i as u64)) 164 | }) 165 | }) 166 | .collect_into_vec(&mut bases); 167 | (0..idxes.len()) 168 | .into_par_iter() 169 | .map(|i| poly.coeffs[i].into_repr()) 170 | .collect_into_vec(&mut scalars); 171 | 172 | VariableBaseMSM::multi_scalar_mul(&bases[..], &scalars[..]).into_affine() 173 | } 174 | } 175 | 176 | #[derive(Debug, Clone, Eq, PartialEq, Serialize, Deserialize)] 177 | pub struct Acc1Proof { 178 | #[serde(with = "serde_impl")] 179 | f1: G2Affine, 180 | #[serde(with = "serde_impl")] 181 | f2: G2Affine, 182 | } 183 | 184 | impl AccumulatorProof for Acc1Proof { 185 | const TYPE: Type = Type::ACC1; 186 | 187 | fn gen_proof(set1: &DigestSet, set2: &DigestSet) -> anyhow::Result { 188 | Acc1::gen_proof(set1, set2) 189 | } 190 | 191 | fn combine_proof(&mut self, _other: &Self) -> anyhow::Result<()> { 192 | bail!("invalid operation"); 193 | } 194 | 195 | fn as_any(&self) -> &dyn Any { 196 | self 197 | } 198 | } 199 | 200 | impl Acc1Proof { 201 | pub fn verify(&self, acc1: &G1Affine, acc2: &G1Affine) -> bool { 202 | Curve::product_of_pairings(&[ 203 | ((*acc1).into(), self.f1.into()), 204 | ((*acc2).into(), self.f2.into()), 205 | ]) == *E_G_G 206 | } 207 | } 208 | 209 | impl Accumulator for Acc1 { 210 | const TYPE: Type = Type::ACC1; 211 | type Proof = Acc1Proof; 212 | 213 | fn cal_acc_g1_sk_d(set: &DigestSet) -> G1Affine { 214 | let x = set 215 | .par_iter() 216 | .map(|(v, exp)| { 217 | let s = *PRI_S + v; 218 | let exp = [*exp as u64]; 219 | s.pow(&exp) 220 | }) 221 | 
.reduce(Fr::one, |a, b| a * &b); 222 | G1_POWER.apply(&x).into_affine() 223 | } 224 | fn cal_acc_g1_d(set: &DigestSet) -> G1Affine { 225 | let poly = set.expand_to_poly(); 226 | Self::poly_to_g1(poly) 227 | } 228 | fn cal_acc_g2_sk_d(set: &DigestSet) -> G2Affine { 229 | let x = set 230 | .par_iter() 231 | .map(|(v, exp)| { 232 | let s = *PRI_S + v; 233 | let exp = [*exp as u64]; 234 | s.pow(&exp) 235 | }) 236 | .reduce(Fr::one, |a, b| a * &b); 237 | G2_POWER.apply(&x).into_affine() 238 | } 239 | fn cal_acc_g2_d(set: &DigestSet) -> G2Affine { 240 | let poly = set.expand_to_poly(); 241 | Self::poly_to_g2(poly) 242 | } 243 | fn gen_proof(set1: &DigestSet, set2: &DigestSet) -> anyhow::Result { 244 | let poly1 = set1.expand_to_poly(); 245 | let poly2 = set2.expand_to_poly(); 246 | let (g, x, y) = xgcd(poly1, poly2).context("failed to compute xgcd")?; 247 | ensure!(g.degree() == 0, "cannot generate proof"); 248 | Ok(Acc1Proof { 249 | f1: Self::poly_to_g2(&x / &g), 250 | f2: Self::poly_to_g2(&y / &g), 251 | }) 252 | } 253 | } 254 | 255 | pub struct Acc2; 256 | 257 | #[derive(Debug, Clone, Eq, PartialEq, Serialize, Deserialize)] 258 | pub struct Acc2Proof { 259 | #[serde(with = "serde_impl")] 260 | f: G1Affine, 261 | } 262 | 263 | impl AccumulatorProof for Acc2Proof { 264 | const TYPE: Type = Type::ACC2; 265 | 266 | fn gen_proof(set1: &DigestSet, set2: &DigestSet) -> anyhow::Result { 267 | Acc2::gen_proof(set1, set2) 268 | } 269 | 270 | fn combine_proof(&mut self, other: &Self) -> anyhow::Result<()> { 271 | let mut f = self.f.into_projective(); 272 | f.add_assign_mixed(&other.f); 273 | self.f = f.into_affine(); 274 | Ok(()) 275 | } 276 | 277 | fn as_any(&self) -> &dyn Any { 278 | self 279 | } 280 | } 281 | 282 | impl Acc2Proof { 283 | pub fn verify(&self, acc1: &G1Affine, acc2: &G2Affine) -> bool { 284 | let a = Curve::pairing(*acc1, *acc2); 285 | let b = Curve::pairing(self.f, G2Affine::prime_subgroup_generator()); 286 | a == b 287 | } 288 | } 289 | 290 | impl Accumulator 
for Acc2 { 291 | const TYPE: Type = Type::ACC2; 292 | type Proof = Acc2Proof; 293 | 294 | fn cal_acc_g1_sk_d(set: &DigestSet) -> G1Affine { 295 | let x = set 296 | .par_iter() 297 | .map(|(a, b)| { 298 | let s = PRI_S_POWER.apply(a); 299 | s * &Fr::from(*b) 300 | }) 301 | .reduce(Fr::zero, |a, b| a + &b); 302 | G1_POWER.apply(&x).into_affine() 303 | } 304 | fn cal_acc_g1_d(set: &DigestSet) -> G1Affine { 305 | let mut bases: Vec = Vec::with_capacity(set.len()); 306 | let mut scalars: Vec<::BigInt> = Vec::with_capacity(set.len()); 307 | (0..set.len()) 308 | .into_par_iter() 309 | .map(|i| get_g1s(set[i].0)) 310 | .collect_into_vec(&mut bases); 311 | (0..set.len()) 312 | .into_par_iter() 313 | .map(|i| ::BigInt::from(set[i].1 as u64)) 314 | .collect_into_vec(&mut scalars); 315 | VariableBaseMSM::multi_scalar_mul(&bases[..], &scalars[..]).into_affine() 316 | } 317 | fn cal_acc_g2_sk_d(set: &DigestSet) -> G2Affine { 318 | let x = set 319 | .par_iter() 320 | .map(|(a, b)| { 321 | let s = PRI_S_POWER.apply(&(*PUB_Q - a)); 322 | s * &Fr::from(*b) 323 | }) 324 | .reduce(Fr::zero, |a, b| a + &b); 325 | G2_POWER.apply(&x).into_affine() 326 | } 327 | fn cal_acc_g2_d(set: &DigestSet) -> G2Affine { 328 | let mut bases: Vec = Vec::with_capacity(set.len()); 329 | let mut scalars: Vec<::BigInt> = Vec::with_capacity(set.len()); 330 | (0..set.len()) 331 | .into_par_iter() 332 | .map(|i| get_g2s(*PUB_Q - &set[i].0)) 333 | .collect_into_vec(&mut bases); 334 | (0..set.len()) 335 | .into_par_iter() 336 | .map(|i| ::BigInt::from(set[i].1 as u64)) 337 | .collect_into_vec(&mut scalars); 338 | VariableBaseMSM::multi_scalar_mul(&bases[..], &scalars[..]).into_affine() 339 | } 340 | fn gen_proof(set1: &DigestSet, set2: &DigestSet) -> anyhow::Result { 341 | let produce_size = set1.len() * set2.len(); 342 | let mut product: Vec<(Fr, u64)> = Vec::with_capacity(produce_size); 343 | (0..produce_size) 344 | .into_par_iter() 345 | .map(|i| { 346 | let set1idx = i / set2.len(); 347 | let set2idx = i % 
set2.len(); 348 | let (s1, q1) = set1[set1idx]; 349 | let (s2, q2) = set2[set2idx]; 350 | (*PUB_Q + &s1 - &s2, (q1 * q2) as u64) 351 | }) 352 | .collect_into_vec(&mut product); 353 | if product.par_iter().any(|(x, _)| *x == *PUB_Q) { 354 | bail!("cannot generate proof"); 355 | } 356 | 357 | let mut bases: Vec = Vec::with_capacity(produce_size); 358 | let mut scalars: Vec<::BigInt> = Vec::with_capacity(produce_size); 359 | (0..produce_size) 360 | .into_par_iter() 361 | .map(|i| get_g1s(product[i].0)) 362 | .collect_into_vec(&mut bases); 363 | (0..produce_size) 364 | .into_par_iter() 365 | .map(|i| ::BigInt::from(product[i].1)) 366 | .collect_into_vec(&mut scalars); 367 | let f = VariableBaseMSM::multi_scalar_mul(&bases[..], &scalars[..]).into_affine(); 368 | Ok(Acc2Proof { f }) 369 | } 370 | } 371 | 372 | #[derive(Debug, Clone, Eq, PartialEq, Serialize, Deserialize)] 373 | pub enum Proof { 374 | ACC1(Box), 375 | ACC2(Box), 376 | } 377 | 378 | impl Digestible for G1Affine { 379 | fn to_digest(&self) -> Digest { 380 | let mut buf = Vec::::new(); 381 | self.write(&mut buf) 382 | .unwrap_or_else(|_| panic!("failed to serialize {:?}", self)); 383 | buf.to_digest() 384 | } 385 | } 386 | 387 | #[cfg(test)] 388 | mod tests { 389 | use super::*; 390 | 391 | fn init_logger() { 392 | let _ = env_logger::builder().is_test(true).try_init(); 393 | } 394 | 395 | #[test] 396 | fn test_cal_acc() { 397 | init_logger(); 398 | let set = MultiSet::from_vec(vec![1, 1, 2, 3, 4, 4, 5, 6, 6, 7, 8, 9]); 399 | assert_eq!(Acc1::cal_acc_g1(&set), Acc1::cal_acc_g1_sk(&set)); 400 | assert_eq!(Acc1::cal_acc_g2(&set), Acc1::cal_acc_g2_sk(&set)); 401 | assert_eq!(Acc2::cal_acc_g1(&set), Acc2::cal_acc_g1_sk(&set)); 402 | assert_eq!(Acc2::cal_acc_g2(&set), Acc2::cal_acc_g2_sk(&set)); 403 | } 404 | 405 | #[test] 406 | fn test_acc1_proof() { 407 | init_logger(); 408 | let set1 = DigestSet::new(&MultiSet::from_vec(vec![1, 2, 3])); 409 | let set2 = DigestSet::new(&MultiSet::from_vec(vec![4, 5, 6])); 410 | 
let set3 = DigestSet::new(&MultiSet::from_vec(vec![1, 1])); 411 | let proof = Acc1::gen_proof(&set1, &set2).unwrap(); 412 | let acc1 = Acc1::cal_acc_g1_sk_d(&set1); 413 | let acc2 = Acc1::cal_acc_g1_sk_d(&set2); 414 | assert!(proof.verify(&acc1, &acc2)); 415 | assert!(Acc1::gen_proof(&set1, &set3).is_err()); 416 | } 417 | 418 | #[test] 419 | fn test_acc2_proof() { 420 | init_logger(); 421 | let set1 = DigestSet::new(&MultiSet::from_vec(vec![1, 2, 3])); 422 | let set2 = DigestSet::new(&MultiSet::from_vec(vec![4, 5, 6])); 423 | let set3 = DigestSet::new(&MultiSet::from_vec(vec![1, 1])); 424 | let proof = Acc2::gen_proof(&set1, &set2).unwrap(); 425 | let acc1 = Acc2::cal_acc_g1_sk_d(&set1); 426 | let acc2 = Acc2::cal_acc_g2_sk_d(&set2); 427 | assert!(proof.verify(&acc1, &acc2)); 428 | assert!(Acc2::gen_proof(&set1, &set3).is_err()); 429 | } 430 | 431 | #[test] 432 | fn test_acc2_proof_sum() { 433 | init_logger(); 434 | let set1 = DigestSet::new(&MultiSet::from_vec(vec![1, 2, 3])); 435 | let set2 = DigestSet::new(&MultiSet::from_vec(vec![4, 5, 6])); 436 | let set3 = DigestSet::new(&MultiSet::from_vec(vec![7, 8, 9])); 437 | let mut proof1 = Acc2::gen_proof(&set1, &set2).unwrap(); 438 | let proof2 = Acc2::gen_proof(&set1, &set3).unwrap(); 439 | proof1.combine_proof(&proof2).unwrap(); 440 | let acc1 = Acc2::cal_acc_g1_sk_d(&set1); 441 | let acc2 = Acc2::cal_acc_g2_sk_d(&set2); 442 | let acc3 = Acc2::cal_acc_g2_sk_d(&set3); 443 | let acc4 = { 444 | let mut acc = acc2.into_projective(); 445 | acc.add_assign_mixed(&acc3); 446 | acc.into_affine() 447 | }; 448 | assert!(proof1.verify(&acc1, &acc4)); 449 | } 450 | } 451 | -------------------------------------------------------------------------------- /vchain/src/acc/serde_impl.rs: -------------------------------------------------------------------------------- 1 | use ark_ec::AffineCurve; 2 | use core::marker::PhantomData; 3 | use serde::{ 4 | de::{Deserializer, Visitor}, 5 | ser::Serializer, 6 | }; 7 | 8 | pub fn serialize(c: 
&C, s: S) -> Result { 9 | let mut buf = Vec::::new(); 10 | c.serialize(&mut buf) 11 | .map_err(::custom)?; 12 | if s.is_human_readable() { 13 | s.serialize_str(&hex::encode(&buf)) 14 | } else { 15 | s.serialize_bytes(&buf) 16 | } 17 | } 18 | 19 | pub fn deserialize<'de, D: Deserializer<'de>, C: AffineCurve>(d: D) -> Result { 20 | use core::fmt; 21 | use serde::de::Error as DeError; 22 | 23 | struct HexVisitor(PhantomData); 24 | 25 | impl<'de, C: AffineCurve> Visitor<'de> for HexVisitor { 26 | type Value = C; 27 | 28 | fn expecting(&self, f: &mut fmt::Formatter) -> fmt::Result { 29 | f.write_str("AffineCurve") 30 | } 31 | 32 | fn visit_str(self, value: &str) -> Result { 33 | let data = hex::decode(value).map_err(E::custom)?; 34 | C::deserialize(&data[..]).map_err(E::custom) 35 | } 36 | } 37 | 38 | struct BytesVisitor(PhantomData); 39 | 40 | impl<'de, C: AffineCurve> Visitor<'de> for BytesVisitor { 41 | type Value = C; 42 | 43 | fn expecting(&self, f: &mut fmt::Formatter) -> fmt::Result { 44 | f.write_str("AffineCurve") 45 | } 46 | 47 | fn visit_bytes(self, v: &[u8]) -> Result { 48 | C::deserialize(v).map_err(E::custom) 49 | } 50 | } 51 | 52 | if d.is_human_readable() { 53 | d.deserialize_str(HexVisitor(PhantomData)) 54 | } else { 55 | d.deserialize_bytes(BytesVisitor(PhantomData)) 56 | } 57 | } 58 | 59 | #[cfg(test)] 60 | mod tests { 61 | use super::*; 62 | use ark_bls12_381::{G1Affine, G2Affine}; 63 | use serde::{Deserialize, Serialize}; 64 | 65 | #[derive(Debug, Clone, Eq, PartialEq, Serialize, Deserialize)] 66 | struct Foo { 67 | #[serde(with = "super")] 68 | f1: G1Affine, 69 | #[serde(with = "super")] 70 | f2: G2Affine, 71 | } 72 | 73 | #[test] 74 | fn test_serde() { 75 | #[allow(clippy::blacklisted_name)] 76 | let foo = Foo { 77 | f1: G1Affine::prime_subgroup_generator(), 78 | f2: G2Affine::prime_subgroup_generator(), 79 | }; 80 | 81 | let json = serde_json::to_string_pretty(&foo).unwrap(); 82 | let bin = bincode::serialize(&foo).unwrap(); 83 | 84 | 
assert_eq!(serde_json::from_str::(&json).unwrap(), foo); 85 | assert_eq!(bincode::deserialize::(&bin[..]).unwrap(), foo); 86 | } 87 | } 88 | -------------------------------------------------------------------------------- /vchain/src/acc/utils.rs: -------------------------------------------------------------------------------- 1 | use crate::digest::Digest; 2 | use ark_ec::ProjectiveCurve; 3 | use ark_ff::{BigInteger, FpParameters, PrimeField, Zero}; 4 | use ark_poly::{ 5 | univariate::{DenseOrSparsePolynomial, DensePolynomial}, 6 | UVPolynomial, 7 | }; 8 | use itertools::unfold; 9 | 10 | pub fn try_digest_to_prime_field(input: &Digest) -> Option { 11 | let mut num = F::from_be_bytes_mod_order(&input.0).into_repr(); 12 | // ensure the result is at most in 248 bits. so PUB_Q - Fr and Fr + PUB_Q - Fr never overflow. 13 | for v in num.as_mut().iter_mut().skip(3) { 14 | *v = 0; 15 | } 16 | num.as_mut().get_mut(3).map(|v| *v &= 0x00ff_ffff_ffff_ffff); 17 | F::from_repr(num) 18 | } 19 | 20 | pub fn digest_to_prime_field(input: &Digest) -> F { 21 | try_digest_to_prime_field(input).expect("failed to convert digest to prime field") 22 | } 23 | 24 | /// Return (g, x, y) s.t. 
a*x + b*y = g = gcd(a, b) 25 | pub fn xgcd<'a, F: PrimeField>( 26 | a: impl Into>, 27 | b: impl Into>, 28 | ) -> Option<(DensePolynomial, DensePolynomial, DensePolynomial)> { 29 | let mut a = a.into(); 30 | let mut b = b.into(); 31 | let mut x0 = DensePolynomial::::zero(); 32 | let mut x1 = DensePolynomial::::from_coefficients_vec(vec![F::one()]); 33 | let mut y0 = DensePolynomial::::from_coefficients_vec(vec![F::one()]); 34 | let mut y1 = DensePolynomial::::zero(); 35 | while !a.is_zero() { 36 | let (q, r) = b.divide_with_q_and_r(&a)?; 37 | b = a.into(); 38 | a = r.into(); 39 | let y1old = y1; 40 | y1 = &y0 - &(&q * &y1old); 41 | y0 = y1old; 42 | let x1old = x1; 43 | x1 = &x0 - &(&q * &x1old); 44 | x0 = x1old; 45 | } 46 | Some((b.into(), x0, y0)) 47 | } 48 | 49 | // Ref: https://github.com/blynn/pbc/blob/fbf4589036ce4f662e2d06905862c9e816cf9d08/arith/field.c#L251-L330 50 | pub struct FixedBaseCurvePow { 51 | table: Vec>, 52 | } 53 | 54 | impl FixedBaseCurvePow { 55 | const K: usize = 5; 56 | 57 | pub fn build(base: &G) -> Self { 58 | let bits = 59 | <::ScalarField as PrimeField>::Params::MODULUS_BITS as usize; 60 | let num_lookups = bits / Self::K + 1; 61 | let lookup_size = (1 << Self::K) - 1; 62 | let last_lookup_size = (1 << (bits - (num_lookups - 1) * Self::K)) - 1; 63 | 64 | let mut table: Vec> = Vec::with_capacity(num_lookups); 65 | 66 | let mut multiplier = *base; 67 | for i in 0..num_lookups { 68 | let table_size = if i == num_lookups - 1 { 69 | last_lookup_size 70 | } else { 71 | lookup_size 72 | }; 73 | let sub_table: Vec = unfold(multiplier, |last| { 74 | let ret = *last; 75 | last.add_assign(&multiplier); 76 | Some(ret) 77 | }) 78 | .take(table_size) 79 | .collect(); 80 | table.push(sub_table); 81 | if i != num_lookups - 1 { 82 | let last = *table.last().unwrap().last().unwrap(); 83 | multiplier.add_assign(&last); 84 | } 85 | } 86 | Self { table } 87 | } 88 | 89 | pub fn apply(&self, input: &::ScalarField) -> G { 90 | let mut res = G::zero(); 91 | let 
input_repr = input.into_repr(); 92 | let num_lookups = input_repr.num_bits() as usize / Self::K + 1; 93 | for i in 0..num_lookups { 94 | let mut word: usize = 0; 95 | for j in 0..Self::K { 96 | if input_repr.get_bit(i * Self::K + j) { 97 | word |= 1 << j; 98 | } 99 | } 100 | if word > 0 { 101 | res.add_assign(&self.table[i][word - 1]); 102 | } 103 | } 104 | res 105 | } 106 | } 107 | 108 | pub struct FixedBaseScalarPow { 109 | table: Vec>, 110 | } 111 | 112 | impl FixedBaseScalarPow { 113 | const K: usize = 8; 114 | 115 | pub fn build(base: &F) -> Self { 116 | let bits = ::Params::MODULUS_BITS as usize; 117 | let num_lookups = bits / Self::K + 1; 118 | let lookup_size = (1 << Self::K) - 1; 119 | let last_lookup_size = (1 << (bits - (num_lookups - 1) * Self::K)) - 1; 120 | 121 | let mut table: Vec> = Vec::with_capacity(num_lookups); 122 | 123 | let mut multiplier = *base; 124 | for i in 0..num_lookups { 125 | let table_size = if i == num_lookups - 1 { 126 | last_lookup_size 127 | } else { 128 | lookup_size 129 | }; 130 | let sub_table: Vec = unfold(multiplier, |last| { 131 | let ret = *last; 132 | last.mul_assign(&multiplier); 133 | Some(ret) 134 | }) 135 | .take(table_size) 136 | .collect(); 137 | table.push(sub_table); 138 | if i != num_lookups - 1 { 139 | let last = *table.last().unwrap().last().unwrap(); 140 | multiplier.mul_assign(&last); 141 | } 142 | } 143 | Self { table } 144 | } 145 | 146 | pub fn apply(&self, input: &F) -> F { 147 | let mut res = F::one(); 148 | let input_repr = input.into_repr(); 149 | let num_lookups = input_repr.num_bits() as usize / Self::K + 1; 150 | for i in 0..num_lookups { 151 | let mut word: usize = 0; 152 | for j in 0..Self::K { 153 | if input_repr.get_bit(i * Self::K + j) { 154 | word |= 1 << j; 155 | } 156 | } 157 | if word > 0 { 158 | res.mul_assign(&self.table[i][word - 1]); 159 | } 160 | } 161 | res 162 | } 163 | } 164 | 165 | #[cfg(test)] 166 | mod tests { 167 | use super::*; 168 | use ark_bls12_381::{Fr, G1Projective, 
G2Projective}; 169 | use ark_ff::Field; 170 | use ark_poly::Polynomial; 171 | use core::ops::MulAssign; 172 | use rand::Rng; 173 | 174 | #[test] 175 | fn test_xgcd() { 176 | let poly1 = DensePolynomial::from_coefficients_vec(vec![Fr::from(1u32), Fr::from(1u32)]); 177 | let poly2 = DensePolynomial::from_coefficients_vec(vec![Fr::from(2u32), Fr::from(1u32)]); 178 | let (g, x, y) = xgcd(&poly1, &poly2).unwrap(); 179 | assert_eq!(g.degree(), 0); 180 | let gcd = &(&poly1 * &x) + &(&poly2 * &y); 181 | assert_eq!(gcd, g); 182 | } 183 | 184 | #[test] 185 | fn test_pow_g1() { 186 | let g1p = FixedBaseCurvePow::build(&G1Projective::prime_subgroup_generator()); 187 | let mut rng = rand::thread_rng(); 188 | let num: Fr = rng.gen(); 189 | let mut expect = G1Projective::prime_subgroup_generator(); 190 | expect.mul_assign(num); 191 | assert_eq!(g1p.apply(&num), expect); 192 | } 193 | 194 | #[test] 195 | fn test_pow_g2() { 196 | let g2p = FixedBaseCurvePow::build(&G2Projective::prime_subgroup_generator()); 197 | let mut rng = rand::thread_rng(); 198 | let num: Fr = rng.gen(); 199 | let mut expect = G2Projective::prime_subgroup_generator(); 200 | expect.mul_assign(num); 201 | assert_eq!(g2p.apply(&num), expect); 202 | } 203 | 204 | #[test] 205 | fn test_pow_fr() { 206 | let mut rng = rand::thread_rng(); 207 | let base: Fr = rng.gen(); 208 | let num: Fr = rng.gen(); 209 | let frp = FixedBaseScalarPow::build(&base); 210 | let expect = base.pow(num.into_repr()); 211 | assert_eq!(frp.apply(&num), expect); 212 | } 213 | } 214 | -------------------------------------------------------------------------------- /vchain/src/chain/build.rs: -------------------------------------------------------------------------------- 1 | #![allow(clippy::cognitive_complexity)] 2 | 3 | use super::*; 4 | use crate::digest::{concat_digest, concat_digest_ref, Digest, Digestible}; 5 | use crate::set::MultiSet; 6 | use ark_ec::{AffineCurve, ProjectiveCurve}; 7 | use smallvec::smallvec; 8 | 9 | pub fn 
build_block<'a>( 10 | block_id: IdType, 11 | prev_hash: Digest, 12 | raw_objs: impl Iterator, 13 | chain: &mut (impl ReadInterface + WriteInterface), 14 | ) -> Result { 15 | debug!("build block #{}", block_id); 16 | 17 | let param = chain.get_parameter()?; 18 | let objs: Vec = raw_objs.map(|o| Object::create(o, ¶m)).collect(); 19 | for obj in &objs { 20 | chain.write_object(obj.clone())?; 21 | } 22 | 23 | let mut block_header = BlockHeader { 24 | block_id, 25 | prev_hash, 26 | ..Default::default() 27 | }; 28 | 29 | let mut block_data = if param.intra_index { 30 | let mut leaves: Vec = Vec::with_capacity(objs.len()); 31 | for obj in &objs { 32 | let node = IntraIndexLeaf::create( 33 | block_id, 34 | obj.set_data.clone(), 35 | obj.acc_value, 36 | obj.id, 37 | obj.to_digest(), 38 | ); 39 | leaves.push(node.clone()); 40 | chain.write_intra_index_node(IntraIndexNode::Leaf(Box::new(node)))?; 41 | } 42 | 43 | let mut non_leaves: Vec = Vec::with_capacity(leaves.len()); 44 | while !leaves.is_empty() { 45 | let left_idx = leaves 46 | .iter() 47 | .enumerate() 48 | .max_by_key(|(_i, n)| n.set_data.len()) 49 | .unwrap() 50 | .0; 51 | let left = leaves.remove(left_idx); 52 | 53 | if leaves.is_empty() { 54 | let node = IntraIndexNonLeaf::create( 55 | block_id, 56 | left.set_data.clone(), 57 | left.acc_value, 58 | smallvec![left.to_digest()], 59 | smallvec![left.id], 60 | ); 61 | non_leaves.push(node.clone()); 62 | chain.write_intra_index_node(IntraIndexNode::NonLeaf(Box::new(node)))?; 63 | break; 64 | } 65 | 66 | let mut right_idx = 0; 67 | let mut min_set = &left.set_data | &leaves[0].set_data; 68 | let mut max_sim = 69 | (&left.set_data & &leaves[0].set_data).len() as f64 / min_set.len() as f64; 70 | for (i, n) in leaves.iter().enumerate().skip(1) { 71 | let s = &left.set_data | &n.set_data; 72 | let sim = (&left.set_data & &n.set_data).len() as f64 / s.len() as f64; 73 | if sim > max_sim { 74 | max_sim = sim; 75 | min_set = s; 76 | right_idx = i; 77 | } 78 | } 79 | let right 
= leaves.remove(right_idx); 80 | let min_set_acc_value = multiset_to_g1(&min_set, ¶m); 81 | let node = IntraIndexNonLeaf::create( 82 | block_id, 83 | min_set, 84 | min_set_acc_value, 85 | smallvec![left.to_digest(), right.to_digest()], 86 | smallvec![left.id, right.id], 87 | ); 88 | non_leaves.push(node.clone()); 89 | chain.write_intra_index_node(IntraIndexNode::NonLeaf(Box::new(node)))?; 90 | } 91 | 92 | while non_leaves.len() > 1 { 93 | let mut new_non_leaves: Vec = Vec::with_capacity(non_leaves.len()); 94 | while non_leaves.len() > 1 { 95 | let left_idx = non_leaves 96 | .iter() 97 | .enumerate() 98 | .max_by_key(|(_i, n)| n.set_data.len()) 99 | .unwrap() 100 | .0; 101 | let left = non_leaves.remove(left_idx); 102 | 103 | let mut right_idx = 0; 104 | let mut min_set = &left.set_data | &non_leaves[0].set_data; 105 | let mut max_sim = 106 | (&left.set_data & &non_leaves[0].set_data).len() as f64 / min_set.len() as f64; 107 | for (i, n) in non_leaves.iter().enumerate().skip(1) { 108 | let s = &left.set_data | &n.set_data; 109 | let sim = (&left.set_data & &n.set_data).len() as f64 / s.len() as f64; 110 | if sim > max_sim { 111 | max_sim = sim; 112 | min_set = s; 113 | right_idx = i; 114 | } 115 | } 116 | let right = non_leaves.remove(right_idx); 117 | let min_set_acc_value = multiset_to_g1(&min_set, ¶m); 118 | let node = IntraIndexNonLeaf::create( 119 | block_id, 120 | min_set, 121 | min_set_acc_value, 122 | smallvec![left.to_digest(), right.to_digest()], 123 | smallvec![left.id, right.id], 124 | ); 125 | new_non_leaves.push(node.clone()); 126 | chain.write_intra_index_node(IntraIndexNode::NonLeaf(Box::new(node)))?; 127 | } 128 | non_leaves.append(&mut new_non_leaves); 129 | } 130 | 131 | // no objs in this block 132 | if non_leaves.is_empty() { 133 | let empty_set: MultiSet = MultiSet::new(); 134 | let acc_value = multiset_to_g1(&empty_set, ¶m); 135 | let node = 136 | IntraIndexNonLeaf::create(block_id, empty_set, acc_value, smallvec![], smallvec![]); 137 | 
non_leaves.push(node.clone()); 138 | chain.write_intra_index_node(IntraIndexNode::NonLeaf(Box::new(node)))?; 139 | } 140 | 141 | let root = non_leaves.pop().unwrap(); 142 | block_header.data_root = root.to_digest(); 143 | BlockData { 144 | block_id, 145 | data: IntraData::Index(root.id), 146 | set_data: root.set_data, 147 | acc_value: root.acc_value, 148 | skip_list_ids: Vec::new(), 149 | } 150 | } else { 151 | let mut hs: Vec = Vec::with_capacity(objs.len()); 152 | let mut set_data: MultiSet = MultiSet::new(); 153 | for obj in &objs { 154 | let h = concat_digest_ref([obj.acc_value.to_digest(), obj.to_digest()].iter()); 155 | hs.push(h); 156 | set_data = &set_data | &obj.set_data; 157 | } 158 | block_header.data_root = concat_digest(hs.into_iter()); 159 | let acc_value = multiset_to_g1(&set_data, ¶m); 160 | BlockData { 161 | block_id, 162 | data: IntraData::Flat(objs.iter().map(|o| o.id).collect::>()), 163 | set_data, 164 | acc_value, 165 | skip_list_ids: Vec::new(), 166 | } 167 | }; 168 | 169 | if param.skip_list_max_level > 0 && block_id >= 1 { 170 | let mut prev_blk_id = block_id - 1; 171 | let mut skipped_blk_num = 1; 172 | let mut set_data_to_skip = block_data.set_data.clone(); 173 | let mut acc_value_to_skip = block_data.acc_value.into_projective(); 174 | let mut skip_list_ids: Vec = Vec::with_capacity(param.skip_list_max_level as usize); 175 | let mut skip_list_digests: Vec = 176 | Vec::with_capacity(param.skip_list_max_level as usize); 177 | let mut hash_to_skip = Digest::default(); 178 | 179 | 'outer: for level in 0..param.skip_list_max_level { 180 | let blk_num = skipped_blocks_num(level); 181 | while skipped_blk_num < blk_num { 182 | if prev_blk_id == 0 { 183 | break 'outer; 184 | } 185 | let prev_blk_header = match chain.read_block_header(prev_blk_id) { 186 | Ok(header) => header, 187 | _ => break 'outer, 188 | }; 189 | hash_to_skip = prev_blk_header.prev_hash; 190 | let prev_blk = chain.read_block_data(prev_blk_id)?; 191 | match param.acc_type { 192 | 
acc::Type::ACC1 => { 193 | set_data_to_skip = &set_data_to_skip | &prev_blk.set_data; 194 | } 195 | acc::Type::ACC2 => { 196 | set_data_to_skip = &set_data_to_skip + &prev_blk.set_data; 197 | acc_value_to_skip.add_assign_mixed(&prev_blk.acc_value); 198 | } 199 | } 200 | 201 | skipped_blk_num += 1; 202 | prev_blk_id -= 1; 203 | } 204 | 205 | let acc_value_to_skip = match param.acc_type { 206 | acc::Type::ACC1 => multiset_to_g1(&set_data_to_skip, ¶m), 207 | acc::Type::ACC2 => acc_value_to_skip.into_affine(), 208 | }; 209 | 210 | let skip_node = SkipListNode::create( 211 | block_id, 212 | level, 213 | set_data_to_skip.clone(), 214 | acc_value_to_skip, 215 | hash_to_skip, 216 | ); 217 | skip_list_ids.push(skip_node.id); 218 | skip_list_digests.push(skip_node.digest); 219 | chain.write_skip_list_node(skip_node)?; 220 | } 221 | 222 | if !skip_list_ids.is_empty() { 223 | block_header.skip_list_root = Some(concat_digest(skip_list_digests.into_iter())); 224 | block_data.skip_list_ids = skip_list_ids; 225 | } 226 | } 227 | 228 | chain.write_block_header(block_header)?; 229 | chain.write_block_data(block_data)?; 230 | 231 | Ok(block_header) 232 | } 233 | -------------------------------------------------------------------------------- /vchain/src/chain/historical_query.rs: -------------------------------------------------------------------------------- 1 | #![allow(clippy::cognitive_complexity)] 2 | 3 | use super::*; 4 | use crate::acc::{AccumulatorProof, DigestSet}; 5 | use anyhow::{bail, Result}; 6 | use std::collections::VecDeque; 7 | 8 | pub fn historical_query( 9 | q: &Query, 10 | chain: &impl ReadInterface, 11 | ) -> Result> { 12 | info!("process query {:?}", q); 13 | let param = chain.get_parameter()?; 14 | let cpu_timer = howlong::ProcessCPUTimer::new(); 15 | let timer = howlong::HighResolutionTimer::new(); 16 | 17 | let query_exp = q.to_bool_exp(¶m.v_bit_len); 18 | let mut res = OverallResult { 19 | res_objs: ResultObjs::new(), 20 | res_vo: ResultVO::::new(), 21 | 
query: q.clone(),
        query_exp_set: query_exp.inner.clone(),
        query_time_in_ms: 0,
        v_bit_len: param.v_bit_len.clone(),
        vo_size: 0,
        vo_stats: VOStatistic::default(),
    };
    // Pre-hash each conjunct of the query once; reused for every proof.
    let query_exp_digest_set = query_exp
        .inner
        .iter()
        .map(|s| DigestSet::new(s))
        .collect::<Vec<_>>();

    // Scan blocks backwards so skip lists can jump over mismatching ranges.
    let mut block_id = q.end_block;
    while block_id >= q.start_block {
        let blk_data = chain.read_block_data(block_id)?;
        let blk_header = chain.read_block_header(block_id)?;

        if !blk_data.skip_list_ids.is_empty() {
            let mut vo_skip = vo::SkipListRoot {
                block_id,
                blk_prev_hash: blk_header.prev_hash,
                blk_data_root: blk_header.data_root,
                sub_nodes: Vec::new(),
            };
            let mut jmp_level: Option<SkipLstLvlType> = None;

            // Try the largest jump first (highest level).
            for (lvl, &skip_list_id) in blk_data.skip_list_ids.iter().enumerate().rev() {
                let jmp_node = chain.read_skip_list_node(skip_list_id)?;
                if jmp_level.is_some()
                    || q.start_block + skipped_blocks_num(lvl as SkipLstLvlType) > block_id
                {
                    // Already jumping, or this jump would overshoot start_block.
                    vo_skip
                        .sub_nodes
                        .push(vo::NoJumpNode::create(&jmp_node).into_jump_or_no_jump_node());
                } else {
                    let mismatch_idx = query_exp.mismatch_idx(&jmp_node.set_data);
                    if let Some(mismatch_idx) = mismatch_idx {
                        // The whole skipped range is disjoint from one
                        // conjunct: prove it and jump over the range.
                        jmp_level = Some(lvl as SkipLstLvlType);
                        let proof_idx = res.res_vo.vo_acc.add_proof(
                            mismatch_idx,
                            &query_exp_digest_set[mismatch_idx],
                            &DigestSet::new(&jmp_node.set_data),
                            &jmp_node.acc_value,
                        )?;
                        vo_skip.sub_nodes.push(
                            vo::JumpNode::create(&jmp_node, proof_idx).into_jump_or_no_jump_node(),
                        );
                    } else {
                        vo_skip
                            .sub_nodes
                            .push(vo::NoJumpNode::create(&jmp_node).into_jump_or_no_jump_node());
                    }
                }
            }

            if let Some(jmp_level) = jmp_level {
                vo_skip.sub_nodes.reverse();
                res.res_vo.vo_t.0.push(vo_skip.into_result_vo_node());
                block_id -= skipped_blocks_num(jmp_level);
                continue;
            }
        } // skip list

        if param.intra_index {
            query_block_intra_index(
                &query_exp,
                &query_exp_digest_set,
                &blk_header,
                &blk_data,
                chain,
                &mut res,
            )?;
        } else {
            query_block_no_intra_index(
                &query_exp,
                &query_exp_digest_set,
                &blk_header,
                &blk_data,
                chain,
                &mut res,
            )?;
        }

        block_id -= 1;
    }

    // VO nodes were collected newest-first; present them oldest-first.
    res.res_vo.vo_t.0.reverse();
    res.query_time_in_ms = timer.elapsed().as_millis() as u64;
    res.compute_stats()?;
    info!("used time: {}", cpu_timer.elapsed());
    Ok(res)
}

/// Queries one block through its intra-block index, pushing matched objects
/// and the block's VO subtree into `res`.
fn query_block_intra_index<AP: AccumulatorProof>(
    query_exp: &BoolExp<SetElementType>,
    query_exp_digest_set: &[DigestSet],
    block_header: &BlockHeader,
    block_data: &BlockData,
    chain: &impl ReadInterface,
    res: &mut OverallResult<AP>,
) -> Result<()> {
    let mut vo_blk = vo::BlkNode {
        block_id: block_header.block_id,
        skip_list_root: block_header.skip_list_root,
        sub_node: vo::IntraNode::Empty,
    };

    let root = match &block_data.data {
        IntraData::Index(id) => match chain.read_intra_index_node(*id)? {
            IntraIndexNode::NonLeaf(n) => n,
            IntraIndexNode::Leaf(_) => bail!("invalid data"),
        },
        _ => bail!("invalid data"),
    };

    // BFS over the intra index. Raw pointers let a queued child fill its
    // reserved slot in the parent VO node after the parent has been built.
    // SAFETY-NOTE(review): relies on the pointed-to slots not moving while
    // queued — the Vec slots are pushed before their pointers are taken and
    // the parent vectors are not resized afterwards; confirm against the
    // original file.
    let mut intra_index_q: VecDeque<(Box<IntraIndexNonLeaf>, *mut vo::IntraNode)> =
        VecDeque::new();
    intra_index_q.push_back((root, &mut vo_blk.sub_node as *mut vo::IntraNode));
    while let Some((node, ptr)) = intra_index_q.pop_front() {
        let mismatch_idx = query_exp.mismatch_idx(&node.set_data);
        if let Some(mismatch_idx) = mismatch_idx {
            // Entire subtree disjoint from one conjunct: prune with a proof.
            let proof_idx = res.res_vo.vo_acc.add_proof(
                mismatch_idx,
                &query_exp_digest_set[mismatch_idx],
                &DigestSet::new(&node.set_data),
                &node.acc_value,
            )?;
            unsafe {
                *ptr = vo::NoMatchIntraNonLeaf::create(&node, proof_idx).into_intra_node();
            }
        } else {
            let intra_non_leaf = unsafe {
                *ptr = vo::IntraNonLeaf::create(&node).into_intra_node();
                match &mut *ptr {
                    vo::IntraNode::IntraNonLeaf(x) => x,
                    _ => unreachable!(),
                }
            };
            for &child_id in &node.child_ids {
                match chain.read_intra_index_node(child_id)? {
                    IntraIndexNode::NonLeaf(n) => {
                        // Reserve the child's slot, then queue it for filling.
                        intra_non_leaf.children.push(vo::IntraNode::Empty);
                        intra_index_q.push_back((
                            n,
                            intra_non_leaf.children.last_mut().unwrap() as *mut vo::IntraNode,
                        ));
                    }
                    IntraIndexNode::Leaf(n) => {
                        let mismatch_idx = query_exp.mismatch_idx(&n.set_data);
                        if let Some(mismatch_idx) = mismatch_idx {
                            let proof_idx = res.res_vo.vo_acc.add_proof(
                                mismatch_idx,
                                &query_exp_digest_set[mismatch_idx],
                                &DigestSet::new(&n.set_data),
                                &n.acc_value,
                            )?;
                            intra_non_leaf.children.push(
                                vo::NoMatchIntraLeaf::create(&n, proof_idx).into_intra_node(),
                            );
                        } else {
                            let obj = chain.read_object(n.obj_id)?;
                            res.res_objs.insert(obj);
                            intra_non_leaf
                                .children
                                .push(vo::MatchIntraLeaf::create(&n).into_intra_node());
                        }
                    }
                }
            }
        }
    }

    res.res_vo.vo_t.0.push(vo_blk.into_result_vo_node());
    Ok(())
}

/// Queries one block stored flat (no intra index): every object is checked
/// individually, with a disjointness proof for each non-match.
fn query_block_no_intra_index<AP: AccumulatorProof>(
    query_exp: &BoolExp<SetElementType>,
    query_exp_digest_set: &[DigestSet],
    block_header: &BlockHeader,
    block_data: &BlockData,
    chain: &impl ReadInterface,
    res: &mut OverallResult<AP>,
) -> Result<()> {
    let mut vo_blk = vo::FlatBlkNode {
        block_id: block_header.block_id,
        skip_list_root: block_header.skip_list_root,
        sub_nodes: Vec::new(),
    };

    let objs = match &block_data.data {
        IntraData::Flat(ids) => ids
            .iter()
            .map(|&id| chain.read_object(id))
            .collect::<Result<Vec<_>>>()?,
        _ => bail!("invalid data"),
    };

    for obj in &objs {
        let mismatch_idx = query_exp.mismatch_idx(&obj.set_data);
        if let Some(mismatch_idx) = mismatch_idx {
            let proof_idx = res.res_vo.vo_acc.add_proof(
                mismatch_idx,
                &query_exp_digest_set[mismatch_idx],
                &DigestSet::new(&obj.set_data),
                &obj.acc_value,
            )?;
            vo_blk
                .sub_nodes
                .push(vo::NoMatchObjNode::create(obj, proof_idx).into_obj_node());
        } else {
            vo_blk
                .sub_nodes
                .push(vo::MatchObjNode::create(obj).into_obj_node());
            res.res_objs.insert(obj.clone());
        }
    }

    res.res_vo.vo_t.0.push(vo_blk.into_result_vo_node());
    Ok(())
}

// --------------------------------------------------------------------------
// vchain/src/chain/index.rs
// --------------------------------------------------------------------------

use super::{IdType, SetElementType, SkipLstLvlType};
use crate::acc::G1Affine;
use crate::digest::{blake2, concat_digest_ref, Digest, Digestible};
use crate::set::MultiSet;
use core::sync::atomic::{AtomicU64, Ordering};
use serde::{Deserialize, Serialize};
use smallvec::SmallVec;

// Global id allocator shared by intra-index and skip-list nodes.
static INDEX_ID_CNT: AtomicU64 = AtomicU64::new(0);

#[derive(Debug, Clone, Eq, PartialEq, Serialize, Deserialize)]
pub enum IntraIndexNode {
    NonLeaf(Box<IntraIndexNonLeaf>),
    Leaf(Box<IntraIndexLeaf>),
}

impl IntraIndexNode {
    pub fn id(&self) -> IdType {
        match self {
            Self::NonLeaf(x) => x.id,
            Self::Leaf(x) => x.id,
        }
    }
    pub fn block_id(&self) -> IdType {
        match self {
            Self::NonLeaf(x) => x.block_id,
            Self::Leaf(x) => x.block_id,
        }
    }
    pub fn set_data(&self) -> &MultiSet<SetElementType> {
        match self {
            Self::NonLeaf(x) => &x.set_data,
            Self::Leaf(x) => &x.set_data,
        }
    }
    pub fn acc_value(&self) -> &G1Affine {
        match self {
            Self::NonLeaf(x) => &x.acc_value,
            Self::Leaf(x) => &x.acc_value,
        }
    }
}

#[derive(Debug, Clone, Eq, PartialEq, Serialize, Deserialize)]
pub struct IntraIndexNonLeaf {
    pub id: IdType,
    pub block_id: IdType,
    pub set_data: MultiSet<SetElementType>,
    #[serde(with = "crate::acc::serde_impl")]
    pub acc_value: G1Affine,
    pub child_hash_digest: Digest,
    pub child_hashes: SmallVec<[Digest; 2]>,
    pub child_ids:
SmallVec<[IdType; 2]>,
}

impl IntraIndexNonLeaf {
    /// Creates a non-leaf index node with a fresh globally-unique id; the
    /// children's hashes are folded into `child_hash_digest` eagerly.
    pub fn create(
        block_id: IdType,
        set_data: MultiSet<SetElementType>,
        acc_value: G1Affine,
        child_hashes: SmallVec<[Digest; 2]>,
        child_ids: SmallVec<[IdType; 2]>,
    ) -> Self {
        let id = INDEX_ID_CNT.fetch_add(1, Ordering::SeqCst) as IdType;
        Self {
            id,
            block_id,
            set_data,
            acc_value,
            child_hash_digest: concat_digest_ref(child_hashes.iter()),
            child_hashes,
            child_ids,
        }
    }
}

impl Digestible for IntraIndexNonLeaf {
    fn to_digest(&self) -> Digest {
        // Commit to the accumulator value and the children's combined hash.
        concat_digest_ref([self.acc_value.to_digest(), self.child_hash_digest].iter())
    }
}

#[derive(Debug, Clone, Eq, PartialEq, Serialize, Deserialize)]
pub struct IntraIndexLeaf {
    pub id: IdType,
    pub block_id: IdType,
    pub set_data: MultiSet<SetElementType>,
    #[serde(with = "crate::acc::serde_impl")]
    pub acc_value: G1Affine,
    pub obj_id: IdType,
    pub obj_hash: Digest,
}

impl IntraIndexLeaf {
    /// Creates a leaf index node for object `obj_id` with a fresh global id.
    pub fn create(
        block_id: IdType,
        set_data: MultiSet<SetElementType>,
        acc_value: G1Affine,
        obj_id: IdType,
        obj_hash: Digest,
    ) -> Self {
        let id = INDEX_ID_CNT.fetch_add(1, Ordering::SeqCst) as IdType;
        Self {
            id,
            block_id,
            set_data,
            acc_value,
            obj_id,
            obj_hash,
        }
    }
}

impl Digestible for IntraIndexLeaf {
    fn to_digest(&self) -> Digest {
        concat_digest_ref([self.acc_value.to_digest(), self.obj_hash].iter())
    }
}

#[derive(Debug, Clone, Eq, PartialEq, Serialize, Deserialize)]
pub struct SkipListNode {
    pub id: IdType,
    pub block_id: IdType,
    pub level: SkipLstLvlType,
    pub set_data: MultiSet<SetElementType>,
    #[serde(with = "crate::acc::serde_impl")]
    pub acc_value: G1Affine,
    pub pre_skipped_hash: Digest,
    pub digest: Digest,
}

impl SkipListNode {
    /// Creates a skip-list node; its digest commits to the accumulator value
    /// and the hash of the block preceding the skipped range.
    pub fn create(
        block_id: IdType,
        level: SkipLstLvlType,
        set_data: MultiSet<SetElementType>,
        acc_value: G1Affine,
        pre_skipped_hash: Digest,
    ) -> Self {
        let id = INDEX_ID_CNT.fetch_add(1, Ordering::SeqCst) as IdType;
        let digest = concat_digest_ref([acc_value.to_digest(), pre_skipped_hash].iter());
        Self {
            id,
            block_id,
            level,
            set_data,
            acc_value,
            pre_skipped_hash,
            digest,
        }
    }
}

#[derive(Debug, Clone, Eq, PartialEq, Serialize, Deserialize)]
pub enum IntraData {
    // List of object ids
    Flat(Vec<IdType>),
    // IntraIndexNode root id
    Index(IdType),
}

#[derive(Debug, Clone, Eq, PartialEq, Serialize, Deserialize)]
pub struct BlockData {
    pub block_id: IdType,
    pub data: IntraData,
    pub set_data: MultiSet<SetElementType>,
    #[serde(with = "crate::acc::serde_impl")]
    pub acc_value: G1Affine,
    pub skip_list_ids: Vec<IdType>,
}

#[derive(Debug, Clone, Copy, Eq, PartialEq, Default, Serialize, Deserialize)]
pub struct BlockHeader {
    pub block_id: IdType,
    pub prev_hash: Digest,
    pub data_root: Digest,
    pub skip_list_root: Option<Digest>,
}

impl Digestible for BlockHeader {
    fn to_digest(&self) -> Digest {
        let mut state = blake2().to_state();
        state.update(&self.block_id.to_le_bytes());
        state.update(&self.prev_hash.0);
        state.update(&self.data_root.0);
        // The skip-list root only contributes when present, so headers
        // without one hash identically to the pre-skip-list format.
        if let Some(d) = self.skip_list_root {
            state.update(&d.0);
        }
        Digest::from(state.finalize())
    }
}

// --------------------------------------------------------------------------
// vchain/src/chain/mod.rs
// --------------------------------------------------------------------------

use crate::acc;
use anyhow::Result;
use serde::{Deserialize, Serialize};

pub mod utils;
pub use utils::*;

pub mod object;
pub use object::*;
pub mod index; 12 | pub use index::*; 13 | 14 | pub mod build; 15 | pub use build::*; 16 | 17 | pub mod query; 18 | pub use query::*; 19 | 20 | pub mod query_result; 21 | pub use query_result::*; 22 | 23 | pub mod historical_query; 24 | pub use historical_query::*; 25 | 26 | pub type IdType = u32; 27 | pub type SkipLstLvlType = u8; 28 | 29 | #[derive(Debug, Clone, Eq, PartialEq, Hash, Serialize, Deserialize)] 30 | pub struct Parameter { 31 | pub v_bit_len: Vec, 32 | pub acc_type: acc::Type, 33 | pub use_sk: bool, // only for debug purpose 34 | pub intra_index: bool, 35 | pub skip_list_max_level: SkipLstLvlType, 36 | } 37 | 38 | #[async_trait::async_trait] 39 | pub trait LightNodeInterface { 40 | async fn lightnode_get_parameter(&self) -> Result; 41 | async fn lightnode_read_block_header(&self, id: IdType) -> Result; 42 | } 43 | 44 | pub trait ReadInterface { 45 | fn get_parameter(&self) -> Result; 46 | fn read_block_header(&self, id: IdType) -> Result; 47 | fn read_block_data(&self, id: IdType) -> Result; 48 | fn read_intra_index_node(&self, id: IdType) -> Result; 49 | fn read_skip_list_node(&self, id: IdType) -> Result; 50 | fn read_object(&self, id: IdType) -> Result; 51 | } 52 | 53 | pub trait WriteInterface { 54 | fn set_parameter(&mut self, param: Parameter) -> Result<()>; 55 | fn write_block_header(&mut self, header: BlockHeader) -> Result<()>; 56 | fn write_block_data(&mut self, data: BlockData) -> Result<()>; 57 | fn write_intra_index_node(&mut self, node: IntraIndexNode) -> Result<()>; 58 | fn write_skip_list_node(&mut self, node: SkipListNode) -> Result<()>; 59 | fn write_object(&mut self, obj: Object) -> Result<()>; 60 | } 61 | 62 | #[cfg(test)] 63 | mod tests; 64 | -------------------------------------------------------------------------------- /vchain/src/chain/object.rs: -------------------------------------------------------------------------------- 1 | use super::{multiset_to_g1, IdType, Parameter}; 2 | use crate::acc::G1Affine; 3 | use 
crate::digest::{blake2, Digest, Digestible}; 4 | use crate::set::MultiSet; 5 | use core::sync::atomic::{AtomicU64, Ordering}; 6 | use rayon::prelude::*; 7 | use serde::{Deserialize, Serialize}; 8 | use std::collections::HashSet; 9 | 10 | static OBJECT_ID_CNT: AtomicU64 = AtomicU64::new(0); 11 | 12 | #[derive(Debug, Default, Clone, Eq, PartialEq, Serialize, Deserialize)] 13 | pub struct RawObject { 14 | pub block_id: IdType, 15 | pub v_data: Vec, 16 | pub w_data: HashSet, 17 | } 18 | 19 | #[derive(Debug, Clone, Eq, PartialEq, Serialize, Deserialize)] 20 | pub struct Object { 21 | pub id: IdType, 22 | pub block_id: IdType, 23 | pub v_data: Vec, 24 | pub w_data: HashSet, 25 | pub set_data: MultiSet, 26 | #[serde(with = "crate::acc::serde_impl")] 27 | pub acc_value: G1Affine, 28 | } 29 | 30 | impl Object { 31 | pub fn create(obj: &RawObject, param: &Parameter) -> Self { 32 | let id = OBJECT_ID_CNT.fetch_add(1, Ordering::SeqCst) as IdType; 33 | let set_v = v_data_to_set(&obj.v_data, ¶m.v_bit_len); 34 | let set_w = obj 35 | .w_data 36 | .iter() 37 | .map(|w| SetElementType::W(w.clone())) 38 | .collect::>(); 39 | let set_data = &set_v + &set_w; 40 | let acc_value = multiset_to_g1(&set_data, param); 41 | Self { 42 | id, 43 | block_id: obj.block_id, 44 | v_data: obj.v_data.clone(), 45 | w_data: obj.w_data.clone(), 46 | set_data, 47 | acc_value, 48 | } 49 | } 50 | } 51 | 52 | impl Digestible for Object { 53 | fn to_digest(&self) -> Digest { 54 | let mut state = blake2().to_state(); 55 | state.update(&self.id.to_le_bytes()); 56 | state.update(&self.block_id.to_le_bytes()); 57 | for v in &self.v_data { 58 | state.update(&v.to_le_bytes()); 59 | } 60 | let mut ws: Vec<_> = self.w_data.iter().collect(); 61 | ws.par_sort_unstable(); 62 | for w in &ws { 63 | state.update(w.as_bytes()); 64 | } 65 | Digest::from(state.finalize()) 66 | } 67 | } 68 | 69 | #[derive(Debug, Clone, Eq, PartialEq, Hash, Serialize, Deserialize)] 70 | pub enum SetElementType { 71 | // To transform V to range: 
// [val, val + ~mask & (mask - 1)]
    V { dim: u32, val: u32, mask: u32 },
    W(String),
}

impl Digestible for SetElementType {
    fn to_digest(&self) -> Digest {
        match self {
            SetElementType::V { dim, val, mask } => {
                let mut state = blake2().to_state();
                state.update(&dim.to_le_bytes());
                state.update(&val.to_le_bytes());
                state.update(&mask.to_le_bytes());
                Digest::from(state.finalize())
            }
            SetElementType::W(s) => s.to_digest(),
        }
    }
}

/// Expands each numeric attribute into its binary-prefix elements: one `V`
/// element per prefix length from 1 up to `bit_len[i]` bits, so any range
/// query can be answered by a union of prefixes.
pub fn v_data_to_set(input: &[u32], bit_len: &[u8]) -> MultiSet<SetElementType> {
    input
        .iter()
        .enumerate()
        .flat_map(|(i, &v)| {
            // m keeps only the low bit_len[i] bits of the dimension.
            let m: u32 = !(0xffff_ffff << bit_len[i]);
            (0..bit_len[i]).map(move |j| {
                let mask = (0xffff_ffff << j) & m;
                let val = v & mask;
                SetElementType::V {
                    dim: i as u32,
                    val,
                    mask,
                }
            })
        })
        .collect()
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_v_data_to_set() {
        use SetElementType::V;
        let res = v_data_to_set(&[4, 2], &[3, 3]);
        #[rustfmt::skip]
        let expect = MultiSet::from_vec(vec![
            V { dim: 0, val: 0b100, mask: 0b100 },
            V { dim: 0, val: 0b100, mask: 0b110 },
            V { dim: 0, val: 0b100, mask: 0b111 },
            V { dim: 1, val: 0b000, mask: 0b100 },
            V { dim: 1, val: 0b010, mask: 0b110 },
            V { dim: 1, val: 0b010, mask: 0b111 },
        ]);
        assert_eq!(res, expect)
    }
}

// --------------------------------------------------------------------------
// vchain/src/chain/query.rs
// --------------------------------------------------------------------------

use super::{IdType, SetElementType};
use crate::set::{MultiSet, SetElement};
use core::iter::FromIterator;
use core::ops::Deref;
use serde::{Deserialize, Serialize};
use std::collections::{HashSet, VecDeque};

#[derive(Debug, Default, Clone, Eq, PartialEq,
Serialize, Deserialize)]
pub struct BoolExp<E: SetElement> {
    // CNF: a query matches iff every conjunct intersects the object's set.
    // NOTE(review): the `E: SetElement` parameter was stripped in the pasted
    // source; it is reconstructed from the String-based tests below —
    // confirm against the original file.
    pub(crate) inner: Vec<MultiSet<E>>,
}

impl<E: SetElement> BoolExp<E> {
    pub fn new() -> Self {
        Self { inner: Vec::new() }
    }

    pub fn from_vec(input: Vec<MultiSet<E>>) -> Self {
        Self::from_iter(input.into_iter())
    }

    /// Whether every conjunct intersects `set`.
    pub fn is_match(&self, set: &MultiSet<E>) -> bool {
        self.mismatch_idx(set).is_none()
    }

    /// Index of the first conjunct disjoint from `set`, if any.
    pub fn mismatch_idx(&self, set: &MultiSet<E>) -> Option<usize> {
        self.iter().position(|s| !s.is_intersected_with(set))
    }
}

impl<E: SetElement> Deref for BoolExp<E> {
    type Target = Vec<MultiSet<E>>;

    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}

impl<E: SetElement> FromIterator<MultiSet<E>> for BoolExp<E> {
    fn from_iter<I: IntoIterator<Item = MultiSet<E>>>(iter: I) -> Self {
        Self {
            inner: iter.into_iter().collect::<Vec<_>>(),
        }
    }
}

// Per-dimension [lower, upper] bounds; None disables that dimension.
#[derive(Debug, Default, Clone, Eq, PartialEq, Hash, Serialize, Deserialize)]
pub struct Range(pub(crate) [Vec<Option<u32>>; 2]);

impl Range {
    /// Converts the per-dimension bounds into a CNF expression: each bounded
    /// dimension becomes one conjunct holding the canonical binary-prefix
    /// decomposition of its [l, r] interval.
    pub fn to_bool_exp(&self, bit_len: &[u8]) -> BoolExp<SetElementType> {
        let mut exp = BoolExp::new();
        for (i, range) in self[0].iter().zip(self[1].iter()).enumerate() {
            let (l, r) = match (range.0, range.1) {
                (Some(x), Some(y)) => (*x, *y),
                _ => continue,
            };

            let mut set_data = MultiSet::<SetElementType>::new();

            // BFS over prefix intervals, splitting until fully inside [l, r].
            let mut queue: VecDeque<(u32, u32)> = VecDeque::new();
            queue.push_back((0, 0));

            while let Some((mut mask, left)) = queue.pop_front() {
                let mask_inv = !mask;
                let right = left | mask_inv;

                if l <= left && right <= r {
                    // Prefix fully covered: emit it (mask clipped to the
                    // dimension's bit width so it matches v_data_to_set).
                    if bit_len[i] < 32 {
                        mask &= !(0xffff_ffff << bit_len[i]);
                    }
                    set_data.inner.insert(
                        SetElementType::V {
                            dim: i as u32,
                            val: left,
                            mask,
                        },
                        1,
                    );
                    continue;
                }

                if right < l || r < left {
                    // Disjoint from the query range: drop.
                    continue;
                }

                // Partial overlap: split into the two half-length prefixes.
                let new_mask = !(mask_inv >> 1);
                queue.push_back((new_mask, left));
                queue.push_back((new_mask, left | (new_mask & mask_inv)));
            }

            exp.inner.push(set_data);
        }
        exp
    }
}

impl Deref for Range {
    type Target = [Vec<Option<u32>>; 2];

    fn deref(&self) -> &Self::Target {
        &self.0
    }
}

#[derive(Debug, Default, Clone, Eq, PartialEq, Serialize, Deserialize)]
pub struct Query {
    pub start_block: IdType,
    pub end_block: IdType,
    #[serde(rename = "range")]
    pub q_range: Option<Range>,
    #[serde(rename = "bool")]
    pub q_bool: Option<Vec<HashSet<String>>>,
}

impl Query {
    /// Combines the numeric range predicate and the boolean keyword
    /// predicate (CNF of keyword disjunctions) into one expression.
    pub fn to_bool_exp(&self, bit_len: &[u8]) -> BoolExp<SetElementType> {
        let mut exp = BoolExp::new();
        if let Some(q_range) = &self.q_range {
            exp.inner
                .extend(q_range.to_bool_exp(bit_len).iter().cloned());
        }
        if let Some(q_bool) = &self.q_bool {
            for sub_exp in q_bool.iter() {
                exp.inner.push(MultiSet::from_iter(
                    sub_exp.iter().map(|w| SetElementType::W(w.clone())),
                ));
            }
        }
        exp
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use serde_json::json;

    #[test]
    fn test_boolexp() {
        let exp = BoolExp::from_vec(vec![
            MultiSet::from_vec(vec!["a".to_owned(), "b".to_owned()]),
            MultiSet::from_vec(vec!["c".to_owned()]),
        ]);
        let set1 = MultiSet::from_vec(vec!["a".to_owned(), "b".to_owned()]);
        let set2 = MultiSet::from_vec(vec!["a".to_owned(), "b".to_owned(), "c".to_owned()]);
        assert_eq!(exp.mismatch_idx(&set1), Some(1));
        assert!(exp.is_match(&set2));
    }

    #[test]
    fn test_range() {
        use SetElementType::V;

        let range = Range([vec![Some(0), None, Some(3)], vec![Some(6), None, Some(4)]]);
        #[rustfmt::skip]
        let expect = BoolExp::from_vec(vec![
            MultiSet::from_vec(vec![
                V { dim: 0, val: 0b000, mask: 0b100 },
                V { dim: 0, val: 0b100, mask: 0b110 },
                V { dim: 0, val: 0b110, mask: 0b111 },
            ]),
            MultiSet::from_vec(vec![
                V { dim: 2, val: 0b011, mask: 0b111 },
                V { dim: 2, val: 0b100, mask: 0b111 },
            ]),
        ]);
        assert_eq!(range.to_bool_exp(&[3, 3, 3]), expect);
    }

    #[test]
    fn test_query() {
        let data = json!({
            "start_block": 1,
            "end_block": 2,
            "range": [
                [0, null, 3],
                [6, null, 4],
            ],
            "bool": [
                ["a"],
                ["b"],
            ],
        });
        let expect = Query {
            start_block: 1,
            end_block: 2,
            q_range: Some(Range([
                vec![Some(0), None, Some(3)],
                vec![Some(6), None, Some(4)],
            ])),
            q_bool: Some(vec![
                ["a".to_owned()].iter().cloned().collect::<HashSet<_>>(),
                ["b".to_owned()].iter().cloned().collect::<HashSet<_>>(),
            ]),
        };
        assert_eq!(
            serde_json::from_value::<Query>(data.clone()).unwrap(),
            expect
        );
        assert_eq!(data, serde_json::to_value(expect).unwrap());
    }
}

// --------------------------------------------------------------------------
// vchain/src/chain/query_result.rs
// --------------------------------------------------------------------------

use super::*;
use crate::acc::{self, Accumulator, AccumulatorProof};
use crate::acc::{G1Affine, G1Projective};
use crate::digest::{blake2, concat_digest, concat_digest_ref, Digest, Digestible};
use crate::set::MultiSet;
use ark_ec::ProjectiveCurve;
use ark_ff::Zero;
use core::ops::Deref;
use futures::join;
use howlong::Duration;
use serde::{Deserialize, Serialize};
use smallvec::SmallVec;
use std::collections::HashMap;

#[derive(Debug, Clone, Eq, PartialEq, Serialize, Deserialize)]
pub enum InvalidReason {
    InvalidSetIdx(usize),
    InvalidAccIdx(AccProofIdxType),
    InvalidAccProof(AccProofIdxType),
    InvalidMatchObj(IdType),
    InvalidHash,
}

// Collects all verification failures; empty means the VO verified.
#[derive(Debug, Default, Clone, Eq, PartialEq, Serialize, Deserialize)]
pub struct VerifyResult(Vec<InvalidReason>);

impl VerifyResult {
pub fn add(&mut self, reason: InvalidReason) { 29 | self.0.push(reason); 30 | } 31 | 32 | pub fn append(&mut self, mut other: Self) { 33 | self.0.append(&mut other.0); 34 | } 35 | 36 | pub fn is_ok(&self) -> bool { 37 | self.0.is_empty() 38 | } 39 | } 40 | 41 | #[derive(Debug, Default, Clone, Eq, PartialEq, Serialize, Deserialize)] 42 | pub struct ResultObjs(pub HashMap); 43 | 44 | impl ResultObjs { 45 | pub fn new() -> Self { 46 | Self(HashMap::new()) 47 | } 48 | 49 | pub fn insert(&mut self, obj: Object) { 50 | self.0.insert(obj.id, obj); 51 | } 52 | } 53 | 54 | impl Deref for ResultObjs { 55 | type Target = HashMap; 56 | 57 | fn deref(&self) -> &Self::Target { 58 | &self.0 59 | } 60 | } 61 | 62 | #[derive(Debug, Default, Clone, Eq, PartialEq, Serialize, Deserialize)] 63 | pub struct ObjAcc(#[serde(with = "crate::acc::serde_impl")] pub G1Affine); 64 | 65 | // set_idx, [ acc_idx / proof_idx ] 66 | // query_set = query.to_bool_exp(...)[set_idx] 67 | pub type AccProofIdxType = (usize, usize); 68 | 69 | #[derive(Debug, Default, Clone, Eq, PartialEq, Serialize, Deserialize)] 70 | pub struct ResultVOAcc { 71 | // 72 | pub proofs: HashMap>, 73 | // 74 | pub object_accs: HashMap>, 75 | } 76 | 77 | impl ResultVOAcc { 78 | pub fn new() -> Self { 79 | Self { 80 | proofs: HashMap::new(), 81 | object_accs: HashMap::new(), 82 | } 83 | } 84 | 85 | pub fn get_object_acc(&self, proof_idx: AccProofIdxType) -> Option<&G1Affine> { 86 | Some(&self.object_accs.get(&proof_idx.0)?.get(proof_idx.1)?.0) 87 | } 88 | 89 | pub fn verify(&self, query_exp: &BoolExp) -> VerifyResult { 90 | let mut result = VerifyResult::default(); 91 | match AP::TYPE { 92 | acc::Type::ACC1 => { 93 | for (&i, proofs) in self.proofs.iter() { 94 | let query_acc = match query_exp.get(i) { 95 | Some(set) => acc::Acc1::cal_acc_g1(set), 96 | None => { 97 | result.add(InvalidReason::InvalidSetIdx(i)); 98 | continue; 99 | } 100 | }; 101 | for (j, proof) in proofs.iter().enumerate() { 102 | let acc_proof_idx = (i, j); 
103 | let proof = match proof.as_any().downcast_ref::() { 104 | Some(proof) => proof, 105 | None => { 106 | result.add(InvalidReason::InvalidAccIdx(acc_proof_idx)); 107 | continue; 108 | } 109 | }; 110 | let obj_acc = match self.get_object_acc(acc_proof_idx) { 111 | Some(acc) => acc, 112 | None => { 113 | result.add(InvalidReason::InvalidAccIdx(acc_proof_idx)); 114 | continue; 115 | } 116 | }; 117 | if !proof.verify(obj_acc, &query_acc) { 118 | result.add(InvalidReason::InvalidAccProof(acc_proof_idx)); 119 | } 120 | } 121 | } 122 | } 123 | acc::Type::ACC2 => { 124 | for (&i, proofs) in self.proofs.iter() { 125 | let query_acc = match query_exp.get(i) { 126 | Some(set) => acc::Acc2::cal_acc_g2(set), 127 | None => { 128 | result.add(InvalidReason::InvalidSetIdx(i)); 129 | continue; 130 | } 131 | }; 132 | let obj_accs = match self.object_accs.get(&i) { 133 | Some(accs) => accs, 134 | None => { 135 | result.add(InvalidReason::InvalidSetIdx(i)); 136 | continue; 137 | } 138 | }; 139 | debug_assert_eq!(proofs.len(), 1); 140 | let acc_proof_idx = (i, 0); 141 | let proof = match proofs[0].as_any().downcast_ref::() { 142 | Some(proof) => proof, 143 | None => { 144 | result.add(InvalidReason::InvalidAccIdx(acc_proof_idx)); 145 | continue; 146 | } 147 | }; 148 | let mut g1 = G1Projective::zero(); 149 | for obj_acc in obj_accs.iter() { 150 | g1.add_assign_mixed(&obj_acc.0); 151 | } 152 | if !proof.verify(&g1.into_affine(), &query_acc) { 153 | result.add(InvalidReason::InvalidAccProof(acc_proof_idx)); 154 | } 155 | } 156 | } 157 | } 158 | result 159 | } 160 | 161 | pub fn add_proof( 162 | &mut self, 163 | mismatch_idx: usize, 164 | query_exp_set_d: &acc::DigestSet, 165 | object_set_d: &acc::DigestSet, 166 | object_acc: &G1Affine, 167 | ) -> Result { 168 | let object_acc = ObjAcc(*object_acc); 169 | let proof = AP::gen_proof(object_set_d, query_exp_set_d)?; 170 | 171 | match AP::TYPE { 172 | acc::Type::ACC1 => { 173 | let proof_ptr = 
self.proofs.entry(mismatch_idx).or_insert_with(Vec::new); 174 | proof_ptr.push(proof); 175 | let acc_ptr = self 176 | .object_accs 177 | .entry(mismatch_idx) 178 | .or_insert_with(Vec::new); 179 | acc_ptr.push(object_acc); 180 | debug_assert_eq!(proof_ptr.len(), acc_ptr.len()); 181 | Ok((mismatch_idx, proof_ptr.len() - 1)) 182 | } 183 | acc::Type::ACC2 => { 184 | let proof_ptr = self.proofs.entry(mismatch_idx).or_insert_with(Vec::new); 185 | let acc_ptr = self 186 | .object_accs 187 | .entry(mismatch_idx) 188 | .or_insert_with(Vec::new); 189 | acc_ptr.push(object_acc); 190 | if proof_ptr.is_empty() { 191 | proof_ptr.push(proof); 192 | } else { 193 | debug_assert_eq!(proof_ptr.len(), 1); 194 | proof_ptr[0].combine_proof(&proof)?; 195 | } 196 | Ok((mismatch_idx, acc_ptr.len() - 1)) 197 | } 198 | } 199 | } 200 | 201 | pub fn compute_stats(&self, stats: &mut VOStatistic) { 202 | stats.num_of_acc_proofs = self.proofs.values().map(|v| v.len() as u64).sum(); 203 | } 204 | } 205 | 206 | #[derive(Debug, Default, Clone, Eq, PartialEq, Serialize, Deserialize)] 207 | pub struct ResultVOTree(pub Vec); 208 | 209 | impl ResultVOTree { 210 | pub fn new() -> Self { 211 | Self(Vec::new()) 212 | } 213 | 214 | pub fn compute_digest( 215 | &self, 216 | res_objs: &ResultObjs, 217 | vo_acc: &ResultVOAcc, 218 | prev_hash: &Digest, 219 | ) -> Option { 220 | let mut hash_root = *prev_hash; 221 | for n in &self.0 { 222 | hash_root = n.compute_digest(res_objs, vo_acc, &hash_root)?; 223 | } 224 | Some(hash_root) 225 | } 226 | 227 | pub fn compute_stats(&self, stats: &mut VOStatistic) { 228 | for sub_node in &self.0 { 229 | sub_node.compute_stats(stats); 230 | } 231 | } 232 | } 233 | 234 | #[derive(Debug, Default, Clone, Eq, PartialEq, Serialize, Deserialize)] 235 | pub struct ResultVO { 236 | #[serde(rename = "tree")] 237 | pub vo_t: ResultVOTree, 238 | #[serde(rename = "acc")] 239 | pub vo_acc: ResultVOAcc, 240 | } 241 | 242 | impl ResultVO { 243 | pub fn new() -> Self { 244 | Self { 245 | 
vo_t: ResultVOTree::new(), 246 | vo_acc: ResultVOAcc::::new(), 247 | } 248 | } 249 | pub fn compute_stats(&self, stats: &mut VOStatistic) { 250 | self.vo_t.compute_stats(stats); 251 | self.vo_acc.compute_stats(stats); 252 | } 253 | } 254 | 255 | #[derive(Debug, Default, Clone, Eq, PartialEq, Serialize, Deserialize)] 256 | pub struct VOStatistic { 257 | pub num_of_acc_proofs: u64, 258 | pub num_of_objs: u64, 259 | pub num_of_mismatch_objs: u64, 260 | pub num_of_mismatch_intra_nodes: u64, 261 | pub num_of_mismatch_inter_nodes: u64, 262 | } 263 | 264 | #[derive(Debug, Default, Clone, Eq, PartialEq, Serialize, Deserialize)] 265 | pub struct OverallResult { 266 | #[serde(rename = "result")] 267 | pub res_objs: ResultObjs, 268 | #[serde(rename = "vo")] 269 | pub res_vo: ResultVO, 270 | pub query: Query, 271 | pub query_exp_set: Vec>, 272 | pub query_time_in_ms: u64, 273 | pub v_bit_len: Vec, 274 | pub vo_size: u64, 275 | #[serde(rename = "stats")] 276 | pub vo_stats: VOStatistic, 277 | } 278 | 279 | impl OverallResult { 280 | pub async fn verify( 281 | &self, 282 | chain: &impl LightNodeInterface, 283 | ) -> Result<(VerifyResult, Duration)> { 284 | info!("verify result"); 285 | let cpu_timer = howlong::ProcessCPUTimer::new(); 286 | let timer = howlong::HighResolutionTimer::new(); 287 | let res = self.inner_verify(chain).await?; 288 | let time = timer.elapsed(); 289 | info!("used time: {}", cpu_timer.elapsed()); 290 | Ok((res, time)) 291 | } 292 | 293 | async fn inner_verify(&self, chain: &impl LightNodeInterface) -> Result { 294 | let mut result = VerifyResult::default(); 295 | let query_exp = self.query.to_bool_exp(&self.v_bit_len); 296 | for (id, obj) in self.res_objs.iter() { 297 | if !query_exp.is_match(&obj.set_data) { 298 | result.add(InvalidReason::InvalidMatchObj(*id)); 299 | } 300 | } 301 | let acc_res = self.res_vo.vo_acc.verify(&query_exp); 302 | result.append(acc_res); 303 | let (blk1, blk2) = join!( 304 | 
chain.lightnode_read_block_header(self.query.start_block), 305 | chain.lightnode_read_block_header(self.query.end_block) 306 | ); 307 | let prev_hash = blk1?.prev_hash; 308 | let hash_root = blk2?.to_digest(); 309 | if self 310 | .res_vo 311 | .vo_t 312 | .compute_digest(&self.res_objs, &self.res_vo.vo_acc, &prev_hash) 313 | != Some(hash_root) 314 | { 315 | result.add(InvalidReason::InvalidHash); 316 | } 317 | Ok(result) 318 | } 319 | 320 | pub fn compute_stats(&mut self) -> Result<()> { 321 | self.vo_size = bincode::serialize(&self.res_vo)?.len() as u64; 322 | self.vo_stats = Default::default(); 323 | self.res_vo.compute_stats(&mut self.vo_stats); 324 | Ok(()) 325 | } 326 | } 327 | 328 | pub mod vo { 329 | use super::*; 330 | 331 | #[derive(Debug, Default, Clone, Copy, Eq, PartialEq, Serialize, Deserialize)] 332 | pub struct MatchObjNode { 333 | pub obj_id: IdType, 334 | } 335 | 336 | impl MatchObjNode { 337 | pub fn create(o: &Object) -> Self { 338 | Self { obj_id: o.id } 339 | } 340 | pub fn compute_digest( 341 | self, 342 | res_objs: &ResultObjs, 343 | _vo_acc: &ResultVOAcc, 344 | ) -> Option { 345 | let obj = res_objs.get(&self.obj_id)?; 346 | Some(concat_digest_ref( 347 | [obj.acc_value.to_digest(), obj.to_digest()].iter(), 348 | )) 349 | } 350 | pub fn into_obj_node(self) -> ObjNode { 351 | ObjNode::Match(Box::new(self)) 352 | } 353 | pub fn compute_stats(self, stats: &mut VOStatistic) { 354 | stats.num_of_objs += 1; 355 | } 356 | } 357 | 358 | #[derive(Debug, Default, Clone, Copy, Eq, PartialEq, Serialize, Deserialize)] 359 | pub struct NoMatchObjNode { 360 | pub obj_id: IdType, 361 | pub obj_hash: Digest, 362 | pub proof_idx: AccProofIdxType, 363 | } 364 | 365 | impl NoMatchObjNode { 366 | pub fn create(o: &Object, proof_idx: AccProofIdxType) -> Self { 367 | Self { 368 | obj_id: o.id, 369 | obj_hash: o.to_digest(), 370 | proof_idx, 371 | } 372 | } 373 | pub fn into_obj_node(self) -> ObjNode { 374 | ObjNode::NoMatch(Box::new(self)) 375 | } 376 | pub fn 
compute_digest( 377 | &self, 378 | _res_objs: &ResultObjs, 379 | vo_acc: &ResultVOAcc, 380 | ) -> Option { 381 | let acc_value = vo_acc.get_object_acc(self.proof_idx)?; 382 | Some(concat_digest_ref( 383 | [acc_value.to_digest(), self.obj_hash].iter(), 384 | )) 385 | } 386 | pub fn compute_stats(&self, stats: &mut VOStatistic) { 387 | stats.num_of_mismatch_objs += 1; 388 | } 389 | } 390 | 391 | #[derive(Debug, Clone, Eq, PartialEq, Serialize, Deserialize)] 392 | pub enum ObjNode { 393 | Match(Box), 394 | NoMatch(Box), 395 | } 396 | 397 | impl ObjNode { 398 | pub fn compute_digest( 399 | &self, 400 | res_objs: &ResultObjs, 401 | vo_acc: &ResultVOAcc, 402 | ) -> Option { 403 | match self { 404 | Self::Match(n) => n.compute_digest(res_objs, vo_acc), 405 | Self::NoMatch(n) => n.compute_digest(res_objs, vo_acc), 406 | } 407 | } 408 | pub fn compute_stats(&self, stats: &mut VOStatistic) { 409 | match self { 410 | Self::Match(n) => n.compute_stats(stats), 411 | Self::NoMatch(n) => n.compute_stats(stats), 412 | } 413 | } 414 | } 415 | 416 | #[derive(Debug, Default, Clone, Eq, PartialEq, Serialize, Deserialize)] 417 | pub struct FlatBlkNode { 418 | pub block_id: IdType, 419 | pub skip_list_root: Option, 420 | pub sub_nodes: Vec, 421 | } 422 | 423 | impl FlatBlkNode { 424 | pub fn compute_digest( 425 | &self, 426 | res_objs: &ResultObjs, 427 | vo_acc: &ResultVOAcc, 428 | prev_hash: &Digest, 429 | ) -> Option { 430 | let mut hs: Vec = Vec::with_capacity(self.sub_nodes.len()); 431 | for sub_node in &self.sub_nodes { 432 | hs.push(sub_node.compute_digest(res_objs, vo_acc)?); 433 | } 434 | let data_root = concat_digest(hs.into_iter()); 435 | 436 | let mut state = blake2().to_state(); 437 | state.update(&self.block_id.to_le_bytes()); 438 | state.update(&prev_hash.0); 439 | state.update(&data_root.0); 440 | if let Some(d) = self.skip_list_root { 441 | state.update(&d.0); 442 | } 443 | Some(Digest::from(state.finalize())) 444 | } 445 | pub fn into_result_vo_node(self) -> 
ResultVONode { 446 | ResultVONode::FlatBlkNode(Box::new(self)) 447 | } 448 | pub fn compute_stats(&self, stats: &mut VOStatistic) { 449 | for sub_node in &self.sub_nodes { 450 | sub_node.compute_stats(stats); 451 | } 452 | } 453 | } 454 | 455 | #[derive(Debug, Default, Clone, Copy, Eq, PartialEq, Serialize, Deserialize)] 456 | pub struct NoMatchIntraNonLeaf { 457 | pub id: IdType, 458 | pub child_hash_digest: Digest, 459 | pub proof_idx: AccProofIdxType, 460 | } 461 | 462 | impl NoMatchIntraNonLeaf { 463 | pub fn create(n: &IntraIndexNonLeaf, proof_idx: AccProofIdxType) -> Self { 464 | Self { 465 | id: n.id, 466 | child_hash_digest: n.child_hash_digest, 467 | proof_idx, 468 | } 469 | } 470 | pub fn into_intra_node(self) -> IntraNode { 471 | IntraNode::NoMatchIntraNonLeaf(Box::new(self)) 472 | } 473 | pub fn compute_digest( 474 | &self, 475 | _res_objs: &ResultObjs, 476 | vo_acc: &ResultVOAcc, 477 | ) -> Option { 478 | let acc_value = vo_acc.get_object_acc(self.proof_idx)?; 479 | Some(concat_digest_ref( 480 | [acc_value.to_digest(), self.child_hash_digest].iter(), 481 | )) 482 | } 483 | pub fn compute_stats(&self, stats: &mut VOStatistic) { 484 | stats.num_of_mismatch_intra_nodes += 1; 485 | } 486 | } 487 | 488 | #[derive(Debug, Default, Clone, Copy, Eq, PartialEq, Serialize, Deserialize)] 489 | pub struct NoMatchIntraLeaf { 490 | pub id: IdType, 491 | pub obj_hash: Digest, 492 | pub proof_idx: AccProofIdxType, 493 | } 494 | 495 | impl NoMatchIntraLeaf { 496 | pub fn create(n: &IntraIndexLeaf, proof_idx: AccProofIdxType) -> Self { 497 | Self { 498 | id: n.id, 499 | obj_hash: n.obj_hash, 500 | proof_idx, 501 | } 502 | } 503 | pub fn into_intra_node(self) -> IntraNode { 504 | IntraNode::NoMatchIntraLeaf(Box::new(self)) 505 | } 506 | pub fn compute_digest( 507 | &self, 508 | _res_objs: &ResultObjs, 509 | vo_acc: &ResultVOAcc, 510 | ) -> Option { 511 | let acc_value = vo_acc.get_object_acc(self.proof_idx)?; 512 | Some(concat_digest_ref( 513 | [acc_value.to_digest(), 
self.obj_hash].iter(), 514 | )) 515 | } 516 | pub fn compute_stats(&self, stats: &mut VOStatistic) { 517 | stats.num_of_mismatch_intra_nodes += 1; 518 | } 519 | } 520 | 521 | #[derive(Debug, Default, Clone, Copy, Eq, PartialEq, Serialize, Deserialize)] 522 | pub struct MatchIntraLeaf { 523 | pub id: IdType, 524 | pub obj_id: IdType, 525 | } 526 | 527 | impl MatchIntraLeaf { 528 | pub fn create(n: &IntraIndexLeaf) -> Self { 529 | Self { 530 | id: n.id, 531 | obj_id: n.obj_id, 532 | } 533 | } 534 | pub fn into_intra_node(self) -> IntraNode { 535 | IntraNode::MatchIntraLeaf(Box::new(self)) 536 | } 537 | pub fn compute_digest( 538 | self, 539 | res_objs: &ResultObjs, 540 | _vo_acc: &ResultVOAcc, 541 | ) -> Option { 542 | let obj = res_objs.get(&self.obj_id)?; 543 | Some(concat_digest_ref( 544 | [obj.acc_value.to_digest(), obj.to_digest()].iter(), 545 | )) 546 | } 547 | pub fn compute_stats(self, stats: &mut VOStatistic) { 548 | stats.num_of_objs += 1; 549 | } 550 | } 551 | 552 | #[derive(Debug, Clone, Eq, PartialEq, Serialize, Deserialize)] 553 | pub enum IntraNode { 554 | NoMatchIntraLeaf(Box), 555 | NoMatchIntraNonLeaf(Box), 556 | MatchIntraLeaf(Box), 557 | IntraNonLeaf(Box), 558 | Empty, 559 | } 560 | 561 | impl IntraNode { 562 | pub fn compute_digest( 563 | &self, 564 | res_objs: &ResultObjs, 565 | vo_acc: &ResultVOAcc, 566 | ) -> Option { 567 | match self { 568 | Self::NoMatchIntraLeaf(n) => n.compute_digest(res_objs, vo_acc), 569 | Self::NoMatchIntraNonLeaf(n) => n.compute_digest(res_objs, vo_acc), 570 | Self::MatchIntraLeaf(n) => n.compute_digest(res_objs, vo_acc), 571 | Self::IntraNonLeaf(n) => n.compute_digest(res_objs, vo_acc), 572 | Self::Empty => None, 573 | } 574 | } 575 | pub fn compute_stats(&self, stats: &mut VOStatistic) { 576 | match self { 577 | Self::NoMatchIntraLeaf(n) => n.compute_stats(stats), 578 | Self::NoMatchIntraNonLeaf(n) => n.compute_stats(stats), 579 | Self::MatchIntraLeaf(n) => n.compute_stats(stats), 580 | Self::IntraNonLeaf(n) => 
n.compute_stats(stats), 581 | Self::Empty => {} 582 | } 583 | } 584 | } 585 | 586 | #[derive(Debug, Clone, Eq, PartialEq, Serialize, Deserialize)] 587 | pub struct IntraNonLeaf { 588 | pub id: IdType, 589 | #[serde(with = "crate::acc::serde_impl")] 590 | pub acc_value: G1Affine, 591 | pub children: SmallVec<[IntraNode; 2]>, 592 | } 593 | 594 | impl IntraNonLeaf { 595 | pub fn create(n: &IntraIndexNonLeaf) -> Self { 596 | Self { 597 | id: n.id, 598 | acc_value: n.acc_value, 599 | children: SmallVec::new(), 600 | } 601 | } 602 | pub fn into_intra_node(self) -> IntraNode { 603 | IntraNode::IntraNonLeaf(Box::new(self)) 604 | } 605 | pub fn compute_digest( 606 | &self, 607 | res_objs: &ResultObjs, 608 | vo_acc: &ResultVOAcc, 609 | ) -> Option { 610 | let mut child_hashes: SmallVec<[Digest; 2]> = SmallVec::new(); 611 | for child in &self.children { 612 | child_hashes.push(child.compute_digest(res_objs, vo_acc)?); 613 | } 614 | let child_hash_digest = concat_digest_ref(child_hashes.iter()); 615 | Some(concat_digest_ref( 616 | [self.acc_value.to_digest(), child_hash_digest].iter(), 617 | )) 618 | } 619 | pub fn compute_stats(&self, stats: &mut VOStatistic) { 620 | for child in &self.children { 621 | child.compute_stats(stats); 622 | } 623 | } 624 | } 625 | 626 | #[derive(Debug, Clone, Eq, PartialEq, Serialize, Deserialize)] 627 | pub struct BlkNode { 628 | pub block_id: IdType, 629 | pub skip_list_root: Option, 630 | pub sub_node: IntraNode, 631 | } 632 | 633 | impl BlkNode { 634 | pub fn compute_digest( 635 | &self, 636 | res_objs: &ResultObjs, 637 | vo_acc: &ResultVOAcc, 638 | prev_hash: &Digest, 639 | ) -> Option { 640 | let data_root = self.sub_node.compute_digest(res_objs, vo_acc)?; 641 | let mut state = blake2().to_state(); 642 | state.update(&self.block_id.to_le_bytes()); 643 | state.update(&prev_hash.0); 644 | state.update(&data_root.0); 645 | if let Some(d) = self.skip_list_root { 646 | state.update(&d.0); 647 | } 648 | Some(Digest::from(state.finalize())) 649 | } 
650 | pub fn into_result_vo_node(self) -> ResultVONode { 651 | ResultVONode::BlkNode(Box::new(self)) 652 | } 653 | pub fn compute_stats(&self, stats: &mut VOStatistic) { 654 | self.sub_node.compute_stats(stats); 655 | } 656 | } 657 | 658 | #[derive(Debug, Default, Clone, Copy, Eq, PartialEq, Serialize, Deserialize)] 659 | pub struct JumpNode { 660 | pub id: IdType, 661 | pub proof_idx: AccProofIdxType, 662 | } 663 | 664 | impl JumpNode { 665 | pub fn create(n: &SkipListNode, proof_idx: AccProofIdxType) -> Self { 666 | Self { 667 | id: n.id, 668 | proof_idx, 669 | } 670 | } 671 | pub fn compute_digest( 672 | &self, 673 | _res_objs: &ResultObjs, 674 | vo_acc: &ResultVOAcc, 675 | prev_hash: &Digest, 676 | ) -> Option { 677 | let acc_value = vo_acc.get_object_acc(self.proof_idx)?; 678 | Some(concat_digest_ref( 679 | [acc_value.to_digest(), *prev_hash].iter(), 680 | )) 681 | } 682 | pub fn into_jump_or_no_jump_node(self) -> JumpOrNoJumpNode { 683 | JumpOrNoJumpNode::Jump(Box::new(self)) 684 | } 685 | pub fn compute_stats(&self, stats: &mut VOStatistic) { 686 | stats.num_of_mismatch_inter_nodes += 1; 687 | } 688 | } 689 | 690 | #[derive(Debug, Default, Clone, Copy, Eq, PartialEq, Serialize, Deserialize)] 691 | pub struct NoJumpNode { 692 | pub id: IdType, 693 | pub digest: Digest, 694 | } 695 | 696 | impl NoJumpNode { 697 | pub fn create(n: &SkipListNode) -> Self { 698 | Self { 699 | id: n.id, 700 | digest: n.digest, 701 | } 702 | } 703 | pub fn compute_digest( 704 | &self, 705 | _res_objs: &ResultObjs, 706 | _vo_acc: &ResultVOAcc, 707 | _prev_hash: &Digest, 708 | ) -> Option { 709 | Some(self.digest) 710 | } 711 | pub fn into_jump_or_no_jump_node(self) -> JumpOrNoJumpNode { 712 | JumpOrNoJumpNode::NoJump(Box::new(self)) 713 | } 714 | pub fn compute_stats(&self, _stats: &mut VOStatistic) {} 715 | } 716 | 717 | #[derive(Debug, Clone, Eq, PartialEq, Serialize, Deserialize)] 718 | pub enum JumpOrNoJumpNode { 719 | Jump(Box), 720 | NoJump(Box), 721 | } 722 | 723 | impl 
JumpOrNoJumpNode { 724 | pub fn compute_digest( 725 | &self, 726 | res_objs: &ResultObjs, 727 | vo_acc: &ResultVOAcc, 728 | prev_hash: &Digest, 729 | ) -> Option { 730 | match self { 731 | Self::Jump(n) => n.compute_digest(res_objs, vo_acc, prev_hash), 732 | Self::NoJump(n) => n.compute_digest(res_objs, vo_acc, prev_hash), 733 | } 734 | } 735 | pub fn compute_stats(&self, stats: &mut VOStatistic) { 736 | match self { 737 | Self::Jump(n) => n.compute_stats(stats), 738 | Self::NoJump(n) => n.compute_stats(stats), 739 | } 740 | } 741 | } 742 | 743 | #[derive(Debug, Clone, Eq, PartialEq, Serialize, Deserialize)] 744 | pub struct SkipListRoot { 745 | pub block_id: IdType, 746 | pub blk_prev_hash: Digest, 747 | pub blk_data_root: Digest, 748 | pub sub_nodes: Vec, 749 | } 750 | 751 | impl SkipListRoot { 752 | pub fn compute_digest( 753 | &self, 754 | res_objs: &ResultObjs, 755 | vo_acc: &ResultVOAcc, 756 | prev_hash: &Digest, 757 | ) -> Option { 758 | let mut hs: Vec = Vec::with_capacity(self.sub_nodes.len()); 759 | for sub_node in &self.sub_nodes { 760 | hs.push(sub_node.compute_digest(res_objs, vo_acc, prev_hash)?); 761 | } 762 | let skip_list_root = concat_digest(hs.into_iter()); 763 | let mut state = blake2().to_state(); 764 | state.update(&self.block_id.to_le_bytes()); 765 | state.update(&self.blk_prev_hash.0); 766 | state.update(&self.blk_data_root.0); 767 | state.update(&skip_list_root.0); 768 | Some(Digest::from(state.finalize())) 769 | } 770 | pub fn into_result_vo_node(self) -> ResultVONode { 771 | ResultVONode::SkipListRoot(Box::new(self)) 772 | } 773 | pub fn compute_stats(&self, stats: &mut VOStatistic) { 774 | for sub_node in &self.sub_nodes { 775 | sub_node.compute_stats(stats); 776 | } 777 | } 778 | } 779 | 780 | #[derive(Debug, Clone, Eq, PartialEq, Serialize, Deserialize)] 781 | pub enum ResultVONode { 782 | FlatBlkNode(Box), 783 | BlkNode(Box), 784 | SkipListRoot(Box), 785 | } 786 | 787 | impl ResultVONode { 788 | pub fn compute_digest( 789 | &self, 790 
| res_objs: &ResultObjs, 791 | vo_acc: &ResultVOAcc, 792 | prev_hash: &Digest, 793 | ) -> Option { 794 | match self { 795 | Self::FlatBlkNode(n) => n.compute_digest(res_objs, vo_acc, prev_hash), 796 | Self::BlkNode(n) => n.compute_digest(res_objs, vo_acc, prev_hash), 797 | Self::SkipListRoot(n) => n.compute_digest(res_objs, vo_acc, prev_hash), 798 | } 799 | } 800 | pub fn compute_stats(&self, stats: &mut VOStatistic) { 801 | match self { 802 | Self::FlatBlkNode(n) => n.compute_stats(stats), 803 | Self::BlkNode(n) => n.compute_stats(stats), 804 | Self::SkipListRoot(n) => n.compute_stats(stats), 805 | } 806 | } 807 | } 808 | } 809 | -------------------------------------------------------------------------------- /vchain/src/chain/tests.rs: -------------------------------------------------------------------------------- 1 | use super::*; 2 | use crate::acc; 3 | use crate::digest::{Digest, Digestible}; 4 | use anyhow::Context; 5 | use serde_json::json; 6 | use std::collections::HashMap; 7 | 8 | #[derive(Debug, Default)] 9 | struct FakeInMemChain { 10 | param: Option, 11 | block_headers: HashMap, 12 | block_data: HashMap, 13 | intra_index_nodes: HashMap, 14 | skip_list_nodes: HashMap, 15 | objects: HashMap, 16 | } 17 | 18 | #[async_trait::async_trait] 19 | impl LightNodeInterface for FakeInMemChain { 20 | async fn lightnode_get_parameter(&self) -> Result { 21 | self.get_parameter() 22 | } 23 | async fn lightnode_read_block_header(&self, id: IdType) -> Result { 24 | self.read_block_header(id) 25 | } 26 | } 27 | 28 | impl ReadInterface for FakeInMemChain { 29 | fn get_parameter(&self) -> Result { 30 | self.param.clone().context("failed to get param") 31 | } 32 | fn read_block_header(&self, id: IdType) -> Result { 33 | self.block_headers 34 | .get(&id) 35 | .cloned() 36 | .context("failed to read block header") 37 | } 38 | fn read_block_data(&self, id: IdType) -> Result { 39 | self.block_data 40 | .get(&id) 41 | .cloned() 42 | .context("failed to read block data") 43 | } 
44 | fn read_intra_index_node(&self, id: IdType) -> Result { 45 | self.intra_index_nodes 46 | .get(&id) 47 | .cloned() 48 | .context("failed to read intra index") 49 | } 50 | fn read_skip_list_node(&self, id: IdType) -> Result { 51 | self.skip_list_nodes 52 | .get(&id) 53 | .cloned() 54 | .context("failed to read skip list") 55 | } 56 | fn read_object(&self, id: IdType) -> Result { 57 | self.objects 58 | .get(&id) 59 | .cloned() 60 | .context("failed to read object") 61 | } 62 | } 63 | 64 | impl WriteInterface for FakeInMemChain { 65 | fn set_parameter(&mut self, param: Parameter) -> Result<()> { 66 | self.param = Some(param); 67 | Ok(()) 68 | } 69 | fn write_block_header(&mut self, header: BlockHeader) -> Result<()> { 70 | let id = header.block_id; 71 | self.block_headers.insert(id, header); 72 | Ok(()) 73 | } 74 | fn write_block_data(&mut self, data: BlockData) -> Result<()> { 75 | let id = data.block_id; 76 | self.block_data.insert(id, data); 77 | Ok(()) 78 | } 79 | fn write_intra_index_node(&mut self, node: IntraIndexNode) -> Result<()> { 80 | let id = node.id(); 81 | self.intra_index_nodes.insert(id, node); 82 | Ok(()) 83 | } 84 | fn write_skip_list_node(&mut self, node: SkipListNode) -> Result<()> { 85 | let id = node.id; 86 | self.skip_list_nodes.insert(id, node); 87 | Ok(()) 88 | } 89 | fn write_object(&mut self, obj: Object) -> Result<()> { 90 | let id = obj.id; 91 | self.objects.insert(id, obj); 92 | Ok(()) 93 | } 94 | } 95 | 96 | impl FakeInMemChain { 97 | fn new() -> Self { 98 | Default::default() 99 | } 100 | 101 | fn build_chain(&mut self, data: &str, param: &Parameter) -> Result<()> { 102 | info!("build chain"); 103 | self.set_parameter(param.clone())?; 104 | let mut prev_hash = Digest::default(); 105 | for (id, objs) in load_raw_obj_from_str(data)?.iter() { 106 | let header = build_block(*id, prev_hash, objs.iter(), self)?; 107 | prev_hash = header.to_digest(); 108 | } 109 | Ok(()) 110 | } 111 | } 112 | 113 | const TEST_DATA_1: &str = r#" 114 | 1 [ 
1 ] { a } 115 | 1 [ 2 ] { a } 116 | 1 [ 3 ] { a } 117 | 1 [ 4 ] { a } 118 | 2 [ 1 ] { b } 119 | 2 [ 2 ] { b } 120 | 2 [ 3 ] { b } 121 | 2 [ 4 ] { b } 122 | "#; 123 | 124 | const TEST_DATA_2: &str = r#" 125 | 1 [ 1 ] { a } 126 | 2 [ 1 ] { b } 127 | 3 [ 1 ] { b } 128 | 4 [ 1 ] { b } 129 | 5 [ 1 ] { a } 130 | 6 [ 1 ] { b } 131 | 7 [ 1 ] { b } 132 | 8 [ 1 ] { b } 133 | 9 [ 1 ] { b } 134 | 10 [ 1 ] { a } 135 | 11 [ 1 ] { b } 136 | 12 [ 1 ] { b } 137 | 13 [ 1 ] { b } 138 | 14 [ 1 ] { b } 139 | 15 [ 1 ] { b } 140 | 16 [ 1 ] { b } 141 | 17 [ 1 ] { b } 142 | 18 [ 1 ] { b } 143 | 19 [ 1 ] { a } 144 | 20 [ 1 ] { b } 145 | "#; 146 | 147 | fn init_logger() { 148 | let _ = env_logger::builder().is_test(true).try_init(); 149 | } 150 | 151 | #[actix_rt::test] 152 | async fn test_data1_acc1_flat() { 153 | init_logger(); 154 | let mut chain = FakeInMemChain::new(); 155 | let param = Parameter { 156 | v_bit_len: vec![3], 157 | acc_type: acc::Type::ACC1, 158 | use_sk: true, 159 | intra_index: false, 160 | skip_list_max_level: 0, 161 | }; 162 | chain.build_chain(TEST_DATA_1, ¶m).unwrap(); 163 | let query = serde_json::from_value::(json!({ 164 | "start_block": 1, 165 | "end_block": 2, 166 | "range": [ 167 | [1], 168 | [1], 169 | ], 170 | "bool": [["a"]], 171 | })) 172 | .unwrap(); 173 | let res: OverallResult = historical_query(&query, &chain).unwrap(); 174 | assert_eq!(res.vo_stats.num_of_objs, 1); 175 | assert!(res.verify(&chain).await.unwrap().0.is_ok()); 176 | } 177 | 178 | #[actix_rt::test] 179 | async fn test_data1_acc1() { 180 | init_logger(); 181 | let mut chain = FakeInMemChain::new(); 182 | let param = Parameter { 183 | v_bit_len: vec![3], 184 | acc_type: acc::Type::ACC1, 185 | use_sk: true, 186 | intra_index: true, 187 | skip_list_max_level: 0, 188 | }; 189 | chain.build_chain(TEST_DATA_1, ¶m).unwrap(); 190 | let query = serde_json::from_value::(json!({ 191 | "start_block": 1, 192 | "end_block": 2, 193 | "range": [ 194 | [1], 195 | [1], 196 | ], 197 | "bool": [["a"]], 198 | 
})) 199 | .unwrap(); 200 | let res: OverallResult = historical_query(&query, &chain).unwrap(); 201 | assert_eq!(res.vo_stats.num_of_objs, 1); 202 | assert!(res.verify(&chain).await.unwrap().0.is_ok()); 203 | } 204 | 205 | #[actix_rt::test] 206 | async fn test_data1_acc2_flat() { 207 | init_logger(); 208 | let mut chain = FakeInMemChain::new(); 209 | let param = Parameter { 210 | v_bit_len: vec![3], 211 | acc_type: acc::Type::ACC2, 212 | use_sk: true, 213 | intra_index: false, 214 | skip_list_max_level: 0, 215 | }; 216 | chain.build_chain(TEST_DATA_1, ¶m).unwrap(); 217 | let query = serde_json::from_value::(json!({ 218 | "start_block": 1, 219 | "end_block": 2, 220 | "range": [ 221 | [1], 222 | [1], 223 | ], 224 | "bool": [["a"]], 225 | })) 226 | .unwrap(); 227 | let res: OverallResult = historical_query(&query, &chain).unwrap(); 228 | assert_eq!(res.vo_stats.num_of_objs, 1); 229 | assert!(res.verify(&chain).await.unwrap().0.is_ok()); 230 | } 231 | 232 | #[actix_rt::test] 233 | async fn test_data1_acc2() { 234 | init_logger(); 235 | let mut chain = FakeInMemChain::new(); 236 | let param = Parameter { 237 | v_bit_len: vec![3], 238 | acc_type: acc::Type::ACC2, 239 | use_sk: true, 240 | intra_index: true, 241 | skip_list_max_level: 0, 242 | }; 243 | chain.build_chain(TEST_DATA_1, ¶m).unwrap(); 244 | let query = serde_json::from_value::(json!({ 245 | "start_block": 1, 246 | "end_block": 2, 247 | "range": [ 248 | [1], 249 | [1], 250 | ], 251 | "bool": [["a"]], 252 | })) 253 | .unwrap(); 254 | let res: OverallResult = historical_query(&query, &chain).unwrap(); 255 | assert_eq!(res.vo_stats.num_of_objs, 1); 256 | assert!(res.verify(&chain).await.unwrap().0.is_ok()); 257 | } 258 | 259 | #[actix_rt::test] 260 | async fn test_data2_acc2() { 261 | init_logger(); 262 | let mut chain = FakeInMemChain::new(); 263 | let param = Parameter { 264 | v_bit_len: vec![3], 265 | acc_type: acc::Type::ACC2, 266 | use_sk: true, 267 | intra_index: true, 268 | skip_list_max_level: 0, 269 | }; 
270 | chain.build_chain(TEST_DATA_2, ¶m).unwrap(); 271 | let query = serde_json::from_value::(json!({ 272 | "start_block": 1, 273 | "end_block": 20, 274 | "range": [ 275 | [1], 276 | [1], 277 | ], 278 | "bool": [["a"]], 279 | })) 280 | .unwrap(); 281 | let res: OverallResult = historical_query(&query, &chain).unwrap(); 282 | assert_eq!(res.vo_stats.num_of_objs, 4); 283 | assert!(res.verify(&chain).await.unwrap().0.is_ok()); 284 | } 285 | 286 | #[actix_rt::test] 287 | async fn test_data2_acc2_skip_list() { 288 | init_logger(); 289 | let mut chain = FakeInMemChain::new(); 290 | let param = Parameter { 291 | v_bit_len: vec![3], 292 | acc_type: acc::Type::ACC2, 293 | use_sk: true, 294 | intra_index: true, 295 | skip_list_max_level: 2, 296 | }; 297 | chain.build_chain(TEST_DATA_2, ¶m).unwrap(); 298 | let query = serde_json::from_value::(json!({ 299 | "start_block": 1, 300 | "end_block": 20, 301 | "range": [ 302 | [1], 303 | [1], 304 | ], 305 | "bool": [["a"]], 306 | })) 307 | .unwrap(); 308 | let res: OverallResult = historical_query(&query, &chain).unwrap(); 309 | assert_eq!(res.vo_stats.num_of_objs, 4); 310 | assert!(res.verify(&chain).await.unwrap().0.is_ok()); 311 | } 312 | 313 | #[actix_rt::test] 314 | async fn test_data2_acc1_skip_list() { 315 | init_logger(); 316 | let mut chain = FakeInMemChain::new(); 317 | let param = Parameter { 318 | v_bit_len: vec![3], 319 | acc_type: acc::Type::ACC1, 320 | use_sk: true, 321 | intra_index: true, 322 | skip_list_max_level: 2, 323 | }; 324 | chain.build_chain(TEST_DATA_2, ¶m).unwrap(); 325 | let query = serde_json::from_value::(json!({ 326 | "start_block": 1, 327 | "end_block": 20, 328 | "range": [ 329 | [1], 330 | [1], 331 | ], 332 | "bool": [["a"]], 333 | })) 334 | .unwrap(); 335 | let res: OverallResult = historical_query(&query, &chain).unwrap(); 336 | assert_eq!(res.vo_stats.num_of_objs, 4); 337 | assert!(res.verify(&chain).await.unwrap().0.is_ok()); 338 | } 339 | 340 | #[actix_rt::test] 341 | async fn 
test_data1_incomplete() { 342 | init_logger(); 343 | let mut chain = FakeInMemChain::new(); 344 | let param = Parameter { 345 | v_bit_len: vec![3], 346 | acc_type: acc::Type::ACC2, 347 | use_sk: true, 348 | intra_index: true, 349 | skip_list_max_level: 2, 350 | }; 351 | chain.build_chain(TEST_DATA_1, ¶m).unwrap(); 352 | let query = serde_json::from_value::(json!({ 353 | "start_block": 1, 354 | "end_block": 2, 355 | "range": [ 356 | [1], 357 | [1], 358 | ], 359 | "bool": null, 360 | })) 361 | .unwrap(); 362 | let mut res: OverallResult = historical_query(&query, &chain).unwrap(); 363 | let new_range = Range([vec![Some(1)], vec![Some(2)]]); 364 | res.query.q_range = Some(new_range); 365 | assert!(!res.verify(&chain).await.unwrap().0.is_ok()); 366 | } 367 | -------------------------------------------------------------------------------- /vchain/src/chain/utils.rs: -------------------------------------------------------------------------------- 1 | use super::{IdType, Parameter, RawObject, SetElementType, SkipLstLvlType}; 2 | use crate::acc::{self, Accumulator, G1Affine, G2Affine}; 3 | use crate::set::MultiSet; 4 | use anyhow::{Context, Error, Result}; 5 | use std::collections::{BTreeMap, HashSet}; 6 | use std::fs::File; 7 | use std::io::prelude::*; 8 | use std::io::BufReader; 9 | use std::path::Path; 10 | 11 | #[inline] 12 | pub fn multiset_to_g1(set: &MultiSet, param: &Parameter) -> G1Affine { 13 | match (param.acc_type, param.use_sk) { 14 | (acc::Type::ACC1, true) => acc::Acc1::cal_acc_g1_sk(&set), 15 | (acc::Type::ACC1, false) => acc::Acc1::cal_acc_g1(&set), 16 | (acc::Type::ACC2, true) => acc::Acc2::cal_acc_g1_sk(&set), 17 | (acc::Type::ACC2, false) => acc::Acc2::cal_acc_g1(&set), 18 | } 19 | } 20 | 21 | #[inline] 22 | pub fn multiset_to_g2(set: &MultiSet, param: &Parameter) -> G2Affine { 23 | match (param.acc_type, param.use_sk) { 24 | (acc::Type::ACC1, true) => acc::Acc1::cal_acc_g2_sk(&set), 25 | (acc::Type::ACC1, false) => acc::Acc1::cal_acc_g2(&set), 26 | 
(acc::Type::ACC2, true) => acc::Acc2::cal_acc_g2_sk(&set), 27 | (acc::Type::ACC2, false) => acc::Acc2::cal_acc_g2(&set), 28 | } 29 | } 30 | 31 | #[inline] 32 | pub fn skipped_blocks_num(level: SkipLstLvlType) -> IdType { 33 | 1 << (level + 2) 34 | } 35 | 36 | // input format: block_id sep [ v_data ] sep { w_data } 37 | // sep = \t or space 38 | // v_data = v_1 comma v_2 ... 39 | // w_data = w_1 comma w_2 ... 40 | pub fn load_raw_obj_from_file(path: &Path) -> Result>> { 41 | let mut reader = BufReader::new(File::open(path)?); 42 | let mut buf = String::new(); 43 | reader.read_to_string(&mut buf)?; 44 | load_raw_obj_from_str(&buf) 45 | } 46 | pub fn load_raw_obj_from_str(input: &str) -> Result>> { 47 | let mut res = BTreeMap::new(); 48 | for line in input.lines() { 49 | let line = line.trim(); 50 | if line.is_empty() { 51 | continue; 52 | } 53 | let mut split_str = line.splitn(3, |c| c == '[' || c == ']'); 54 | let block_id: IdType = split_str 55 | .next() 56 | .context(format!("failed to parse line {}", line))? 57 | .trim() 58 | .parse()?; 59 | let v_data: Vec = split_str 60 | .next() 61 | .context(format!("failed to parse line {}", line))? 62 | .trim() 63 | .split(',') 64 | .map(|s| s.trim()) 65 | .filter(|s| !s.is_empty()) 66 | .map(|s| s.parse::().map_err(Error::from)) 67 | .collect::>()?; 68 | let w_data: HashSet = split_str 69 | .next() 70 | .context(format!("failed to parse line {}", line))? 
71 | .trim() 72 | .replace('{', "") 73 | .replace('}', "") 74 | .split(',') 75 | .map(|s| s.trim().to_owned()) 76 | .filter(|s| !s.is_empty()) 77 | .collect(); 78 | 79 | let raw_obj = RawObject { 80 | block_id, 81 | v_data, 82 | w_data, 83 | }; 84 | res.entry(block_id).or_insert_with(Vec::new).push(raw_obj); 85 | } 86 | Ok(res) 87 | } 88 | 89 | #[cfg(test)] 90 | mod tests { 91 | use super::*; 92 | 93 | #[test] 94 | fn test_load_raw_obj() { 95 | let input = "1\t[1,2]\t{a,b}\n2 [ 3, 4 ] { c, d, }\n2\t[ 5, 6 ]\t { e }\n"; 96 | let expect = { 97 | let mut out: BTreeMap> = BTreeMap::new(); 98 | out.insert( 99 | 1, 100 | vec![RawObject { 101 | block_id: 1, 102 | v_data: vec![1, 2], 103 | w_data: ["a".to_owned(), "b".to_owned()].iter().cloned().collect(), 104 | }], 105 | ); 106 | out.insert( 107 | 2, 108 | vec![ 109 | RawObject { 110 | block_id: 2, 111 | v_data: vec![3, 4], 112 | w_data: ["c".to_owned(), "d".to_owned()].iter().cloned().collect(), 113 | }, 114 | RawObject { 115 | block_id: 2, 116 | v_data: vec![5, 6], 117 | w_data: ["e".to_owned()].iter().cloned().collect(), 118 | }, 119 | ], 120 | ); 121 | out 122 | }; 123 | assert_eq!(load_raw_obj_from_str(&input).unwrap(), expect); 124 | } 125 | } 126 | -------------------------------------------------------------------------------- /vchain/src/digest.rs: -------------------------------------------------------------------------------- 1 | use core::fmt; 2 | use serde::{ 3 | de::{Deserializer, SeqAccess, Visitor}, 4 | ser::{SerializeTupleStruct, Serializer}, 5 | Deserialize, Serialize, 6 | }; 7 | 8 | pub const DIGEST_LEN: usize = 32; 9 | 10 | #[derive(Clone, Copy, Eq, PartialEq, Hash, Default)] 11 | pub struct Digest(pub [u8; DIGEST_LEN]); 12 | 13 | impl fmt::Display for Digest { 14 | fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { 15 | write!(f, "{}", hex::encode(&self.0)) 16 | } 17 | } 18 | 19 | impl fmt::Debug for Digest { 20 | fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { 21 | write!(f, "{}", 
hex::encode(&self.0)) 22 | } 23 | } 24 | 25 | // Ref: https://github.com/slowli/hex-buffer-serde 26 | 27 | impl Serialize for Digest { 28 | fn serialize(&self, serializer: S) -> Result 29 | where 30 | S: Serializer, 31 | { 32 | if serializer.is_human_readable() { 33 | serializer.serialize_str(&hex::encode(&self.0)) 34 | } else { 35 | let mut state = serializer.serialize_tuple_struct("Digest", 1)?; 36 | state.serialize_field(&self.0)?; 37 | state.end() 38 | } 39 | } 40 | } 41 | 42 | impl<'de> Deserialize<'de> for Digest { 43 | fn deserialize(deserializer: D) -> Result 44 | where 45 | D: Deserializer<'de>, 46 | { 47 | use serde::de::Error as DeError; 48 | 49 | struct HexVisitor; 50 | 51 | impl<'de> Visitor<'de> for HexVisitor { 52 | type Value = Digest; 53 | 54 | fn expecting(&self, f: &mut fmt::Formatter) -> fmt::Result { 55 | f.write_str("struct Digest") 56 | } 57 | 58 | fn visit_str(self, value: &str) -> Result { 59 | let data = hex::decode(value).map_err(E::custom)?; 60 | if data.len() == DIGEST_LEN { 61 | let mut out = Digest::default(); 62 | out.0.copy_from_slice(&data[..DIGEST_LEN]); 63 | Ok(out) 64 | } else { 65 | Err(E::custom(format!("invalid length: {}", data.len()))) 66 | } 67 | } 68 | } 69 | 70 | struct BytesVisitor; 71 | 72 | impl<'de> Visitor<'de> for BytesVisitor { 73 | type Value = Digest; 74 | 75 | fn expecting(&self, f: &mut fmt::Formatter) -> fmt::Result { 76 | f.write_str("struct Digest") 77 | } 78 | 79 | fn visit_seq(self, mut seq: V) -> Result 80 | where 81 | V: SeqAccess<'de>, 82 | { 83 | let inner = seq 84 | .next_element()? 
85 | .ok_or_else(|| DeError::invalid_length(0, &self))?; 86 | Ok(Digest(inner)) 87 | } 88 | } 89 | 90 | if deserializer.is_human_readable() { 91 | deserializer.deserialize_str(HexVisitor) 92 | } else { 93 | deserializer.deserialize_tuple_struct("Digest", 1, BytesVisitor) 94 | } 95 | } 96 | } 97 | 98 | impl From for Digest { 99 | fn from(input: blake2b_simd::Hash) -> Self { 100 | let data = input.as_bytes(); 101 | debug_assert_eq!(data.len(), DIGEST_LEN); 102 | let mut out = Self::default(); 103 | out.0.copy_from_slice(&data[..DIGEST_LEN]); 104 | out 105 | } 106 | } 107 | 108 | pub fn blake2() -> blake2b_simd::Params { 109 | let mut params = blake2b_simd::Params::new(); 110 | params.hash_length(DIGEST_LEN); 111 | params 112 | } 113 | 114 | pub trait Digestible { 115 | fn to_digest(&self) -> Digest; 116 | } 117 | 118 | impl Digestible for [u8] { 119 | fn to_digest(&self) -> Digest { 120 | Digest::from(blake2().hash(self)) 121 | } 122 | } 123 | 124 | impl Digestible for str { 125 | fn to_digest(&self) -> Digest { 126 | self.as_bytes().to_digest() 127 | } 128 | } 129 | 130 | impl Digestible for String { 131 | fn to_digest(&self) -> Digest { 132 | self.as_bytes().to_digest() 133 | } 134 | } 135 | 136 | macro_rules! 
impl_digestable_for_numeric { 137 | ($x: ty) => { 138 | impl Digestible for $x { 139 | fn to_digest(&self) -> Digest { 140 | self.to_le_bytes().to_digest() 141 | } 142 | } 143 | }; 144 | ($($x: ty),*) => {$(impl_digestable_for_numeric!($x);)*} 145 | } 146 | 147 | impl_digestable_for_numeric!(i8, i16, i32, i64, i128); 148 | impl_digestable_for_numeric!(u8, u16, u32, u64, u128); 149 | impl_digestable_for_numeric!(f32, f64); 150 | 151 | pub fn concat_digest_ref<'a>(input: impl Iterator) -> Digest { 152 | let mut state = blake2().to_state(); 153 | for d in input { 154 | state.update(&d.0); 155 | } 156 | Digest::from(state.finalize()) 157 | } 158 | 159 | pub fn concat_digest(input: impl Iterator) -> Digest { 160 | let mut state = blake2().to_state(); 161 | for d in input { 162 | state.update(&d.0); 163 | } 164 | Digest::from(state.finalize()) 165 | } 166 | 167 | #[cfg(test)] 168 | mod tests { 169 | use super::*; 170 | 171 | #[test] 172 | fn test_to_digest() { 173 | let expect = Digest(*b"\x32\x4d\xcf\x02\x7d\xd4\xa3\x0a\x93\x2c\x44\x1f\x36\x5a\x25\xe8\x6b\x17\x3d\xef\xa4\xb8\xe5\x89\x48\x25\x34\x71\xb8\x1b\x72\xcf"); 174 | assert_eq!(b"hello"[..].to_digest(), expect); 175 | assert_eq!("hello".to_digest(), expect); 176 | assert_eq!("hello".to_owned().to_digest(), expect); 177 | } 178 | 179 | #[test] 180 | fn test_digest_concat() { 181 | let input = vec!["hello".to_digest(), "world!".to_digest()]; 182 | let expect = { 183 | let mut buf: Vec = Vec::new(); 184 | buf.extend_from_slice(&input[0].0[..]); 185 | buf.extend_from_slice(&input[1].0[..]); 186 | buf.as_slice().to_digest() 187 | }; 188 | assert_eq!(concat_digest_ref(input.iter()), expect); 189 | assert_eq!(concat_digest(input.into_iter()), expect); 190 | } 191 | 192 | #[test] 193 | fn test_serde() { 194 | let digest = "hello".to_digest(); 195 | let json = serde_json::to_string_pretty(&digest).unwrap(); 196 | assert_eq!( 197 | json, 198 | "\"324dcf027dd4a30a932c441f365a25e86b173defa4b8e58948253471b81b72cf\"" 199 | ); 
200 | let bin = bincode::serialize(&digest).unwrap(); 201 | assert_eq!( 202 | bin, 203 | b"\x32\x4d\xcf\x02\x7d\xd4\xa3\x0a\x93\x2c\x44\x1f\x36\x5a\x25\xe8\x6b\x17\x3d\xef\xa4\xb8\xe5\x89\x48\x25\x34\x71\xb8\x1b\x72\xcf", 204 | ); 205 | 206 | assert_eq!(serde_json::from_str::(&json).unwrap(), digest); 207 | assert_eq!(bincode::deserialize::(&bin[..]).unwrap(), digest); 208 | } 209 | } 210 | -------------------------------------------------------------------------------- /vchain/src/lib.rs: -------------------------------------------------------------------------------- 1 | #[macro_use] 2 | extern crate lazy_static; 3 | #[macro_use] 4 | extern crate log; 5 | 6 | pub mod digest; 7 | pub use digest::*; 8 | 9 | pub mod set; 10 | pub use set::*; 11 | 12 | pub mod acc; 13 | pub use acc::*; 14 | 15 | pub mod chain; 16 | pub use chain::*; 17 | -------------------------------------------------------------------------------- /vchain/src/set.rs: -------------------------------------------------------------------------------- 1 | use crate::digest::Digestible; 2 | use core::iter::FromIterator; 3 | use core::ops::{Add, BitAnd, BitOr, Deref}; 4 | use serde::{ 5 | de::Deserializer, 6 | ser::{SerializeSeq, SerializeStruct, Serializer}, 7 | Deserialize, Serialize, 8 | }; 9 | use std::collections::HashMap; 10 | 11 | pub trait SetElement: Digestible + Clone + Send + Sync + Eq + PartialEq + core::hash::Hash {} 12 | 13 | impl SetElement for T where 14 | T: Digestible + Clone + Send + Sync + Eq + PartialEq + core::hash::Hash 15 | { 16 | } 17 | 18 | #[derive(Debug, Clone, Eq, PartialEq, Default)] 19 | pub struct MultiSet { 20 | pub(crate) inner: HashMap, 21 | } 22 | 23 | impl MultiSet { 24 | pub fn new() -> Self { 25 | Self { 26 | inner: HashMap::new(), 27 | } 28 | } 29 | 30 | pub fn from_vec(input: Vec) -> Self { 31 | Self::from_iter(input.into_iter()) 32 | } 33 | 34 | pub fn from_tuple_vec(input: Vec<(T, u32)>) -> Self { 35 | Self::from_iter(input.into_iter()) 36 | } 37 | 38 | pub fn 
is_intersected_with(&self, other: &Self) -> bool { 39 | let (a, b) = if self.len() < other.len() { 40 | (self, other) 41 | } else { 42 | (other, self) 43 | }; 44 | a.keys().any(|v| b.contains_key(v)) 45 | } 46 | } 47 | 48 | impl Deref for MultiSet { 49 | type Target = HashMap; 50 | 51 | fn deref(&self) -> &Self::Target { 52 | &self.inner 53 | } 54 | } 55 | 56 | impl<'a, 'b, T: SetElement> Add<&'a MultiSet> for &'b MultiSet { 57 | type Output = MultiSet; 58 | 59 | fn add(self, other: &'a MultiSet) -> MultiSet { 60 | let mut data = HashMap::new(); 61 | for (k, v) in self.iter().chain(other.iter()) { 62 | *data.entry(k.clone()).or_insert(0) += v; 63 | } 64 | MultiSet { inner: data } 65 | } 66 | } 67 | 68 | impl<'a, 'b, T: SetElement> BitOr<&'a MultiSet> for &'b MultiSet { 69 | type Output = MultiSet; 70 | 71 | fn bitor(self, other: &'a MultiSet) -> MultiSet { 72 | let mut data = HashMap::new(); 73 | for k in self.keys().chain(other.keys()) { 74 | data.entry(k.clone()).or_insert(1); 75 | } 76 | MultiSet { inner: data } 77 | } 78 | } 79 | 80 | impl<'a, 'b, T: SetElement> BitAnd<&'a MultiSet> for &'b MultiSet { 81 | type Output = MultiSet; 82 | 83 | fn bitand(self, other: &'a MultiSet) -> MultiSet { 84 | let mut data = HashMap::new(); 85 | for k in self.keys() { 86 | if other.contains_key(k) { 87 | data.insert(k.clone(), 1); 88 | } 89 | } 90 | MultiSet { inner: data } 91 | } 92 | } 93 | 94 | impl FromIterator for MultiSet { 95 | fn from_iter>(iter: I) -> Self { 96 | let mut data = HashMap::new(); 97 | for d in iter { 98 | *data.entry(d).or_insert(0) += 1; 99 | } 100 | Self { inner: data } 101 | } 102 | } 103 | 104 | impl FromIterator<(T, u32)> for MultiSet { 105 | fn from_iter>(iter: I) -> Self { 106 | let mut data = HashMap::new(); 107 | for (k, v) in iter { 108 | *data.entry(k).or_insert(0) += v; 109 | } 110 | Self { inner: data } 111 | } 112 | } 113 | 114 | #[derive(Serialize, Deserialize)] 115 | struct ElementTuple { 116 | obj: T, 117 | cnt: u32, 118 | } 119 | 120 | 
impl Serialize for MultiSet { 121 | fn serialize(&self, serializer: S) -> Result 122 | where 123 | S: Serializer, 124 | { 125 | if serializer.is_human_readable() { 126 | let mut seq = serializer.serialize_seq(Some(self.len()))?; 127 | for (k, v) in self.iter() { 128 | seq.serialize_element(&ElementTuple { 129 | obj: k.clone(), 130 | cnt: *v, 131 | })?; 132 | } 133 | seq.end() 134 | } else { 135 | let mut state = serializer.serialize_struct("MultiSet", 1)?; 136 | state.serialize_field("inner", &self.inner)?; 137 | state.end() 138 | } 139 | } 140 | } 141 | 142 | impl<'de, T: SetElement + Deserialize<'de>> Deserialize<'de> for MultiSet { 143 | fn deserialize(deserializer: D) -> Result 144 | where 145 | D: Deserializer<'de>, 146 | { 147 | if deserializer.is_human_readable() { 148 | let inner: Vec> = Deserialize::deserialize(deserializer)?; 149 | Ok(Self::from_iter(inner.into_iter().map(|v| (v.obj, v.cnt)))) 150 | } else { 151 | let inner: HashMap = Deserialize::deserialize(deserializer)?; 152 | Ok(Self { inner }) 153 | } 154 | } 155 | } 156 | 157 | #[cfg(test)] 158 | mod tests { 159 | use super::*; 160 | 161 | #[test] 162 | fn test_is_intersected_with() { 163 | let s1 = MultiSet::from_vec(vec![1, 2, 3]); 164 | let s2 = MultiSet::from_vec(vec![2, 2, 5]); 165 | let s3 = MultiSet::from_vec(vec![5, 6]); 166 | assert!(s1.is_intersected_with(&s2)); 167 | assert!(!s1.is_intersected_with(&s3)); 168 | } 169 | 170 | #[test] 171 | fn test_set_sum() { 172 | let s1 = MultiSet::from_vec(vec![1, 1, 2]); 173 | let s2 = MultiSet::from_vec(vec![2, 2, 3]); 174 | let s3 = MultiSet::from_tuple_vec(vec![(1, 2), (2, 3), (3, 1)]); 175 | assert_eq!(&s1 + &s2, s3); 176 | } 177 | 178 | #[test] 179 | fn test_set_union() { 180 | let s1 = MultiSet::from_vec(vec![1, 1, 2]); 181 | let s2 = MultiSet::from_vec(vec![2, 2, 3]); 182 | let s3 = MultiSet::from_tuple_vec(vec![(1, 1), (2, 1), (3, 1)]); 183 | assert_eq!(&s1 | &s2, s3); 184 | } 185 | 186 | #[test] 187 | fn test_set_intersection() { 188 | let s1 
= MultiSet::from_vec(vec![1, 1, 2]); 189 | let s2 = MultiSet::from_vec(vec![2, 2, 3]); 190 | let s3 = MultiSet::from_tuple_vec(vec![(2, 1)]); 191 | assert_eq!(&s1 & &s2, s3); 192 | } 193 | 194 | #[test] 195 | fn test_serde() { 196 | let s = MultiSet::from_vec(vec![1, 1, 2]); 197 | let json = serde_json::to_string_pretty(&s).unwrap(); 198 | let bin = bincode::serialize(&s).unwrap(); 199 | assert_eq!(serde_json::from_str::>(&json).unwrap(), s); 200 | assert_eq!(bincode::deserialize::>(&bin[..]).unwrap(), s); 201 | } 202 | } 203 | --------------------------------------------------------------------------------